hexsha
stringlengths 40
40
| size
int64 4
996k
| ext
stringclasses 8
values | lang
stringclasses 1
value | max_stars_repo_path
stringlengths 4
245
| max_stars_repo_name
stringlengths 6
130
| max_stars_repo_head_hexsha
stringlengths 40
40
| max_stars_repo_licenses
listlengths 1
10
| max_stars_count
int64 1
191k
⌀ | max_stars_repo_stars_event_min_datetime
stringlengths 24
24
⌀ | max_stars_repo_stars_event_max_datetime
stringlengths 24
24
⌀ | max_issues_repo_path
stringlengths 4
245
| max_issues_repo_name
stringlengths 6
130
| max_issues_repo_head_hexsha
stringlengths 40
40
| max_issues_repo_licenses
listlengths 1
10
| max_issues_count
int64 1
67k
⌀ | max_issues_repo_issues_event_min_datetime
stringlengths 24
24
⌀ | max_issues_repo_issues_event_max_datetime
stringlengths 24
24
⌀ | max_forks_repo_path
stringlengths 4
245
| max_forks_repo_name
stringlengths 6
130
| max_forks_repo_head_hexsha
stringlengths 40
40
| max_forks_repo_licenses
listlengths 1
10
| max_forks_count
int64 1
105k
⌀ | max_forks_repo_forks_event_min_datetime
stringlengths 24
24
⌀ | max_forks_repo_forks_event_max_datetime
stringlengths 24
24
⌀ | content
stringlengths 4
996k
| avg_line_length
float64 1.33
58.2k
| max_line_length
int64 2
323k
| alphanum_fraction
float64 0
0.97
| content_no_comment
stringlengths 0
946k
| is_comment_constant_removed
bool 2
classes | is_sharp_comment_removed
bool 1
class |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
79062d699b07af1961840333e4032853eda92dee
| 45,068
|
py
|
Python
|
tencentcloud/mongodb/v20180408/models.py
|
qin5506/tencentcloud-sdk-python
|
e9c59d80beabf75fb96456bb8d7a53400346fe9a
|
[
"Apache-2.0"
] | null | null | null |
tencentcloud/mongodb/v20180408/models.py
|
qin5506/tencentcloud-sdk-python
|
e9c59d80beabf75fb96456bb8d7a53400346fe9a
|
[
"Apache-2.0"
] | null | null | null |
tencentcloud/mongodb/v20180408/models.py
|
qin5506/tencentcloud-sdk-python
|
e9c59d80beabf75fb96456bb8d7a53400346fe9a
|
[
"Apache-2.0"
] | null | null | null |
# -*- coding: utf8 -*-
# Copyright (c) 2017-2018 THL A29 Limited, a Tencent company. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import warnings
from tencentcloud.common.abstract_model import AbstractModel
class AssignProjectRequest(AbstractModel):
    """AssignProject request parameters."""

    def __init__(self):
        """
        :param InstanceIds: Instance ID list, e.g. cmgo-p8vnipr5; identical to
            the instance ID shown in the TencentDB console.
        :type InstanceIds: list of str
        :param ProjectId: Project ID.
        :type ProjectId: int
        """
        self.InstanceIds = None
        self.ProjectId = None

    def _deserialize(self, params):
        """Populate this model from the *params* dict; warn on unknown keys."""
        self.InstanceIds = params.get("InstanceIds")
        self.ProjectId = params.get("ProjectId")
        # Surface API/SDK field drift (fixes "memeber_set"/"fileds" typos).
        unknown_keys = set(params) - set(vars(self))
        if unknown_keys:
            warnings.warn("%s fields are useless." % ",".join(unknown_keys), Warning)
class AssignProjectResponse(AbstractModel):
    """AssignProject response parameters."""

    def __init__(self):
        """
        :param FlowIds: List of async task IDs returned for the request.
        :type FlowIds: list of int non-negative
        :param RequestId: Unique request ID, returned with every request.
            Provide it when reporting an issue with this request.
        :type RequestId: str
        """
        self.FlowIds = None
        self.RequestId = None

    def _deserialize(self, params):
        """Populate this model from the *params* dict; warn on unknown keys."""
        self.FlowIds = params.get("FlowIds")
        self.RequestId = params.get("RequestId")
        # Surface API/SDK field drift (fixes "memeber_set"/"fileds" typos).
        unknown_keys = set(params) - set(vars(self))
        if unknown_keys:
            warnings.warn("%s fields are useless." % ",".join(unknown_keys), Warning)
class ClientConnection(AbstractModel):
    """Client connection information: client IP and its connection count."""

    def __init__(self):
        """
        :param IP: IP address of the connected client.
        :type IP: str
        :param Count: Number of connections from that client IP.
        :type Count: int
        """
        self.IP = None
        self.Count = None

    def _deserialize(self, params):
        """Populate this model from the *params* dict; warn on unknown keys."""
        self.IP = params.get("IP")
        self.Count = params.get("Count")
        # Surface API/SDK field drift (fixes "memeber_set"/"fileds" typos).
        unknown_keys = set(params) - set(vars(self))
        if unknown_keys:
            warnings.warn("%s fields are useless." % ",".join(unknown_keys), Warning)
class CreateDBInstanceHourRequest(AbstractModel):
    """CreateDBInstanceHour request parameters."""

    def __init__(self):
        """
        :param Memory: Instance memory size in GB.
        :type Memory: int
        :param Volume: Instance disk size in GB.
        :type Volume: int
        :param ReplicateSetNum: Number of replica sets: 1 means a replica-set
            instance, greater than 1 a sharded cluster; at most 10.
        :type ReplicateSetNum: int
        :param SecondaryNum: Secondary nodes per replica set; currently only 2
            is supported.
        :type SecondaryNum: int
        :param EngineVersion: MongoDB engine version; one of MONGO_3_WT,
            MONGO_3_ROCKS, MONGO_36_WT.
        :type EngineVersion: str
        :param Machine: Instance machine type. GIO: high IO; TGIO: high IO 10GE.
        :type Machine: str
        :param GoodsNum: Number of instances; default 1, min 1, max 10.
        :type GoodsNum: int
        :param Zone: Availability zone, e.g. ap-guangzhou-2.
        :type Zone: str
        :param InstanceRole: Instance role. MASTER: primary; DR: disaster
            recovery; RO: read-only.
        :type InstanceRole: str
        :param InstanceType: Instance type. REPLSET: replica set; SHARD:
            sharded cluster.
        :type InstanceType: int
        :param Encrypt: Whether data is encrypted; only selectable when the
            engine version is MONGO_3_ROCKS.
        :type Encrypt: int
        :param VpcId: VPC ID; defaults to the classic network when omitted.
        :type VpcId: str
        :param SubnetId: VPC subnet ID; required when VpcId is set.
        :type SubnetId: str
        :param ProjectId: Project ID; defaults to the default project.
        :type ProjectId: int
        :param SecurityGroup: Security group parameters.
        :type SecurityGroup: list of str
        """
        self.Memory = None
        self.Volume = None
        self.ReplicateSetNum = None
        self.SecondaryNum = None
        self.EngineVersion = None
        self.Machine = None
        self.GoodsNum = None
        self.Zone = None
        self.InstanceRole = None
        self.InstanceType = None
        self.Encrypt = None
        self.VpcId = None
        self.SubnetId = None
        self.ProjectId = None
        self.SecurityGroup = None

    def _deserialize(self, params):
        """Populate this model from the *params* dict; warn on unknown keys."""
        self.Memory = params.get("Memory")
        self.Volume = params.get("Volume")
        self.ReplicateSetNum = params.get("ReplicateSetNum")
        self.SecondaryNum = params.get("SecondaryNum")
        self.EngineVersion = params.get("EngineVersion")
        self.Machine = params.get("Machine")
        self.GoodsNum = params.get("GoodsNum")
        self.Zone = params.get("Zone")
        self.InstanceRole = params.get("InstanceRole")
        self.InstanceType = params.get("InstanceType")
        self.Encrypt = params.get("Encrypt")
        self.VpcId = params.get("VpcId")
        self.SubnetId = params.get("SubnetId")
        self.ProjectId = params.get("ProjectId")
        self.SecurityGroup = params.get("SecurityGroup")
        # Surface API/SDK field drift (fixes "memeber_set"/"fileds" typos).
        unknown_keys = set(params) - set(vars(self))
        if unknown_keys:
            warnings.warn("%s fields are useless." % ",".join(unknown_keys), Warning)
class CreateDBInstanceHourResponse(AbstractModel):
    """CreateDBInstanceHour response parameters."""

    def __init__(self):
        """
        :param DealId: Order ID.
        :type DealId: str
        :param InstanceIds: IDs of the created instances.
        :type InstanceIds: list of str
        :param RequestId: Unique request ID, returned with every request.
            Provide it when reporting an issue with this request.
        :type RequestId: str
        """
        self.DealId = None
        self.InstanceIds = None
        self.RequestId = None

    def _deserialize(self, params):
        """Populate this model from the *params* dict; warn on unknown keys."""
        self.DealId = params.get("DealId")
        self.InstanceIds = params.get("InstanceIds")
        self.RequestId = params.get("RequestId")
        # Surface API/SDK field drift (fixes "memeber_set"/"fileds" typos).
        unknown_keys = set(params) - set(vars(self))
        if unknown_keys:
            warnings.warn("%s fields are useless." % ",".join(unknown_keys), Warning)
class CreateDBInstanceRequest(AbstractModel):
    """CreateDBInstance request parameters."""

    def __init__(self):
        """
        :param SecondaryNum: Secondary nodes per replica set.
        :type SecondaryNum: int
        :param Memory: Instance memory size in GB.
        :type Memory: int
        :param Volume: Instance disk size in GB.
        :type Volume: int
        :param MongoVersion: Version; one of MONGO_3_WT, MONGO_3_ROCKS,
            MONGO_36_WT.
        :type MongoVersion: str
        :param MachineCode: Machine type. GIO: high IO; TGIO: high IO 10GE.
        :type MachineCode: str
        :param GoodsNum: Number of instances; default 1, min 1, max 10.
        :type GoodsNum: int
        :param Zone: Availability zone of the instance, e.g. ap-guangzhou-2.
        :type Zone: str
        :param TimeSpan: Purchase duration in months.
        :type TimeSpan: int
        :param Password: Instance password.
        :type Password: str
        :param ProjectId: Project ID; defaults to the default project.
        :type ProjectId: int
        :param SecurityGroup: Security group parameters.
        :type SecurityGroup: list of str
        :param UniqVpcId: VPC ID; defaults to the classic network when omitted.
        :type UniqVpcId: str
        :param UniqSubnetId: VPC subnet ID; required when UniqVpcId is set.
        :type UniqSubnetId: str
        """
        self.SecondaryNum = None
        self.Memory = None
        self.Volume = None
        self.MongoVersion = None
        self.MachineCode = None
        self.GoodsNum = None
        self.Zone = None
        self.TimeSpan = None
        self.Password = None
        self.ProjectId = None
        self.SecurityGroup = None
        self.UniqVpcId = None
        self.UniqSubnetId = None

    def _deserialize(self, params):
        """Populate this model from the *params* dict; warn on unknown keys."""
        self.SecondaryNum = params.get("SecondaryNum")
        self.Memory = params.get("Memory")
        self.Volume = params.get("Volume")
        self.MongoVersion = params.get("MongoVersion")
        self.MachineCode = params.get("MachineCode")
        self.GoodsNum = params.get("GoodsNum")
        self.Zone = params.get("Zone")
        self.TimeSpan = params.get("TimeSpan")
        self.Password = params.get("Password")
        self.ProjectId = params.get("ProjectId")
        self.SecurityGroup = params.get("SecurityGroup")
        self.UniqVpcId = params.get("UniqVpcId")
        self.UniqSubnetId = params.get("UniqSubnetId")
        # Surface API/SDK field drift (fixes "memeber_set"/"fileds" typos).
        unknown_keys = set(params) - set(vars(self))
        if unknown_keys:
            warnings.warn("%s fields are useless." % ",".join(unknown_keys), Warning)
class CreateDBInstanceResponse(AbstractModel):
    """CreateDBInstance response parameters."""

    def __init__(self):
        """
        :param DealId: Order ID.
        :type DealId: str
        :param InstanceIds: IDs of the created instances.
        :type InstanceIds: list of str
        :param RequestId: Unique request ID, returned with every request.
            Provide it when reporting an issue with this request.
        :type RequestId: str
        """
        self.DealId = None
        self.InstanceIds = None
        self.RequestId = None

    def _deserialize(self, params):
        """Populate this model from the *params* dict; warn on unknown keys."""
        self.DealId = params.get("DealId")
        self.InstanceIds = params.get("InstanceIds")
        self.RequestId = params.get("RequestId")
        # Surface API/SDK field drift (fixes "memeber_set"/"fileds" typos).
        unknown_keys = set(params) - set(vars(self))
        if unknown_keys:
            warnings.warn("%s fields are useless." % ",".join(unknown_keys), Warning)
class DescribeClientConnectionsRequest(AbstractModel):
    """DescribeClientConnections request parameters."""

    def __init__(self):
        """
        :param InstanceId: Instance ID, e.g. cmgo-p8vnipr5; identical to the
            instance ID shown in the TencentDB console.
        :type InstanceId: str
        """
        self.InstanceId = None

    def _deserialize(self, params):
        """Populate this model from the *params* dict; warn on unknown keys."""
        self.InstanceId = params.get("InstanceId")
        # Surface API/SDK field drift (fixes "memeber_set"/"fileds" typos).
        unknown_keys = set(params) - set(vars(self))
        if unknown_keys:
            warnings.warn("%s fields are useless." % ",".join(unknown_keys), Warning)
class DescribeClientConnectionsResponse(AbstractModel):
    """DescribeClientConnections response parameters."""

    def __init__(self):
        """
        :param Clients: Client connection info: client IP and connection count
            per IP. Note: may be null when no valid value is available.
        :type Clients: list of ClientConnection
        :param RequestId: Unique request ID, returned with every request.
            Provide it when reporting an issue with this request.
        :type RequestId: str
        """
        self.Clients = None
        self.RequestId = None

    def _deserialize(self, params):
        """Populate this model from the *params* dict; warn on unknown keys."""
        if params.get("Clients") is not None:
            # Each element is itself a model dict; deserialize recursively.
            self.Clients = []
            for item in params.get("Clients"):
                obj = ClientConnection()
                obj._deserialize(item)
                self.Clients.append(obj)
        self.RequestId = params.get("RequestId")
        # Surface API/SDK field drift (fixes "memeber_set"/"fileds" typos).
        unknown_keys = set(params) - set(vars(self))
        if unknown_keys:
            warnings.warn("%s fields are useless." % ",".join(unknown_keys), Warning)
class DescribeDBInstancesRequest(AbstractModel):
    """DescribeDBInstances request parameters."""

    def __init__(self):
        """
        :param InstanceIds: Instance ID list, e.g. cmgo-p8vnipr5; identical to
            the instance IDs shown in the TencentDB console.
        :type InstanceIds: list of str
        :param InstanceType: Instance type. 0: all; 1: formal; 2: temporary;
            3: read-only; -1: formal + read-only + disaster recovery.
        :type InstanceType: int
        :param ClusterType: Cluster type. 0: replica set; 1: sharded; -1: all.
        :type ClusterType: int
        :param Status: Instance status. 0: to be initialized; 1: workflow in
            progress; 2: valid; -2: expired.
        :type Status: list of int
        :param VpcId: VPC ID; omit for the classic network.
        :type VpcId: str
        :param SubnetId: VPC subnet ID; omit for the classic network. When
            set, the matching VpcId must also be set.
        :type SubnetId: str
        :param PayMode: Billing type. 0: pay-as-you-go; 1: monthly
            subscription; -1: both.
        :type PayMode: int
        :param Limit: Number of results per request; min 1, max 100,
            default 20.
        :type Limit: int
        :param Offset: Offset; default 0.
        :type Offset: int
        :param OrderBy: Sort field; one of "ProjectId", "InstanceName",
            "CreateTime". Ascending by default.
        :type OrderBy: str
        :param OrderByType: Sort order; "ASC" or "DESC".
        :type OrderByType: str
        """
        self.InstanceIds = None
        self.InstanceType = None
        self.ClusterType = None
        self.Status = None
        self.VpcId = None
        self.SubnetId = None
        self.PayMode = None
        self.Limit = None
        self.Offset = None
        self.OrderBy = None
        self.OrderByType = None

    def _deserialize(self, params):
        """Populate this model from the *params* dict; warn on unknown keys."""
        self.InstanceIds = params.get("InstanceIds")
        self.InstanceType = params.get("InstanceType")
        self.ClusterType = params.get("ClusterType")
        self.Status = params.get("Status")
        self.VpcId = params.get("VpcId")
        self.SubnetId = params.get("SubnetId")
        self.PayMode = params.get("PayMode")
        self.Limit = params.get("Limit")
        self.Offset = params.get("Offset")
        self.OrderBy = params.get("OrderBy")
        self.OrderByType = params.get("OrderByType")
        # Surface API/SDK field drift (fixes "memeber_set"/"fileds" typos).
        unknown_keys = set(params) - set(vars(self))
        if unknown_keys:
            warnings.warn("%s fields are useless." % ",".join(unknown_keys), Warning)
class DescribeDBInstancesResponse(AbstractModel):
    """DescribeDBInstances response parameters."""

    def __init__(self):
        """
        :param TotalCount: Total number of instances matching the query.
        :type TotalCount: int
        :param InstanceDetails: Detailed instance information.
        :type InstanceDetails: list of MongoDBInstanceDetail
        :param RequestId: Unique request ID, returned with every request.
            Provide it when reporting an issue with this request.
        :type RequestId: str
        """
        self.TotalCount = None
        self.InstanceDetails = None
        self.RequestId = None

    def _deserialize(self, params):
        """Populate this model from the *params* dict; warn on unknown keys."""
        self.TotalCount = params.get("TotalCount")
        if params.get("InstanceDetails") is not None:
            # Each element is itself a model dict; deserialize recursively.
            self.InstanceDetails = []
            for item in params.get("InstanceDetails"):
                obj = MongoDBInstanceDetail()
                obj._deserialize(item)
                self.InstanceDetails.append(obj)
        self.RequestId = params.get("RequestId")
        # Surface API/SDK field drift (fixes "memeber_set"/"fileds" typos).
        unknown_keys = set(params) - set(vars(self))
        if unknown_keys:
            warnings.warn("%s fields are useless." % ",".join(unknown_keys), Warning)
class DescribeSlowLogRequest(AbstractModel):
    """DescribeSlowLog request parameters."""

    def __init__(self):
        """
        :param InstanceId: Instance ID, e.g. cmgo-p8vnipr5; identical to the
            instance ID shown in the TencentDB console.
        :type InstanceId: str
        :param StartTime: Slow-log start time in yyyy-mm-dd hh:mm:ss format,
            e.g. 2019-06-01 10:00:00. The query window must not exceed 24
            hours; only logs from the last 7 days can be queried.
        :type StartTime: str
        :param EndTime: Slow-log end time in yyyy-mm-dd hh:mm:ss format,
            e.g. 2019-06-02 12:00:00. The query window must not exceed 24
            hours; only logs from the last 7 days can be queried.
        :type EndTime: str
        :param SlowMS: Execution-time threshold in milliseconds; only slow
            logs above it are returned. Minimum 100 ms.
        :type SlowMS: int
        :param Offset: Offset; min 0, max 10000, default 0.
        :type Offset: int
        :param Limit: Page size; min 1, max 100, default 20.
        :type Limit: int
        """
        self.InstanceId = None
        self.StartTime = None
        self.EndTime = None
        self.SlowMS = None
        self.Offset = None
        self.Limit = None

    def _deserialize(self, params):
        """Populate this model from the *params* dict; warn on unknown keys."""
        self.InstanceId = params.get("InstanceId")
        self.StartTime = params.get("StartTime")
        self.EndTime = params.get("EndTime")
        self.SlowMS = params.get("SlowMS")
        self.Offset = params.get("Offset")
        self.Limit = params.get("Limit")
        # Surface API/SDK field drift (fixes "memeber_set"/"fileds" typos).
        unknown_keys = set(params) - set(vars(self))
        if unknown_keys:
            warnings.warn("%s fields are useless." % ",".join(unknown_keys), Warning)
class DescribeSlowLogResponse(AbstractModel):
    """DescribeSlowLog response parameters."""

    def __init__(self):
        """
        :param TotalCount: Total number of slow-query log entries matching
            the query.
        :type TotalCount: int
        :param SlowLogList: Details of the matching slow-query log entries.
        :type SlowLogList: list of str
        :param RequestId: Unique request ID, returned with every request.
            Provide it when reporting an issue with this request.
        :type RequestId: str
        """
        self.TotalCount = None
        self.SlowLogList = None
        self.RequestId = None

    def _deserialize(self, params):
        """Populate this model from the *params* dict; warn on unknown keys."""
        self.TotalCount = params.get("TotalCount")
        self.SlowLogList = params.get("SlowLogList")
        self.RequestId = params.get("RequestId")
        # Surface API/SDK field drift (fixes "memeber_set"/"fileds" typos).
        unknown_keys = set(params) - set(vars(self))
        if unknown_keys:
            warnings.warn("%s fields are useless." % ",".join(unknown_keys), Warning)
class DescribeSpecInfoRequest(AbstractModel):
    """DescribeSpecInfo request parameters."""

    def __init__(self):
        """
        :param Zone: Availability zone.
        :type Zone: str
        """
        self.Zone = None

    def _deserialize(self, params):
        """Populate this model from the *params* dict; warn on unknown keys."""
        self.Zone = params.get("Zone")
        # Surface API/SDK field drift (fixes "memeber_set"/"fileds" typos).
        unknown_keys = set(params) - set(vars(self))
        if unknown_keys:
            warnings.warn("%s fields are useless." % ",".join(unknown_keys), Warning)
class DescribeSpecInfoResponse(AbstractModel):
    """DescribeSpecInfo response parameters."""

    def __init__(self):
        """
        :param SpecInfoList: List of purchasable instance specifications.
        :type SpecInfoList: list of SpecificationInfo
        :param RequestId: Unique request ID, returned with every request.
            Provide it when reporting an issue with this request.
        :type RequestId: str
        """
        self.SpecInfoList = None
        self.RequestId = None

    def _deserialize(self, params):
        """Populate this model from the *params* dict; warn on unknown keys."""
        if params.get("SpecInfoList") is not None:
            # Each element is itself a model dict; deserialize recursively.
            self.SpecInfoList = []
            for item in params.get("SpecInfoList"):
                obj = SpecificationInfo()
                obj._deserialize(item)
                self.SpecInfoList.append(obj)
        self.RequestId = params.get("RequestId")
        # Surface API/SDK field drift (fixes "memeber_set"/"fileds" typos).
        unknown_keys = set(params) - set(vars(self))
        if unknown_keys:
            warnings.warn("%s fields are useless." % ",".join(unknown_keys), Warning)
class MongoDBInstance(AbstractModel):
    """Basic instance information."""

    def __init__(self):
        """
        :param InstanceId: Instance ID.
        :type InstanceId: str
        :param Region: Region information.
        :type Region: str
        """
        self.InstanceId = None
        self.Region = None

    def _deserialize(self, params):
        """Populate this model from the *params* dict; warn on unknown keys."""
        self.InstanceId = params.get("InstanceId")
        self.Region = params.get("Region")
        # Surface API/SDK field drift (fixes "memeber_set"/"fileds" typos).
        unknown_keys = set(params) - set(vars(self))
        if unknown_keys:
            warnings.warn("%s fields are useless." % ",".join(unknown_keys), Warning)
class MongoDBInstanceDetail(AbstractModel):
    """Detailed instance information."""

    def __init__(self):
        """
        :param InstanceId: Instance ID.
        :type InstanceId: str
        :param InstanceName: Instance name.
        :type InstanceName: str
        :param PayMode: Billing type. 1: monthly subscription; 0: pay-as-you-go.
        :type PayMode: int
        :param ProjectId: Project ID.
        :type ProjectId: int
        :param ClusterType: Cluster type. 0: replica set; 1: sharded.
        :type ClusterType: int
        :param Region: Region information.
        :type Region: str
        :param Zone: Availability zone information.
        :type Zone: str
        :param NetType: Network type. 0: classic network; 1: VPC.
        :type NetType: int
        :param VpcId: VPC ID.
        :type VpcId: str
        :param SubnetId: VPC subnet ID.
        :type SubnetId: str
        :param Status: Instance status. 0: to be initialized; 1: workflow in
            progress; 2: running; -2: expired.
        :type Status: int
        :param Vip: Instance IP.
        :type Vip: str
        :param Vport: Port number.
        :type Vport: int
        :param CreateTime: Instance creation time.
        :type CreateTime: str
        :param DeadLine: Instance expiration time.
        :type DeadLine: str
        :param MongoVersion: Instance version.
        :type MongoVersion: str
        :param Memory: Instance memory spec in MB.
        :type Memory: int
        :param Volume: Instance disk spec in MB.
        :type Volume: int
        :param CpuNum: Number of CPU cores.
        :type CpuNum: int
        :param MachineType: Instance machine type.
        :type MachineType: str
        :param SecondaryNum: Number of secondary nodes.
        :type SecondaryNum: int
        :param ReplicationSetNum: Number of shards.
        :type ReplicationSetNum: int
        :param AutoRenewFlag: Auto-renewal flag. 0: manual renewal; 1: auto
            renewal; 2: confirmed non-renewal.
        :type AutoRenewFlag: int
        :param UsedVolume: Used capacity in MB.
        :type UsedVolume: int
        :param MaintenanceStart: Maintenance window start time.
        :type MaintenanceStart: str
        :param MaintenanceEnd: Maintenance window end time.
        :type MaintenanceEnd: str
        :param ReplicaSets: Shard information.
        :type ReplicaSets: list of MongodbShardInfo
        :param ReadonlyInstances: Read-only instances.
            Note: may be null when no valid value is available.
        :type ReadonlyInstances: list of MongoDBInstance
        :param StandbyInstances: Disaster-recovery instances.
            Note: may be null when no valid value is available.
        :type StandbyInstances: list of MongoDBInstance
        :param CloneInstances: Temporary instances.
            Note: may be null when no valid value is available.
        :type CloneInstances: list of MongoDBInstance
        :param RelatedInstance: Associated instance: for a formal instance,
            its temporary instance; for a temporary instance, its formal
            instance; for a read-only/DR instance, its primary instance.
            Note: may be null when no valid value is available.
        :type RelatedInstance: :class:`tencentcloud.mongodb.v20180408.models.MongoDBInstance`
        :param Tags: Instance tag collection.
            Note: may be null when no valid value is available.
        :type Tags: list of TagInfo
        :param InstanceVer: Instance marker.
        :type InstanceVer: int
        :param ClusterVer: Cluster marker.
        :type ClusterVer: int
        :param Protocol: Protocol. 1: mongodb; 2: dynamodb.
        :type Protocol: int
        :param InstanceType: Instance type. 1: formal; 2: temporary;
            3: read-only; 4: disaster recovery.
        :type InstanceType: int
        :param InstanceStatusDesc: Instance status description.
        :type InstanceStatusDesc: str
        :param RealInstanceId: Physical instance ID. A rolled-back and
            replaced instance has different InstanceId and RealInstanceId;
            use the physical ID e.g. when fetching monitoring data.
        :type RealInstanceId: str
        """
        self.InstanceId = None
        self.InstanceName = None
        self.PayMode = None
        self.ProjectId = None
        self.ClusterType = None
        self.Region = None
        self.Zone = None
        self.NetType = None
        self.VpcId = None
        self.SubnetId = None
        self.Status = None
        self.Vip = None
        self.Vport = None
        self.CreateTime = None
        self.DeadLine = None
        self.MongoVersion = None
        self.Memory = None
        self.Volume = None
        self.CpuNum = None
        self.MachineType = None
        self.SecondaryNum = None
        self.ReplicationSetNum = None
        self.AutoRenewFlag = None
        self.UsedVolume = None
        self.MaintenanceStart = None
        self.MaintenanceEnd = None
        self.ReplicaSets = None
        self.ReadonlyInstances = None
        self.StandbyInstances = None
        self.CloneInstances = None
        self.RelatedInstance = None
        self.Tags = None
        self.InstanceVer = None
        self.ClusterVer = None
        self.Protocol = None
        self.InstanceType = None
        self.InstanceStatusDesc = None
        self.RealInstanceId = None

    def _deserialize(self, params):
        """Populate this model from the *params* dict; warn on unknown keys."""
        # Scalar fields are copied verbatim.
        for key in ("InstanceId", "InstanceName", "PayMode", "ProjectId",
                    "ClusterType", "Region", "Zone", "NetType", "VpcId",
                    "SubnetId", "Status", "Vip", "Vport", "CreateTime",
                    "DeadLine", "MongoVersion", "Memory", "Volume", "CpuNum",
                    "MachineType", "SecondaryNum", "ReplicationSetNum",
                    "AutoRenewFlag", "UsedVolume", "MaintenanceStart",
                    "MaintenanceEnd", "InstanceVer", "ClusterVer", "Protocol",
                    "InstanceType", "InstanceStatusDesc", "RealInstanceId"):
            setattr(self, key, params.get(key))
        # List fields containing nested models are deserialized recursively.
        for key, model in (("ReplicaSets", MongodbShardInfo),
                           ("ReadonlyInstances", MongoDBInstance),
                           ("StandbyInstances", MongoDBInstance),
                           ("CloneInstances", MongoDBInstance),
                           ("Tags", TagInfo)):
            if params.get(key) is not None:
                items = []
                for item in params.get(key):
                    obj = model()
                    obj._deserialize(item)
                    items.append(obj)
                setattr(self, key, items)
        # Singular nested model.
        if params.get("RelatedInstance") is not None:
            self.RelatedInstance = MongoDBInstance()
            self.RelatedInstance._deserialize(params.get("RelatedInstance"))
        # Surface API/SDK field drift (fixes "memeber_set"/"fileds" typos).
        unknown_keys = set(params) - set(vars(self))
        if unknown_keys:
            warnings.warn("%s fields are useless." % ",".join(unknown_keys), Warning)
class MongodbShardInfo(AbstractModel):
    """Instance shard details."""

    def __init__(self):
        """
        :param UsedVolume: Capacity already used by the shard.
        :type UsedVolume: float
        :param ReplicaSetId: Shard ID.
        :type ReplicaSetId: str
        :param ReplicaSetName: Shard name.
        :type ReplicaSetName: str
        :param Memory: Shard memory spec in MB.
        :type Memory: int
        :param Volume: Shard disk spec in MB.
        :type Volume: int
        :param OplogSize: Shard oplog size in MB.
        :type OplogSize: int
        :param SecondaryNum: Number of secondary nodes in the shard.
        :type SecondaryNum: int
        :param RealReplicaSetId: Physical shard ID.
        :type RealReplicaSetId: str
        """
        self.UsedVolume = None
        self.ReplicaSetId = None
        self.ReplicaSetName = None
        self.Memory = None
        self.Volume = None
        self.OplogSize = None
        self.SecondaryNum = None
        self.RealReplicaSetId = None

    def _deserialize(self, params):
        """Populate this model from the *params* dict; warn on unknown keys."""
        self.UsedVolume = params.get("UsedVolume")
        self.ReplicaSetId = params.get("ReplicaSetId")
        self.ReplicaSetName = params.get("ReplicaSetName")
        self.Memory = params.get("Memory")
        self.Volume = params.get("Volume")
        self.OplogSize = params.get("OplogSize")
        self.SecondaryNum = params.get("SecondaryNum")
        self.RealReplicaSetId = params.get("RealReplicaSetId")
        # Surface API/SDK field drift (fixes "memeber_set"/"fileds" typos).
        unknown_keys = set(params) - set(vars(self))
        if unknown_keys:
            warnings.warn("%s fields are useless." % ",".join(unknown_keys), Warning)
class RenameInstanceRequest(AbstractModel):
    """RenameInstance request parameters."""

    def __init__(self):
        """
        :param InstanceId: Instance ID, e.g. cmgo-p8vnipr5; identical to the
            instance ID shown in the TencentDB console.
        :type InstanceId: str
        :param NewName: New instance name.
        :type NewName: str
        """
        self.InstanceId = None
        self.NewName = None

    def _deserialize(self, params):
        """Populate this model from the *params* dict; warn on unknown keys."""
        self.InstanceId = params.get("InstanceId")
        self.NewName = params.get("NewName")
        # Surface API/SDK field drift (fixes "memeber_set"/"fileds" typos).
        unknown_keys = set(params) - set(vars(self))
        if unknown_keys:
            warnings.warn("%s fields are useless." % ",".join(unknown_keys), Warning)
class RenameInstanceResponse(AbstractModel):
    """RenameInstance response parameters."""

    def __init__(self):
        """
        :param RequestId: Unique request ID, returned with every request.
            Provide it when reporting an issue with this request.
        :type RequestId: str
        """
        self.RequestId = None

    def _deserialize(self, params):
        """Populate this model from the *params* dict; warn on unknown keys."""
        self.RequestId = params.get("RequestId")
        # Surface API/SDK field drift (fixes "memeber_set"/"fileds" typos).
        unknown_keys = set(params) - set(vars(self))
        if unknown_keys:
            warnings.warn("%s fields are useless." % ",".join(unknown_keys), Warning)
class SetAutoRenewRequest(AbstractModel):
    """SetAutoRenew request parameters."""

    def __init__(self):
        """
        :param InstanceIds: Instance ID list, e.g. cmgo-p8vnipr5; identical to
            the instance IDs shown in the TencentDB console.
        :type InstanceIds: list of str
        :param AutoRenewFlag: Renewal option. 0: manual renewal; 1: auto
            renewal; 2: confirmed non-renewal.
        :type AutoRenewFlag: int
        """
        self.InstanceIds = None
        self.AutoRenewFlag = None

    def _deserialize(self, params):
        """Populate this model from the *params* dict; warn on unknown keys."""
        self.InstanceIds = params.get("InstanceIds")
        self.AutoRenewFlag = params.get("AutoRenewFlag")
        # Surface API/SDK field drift (fixes "memeber_set"/"fileds" typos).
        unknown_keys = set(params) - set(vars(self))
        if unknown_keys:
            warnings.warn("%s fields are useless." % ",".join(unknown_keys), Warning)
class SetAutoRenewResponse(AbstractModel):
    """SetAutoRenew response parameters."""

    def __init__(self):
        """
        :param RequestId: Unique request ID, returned with every request.
            Provide it when reporting an issue with this request.
        :type RequestId: str
        """
        self.RequestId = None

    def _deserialize(self, params):
        """Populate this model from the *params* dict; warn on unknown keys."""
        self.RequestId = params.get("RequestId")
        # Surface API/SDK field drift (fixes "memeber_set"/"fileds" typos).
        unknown_keys = set(params) - set(vars(self))
        if unknown_keys:
            warnings.warn("%s fields are useless." % ",".join(unknown_keys), Warning)
class SetPasswordRequest(AbstractModel):
    """SetPassword request parameters."""

    def __init__(self):
        """
        :param InstanceId: Instance ID, e.g. cmgo-p8vnipr5; identical to the
            instance ID shown in the TencentDB console.
        :type InstanceId: str
        :param UserName: Instance account name.
        :type UserName: str
        :param Password: New password; must contain at least two of letters,
            digits and symbols (!@#%^*()), and be 8-16 characters long.
        :type Password: str
        """
        self.InstanceId = None
        self.UserName = None
        self.Password = None

    def _deserialize(self, params):
        """Populate this model from the *params* dict; warn on unknown keys."""
        self.InstanceId = params.get("InstanceId")
        self.UserName = params.get("UserName")
        self.Password = params.get("Password")
        # Surface API/SDK field drift (fixes "memeber_set"/"fileds" typos).
        unknown_keys = set(params) - set(vars(self))
        if unknown_keys:
            warnings.warn("%s fields are useless." % ",".join(unknown_keys), Warning)
class SetPasswordResponse(AbstractModel):
    """SetPassword response parameters."""

    def __init__(self):
        """
        :param FlowId: ID of the returned async task.
        :type FlowId: int
        :param RequestId: Unique request ID, returned with every request.
            Provide it when reporting an issue with this request.
        :type RequestId: str
        """
        self.FlowId = None
        self.RequestId = None

    def _deserialize(self, params):
        """Populate this model from the *params* dict; warn on unknown keys."""
        self.FlowId = params.get("FlowId")
        self.RequestId = params.get("RequestId")
        # Surface API/SDK field drift (fixes "memeber_set"/"fileds" typos).
        unknown_keys = set(params) - set(vars(self))
        if unknown_keys:
            warnings.warn("%s fields are useless." % ",".join(unknown_keys), Warning)
class SpecItem(AbstractModel):
    """Purchasable MongoDB specification."""

    def __init__(self):
        """
        :param SpecCode: Specification code.
        :type SpecCode: str
        :param Status: Availability flag. 0: off sale; 1: on sale.
        :type Status: int
        :param MachineType: Machine type. 0: HIO; 4: HIO10G.
        :type MachineType: str
        :param Cpu: Number of CPU cores.
        :type Cpu: int
        :param Memory: Memory spec in MB.
        :type Memory: int
        :param DefaultStorage: Default disk spec in MB.
        :type DefaultStorage: int
        :param MaxStorage: Maximum disk spec in MB.
        :type MaxStorage: int
        :param MinStorage: Minimum disk spec in MB.
        :type MinStorage: int
        :param Qps: Supported QPS.
        :type Qps: int
        :param Conns: Connection limit.
        :type Conns: int
        :param MongoVersionCode: MongoDB version code.
        :type MongoVersionCode: str
        :param MongoVersionValue: MongoDB version number.
        :type MongoVersionValue: int
        :param Version: MongoDB version number (short form).
        :type Version: str
        :param EngineName: Storage engine.
        :type EngineName: str
        :param ClusterType: Cluster type. 1: sharded cluster; 0: replica set.
        :type ClusterType: int
        :param MinNodeNum: Minimum secondary nodes per replica set.
        :type MinNodeNum: int
        :param MaxNodeNum: Maximum secondary nodes per replica set.
        :type MaxNodeNum: int
        :param MinReplicateSetNum: Minimum number of shards.
        :type MinReplicateSetNum: int
        :param MaxReplicateSetNum: Maximum number of shards.
        :type MaxReplicateSetNum: int
        :param MinReplicateSetNodeNum: Minimum secondary nodes per shard.
        :type MinReplicateSetNodeNum: int
        :param MaxReplicateSetNodeNum: Maximum secondary nodes per shard.
        :type MaxReplicateSetNodeNum: int
        """
        self.SpecCode = None
        self.Status = None
        self.MachineType = None
        self.Cpu = None
        self.Memory = None
        self.DefaultStorage = None
        self.MaxStorage = None
        self.MinStorage = None
        self.Qps = None
        self.Conns = None
        self.MongoVersionCode = None
        self.MongoVersionValue = None
        self.Version = None
        self.EngineName = None
        self.ClusterType = None
        self.MinNodeNum = None
        self.MaxNodeNum = None
        self.MinReplicateSetNum = None
        self.MaxReplicateSetNum = None
        self.MinReplicateSetNodeNum = None
        self.MaxReplicateSetNodeNum = None

    def _deserialize(self, params):
        """Populate this model from the *params* dict; warn on unknown keys."""
        # All fields are plain scalars; copy each verbatim.
        for key in ("SpecCode", "Status", "MachineType", "Cpu", "Memory",
                    "DefaultStorage", "MaxStorage", "MinStorage", "Qps",
                    "Conns", "MongoVersionCode", "MongoVersionValue",
                    "Version", "EngineName", "ClusterType", "MinNodeNum",
                    "MaxNodeNum", "MinReplicateSetNum", "MaxReplicateSetNum",
                    "MinReplicateSetNodeNum", "MaxReplicateSetNodeNum"):
            setattr(self, key, params.get(key))
        # Surface API/SDK field drift (fixes "memeber_set"/"fileds" typos).
        unknown_keys = set(params) - set(vars(self))
        if unknown_keys:
            warnings.warn("%s fields are useless." % ",".join(unknown_keys), Warning)
class SpecificationInfo(AbstractModel):
    """Sellable instance specifications for one region/zone.

    Attributes: Region (str), Zone (str), SpecItems (list of SpecItem).
    """

    def __init__(self):
        self.Region = None
        self.Zone = None
        self.SpecItems = None

    def _deserialize(self, params):
        self.Region = params.get("Region")
        self.Zone = params.get("Zone")
        raw_items = params.get("SpecItems")
        if raw_items is not None:
            self.SpecItems = []
            for entry in raw_items:
                spec = SpecItem()
                spec._deserialize(entry)
                self.SpecItems.append(spec)
        # Warn about response keys with no matching attribute (text verbatim).
        leftover = set(params) - vars(self).keys()
        if leftover:
            warnings.warn("%s fileds are useless." % ",".join(leftover), Warning)
class TagInfo(AbstractModel):
    """Instance tag: a TagKey/TagValue string pair."""

    def __init__(self):
        self.TagKey = None
        self.TagValue = None

    def _deserialize(self, params):
        self.TagKey = params.get("TagKey")
        self.TagValue = params.get("TagValue")
        # Warn about response keys with no matching attribute (text verbatim).
        leftover = set(params) - vars(self).keys()
        if leftover:
            warnings.warn("%s fileds are useless." % ",".join(leftover), Warning)
class TerminateDBInstanceRequest(AbstractModel):
    """TerminateDBInstance request body.

    InstanceId: instance ID, e.g. ``cmgo-p8vnipr5``.
    """

    def __init__(self):
        self.InstanceId = None

    def _deserialize(self, params):
        self.InstanceId = params.get("InstanceId")
        # Warn about request keys with no matching attribute (text verbatim).
        leftover = set(params) - vars(self).keys()
        if leftover:
            warnings.warn("%s fileds are useless." % ",".join(leftover), Warning)
class TerminateDBInstanceResponse(AbstractModel):
    """TerminateDBInstance response body.

    AsyncRequestId: order ID indicating the terminate request succeeded.
    RequestId: unique request ID, returned with every response.
    """

    def __init__(self):
        self.AsyncRequestId = None
        self.RequestId = None

    def _deserialize(self, params):
        self.AsyncRequestId = params.get("AsyncRequestId")
        self.RequestId = params.get("RequestId")
        # Warn about response keys with no matching attribute (text verbatim).
        leftover = set(params) - vars(self).keys()
        if leftover:
            warnings.warn("%s fileds are useless." % ",".join(leftover), Warning)
class UpgradeDBInstanceHourRequest(AbstractModel):
    """UpgradeDBInstanceHour request body (pay-as-you-go upgrade).

    InstanceId: instance ID, e.g. ``cmgo-p8vnipr5``.
    Memory/Volume: target memory / disk size, in GB.
    OplogSize: target oplog size in GB; per the API docs it defaults to
    10% of disk and must stay within 10%-90% of disk.
    """

    def __init__(self):
        for field in ("InstanceId", "Memory", "Volume", "OplogSize"):
            setattr(self, field, None)

    def _deserialize(self, params):
        for field in ("InstanceId", "Memory", "Volume", "OplogSize"):
            setattr(self, field, params.get(field))
        # Warn about request keys with no matching attribute (text verbatim).
        leftover = set(params) - vars(self).keys()
        if leftover:
            warnings.warn("%s fileds are useless." % ",".join(leftover), Warning)
class UpgradeDBInstanceHourResponse(AbstractModel):
    """UpgradeDBInstanceHour response body: DealId (order) + RequestId."""

    def __init__(self):
        self.DealId = None
        self.RequestId = None

    def _deserialize(self, params):
        self.DealId = params.get("DealId")
        self.RequestId = params.get("RequestId")
        # Warn about response keys with no matching attribute (text verbatim).
        leftover = set(params) - vars(self).keys()
        if leftover:
            warnings.warn("%s fileds are useless." % ",".join(leftover), Warning)
class UpgradeDBInstanceRequest(AbstractModel):
    """UpgradeDBInstance request body (prepaid upgrade).

    InstanceId: instance ID, e.g. ``cmgo-p8vnipr5``.
    Memory/Volume: target memory / disk size, in GB.
    OplogSize: target oplog size in GB; per the API docs it defaults to
    10% of disk and must stay within 10%-90% of disk.
    """

    def __init__(self):
        for field in ("InstanceId", "Memory", "Volume", "OplogSize"):
            setattr(self, field, None)

    def _deserialize(self, params):
        for field in ("InstanceId", "Memory", "Volume", "OplogSize"):
            setattr(self, field, params.get(field))
        # Warn about request keys with no matching attribute (text verbatim).
        leftover = set(params) - vars(self).keys()
        if leftover:
            warnings.warn("%s fileds are useless." % ",".join(leftover), Warning)
class UpgradeDBInstanceResponse(AbstractModel):
    """UpgradeDBInstance response body: DealId (order) + RequestId."""

    def __init__(self):
        self.DealId = None
        self.RequestId = None

    def _deserialize(self, params):
        self.DealId = params.get("DealId")
        self.RequestId = params.get("RequestId")
        # Warn about response keys with no matching attribute (text verbatim).
        leftover = set(params) - vars(self).keys()
        if leftover:
            warnings.warn("%s fileds are useless." % ",".join(leftover), Warning)
| 32.031272
| 110
| 0.600071
|
import warnings
from tencentcloud.common.abstract_model import AbstractModel
class AssignProjectRequest(AbstractModel):
    """AssignProject request body: InstanceIds + target ProjectId."""

    def __init__(self):
        self.InstanceIds = None
        self.ProjectId = None

    def _deserialize(self, params):
        self.InstanceIds = params.get("InstanceIds")
        self.ProjectId = params.get("ProjectId")
        # Warn about request keys with no matching attribute (text verbatim).
        leftover = set(params) - vars(self).keys()
        if leftover:
            warnings.warn("%s fileds are useless." % ",".join(leftover), Warning)
class AssignProjectResponse(AbstractModel):
    """AssignProject response body: FlowIds + RequestId."""

    def __init__(self):
        self.FlowIds = None
        self.RequestId = None

    def _deserialize(self, params):
        self.FlowIds = params.get("FlowIds")
        self.RequestId = params.get("RequestId")
        # Warn about response keys with no matching attribute (text verbatim).
        leftover = set(params) - vars(self).keys()
        if leftover:
            warnings.warn("%s fileds are useless." % ",".join(leftover), Warning)
class ClientConnection(AbstractModel):
    """One client endpoint: IP plus its connection Count."""

    def __init__(self):
        self.IP = None
        self.Count = None

    def _deserialize(self, params):
        self.IP = params.get("IP")
        self.Count = params.get("Count")
        # Warn about response keys with no matching attribute (text verbatim).
        leftover = set(params) - vars(self).keys()
        if leftover:
            warnings.warn("%s fileds are useless." % ",".join(leftover), Warning)
class CreateDBInstanceHourRequest(AbstractModel):
    """CreateDBInstanceHour request body (pay-as-you-go creation)."""

    def __init__(self):
        for field in (
            "Memory", "Volume", "ReplicateSetNum", "SecondaryNum",
            "EngineVersion", "Machine", "GoodsNum", "Zone", "InstanceRole",
            "InstanceType", "Encrypt", "VpcId", "SubnetId", "ProjectId",
            "SecurityGroup",
        ):
            setattr(self, field, None)

    def _deserialize(self, params):
        for field in (
            "Memory", "Volume", "ReplicateSetNum", "SecondaryNum",
            "EngineVersion", "Machine", "GoodsNum", "Zone", "InstanceRole",
            "InstanceType", "Encrypt", "VpcId", "SubnetId", "ProjectId",
            "SecurityGroup",
        ):
            setattr(self, field, params.get(field))
        # Warn about request keys with no matching attribute (text verbatim).
        leftover = set(params) - vars(self).keys()
        if leftover:
            warnings.warn("%s fileds are useless." % ",".join(leftover), Warning)
class CreateDBInstanceHourResponse(AbstractModel):
    """CreateDBInstanceHour response body: DealId, InstanceIds, RequestId."""

    def __init__(self):
        self.DealId = None
        self.InstanceIds = None
        self.RequestId = None

    def _deserialize(self, params):
        self.DealId = params.get("DealId")
        self.InstanceIds = params.get("InstanceIds")
        self.RequestId = params.get("RequestId")
        # Warn about response keys with no matching attribute (text verbatim).
        leftover = set(params) - vars(self).keys()
        if leftover:
            warnings.warn("%s fileds are useless." % ",".join(leftover), Warning)
class CreateDBInstanceRequest(AbstractModel):
    """CreateDBInstance request body (prepaid creation)."""

    def __init__(self):
        for field in (
            "SecondaryNum", "Memory", "Volume", "MongoVersion", "MachineCode",
            "GoodsNum", "Zone", "TimeSpan", "Password", "ProjectId",
            "SecurityGroup", "UniqVpcId", "UniqSubnetId",
        ):
            setattr(self, field, None)

    def _deserialize(self, params):
        for field in (
            "SecondaryNum", "Memory", "Volume", "MongoVersion", "MachineCode",
            "GoodsNum", "Zone", "TimeSpan", "Password", "ProjectId",
            "SecurityGroup", "UniqVpcId", "UniqSubnetId",
        ):
            setattr(self, field, params.get(field))
        # Warn about request keys with no matching attribute (text verbatim).
        leftover = set(params) - vars(self).keys()
        if leftover:
            warnings.warn("%s fileds are useless." % ",".join(leftover), Warning)
class CreateDBInstanceResponse(AbstractModel):
    """CreateDBInstance response body: DealId, InstanceIds, RequestId."""

    def __init__(self):
        self.DealId = None
        self.InstanceIds = None
        self.RequestId = None

    def _deserialize(self, params):
        self.DealId = params.get("DealId")
        self.InstanceIds = params.get("InstanceIds")
        self.RequestId = params.get("RequestId")
        # Warn about response keys with no matching attribute (text verbatim).
        leftover = set(params) - vars(self).keys()
        if leftover:
            warnings.warn("%s fileds are useless." % ",".join(leftover), Warning)
class DescribeClientConnectionsRequest(AbstractModel):
    """DescribeClientConnections request body: InstanceId only."""

    def __init__(self):
        self.InstanceId = None

    def _deserialize(self, params):
        self.InstanceId = params.get("InstanceId")
        # Warn about request keys with no matching attribute (text verbatim).
        leftover = set(params) - vars(self).keys()
        if leftover:
            warnings.warn("%s fileds are useless." % ",".join(leftover), Warning)
class DescribeClientConnectionsResponse(AbstractModel):
    """DescribeClientConnections response: Clients list + RequestId."""

    def __init__(self):
        self.Clients = None
        self.RequestId = None

    def _deserialize(self, params):
        raw_clients = params.get("Clients")
        if raw_clients is not None:
            self.Clients = []
            for entry in raw_clients:
                conn = ClientConnection()
                conn._deserialize(entry)
                self.Clients.append(conn)
        self.RequestId = params.get("RequestId")
        # Warn about response keys with no matching attribute (text verbatim).
        leftover = set(params) - vars(self).keys()
        if leftover:
            warnings.warn("%s fileds are useless." % ",".join(leftover), Warning)
class DescribeDBInstancesRequest(AbstractModel):
    """DescribeDBInstances request body: filters, paging, ordering."""

    def __init__(self):
        for field in (
            "InstanceIds", "InstanceType", "ClusterType", "Status", "VpcId",
            "SubnetId", "PayMode", "Limit", "Offset", "OrderBy", "OrderByType",
        ):
            setattr(self, field, None)

    def _deserialize(self, params):
        for field in (
            "InstanceIds", "InstanceType", "ClusterType", "Status", "VpcId",
            "SubnetId", "PayMode", "Limit", "Offset", "OrderBy", "OrderByType",
        ):
            setattr(self, field, params.get(field))
        # Warn about request keys with no matching attribute (text verbatim).
        leftover = set(params) - vars(self).keys()
        if leftover:
            warnings.warn("%s fileds are useless." % ",".join(leftover), Warning)
class DescribeDBInstancesResponse(AbstractModel):
    """DescribeDBInstances response: TotalCount, InstanceDetails, RequestId."""

    def __init__(self):
        self.TotalCount = None
        self.InstanceDetails = None
        self.RequestId = None

    def _deserialize(self, params):
        self.TotalCount = params.get("TotalCount")
        raw_details = params.get("InstanceDetails")
        if raw_details is not None:
            self.InstanceDetails = []
            for entry in raw_details:
                detail = MongoDBInstanceDetail()
                detail._deserialize(entry)
                self.InstanceDetails.append(detail)
        self.RequestId = params.get("RequestId")
        # Warn about response keys with no matching attribute (text verbatim).
        leftover = set(params) - vars(self).keys()
        if leftover:
            warnings.warn("%s fileds are useless." % ",".join(leftover), Warning)
class DescribeSlowLogRequest(AbstractModel):
    """DescribeSlowLog request body: instance, time range, threshold, paging."""

    def __init__(self):
        for field in ("InstanceId", "StartTime", "EndTime", "SlowMS",
                      "Offset", "Limit"):
            setattr(self, field, None)

    def _deserialize(self, params):
        for field in ("InstanceId", "StartTime", "EndTime", "SlowMS",
                      "Offset", "Limit"):
            setattr(self, field, params.get(field))
        # Warn about request keys with no matching attribute (text verbatim).
        leftover = set(params) - vars(self).keys()
        if leftover:
            warnings.warn("%s fileds are useless." % ",".join(leftover), Warning)
class DescribeSlowLogResponse(AbstractModel):
    """DescribeSlowLog response: TotalCount, SlowLogList, RequestId."""

    def __init__(self):
        self.TotalCount = None
        self.SlowLogList = None
        self.RequestId = None

    def _deserialize(self, params):
        self.TotalCount = params.get("TotalCount")
        self.SlowLogList = params.get("SlowLogList")
        self.RequestId = params.get("RequestId")
        # Warn about response keys with no matching attribute (text verbatim).
        leftover = set(params) - vars(self).keys()
        if leftover:
            warnings.warn("%s fileds are useless." % ",".join(leftover), Warning)
class DescribeSpecInfoRequest(AbstractModel):
    """DescribeSpecInfo request body: optional Zone filter."""

    def __init__(self):
        self.Zone = None

    def _deserialize(self, params):
        self.Zone = params.get("Zone")
        # Warn about request keys with no matching attribute (text verbatim).
        leftover = set(params) - vars(self).keys()
        if leftover:
            warnings.warn("%s fileds are useless." % ",".join(leftover), Warning)
class DescribeSpecInfoResponse(AbstractModel):
    """DescribeSpecInfo response: SpecInfoList + RequestId."""

    def __init__(self):
        self.SpecInfoList = None
        self.RequestId = None

    def _deserialize(self, params):
        raw_specs = params.get("SpecInfoList")
        if raw_specs is not None:
            self.SpecInfoList = []
            for entry in raw_specs:
                info = SpecificationInfo()
                info._deserialize(entry)
                self.SpecInfoList.append(info)
        self.RequestId = params.get("RequestId")
        # Warn about response keys with no matching attribute (text verbatim).
        leftover = set(params) - vars(self).keys()
        if leftover:
            warnings.warn("%s fileds are useless." % ",".join(leftover), Warning)
class MongoDBInstance(AbstractModel):
    """Lightweight instance reference: InstanceId + Region."""

    def __init__(self):
        self.InstanceId = None
        self.Region = None

    def _deserialize(self, params):
        self.InstanceId = params.get("InstanceId")
        self.Region = params.get("Region")
        # Warn about response keys with no matching attribute (text verbatim).
        leftover = set(params) - vars(self).keys()
        if leftover:
            warnings.warn("%s fileds are useless." % ",".join(leftover), Warning)
class MongoDBInstanceDetail(AbstractModel):
    """Detailed description of one MongoDB instance, as returned by
    DescribeDBInstances."""

    def __init__(self):
        # All attributes default to None; declared in wire order.
        for attr in (
            "InstanceId", "InstanceName", "PayMode", "ProjectId",
            "ClusterType", "Region", "Zone", "NetType", "VpcId", "SubnetId",
            "Status", "Vip", "Vport", "CreateTime", "DeadLine", "MongoVersion",
            "Memory", "Volume", "CpuNum", "MachineType", "SecondaryNum",
            "ReplicationSetNum", "AutoRenewFlag", "UsedVolume",
            "MaintenanceStart", "MaintenanceEnd", "ReplicaSets",
            "ReadonlyInstances", "StandbyInstances", "CloneInstances",
            "RelatedInstance", "Tags", "InstanceVer", "ClusterVer", "Protocol",
            "InstanceType", "InstanceStatusDesc", "RealInstanceId",
        ):
            setattr(self, attr, None)

    def _deserialize(self, params):
        def to_models(model_cls, raw_list):
            # Deserialize a list of raw dicts into model objects.
            built = []
            for raw in raw_list:
                model = model_cls()
                model._deserialize(raw)
                built.append(model)
            return built

        # Plain scalar fields.
        for attr in (
            "InstanceId", "InstanceName", "PayMode", "ProjectId",
            "ClusterType", "Region", "Zone", "NetType", "VpcId", "SubnetId",
            "Status", "Vip", "Vport", "CreateTime", "DeadLine", "MongoVersion",
            "Memory", "Volume", "CpuNum", "MachineType", "SecondaryNum",
            "ReplicationSetNum", "AutoRenewFlag", "UsedVolume",
            "MaintenanceStart", "MaintenanceEnd",
        ):
            setattr(self, attr, params.get(attr))
        # Nested model fields; only assigned when present in the response.
        if params.get("ReplicaSets") is not None:
            self.ReplicaSets = to_models(MongodbShardInfo,
                                         params.get("ReplicaSets"))
        if params.get("ReadonlyInstances") is not None:
            self.ReadonlyInstances = to_models(MongoDBInstance,
                                               params.get("ReadonlyInstances"))
        if params.get("StandbyInstances") is not None:
            self.StandbyInstances = to_models(MongoDBInstance,
                                              params.get("StandbyInstances"))
        if params.get("CloneInstances") is not None:
            self.CloneInstances = to_models(MongoDBInstance,
                                            params.get("CloneInstances"))
        if params.get("RelatedInstance") is not None:
            self.RelatedInstance = MongoDBInstance()
            self.RelatedInstance._deserialize(params.get("RelatedInstance"))
        if params.get("Tags") is not None:
            self.Tags = to_models(TagInfo, params.get("Tags"))
        # Trailing scalar fields.
        for attr in ("InstanceVer", "ClusterVer", "Protocol", "InstanceType",
                     "InstanceStatusDesc", "RealInstanceId"):
            setattr(self, attr, params.get(attr))
        # Warn about response keys with no matching attribute (text verbatim).
        leftover = set(params) - vars(self).keys()
        if leftover:
            warnings.warn("%s fileds are useless." % ",".join(leftover), Warning)
class MongodbShardInfo(AbstractModel):
    """Per-shard (replica set) information of a sharded instance."""

    def __init__(self):
        for field in (
            "UsedVolume", "ReplicaSetId", "ReplicaSetName", "Memory",
            "Volume", "OplogSize", "SecondaryNum", "RealReplicaSetId",
        ):
            setattr(self, field, None)

    def _deserialize(self, params):
        for field in (
            "UsedVolume", "ReplicaSetId", "ReplicaSetName", "Memory",
            "Volume", "OplogSize", "SecondaryNum", "RealReplicaSetId",
        ):
            setattr(self, field, params.get(field))
        # Warn about response keys with no matching attribute (text verbatim).
        leftover = set(params) - vars(self).keys()
        if leftover:
            warnings.warn("%s fileds are useless." % ",".join(leftover), Warning)
class RenameInstanceRequest(AbstractModel):
    """RenameInstance request body: InstanceId + NewName."""

    def __init__(self):
        self.InstanceId = None
        self.NewName = None

    def _deserialize(self, params):
        self.InstanceId = params.get("InstanceId")
        self.NewName = params.get("NewName")
        # Warn about request keys with no matching attribute (text verbatim).
        leftover = set(params) - vars(self).keys()
        if leftover:
            warnings.warn("%s fileds are useless." % ",".join(leftover), Warning)
class RenameInstanceResponse(AbstractModel):
    """RenameInstance response body: RequestId only."""

    def __init__(self):
        self.RequestId = None

    def _deserialize(self, params):
        self.RequestId = params.get("RequestId")
        # Warn about response keys with no matching attribute (text verbatim).
        leftover = set(params) - vars(self).keys()
        if leftover:
            warnings.warn("%s fileds are useless." % ",".join(leftover), Warning)
class SetAutoRenewRequest(AbstractModel):
    """SetAutoRenew request body: InstanceIds + AutoRenewFlag."""

    def __init__(self):
        self.InstanceIds = None
        self.AutoRenewFlag = None

    def _deserialize(self, params):
        self.InstanceIds = params.get("InstanceIds")
        self.AutoRenewFlag = params.get("AutoRenewFlag")
        # Warn about request keys with no matching attribute (text verbatim).
        leftover = set(params) - vars(self).keys()
        if leftover:
            warnings.warn("%s fileds are useless." % ",".join(leftover), Warning)
class SetAutoRenewResponse(AbstractModel):
    """SetAutoRenew response body: RequestId only."""

    def __init__(self):
        self.RequestId = None

    def _deserialize(self, params):
        self.RequestId = params.get("RequestId")
        # Warn about response keys with no matching attribute (text verbatim).
        leftover = set(params) - vars(self).keys()
        if leftover:
            warnings.warn("%s fileds are useless." % ",".join(leftover), Warning)
class SetPasswordRequest(AbstractModel):
    """SetPassword request body: InstanceId, UserName, Password."""

    def __init__(self):
        self.InstanceId = None
        self.UserName = None
        self.Password = None

    def _deserialize(self, params):
        self.InstanceId = params.get("InstanceId")
        self.UserName = params.get("UserName")
        self.Password = params.get("Password")
        # Warn about request keys with no matching attribute (text verbatim).
        leftover = set(params) - vars(self).keys()
        if leftover:
            warnings.warn("%s fileds are useless." % ",".join(leftover), Warning)
class SetPasswordResponse(AbstractModel):
    """SetPassword response body: FlowId + RequestId."""

    def __init__(self):
        self.FlowId = None
        self.RequestId = None

    def _deserialize(self, params):
        self.FlowId = params.get("FlowId")
        self.RequestId = params.get("RequestId")
        # Warn about response keys with no matching attribute (text verbatim).
        leftover = set(params) - vars(self).keys()
        if leftover:
            warnings.warn("%s fileds are useless." % ",".join(leftover), Warning)
class SpecItem(AbstractModel):
    """One sellable specification entry (CPU/memory/storage/limits)."""

    def __init__(self):
        for field in (
            "SpecCode", "Status", "MachineType", "Cpu", "Memory",
            "DefaultStorage", "MaxStorage", "MinStorage", "Qps", "Conns",
            "MongoVersionCode", "MongoVersionValue", "Version", "EngineName",
            "ClusterType", "MinNodeNum", "MaxNodeNum", "MinReplicateSetNum",
            "MaxReplicateSetNum", "MinReplicateSetNodeNum",
            "MaxReplicateSetNodeNum",
        ):
            setattr(self, field, None)

    def _deserialize(self, params):
        for field in (
            "SpecCode", "Status", "MachineType", "Cpu", "Memory",
            "DefaultStorage", "MaxStorage", "MinStorage", "Qps", "Conns",
            "MongoVersionCode", "MongoVersionValue", "Version", "EngineName",
            "ClusterType", "MinNodeNum", "MaxNodeNum", "MinReplicateSetNum",
            "MaxReplicateSetNum", "MinReplicateSetNodeNum",
            "MaxReplicateSetNodeNum",
        ):
            setattr(self, field, params.get(field))
        # Warn about response keys with no matching attribute (text verbatim).
        leftover = set(params) - vars(self).keys()
        if leftover:
            warnings.warn("%s fileds are useless." % ",".join(leftover), Warning)
class SpecificationInfo(AbstractModel):
    """Sellable instance specifications for one region/zone."""

    def __init__(self):
        self.Region = None
        self.Zone = None
        self.SpecItems = None

    def _deserialize(self, params):
        self.Region = params.get("Region")
        self.Zone = params.get("Zone")
        raw_items = params.get("SpecItems")
        if raw_items is not None:
            self.SpecItems = []
            for entry in raw_items:
                spec = SpecItem()
                spec._deserialize(entry)
                self.SpecItems.append(spec)
        # Warn about response keys with no matching attribute (text verbatim).
        leftover = set(params) - vars(self).keys()
        if leftover:
            warnings.warn("%s fileds are useless." % ",".join(leftover), Warning)
class TagInfo(AbstractModel):
    """Instance tag: a TagKey/TagValue string pair."""

    def __init__(self):
        self.TagKey = None
        self.TagValue = None

    def _deserialize(self, params):
        self.TagKey = params.get("TagKey")
        self.TagValue = params.get("TagValue")
        # Warn about response keys with no matching attribute (text verbatim).
        leftover = set(params) - vars(self).keys()
        if leftover:
            warnings.warn("%s fileds are useless." % ",".join(leftover), Warning)
class TerminateDBInstanceRequest(AbstractModel):
    """TerminateDBInstance request body: InstanceId only."""

    def __init__(self):
        self.InstanceId = None

    def _deserialize(self, params):
        self.InstanceId = params.get("InstanceId")
        # Warn about request keys with no matching attribute (text verbatim).
        leftover = set(params) - vars(self).keys()
        if leftover:
            warnings.warn("%s fileds are useless." % ",".join(leftover), Warning)
class TerminateDBInstanceResponse(AbstractModel):
    """TerminateDBInstance response body: AsyncRequestId + RequestId."""

    def __init__(self):
        self.AsyncRequestId = None
        self.RequestId = None

    def _deserialize(self, params):
        self.AsyncRequestId = params.get("AsyncRequestId")
        self.RequestId = params.get("RequestId")
        # Warn about response keys with no matching attribute (text verbatim).
        leftover = set(params) - vars(self).keys()
        if leftover:
            warnings.warn("%s fileds are useless." % ",".join(leftover), Warning)
class UpgradeDBInstanceHourRequest(AbstractModel):
    """UpgradeDBInstanceHour request body: instance + target sizes."""

    def __init__(self):
        for field in ("InstanceId", "Memory", "Volume", "OplogSize"):
            setattr(self, field, None)

    def _deserialize(self, params):
        for field in ("InstanceId", "Memory", "Volume", "OplogSize"):
            setattr(self, field, params.get(field))
        # Warn about request keys with no matching attribute (text verbatim).
        leftover = set(params) - vars(self).keys()
        if leftover:
            warnings.warn("%s fileds are useless." % ",".join(leftover), Warning)
class UpgradeDBInstanceHourResponse(AbstractModel):
    """UpgradeDBInstanceHour response body: DealId + RequestId."""

    def __init__(self):
        self.DealId = None
        self.RequestId = None

    def _deserialize(self, params):
        self.DealId = params.get("DealId")
        self.RequestId = params.get("RequestId")
        # Warn about response keys with no matching attribute (text verbatim).
        leftover = set(params) - vars(self).keys()
        if leftover:
            warnings.warn("%s fileds are useless." % ",".join(leftover), Warning)
class UpgradeDBInstanceRequest(AbstractModel):
    """UpgradeDBInstance request body: instance + target sizes."""

    def __init__(self):
        for field in ("InstanceId", "Memory", "Volume", "OplogSize"):
            setattr(self, field, None)

    def _deserialize(self, params):
        for field in ("InstanceId", "Memory", "Volume", "OplogSize"):
            setattr(self, field, params.get(field))
        # Warn about request keys with no matching attribute (text verbatim).
        leftover = set(params) - vars(self).keys()
        if leftover:
            warnings.warn("%s fileds are useless." % ",".join(leftover), Warning)
class UpgradeDBInstanceResponse(AbstractModel):
    """UpgradeDBInstance response body: DealId + RequestId."""

    def __init__(self):
        self.DealId = None
        self.RequestId = None

    def _deserialize(self, params):
        self.DealId = params.get("DealId")
        self.RequestId = params.get("RequestId")
        # Warn about response keys with no matching attribute (text verbatim).
        leftover = set(params) - vars(self).keys()
        if leftover:
            warnings.warn("%s fileds are useless." % ",".join(leftover), Warning)
| true
| true
|
79062f5522c6299ac2fe1bfd30769fdb10226e88
| 392
|
py
|
Python
|
recipes/gflags/all/test_package/conanfile.py
|
balintfodor/conan-center-index
|
9afc15a805f8ab323a312327c68c80f81a6ceade
|
[
"MIT"
] | 3
|
2020-04-16T15:01:33.000Z
|
2022-01-13T08:05:47.000Z
|
recipes/gflags/all/test_package/conanfile.py
|
balintfodor/conan-center-index
|
9afc15a805f8ab323a312327c68c80f81a6ceade
|
[
"MIT"
] | 33
|
2020-02-18T15:54:50.000Z
|
2022-03-28T08:54:10.000Z
|
recipes/lyra/all/test_package/conanfile.py
|
GavinNL/conan-center-index
|
0ae829a362c1cc6a20d97e023ca0aafc805797c3
|
[
"MIT"
] | 8
|
2020-03-06T14:38:18.000Z
|
2022-03-28T08:41:15.000Z
|
from conans import ConanFile, CMake, tools
import os
class TestPackageConan(ConanFile):
    """Minimal consumer project used to validate the packaged library."""
    settings = "os", "compiler", "build_type", "arch"
    generators = "cmake"

    def build(self):
        # Configure and compile the CMake-based test project.
        builder = CMake(self)
        builder.configure()
        builder.build()

    def test(self):
        # Run the compiled binary inside the package's run environment.
        executable = os.path.join("bin", "test_package")
        self.run(executable, run_environment=True)
| 23.058824
| 54
| 0.632653
|
from conans import ConanFile, CMake, tools
import os
class TestPackageConan(ConanFile):
    """Smoke-test recipe: build and run a small consumer of the package."""
    settings = "os", "compiler", "build_type", "arch"
    generators = "cmake"

    def build(self):
        # Drive the bundled CMake project.
        cmake_helper = CMake(self)
        cmake_helper.configure()
        cmake_helper.build()

    def test(self):
        # Execute the produced binary with the package's run environment.
        self.run(os.path.join("bin", "test_package"), run_environment=True)
| true
| true
|
79062f6182eb8091b6a54c4c22672d53706fb058
| 3,466
|
py
|
Python
|
hawkmoth/__init__.py
|
mv0/hawkmoth
|
81424935ede79fd8836b8b18f6ad6210fd3c0d38
|
[
"BSD-2-Clause"
] | null | null | null |
hawkmoth/__init__.py
|
mv0/hawkmoth
|
81424935ede79fd8836b8b18f6ad6210fd3c0d38
|
[
"BSD-2-Clause"
] | null | null | null |
hawkmoth/__init__.py
|
mv0/hawkmoth
|
81424935ede79fd8836b8b18f6ad6210fd3c0d38
|
[
"BSD-2-Clause"
] | null | null | null |
# Copyright (c) 2016-2017, Jani Nikula <jani@nikula.org>
# Licensed under the terms of BSD 2-Clause, see LICENSE for details.
"""
Hawkmoth
========
Sphinx C Domain autodoc directive extension.
"""
import glob
import os
import re
import stat
import subprocess
import sys
from docutils import nodes, statemachine
from docutils.parsers.rst import directives, Directive
from docutils.statemachine import ViewList
from sphinx.ext.autodoc import AutodocReporter
from sphinx.util.nodes import nested_parse_with_titles
from sphinx.util.docutils import switch_source_input
from hawkmoth.parser import parse
# Read the extension version from the VERSION file shipped alongside this
# module; setup() below reports it to Sphinx.
with open(os.path.join(os.path.abspath(os.path.dirname(__file__)),
                       'VERSION')) as version_file:
    __version__ = version_file.read().strip()
class CAutoDocDirective(Directive):
    """Extract all documentation comments from the specified file(s).

    Usage: ``.. c:autodoc:: <pattern> [<pattern> ...]`` where each
    whitespace-separated pattern is a glob relative to ``cautodoc_root``.
    """
    # BUG FIX: docutils' Directive API spells this ``required_arguments``
    # (plural). The previous ``required_argument`` attribute was silently
    # ignored, so the directive effectively required zero arguments.
    required_arguments = 1
    optional_arguments = 1
    # Allow passing a variable number of file patterns as arguments
    final_argument_whitespace = True
    option_spec = {
        'compat': directives.unchanged_required,
        'clang': directives.unchanged_required,
    }
    has_content = False

    def __parse(self, viewlist, filename):
        """Parse comments from *filename* and append them to *viewlist*
        as reST lines tagged with their source location."""
        env = self.state.document.settings.env
        compat = self.options.get('compat', env.config.cautodoc_compat)
        clang = self.options.get('clang', env.config.cautodoc_clang)
        comments = parse(filename, compat=compat, clang=clang)

        for (comment, meta) in comments:
            # docutils line offsets are 0-based; the parser reports 1-based.
            lineoffset = meta['line'] - 1
            lines = statemachine.string2lines(comment, 8,
                                              convert_whitespace=True)
            for line in lines:
                viewlist.append(line, filename, lineoffset)
                lineoffset += 1

    def run(self):
        """Expand every pattern argument, parse the matching files, and
        return the resulting document nodes."""
        env = self.state.document.settings.env

        result = ViewList()

        for pattern in self.arguments[0].split():
            filenames = glob.glob(env.config.cautodoc_root + '/' + pattern)
            if len(filenames) == 0:
                fmt = 'Pattern "{pat}" does not match any files.'
                env.app.warn(fmt.format(pat=pattern),
                             location=(env.docname, self.lineno))
                continue

            for filename in filenames:
                mode = os.stat(filename).st_mode
                if stat.S_ISDIR(mode):
                    fmt = 'Path "{name}" matching pattern "{pat}" is a directory.'
                    env.app.warn(fmt.format(name=filename, pat=pattern),
                                 location=(env.docname, self.lineno))
                    continue

                # Tell Sphinx about the dependency and parse the file
                env.note_dependency(os.path.abspath(filename))
                self.__parse(result, filename)

        # Parse the extracted reST
        with switch_source_input(self.state, result):
            node = nodes.section()
            nested_parse_with_titles(self.state, result, node)

        return node.children
def setup(app):
    """Register the cautodoc directive and its configuration values."""
    app.require_sphinx('1.8')
    for name, default in (('cautodoc_root', app.confdir),
                          ('cautodoc_compat', None),
                          ('cautodoc_clang', None)):
        app.add_config_value(name, default, 'env')
    app.add_directive_to_domain('c', 'autodoc', CAutoDocDirective)
    return {
        'version': __version__,
        'parallel_read_safe': True,
        'parallel_write_safe': True,
    }
| 34.316832
| 82
| 0.634737
|
import glob
import os
import re
import stat
import subprocess
import sys
from docutils import nodes, statemachine
from docutils.parsers.rst import directives, Directive
from docutils.statemachine import ViewList
from sphinx.ext.autodoc import AutodocReporter
from sphinx.util.nodes import nested_parse_with_titles
from sphinx.util.docutils import switch_source_input
from hawkmoth.parser import parse
with open(os.path.join(os.path.abspath(os.path.dirname(__file__)),
'VERSION')) as version_file:
__version__ = version_file.read().strip()
class CAutoDocDirective(Directive):
    """Extract all documentation comments from the specified file(s).

    The argument is one or more whitespace-separated glob patterns,
    resolved relative to the ``cautodoc_root`` configuration value.
    """
    # docutils spells this attribute ``required_arguments``; the previous
    # ``required_argument`` (singular) was silently ignored, so the one
    # mandatory pattern argument was never actually enforced.
    required_arguments = 1
    optional_arguments = 1
    final_argument_whitespace = True
    option_spec = {
        'compat': directives.unchanged_required,
        'clang': directives.unchanged_required,
    }
    has_content = False

    def __parse(self, viewlist, filename):
        """Parse *filename* and append the extracted comments to *viewlist*."""
        env = self.state.document.settings.env
        compat = self.options.get('compat', env.config.cautodoc_compat)
        clang = self.options.get('clang', env.config.cautodoc_clang)
        comments = parse(filename, compat=compat, clang=clang)
        for (comment, meta) in comments:
            lineoffset = meta['line'] - 1
            lines = statemachine.string2lines(comment, 8,
                                              convert_whitespace=True)
            for line in lines:
                # Preserve source line numbers for accurate reST diagnostics.
                viewlist.append(line, filename, lineoffset)
                lineoffset += 1

    def run(self):
        """Expand each glob pattern argument and parse the matched files."""
        env = self.state.document.settings.env
        result = ViewList()
        for pattern in self.arguments[0].split():
            filenames = glob.glob(env.config.cautodoc_root + '/' + pattern)
            if len(filenames) == 0:
                fmt = 'Pattern "{pat}" does not match any files.'
                env.app.warn(fmt.format(pat=pattern),
                             location=(env.docname, self.lineno))
                continue
            for filename in filenames:
                mode = os.stat(filename).st_mode
                if stat.S_ISDIR(mode):
                    fmt = 'Path "{name}" matching pattern "{pat}" is a directory.'
                    env.app.warn(fmt.format(name=filename, pat=pattern),
                                 location=(env.docname, self.lineno))
                    continue
                # Register the dependency so Sphinx rebuilds when it changes.
                env.note_dependency(os.path.abspath(filename))
                self.__parse(result, filename)
        # Parse the extracted reST into a throwaway section node.
        with switch_source_input(self.state, result):
            node = nodes.section()
            nested_parse_with_titles(self.state, result, node)
        return node.children
def setup(app):
    """Register the cautodoc directive and its configuration values."""
    app.require_sphinx('1.8')
    for name, default in (('cautodoc_root', app.confdir),
                          ('cautodoc_compat', None),
                          ('cautodoc_clang', None)):
        app.add_config_value(name, default, 'env')
    app.add_directive_to_domain('c', 'autodoc', CAutoDocDirective)
    return {
        'version': __version__,
        'parallel_read_safe': True,
        'parallel_write_safe': True,
    }
| true
| true
|
7906312da098eb0cd514ed73552105109d16c5c4
| 5,268
|
py
|
Python
|
src/lib/trains/mot.py
|
CaptainEven/FairMOTVehicle
|
1d9033bcab9723cc5be3d5d94a5ac3712e8e143e
|
[
"MIT"
] | 152
|
2020-06-11T08:27:32.000Z
|
2022-03-26T16:35:55.000Z
|
src/lib/trains/mot.py
|
christw16/FairMOTVehicle
|
1d9033bcab9723cc5be3d5d94a5ac3712e8e143e
|
[
"MIT"
] | 24
|
2020-06-11T14:57:41.000Z
|
2022-03-19T10:07:42.000Z
|
src/lib/trains/mot.py
|
christw16/FairMOTVehicle
|
1d9033bcab9723cc5be3d5d94a5ac3712e8e143e
|
[
"MIT"
] | 29
|
2020-06-11T09:42:22.000Z
|
2022-01-28T22:32:12.000Z
|
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import math
import torch
import torch.nn as nn
import torch.nn.functional as F
from lib.models.decode import mot_decode
from lib.models.losses import FocalLoss
from lib.models.losses import RegL1Loss, RegLoss, NormRegL1Loss, RegWeightedL1Loss
from lib.models.utils import _sigmoid, _tranpose_and_gather_feat
from lib.utils.post_process import ctdet_post_process
from .base_trainer import BaseTrainer
# Definition of the loss function
class MotLoss(torch.nn.Module):
    """Joint detection + re-identification loss for FairMOT training.

    Combines a center-heatmap loss, box-size and center-offset regression
    losses, and a track-id classification loss, balanced by two learnable
    weights (s_det, s_id).
    """
    def __init__(self, opt):
        super(MotLoss, self).__init__()
        # Center-heatmap criterion: MSE or (default) focal loss.
        self.crit = torch.nn.MSELoss() if opt.mse_loss else FocalLoss()
        self.crit_reg = RegL1Loss() if opt.reg_loss == 'l1' else \
            RegLoss() if opt.reg_loss == 'sl1' else None  # L1 loss or smooth-L1 loss
        self.crit_wh = torch.nn.L1Loss(reduction='sum') if opt.dense_wh else \
            NormRegL1Loss() if opt.norm_wh else \
            RegWeightedL1Loss() if opt.cat_spec_wh else self.crit_reg  # box size loss
        self.opt = opt
        self.emb_dim = opt.reid_dim
        self.nID = opt.nID

        # The only layer here with learnable parameters: the fully connected
        # layer used for re-ID.
        self.classifier = nn.Linear(self.emb_dim, self.nID)  # final FC for track-id classification: maps features to class scores
        self.IDLoss = nn.CrossEntropyLoss(ignore_index=-1)  # cross-entropy over track ids; -1 marks unlabeled targets
        # self.TriLoss = TripletLoss()
        self.emb_scale = math.sqrt(2) * math.log(self.nID - 1)
        self.s_det = nn.Parameter(-1.85 * torch.ones(1))  # learnable scaling coefficient for the detection loss
        self.s_id = nn.Parameter(-1.05 * torch.ones(1))  # learnable scaling coefficient for the track-id classification loss

    def forward(self, outputs, batch):
        """
        :param outputs: list of head-output dicts ('hm', 'wh', 'reg', 'id'), one per stack
        :param batch: ground-truth dict ('hm', 'reg_mask', 'ind', 'wh', 'reg', 'ids', ...)
        :return: (total loss, dict of the individual loss terms)
        """
        opt = self.opt
        hm_loss, wh_loss, off_loss, id_loss = 0.0, 0.0, 0.0, 0.0  # initialize the 4 loss accumulators to 0
        for s in range(opt.num_stacks):
            output = outputs[s]
            if not opt.mse_loss:
                output['hm'] = _sigmoid(output['hm'])

            # compute the heatmap loss
            hm_loss += self.crit(output['hm'], batch['hm']) / opt.num_stacks
            if opt.wh_weight > 0:
                if opt.dense_wh:
                    mask_weight = batch['dense_wh_mask'].sum() + 1e-4
                    wh_loss += (self.crit_wh(output['wh'] * batch['dense_wh_mask'],
                                             batch['dense_wh'] * batch['dense_wh_mask']) /
                                mask_weight) / opt.num_stacks
                else:  # compute the L1/smooth-L1 loss on box sizes
                    wh_loss += self.crit_reg(
                        output['wh'], batch['reg_mask'],
                        batch['ind'], batch['wh']) / opt.num_stacks

            if opt.reg_offset and opt.off_weight > 0:  # compute the L1 loss on box center offsets
                off_loss += self.crit_reg(output['reg'], batch['reg_mask'],
                                          batch['ind'], batch['reg']) / opt.num_stacks

            # cross-entropy loss for track-id classification of detected objects
            if opt.id_weight > 0:
                id_head = _tranpose_and_gather_feat(output['id'], batch['ind'])
                id_head = id_head[batch['reg_mask'] > 0].contiguous()  # only pixels that contain an object contribute to the id loss
                id_head = self.emb_scale * F.normalize(id_head)
                id_target = batch['ids'][batch['reg_mask'] > 0]  # ground-truth track ids at object pixels
                id_output = self.classifier.forward(id_head).contiguous()  # id-classification logits from the final FC layer
                id_loss += self.IDLoss(id_output, id_target)
                # id_loss += self.IDLoss(id_output, id_target) + self.TriLoss(id_head, id_target)

        # loss = opt.hm_weight * hm_loss + opt.wh_weight * wh_loss + opt.off_weight * off_loss + opt.id_weight * id_loss
        det_loss = opt.hm_weight * hm_loss \
                   + opt.wh_weight * wh_loss \
                   + opt.off_weight * off_loss

        # Uncertainty-style weighting of detection vs. id loss via the
        # learnable s_det/s_id -- presumably following "Multi-Task Learning
        # Using Uncertainty to Weigh Losses"; TODO confirm against the paper.
        loss = torch.exp(-self.s_det) * det_loss \
               + torch.exp(-self.s_id) * id_loss \
               + (self.s_det + self.s_id)
        loss *= 0.5

        # print(loss, hm_loss, wh_loss, off_loss, id_loss)

        loss_stats = {'loss': loss,
                      'hm_loss': hm_loss,
                      'wh_loss': wh_loss,
                      'off_loss': off_loss,
                      'id_loss': id_loss}
        return loss, loss_stats
# Core trainer class
class MotTrainer(BaseTrainer):
    """Trainer that plugs the MOT loss into the generic BaseTrainer loop."""

    def __init__(self, opt, model, optimizer=None):
        super(MotTrainer, self).__init__(opt, model, optimizer=optimizer)

    def _get_losses(self, opt):
        """Return the loss-stat names (matching MotLoss output) and the loss module."""
        loss_states = ['loss', 'hm_loss', 'wh_loss', 'off_loss', 'id_loss']
        return loss_states, MotLoss(opt)

    def save_result(self, output, batch, results):
        """Decode one batch's detections and store them keyed by image id."""
        offset = output['reg'] if self.opt.reg_offset else None
        decoded = mot_decode(output['hm'], output['wh'], reg=offset,
                             cat_spec_wh=self.opt.cat_spec_wh, K=self.opt.K)
        decoded = decoded.detach().cpu().numpy().reshape(1, -1, decoded.shape[2])
        heat = output['hm']
        processed = ctdet_post_process(decoded.copy(),
                                       batch['meta']['c'].cpu().numpy(),
                                       batch['meta']['s'].cpu().numpy(),
                                       heat.shape[2], heat.shape[3], heat.shape[1])
        img_id = batch['meta']['img_id'].cpu().numpy()[0]
        results[img_id] = processed[0]
| 42.483871
| 120
| 0.579157
|
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import math
import torch
import torch.nn as nn
import torch.nn.functional as F
from lib.models.decode import mot_decode
from lib.models.losses import FocalLoss
from lib.models.losses import RegL1Loss, RegLoss, NormRegL1Loss, RegWeightedL1Loss
from lib.models.utils import _sigmoid, _tranpose_and_gather_feat
from lib.utils.post_process import ctdet_post_process
from .base_trainer import BaseTrainer
class MotLoss(torch.nn.Module):
    """Joint detection + re-identification loss for FairMOT training."""
    def __init__(self, opt):
        super(MotLoss, self).__init__()
        # Center-heatmap criterion: MSE or (default) focal loss.
        self.crit = torch.nn.MSELoss() if opt.mse_loss else FocalLoss()
        # Regression criterion for sizes/offsets: L1 or smooth-L1.
        self.crit_reg = RegL1Loss() if opt.reg_loss == 'l1' else \
            RegLoss() if opt.reg_loss == 'sl1' else None
        self.crit_wh = torch.nn.L1Loss(reduction='sum') if opt.dense_wh else \
            NormRegL1Loss() if opt.norm_wh else \
            RegWeightedL1Loss() if opt.cat_spec_wh else self.crit_reg
        self.opt = opt
        self.emb_dim = opt.reid_dim
        self.nID = opt.nID
        # Only learnable layer here: FC classifier over track ids for re-ID.
        self.classifier = nn.Linear(self.emb_dim, self.nID)
        # ignore_index=-1 skips boxes without an identity label.
        self.IDLoss = nn.CrossEntropyLoss(ignore_index=-1)
        self.emb_scale = math.sqrt(2) * math.log(self.nID - 1)
        # Learnable balancing weights for the detection and id loss terms.
        self.s_det = nn.Parameter(-1.85 * torch.ones(1))
        self.s_id = nn.Parameter(-1.05 * torch.ones(1))
    def forward(self, outputs, batch):
        """Return (total loss, dict of the individual loss terms)."""
        opt = self.opt
        hm_loss, wh_loss, off_loss, id_loss = 0.0, 0.0, 0.0, 0.0
        for s in range(opt.num_stacks):
            output = outputs[s]
            if not opt.mse_loss:
                output['hm'] = _sigmoid(output['hm'])
            # Heatmap (object-center) loss, averaged over stacks.
            hm_loss += self.crit(output['hm'], batch['hm']) / opt.num_stacks
            if opt.wh_weight > 0:
                if opt.dense_wh:
                    mask_weight = batch['dense_wh_mask'].sum() + 1e-4
                    wh_loss += (self.crit_wh(output['wh'] * batch['dense_wh_mask'],
                                             batch['dense_wh'] * batch['dense_wh_mask']) /
                                mask_weight) / opt.num_stacks
                else:
                    # Box-size regression loss.
                    wh_loss += self.crit_reg(
                        output['wh'], batch['reg_mask'],
                        batch['ind'], batch['wh']) / opt.num_stacks
            if opt.reg_offset and opt.off_weight > 0:
                # Center-offset regression loss.
                off_loss += self.crit_reg(output['reg'], batch['reg_mask'],
                                          batch['ind'], batch['reg']) / opt.num_stacks
            if opt.id_weight > 0:
                # Id loss only at pixels that contain an object (reg_mask > 0).
                id_head = _tranpose_and_gather_feat(output['id'], batch['ind'])
                id_head = id_head[batch['reg_mask'] > 0].contiguous()
                id_head = self.emb_scale * F.normalize(id_head)
                id_target = batch['ids'][batch['reg_mask'] > 0]
                id_output = self.classifier.forward(id_head).contiguous()
                id_loss += self.IDLoss(id_output, id_target)
        det_loss = opt.hm_weight * hm_loss \
                   + opt.wh_weight * wh_loss \
                   + opt.off_weight * off_loss
        # Combine detection and id losses with the learnable weights
        # s_det/s_id -- presumably uncertainty-based task weighting;
        # TODO confirm against the FairMOT paper.
        loss = torch.exp(-self.s_det) * det_loss \
               + torch.exp(-self.s_id) * id_loss \
               + (self.s_det + self.s_id)
        loss *= 0.5
        loss_stats = {'loss': loss,
                      'hm_loss': hm_loss,
                      'wh_loss': wh_loss,
                      'off_loss': off_loss,
                      'id_loss': id_loss}
        return loss, loss_stats
class MotTrainer(BaseTrainer):
    """Trainer that plugs the MOT loss into the generic BaseTrainer loop."""
    def __init__(self, opt, model, optimizer=None):
        super(MotTrainer, self).__init__(opt, model, optimizer=optimizer)
    def _get_losses(self, opt):
        """Return the loss-stat names (matching MotLoss output) and the loss module."""
        loss_states = ['loss', 'hm_loss', 'wh_loss', 'off_loss', 'id_loss']
        loss = MotLoss(opt)
        return loss_states, loss
    def save_result(self, output, batch, results):
        """Decode one batch's detections and store them in *results* keyed by image id."""
        reg = output['reg'] if self.opt.reg_offset else None
        dets = mot_decode(
            output['hm'], output['wh'], reg=reg,
            cat_spec_wh=self.opt.cat_spec_wh, K=self.opt.K)
        # (batch, K, fields) -> numpy, then map back to original image coords.
        dets = dets.detach().cpu().numpy().reshape(1, -1, dets.shape[2])
        dets_out = ctdet_post_process(
            dets.copy(), batch['meta']['c'].cpu().numpy(),
            batch['meta']['s'].cpu().numpy(),
            output['hm'].shape[2], output['hm'].shape[3], output['hm'].shape[1])
        results[batch['meta']['img_id'].cpu().numpy()[0]] = dets_out[0]
| true
| true
|
790632599ec0c5e0db7b70a082f0a63ae4a7dec4
| 26,693
|
py
|
Python
|
lib/python2.7/matplotlib/projections/polar.py
|
ashley8jain/IITD-complaint-system-web
|
21a94601cba710f558d1689b87cfc391a1541c9f
|
[
"BSD-3-Clause"
] | 1
|
2017-01-25T00:38:48.000Z
|
2017-01-25T00:38:48.000Z
|
lib/python2.7/matplotlib/projections/polar.py
|
ashley8jain/IITD-complaint-system-web
|
21a94601cba710f558d1689b87cfc391a1541c9f
|
[
"BSD-3-Clause"
] | null | null | null |
lib/python2.7/matplotlib/projections/polar.py
|
ashley8jain/IITD-complaint-system-web
|
21a94601cba710f558d1689b87cfc391a1541c9f
|
[
"BSD-3-Clause"
] | null | null | null |
import math
import warnings
import numpy as np
import matplotlib
rcParams = matplotlib.rcParams
from matplotlib.axes import Axes
import matplotlib.axis as maxis
from matplotlib import cbook
from matplotlib import docstring
from matplotlib.patches import Circle
from matplotlib.path import Path
from matplotlib.ticker import Formatter, Locator, FormatStrFormatter
from matplotlib.transforms import Affine2D, Affine2DBase, Bbox, \
BboxTransformTo, IdentityTransform, Transform, TransformWrapper, \
ScaledTranslation, blended_transform_factory, BboxTransformToMaxOnly
import matplotlib.spines as mspines
class PolarAxes(Axes):
    """
    A polar graph projection, where the input dimensions are *theta*, *r*.

    Theta starts pointing east and goes anti-clockwise.
    """
    name = 'polar'

    class PolarTransform(Transform):
        """
        The base polar transform.  This handles projection *theta* and
        *r* into Cartesian coordinate space *x* and *y*, but does not
        perform the ultimate affine transformation into the correct
        position.
        """
        input_dims = 2
        output_dims = 2
        is_separable = False

        def __init__(self, axis=None, use_rmin=True):
            Transform.__init__(self)
            self._axis = axis
            self._use_rmin = use_rmin

        def transform(self, tr):
            xy = np.empty(tr.shape, np.float_)
            if self._axis is not None:
                if self._use_rmin:
                    rmin = self._axis.viewLim.ymin
                else:
                    rmin = 0
                theta_offset = self._axis.get_theta_offset()
                theta_direction = self._axis.get_theta_direction()
            else:
                rmin = 0
                theta_offset = 0
                theta_direction = 1

            t = tr[:, 0:1]
            r = tr[:, 1:2]
            x = xy[:, 0:1]
            y = xy[:, 1:2]

            # NOTE(review): t aliases tr, so the in-place ops below modify
            # the caller's array -- confirm callers do not rely on tr.
            t *= theta_direction
            t += theta_offset

            if rmin != 0:
                r = r - rmin
                mask = r < 0
                # Points inside rmin are not drawn: map them to NaN.
                x[:] = np.where(mask, np.nan, r * np.cos(t))
                y[:] = np.where(mask, np.nan, r * np.sin(t))
            else:
                x[:] = r * np.cos(t)
                y[:] = r * np.sin(t)

            return xy
        transform.__doc__ = Transform.transform.__doc__

        transform_non_affine = transform
        transform_non_affine.__doc__ = Transform.transform_non_affine.__doc__

        def transform_path(self, path):
            vertices = path.vertices
            if len(vertices) == 2 and vertices[0, 0] == vertices[1, 0]:
                return Path(self.transform(vertices), path.codes)
            ipath = path.interpolated(path._interpolation_steps)
            return Path(self.transform(ipath.vertices), ipath.codes)
        transform_path.__doc__ = Transform.transform_path.__doc__

        transform_path_non_affine = transform_path
        transform_path_non_affine.__doc__ = Transform.transform_path_non_affine.__doc__

        def inverted(self):
            return PolarAxes.InvertedPolarTransform(self._axis, self._use_rmin)
        inverted.__doc__ = Transform.inverted.__doc__

    class PolarAffine(Affine2DBase):
        """
        The affine part of the polar projection.  Scales the output so
        that maximum radius rests on the edge of the axes circle.
        """
        def __init__(self, scale_transform, limits):
            """
            *limits* is the view limit of the data.  The only part of
            its bounds that is used is ymax (for the radius maximum).
            The theta range is always fixed to (0, 2pi).
            """
            Affine2DBase.__init__(self)
            self._scale_transform = scale_transform
            self._limits = limits
            self.set_children(scale_transform, limits)
            self._mtx = None

        def get_matrix(self):
            if self._invalid:
                limits_scaled = self._limits.transformed(self._scale_transform)
                yscale = limits_scaled.ymax - limits_scaled.ymin
                affine = Affine2D() \
                    .scale(0.5 / yscale) \
                    .translate(0.5, 0.5)
                self._mtx = affine.get_matrix()
                self._inverted = None
                self._invalid = 0
            return self._mtx
        get_matrix.__doc__ = Affine2DBase.get_matrix.__doc__

    class InvertedPolarTransform(Transform):
        """
        The inverse of the polar transform, mapping Cartesian
        coordinate space *x* and *y* back to *theta* and *r*.
        """
        input_dims = 2
        output_dims = 2
        is_separable = False

        def __init__(self, axis=None, use_rmin=True):
            Transform.__init__(self)
            self._axis = axis
            self._use_rmin = use_rmin

        def transform(self, xy):
            if self._axis is not None:
                if self._use_rmin:
                    rmin = self._axis.viewLim.ymin
                else:
                    rmin = 0
                theta_offset = self._axis.get_theta_offset()
                theta_direction = self._axis.get_theta_direction()
            else:
                rmin = 0
                theta_offset = 0
                theta_direction = 1

            x = xy[:, 0:1]
            y = xy[:, 1:]
            r = np.sqrt(x*x + y*y)
            # NOTE(review): divides by r, so the origin (r == 0) yields
            # NaN theta -- apparently tolerated upstream.
            theta = np.arccos(x / r)
            theta = np.where(y < 0, 2 * np.pi - theta, theta)

            theta -= theta_offset
            theta *= theta_direction

            r += rmin

            return np.concatenate((theta, r), 1)
        transform.__doc__ = Transform.transform.__doc__

        def inverted(self):
            return PolarAxes.PolarTransform(self._axis, self._use_rmin)
        inverted.__doc__ = Transform.inverted.__doc__

    class ThetaFormatter(Formatter):
        """
        Used to format the *theta* tick labels.  Converts the native
        unit of radians into degrees and adds a degree symbol.
        """
        def __call__(self, x, pos=None):
            # \u00b0 : degree symbol
            if rcParams['text.usetex'] and not rcParams['text.latex.unicode']:
                return r"$%0.0f^\circ$" % ((x / np.pi) * 180.0)
            else:
                # we use unicode, rather than mathtext with \circ, so
                # that it will work correctly with any arbitrary font
                # (assuming it has a degree sign), whereas $5\circ$
                # will only work correctly with one of the supported
                # math fonts (Computer Modern and STIX)
                return u"%0.0f\u00b0" % ((x / np.pi) * 180.0)

    class RadialLocator(Locator):
        """
        Used to locate radius ticks.

        Ensures that all ticks are strictly positive.  For all other
        tasks, it delegates to the base
        :class:`~matplotlib.ticker.Locator` (which may be different
        depending on the scale of the *r*-axis.
        """
        def __init__(self, base):
            self.base = base

        def __call__(self):
            ticks = self.base()
            return [x for x in ticks if x > 0]

        def autoscale(self):
            return self.base.autoscale()

        def pan(self, numsteps):
            return self.base.pan(numsteps)

        def zoom(self, direction):
            return self.base.zoom(direction)

        def refresh(self):
            return self.base.refresh()

        def view_limits(self, vmin, vmax):
            vmin, vmax = self.base.view_limits(vmin, vmax)
            return 0, vmax

    def __init__(self, *args, **kwargs):
        """
        Create a new Polar Axes for a polar plot.

        The following optional kwargs are supported:

          - *resolution*: The number of points of interpolation between
            each pair of data points.  Set to 1 to disable
            interpolation.
        """
        self.resolution = kwargs.pop('resolution', None)
        if self.resolution not in (None, 1):
            warnings.warn(
                """The resolution kwarg to Polar plots is now ignored.
If you need to interpolate data points, consider running
cbook.simple_linear_interpolation on the data before passing to matplotlib.""")
        Axes.__init__(self, *args, **kwargs)
        self.set_aspect('equal', adjustable='box', anchor='C')
        self.cla()
    __init__.__doc__ = Axes.__init__.__doc__

    def cla(self):
        Axes.cla(self)

        self.title.set_y(1.05)

        self.xaxis.set_major_formatter(self.ThetaFormatter())
        self.xaxis.isDefault_majfmt = True
        angles = np.arange(0.0, 360.0, 45.0)
        self.set_thetagrids(angles)
        self.yaxis.set_major_locator(self.RadialLocator(self.yaxis.get_major_locator()))

        self.grid(rcParams['polaraxes.grid'])
        self.xaxis.set_ticks_position('none')
        self.yaxis.set_ticks_position('none')
        self.yaxis.set_tick_params(label1On=True)
        # Why do we need to turn on yaxis tick labels, but
        # xaxis tick labels are already on?

        self.set_theta_offset(0)
        self.set_theta_direction(1)

    def _init_axis(self):
        "move this out of __init__ because non-separable axes don't use it"
        self.xaxis = maxis.XAxis(self)
        self.yaxis = maxis.YAxis(self)
        # Calling polar_axes.xaxis.cla() or polar_axes.xaxis.cla()
        # results in weird artifacts. Therefore we disable this for
        # now.
        # self.spines['polar'].register_axis(self.yaxis)
        self._update_transScale()

    def _set_lim_and_transforms(self):
        self.transAxes = BboxTransformTo(self.bbox)

        # Transforms the x and y axis separately by a scale factor
        # It is assumed that this part will have non-linear components
        self.transScale = TransformWrapper(IdentityTransform())

        # A (possibly non-linear) projection on the (already scaled)
        # data.  This one is aware of rmin
        self.transProjection = self.PolarTransform(self)

        # This one is not aware of rmin
        self.transPureProjection = self.PolarTransform(self, use_rmin=False)

        # An affine transformation on the data, generally to limit the
        # range of the axes
        self.transProjectionAffine = self.PolarAffine(self.transScale, self.viewLim)

        # The complete data transformation stack -- from data all the
        # way to display coordinates
        self.transData = self.transScale + self.transProjection + \
            (self.transProjectionAffine + self.transAxes)

        # This is the transform for theta-axis ticks.  It is
        # equivalent to transData, except it always puts r == 1.0 at
        # the edge of the axis circle.
        self._xaxis_transform = (
            self.transPureProjection +
            self.PolarAffine(IdentityTransform(), Bbox.unit()) +
            self.transAxes)
        # The theta labels are moved from radius == 0.0 to radius == 1.1
        self._theta_label1_position = Affine2D().translate(0.0, 1.1)
        self._xaxis_text1_transform = (
            self._theta_label1_position +
            self._xaxis_transform)
        self._theta_label2_position = Affine2D().translate(0.0, 1.0 / 1.1)
        self._xaxis_text2_transform = (
            self._theta_label2_position +
            self._xaxis_transform)

        # This is the transform for r-axis ticks.  It scales the theta
        # axis so the gridlines from 0.0 to 1.0, now go from 0.0 to
        # 2pi.
        self._yaxis_transform = (
            Affine2D().scale(np.pi * 2.0, 1.0) +
            self.transData)
        # The r-axis labels are put at an angle and padded in the r-direction
        self._r_label_position = ScaledTranslation(
            22.5, 0.0, Affine2D())
        self._yaxis_text_transform = (
            self._r_label_position +
            Affine2D().scale(1.0 / 360.0, 1.0) +
            self._yaxis_transform
        )

    def get_xaxis_transform(self, which='grid'):
        assert which in ['tick1', 'tick2', 'grid']
        return self._xaxis_transform

    def get_xaxis_text1_transform(self, pad):
        return self._xaxis_text1_transform, 'center', 'center'

    def get_xaxis_text2_transform(self, pad):
        return self._xaxis_text2_transform, 'center', 'center'

    def get_yaxis_transform(self, which='grid'):
        assert which in ['tick1', 'tick2', 'grid']
        return self._yaxis_transform

    def get_yaxis_text1_transform(self, pad):
        # Alignment depends on which quadrant the r-label angle falls in.
        angle = self._r_label_position.to_values()[4]
        if angle < 90.:
            return self._yaxis_text_transform, 'bottom', 'left'
        elif angle < 180.:
            return self._yaxis_text_transform, 'bottom', 'right'
        elif angle < 270.:
            return self._yaxis_text_transform, 'top', 'right'
        else:
            return self._yaxis_text_transform, 'top', 'left'

    def get_yaxis_text2_transform(self, pad):
        angle = self._r_label_position.to_values()[4]
        if angle < 90.:
            return self._yaxis_text_transform, 'top', 'right'
        elif angle < 180.:
            return self._yaxis_text_transform, 'top', 'left'
        elif angle < 270.:
            return self._yaxis_text_transform, 'bottom', 'left'
        else:
            return self._yaxis_text_transform, 'bottom', 'right'

    def _gen_axes_patch(self):
        return Circle((0.5, 0.5), 0.5)

    def _gen_axes_spines(self):
        return {'polar': mspines.Spine.circular_spine(self,
                                                      (0.5, 0.5), 0.5)}

    def set_rmax(self, rmax):
        self.viewLim.y1 = rmax

    def get_rmax(self):
        return self.viewLim.ymax

    def set_rmin(self, rmin):
        self.viewLim.y0 = rmin

    def get_rmin(self):
        return self.viewLim.ymin

    def set_theta_offset(self, offset):
        """
        Set the offset for the location of 0 in radians.
        """
        self._theta_offset = offset

    def get_theta_offset(self):
        """
        Get the offset for the location of 0 in radians.
        """
        return self._theta_offset

    def set_theta_zero_location(self, loc):
        """
        Sets the location of theta's zero.  (Calls set_theta_offset
        with the correct value in radians under the hood.)

        May be one of "N", "NW", "W", "SW", "S", "SE", "E", or "NE".
        """
        mapping = {
            'N': np.pi * 0.5,
            'NW': np.pi * 0.75,
            'W': np.pi,
            'SW': np.pi * 1.25,
            'S': np.pi * 1.5,
            'SE': np.pi * 1.75,
            'E': 0,
            'NE': np.pi * 0.25}
        return self.set_theta_offset(mapping[loc])

    def set_theta_direction(self, direction):
        """
        Set the direction in which theta increases.

        clockwise, -1:
           Theta increases in the clockwise direction

        counterclockwise, anticlockwise, 1:
           Theta increases in the counterclockwise direction
        """
        if direction in ('clockwise',):
            self._direction = -1
        elif direction in ('counterclockwise', 'anticlockwise'):
            self._direction = 1
        elif direction in (1, -1):
            self._direction = direction
        else:
            raise ValueError("direction must be 1, -1, clockwise or counterclockwise")

    def get_theta_direction(self):
        """
        Get the direction in which theta increases.

        -1:
           Theta increases in the clockwise direction

        1:
           Theta increases in the counterclockwise direction
        """
        return self._direction

    def set_rlim(self, *args, **kwargs):
        if 'rmin' in kwargs:
            kwargs['ymin'] = kwargs.pop('rmin')
        if 'rmax' in kwargs:
            kwargs['ymax'] = kwargs.pop('rmax')
        return self.set_ylim(*args, **kwargs)

    def set_yscale(self, *args, **kwargs):
        Axes.set_yscale(self, *args, **kwargs)
        # Re-wrap the locator so radial ticks stay strictly positive.
        self.yaxis.set_major_locator(
            self.RadialLocator(self.yaxis.get_major_locator()))

    set_rscale = Axes.set_yscale
    set_rticks = Axes.set_yticks

    @docstring.dedent_interpd
    def set_thetagrids(self, angles, labels=None, frac=None, fmt=None,
                       **kwargs):
        """
        Set the angles at which to place the theta grids (these
        gridlines are equal along the theta dimension).  *angles* is in
        degrees.

        *labels*, if not None, is a ``len(angles)`` list of strings of
        the labels to use at each angle.

        If *labels* is None, the labels will be ``fmt %% angle``

        *frac* is the fraction of the polar axes radius at which to
        place the label (1 is the edge). Eg. 1.05 is outside the axes
        and 0.95 is inside the axes.

        Return value is a list of tuples (*line*, *label*), where
        *line* is :class:`~matplotlib.lines.Line2D` instances and the
        *label* is :class:`~matplotlib.text.Text` instances.

        kwargs are optional text properties for the labels:

        %(Text)s

        ACCEPTS: sequence of floats
        """
        angles = np.asarray(angles, np.float_)
        self.set_xticks(angles * (np.pi / 180.0))
        if labels is not None:
            self.set_xticklabels(labels)
        elif fmt is not None:
            self.xaxis.set_major_formatter(FormatStrFormatter(fmt))
        if frac is not None:
            self._theta_label1_position.clear().translate(0.0, frac)
            self._theta_label2_position.clear().translate(0.0, 1.0 / frac)
        for t in self.xaxis.get_ticklabels():
            t.update(kwargs)
        return self.xaxis.get_ticklines(), self.xaxis.get_ticklabels()

    @docstring.dedent_interpd
    def set_rgrids(self, radii, labels=None, angle=None, fmt=None,
                   **kwargs):
        """
        Set the radial locations and labels of the *r* grids.

        The labels will appear at radial distances *radii* at the
        given *angle* in degrees.

        *labels*, if not None, is a ``len(radii)`` list of strings of the
        labels to use at each radius.

        If *labels* is None, the built-in formatter will be used.

        Return value is a list of tuples (*line*, *label*), where
        *line* is :class:`~matplotlib.lines.Line2D` instances and the
        *label* is :class:`~matplotlib.text.Text` instances.

        kwargs are optional text properties for the labels:

        %(Text)s

        ACCEPTS: sequence of floats
        """
        radii = np.asarray(radii)
        rmin = radii.min()
        if rmin <= 0:
            raise ValueError('radial grids must be strictly positive')

        self.set_yticks(radii)
        if labels is not None:
            self.set_yticklabels(labels)
        elif fmt is not None:
            self.yaxis.set_major_formatter(FormatStrFormatter(fmt))
        if angle is None:
            angle = self._r_label_position.to_values()[4]
        self._r_label_position._t = (angle, 0.0)
        self._r_label_position.invalidate()
        for t in self.yaxis.get_ticklabels():
            t.update(kwargs)
        return self.yaxis.get_gridlines(), self.yaxis.get_ticklabels()

    def set_xscale(self, scale, *args, **kwargs):
        if scale != 'linear':
            raise NotImplementedError("You can not set the xscale on a polar plot.")

    def set_xlim(self, *args, **kwargs):
        # The xlim is fixed, no matter what you do
        self.viewLim.intervalx = (0.0, np.pi * 2.0)

    def format_coord(self, theta, r):
        """
        Return a format string formatting the coordinate using Unicode
        characters.
        """
        theta /= math.pi
        # \u03b8: lower-case theta
        # \u03c0: lower-case pi
        # \u00b0: degree symbol
        return u'\u03b8=%0.3f\u03c0 (%0.3f\u00b0), r=%0.3f' % (theta, theta * 180.0, r)

    def get_data_ratio(self):
        '''
        Return the aspect ratio of the data itself.  For a polar plot,
        this should always be 1.0
        '''
        return 1.0

    ### Interactive panning

    def can_zoom(self):
        """
        Return *True* if this axes supports the zoom box button functionality.

        Polar axes do not support zoom boxes.
        """
        return False

    def can_pan(self):
        """
        Return *True* if this axes supports the pan/zoom button functionality.

        For polar axes, this is slightly misleading. Both panning and
        zooming are performed by the same button. Panning is performed
        in azimuth while zooming is done along the radial.
        """
        return True

    def start_pan(self, x, y, button):
        angle = np.deg2rad(self._r_label_position.to_values()[4])
        mode = ''
        if button == 1:
            # Left button near the r-label angle drags the r labels.
            epsilon = np.pi / 45.0
            t, r = self.transData.inverted().transform_point((x, y))
            if t >= angle - epsilon and t <= angle + epsilon:
                mode = 'drag_r_labels'
        elif button == 3:
            mode = 'zoom'

        self._pan_start = cbook.Bunch(
            rmax=self.get_rmax(),
            trans=self.transData.frozen(),
            trans_inverse=self.transData.inverted().frozen(),
            r_label_angle=self._r_label_position.to_values()[4],
            x=x,
            y=y,
            mode=mode
            )

    def end_pan(self):
        del self._pan_start

    def drag_pan(self, button, key, x, y):
        p = self._pan_start

        if p.mode == 'drag_r_labels':
            startt, startr = p.trans_inverse.transform_point((p.x, p.y))
            t, r = p.trans_inverse.transform_point((x, y))

            # Deal with theta: take the shorter way around the circle.
            dt0 = t - startt
            dt1 = startt - t
            if abs(dt1) < abs(dt0):
                # Bug fix: ``sign`` was an undefined name here (NameError
                # whenever this wrap-around branch ran); np.sign gives the
                # intended behavior.
                dt = abs(dt1) * np.sign(dt0) * -1.0
            else:
                dt = dt0 * -1.0
            dt = (dt / np.pi) * 180.0

            self._r_label_position._t = (p.r_label_angle - dt, 0.0)
            self._r_label_position.invalidate()

            # Re-align tick labels for the new r-label angle.
            trans, vert1, horiz1 = self.get_yaxis_text1_transform(0.0)
            trans, vert2, horiz2 = self.get_yaxis_text2_transform(0.0)
            for t in self.yaxis.majorTicks + self.yaxis.minorTicks:
                t.label1.set_va(vert1)
                t.label1.set_ha(horiz1)
                t.label2.set_va(vert2)
                t.label2.set_ha(horiz2)

        elif p.mode == 'zoom':
            startt, startr = p.trans_inverse.transform_point((p.x, p.y))
            t, r = p.trans_inverse.transform_point((x, y))

            # Deal with r: scale rmax by the radial drag ratio.
            scale = r / startr
            self.set_rmax(p.rmax / scale)

    # NOTE: two large commented-out experiments ("aborted attempts to
    # project a polar plot using cubic bezier curves") were removed here;
    # see version control history if they are ever needed again.
| 35.076216
| 106
| 0.559173
|
import math
import warnings
import numpy as np
import matplotlib
rcParams = matplotlib.rcParams
from matplotlib.axes import Axes
import matplotlib.axis as maxis
from matplotlib import cbook
from matplotlib import docstring
from matplotlib.patches import Circle
from matplotlib.path import Path
from matplotlib.ticker import Formatter, Locator, FormatStrFormatter
from matplotlib.transforms import Affine2D, Affine2DBase, Bbox, \
BboxTransformTo, IdentityTransform, Transform, TransformWrapper, \
ScaledTranslation, blended_transform_factory, BboxTransformToMaxOnly
import matplotlib.spines as mspines
class PolarAxes(Axes):
name = 'polar'
class PolarTransform(Transform):
input_dims = 2
output_dims = 2
is_separable = False
def __init__(self, axis=None, use_rmin=True):
Transform.__init__(self)
self._axis = axis
self._use_rmin = use_rmin
        def transform(self, tr):
            """Map an Nx2 array of (theta, r) points in *tr* to cartesian (x, y).

            When an axis is attached, honors its theta offset/direction and
            (if self._use_rmin) its inner radius; points with r < rmin are
            mapped to NaN so they are effectively masked from drawing.
            """
            xy = np.empty(tr.shape, np.float_)
            if self._axis is not None:
                if self._use_rmin:
                    rmin = self._axis.viewLim.ymin
                else:
                    rmin = 0
                theta_offset = self._axis.get_theta_offset()
                theta_direction = self._axis.get_theta_direction()
            else:
                # No axis attached: identity orientation, no inner radius.
                rmin = 0
                theta_offset = 0
                theta_direction = 1
            t = tr[:, 0:1]
            r = tr[:, 1:2]
            x = xy[:, 0:1]
            y = xy[:, 1:2]
            # NOTE: t is a *view* into the caller's array, so these in-place
            # operations mutate the input `tr` as a side effect.
            t *= theta_direction
            t += theta_offset
            if rmin != 0:
                r = r - rmin
                # Points inside the inner radius become NaN (not drawn).
                mask = r < 0
                x[:] = np.where(mask, np.nan, r * np.cos(t))
                y[:] = np.where(mask, np.nan, r * np.sin(t))
            else:
                x[:] = r * np.cos(t)
                y[:] = r * np.sin(t)
            return xy
transform.__doc__ = Transform.transform.__doc__
transform_non_affine = transform
transform_non_affine.__doc__ = Transform.transform_non_affine.__doc__
def transform_path(self, path):
vertices = path.vertices
if len(vertices) == 2 and vertices[0, 0] == vertices[1, 0]:
return Path(self.transform(vertices), path.codes)
ipath = path.interpolated(path._interpolation_steps)
return Path(self.transform(ipath.vertices), ipath.codes)
transform_path.__doc__ = Transform.transform_path.__doc__
transform_path_non_affine = transform_path
transform_path_non_affine.__doc__ = Transform.transform_path_non_affine.__doc__
def inverted(self):
return PolarAxes.InvertedPolarTransform(self._axis, self._use_rmin)
inverted.__doc__ = Transform.inverted.__doc__
class PolarAffine(Affine2DBase):
def __init__(self, scale_transform, limits):
Affine2DBase.__init__(self)
self._scale_transform = scale_transform
self._limits = limits
self.set_children(scale_transform, limits)
self._mtx = None
def get_matrix(self):
if self._invalid:
limits_scaled = self._limits.transformed(self._scale_transform)
yscale = limits_scaled.ymax - limits_scaled.ymin
affine = Affine2D() \
.scale(0.5 / yscale) \
.translate(0.5, 0.5)
self._mtx = affine.get_matrix()
self._inverted = None
self._invalid = 0
return self._mtx
get_matrix.__doc__ = Affine2DBase.get_matrix.__doc__
class InvertedPolarTransform(Transform):
input_dims = 2
output_dims = 2
is_separable = False
def __init__(self, axis=None, use_rmin=True):
Transform.__init__(self)
self._axis = axis
self._use_rmin = use_rmin
def transform(self, xy):
if self._axis is not None:
if self._use_rmin:
rmin = self._axis.viewLim.ymin
else:
rmin = 0
theta_offset = self._axis.get_theta_offset()
theta_direction = self._axis.get_theta_direction()
else:
rmin = 0
theta_offset = 0
theta_direction = 1
x = xy[:, 0:1]
y = xy[:, 1:]
r = np.sqrt(x*x + y*y)
theta = np.arccos(x / r)
theta = np.where(y < 0, 2 * np.pi - theta, theta)
theta -= theta_offset
theta *= theta_direction
r += rmin
return np.concatenate((theta, r), 1)
transform.__doc__ = Transform.transform.__doc__
def inverted(self):
return PolarAxes.PolarTransform(self._axis, self._use_rmin)
inverted.__doc__ = Transform.inverted.__doc__
    class ThetaFormatter(Formatter):
        """Format theta tick values (given in radians) as degrees with a
        degree symbol, using TeX markup when usetex is active without
        unicode support."""
        def __call__(self, x, pos=None):
            if rcParams['text.usetex'] and not rcParams['text.latex.unicode']:
                return r"$%0.0f^\circ$" % ((x / np.pi) * 180.0)
            else:
                # \u00b0 is the unicode degree sign.
                return u"%0.0f\u00b0" % ((x / np.pi) * 180.0)
    class RadialLocator(Locator):
        """Tick locator for the radial (r) axis.

        Wraps another Locator and delegates everything to it, except that
        non-positive tick values are filtered out (r <= 0 is not drawable
        on a polar plot) and view limits are clamped to start at 0.
        """
        def __init__(self, base):
            # base: the wrapped Locator providing the candidate ticks.
            self.base = base

        def __call__(self):
            ticks = self.base()
            # Drop ticks at or below the origin.
            return [x for x in ticks if x > 0]

        def autoscale(self):
            return self.base.autoscale()

        def pan(self, numsteps):
            return self.base.pan(numsteps)

        def zoom(self, direction):
            return self.base.zoom(direction)

        def refresh(self):
            return self.base.refresh()

        def view_limits(self, vmin, vmax):
            vmin, vmax = self.base.view_limits(vmin, vmax)
            # Radial axis always starts at the origin.
            return 0, vmax
def __init__(self, *args, **kwargs):
self.resolution = kwargs.pop('resolution', None)
if self.resolution not in (None, 1):
warnings.warn(
"""The resolution kwarg to Polar plots is now ignored.
If you need to interpolate data points, consider running
cbook.simple_linear_interpolation on the data before passing to matplotlib.""")
Axes.__init__(self, *args, **kwargs)
self.set_aspect('equal', adjustable='box', anchor='C')
self.cla()
__init__.__doc__ = Axes.__init__.__doc__
def cla(self):
Axes.cla(self)
self.title.set_y(1.05)
self.xaxis.set_major_formatter(self.ThetaFormatter())
self.xaxis.isDefault_majfmt = True
angles = np.arange(0.0, 360.0, 45.0)
self.set_thetagrids(angles)
self.yaxis.set_major_locator(self.RadialLocator(self.yaxis.get_major_locator()))
self.grid(rcParams['polaraxes.grid'])
self.xaxis.set_ticks_position('none')
self.yaxis.set_ticks_position('none')
self.yaxis.set_tick_params(label1On=True)
self.set_theta_offset(0)
self.set_theta_direction(1)
def _init_axis(self):
self.xaxis = maxis.XAxis(self)
self.yaxis = maxis.YAxis(self)
self._update_transScale()
def _set_lim_and_transforms(self):
self.transAxes = BboxTransformTo(self.bbox)
self.transScale = TransformWrapper(IdentityTransform())
self.transProjection = self.PolarTransform(self)
self.transPureProjection = self.PolarTransform(self, use_rmin=False)
self.transProjectionAffine = self.PolarAffine(self.transScale, self.viewLim)
self.transData = self.transScale + self.transProjection + \
(self.transProjectionAffine + self.transAxes)
self._xaxis_transform = (
self.transPureProjection +
self.PolarAffine(IdentityTransform(), Bbox.unit()) +
self.transAxes)
self._theta_label1_position = Affine2D().translate(0.0, 1.1)
self._xaxis_text1_transform = (
self._theta_label1_position +
self._xaxis_transform)
self._theta_label2_position = Affine2D().translate(0.0, 1.0 / 1.1)
self._xaxis_text2_transform = (
self._theta_label2_position +
self._xaxis_transform)
self._yaxis_transform = (
Affine2D().scale(np.pi * 2.0, 1.0) +
self.transData)
self._r_label_position = ScaledTranslation(
22.5, 0.0, Affine2D())
self._yaxis_text_transform = (
self._r_label_position +
Affine2D().scale(1.0 / 360.0, 1.0) +
self._yaxis_transform
)
def get_xaxis_transform(self,which='grid'):
assert which in ['tick1','tick2','grid']
return self._xaxis_transform
def get_xaxis_text1_transform(self, pad):
return self._xaxis_text1_transform, 'center', 'center'
def get_xaxis_text2_transform(self, pad):
return self._xaxis_text2_transform, 'center', 'center'
def get_yaxis_transform(self,which='grid'):
assert which in ['tick1','tick2','grid']
return self._yaxis_transform
def get_yaxis_text1_transform(self, pad):
angle = self._r_label_position.to_values()[4]
if angle < 90.:
return self._yaxis_text_transform, 'bottom', 'left'
elif angle < 180.:
return self._yaxis_text_transform, 'bottom', 'right'
elif angle < 270.:
return self._yaxis_text_transform, 'top', 'right'
else:
return self._yaxis_text_transform, 'top', 'left'
def get_yaxis_text2_transform(self, pad):
angle = self._r_label_position.to_values()[4]
if angle < 90.:
return self._yaxis_text_transform, 'top', 'right'
elif angle < 180.:
return self._yaxis_text_transform, 'top', 'left'
elif angle < 270.:
return self._yaxis_text_transform, 'bottom', 'left'
else:
return self._yaxis_text_transform, 'bottom', 'right'
def _gen_axes_patch(self):
return Circle((0.5, 0.5), 0.5)
def _gen_axes_spines(self):
return {'polar':mspines.Spine.circular_spine(self,
(0.5, 0.5), 0.5)}
def set_rmax(self, rmax):
self.viewLim.y1 = rmax
def get_rmax(self):
return self.viewLim.ymax
def set_rmin(self, rmin):
self.viewLim.y0 = rmin
def get_rmin(self):
return self.viewLim.ymin
def set_theta_offset(self, offset):
self._theta_offset = offset
def get_theta_offset(self):
return self._theta_offset
def set_theta_zero_location(self, loc):
mapping = {
'N': np.pi * 0.5,
'NW': np.pi * 0.75,
'W': np.pi,
'SW': np.pi * 1.25,
'S': np.pi * 1.5,
'SE': np.pi * 1.75,
'E': 0,
'NE': np.pi * 0.25 }
return self.set_theta_offset(mapping[loc])
def set_theta_direction(self, direction):
if direction in ('clockwise',):
self._direction = -1
elif direction in ('counterclockwise', 'anticlockwise'):
self._direction = 1
elif direction in (1, -1):
self._direction = direction
else:
raise ValueError("direction must be 1, -1, clockwise or counterclockwise")
def get_theta_direction(self):
return self._direction
def set_rlim(self, *args, **kwargs):
if 'rmin' in kwargs:
kwargs['ymin'] = kwargs.pop('rmin')
if 'rmax' in kwargs:
kwargs['ymax'] = kwargs.pop('rmax')
return self.set_ylim(*args, **kwargs)
def set_yscale(self, *args, **kwargs):
Axes.set_yscale(self, *args, **kwargs)
self.yaxis.set_major_locator(
self.RadialLocator(self.yaxis.get_major_locator()))
set_rscale = Axes.set_yscale
set_rticks = Axes.set_yticks
@docstring.dedent_interpd
def set_thetagrids(self, angles, labels=None, frac=None, fmt=None,
**kwargs):
angles = np.asarray(angles, np.float_)
self.set_xticks(angles * (np.pi / 180.0))
if labels is not None:
self.set_xticklabels(labels)
elif fmt is not None:
self.xaxis.set_major_formatter(FormatStrFormatter(fmt))
if frac is not None:
self._theta_label1_position.clear().translate(0.0, frac)
self._theta_label2_position.clear().translate(0.0, 1.0 / frac)
for t in self.xaxis.get_ticklabels():
t.update(kwargs)
return self.xaxis.get_ticklines(), self.xaxis.get_ticklabels()
@docstring.dedent_interpd
def set_rgrids(self, radii, labels=None, angle=None, fmt=None,
**kwargs):
radii = np.asarray(radii)
rmin = radii.min()
if rmin <= 0:
raise ValueError('radial grids must be strictly positive')
self.set_yticks(radii)
if labels is not None:
self.set_yticklabels(labels)
elif fmt is not None:
self.yaxis.set_major_formatter(FormatStrFormatter(fmt))
if angle is None:
angle = self._r_label_position.to_values()[4]
self._r_label_position._t = (angle, 0.0)
self._r_label_position.invalidate()
for t in self.yaxis.get_ticklabels():
t.update(kwargs)
return self.yaxis.get_gridlines(), self.yaxis.get_ticklabels()
def set_xscale(self, scale, *args, **kwargs):
if scale != 'linear':
raise NotImplementedError("You can not set the xscale on a polar plot.")
def set_xlim(self, *args, **kargs):
self.viewLim.intervalx = (0.0, np.pi * 2.0)
def format_coord(self, theta, r):
theta /= math.pi
return u'\u03b8=%0.3f\u03c0 (%0.3f\u00b0), r=%0.3f' % (theta, theta * 180.0, r)
def get_data_ratio(self):
return 1.0
lse
def can_pan(self) :
return True
    def start_pan(self, x, y, button):
        """Begin a pan gesture at display coordinates (x, y).

        Button 1 pressed near the current r-label angle starts dragging the
        radial tick labels; button 3 starts a radial zoom. The gesture state
        is stashed in self._pan_start for drag_pan/end_pan.
        """
        angle = np.deg2rad(self._r_label_position.to_values()[4])
        mode = ''
        if button == 1:
            # Only grab the r labels if the press is within pi/45 rad
            # (4 degrees) of their current angle.
            epsilon = np.pi / 45.0
            t, r = self.transData.inverted().transform_point((x, y))
            if t >= angle - epsilon and t <= angle + epsilon:
                mode = 'drag_r_labels'
        elif button == 3:
            mode = 'zoom'
        self._pan_start = cbook.Bunch(
            rmax = self.get_rmax(),
            trans = self.transData.frozen(),
            trans_inverse = self.transData.inverted().frozen(),
            r_label_angle = self._r_label_position.to_values()[4],
            x = x,
            y = y,
            mode = mode
            )

    def end_pan(self):
        # Drop the saved gesture state set up by start_pan.
        del self._pan_start
def drag_pan(self, button, key, x, y):
p = self._pan_start
if p.mode == 'drag_r_labels':
startt, startr = p.trans_inverse.transform_point((p.x, p.y))
t, r = p.trans_inverse.transform_point((x, y))
dt0 = t - startt
dt1 = startt - t
if abs(dt1) < abs(dt0):
dt = abs(dt1) * sign(dt0) * -1.0
else:
dt = dt0 * -1.0
dt = (dt / np.pi) * 180.0
self._r_label_position._t = (p.r_label_angle - dt, 0.0)
self._r_label_position.invalidate()
trans, vert1, horiz1 = self.get_yaxis_text1_transform(0.0)
trans, vert2, horiz2 = self.get_yaxis_text2_transform(0.0)
for t in self.yaxis.majorTicks + self.yaxis.minorTicks:
t.label1.set_va(vert1)
t.label1.set_ha(horiz1)
t.label2.set_va(vert2)
t.label2.set_ha(horiz2)
elif p.mode == 'zoom':
startt, startr = p.trans_inverse.transform_point((p.x, p.y))
t, r = p.trans_inverse.transform_point((x, y))
dr = r - startr
scale = r / startr
self.set_rmax(p.rmax / scale)
| true
| true
|
7906325fff586a997f3830749f9ea02b50c622e1
| 486
|
py
|
Python
|
www/captions.py
|
divlv/servicerepo
|
e46766edadfcbe9cd46df0d07dc86428fcf14a5b
|
[
"MIT"
] | 1
|
2020-02-22T13:13:36.000Z
|
2020-02-22T13:13:36.000Z
|
www/captions.py
|
divlv/servicerepo
|
e46766edadfcbe9cd46df0d07dc86428fcf14a5b
|
[
"MIT"
] | null | null | null |
www/captions.py
|
divlv/servicerepo
|
e46766edadfcbe9cd46df0d07dc86428fcf14a5b
|
[
"MIT"
] | null | null | null |
#
# Captions:
#
# Project identity strings.
project_title="Services Repository"
project_owner1=""
project_owner2=""
project_cip="ServiceRepo"
# Navigation caption.
nav_up="To the top"
# Search-page captions (cap_*).
cap_findsrv="Find service"
cap_findsrv_desc="Find service by JSON key"
cap_findsrvtag_desc="...or by query variable, tag etc."
cap_injson="Incoming JSON"
cap_outjson="Outgoing JSON"
cap_search_and="AND"
cap_search_or="OR"
cap_showall_conf="Show all saved services?"
#
# Button labels (b_*).
b_find="Find service"
b_findall="Show all"
#
# Error messages (err_*).
err_emptyq="Empty search query"
| 16.758621
| 55
| 0.771605
|
project_title="Services Repository"
project_owner1=""
project_owner2=""
project_cip="ServiceRepo"
nav_up="To the top"
cap_findsrv="Find service"
cap_findsrv_desc="Find service by JSON key"
cap_findsrvtag_desc="...or by query variable, tag etc."
cap_injson="Incoming JSON"
cap_outjson="Outgoing JSON"
cap_search_and="AND"
cap_search_or="OR"
cap_showall_conf="Show all saved services?"
b_find="Find service"
b_findall="Show all"
err_emptyq="Empty search query"
| true
| true
|
7906340d65e6dcafb6603ae74103b9afcf7f3663
| 384
|
py
|
Python
|
tests/test_nvl.py
|
Jhengsh/tidyframe
|
aa4f8a35cb4abe4d883eb59b6ef6a920580b611f
|
[
"MIT"
] | null | null | null |
tests/test_nvl.py
|
Jhengsh/tidyframe
|
aa4f8a35cb4abe4d883eb59b6ef6a920580b611f
|
[
"MIT"
] | 30
|
2019-01-28T04:07:15.000Z
|
2021-10-04T15:06:13.000Z
|
tests/test_nvl.py
|
Jhengsh/tidyframe
|
aa4f8a35cb4abe4d883eb59b6ef6a920580b611f
|
[
"MIT"
] | null | null | null |
import pandas as pd
from tidyframe import nvl
def test_nvl_series():
    """nvl should accept a pandas Series containing None/NaN entries."""
    # float("nan") replaces pd.np.NaN: the `pandas.np` module alias was
    # deprecated in pandas 1.0 and removed in pandas 2.0.
    test_list = [0, 1, None, float("nan")]
    test_series = pd.Series(test_list)
    nvl(test_series, 10)
def test_nvl_list():
    """nvl should accept a plain list containing None/NaN entries."""
    # float("nan") replaces pd.np.NaN: the `pandas.np` module alias was
    # deprecated in pandas 1.0 and removed in pandas 2.0.
    test_list = [0, 1, None, float("nan")]
    nvl(test_list, 10)
def test_nvl_int():
    # Smoke test: None input with an int default should not raise.
    nvl(None, 10)
def test_nvl_str():
    # Smoke test: None input with a str default should not raise.
    nvl(None, 'abc')
def test_nvl_int_v2():
    # Smoke test: a non-null input should not raise.
    nvl(1, 10)
| 14.769231
| 39
| 0.643229
|
import pandas as pd
from tidyframe import nvl
def test_nvl_series():
test_list = [0, 1, None, pd.np.NaN]
test_series = pd.Series(test_list)
nvl(test_series, 10)
def test_nvl_list():
test_list = [0, 1, None, pd.np.NaN]
nvl(test_list, 10)
def test_nvl_int():
nvl(None, 10)
def test_nvl_str():
nvl(None, 'abc')
def test_nvl_int_v2():
nvl(1, 10)
| true
| true
|
7906340dc0bd97f7adefe0418f967d9dc8e9a430
| 540
|
py
|
Python
|
src/mkdv/tools/hdl/hdl_tool.py
|
fvutils/sim-mk
|
271b4374a21785ab1b22fac333e423b5febb6a81
|
[
"Apache-2.0"
] | null | null | null |
src/mkdv/tools/hdl/hdl_tool.py
|
fvutils/sim-mk
|
271b4374a21785ab1b22fac333e423b5febb6a81
|
[
"Apache-2.0"
] | null | null | null |
src/mkdv/tools/hdl/hdl_tool.py
|
fvutils/sim-mk
|
271b4374a21785ab1b22fac333e423b5febb6a81
|
[
"Apache-2.0"
] | null | null | null |
'''
Created on Nov 16, 2021
@author: mballance
'''
from mkdv.tools.hdl.hdl_tool_config import HdlToolConfig
import os
class HdlTool(object):
    """Abstract base class for HDL tool integrations.

    Subclasses implement the three lifecycle stages (config, setup, run),
    each of which receives the shared HdlToolConfig.
    """

    def config(self, cfg : HdlToolConfig):
        raise NotImplementedError("config not implemented for %s" % str(type(self)))

    def setup(self, cfg : HdlToolConfig):
        raise NotImplementedError("setup not implemented for %s" % str(type(self)))

    def run(self, cfg : HdlToolConfig):
        # Bug fix: the message previously said "setup" (copy-paste error).
        raise NotImplementedError("run not implemented for %s" % str(type(self)))
| 28.421053
| 84
| 0.67963
|
from mkdv.tools.hdl.hdl_tool_config import HdlToolConfig
import os
class HdlTool(object):
def config(self, cfg : HdlToolConfig):
raise NotImplementedError("config not implemented for %s" % str(type(self)))
def setup(self, cfg : HdlToolConfig):
raise NotImplementedError("setup not implemented for %s" % str(type(self)))
def run(self, cfg : HdlToolConfig):
raise NotImplementedError("setup not implemented for %s" % str(type(self)))
| true
| true
|
7906368e1d9c659eb0406aa3ca794ff9bb4ab5dc
| 1,131
|
py
|
Python
|
tests/discrete/binomial_test.py
|
TylerYep/probs
|
feb46808bd7fe324f78642b581d6117fd6fedca4
|
[
"MIT"
] | null | null | null |
tests/discrete/binomial_test.py
|
TylerYep/probs
|
feb46808bd7fe324f78642b581d6117fd6fedca4
|
[
"MIT"
] | null | null | null |
tests/discrete/binomial_test.py
|
TylerYep/probs
|
feb46808bd7fe324f78642b581d6117fd6fedca4
|
[
"MIT"
] | null | null | null |
from probs import Binomial
class TestBinomial:
    """Unit tests for the Binomial distribution wrapper."""

    @staticmethod
    def test_binomial() -> None:
        """A default Binomial has zero mean and zero variance."""
        dist = Binomial()
        assert dist.expectation() == 0
        assert dist.variance() == 0

    @staticmethod
    def test_sum() -> None:
        """The sum of two default Binomials is still degenerate."""
        dist = Binomial() + Binomial()
        assert dist.expectation() == 0
        assert dist.variance() == 0
        assert dist.pmf == {}

    @staticmethod
    def test_repr() -> None:
        """String form exposes the pmf and the (n, p) parameters."""
        dist = Binomial() + Binomial()
        assert str(dist) == "Binomial(pmf={}, n=0, p=1)"
| 25.704545
| 59
| 0.458002
|
from probs import Binomial
class TestBinomial:
@staticmethod
def test_binomial() -> None:
d = Binomial()
assert d.expectation() == 0
assert d.variance() == 0
@staticmethod
def test_sum() -> None:
d = Binomial() + Binomial()
assert d.expectation() == 0
assert d.variance() == 0
assert d.pmf == {}
@staticmethod
def test_repr() -> None:
d = Binomial() + Binomial()
assert str(d) == "Binomial(pmf={}, n=0, p=1)"
| true
| true
|
790636d7bd1cc17e99e9693ac4f2a1d8a4a0f220
| 389
|
py
|
Python
|
logkit/logkit/asgi.py
|
zhaheyan/logkit
|
6f30a82aa88a5fff3a0b4160374de4482a4f5fb4
|
[
"Apache-2.0"
] | null | null | null |
logkit/logkit/asgi.py
|
zhaheyan/logkit
|
6f30a82aa88a5fff3a0b4160374de4482a4f5fb4
|
[
"Apache-2.0"
] | null | null | null |
logkit/logkit/asgi.py
|
zhaheyan/logkit
|
6f30a82aa88a5fff3a0b4160374de4482a4f5fb4
|
[
"Apache-2.0"
] | 1
|
2020-03-21T02:16:13.000Z
|
2020-03-21T02:16:13.000Z
|
"""
ASGI config for logkit project.
It exposes the ASGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/3.0/howto/deployment/asgi/
"""
import os
from django.core.asgi import get_asgi_application
# Point Django at the project settings module before building the app;
# setdefault lets an externally-set DJANGO_SETTINGS_MODULE win.
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'logkit.settings')
# Module-level ASGI callable that servers reference as `logkit.asgi:application`.
application = get_asgi_application()
| 22.882353
| 78
| 0.784062
|
import os
from django.core.asgi import get_asgi_application
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'logkit.settings')
application = get_asgi_application()
| true
| true
|
790636fd3ea0fbc5a98ffb372028f6cb49d267fb
| 2,052
|
py
|
Python
|
src/example_helper.py
|
ArthurBernard/quant-reseach
|
8b6385c7e688f33b6e98d5b470af2cb0c1cafb2c
|
[
"MIT"
] | null | null | null |
src/example_helper.py
|
ArthurBernard/quant-reseach
|
8b6385c7e688f33b6e98d5b470af2cb0c1cafb2c
|
[
"MIT"
] | null | null | null |
src/example_helper.py
|
ArthurBernard/quant-reseach
|
8b6385c7e688f33b6e98d5b470af2cb0c1cafb2c
|
[
"MIT"
] | null | null | null |
import msgpack
import zlib
import numpy as np
import helper_functions as hf
import datetime_helper as dh
def strip_data_by_time(t_data, data, t_min, t_max):
    """Restrict aligned time-series samples to an inclusive time window.

    Args:
        t_data: sequence/array of timestamps, aligned element-wise with `data`.
        data: sequence/array of samples (scalars or rows).
        t_min: inclusive lower time bound.
        t_max: inclusive upper time bound.

    Returns:
        Tuple of numpy arrays (filtered timestamps, filtered samples).
    """
    t_arr = np.asarray(t_data)
    # One vectorized boolean mask replaces the original pair of per-element
    # Python list comprehensions (same result, single pass in C).
    mask = (t_arr >= t_min) & (t_arr <= t_max)
    return t_arr[mask], np.asarray(data)[mask]
def load_example_data(filename_augmento_topics,
                      filename_augmento_data,
                      filename_bitmex_data,
                      datetime_start=None,
                      datetime_end=None):
    """Load augmento topics/sentiment counts and BitMEX prices, time-aligned.

    Args:
        filename_augmento_topics: zlib-compressed msgpack mapping id -> topic.
        filename_augmento_data: zlib-compressed msgpack list of dicts with
            "t_epoch" and "counts" per time step.
        filename_bitmex_data: zlib-compressed msgpack list of dicts with
            "t_epoch" and "open" per time step.
        datetime_start: optional datetime lower bound; defaults to the start
            of the overlap of both series.
        datetime_end: optional datetime upper bound; defaults to the end of
            the overlap of both series.

    Returns:
        (augmento_topics, augmento_topics_inv, t_aug_data, aug_data,
         t_price_data, price_data)
    """
    # NOTE(review): msgpack.unpackb(..., encoding='utf-8') only exists in
    # msgpack < 1.0; newer releases removed it in favor of raw=False —
    # confirm the pinned msgpack version before upgrading.

    # Load the topic id <-> name mappings (both directions).
    with open(filename_augmento_topics, "rb") as f:
        temp = msgpack.unpackb(zlib.decompress(f.read()), encoding='utf-8')
        augmento_topics = {int(k): v for k, v in temp.items()}
        augmento_topics_inv = {v: int(k) for k, v in temp.items()}

    # Load the augmento sentiment counts.
    with open(filename_augmento_data, "rb") as f:
        temp = msgpack.unpackb(zlib.decompress(f.read()), encoding='utf-8')
        t_aug_data = np.array([el["t_epoch"] for el in temp], dtype=np.float64)
        aug_data = np.array([el["counts"] for el in temp], dtype=np.int32)

    # Load the price data (open of each bar).
    with open(filename_bitmex_data, "rb") as f:
        temp = msgpack.unpackb(zlib.decompress(f.read()), encoding='utf-8')
        t_price_data = np.array([el["t_epoch"] for el in temp], dtype=np.float64)
        price_data = np.array([el["open"] for el in temp], dtype=np.float64)

    # Resolve the time window: explicit bounds win, otherwise use the
    # overlap of the two series. (`is not None` replaces `!= None`.)
    if datetime_start is not None:
        t_start = dh.datetime_to_epoch(datetime_start)
    else:
        t_start = max(np.min(t_aug_data), np.min(t_price_data))
    if datetime_end is not None:
        t_end = dh.datetime_to_epoch(datetime_end)
    else:
        t_end = min(np.max(t_aug_data), np.max(t_price_data))

    # Strip the sentiments and prices outside the shared time range.
    t_aug_data, aug_data = strip_data_by_time(t_aug_data, aug_data, t_start, t_end)
    t_price_data, price_data = strip_data_by_time(t_price_data, price_data, t_start, t_end)

    return augmento_topics, augmento_topics_inv, t_aug_data, aug_data, t_price_data, price_data
| 39.461538
| 92
| 0.705653
|
import msgpack
import zlib
import numpy as np
import helper_functions as hf
import datetime_helper as dh
def strip_data_by_time(t_data, data, t_min, t_max):
data = np.array([s for s, t in zip(data, t_data) if t >= t_min and t <= t_max])
t_data = np.array([t for t in t_data if t >= t_min and t <= t_max])
return t_data, data
def load_example_data(filename_augmento_topics,
filename_augmento_data,
filename_bitmex_data,
datetime_start=None,
datetime_end=None):
with open(filename_augmento_topics, "rb") as f:
temp = msgpack.unpackb(zlib.decompress(f.read()), encoding='utf-8')
augmento_topics = {int(k) : v for k, v in temp.items()}
augmento_topics_inv = {v : int(k) for k, v in temp.items()}
with open(filename_augmento_data, "rb") as f:
temp = msgpack.unpackb(zlib.decompress(f.read()), encoding='utf-8')
t_aug_data = np.array([el["t_epoch"] for el in temp], dtype=np.float64)
aug_data = np.array([el["counts"] for el in temp], dtype=np.int32)
with open(filename_bitmex_data, "rb") as f:
temp = msgpack.unpackb(zlib.decompress(f.read()), encoding='utf-8')
t_price_data = np.array([el["t_epoch"] for el in temp], dtype=np.float64)
price_data = np.array([el["open"] for el in temp], dtype=np.float64)
if datetime_start != None:
t_start = dh.datetime_to_epoch(datetime_start)
else:
t_start = max(np.min(t_aug_data), np.min(t_price_data))
if datetime_end != None:
t_end = dh.datetime_to_epoch(datetime_end)
else:
t_end = min(np.max(t_aug_data), np.max(t_price_data))
t_aug_data, aug_data = strip_data_by_time(t_aug_data, aug_data, t_start, t_end)
t_price_data, price_data = strip_data_by_time(t_price_data, price_data, t_start, t_end)
return augmento_topics, augmento_topics_inv, t_aug_data, aug_data, t_price_data, price_data
| true
| true
|
79063825397602ee85dc1a634bc561da559cd420
| 937
|
py
|
Python
|
src/Classes/MSDS400/Module 7/polution.py
|
bmoretz/Python-Playground
|
a367ec7659b85c24363c21b5c0ac25db08ffa1f6
|
[
"MIT"
] | null | null | null |
src/Classes/MSDS400/Module 7/polution.py
|
bmoretz/Python-Playground
|
a367ec7659b85c24363c21b5c0ac25db08ffa1f6
|
[
"MIT"
] | null | null | null |
src/Classes/MSDS400/Module 7/polution.py
|
bmoretz/Python-Playground
|
a367ec7659b85c24363c21b5c0ac25db08ffa1f6
|
[
"MIT"
] | null | null | null |
from sympy import symbols, integrate, Rational, lambdify
import matplotlib.pyplot as plt
import numpy as np
# Pollution from a factory is entering a lake. The rate of concentration of
# the pollutant at time t is given by dP below, where t is the number of
# years since the factory started introducing pollutants into the lake.
t = symbols( 't', positive = True )
dP = 91*t ** Rational( 5, 2 )
# Ecologists estimate that the lake can accept a total level of pollution of
# 7600 units before all the fish life in the lake ends.
# Can the factory operate for 5 years without killing all the fish in the lake?
# Yes, because the accumulated pollution over [0, 5]:
P = integrate( dP, ( t, 0, 5 ) ).evalf()
round( P )
# ...is less than 7600.
# Plot the accumulated pollution curve to see how it grows over time.
g_xlim = [ 1, 10 ]
g_ylim = [ -5, 15 ]
lam_p = lambdify( t, integrate( dP, t ), np )
x_vals = np.linspace( g_xlim[0], g_xlim[1], 1000, endpoint=True )
y_vals = lam_p( x_vals )
plt.plot( x_vals, y_vals )
plt.show()
| 33.464286
| 132
| 0.708645
|
from sympy import symbols, integrate, Rational, lambdify
import matplotlib.pyplot as plt
import numpy as np
t = symbols( 't', positive = True )
dP = 91*t ** Rational( 5, 2 )
P = integrate( dP, ( t, 0, 5 ) ).evalf()
round( P )
g_xlim = [ 1, 10 ]
g_ylim = [ -5, 15 ]
lam_p = lambdify( t, integrate( dP, t ), np )
x_vals = np.linspace( g_xlim[0], g_xlim[1], 1000, endpoint=True )
y_vals = lam_p( x_vals )
plt.plot( x_vals, y_vals )
plt.show()
| true
| true
|
79063838a9fdaec6a3b656af830209367d06537e
| 6,096
|
py
|
Python
|
activation_generator.py
|
Gareth001/tcav
|
e391e7682c34933e27bd592106c119317383ef10
|
[
"Apache-2.0"
] | null | null | null |
activation_generator.py
|
Gareth001/tcav
|
e391e7682c34933e27bd592106c119317383ef10
|
[
"Apache-2.0"
] | null | null | null |
activation_generator.py
|
Gareth001/tcav
|
e391e7682c34933e27bd592106c119317383ef10
|
[
"Apache-2.0"
] | null | null | null |
"""
Copyright 2018 Google LLC
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
https://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
""" Activation generator helper classes for TCAV"""
from abc import ABCMeta
from abc import abstractmethod
from multiprocessing import dummy as multiprocessing
import os.path
import numpy as np
import PIL.Image
import tensorflow as tf
class ActivationGeneratorInterface(object):
  """Interface for an activation generator for a model."""
  # NOTE(review): `__metaclass__` is Python-2 syntax and is ignored on
  # Python 3, so @abstractmethod is not actually enforced there; switching
  # to `metaclass=ABCMeta` would change instantiation behavior for
  # incomplete subclasses, so it is left as-is.
  __metaclass__ = ABCMeta

  @abstractmethod
  def process_and_load_activations(self, bottleneck_names, concepts):
    """Compute or load activations for each (concept, bottleneck) pair."""
    pass

  @abstractmethod
  def get_model(self):
    # Bug fix: `self` was missing from the signature, making the method
    # uncallable on an instance.
    """Return the wrapped model."""
    pass
class ActivationGeneratorBase(ActivationGeneratorInterface):
  """Basic abstract activation generator for a model"""

  def __init__(self, model, acts_dir, max_examples=500):
    # model: wrapped model exposing run_examples / reshape_activations.
    # acts_dir: optional directory used to cache activations on disk.
    # max_examples: cap on examples fetched per concept.
    self.model = model
    self.acts_dir = acts_dir
    self.max_examples = max_examples

  def get_model(self):
    """Return the wrapped model."""
    return self.model

  @abstractmethod
  def get_examples_for_concept(self, concept):
    """Return the example inputs for `concept` (subclass-specific)."""
    pass

  def get_activations_for_concept(self, concept, bottleneck):
    """Fetch examples for `concept` and run them through `bottleneck`."""
    examples = self.get_examples_for_concept(concept)
    return self.get_activations_for_examples(examples, bottleneck)

  def get_activations_for_examples(self, examples, bottleneck):
    """Run `examples` through the model; return squeezed activations."""
    acts = self.model.run_examples(examples, bottleneck)
    return self.model.reshape_activations(acts).squeeze()

  def process_and_load_activations(self, bottleneck_names, concepts):
    """Return {concept: {bottleneck: activations}}, using the disk cache.

    A cached file 'acts_<concept>_<bottleneck>' in acts_dir is loaded when
    present; otherwise activations are computed and, if acts_dir is set,
    written back for next time.
    """
    acts = {}
    if self.acts_dir and not tf.gfile.Exists(self.acts_dir):
      tf.gfile.MakeDirs(self.acts_dir)

    for concept in concepts:
      if concept not in acts:
        acts[concept] = {}
      for bottleneck_name in bottleneck_names:
        # acts_path is None when caching is disabled (no acts_dir).
        acts_path = os.path.join(self.acts_dir, 'acts_{}_{}'.format(
            concept, bottleneck_name)) if self.acts_dir else None
        if acts_path and tf.gfile.Exists(acts_path):
          with tf.gfile.Open(acts_path, 'rb') as f:
            acts[concept][bottleneck_name] = np.load(f).squeeze()
            tf.logging.info('Loaded {} shape {}'.format(
                acts_path, acts[concept][bottleneck_name].shape))
        else:
          acts[concept][bottleneck_name] = self.get_activations_for_concept(
              concept, bottleneck_name)
          if acts_path:
            tf.logging.info('{} does not exist, Making one...'.format(
                acts_path))
            # NOTE(review): opened in text mode 'w' while np.save writes
            # bytes — appears to rely on tf.gfile semantics; verify against
            # plain filesystems ('wb' would be the conventional mode).
            with tf.gfile.Open(acts_path, 'w') as f:
              np.save(f, acts[concept][bottleneck_name], allow_pickle=False)
    return acts
class ImageActivationGenerator(ActivationGeneratorBase):
  """Activation generator for a basic image model"""

  def __init__(self, model, source_dir, acts_dir, max_examples=10):
    # source_dir: directory with one sub-directory of images per concept.
    self.source_dir = source_dir
    super(ImageActivationGenerator, self).__init__(
        model, acts_dir, max_examples)

  def get_examples_for_concept(self, concept):
    """Load up to max_examples images from source_dir/<concept>."""
    concept_dir = os.path.join(self.source_dir, concept)
    img_paths = [os.path.join(concept_dir, d)
                 for d in tf.gfile.ListDirectory(concept_dir)]
    imgs = self.load_images_from_files(img_paths, self.max_examples,
                                       shape=self.model.get_image_shape()[:2])
    return imgs

  def load_image_from_file(self, filename, shape):
    """Given a filename, try to open the file. If failed, return None.

    Args:
      filename: location of the image file
      shape: the shape of the image file to be scaled

    Returns:
      the image if succeeds, None if fails.
    """
    if not tf.gfile.Exists(filename):
      tf.logging.error('Cannot find file: {}'.format(filename))
      return None
    try:
      # ensure image has no transparency channel
      img = np.array(PIL.Image.open(tf.gfile.Open(filename, 'rb')).convert(
          'RGB').resize(shape, PIL.Image.BILINEAR))
      # Normalize pixel values to between 0 and 1.
      img = np.float32(img) / 255.0
      if not (len(img.shape) == 3 and img.shape[2] == 3):
        return None
      return img
    except Exception as e:
      tf.logging.info(e)
      return None
    # Removed an unreachable trailing `return img`: every path in the
    # try/except above already returns.

  def load_images_from_files(self, filenames, max_imgs=500,
                             do_shuffle=True, run_parallel=True,
                             shape=(299, 299),
                             num_workers=100):
    """Return image arrays from filenames.

    Args:
      filenames: locations of image files.
      max_imgs: maximum number of images from filenames.
      do_shuffle: before getting max_imgs files, shuffle the names or not
      run_parallel: get images in parallel or not
      shape: desired shape of the image
      num_workers: number of workers in parallelization.

    Returns:
      image arrays

    Raises:
      ValueError: if fewer than two images could be loaded.
    """
    imgs = []
    # First shuffle a copy of the filenames.
    filenames = filenames[:]
    if do_shuffle:
      np.random.shuffle(filenames)

    if run_parallel:
      pool = multiprocessing.Pool(num_workers)
      imgs = pool.map(
          lambda filename: self.load_image_from_file(filename, shape),
          filenames[:max_imgs])
      imgs = [img for img in imgs if img is not None]
    else:
      for filename in filenames:
        img = self.load_image_from_file(filename, shape)
        if img is not None:
          imgs.append(img)
          if len(imgs) >= max_imgs:
            break

    # Bug fix: the serial branch previously raised ValueError as soon as the
    # *first* image was appended (`len(imgs) <= 1` was checked inside the
    # loop), so it could never collect more than one image. Validate once,
    # after collection, for both branches.
    if len(imgs) <= 1:
      raise ValueError('You must have more than 1 image in each class to run TCAV.')

    return np.array(imgs)
| 33.494505
| 88
| 0.674869
|
from abc import ABCMeta
from abc import abstractmethod
from multiprocessing import dummy as multiprocessing
import os.path
import numpy as np
import PIL.Image
import tensorflow as tf
class ActivationGeneratorInterface(object):
__metaclass__ = ABCMeta
@abstractmethod
def process_and_load_activations(self, bottleneck_names, concepts):
pass
@abstractmethod
def get_model():
pass
class ActivationGeneratorBase(ActivationGeneratorInterface):
  """Basic abstract activation generator with an on-disk cache."""

  def __init__(self, model, acts_dir, max_examples=500):
    # model: wrapper exposing run_examples() / reshape_activations().
    # acts_dir: directory used to cache activations; falsy disables caching.
    # max_examples: cap on examples per concept (used by subclasses).
    self.model = model
    self.acts_dir = acts_dir
    self.max_examples = max_examples

  def get_model(self):
    """Return the wrapped model."""
    return self.model

  @abstractmethod
  def get_examples_for_concept(self, concept):
    """Return the example inputs for `concept` (subclass-specific)."""
    pass

  def get_activations_for_concept(self, concept, bottleneck):
    """Compute activations of `concept`'s examples at `bottleneck`."""
    examples = self.get_examples_for_concept(concept)
    return self.get_activations_for_examples(examples, bottleneck)

  def get_activations_for_examples(self, examples, bottleneck):
    """Run `examples` through the model; return squeezed activations."""
    acts = self.model.run_examples(examples, bottleneck)
    return self.model.reshape_activations(acts).squeeze()

  def process_and_load_activations(self, bottleneck_names, concepts):
    """Return {concept: {bottleneck: activations}}, using the disk cache.

    Cached arrays are loaded from acts_dir when present; freshly computed
    activations are written back (unless caching is disabled).
    """
    acts = {}
    if self.acts_dir and not tf.gfile.Exists(self.acts_dir):
      tf.gfile.MakeDirs(self.acts_dir)
    for concept in concepts:
      if concept not in acts:
        acts[concept] = {}
      for bottleneck_name in bottleneck_names:
        # One cache file per (concept, bottleneck); None disables caching.
        acts_path = os.path.join(self.acts_dir, 'acts_{}_{}'.format(
            concept, bottleneck_name)) if self.acts_dir else None
        if acts_path and tf.gfile.Exists(acts_path):
          # Cache hit: load the saved numpy array.
          with tf.gfile.Open(acts_path, 'rb') as f:
            acts[concept][bottleneck_name] = np.load(f).squeeze()
            tf.logging.info('Loaded {} shape {}'.format(
                acts_path, acts[concept][bottleneck_name].shape))
        else:
          # Cache miss: compute, then persist if caching is enabled.
          acts[concept][bottleneck_name] = self.get_activations_for_concept(
              concept, bottleneck_name)
          if acts_path:
            tf.logging.info('{} does not exist, Making one...'.format(
                acts_path))
            with tf.gfile.Open(acts_path, 'w') as f:
              np.save(f, acts[concept][bottleneck_name], allow_pickle=False)
    return acts
class ImageActivationGenerator(ActivationGeneratorBase):
  """Activation generator for a basic image model."""

  def __init__(self, model, source_dir, acts_dir, max_examples=10):
    # source_dir holds one sub-directory of images per concept.
    self.source_dir = source_dir
    super(ImageActivationGenerator, self).__init__(
        model, acts_dir, max_examples)

  def get_examples_for_concept(self, concept):
    """Load up to max_examples images from the concept's sub-directory."""
    concept_dir = os.path.join(self.source_dir, concept)
    img_paths = [os.path.join(concept_dir, d)
                 for d in tf.gfile.ListDirectory(concept_dir)]
    imgs = self.load_images_from_files(img_paths, self.max_examples,
                                       shape=self.model.get_image_shape()[:2])
    return imgs

  def load_image_from_file(self, filename, shape):
    """Return an RGB float image in [0, 1] resized to `shape`, or None.

    Returns None (instead of raising) for missing files, unreadable
    images, and images that do not decode to 3-channel RGB, so a few
    bad files do not abort loading a whole concept.
    """
    if not tf.gfile.Exists(filename):
      tf.logging.error('Cannot find file: {}'.format(filename))
      return None
    try:
      img = np.array(PIL.Image.open(tf.gfile.Open(filename, 'rb')).convert(
          'RGB').resize(shape, PIL.Image.BILINEAR))
      # Normalize pixel values to [0, 1].
      img = np.float32(img) / 255.0
      if not (len(img.shape) == 3 and img.shape[2] == 3):
        return None
      return img
      # (Removed the unreachable trailing `return img` -- every path in
      # the try/except above already returns.)
    except Exception as e:
      tf.logging.info(e)
      return None

  def load_images_from_files(self, filenames, max_imgs=500,
                             do_shuffle=True, run_parallel=True,
                             shape=(299, 299),
                             num_workers=100):
    """Return image arrays loaded from `filenames`.

    Args:
      filenames: locations of image files.
      max_imgs: maximum number of images to return.
      do_shuffle: shuffle (a copy of) the filenames before sampling.
      run_parallel: load with a thread pool instead of serially.
      shape: desired (width, height) of each image.
      num_workers: thread-pool size when run_parallel is set.

    Returns:
      numpy array of loaded images.

    Raises:
      ValueError: if fewer than 2 images could be loaded.
    """
    # Shuffle a copy so the caller's list is left untouched.
    filenames = filenames[:]
    if do_shuffle:
      np.random.shuffle(filenames)
    if run_parallel:
      # multiprocessing.dummy is a thread pool (lambdas are not picklable).
      pool = multiprocessing.Pool(num_workers)
      try:
        imgs = pool.map(
            lambda filename: self.load_image_from_file(filename, shape),
            filenames[:max_imgs])
      finally:
        # Release the worker threads (the original leaked the pool).
        pool.close()
        pool.join()
      imgs = [img for img in imgs if img is not None]
    else:
      imgs = []
      for filename in filenames:
        img = self.load_image_from_file(filename, shape)
        if img is not None:
          imgs.append(img)
        if len(imgs) >= max_imgs:
          break
    # Bug fix: the original performed this check *inside* the serial loop,
    # so serial loading always raised on the very first iteration
    # (len(imgs) is 0 or 1 there). Validate once, after collection.
    if len(imgs) <= 1:
      raise ValueError('You must have more than 1 image in each class to run TCAV.')
    return np.array(imgs)
| true
| true
|
790639972680792d281d3fa1431e804ec104d35f
| 1,158
|
py
|
Python
|
posts/migrations/0001_initial.py
|
AmrMKayid/django-blog
|
09bab49da81800b1605769dd756dcbe2e691ff1e
|
[
"MIT"
] | null | null | null |
posts/migrations/0001_initial.py
|
AmrMKayid/django-blog
|
09bab49da81800b1605769dd756dcbe2e691ff1e
|
[
"MIT"
] | null | null | null |
posts/migrations/0001_initial.py
|
AmrMKayid/django-blog
|
09bab49da81800b1605769dd756dcbe2e691ff1e
|
[
"MIT"
] | null | null | null |
# Generated by Django 2.0.6 on 2018-07-05 16:13
from django.db import migrations, models
import posts.models
class Migration(migrations.Migration):
    # First migration of the posts app: creates the Post model/table.
    initial = True

    dependencies = [
    ]

    operations = [
        migrations.CreateModel(
            name='Post',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('title', models.CharField(max_length=120)),
                ('slug', models.SlugField(allow_unicode=True, unique=True)),
                # height_field/width_field are auto-populated from the image
                # by Django's ImageField.
                ('image', models.ImageField(blank=True, height_field='height_field', null=True, upload_to=posts.models.upload_location, width_field='width_field')),
                ('height_field', models.IntegerField(default=0)),
                ('width_field', models.IntegerField(default=0)),
                ('content', models.TextField()),
                # updated changes on every save; timestamp is set once.
                ('updated', models.DateTimeField(auto_now=True)),
                ('timestamp', models.DateTimeField(auto_now_add=True)),
            ],
            options={
                # Newest posts first.
                'ordering': ['-timestamp', '-updated'],
            },
        ),
    ]
| 35.090909
| 164
| 0.578584
|
from django.db import migrations, models
import posts.models
class Migration(migrations.Migration):
initial = True
dependencies = [
]
operations = [
migrations.CreateModel(
name='Post',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('title', models.CharField(max_length=120)),
('slug', models.SlugField(allow_unicode=True, unique=True)),
('image', models.ImageField(blank=True, height_field='height_field', null=True, upload_to=posts.models.upload_location, width_field='width_field')),
('height_field', models.IntegerField(default=0)),
('width_field', models.IntegerField(default=0)),
('content', models.TextField()),
('updated', models.DateTimeField(auto_now=True)),
('timestamp', models.DateTimeField(auto_now_add=True)),
],
options={
'ordering': ['-timestamp', '-updated'],
},
),
]
| true
| true
|
7906399aebd737d245c8c29de5b7cae402ac6e4d
| 377
|
py
|
Python
|
mlbench_core/controlflow/pytorch/__init__.py
|
c4dt/mlbench-core
|
8a5cf6e00ff4535b2aea23b213241858a5ee5f00
|
[
"Apache-2.0"
] | null | null | null |
mlbench_core/controlflow/pytorch/__init__.py
|
c4dt/mlbench-core
|
8a5cf6e00ff4535b2aea23b213241858a5ee5f00
|
[
"Apache-2.0"
] | null | null | null |
mlbench_core/controlflow/pytorch/__init__.py
|
c4dt/mlbench-core
|
8a5cf6e00ff4535b2aea23b213241858a5ee5f00
|
[
"Apache-2.0"
] | null | null | null |
from .checkpoints_evaluation import CheckpointsEvaluationControlFlow
from .controlflow import (
record_train_batch_stats,
record_validation_stats,
validation_round,
)
from .helpers import prepare_batch
__all__ = [
"CheckpointsEvaluationControlFlow",
"record_validation_stats",
"record_train_batch_stats",
"validation_round",
"prepare_batch",
]
| 23.5625
| 68
| 0.777188
|
from .checkpoints_evaluation import CheckpointsEvaluationControlFlow
from .controlflow import (
record_train_batch_stats,
record_validation_stats,
validation_round,
)
from .helpers import prepare_batch
__all__ = [
"CheckpointsEvaluationControlFlow",
"record_validation_stats",
"record_train_batch_stats",
"validation_round",
"prepare_batch",
]
| true
| true
|
79063a6a2c9a0a173bbd60d73d5c82033ab1c010
| 6,067
|
py
|
Python
|
xfaster/spec_tools.py
|
SPIDER-CMB/xfaster
|
1b8e56d775f2c3a8693d1372ae461392c21da7ca
|
[
"MIT"
] | 1
|
2021-03-25T14:15:44.000Z
|
2021-03-25T14:15:44.000Z
|
xfaster/spec_tools.py
|
annegambrel/xfaster
|
03d5a2971d3cc19ae360d78995e3575f3f678d6e
|
[
"MIT"
] | 7
|
2021-04-20T23:34:38.000Z
|
2021-08-24T00:00:53.000Z
|
xfaster/spec_tools.py
|
SPIDER-CMB/xfaster
|
1b8e56d775f2c3a8693d1372ae461392c21da7ca
|
[
"MIT"
] | 1
|
2021-05-18T16:43:54.000Z
|
2021-05-18T16:43:54.000Z
|
from __future__ import print_function
from __future__ import absolute_import
from __future__ import division
import numpy as np
__all__ = [
"wigner3j",
"get_camb_cl",
"scale_dust",
]
def blackbody(nu, ref_freq=353.0):
    """
    The ratio of the blackbody function for dust at frequency nu
    over the value for reference frequency ref_freq

    Arguments
    ---------
    nu : float or array_like
        Frequency in GHz.
    ref_freq : float
        Reference frequency in GHz.

    Returns
    -------
    blackbody_ratio : float
        B(nu, T_dust) / B(nu_ref, T_dust)
    """
    k = 1.38064852e-23  # Boltzmann constant
    h = 6.626070040e-34  # Planck constant
    T = 19.6  # dust temperature in K
    nu_ref = ref_freq * 1.0e9  # GHz -> Hz
    # Bug fix: convert into a *new* variable. The original `nu *= 1.0e9`
    # mutated the caller's array in place whenever `nu` was a numpy array.
    nu_hz = nu * 1.0e9
    x = h * nu_hz / k / T
    x_ref = h * nu_ref / k / T
    return x ** 3 / x_ref ** 3 * (np.exp(x_ref) - 1) / (np.exp(x) - 1)
def rj2cmb(nu_in):
    """
    Conversion factor from Rayleigh-Jeans units to CMB temperature units.

    Arguments
    ---------
    nu_in : float
        Frequency in GHz.

    Returns
    -------
    cal_fac : float
        Number by which to multiply a RJ temperature to get a CMB temp
    """
    boltzmann = 1.38064852e-23  # Boltzmann constant
    planck = 6.626070040e-34  # Planck constant
    t_cmb = 2.72548  # CMB blackbody temperature in K
    nu_hz = nu_in * 1.0e9  # GHz -> Hz
    # Dimensionless frequency h*nu / (k*T).
    x = planck * nu_hz / boltzmann / t_cmb
    expx = np.exp(x)
    return (expx - 1.0) ** 2 / (x ** 2 * expx)
def scale_dust(freq0, freq1, ref_freq, beta, delta_beta=None, deriv=False):
    """
    Compute the factor that scales the dust cross spectrum measured between
    maps at freq0 and freq1 to the dust power at ref_freq, assuming a
    modified-blackbody SED with spectral index beta.

    If deriv is True, return both the scaling at the reference beta and its
    first derivative with respect to beta.  Otherwise, if delta_beta is
    given, return the scaling linearized about beta by that offset.

    Arguments
    ---------
    freq0 : float
        Frequency of map0 in GHz.
    freq1 : float
        Frequency of map1 in GHz.
    ref_freq : float
        Reference frequency in GHz.
    beta : float
        Dust spectral index.
    delta_beta : float
        First-order offset from beta.
    deriv : bool
        If True, also return d(scale)/d(beta) at the reference beta.

    Returns
    -------
    freq_scale : float
        Multiplicative factor to move the dust cross spectrum to ref_freq,
        or the tuple (freq_scale, derivative) when deriv is True.
    """
    # Unit-conversion (RJ -> CMB) factors and blackbody ratios relative
    # to the reference band.
    cal_fac = rj2cmb(freq0) * rj2cmb(freq1) / rj2cmb(ref_freq) ** 2.0
    bb_fac = blackbody(freq0, ref_freq=ref_freq) * blackbody(
        freq1, ref_freq=ref_freq)
    # Power-law part of the SED (index beta - 2 in RJ units).
    pl_fac = (freq0 * freq1 / ref_freq ** 2) ** (beta - 2.0)
    freq_scale = cal_fac * bb_fac * pl_fac
    if deriv or delta_beta is not None:
        # d/d(beta) of the power law brings down this logarithm.
        delta = np.log(freq0 * freq1 / ref_freq ** 2)
        if deriv:
            return (freq_scale, freq_scale * delta)
        return freq_scale * (1 + delta * delta_beta)
    return freq_scale
def wigner3j(l2, m2, l3, m3):
    r"""
    Compute the Wigner 3j symbols

    .. math::

        \begin{pmatrix}
        \ell_2 & \ell_3 & L \\
        m_2 & m_3 & 0 \\
        \end{pmatrix}

    for every allowed value of ``L``.

    Arguments
    ---------
    l2, m2, l3, m3 : int
        The ell and m values for which to compute the symbols.

    Returns
    -------
    fj : array_like
        Array of size ``l2 + l3 + 2`` indexed by ``L``; entries outside
        ``[lmin, lmax]`` are zero.
    lmin : int
        Smallest ``L`` with a non-zero symbol.
    lmax : int
        Largest ``L`` with a non-zero symbol.
    """
    import camb

    # `threej` moved between camb modules across releases.
    try:
        from camb.mathutils import threej
    except ImportError:
        from camb.bispectrum import threej

    symbols = threej(l2, l3, m2, m3)
    # Selection rules: |l2 - l3|, |m2 + m3| <= L <= l2 + l3.
    lmin = np.max([np.abs(l2 - l3), np.abs(m2 + m3)])
    lmax = l2 + l3
    fj = np.zeros(lmax + 2, dtype=symbols.dtype)
    fj[lmin : lmax + 1] = symbols
    return fj, lmin, lmax
def get_camb_cl(r, lmax, nt=None, spec="total", lfac=True):
    """
    Compute camb spectrum with tensors and lensing.

    Parameter values are from arXiv:1807.06209 Table 1 Plik best fit

    Arguments
    ---------
    r : float
        Tensor-to-scalar ratio
    lmax : int
        Maximum ell for which to compute spectra
    nt : scalar, optional
        Tensor spectral index.  If not supplied, assumes
        slow-roll consistency relation.
    spec : string, optional
        Spectrum component to return.  Can be 'total', 'unlensed_total',
        'unlensed_scalar', 'lensed_scalar', 'tensor', 'lens_potential'.
    lfac: bool, optional
        If True, multiply Cls by ell*(ell+1)/2/pi

    Returns
    -------
    cls : array_like
        Array of spectra of shape (4, lmax + 1) -- note the transpose at
        the end of this function.  Diagonal ordering (TT, EE, BB, TE).
    """
    # Set up a new set of parameters for CAMB
    import camb

    pars = camb.CAMBparams()
    # This function sets up CosmoMC-like settings, with one massive neutrino
    # and helium set using BBN consistency
    pars.set_cosmology(
        H0=67.32,
        ombh2=0.022383,
        omch2=0.12011,
        mnu=0.06,
        omk=0,
        tau=0.0543,
    )
    # Scalar amplitude expressed as ln(10^10 As).
    ln1010As = 3.0448
    pars.InitPower.set_params(As=np.exp(ln1010As) / 1.0e10, ns=0.96605, r=r, nt=nt)
    if lmax < 2500:
        # This results in unacceptable bias. Use higher lmax, then cut it down
        lmax0 = 2500
    else:
        lmax0 = lmax
    pars.set_for_lmax(lmax0, lens_potential_accuracy=2)
    pars.WantTensors = True
    pars.do_lensing = True
    # calculate results for these parameters
    results = camb.get_results(pars)
    # raw_cl=False returns D_l = l(l+1)C_l/2pi, hence the `not lfac`.
    powers = results.get_cmb_power_spectra(pars, CMB_unit="muK", raw_cl=not lfac)
    totCL = powers[spec][: lmax + 1, :4].T
    return totCL
| 27.577273
| 83
| 0.60656
|
from __future__ import print_function
from __future__ import absolute_import
from __future__ import division
import numpy as np
__all__ = [
"wigner3j",
"get_camb_cl",
"scale_dust",
]
def blackbody(nu, ref_freq=353.0):
    """Return B(nu, T_dust) / B(ref_freq, T_dust) for T_dust = 19.6 K.

    Arguments
    ---------
    nu : float or array_like
        Frequency in GHz.
    ref_freq : float
        Reference frequency in GHz.
    """
    k = 1.38064852e-23  # Boltzmann constant
    h = 6.626070040e-34  # Planck constant
    T = 19.6  # dust temperature in K
    nu_ref = ref_freq * 1.0e9  # GHz -> Hz
    # Bug fix: convert into a *new* variable. The original `nu *= 1.0e9`
    # mutated the caller's array in place whenever `nu` was a numpy array.
    nu_hz = nu * 1.0e9
    x = h * nu_hz / k / T
    x_ref = h * nu_ref / k / T
    return x ** 3 / x_ref ** 3 * (np.exp(x_ref) - 1) / (np.exp(x) - 1)
def rj2cmb(nu_in):
    """Return the factor converting a RJ temperature at nu_in GHz to CMB units."""
    k = 1.38064852e-23  # Boltzmann constant
    h = 6.626070040e-34  # Planck constant
    T = 2.72548  # CMB blackbody temperature in K
    nu = nu_in * 1.0e9  # GHz -> Hz
    x = h * nu / k / T  # dimensionless frequency
    return (np.exp(x) - 1.0) ** 2 / (x ** 2 * np.exp(x))
def scale_dust(freq0, freq1, ref_freq, beta, delta_beta=None, deriv=False):
    """Scale a dust cross spectrum at (freq0, freq1) GHz to ref_freq.

    Assumes a modified-blackbody dust SED with spectral index `beta`.
    If `deriv` is True, returns (scale, d(scale)/d(beta)); otherwise, if
    `delta_beta` is given, returns the scale linearized about beta.
    """
    freq_scale = (
        rj2cmb(freq0)
        * rj2cmb(freq1)
        / rj2cmb(ref_freq) ** 2.0
        * blackbody(freq0, ref_freq=ref_freq)
        * blackbody(freq1, ref_freq=ref_freq)
        * (freq0 * freq1 / ref_freq ** 2) ** (beta - 2.0)
    )
    if deriv or delta_beta is not None:
        # Derivative of the power law w.r.t. beta brings down this log.
        delta = np.log(freq0 * freq1 / ref_freq ** 2)
        if deriv:
            return (freq_scale, freq_scale * delta)
        return freq_scale * (1 + delta * delta_beta)
    return freq_scale
def wigner3j(l2, m2, l3, m3):
    """Return Wigner 3j symbols (l2 l3 L; m2 m3 0) for all valid L.

    Returns (fj, lmin, lmax): fj has length l2 + l3 + 2, indexed by L,
    and is non-zero only for lmin <= L <= lmax.
    """
    import camb
    # `threej` moved between camb modules across releases.
    try:
        from camb.mathutils import threej
    except ImportError:
        from camb.bispectrum import threej
    arr = threej(l2, l3, m2, m3)
    # Selection rules: |l2 - l3|, |m2 + m3| <= L <= l2 + l3.
    lmin = np.max([np.abs(l2 - l3), np.abs(m2 + m3)])
    lmax = l2 + l3
    fj = np.zeros(lmax + 2, dtype=arr.dtype)
    fj[lmin : lmax + 1] = arr
    return fj, lmin, lmax
def get_camb_cl(r, lmax, nt=None, spec="total", lfac=True):
    """Compute a CAMB CMB spectrum with tensors and lensing.

    Arguments
    ---------
    r : float
        Tensor-to-scalar ratio.
    lmax : int
        Maximum ell for which to compute spectra.
    nt : scalar, optional
        Tensor spectral index; if None, camb's default is used.
    spec : string, optional
        Which component of the camb power-spectrum dict to return.
    lfac : bool, optional
        If True, return D_l = l(l+1)C_l/2pi instead of raw C_l.

    Returns
    -------
    Array of shape (4, lmax + 1): TT, EE, BB, TE (note the transpose).
    """
    import camb
    pars = camb.CAMBparams()
    pars.set_cosmology(
        H0=67.32,
        ombh2=0.022383,
        omch2=0.12011,
        mnu=0.06,
        omk=0,
        tau=0.0543,
    )
    # Scalar amplitude expressed as ln(10^10 As).
    ln1010As = 3.0448
    pars.InitPower.set_params(As=np.exp(ln1010As) / 1.0e10, ns=0.96605, r=r, nt=nt)
    if lmax < 2500:
        # Compute to at least 2500 and slice afterwards.
        lmax0 = 2500
    else:
        lmax0 = lmax
    pars.set_for_lmax(lmax0, lens_potential_accuracy=2)
    pars.WantTensors = True
    pars.do_lensing = True
    results = camb.get_results(pars)
    # raw_cl=False returns D_l, hence the `not lfac`.
    powers = results.get_cmb_power_spectra(pars, CMB_unit="muK", raw_cl=not lfac)
    totCL = powers[spec][: lmax + 1, :4].T
    return totCL
| true
| true
|
79063bc3575f6f0e9e00ae7a9cc592d7d8155cf9
| 1,364
|
py
|
Python
|
Controller-Implementation/libs/core/SwitchConnection.py
|
qcz994/p4-bier
|
38d28a58d40a16d9c3aaabd4a37a61404705d0dd
|
[
"Apache-2.0"
] | 11
|
2019-04-16T11:23:37.000Z
|
2021-05-28T14:56:25.000Z
|
Controller-Implementation/libs/core/SwitchConnection.py
|
qcz994/p4-bier
|
38d28a58d40a16d9c3aaabd4a37a61404705d0dd
|
[
"Apache-2.0"
] | null | null | null |
Controller-Implementation/libs/core/SwitchConnection.py
|
qcz994/p4-bier
|
38d28a58d40a16d9c3aaabd4a37a61404705d0dd
|
[
"Apache-2.0"
] | 2
|
2020-10-28T03:23:49.000Z
|
2021-09-04T06:47:42.000Z
|
import grpc
import threading
import proto.connection_pb2_grpc
from libs.core.Log import Log
from libs.core.Switch import Switch
from libs.core.Event import Event
from libs.Configuration import Configuration
class SwitchConnection:
    """gRPC connection to a P4 switch's local agent.

    Opens an insecure channel, performs the Hello handshake, and
    announces the new switch on the global event bus.
    """
    def __init__(self, grpc_address=None):
        # grpc_address: "host:port" of the switch-local gRPC server.
        self.channel = grpc.insecure_channel(grpc_address)
        self.stub = proto.connection_pb2_grpc.LocalServerStub(self.channel)
        # Handshake: announce our controller endpoint, learn the switch's
        # identity (name, ip, mac, bfr_id) from the reply.
        response = self.stub.Hello(proto.connection_pb2.HelloMessage(ip="127.0.0.1", port=int(Configuration.get('listen_port'))))
        # NOTE(review): .encode('utf-8') yields bytes on Python 3 --
        # presumably this targets Python 2; confirm before porting.
        self.name = response.name.encode('utf-8')
        Event.trigger('new_switch_connection',
                      name=self.name, device=Switch(name=self.name, ip=response.ip.encode('utf-8'), mac=response.mac.encode('utf-8'), bfr_id=response.bfr_id))
    def addTableEntry(self, tableEntry=None):
        """
        Add a table entry to the switch
        """
        response = self.stub.AddEntry(tableEntry)
        # A response code of 0 signals failure on the agent side.
        if response.code == 0:
            Log.error("Error for entry:", tableEntry, "on switch", self.name)
    def removeTableEntry(self, tableEntry=None):
        """
        Remove a table entry from the switch
        """
        response = self.stub.RemoveEntry(tableEntry)
        # A response code of 0 signals failure on the agent side.
        if response.code == 0:
            Log.error("Error while removing entry:", tableEntry, "on switch", self.name)
| 34.974359
| 158
| 0.674487
|
import grpc
import threading
import proto.connection_pb2_grpc
from libs.core.Log import Log
from libs.core.Switch import Switch
from libs.core.Event import Event
from libs.Configuration import Configuration
class SwitchConnection:
    """gRPC client connection to a P4 switch's local agent."""
    def __init__(self, grpc_address=None):
        # grpc_address: "host:port" of the switch-local gRPC server.
        self.channel = grpc.insecure_channel(grpc_address)
        self.stub = proto.connection_pb2_grpc.LocalServerStub(self.channel)
        # Hello handshake: announce our endpoint, learn switch identity.
        response = self.stub.Hello(proto.connection_pb2.HelloMessage(ip="127.0.0.1", port=int(Configuration.get('listen_port'))))
        self.name = response.name.encode('utf-8')
        # Announce the new switch on the global event bus.
        Event.trigger('new_switch_connection',
                      name=self.name, device=Switch(name=self.name, ip=response.ip.encode('utf-8'), mac=response.mac.encode('utf-8'), bfr_id=response.bfr_id))
    def addTableEntry(self, tableEntry=None):
        """Install a table entry on the switch; log on failure (code 0)."""
        response = self.stub.AddEntry(tableEntry)
        if response.code == 0:
            Log.error("Error for entry:", tableEntry, "on switch", self.name)
    def removeTableEntry(self, tableEntry=None):
        """Remove a table entry from the switch; log on failure (code 0)."""
        response = self.stub.RemoveEntry(tableEntry)
        if response.code == 0:
            Log.error("Error while removing entry:", tableEntry, "on switch", self.name)
| true
| true
|
79063c15265324d68540db88b7769a66ef94a146
| 486
|
py
|
Python
|
pysimplegui/values_from_some_elements.py
|
CrazyJ36/python
|
4cff6e7240672a273d978521bb511065f45d4312
|
[
"MIT"
] | null | null | null |
pysimplegui/values_from_some_elements.py
|
CrazyJ36/python
|
4cff6e7240672a273d978521bb511065f45d4312
|
[
"MIT"
] | null | null | null |
pysimplegui/values_from_some_elements.py
|
CrazyJ36/python
|
4cff6e7240672a273d978521bb511065f45d4312
|
[
"MIT"
] | null | null | null |
import PySimpleGUI as sg

# Window layout: one text label, two keyed inputs, one keyed button.
layout = [
    [sg.Text('text')],
    [sg.Input('input', key= 'input1')],
    [sg.Input('input', key='input2')],
    [sg.Button('button', key='button1')]
]
window = sg.Window('list values - list or dict', layout)
# Event loop: Read() blocks until an event, returning the event key and
# a dict of current element values keyed by each element's 'key'.
while True:
    event, values = window.Read()
    if event == 'button1':
        print(values['input1'])
        print(values['input2'])
        # prints button key because that's current events' key
        print(event)
    elif event is None:
        # Window was closed by the user.
        break
window.Close()
| 18
| 58
| 0.627572
|
import PySimpleGUI as sg

# Layout: a label, two keyed input fields and a keyed button.
layout = [
    [sg.Text('text')],
    [sg.Input('input', key= 'input1')],
    [sg.Input('input', key='input2')],
    [sg.Button('button', key='button1')]
]
window = sg.Window('list values - list or dict', layout)
# Event loop: Read() blocks until an event; `values` maps element keys
# to their current contents.
while True:
    event, values = window.Read()
    if event == 'button1':
        print(values['input1'])
        print(values['input2'])
        # `event` holds the key of the element that fired.
        print(event)
    elif event is None:
        # Window was closed.
        break
window.Close()
| true
| true
|
79063cd16685cd58176874482bf9832e97c1beeb
| 198
|
py
|
Python
|
topCoder/srms/500s/srm509/div2/palindromizeation_div_2.py
|
gauravsingh58/algo
|
397859a53429e7a585e5f6964ad24146c6261326
|
[
"WTFPL"
] | 1
|
2020-09-30T19:53:08.000Z
|
2020-09-30T19:53:08.000Z
|
topCoder/srms/500s/srm509/div2/palindromizeation_div_2.py
|
gauravsingh58/algo
|
397859a53429e7a585e5f6964ad24146c6261326
|
[
"WTFPL"
] | null | null | null |
topCoder/srms/500s/srm509/div2/palindromizeation_div_2.py
|
gauravsingh58/algo
|
397859a53429e7a585e5f6964ad24146c6261326
|
[
"WTFPL"
] | 1
|
2020-10-15T09:10:57.000Z
|
2020-10-15T09:10:57.000Z
|
class PalindromizationDiv2:
    def getMinimumCost(self, X):
        """Return the smallest |Y - X| such that Y is a palindromic number.

        Scans offsets i = 0..X and returns the first i for which either
        X + i or X - i is palindromic; i = X always succeeds (0 is a
        palindrome), so the generator never exhausts.
        """
        def is_palindrome(n):
            s = str(n)
            return s == s[::-1]
        # Bug fix: the original used the Python-2-only `xrange`, which
        # raises NameError on Python 3; `range` iterates identically.
        return next(i for i in range(X + 1)
                    if is_palindrome(X + i) or is_palindrome(X - i))
| 28.285714
| 63
| 0.510101
|
class PalindromizationDiv2:
    def getMinimumCost(self, X):
        """Return the smallest |Y - X| such that Y is a palindromic number.

        i = X always succeeds (0 is a palindrome), so the search below
        never exhausts.
        """
        def is_palindrome(n):
            s = str(n)
            return s == s[::-1]
        # Bug fix: `xrange` is Python-2-only; `range` iterates identically.
        return next(i for i in range(X + 1)
                    if is_palindrome(X + i) or is_palindrome(X - i))
| true
| true
|
79063d3c1c520a30d84ac76b8b18f780c167d3ec
| 541
|
py
|
Python
|
ansible/tests/TestIncompleteConditional.py
|
SODALITE-EU/defect-prediction
|
0d53f04a4572b3555bb87a223dbfedf06ec5bd09
|
[
"Apache-2.0"
] | 2
|
2020-11-27T07:18:42.000Z
|
2021-03-25T14:12:25.000Z
|
ansible/tests/TestIncompleteConditional.py
|
SODALITE-EU/defect-prediction
|
0d53f04a4572b3555bb87a223dbfedf06ec5bd09
|
[
"Apache-2.0"
] | 3
|
2020-11-23T16:37:29.000Z
|
2021-09-12T20:56:15.000Z
|
ansible/tests/TestIncompleteConditional.py
|
SODALITE-EU/defect-prediction
|
0d53f04a4572b3555bb87a223dbfedf06ec5bd09
|
[
"Apache-2.0"
] | 1
|
2020-02-09T11:31:56.000Z
|
2020-02-09T11:31:56.000Z
|
import unittest
from ansiblelint.rules import RulesCollection
from ansiblelint.runner import Runner
from ansiblelints.stage.IncompleteConditional import IncompleteConditional
class TestIncompleteConditional(unittest.TestCase):
    """Exercise the IncompleteConditional ansible-lint rule on a fixture."""
    # Class-level collection: rules registered in setUp accumulate across
    # test instances because the attribute is shared.
    collection = RulesCollection()
    def setUp(self):
        self.collection.register(IncompleteConditional())
    def test_file(self):
        # Run the linter over the fixture playbook and print its matches.
        file_name = 'testResources/ansible-smell/hardcodepassword5.yml'
        good_runner = Runner(file_name, rules=self.collection)
        print(good_runner.run())
| 27.05
| 74
| 0.770795
|
import unittest
from ansiblelint.rules import RulesCollection
from ansiblelint.runner import Runner
from ansiblelints.stage.IncompleteConditional import IncompleteConditional
class TestIncompleteConditional(unittest.TestCase):
    """Run the IncompleteConditional lint rule against a fixture playbook."""
    # Shared class attribute: registrations in setUp accumulate.
    collection = RulesCollection()
    def setUp(self):
        self.collection.register(IncompleteConditional())
    def test_file(self):
        # Lint the fixture file and print the resulting matches.
        file_name = 'testResources/ansible-smell/hardcodepassword5.yml'
        good_runner = Runner(file_name, rules=self.collection)
        print(good_runner.run())
| true
| true
|
79063d731fe4afc084efefc90254d9062d43347c
| 11,703
|
py
|
Python
|
src/secondaires/navigation/equipage/objectifs/rejoindre.py
|
stormi/tsunami
|
bdc853229834b52b2ee8ed54a3161a1a3133d926
|
[
"BSD-3-Clause"
] | null | null | null |
src/secondaires/navigation/equipage/objectifs/rejoindre.py
|
stormi/tsunami
|
bdc853229834b52b2ee8ed54a3161a1a3133d926
|
[
"BSD-3-Clause"
] | null | null | null |
src/secondaires/navigation/equipage/objectifs/rejoindre.py
|
stormi/tsunami
|
bdc853229834b52b2ee8ed54a3161a1a3133d926
|
[
"BSD-3-Clause"
] | null | null | null |
# -*-coding:Utf-8 -*
# Copyright (c) 2014 LE GOFF Vincent
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
# * Neither the name of the copyright holder nor the names of its contributors
# may be used to endorse or promote products derived from this software
# without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT
# OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
"""Objectif rejoindre."""
from math import fabs, radians, sqrt
from vector import Vector
from primaires.vehicule.vecteur import Vecteur
from secondaires.navigation.constantes import *
from secondaires.navigation.equipage.objectif import Objectif
class Rejoindre(Objectif):

    """Objective: reach a point.

    This objective asks a crew to reach a point given in coordinates.
    The point must be static (a dedicated objective exists for moving
    targets, such as ships, which embed their own tracking computation).

    This objective is responsible for finding a path between the current
    position and the target point.  That includes picking detours when
    the direct route cannot be taken, with computations that can be
    fairly involved to determine the speed and heading of the
    intermediate legs.

    """

    def __init__(self, equipage, x=None, y=None, vitesse=1):
        # equipage: the crew carrying out the objective.
        # (x, y): target coordinates; vitesse: requested speed.
        Objectif.__init__(self, equipage, x, y, vitesse)
        self.x = x
        self.y = y
        self.vitesse = vitesse
        self.ancienne_vitesse = None
        self.vitesse_optimale = vitesse
        # Alternate heading chosen to avoid obstacles, if any.
        self.autre_direction = None
        self.autoriser_vitesse_sup = True
        # Non-empty tuple (x, y) while the ship is backing away after a
        # collision at that point.
        self.doit_reculer = ()

    def afficher(self):
        """Return the human-readable display of the objective."""
        if self.doit_reculer:
            return "Doit reculer"
        navire = self.navire
        distance = self.get_distance()
        direction = (distance.direction + 90) % 360
        msg_dist = get_nom_distance(distance)
        return "Cap sur {}° ({}), à {}".format(round(direction),
                distance.nom_direction, msg_dist)

    def get_distance(self):
        """Return the distance (Vecteur) between ship and destination.

        Builds a vector (class Vecteur defined in the 'vehicule' primary
        module) representing the offset between the ship's position and
        the destination point.

        """
        navire = self.navire
        position = navire.opt_position
        o_x = position.x
        o_y = position.y
        d_x = self.x
        d_y = self.y
        distance = Vecteur(d_x - o_x, d_y - o_y, 0)
        return distance

    def trouver_distance_min(self, cible):
        """Find the minimum distance to `cible` and the closest room.

        The distance is the smallest separation between any room of this
        ship and any room of the target ship, restricted to rooms at the
        water level of the current map area.

        """
        navire = self.navire
        etendue = navire.etendue
        altitude = etendue.altitude
        salle_cible = None
        distance = None
        for salle in navire.salles.values():
            if salle.coords.z != altitude:
                continue
            x, y = salle.coords.x, salle.coords.y
            for t_salle in cible.salles.values():
                if t_salle.coords.z != altitude:
                    continue
                t_x, t_y = t_salle.coords.x, t_salle.coords.y
                t_distance = sqrt((t_x - x) ** 2 + (t_y - y) ** 2)
                if distance is None or t_distance < distance:
                    distance = t_distance
                    salle_cible = t_salle
        return distance, salle_cible

    def transmettre_controles(self):
        """Hand the computed controls (speed and heading) to the crew."""
        equipage = self.equipage
        navire = self.navire
        distance = self.get_distance()
        # Prefer the obstacle-avoiding heading when one was chosen.
        if self.autre_direction:
            direction = round(self.autre_direction)
        else:
            direction = round(distance.direction)
        # Create or update the controls
        if equipage.controles.get("direction"):
            equipage.controles["direction"].direction = direction
        else:
            equipage.controler("direction", direction)
        vitesse = self.vitesse
        if equipage.controles.get("vitesse"):
            ancienne_vitesse = equipage.controles["vitesse"].vitesse
            equipage.controles["vitesse"].vitesse = vitesse
            # Only recompute when the requested speed actually changed.
            if vitesse != ancienne_vitesse:
                equipage.controles["vitesse"].calculer_vitesse()
        else:
            equipage.controler("vitesse", self.vitesse,
                    self.autoriser_vitesse_sup)

    def trouver_cap(self):
        """Find the heading, taking obstacles into account."""
        equipage = self.equipage
        navire = self.navire
        # If we are backing away, check whether that is still needed
        if self.doit_reculer:
            x, y = self.doit_reculer
            p_x = navire.position.x
            p_y = navire.position.y
            max_distance = navire.get_max_distance_au_centre()
            if sqrt((x - p_x) ** 2 + (y - p_y) ** 2) > max_distance + 1:
                self.doit_reculer = ()
            else:
                return
        # Examine the points reported by the lookout.
        # Without a lookout there is no way to avoid them.
        tries = equipage.vigie_tries
        # If the dictionary is empty, do nothing
        if not tries:
            self.autre_direction = None
            self.transmettre_controles()
            return
        # Only consider obstacles
        obstacles = tries.get("obstacle", {}).copy()
        obstacles.update(tries.get("salle", {}))
        obstacles.update(tries.get("sallenavire", {}))
        # Only obstacles at a dangerous bearing, between -90° and 90°,
        # are of interest here.
        dangereux = obstacles.copy()
        for angle in obstacles.keys():
            if angle < -90 or angle > 90:
                del dangereux[angle]
        # If there is no obstacle at all, stop here
        if not dangereux:
            self.ancienne_vitesse = None
            self.autre_direction = None
            self.transmettre_controles()
            return
        # Now look for the shortest obstacle distance
        min_angle = None
        min_distance = None
        for angle, (vecteur, point) in dangereux.items():
            if min_distance is None or vecteur.mag < min_distance:
                min_distance = vecteur.mag
                min_angle = angle
        # Adjust the speed according to that distance
        if -45 <= min_angle <= 45:
            if min_distance <= 2:
                self.vitesse = 0.05
            elif min_distance < 10:
                self.vitesse = 0.2
            elif min_distance < 25:
                self.vitesse = 0.6
        # Then look for the best heading
        # (the best heading is the one clear for the longest distance)
        distance = 30
        angles = [i * 5 for i in range(0, 35)]
        for i in range(1, 35):
            angles.append(i * -5)
        # If we are not exactly on the right heading to reach the point
        # (x, y), consider changing course
        o_distance = self.get_distance()
        if o_distance.norme < 30:
            distance = o_distance.norme
        relative = o_distance.direction - navire.direction.direction
        # Try headings closest to the desired relative bearing first.
        angles = sorted(angles, key=lambda a: fabs(a - relative))
        position = navire.opt_position
        while distance > 0:
            for angle in angles:
                vecteur = navire.opt_direction
                vecteur.mag = distance
                vecteur.around_z(radians(angle))
                if not navire.controller_collision(vecteur, collision=False,
                        marge=0.8):
                    if angle != 0:
                        self.info("Cap libre sur {}°".format(angle))
                    self.autre_direction = round((
                            navire.direction.direction + angle) % 360)
                    # A sharp turn is done at zero speed.
                    if fabs(angle) > 30:
                        self.vitesse = 0
                    self.transmettre_controles()
                    return
            distance -= 5
        # No course change, but perhaps a speed change
        self.transmettre_controles()

    def creer(self):
        """Called when the objective is created.

        Creates the controls needed to reach the target, that is, head
        for point (x, y), trying to detect the relevant obstacles and
        pick a replacement heading if necessary.

        """
        equipage = self.equipage
        commandant = self.commandant
        if commandant is None:
            return
        self.trouver_cap()

    def verifier(self, prioritaire):
        """Check that the objective is still valid.

        This currently verifies that no obstacle lies on the assigned
        course (only when this objective is the priority one).

        """
        equipage = self.equipage
        navire = self.navire
        commandant = self.commandant
        if commandant is None:
            return
        if prioritaire:
            self.trouver_cap()

    def reagir_collision(self, salle, contre):
        """React to a collision by backing the ship away."""
        if not self.doit_reculer:
            commandant = self.commandant
            if commandant is None:
                return
            personnage = commandant.personnage
            navire = self.navire
            equipage = self.equipage
            p_x = navire.position.x
            p_y = navire.position.y
            self.warning("Essaye de faire reculer le navire")
            self.doit_reculer = (p_x, p_y)
            # Remove the heading control, if it exists
            equipage.retirer_controle("direction")
            if navire.gouvernail:
                equipage.demander("relacher_gouvernail",
                        personnage=personnage)
            # Ask to furl the sails if needed
            if any(v.hissee for v in navire.voiles):
                equipage.demander("plier_voiles", None, personnage=personnage)
            # Ask to row backwards
            rames = navire.rames
            if rames:
                # Center the oars first if needed
                if any(r.orientation != 0 for r in rames):
                    equipage.demander("ramer", "centre",
                            personnage=personnage)
                equipage.demander("ramer", "arrière", personnage=personnage)
| 36.12037
| 79
| 0.608989
|
from math import fabs, radians, sqrt
from vector import Vector
from primaires.vehicule.vecteur import Vecteur
from secondaires.navigation.constantes import *
from secondaires.navigation.equipage.objectif import Objectif
class Rejoindre(Objectif):
    """Crew objective: sail the ship toward the target point (self.x, self.y).

    Each check refreshes the helm and speed controls, picks an avoidance
    heading when the lookout reports obstacles ahead of the bow, and backs
    the ship away from a collision point before resuming course.
    (Comments added in English; in-game strings remain in French.)
    """
    def __init__(self, equipage, x=None, y=None, vitesse=1):
        Objectif.__init__(self, equipage, x, y, vitesse)
        self.x = x
        self.y = y
        self.vitesse = vitesse
        # Speed remembered before an obstacle forced a slowdown.
        self.ancienne_vitesse = None
        self.vitesse_optimale = vitesse
        # Avoidance heading (degrees) overriding the direct course, if set.
        self.autre_direction = None
        self.autoriser_vitesse_sup = True
        # Non-empty => (x, y) of a collision point the ship backs away from.
        self.doit_reculer = ()
    def afficher(self):
        """Return the short status string shown to players (in French)."""
        if self.doit_reculer:
            return "Doit reculer"
        navire = self.navire
        distance = self.get_distance()
        # +90° converts the vector angle into the displayed bearing
        # (presumably the compass convention used elsewhere — not shown here).
        direction = (distance.direction + 90) % 360
        msg_dist = get_nom_distance(distance)
        return "Cap sur {}° ({}), à {}".format(round(direction),
                distance.nom_direction, msg_dist)
    def get_distance(self):
        """Vector from the ship's current position to the target point."""
        navire = self.navire
        position = navire.opt_position
        o_x = position.x
        o_y = position.y
        d_x = self.x
        d_y = self.y
        distance = Vecteur(d_x - o_x, d_y - o_y, 0)
        return distance
    def trouver_distance_min(self, cible):
        """Return (min distance, closest room of *cible*) between two ships.

        Only rooms at the water level (etendue.altitude) are compared,
        pairwise across both ships' decks.
        """
        navire = self.navire
        etendue = navire.etendue
        altitude = etendue.altitude
        salle_cible = None
        distance = None
        for salle in navire.salles.values():
            if salle.coords.z != altitude:
                continue
            x, y = salle.coords.x, salle.coords.y
            for t_salle in cible.salles.values():
                if t_salle.coords.z != altitude:
                    continue
                t_x, t_y = t_salle.coords.x, t_salle.coords.y
                t_distance = sqrt((t_x - x) ** 2 + (t_y - y) ** 2)
                if distance is None or t_distance < distance:
                    distance = t_distance
                    salle_cible = t_salle
        return distance, salle_cible
    def transmettre_controles(self):
        """Push the chosen heading and speed into the crew's controls."""
        equipage = self.equipage
        navire = self.navire
        distance = self.get_distance()
        # Prefer the avoidance heading when one was computed.
        if self.autre_direction:
            direction = round(self.autre_direction)
        else:
            direction = round(distance.direction)
        if equipage.controles.get("direction"):
            equipage.controles["direction"].direction = direction
        else:
            equipage.controler("direction", direction)
        vitesse = self.vitesse
        if equipage.controles.get("vitesse"):
            ancienne_vitesse = equipage.controles["vitesse"].vitesse
            equipage.controles["vitesse"].vitesse = vitesse
            # Recompute only when the requested speed actually changed.
            if vitesse != ancienne_vitesse:
                equipage.controles["vitesse"].calculer_vitesse()
        else:
            equipage.controler("vitesse", self.vitesse,
                    self.autoriser_vitesse_sup)
    def trouver_cap(self):
        """Choose heading and speed, avoiding obstacles seen by the lookout."""
        equipage = self.equipage
        navire = self.navire
        # While backing off, wait until clear of the collision point.
        if self.doit_reculer:
            x, y = self.doit_reculer
            p_x = navire.position.x
            p_y = navire.position.y
            max_distance = navire.get_max_distance_au_centre()
            if sqrt((x - p_x) ** 2 + (y - p_y) ** 2) > max_distance + 1:
                self.doit_reculer = ()
            else:
                return
        # Points reported by the lookout; with no lookout, steer directly.
        tries = equipage.vigie_tries
        if not tries:
            self.autre_direction = None
            self.transmettre_controles()
            return
        obstacles = tries.get("obstacle", {}).copy()
        obstacles.update(tries.get("salle", {}))
        obstacles.update(tries.get("sallenavire", {}))
        # Keep only obstacles within 90° either side of the bow.
        dangereux = obstacles.copy()
        for angle in obstacles.keys():
            if angle < -90 or angle > 90:
                del dangereux[angle]
        if not dangereux:
            self.ancienne_vitesse = None
            self.autre_direction = None
            self.transmettre_controles()
            return
        # The closest threatening point determines how hard we slow down.
        min_angle = None
        min_distance = None
        for angle, (vecteur, point) in dangereux.items():
            if min_distance is None or vecteur.mag < min_distance:
                min_distance = vecteur.mag
                min_angle = angle
        if -45 <= min_angle <= 45:
            if min_distance <= 2:
                self.vitesse = 0.05
            elif min_distance < 10:
                self.vitesse = 0.2
            elif min_distance < 25:
                self.vitesse = 0.6
        # Probe candidate headings (multiples of 5°, those closest to the
        # direct course first) at decreasing range until one is free.
        distance = 30
        angles = [i * 5 for i in range(0, 35)]
        for i in range(1, 35):
            angles.append(i * -5)
        o_distance = self.get_distance()
        if o_distance.norme < 30:
            distance = o_distance.norme
        relative = o_distance.direction - navire.direction.direction
        angles = sorted(angles, key=lambda a: fabs(a - relative))
        position = navire.opt_position
        while distance > 0:
            for angle in angles:
                vecteur = navire.opt_direction
                vecteur.mag = distance
                vecteur.around_z(radians(angle))
                if not navire.controller_collision(vecteur, collision=False,
                        marge=0.8):
                    if angle != 0:
                        self.info("Cap libre sur {}°".format(angle))
                    self.autre_direction = round((
                            navire.direction.direction + angle) % 360)
                    # Sharp turns (> 30°) are taken at zero speed.
                    if fabs(angle) > 30:
                        self.vitesse = 0
                    self.transmettre_controles()
                    return
            distance -= 5
        # No free heading found: transmit the current settings anyway.
        self.transmettre_controles()
    def creer(self):
        """Objective start hook: set the initial cap (needs a commander)."""
        equipage = self.equipage
        commandant = self.commandant
        if commandant is None:
            return
        self.trouver_cap()
    def verifier(self, prioritaire):
        """Periodic hook: refresh the cap while this objective is priority."""
        equipage = self.equipage
        navire = self.navire
        commandant = self.commandant
        if commandant is None:
            return
        if prioritaire:
            self.trouver_cap()
    def reagir_collision(self, salle, contre):
        """Collision reaction: stop all propulsion and back the ship away."""
        if not self.doit_reculer:
            commandant = self.commandant
            if commandant is None:
                return
            personnage = commandant.personnage
            navire = self.navire
            equipage = self.equipage
            p_x = navire.position.x
            p_y = navire.position.y
            self.warning("Essaye de faire reculer le navire")
            # Remember where the collision happened; trouver_cap() clears
            # this once the ship is far enough away.
            self.doit_reculer = (p_x, p_y)
            # Free the helm, furl the sails, then row astern.
            equipage.retirer_controle("direction")
            if navire.gouvernail:
                equipage.demander("relacher_gouvernail",
                        personnage=personnage)
            if any(v.hissee for v in navire.voiles):
                equipage.demander("plier_voiles", None, personnage=personnage)
            rames = navire.rames
            if rames:
                # Oars must be centred before rowing astern.
                if any(r.orientation != 0 for r in rames):
                    equipage.demander("ramer", "centre",
                            personnage=personnage)
                equipage.demander("ramer", "arrière", personnage=personnage)
| true
| true
|
79063e026dbd8ff02b4d2b9728c64b760a739851
| 1,182
|
py
|
Python
|
test/functional/rpc_generate.py
|
hhhogannwo/bitcoin
|
cb81907eb1e5a87669a8244d8657041aa8456328
|
[
"MIT"
] | null | null | null |
test/functional/rpc_generate.py
|
hhhogannwo/bitcoin
|
cb81907eb1e5a87669a8244d8657041aa8456328
|
[
"MIT"
] | null | null | null |
test/functional/rpc_generate.py
|
hhhogannwo/bitcoin
|
cb81907eb1e5a87669a8244d8657041aa8456328
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python3
# Copyright (c) 2020 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test generate RPC."""
from test_framework.test_framework import MAGATestFramework
from test_framework.util import (
assert_equal,
assert_raises_rpc_error,
)
class RPCGenerateTest(MAGATestFramework):
    """Verify the removed `generate` RPC points users at the -generate cli option."""

    def set_test_params(self):
        # One node suffices: we only query RPC error/help output.
        self.num_nodes = 1

    def run_test(self):
        node = self.nodes[0]
        message = (
            "generate\n"
            "has been replaced by the -generate "
            "cli option. Refer to -help for more information."
        )
        self.log.info("Test rpc generate raises with message to use cli option")
        assert_raises_rpc_error(-32601, message, node.rpc.generate)
        self.log.info("Test rpc generate help prints message to use cli option")
        assert_equal(message, node.help("generate"))
        self.log.info("Test rpc generate is a hidden command not discoverable in general help")
        assert message not in node.help()
if __name__ == "__main__":
    # Entry point: hand control to the functional-test framework runner.
    RPCGenerateTest().main()
| 31.945946
| 95
| 0.690355
|
from test_framework.test_framework import MAGATestFramework
from test_framework.util import (
assert_equal,
assert_raises_rpc_error,
)
class RPCGenerateTest(MAGATestFramework):
    """Verify the removed `generate` RPC: it must raise a "method not found"
    style error pointing at the -generate cli option, print the same message
    from `help generate`, and stay hidden from the general help listing."""
    def set_test_params(self):
        # One node suffices: only RPC error/help output is queried.
        self.num_nodes = 1
    def run_test(self):
        message = (
            "generate\n"
            "has been replaced by the -generate "
            "cli option. Refer to -help for more information."
        )
        self.log.info("Test rpc generate raises with message to use cli option")
        assert_raises_rpc_error(-32601, message, self.nodes[0].rpc.generate)
        self.log.info("Test rpc generate help prints message to use cli option")
        assert_equal(message, self.nodes[0].help("generate"))
        self.log.info("Test rpc generate is a hidden command not discoverable in general help")
        assert message not in self.nodes[0].help()
if __name__ == "__main__":
    # Entry point: hand control to the functional-test framework runner.
    RPCGenerateTest().main()
| true
| true
|
79063e1c7b5d581ba648e7e5d23b27c3fe2cf71f
| 542
|
py
|
Python
|
rateMyProf/manage.py
|
DefCon-007/rateMyProfessor
|
78a4a336136f3c5675a13b80b95963dbfdf2e88d
|
[
"MIT"
] | 10
|
2018-12-30T15:44:59.000Z
|
2021-01-19T20:24:11.000Z
|
rateMyProf/manage.py
|
DefCon-007/rateMyProfessor
|
78a4a336136f3c5675a13b80b95963dbfdf2e88d
|
[
"MIT"
] | 6
|
2020-02-11T23:40:43.000Z
|
2021-06-10T21:13:42.000Z
|
rateMyProf/manage.py
|
DefCon-007/rateMyProfessor
|
78a4a336136f3c5675a13b80b95963dbfdf2e88d
|
[
"MIT"
] | 2
|
2018-12-31T15:47:43.000Z
|
2019-10-04T11:51:26.000Z
|
#!/usr/bin/env python
import os
import sys
def _main():
    """Django administrative entry point (equivalent of django-admin)."""
    # Point Django at this project's settings before importing management.
    os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'rateMyProf.settings')
    try:
        from django.core.management import execute_from_command_line
    except ImportError as exc:
        raise ImportError(
            "Couldn't import Django. Are you sure it's installed and "
            "available on your PYTHONPATH environment variable? Did you "
            "forget to activate a virtual environment?"
        ) from exc
    execute_from_command_line(sys.argv)


if __name__ == '__main__':
    _main()
| 33.875
| 74
| 0.688192
|
import os
import sys
if __name__ == '__main__':
    # Point Django at this project's settings module before anything else.
    os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'rateMyProf.settings')
    try:
        from django.core.management import execute_from_command_line
    except ImportError as exc:
        # Re-raise with a friendlier hint; the original cause stays chained.
        raise ImportError(
            "Couldn't import Django. Are you sure it's installed and "
            "available on your PYTHONPATH environment variable? Did you "
            "forget to activate a virtual environment?"
        ) from exc
    # Dispatch to the management command named on the command line.
    execute_from_command_line(sys.argv)
| true
| true
|
79063fa907026da92f56074f756cdf53977749be
| 3,976
|
py
|
Python
|
app/recipe/tests/test_tags_api.py
|
deborahoni/recipe-app-api
|
da0051515f5cf05f48ca61366923cb11fb8a2ce0
|
[
"MIT"
] | null | null | null |
app/recipe/tests/test_tags_api.py
|
deborahoni/recipe-app-api
|
da0051515f5cf05f48ca61366923cb11fb8a2ce0
|
[
"MIT"
] | null | null | null |
app/recipe/tests/test_tags_api.py
|
deborahoni/recipe-app-api
|
da0051515f5cf05f48ca61366923cb11fb8a2ce0
|
[
"MIT"
] | null | null | null |
from django.contrib.auth import get_user_model
from django.urls import reverse
from django.test import TestCase
from rest_framework import status
from rest_framework.test import APIClient
from core.models import Tag,Recipe
from recipe.serializers import TagSerializer
TAGS_URL = reverse('recipe:tag-list')
class PublicTagsApiTests(TestCase):
    """Tests for the tags endpoint when no user is authenticated."""

    def setUp(self):
        # Unauthenticated API client.
        self.client = APIClient()

    def test_login_required(self):
        """Retrieving tags without logging in must be rejected with 401."""
        response = self.client.get(TAGS_URL)
        self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED)
class PrivateTagsApiTests(TestCase):
    """Test the authorized user tags API"""
    def setUp(self):
        # Every test runs as this authenticated user.
        self.user = get_user_model().objects.create_user(
            'test@mytest.com',
            'password'
        )
        self.client = APIClient()
        self.client.force_authenticate(self.user)
    def test_retrieve_tags(self):
        """Test retrieving tags"""
        Tag.objects.create(user=self.user, name='Vegan')
        Tag.objects.create(user=self.user, name='Dessert')
        res = self.client.get(TAGS_URL)
        # The endpoint is expected to return tags ordered by name descending.
        tags = Tag.objects.all().order_by('-name')
        serializer = TagSerializer(tags, many=True)
        self.assertEqual(res.status_code, status.HTTP_200_OK)
        self.assertEqual(res.data, serializer.data)
    def test_tags_limited_to_user(self):
        """Test that tags returned are for authenticated user"""
        # A second user's tag must not leak into the response.
        user2 = get_user_model().objects.create_user(
            'thatmail@mytest.com',
            'testpass'
        )
        Tag.objects.create(user=user2, name='Tasty')
        tag = Tag.objects.create(user=self.user, name='Just Food')
        res = self.client.get(TAGS_URL)
        self.assertEqual(res.status_code, status.HTTP_200_OK)
        self.assertEqual(len(res.data), 1)
        self.assertEqual(res.data[0]['name'], tag.name)
    def test_create_tag_successful(self):
        """Test creating a new tag"""
        payload = {'name': 'Simple'}
        self.client.post(TAGS_URL, payload)
        # Verify directly in the database rather than via the response body.
        exists = Tag.objects.filter(
            user=self.user,
            name=payload['name']
        ).exists()
        self.assertTrue(exists)
    def test_create_tag_invalid(self):
        """Test creating a new tag with invalid payload"""
        payload = {'name': ''}
        res = self.client.post(TAGS_URL, payload)
        self.assertEqual(res.status_code, status.HTTP_400_BAD_REQUEST)
    def test_retrieve_tags_assigned_to_recipes(self):
        """Test filtering tags by those assigned to recipes"""
        tag1 = Tag.objects.create(user=self.user, name='Breakfast')
        tag2 = Tag.objects.create(user=self.user, name='Lunch')
        recipe = Recipe.objects.create(
            title='Coriander eggs on toast',
            time_minutes=10,
            price=5.00,
            user=self.user,
        )
        recipe.tags.add(tag1)
        # assigned_only=1 should return only tags attached to a recipe.
        res = self.client.get(TAGS_URL, {'assigned_only': 1})
        serializer1 = TagSerializer(tag1)
        serializer2 = TagSerializer(tag2)
        self.assertIn(serializer1.data, res.data)
        self.assertNotIn(serializer2.data, res.data)
    def test_retrieve_tags_assigned_unique(self):
        """Test filtering tags by assigned returns unique items"""
        # One tag attached to two recipes must appear only once.
        tag = Tag.objects.create(user=self.user, name='Breakfast')
        Tag.objects.create(user=self.user, name='Lunch')
        recipe1 = Recipe.objects.create(
            title='Pancakes',
            time_minutes=5,
            price=3.00,
            user=self.user
        )
        recipe1.tags.add(tag)
        recipe2 = Recipe.objects.create(
            title='Porridge',
            time_minutes=3,
            price=2.00,
            user=self.user
        )
        recipe2.tags.add(tag)
        res = self.client.get(TAGS_URL, {'assigned_only': 1})
        self.assertEqual(len(res.data), 1)
| 31.555556
| 71
| 0.629527
|
from django.contrib.auth import get_user_model
from django.urls import reverse
from django.test import TestCase
from rest_framework import status
from rest_framework.test import APIClient
from core.models import Tag,Recipe
from recipe.serializers import TagSerializer
TAGS_URL = reverse('recipe:tag-list')
class PublicTagsApiTests(TestCase):
    """Tests for the tags endpoint when no user is authenticated."""
    def setUp(self):
        # Unauthenticated API client.
        self.client = APIClient()
    def test_login_required(self):
        """Retrieving tags without logging in must be rejected with 401."""
        res = self.client.get(TAGS_URL)
        self.assertEqual(res.status_code, status.HTTP_401_UNAUTHORIZED)
class PrivateTagsApiTests(TestCase):
def setUp(self):
self.user = get_user_model().objects.create_user(
'test@mytest.com',
'password'
)
self.client = APIClient()
self.client.force_authenticate(self.user)
def test_retrieve_tags(self):
Tag.objects.create(user=self.user, name='Vegan')
Tag.objects.create(user=self.user, name='Dessert')
res = self.client.get(TAGS_URL)
tags = Tag.objects.all().order_by('-name')
serializer = TagSerializer(tags, many=True)
self.assertEqual(res.status_code, status.HTTP_200_OK)
self.assertEqual(res.data, serializer.data)
def test_tags_limited_to_user(self):
user2 = get_user_model().objects.create_user(
'thatmail@mytest.com',
'testpass'
)
Tag.objects.create(user=user2, name='Tasty')
tag = Tag.objects.create(user=self.user, name='Just Food')
res = self.client.get(TAGS_URL)
self.assertEqual(res.status_code, status.HTTP_200_OK)
self.assertEqual(len(res.data), 1)
self.assertEqual(res.data[0]['name'], tag.name)
def test_create_tag_successful(self):
payload = {'name': 'Simple'}
self.client.post(TAGS_URL, payload)
exists = Tag.objects.filter(
user=self.user,
name=payload['name']
).exists()
self.assertTrue(exists)
def test_create_tag_invalid(self):
payload = {'name': ''}
res = self.client.post(TAGS_URL, payload)
self.assertEqual(res.status_code, status.HTTP_400_BAD_REQUEST)
def test_retrieve_tags_assigned_to_recipes(self):
tag1 = Tag.objects.create(user=self.user, name='Breakfast')
tag2 = Tag.objects.create(user=self.user, name='Lunch')
recipe = Recipe.objects.create(
title='Coriander eggs on toast',
time_minutes=10,
price=5.00,
user=self.user,
)
recipe.tags.add(tag1)
res = self.client.get(TAGS_URL, {'assigned_only': 1})
serializer1 = TagSerializer(tag1)
serializer2 = TagSerializer(tag2)
self.assertIn(serializer1.data, res.data)
self.assertNotIn(serializer2.data, res.data)
def test_retrieve_tags_assigned_unique(self):
tag = Tag.objects.create(user=self.user, name='Breakfast')
Tag.objects.create(user=self.user, name='Lunch')
recipe1 = Recipe.objects.create(
title='Pancakes',
time_minutes=5,
price=3.00,
user=self.user
)
recipe1.tags.add(tag)
recipe2 = Recipe.objects.create(
title='Porridge',
time_minutes=3,
price=2.00,
user=self.user
)
recipe2.tags.add(tag)
res = self.client.get(TAGS_URL, {'assigned_only': 1})
self.assertEqual(len(res.data), 1)
| true
| true
|
79063fc4091307d7ca99586e5e73c511ea517bf9
| 10,262
|
py
|
Python
|
app.py
|
NazarenoCavazzon/EuroPriceAPI
|
9be4d4e0229260e662281bf01e45f512a6f97d5f
|
[
"MIT"
] | 1
|
2021-06-30T01:46:20.000Z
|
2021-06-30T01:46:20.000Z
|
app.py
|
NazarenoCavazzon/BlueAPI
|
9be4d4e0229260e662281bf01e45f512a6f97d5f
|
[
"MIT"
] | 1
|
2021-07-01T01:48:31.000Z
|
2021-07-01T01:48:31.000Z
|
app.py
|
NazarenoCavazzon/BlueAPI
|
9be4d4e0229260e662281bf01e45f512a6f97d5f
|
[
"MIT"
] | 1
|
2021-06-30T01:47:13.000Z
|
2021-06-30T01:47:13.000Z
|
# Standard library
import os
import codecs
from datetime import datetime

# Third-party
from flask import Flask, send_from_directory, jsonify
from flask_caching import Cache
from bs4 import BeautifulSoup

# Local application data
from busSchedules import schedule1B
from busSchedules import schedule2
from busSchedules import schedule3
from busSchedules import schedule4
from busSchedules import schedule5
from busSchedules import schedule6
from busZonesTimes import busZonesTimesOne
from busZonesTimes import busZonesTimesOneB
from busZonesTimes import busZonesTimesTwo
from busZonesTimes import busZonesTimesThree
from busZonesTimes import busZonesTimesFour
from busZonesTimes import busZonesTimesFive
from busZonesTimes import busZonesTimesSix
from busZonesTimes import busZonesTimesOneSaturday
from busZonesTimes import busZonesTimesOneBSaturday
from busZonesTimes import busZonesTimesTwoSaturday
from busZonesTimes import busZonesTimesThreeSaturday
from busZonesTimes import busZonesTimesFourSaturday
from busZonesTimes import busZonesTimesFiveSaturday
from busZonesTimes import busZonesTimesSixSaturday
from busZonesTimes import busZonesTimesOneSunday
from busZonesTimes import busZonesTimesOneBSunday
from busZonesTimes import busZonesTimesTwoSunday  # added: used by /api/busZonesTimes/2/sunday but was never imported (NameError at request time)
from busZonesTimes import busZonesTimesTwoSaturday  # kept: this duplicate of the Saturday import above existed in the original
from busZonesTimes import busZonesTimesThreeSunday
from busZonesTimes import busZonesTimesFourSunday
from busZonesTimes import busZonesTimesFiveSunday
from busZonesTimes import busZonesTimesSixSunday
from busRoutes import lineOne
from busRoutes import lineOneB
from busRoutes import lineTwo
from busRoutes import lineThree
from busRoutes import lineFour
from busRoutes import lineFive
from busRoutes import lineSix
from busRoutes import line242
from busStops import busStopsDict
from busStops import linesDict
VERSION = "1.0"
CACHE_TIMEOUT_SECONDS = os.getenv('CACHE_TIMEOUT', 3600)
GIT_REPO_URL = 'https://github.com/NazarenoCavazzon/BlueAPI'
DOLAR_URL = 'https://www.paralelohoy.com.ar/p/cotizacion-dolar-hoy-argentina.html'
EURO_URL = 'https://www.paralelohoy.com.ar/p/cotizacion-euro-hoy-argentina.html'
REAL_URL = 'https://www.paralelohoy.com.ar/p/cotizacion-real-hoy-argentina.html'
class BusStop:
    """A single stop on a bus line.

    Plain record type holding the line identifier, the stop's
    human-readable name and address, and its geographic coordinates.
    """

    def __init__(self, line, name, address, latitude, longitude):
        self.longitude = longitude
        self.latitude = latitude
        self.address = address
        self.name = name
        self.line = line
def getValues(url):
    """Scrape buy/sell quotes from a paralelohoy.com.ar quote page.

    Fetches *url*, locates the first HTML <table>, and parses its tbody
    text. Each non-empty line is expected to look like
    "Label: $buy $sell"; everything after the first ':' is split on '$'
    and the empty leading piece is discarded, yielding a [buy, sell]
    pair of strings per line.

    Returns a list of those pairs, in page order.
    """
    # Imported locally (as in the original) so the module can load even
    # when `requests` is only needed once a quote endpoint is hit.
    import requests

    html_source = requests.get(url).text
    soup = BeautifulSoup(html_source, 'lxml')
    table = soup.find("table")
    # Drop blank lines produced by the HTML-to-text conversion.
    rows = filter(None, table.tbody.text.split("\n"))
    # "Label: $buy $sell" -> ["buy ", "sell"]; the [1:] slice replaces the
    # original's dead `value = []` + pop(0), and no longer shadows `list`.
    return [row.split(":")[1].split("$")[1:] for row in rows]
def formatResponse(value):
    """Build the JSON payload for one quote: timestamp plus buy/sell prices.

    *value* is a two-element sequence (buy, sell) as produced by
    getValues(); both entries are rendered as strings.
    """
    buy, sell = value[0], value[1]
    stamp = datetime.today().strftime('%Y-%m-%d %H:%M:%S')
    # Key order matters for the API output (JSON_SORT_KEYS is disabled).
    return {"fecha": stamp, "compra": f"{buy}", "venta": f"{sell}"}
# Flask application setup: preserve JSON key order in responses and use a
# simple in-process cache for the scraped currency quotes.
app = Flask(__name__)
app.config['JSON_SORT_KEYS'] = False
cache = Cache(app, config={'CACHE_TYPE': 'simple'})
@app.route("/favicon.ico")
def favicon():
    # Serve the favicon from the static/ directory beside this module.
    return send_from_directory(os.path.join(app.root_path, 'static'),'favicon.ico')
@app.route("/")
def getRoot():
    """Render index.html with placeholder characters substituted.

    The template uses '¡' as the placeholder for the app version and 'ñ'
    for the repository URL. Every occurrence is replaced, exactly as the
    original per-character loop did (assumes the template contains no
    other uses of these characters — TODO confirm against index.html).
    """
    with codecs.open('index.html', "r", "utf-8") as f:
        codeHTML = f.read()
    # Chained str.replace is behaviorally identical to the original
    # character loop but avoids quadratic string concatenation.
    return codeHTML.replace("¡", VERSION).replace("ñ", GIT_REPO_URL)
@app.route("/api/ping")
def ping():
    # Liveness probe.
    return "pong"
# Currency quote endpoints (cached). Index [0] of the scraped table is
# served as "oficial" and index [1] as "blue", matching the route names —
# this mirrors the source page's row order; re-verify if the page changes.
@app.route("/api/dolar/oficial")
@cache.cached(timeout=CACHE_TIMEOUT_SECONDS)
def getDolarOficial():
    dolarValues = getValues(DOLAR_URL)
    dolarOficial = formatResponse(dolarValues[0])
    return jsonify(dolarOficial)
@app.route("/api/dolar/blue")
@cache.cached(timeout=CACHE_TIMEOUT_SECONDS)
def getDolarBlue():
    dolarValues = getValues(DOLAR_URL)
    dolarBlue = formatResponse(dolarValues[1])
    return jsonify(dolarBlue)
@app.route("/api/euro/oficial")
@cache.cached(timeout=CACHE_TIMEOUT_SECONDS)
def getEuroOficial():
    euroValues = getValues(EURO_URL)
    euroOficial = formatResponse(euroValues[0])
    return jsonify(euroOficial)
@app.route("/api/euro/blue")
@cache.cached(timeout=CACHE_TIMEOUT_SECONDS)
def getEuroBlue():
    euroValues = getValues(EURO_URL)
    euroBlue = formatResponse(euroValues[1])
    return jsonify(euroBlue)
@app.route("/api/real/oficial")
@cache.cached(timeout=CACHE_TIMEOUT_SECONDS)
def getRealOficial():
    realValues = getValues(REAL_URL)
    realOficial = formatResponse(realValues[0])
    return jsonify(realOficial)
@app.route("/api/real/blue")
@cache.cached(timeout=CACHE_TIMEOUT_SECONDS)
def getRealBlue():
    realValues = getValues(REAL_URL)
    realBlue = formatResponse(realValues[1])
    return jsonify(realBlue)
# Static bus data endpoints: stops, per-line routes, and the line index.
@app.route("/api/busstops")
def getBusStops():
    return jsonify(busStopsDict)
@app.route("/api/1")
def getLine1():
    return jsonify(lineOne)
@app.route("/api/1B")
def getLine1B():
    return jsonify(lineOneB)
@app.route("/api/2")
def getLine2():
    return jsonify(lineTwo)
@app.route("/api/3")
def getLine3():
    return jsonify(lineThree)
@app.route("/api/4")
def getLine4():
    return jsonify(lineFour)
@app.route("/api/5")
def getLine5():
    return jsonify(lineFive)
@app.route("/api/6")
def getLine6():
    return jsonify(lineSix)
@app.route("/api/linesDict")
def getLines():
    return jsonify(linesDict)
# Weekday timetables by ZONE ===================================================
@app.route("/api/busZonesTimes/1")
def getBusZonesOne():
    return jsonify(busZonesTimesOne)
@app.route("/api/busZonesTimes/1B")
def getBusZonesOneB():
    return jsonify(busZonesTimesOneB)
@app.route("/api/busZonesTimes/2")
def getBusZonesTwo():
    return jsonify(busZonesTimesTwo)
@app.route("/api/busZonesTimes/3")
def getBusZonesThree():
    return jsonify(busZonesTimesThree)
@app.route("/api/busZonesTimes/4")
def getBusZonesFour():
    return jsonify(busZonesTimesFour)
@app.route("/api/busZonesTimes/5")
def getBusZonesFive():
    return jsonify(busZonesTimesFive)
@app.route("/api/busZonesTimes/6")
def getBusZonesSix():
    return jsonify(busZonesTimesSix)
# Sunday timetables by ZONE ====================================================
@app.route("/api/busZonesTimes/1/sunday")
def getBusZonesOneSunday():
    return jsonify(busZonesTimesOneSunday)
@app.route("/api/busZonesTimes/1B/sunday")
def getBusZonesOneBSunday():
    return jsonify(busZonesTimesOneBSunday)
@app.route("/api/busZonesTimes/2/sunday")
def getBusZonesTwoSunday():
    # NOTE(review): busZonesTimesTwoSunday is never imported at the top of
    # this file (the import list repeats busZonesTimesTwoSaturday instead),
    # so this handler raises NameError when hit — fix the import block.
    return jsonify(busZonesTimesTwoSunday)
@app.route("/api/busZonesTimes/3/sunday")
def getBusZonesThreeSunday():
    return jsonify(busZonesTimesThreeSunday)
@app.route("/api/busZonesTimes/4/sunday")
def getBusZonesFourSunday():
    return jsonify(busZonesTimesFourSunday)
@app.route("/api/busZonesTimes/5/sunday")
def getBusZonesFiveSunday():
    return jsonify(busZonesTimesFiveSunday)
@app.route("/api/busZonesTimes/6/sunday")
def getBusZonesSixSunday():
    return jsonify(busZonesTimesSixSunday)
# Saturday timetables by ZONE ==================================================
@app.route("/api/busZonesTimes/1/saturday")
def getBusZonesOneSaturday():
    return jsonify(busZonesTimesOneSaturday)
@app.route("/api/busZonesTimes/1B/saturday")
def getBusZonesOneBSaturday():
    return jsonify(busZonesTimesOneBSaturday)
@app.route("/api/busZonesTimes/2/saturday")
def getBusZonesTwoSaturday():
    return jsonify(busZonesTimesTwoSaturday)
@app.route("/api/busZonesTimes/3/saturday")
def getBusZonesThreeSaturday():
    return jsonify(busZonesTimesThreeSaturday)
@app.route("/api/busZonesTimes/4/saturday")
def getBusZonesFourSaturday():
    return jsonify(busZonesTimesFourSaturday)
@app.route("/api/busZonesTimes/5/saturday")
def getBusZonesFiveSaturday():
    return jsonify(busZonesTimesFiveSaturday)
@app.route("/api/busZonesTimes/6/saturday")
def getBusZonesSixSaturday():
    return jsonify(busZonesTimesSixSaturday)
# App buttons: external links served to the client =============================
@app.route("/api/gmaps")
def getGMaps():
    return jsonify("https://www.google.com/maps/d/u/0/viewer?mid=1d5o2MklEFr0DpG_i_mRwcUd9yjc&ll=-31.654431124663883%2C-64.43315245330842&z=15")
@app.route("/api/donacion")
def getDonationPage():
    return jsonify("https://cafecito.app/paragracia")
# Weekday per-line schedules ===================================================
@app.route("/api/1B/schedule")
def get1Bchedule():
    return jsonify(schedule1B)
@app.route("/api/2/schedule")
def get2chedule():
    return jsonify(schedule2)
@app.route("/api/3/schedule")
def get3chedule():
    return jsonify(schedule3)
@app.route("/api/4/schedule")
def get4chedule():
    return jsonify(schedule4)
@app.route("/api/5/schedule")
def get5chedule():
    return jsonify(schedule5)
@app.route("/api/6/schedule")
def get6chedule():
    return jsonify(schedule6)
# Weekend per-line schedules ===================================================
# NOTE(review): the block below is disabled code kept as a module-level
# string literal; the function names inside collide with the weekday
# handlers above and would need renaming before it could be re-enabled.
"""
@app.route("/api/1B/schedule/saturday")
def get1Bchedule():
    return jsonify(schedule1B)
@app.route("/api/2/schedule/saturday")
def get2chedule():
    return jsonify(schedule2)
@app.route("/api/3/schedule/saturday")
def get3chedule():
    return jsonify(schedule3)
@app.route("/api/4/schedule/saturday")
def get4chedule():
    return jsonify(schedule4)
@app.route("/api/5/schedule/saturday")
def get5chedule():
    return jsonify(schedule5)
@app.route("/api/6/schedule/saturday")
def get6chedule():
    return jsonify(schedule6)
"""
if __name__ == '__main__':
    # NOTE(review): os.getenv returns a *string* when PORT is set in the
    # environment (int 5000 only as the default) — confirm Flask accepts it.
    app.run(debug=False, port=os.getenv('PORT', 5000))
| 29.153409
| 145
| 0.693432
|
# Standard library
import os
import codecs
from datetime import datetime

# Third-party
from flask import Flask, send_from_directory, jsonify
from flask_caching import Cache
from bs4 import BeautifulSoup

# Local application data
from busSchedules import schedule1B
from busSchedules import schedule2
from busSchedules import schedule3
from busSchedules import schedule4
from busSchedules import schedule5
from busSchedules import schedule6
from busZonesTimes import busZonesTimesOne
from busZonesTimes import busZonesTimesOneB
from busZonesTimes import busZonesTimesTwo
from busZonesTimes import busZonesTimesThree
from busZonesTimes import busZonesTimesFour
from busZonesTimes import busZonesTimesFive
from busZonesTimes import busZonesTimesSix
from busZonesTimes import busZonesTimesOneSaturday
from busZonesTimes import busZonesTimesOneBSaturday
from busZonesTimes import busZonesTimesTwoSaturday
from busZonesTimes import busZonesTimesThreeSaturday
from busZonesTimes import busZonesTimesFourSaturday
from busZonesTimes import busZonesTimesFiveSaturday
from busZonesTimes import busZonesTimesSixSaturday
from busZonesTimes import busZonesTimesOneSunday
from busZonesTimes import busZonesTimesOneBSunday
from busZonesTimes import busZonesTimesTwoSunday  # added: used by /api/busZonesTimes/2/sunday but was never imported (NameError at request time)
from busZonesTimes import busZonesTimesTwoSaturday  # kept: this duplicate of the Saturday import above existed in the original
from busZonesTimes import busZonesTimesThreeSunday
from busZonesTimes import busZonesTimesFourSunday
from busZonesTimes import busZonesTimesFiveSunday
from busZonesTimes import busZonesTimesSixSunday
from busRoutes import lineOne
from busRoutes import lineOneB
from busRoutes import lineTwo
from busRoutes import lineThree
from busRoutes import lineFour
from busRoutes import lineFive
from busRoutes import lineSix
from busRoutes import line242
from busStops import busStopsDict
from busStops import linesDict
VERSION = "1.0"
CACHE_TIMEOUT_SECONDS = os.getenv('CACHE_TIMEOUT', 3600)
GIT_REPO_URL = 'https://github.com/NazarenoCavazzon/BlueAPI'
DOLAR_URL = 'https://www.paralelohoy.com.ar/p/cotizacion-dolar-hoy-argentina.html'
EURO_URL = 'https://www.paralelohoy.com.ar/p/cotizacion-euro-hoy-argentina.html'
REAL_URL = 'https://www.paralelohoy.com.ar/p/cotizacion-real-hoy-argentina.html'
class BusStop:
def __init__(self, line, name, address, latitude, longitude):
self.line = line
self.name = name
self.address = address
self.latitude = latitude
self.longitude = longitude
def getValues(url):
import requests
html_source = requests.get(url).text
soup = BeautifulSoup(html_source, 'lxml')
table = soup.find("table")
span = table.tbody.text
splittedSpan = span.split("\n")
splittedSpan = filter(None, splittedSpan)
list = []
for x in splittedSpan:
value = []
value = x.split(":")[1].split("$")
value.pop(0)
list.append(value)
return list
def formatResponse(value):
return {
"fecha": datetime.today().strftime('%Y-%m-%d %H:%M:%S'),
"compra" : f"{value[0]}",
"venta" : f"{value[1]}"
}
app = Flask(__name__)
app.config['JSON_SORT_KEYS'] = False
cache = Cache(app, config={'CACHE_TYPE': 'simple'})
@app.route("/favicon.ico")
def favicon():
return send_from_directory(os.path.join(app.root_path, 'static'),'favicon.ico')
@app.route("/")
def getRoot():
html = ""
with codecs.open('index.html', "r", "utf-8") as f:
codeHTML = f.read()
for element in codeHTML:
if element == "¡":
element = VERSION
html += element
elif element == "ñ":
element = GIT_REPO_URL
html += element
else:
html += element
return html
@app.route("/api/ping")
def ping():
return "pong"
@app.route("/api/dolar/oficial")
@cache.cached(timeout=CACHE_TIMEOUT_SECONDS)
def getDolarOficial():
dolarValues = getValues(DOLAR_URL)
dolarOficial = formatResponse(dolarValues[0])
return jsonify(dolarOficial)
@app.route("/api/dolar/blue")
@cache.cached(timeout=CACHE_TIMEOUT_SECONDS)
def getDolarBlue():
dolarValues = getValues(DOLAR_URL)
dolarBlue = formatResponse(dolarValues[1])
return jsonify(dolarBlue)
@app.route("/api/euro/oficial")
@cache.cached(timeout=CACHE_TIMEOUT_SECONDS)
def getEuroOficial():
euroValues = getValues(EURO_URL)
euroOficial = formatResponse(euroValues[0])
return jsonify(euroOficial)
@app.route("/api/euro/blue")
@cache.cached(timeout=CACHE_TIMEOUT_SECONDS)
def getEuroBlue():
euroValues = getValues(EURO_URL)
euroBlue = formatResponse(euroValues[1])
return jsonify(euroBlue)
@app.route("/api/real/oficial")
@cache.cached(timeout=CACHE_TIMEOUT_SECONDS)
def getRealOficial():
realValues = getValues(REAL_URL)
realOficial = formatResponse(realValues[0])
return jsonify(realOficial)
@app.route("/api/real/blue")
@cache.cached(timeout=CACHE_TIMEOUT_SECONDS)
def getRealBlue():
realValues = getValues(REAL_URL)
realBlue = formatResponse(realValues[1])
return jsonify(realBlue)
@app.route("/api/busstops")
def getBusStops():
return jsonify(busStopsDict)
@app.route("/api/1")
def getLine1():
return jsonify(lineOne)
@app.route("/api/1B")
def getLine1B():
return jsonify(lineOneB)
@app.route("/api/2")
def getLine2():
return jsonify(lineTwo)
@app.route("/api/3")
def getLine3():
return jsonify(lineThree)
@app.route("/api/4")
def getLine4():
return jsonify(lineFour)
@app.route("/api/5")
def getLine5():
return jsonify(lineFive)
@app.route("/api/6")
def getLine6():
return jsonify(lineSix)
@app.route("/api/linesDict")
def getLines():
return jsonify(linesDict)
@app.route("/api/busZonesTimes/1")
def getBusZonesOne():
return jsonify(busZonesTimesOne)
@app.route("/api/busZonesTimes/1B")
def getBusZonesOneB():
return jsonify(busZonesTimesOneB)
@app.route("/api/busZonesTimes/2")
def getBusZonesTwo():
return jsonify(busZonesTimesTwo)
@app.route("/api/busZonesTimes/3")
def getBusZonesThree():
return jsonify(busZonesTimesThree)
@app.route("/api/busZonesTimes/4")
def getBusZonesFour():
return jsonify(busZonesTimesFour)
@app.route("/api/busZonesTimes/5")
def getBusZonesFive():
return jsonify(busZonesTimesFive)
# --- REST endpoints serving pre-built timetable data held in module-level globals. ---
# Weekday timetable for line 6 (earlier lines' endpoints are defined above this chunk).
@app.route("/api/busZonesTimes/6")
def getBusZonesSix():
    return jsonify(busZonesTimesSix)
# Sunday timetables, one endpoint per line (1, 1B, 2-6).
@app.route("/api/busZonesTimes/1/sunday")
def getBusZonesOneSunday():
    return jsonify(busZonesTimesOneSunday)
@app.route("/api/busZonesTimes/1B/sunday")
def getBusZonesOneBSunday():
    return jsonify(busZonesTimesOneBSunday)
@app.route("/api/busZonesTimes/2/sunday")
def getBusZonesTwoSunday():
    return jsonify(busZonesTimesTwoSunday)
@app.route("/api/busZonesTimes/3/sunday")
def getBusZonesThreeSunday():
    return jsonify(busZonesTimesThreeSunday)
@app.route("/api/busZonesTimes/4/sunday")
def getBusZonesFourSunday():
    return jsonify(busZonesTimesFourSunday)
@app.route("/api/busZonesTimes/5/sunday")
def getBusZonesFiveSunday():
    return jsonify(busZonesTimesFiveSunday)
@app.route("/api/busZonesTimes/6/sunday")
def getBusZonesSixSunday():
    return jsonify(busZonesTimesSixSunday)
# Saturday timetables, one endpoint per line (1, 1B, 2-6).
@app.route("/api/busZonesTimes/1/saturday")
def getBusZonesOneSaturday():
    return jsonify(busZonesTimesOneSaturday)
@app.route("/api/busZonesTimes/1B/saturday")
def getBusZonesOneBSaturday():
    return jsonify(busZonesTimesOneBSaturday)
@app.route("/api/busZonesTimes/2/saturday")
def getBusZonesTwoSaturday():
    return jsonify(busZonesTimesTwoSaturday)
@app.route("/api/busZonesTimes/3/saturday")
def getBusZonesThreeSaturday():
    return jsonify(busZonesTimesThreeSaturday)
@app.route("/api/busZonesTimes/4/saturday")
def getBusZonesFourSaturday():
    return jsonify(busZonesTimesFourSaturday)
@app.route("/api/busZonesTimes/5/saturday")
def getBusZonesFiveSaturday():
    return jsonify(busZonesTimesFiveSaturday)
@app.route("/api/busZonesTimes/6/saturday")
def getBusZonesSixSaturday():
    return jsonify(busZonesTimesSixSaturday)
# External links returned as bare JSON strings.
@app.route("/api/gmaps")
def getGMaps():
    return jsonify("https://www.google.com/maps/d/u/0/viewer?mid=1d5o2MklEFr0DpG_i_mRwcUd9yjc&ll=-31.654431124663883%2C-64.43315245330842&z=15")
@app.route("/api/donacion")
def getDonationPage():
    return jsonify("https://cafecito.app/paragracia")
# Per-line schedule endpoints (note: the handler names drop the "s" of "schedule").
@app.route("/api/1B/schedule")
def get1Bchedule():
    return jsonify(schedule1B)
@app.route("/api/2/schedule")
def get2chedule():
    return jsonify(schedule2)
@app.route("/api/3/schedule")
def get3chedule():
    return jsonify(schedule3)
@app.route("/api/4/schedule")
def get4chedule():
    return jsonify(schedule4)
@app.route("/api/5/schedule")
def get5chedule():
    return jsonify(schedule5)
@app.route("/api/6/schedule")
def get6chedule():
    return jsonify(schedule6)
if __name__ == '__main__':
    # NOTE(review): os.getenv returns a str when PORT is set in the environment;
    # int(os.getenv('PORT', 5000)) would be safer — confirm how PORT is provided.
    app.run(debug=False, port=os.getenv('PORT', 5000))
| true
| true
|
7906402d8b6164a6553114659c0ac051616ffcf6
| 969
|
py
|
Python
|
AdventOfCode/Day24.py
|
JanStoltman/100DaysOfCode
|
1d18b76ed1e3e942e8392006a5d4bfb41484d047
|
[
"MIT"
] | null | null | null |
AdventOfCode/Day24.py
|
JanStoltman/100DaysOfCode
|
1d18b76ed1e3e942e8392006a5d4bfb41484d047
|
[
"MIT"
] | null | null | null |
AdventOfCode/Day24.py
|
JanStoltman/100DaysOfCode
|
1d18b76ed1e3e942e8392006a5d4bfb41484d047
|
[
"MIT"
] | null | null | null |
from Tree import TreeNode
def zdir(root):
    """Classify a component by which of its two ports is zero.

    Returns 0 when the left port is zero, 1 when the right port is zero,
    and 2 when neither port is zero. A (0, 0) component classifies as 0.
    """
    if root[0] == 0:
        return 0  # zero on the left port
    if root[1] == 0:
        return 1  # zero on the right port
    return 2      # no zero port
def add_nodes(root, nodes):
    """Recursively attach every component from *nodes* that fits *root*'s free port.

    A component fits when either of its ports equals root.gfv(); the matching
    component becomes a child subtree built from the remaining components.
    Returns *root* for chaining.
    """
    for idx, node in enumerate(nodes):
        remaining = nodes[:idx] + nodes[idx + 1:]
        # gfv() is queried per comparison, mirroring the original call pattern.
        if node[0] == root.gfv():
            root.addChild(add_nodes(TreeNode(node, 0), remaining))
        elif node[1] == root.gfv():
            root.addChild(add_nodes(TreeNode(node, 1), remaining))
    return root
# Python 2 script (print statements below). Reads "a/b" component lines from Day24DB.
f = open("Day24DB")
cm = []
for l in f.readlines():
    t = l.split('/')
    # Each component: (left port, right port, unused flag).
    cm.append((int(t[0]), int(t[1]), 0))
# Strongest components first.
cm = sorted(cm, key=sum, reverse=True)
# Components with a zero port can start a bridge; the rest are candidates.
# NOTE(review): under Python 2, filter()/map() return lists, which matters because
# `cm` is re-iterated inside the loop below — do not port to Python 3 without list().
begs = filter(lambda x: x[0] == 0 or x[1] == 0 , cm)
cm = filter(lambda x: not(x[0] == 0 or x[1] == 0), cm)
rts = map(lambda x: TreeNode(x, zdir(x)), begs)
# A dummy (0,0) root joins every starting component into a single tree.
t = TreeNode((0,0),2)
for root in rts:
    t.addChild(add_nodes(root, filter(lambda x: not(x[0] == root.l and x[1] == root.r), cm)))
print "Looking for max"
print t.gwgh()
print t.glen()
print t.glnw()
| 23.634146
| 90
| 0.593395
|
from Tree import TreeNode
def zdir(root):
if root[0] == 0:
return 0
elif root[1] ==0:
return 1
else:
return 2
def add_nodes(root, nodes):
for i in range(0,len(nodes)):
if nodes[i][0] == root.gfv():
root.addChild(add_nodes(TreeNode(nodes[i], 0), nodes[0:i] + nodes[i+1:]))
elif nodes[i][1] == root.gfv():
root.addChild(add_nodes(TreeNode(nodes[i], 1), nodes[0:i] + nodes[i+1:]))
return root
f = open("Day24DB")
cm = []
for l in f.readlines():
t = l.split('/')
cm.append((int(t[0]), int(t[1]), 0))
cm = sorted(cm, key=sum, reverse=True)
begs = filter(lambda x: x[0] == 0 or x[1] == 0 , cm)
cm = filter(lambda x: not(x[0] == 0 or x[1] == 0), cm)
rts = map(lambda x: TreeNode(x, zdir(x)), begs)
t = TreeNode((0,0),2)
for root in rts:
t.addChild(add_nodes(root, filter(lambda x: not(x[0] == root.l and x[1] == root.r), cm)))
print "Looking for max"
print t.gwgh()
print t.glen()
print t.glnw()
| false
| true
|
7906405ca6b73f3096cb749f8b7799c56afd83f5
| 1,194
|
py
|
Python
|
azure-mgmt-web/azure/mgmt/web/models/resource_metric_name.py
|
v-Ajnava/azure-sdk-for-python
|
a1f6f80eb5869c5b710e8bfb66146546697e2a6f
|
[
"MIT"
] | 4
|
2016-06-17T23:25:29.000Z
|
2022-03-30T22:37:45.000Z
|
azure/mgmt/web/models/resource_metric_name.py
|
EnjoyLifeFund/Debian_py36_packages
|
1985d4c73fabd5f08f54b922e73a9306e09c77a5
|
[
"BSD-3-Clause",
"BSD-2-Clause",
"MIT"
] | 2
|
2016-09-30T21:40:24.000Z
|
2017-11-10T18:16:18.000Z
|
azure/mgmt/web/models/resource_metric_name.py
|
EnjoyLifeFund/Debian_py36_packages
|
1985d4c73fabd5f08f54b922e73a9306e09c77a5
|
[
"BSD-3-Clause",
"BSD-2-Clause",
"MIT"
] | 3
|
2016-05-03T20:49:46.000Z
|
2017-10-05T21:05:27.000Z
|
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from msrest.serialization import Model
class ResourceMetricName(Model):
    """Name of a metric for any resource.

    Both fields are populated by the server; anything the client sends in
    them is ignored.

    :ivar value: metric name value.
    :vartype value: str
    :ivar localized_value: localized metric name value.
    :vartype localized_value: str
    """

    # Server-populated attributes are marked read-only for msrest validation.
    _validation = {
        'value': {'readonly': True},
        'localized_value': {'readonly': True},
    }

    # Wire-format mapping consumed by msrest (de)serialization.
    _attribute_map = {
        'value': {'key': 'value', 'type': 'str'},
        'localized_value': {'key': 'localizedValue', 'type': 'str'},
    }

    def __init__(self):
        # Left unset client-side; the service fills these in on responses.
        self.value = None
        self.localized_value = None
| 29.85
| 76
| 0.579564
|
from msrest.serialization import Model
class ResourceMetricName(Model):
_validation = {
'value': {'readonly': True},
'localized_value': {'readonly': True},
}
_attribute_map = {
'value': {'key': 'value', 'type': 'str'},
'localized_value': {'key': 'localizedValue', 'type': 'str'},
}
def __init__(self):
self.value = None
self.localized_value = None
| true
| true
|
790640758e1e32a9bff9aff50842c2b0edfa79dd
| 6,688
|
py
|
Python
|
rasa_core/policies/keras_policy.py
|
KeshavInamdar/rasa_core
|
432638e9df53e2b5d68771ea5cf6af6ef1ac6b72
|
[
"Apache-2.0"
] | 1
|
2018-07-03T16:04:17.000Z
|
2018-07-03T16:04:17.000Z
|
rasa_core/policies/keras_policy.py
|
KeshavInamdar/rasa_core
|
432638e9df53e2b5d68771ea5cf6af6ef1ac6b72
|
[
"Apache-2.0"
] | null | null | null |
rasa_core/policies/keras_policy.py
|
KeshavInamdar/rasa_core
|
432638e9df53e2b5d68771ea5cf6af6ef1ac6b72
|
[
"Apache-2.0"
] | 2
|
2019-02-18T07:38:26.000Z
|
2021-07-17T16:24:03.000Z
|
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
import io
import json
import logging
import os
import warnings
from builtins import str
from typing import Any
from rasa_core import utils
from rasa_core.domain import Domain
from rasa_core.policies import Policy
from rasa_core.training.data import DialogueTrainingData
logger = logging.getLogger(__name__)
class KerasPolicy(Policy):
    """Dialogue policy backed by a Keras LSTM over featurized tracker history."""

    # Advertises support for the interactive-learning loop (see continue_training).
    SUPPORTS_ONLINE_TRAINING = True

    def __init__(self, model=None, graph=None, current_epoch=0,
                 featurizer=None, max_history=None):
        """Wrap an (optional) pre-built Keras model.

        When running on the TensorFlow backend and no graph is supplied,
        the current default TF graph is captured so predictions can later
        run under it (see predict_action_probabilities).
        """
        import keras
        super(KerasPolicy, self).__init__(featurizer, max_history)
        if KerasPolicy.is_using_tensorflow() and not graph:
            self.graph = keras.backend.tf.get_default_graph()
        else:
            self.graph = graph
        self.model = model
        self.current_epoch = current_epoch

    @property
    def max_len(self):
        """Input sequence length of the current model, or None when untrained."""
        if self.model:
            # batch_input_shape is (batch, time, features); index 1 is time.
            return self.model.layers[0].batch_input_shape[1]
        else:
            return None

    @staticmethod
    def is_using_tensorflow():
        """True when Keras is configured with the TensorFlow backend."""
        import keras
        return keras.backend._BACKEND == "tensorflow"

    def predict_action_probabilities(self, tracker, domain):
        """Return a probability per domain action for the tracker's state."""
        x = self.featurize(tracker, domain)
        # we need to add a batch dimension with length 1
        x = x.reshape((1, self.max_len, x.shape[1]))
        # Run prediction under the captured TF graph when one exists,
        # so it works from threads other than the one that built the model.
        if KerasPolicy.is_using_tensorflow() and self.graph is not None:
            with self.graph.as_default():
                y_pred = self.model.predict(x, batch_size=1)
        else:
            y_pred = self.model.predict(x, batch_size=1)
        return y_pred[-1].tolist()

    def _build_model(self, num_features, num_actions, max_history_len):
        """Deprecated stub kept for backward compatibility; does nothing."""
        warnings.warn("Deprecated, use `model_architecture` instead.",
                      DeprecationWarning, stacklevel=2)
        return

    def model_architecture(self, num_features, num_actions, max_history_len):
        """Build a keras model and return a compiled model.

        :param num_features: size of each per-turn feature vector
        :param num_actions: number of output classes (softmax units)
        :param max_history_len: The maximum number of historical
                                turns used to decide on next action
        """
        from keras.layers import LSTM, Activation, Masking, Dense
        from keras.models import Sequential
        n_hidden = 32  # Neural Net and training params
        batch_shape = (None, max_history_len, num_features)
        # Build Model
        model = Sequential()
        # Mask padded timesteps (padding value is -1).
        model.add(Masking(-1, batch_input_shape=batch_shape))
        model.add(LSTM(n_hidden, batch_input_shape=batch_shape, dropout=0.2))
        model.add(Dense(input_dim=n_hidden, units=num_actions))
        model.add(Activation('softmax'))
        model.compile(loss='categorical_crossentropy',
                      optimizer='rmsprop',
                      metrics=['accuracy'])
        # NOTE(review): Keras's Model.summary() prints and returns None, so this
        # debug line likely logs None — confirm whether the summary should be
        # captured explicitly instead.
        logger.debug(model.summary())
        return model

    def train(self, training_data, domain, **kwargs):
        # type: (DialogueTrainingData, Domain, **Any) -> None
        """Build a fresh model and fit it on the (shuffled) training data.

        Extra kwargs are forwarded to Keras `fit` (e.g. epochs,
        validation_split); `epochs` also seeds `current_epoch` (default 10).
        """
        self.model = self.model_architecture(domain.num_features,
                                             domain.num_actions,
                                             training_data.max_history())
        shuffled_X, shuffled_y = training_data.shuffled(domain)
        validation_split = kwargs.get("validation_split", 0.0)
        logger.info("Fitting model with {} total samples and a validation "
                    "split of {}".format(training_data.num_examples(),
                                         validation_split))
        self.model.fit(shuffled_X, shuffled_y, **kwargs)
        self.current_epoch = kwargs.get("epochs", 10)
        logger.info("Done fitting keras policy model")

    def continue_training(self, training_data, domain, **kwargs):
        """Online-training step: run one extra epoch on the provided data."""
        # fit to one extra example
        self.current_epoch += 1
        self.model.fit(training_data.X, training_data.y_as_one_hot(domain),
                       epochs=self.current_epoch + 1,
                       batch_size=1,
                       verbose=0,
                       initial_epoch=self.current_epoch)

    def _persist_configuration(self, config_file):
        """Write the metadata JSON that `load` reads back (file names + epoch)."""
        model_config = {
            "arch": "keras_arch.json",
            "weights": "keras_weights.h5",
            "epochs": self.current_epoch}
        utils.dump_obj_as_json_to_file(config_file, model_config)

    def persist(self, path):
        """Save architecture, weights and config under *path* (no-op warning if untrained)."""
        if self.model:
            arch_file = os.path.join(path, 'keras_arch.json')
            weights_file = os.path.join(path, 'keras_weights.h5')
            config_file = os.path.join(path, 'keras_policy.json')
            # makes sure the model directory exists
            utils.create_dir_for_file(weights_file)
            utils.dump_obj_as_str_to_file(arch_file, self.model.to_json())
            self._persist_configuration(config_file)
            self.model.save_weights(weights_file, overwrite=True)
        else:
            warnings.warn("Persist called without a trained model present. "
                          "Nothing to persist then!")

    @classmethod
    def _load_model_arch(cls, path, meta):
        """Rebuild the (weightless) Keras model from the persisted JSON, or None."""
        from keras.models import model_from_json
        arch_file = os.path.join(path, meta["arch"])
        if os.path.isfile(arch_file):
            with io.open(arch_file) as f:
                model = model_from_json(f.read())
            return model
        else:
            return None

    @classmethod
    def _load_weights_for_model(cls, path, model, meta):
        """Load persisted weights into *model* when both model and file exist."""
        weights_file = os.path.join(path, meta["weights"])
        if model is not None and os.path.exists(weights_file):
            model.load_weights(weights_file)
        return model

    @classmethod
    def load(cls, path, featurizer, max_history):
        """Restore a persisted policy from *path*.

        Falls back to an untrained policy when no metadata file is present;
        raises when the directory itself does not exist.
        """
        if os.path.exists(path):
            meta_path = os.path.join(path, "keras_policy.json")
            if os.path.isfile(meta_path):
                with io.open(meta_path) as f:
                    meta = json.loads(f.read())
                model_arch = cls._load_model_arch(path, meta)
                return cls(
                    cls._load_weights_for_model(path, model_arch, meta),
                    current_epoch=meta["epochs"],
                    max_history=max_history,
                    featurizer=featurizer
                )
            else:
                return cls(max_history=max_history,
                           featurizer=featurizer)
        else:
            raise Exception("Failed to load dialogue model. Path {} "
                            "doesn't exist".format(os.path.abspath(path)))
| 37.363128
| 77
| 0.612141
|
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
import io
import json
import logging
import os
import warnings
from builtins import str
from typing import Any
from rasa_core import utils
from rasa_core.domain import Domain
from rasa_core.policies import Policy
from rasa_core.training.data import DialogueTrainingData
logger = logging.getLogger(__name__)
class KerasPolicy(Policy):
SUPPORTS_ONLINE_TRAINING = True
def __init__(self, model=None, graph=None, current_epoch=0,
featurizer=None, max_history=None):
import keras
super(KerasPolicy, self).__init__(featurizer, max_history)
if KerasPolicy.is_using_tensorflow() and not graph:
self.graph = keras.backend.tf.get_default_graph()
else:
self.graph = graph
self.model = model
self.current_epoch = current_epoch
@property
def max_len(self):
if self.model:
return self.model.layers[0].batch_input_shape[1]
else:
return None
@staticmethod
def is_using_tensorflow():
import keras
return keras.backend._BACKEND == "tensorflow"
def predict_action_probabilities(self, tracker, domain):
x = self.featurize(tracker, domain)
x = x.reshape((1, self.max_len, x.shape[1]))
if KerasPolicy.is_using_tensorflow() and self.graph is not None:
with self.graph.as_default():
y_pred = self.model.predict(x, batch_size=1)
else:
y_pred = self.model.predict(x, batch_size=1)
return y_pred[-1].tolist()
def _build_model(self, num_features, num_actions, max_history_len):
warnings.warn("Deprecated, use `model_architecture` instead.",
DeprecationWarning, stacklevel=2)
return
def model_architecture(self, num_features, num_actions, max_history_len):
from keras.layers import LSTM, Activation, Masking, Dense
from keras.models import Sequential
n_hidden = 32
batch_shape = (None, max_history_len, num_features)
model = Sequential()
model.add(Masking(-1, batch_input_shape=batch_shape))
model.add(LSTM(n_hidden, batch_input_shape=batch_shape, dropout=0.2))
model.add(Dense(input_dim=n_hidden, units=num_actions))
model.add(Activation('softmax'))
model.compile(loss='categorical_crossentropy',
optimizer='rmsprop',
metrics=['accuracy'])
logger.debug(model.summary())
return model
def train(self, training_data, domain, **kwargs):
self.model = self.model_architecture(domain.num_features,
domain.num_actions,
training_data.max_history())
shuffled_X, shuffled_y = training_data.shuffled(domain)
validation_split = kwargs.get("validation_split", 0.0)
logger.info("Fitting model with {} total samples and a validation "
"split of {}".format(training_data.num_examples(),
validation_split))
self.model.fit(shuffled_X, shuffled_y, **kwargs)
self.current_epoch = kwargs.get("epochs", 10)
logger.info("Done fitting keras policy model")
def continue_training(self, training_data, domain, **kwargs):
self.current_epoch += 1
self.model.fit(training_data.X, training_data.y_as_one_hot(domain),
epochs=self.current_epoch + 1,
batch_size=1,
verbose=0,
initial_epoch=self.current_epoch)
def _persist_configuration(self, config_file):
model_config = {
"arch": "keras_arch.json",
"weights": "keras_weights.h5",
"epochs": self.current_epoch}
utils.dump_obj_as_json_to_file(config_file, model_config)
def persist(self, path):
if self.model:
arch_file = os.path.join(path, 'keras_arch.json')
weights_file = os.path.join(path, 'keras_weights.h5')
config_file = os.path.join(path, 'keras_policy.json')
utils.create_dir_for_file(weights_file)
utils.dump_obj_as_str_to_file(arch_file, self.model.to_json())
self._persist_configuration(config_file)
self.model.save_weights(weights_file, overwrite=True)
else:
warnings.warn("Persist called without a trained model present. "
"Nothing to persist then!")
@classmethod
def _load_model_arch(cls, path, meta):
from keras.models import model_from_json
arch_file = os.path.join(path, meta["arch"])
if os.path.isfile(arch_file):
with io.open(arch_file) as f:
model = model_from_json(f.read())
return model
else:
return None
@classmethod
def _load_weights_for_model(cls, path, model, meta):
weights_file = os.path.join(path, meta["weights"])
if model is not None and os.path.exists(weights_file):
model.load_weights(weights_file)
return model
@classmethod
def load(cls, path, featurizer, max_history):
if os.path.exists(path):
meta_path = os.path.join(path, "keras_policy.json")
if os.path.isfile(meta_path):
with io.open(meta_path) as f:
meta = json.loads(f.read())
model_arch = cls._load_model_arch(path, meta)
return cls(
cls._load_weights_for_model(path, model_arch, meta),
current_epoch=meta["epochs"],
max_history=max_history,
featurizer=featurizer
)
else:
return cls(max_history=max_history,
featurizer=featurizer)
else:
raise Exception("Failed to load dialogue model. Path {} "
"doesn't exist".format(os.path.abspath(path)))
| true
| true
|
790640dc9d95970eb4f04f76846efe08f0aca2b9
| 6,347
|
py
|
Python
|
pixiedust/display/chart/renderers/commonOptions.py
|
elgalu/pixiedust
|
e98b91b6ae5a974cbcda985fa305d0cb25b88ba0
|
[
"Apache-2.0"
] | null | null | null |
pixiedust/display/chart/renderers/commonOptions.py
|
elgalu/pixiedust
|
e98b91b6ae5a974cbcda985fa305d0cb25b88ba0
|
[
"Apache-2.0"
] | null | null | null |
pixiedust/display/chart/renderers/commonOptions.py
|
elgalu/pixiedust
|
e98b91b6ae5a974cbcda985fa305d0cb25b88ba0
|
[
"Apache-2.0"
] | 1
|
2018-04-13T18:25:52.000Z
|
2018-04-13T18:25:52.000Z
|
# -------------------------------------------------------------------------------
# Copyright IBM Corp. 2017
#
# Licensed under the Apache License, Version 2.0 (the 'License');
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an 'AS IS' BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# -------------------------------------------------------------------------------
import math
import numpy
import pixiedust
myLogger = pixiedust.getLogger(__name__)
def append(displayObject, arr, option):
    """Append *option* to *arr* only when it is set and accepted by the display object."""
    if option is None:
        return
    if displayObject.acceptOption(option["name"]):
        arr.append(option)
def chartSize():
    """Option descriptor for the chart-size percentage slider (50-100, default 100)."""
    metadata = {
        'type': 'slider',
        'max': 100,
        'min': 50,
        'default': 100,
    }
    return {'name': 'chartsize', 'description': 'Chart Size', 'metadata': metadata}
def clusterBy(displayObject):
    """Option descriptor for choosing the field the chart data is clustered by.

    Candidate fields are every field not already used as a key or value;
    the validate callable re-checks against the display object at call time.
    """
    keys = displayObject.getKeyFields()
    values = displayObject.getValueFields()
    candidates = sorted(f for f in displayObject.getFieldNames()
                        if f not in keys and f not in values)
    return {
        'name': 'clusterby',
        'description': 'Cluster By',
        'refresh': True,
        'metadata': {
            'type': "dropdown",
            'values': ["None"] + candidates,
            'default': ""
        },
        'validate': lambda option: (
            option in displayObject.getFieldNames()
            and option not in displayObject.getKeyFields()
            and option not in displayObject.getValueFields(),
            "Cluster By value is already used in keys or values for this chart")
    }
def timeSeries(displayObject):
    """Option descriptor enabling time-series rendering, or None when not applicable.

    Applicable when there is exactly one key field present in the working
    frame, and it is either non-datetime or the option is already enabled.
    """
    if len(displayObject.getKeyFields()) != 1:
        return None
    pdf = displayObject.getWorkingPandasDataFrame()
    field = displayObject.getKeyFields()[0]
    if field not in pdf:
        return None
    dtype = pdf[field].dtype.type
    already_on = displayObject.options.get("timeseries", 'false') == 'true'
    if dtype is numpy.datetime64 and not already_on:
        return None
    return {
        'name': 'timeseries',
        'description': 'Time Series',
        'metadata': {
            'type': 'checkbox',
            'default': 'false'
        }
    }
def barChart(displayObject):
    """Build the option descriptors shown for bar charts.

    :param displayObject: chart display handler providing field metadata and options.
    :returns: list of option-descriptor dicts.
    """
    options = []
    options.append(chartSize())
    options.append(clusterBy(displayObject))
    options.append(timeSeries(displayObject)) if False else append(displayObject, options, timeSeries(displayObject))
    # Renderers may opt out of the orientation selector via `no_orientation`.
    if not hasattr(displayObject, 'no_orientation') or displayObject.no_orientation is not True:
        options.append({
            'name': 'orientation',
            'description': 'Orientation',
            'metadata': {
                'type': 'dropdown',
                'values': ['vertical', 'horizontal'],
                'default': "vertical"
            }
        })
    # Idiom fix: `is not None` instead of `!= None`. Grouping style and legend
    # only make sense with a cluster field or several value fields.
    if displayObject.options.get("clusterby") is not None or len(displayObject.getValueFields()) > 1:
        options.append({
            'name': 'charttype',
            'description': 'Type',
            'metadata': {
                'type': 'dropdown',
                'values': ['grouped', 'stacked', 'subplots'],
                'default': "grouped"
            }
        })
        options.append({
            'name': 'legend',
            'description': 'Show legend',
            'metadata': {
                'type': 'checkbox',
                'default': "true"
            }
        })
    options.append({
        'name': 'sortby',
        'description': 'Sort By',
        'metadata': {
            'type': 'dropdown',
            'values': ['Keys ASC', 'Keys DESC', 'Values ASC', 'Values DESC'],
            'default': 'Keys ASC'
        }
    })
    return options
def lineChart(displayObject):
    """Build the option descriptors shown for line charts.

    :param displayObject: chart display handler providing field metadata and options.
    :returns: list of option-descriptor dicts.
    """
    options = []
    options.append(chartSize())
    options.append(clusterBy(displayObject))
    append(displayObject, options, timeSeries(displayObject))
    # Idiom fix: `is not None` instead of `!= None`. Sub-chart layout and legend
    # only make sense with a cluster field or several value fields.
    if displayObject.options.get("clusterby") is not None or len(displayObject.getValueFields()) > 1:
        options.append({
            'name': 'lineChartType',
            'description': 'Type',
            'metadata': {
                'type': 'dropdown',
                'values': ['grouped', 'subplots'],
                'default': "grouped"
            }
        })
        options.append({
            'name': 'legend',
            'description': 'Show legend',
            'metadata': {
                'type': 'checkbox',
                'default': "false"
            }
        })
    options.append({
        'name': 'logx',
        'description': 'log scale on x',
        'metadata': {
            'type': 'checkbox',
            'default': "false"
        }
    })
    options.append({
        'name': 'logy',
        'description': 'log scale on y',
        'metadata': {
            'type': 'checkbox',
            'default': "false"
        }
    })
    return options
def histogram(displayObject):
    """Build the option descriptors shown for histogram charts."""
    options = [chartSize()]
    # A chart-type selector is only offered when several value fields are plotted.
    if len(displayObject.getValueFields()) > 1:
        append(displayObject, options, {
            'name': 'histoChartType',
            'description': 'Type',
            'metadata': {
                'type': 'dropdown',
                'values': ['stacked', 'subplots'],
                'default': "stacked"
            }
        })
    working = displayObject.getWorkingPandasDataFrame()
    row_count = len(working.index)
    # sqrt(n) is the default bin-count heuristic.
    default_bins = math.sqrt(row_count)
    distinct = len(working.groupby(displayObject.getValueFields()[0]).size())
    options.append({
        'name': 'binsize',
        'description': 'Bin Count',
        'metadata': {
            'type': 'slider',
            'max': int(max(distinct, default_bins) + 10),
            'min': int(max((min(distinct, default_bins) - 10), 2)),
            'default': int(default_bins)
        }
    })
    return options
def pieChart(displayObject):
    """Pie charts expose only the common chart-size option."""
    return [chartSize()]
def scatterPlot(displayObject):
    """Scatter plots expose only the common chart-size option."""
    return [chartSize()]
# Registry mapping a chart-kind name to its option-builder function.
commonOptions = {}
for f in (barChart, lineChart, histogram, pieChart, scatterPlot):
    commonOptions[f.__name__] = f
| 31.26601
| 169
| 0.540728
|
import math
import numpy
import pixiedust
myLogger = pixiedust.getLogger(__name__)
def append(displayObject, arr, option):
if option is not None and displayObject.acceptOption(option["name"]):
arr.append(option)
def chartSize():
return {
'name': 'chartsize',
'description': 'Chart Size',
'metadata': {
'type': 'slider',
'max': 100,
'min': 50,
'default': 100
}
}
def clusterBy(displayObject):
return {
'name': 'clusterby',
'description': 'Cluster By',
'refresh': True,
'metadata': {
'type': "dropdown",
'values': ["None"] + sorted([f for f in displayObject.getFieldNames() if f not in displayObject.getKeyFields() and f not in displayObject.getValueFields()]),
'default': ""
},
'validate': lambda option:\
(option in displayObject.getFieldNames() and option not in displayObject.getKeyFields() and option not in displayObject.getValueFields(),\
"Cluster By value is already used in keys or values for this chart")
}
def timeSeries(displayObject):
if len(displayObject.getKeyFields()) == 1:
pdf = displayObject.getWorkingPandasDataFrame()
field = displayObject.getKeyFields()[0]
dtype = pdf[field].dtype.type if field in pdf else None
existingValue = displayObject.options.get("timeseries", 'false')
if dtype is not None and (dtype is not numpy.datetime64 or existingValue == 'true'):
return {
'name': 'timeseries',
'description': 'Time Series',
'metadata':{
'type': 'checkbox',
'default': 'false'
}
}
def barChart(displayObject):
options = []
options.append(chartSize())
options.append(clusterBy(displayObject))
append(displayObject, options, timeSeries(displayObject))
if not hasattr(displayObject, 'no_orientation') or displayObject.no_orientation is not True:
options.append({
'name': 'orientation',
'description': 'Orientation',
'metadata': {
'type': 'dropdown',
'values': ['vertical', 'horizontal'],
'default': "vertical"
}
})
if displayObject.options.get("clusterby") != None or len(displayObject.getValueFields()) > 1:
options.append({
'name': 'charttype',
'description': 'Type',
'metadata': {
'type': 'dropdown',
'values': ['grouped', 'stacked', 'subplots'],
'default': "grouped"
}
})
options.append({
'name': 'legend',
'description': 'Show legend',
'metadata': {
'type': 'checkbox',
'default': "true"
}
})
options.append({
'name': 'sortby',
'description': 'Sort By',
'metadata': {
'type': 'dropdown',
'values': ['Keys ASC', 'Keys DESC', 'Values ASC', 'Values DESC'],
'default': 'Keys ASC'
}
})
return options
def lineChart(displayObject):
options = []
options.append(chartSize())
options.append(clusterBy(displayObject))
append(displayObject, options, timeSeries(displayObject))
if displayObject.options.get("clusterby") != None or len(displayObject.getValueFields()) > 1:
options.append({
'name': 'lineChartType',
'description': 'Type',
'metadata': {
'type': 'dropdown',
'values': ['grouped', 'subplots'],
'default': "grouped"
}
})
options.append({
'name': 'legend',
'description': 'Show legend',
'metadata': {
'type': 'checkbox',
'default': "false"
}
})
options.append({
'name': 'logx',
'description': 'log scale on x',
'metadata': {
'type': 'checkbox',
'default': "false"
}
})
options.append({
'name': 'logy',
'description': 'log scale on y',
'metadata': {
'type': 'checkbox',
'default': "false"
}
})
return options
def histogram(displayObject):
options = []
options.append(chartSize())
if len(displayObject.getValueFields()) > 1:
append(displayObject, options, {
'name': 'histoChartType',
'description': 'Type',
'metadata': {
'type': 'dropdown',
'values': ['stacked', 'subplots'],
'default': "stacked"
}
})
count = len(displayObject.getWorkingPandasDataFrame().index)
default = math.sqrt(count)
vals = len(displayObject.getWorkingPandasDataFrame().groupby(displayObject.getValueFields()[0]).size())
options.append({
'name': 'binsize',
'description': 'Bin Count',
'metadata': {
'type': 'slider',
'max': int(max(vals, default) + 10),
'min': int(max((min(vals, default) - 10), 2)),
'default': int(default)
}
})
return options
def pieChart(displayObject):
options = []
options.append(chartSize())
return options
def scatterPlot(displayObject):
options = []
options.append(chartSize())
return options
commonOptions = {}
for f in [barChart,lineChart,histogram,pieChart,scatterPlot]:
commonOptions.update({f.__name__:f})
| true
| true
|
79064114e4ebf50fb5a7286102992e3f802b6549
| 1,293
|
py
|
Python
|
tools/tags/storage.py
|
polaretto/topocity
|
a0b2f2f0f2d109bc7ffdf5b1f6b9367cf7b11fbe
|
[
"Unlicense"
] | 3
|
2019-09-18T12:17:37.000Z
|
2020-11-26T13:42:07.000Z
|
tools/tags/storage.py
|
ennioVisco/topocity
|
a0b2f2f0f2d109bc7ffdf5b1f6b9367cf7b11fbe
|
[
"Unlicense"
] | null | null | null |
tools/tags/storage.py
|
ennioVisco/topocity
|
a0b2f2f0f2d109bc7ffdf5b1f6b9367cf7b11fbe
|
[
"Unlicense"
] | null | null | null |
import sys
from os import listdir
from os.path import isfile, join, dirname, realpath
import struct
import gzip
def list_dir(d):
    """Return the names of the regular files directly inside directory *d*."""
    entries = listdir(d)
    return [name for name in entries if isfile(join(d, name))]
def store(p, file):
    """Write text *file* to path *p* as UTF-8, XML-escaping unencodable characters.

    Reports any failure and re-raises it. Fixes: the original used a bare
    `except:` (catches KeyboardInterrupt/SystemExit) and never closed the handle.
    """
    try:
        with open(p, "w", encoding="utf-8", errors="xmlcharrefreplace") as output_file:
            output_file.write(file)
    except Exception:
        print("Unable to store the file. Error:", sys.exc_info()[0])
        raise
def store_bin(p, file):
    """Write *file* to path *p* in binary form.

    ints are packed as a native 4-byte int; strings are UTF-8 encoded.
    Fixes: the original passed a str straight to a 'wb' handle, which
    raises TypeError at runtime — the str branch was unusable.
    """
    with open(p, 'wb') as f:
        if isinstance(file, int):
            f.write(struct.pack('i', file))  # write an int
        elif isinstance(file, str):
            f.write(file.encode('utf-8'))    # write a string (encoded)
        else:
            raise TypeError('Can only write str or int')
def load(p, compression=None):
    """Read and return the contents of *p*.

    With compression 'gz'/'gzip' the file is opened via gzip and bytes are
    returned; otherwise the file is read as UTF-8 text and a str is returned.
    """
    if compression in ('gz', 'gzip'):
        handle = gzip.open(p, 'rb')
    else:
        handle = open(p, mode="r", encoding="utf-8")
    with handle as f:
        return f.read()
def to_string(data: bytes, encoding=None):
    """Decode *data* to str using *encoding*, defaulting to UTF-8 when None."""
    if encoding is None:
        encoding = "utf-8"
    return data.decode(encoding)
def store_list(p, files, file_names):
    """Store each entry of *files* under path prefix *p* plus the matching name.

    Fixes: the original iterated `for i in len(files)` (TypeError: 'int' is
    not iterable) and carried a pointless manual `i += 1`.
    """
    for content, name in zip(files, file_names):
        store(p + name, content)
def path(file):
    """Absolute directory containing *file*, normalized to forward slashes."""
    directory = dirname(realpath(file))
    return directory.replace("\\", "/")
| 23.089286
| 80
| 0.595514
|
import sys
from os import listdir
from os.path import isfile, join, dirname, realpath
import struct
import gzip
def list_dir(d):
return [f for f in listdir(d) if isfile(join(d, f))]
def store(p, file):
try:
output_file = open(p, "w", encoding="utf-8", errors="xmlcharrefreplace")
output_file.write(file)
except:
print("Unable to store the file. Error:", sys.exc_info()[0])
raise
def store_bin(p, file):
with open(p, 'wb') as f:
if isinstance(file, int):
f.write(struct.pack('i', file))
elif isinstance(file, str):
f.write(file)
else:
raise TypeError('Can only write str or int')
def load(p, compression=None):
if compression == 'gz' or compression == 'gzip':
f = gzip.open(p, 'rb')
else:
f = open(p, mode="r", encoding="utf-8")
content = f.read()
f.close()
return content
def to_string(data: bytes, encoding=None):
if encoding is None:
return data.decode("utf-8")
return data.decode(encoding)
def store_list(p, files, file_names):
for i in len(files):
store(p + file_names[i], files[i])
i += 1
def path(file):
return dirname(realpath(file)).replace("\\", "/")
| true
| true
|
7906412b9fa10fe80d8adb057a6cd4ac7eb08bdf
| 3,387
|
py
|
Python
|
tests/providers/amazon/aws/hooks/test_cloud_formation.py
|
daemon-demon/airflow
|
6f96e81f0123b30750fb68ec496246023bf63f35
|
[
"Apache-2.0"
] | 4
|
2020-02-16T18:13:54.000Z
|
2021-01-01T03:22:19.000Z
|
tests/providers/amazon/aws/hooks/test_cloud_formation.py
|
daemon-demon/airflow
|
6f96e81f0123b30750fb68ec496246023bf63f35
|
[
"Apache-2.0"
] | 20
|
2021-01-23T12:33:08.000Z
|
2021-12-07T22:30:37.000Z
|
tests/providers/amazon/aws/hooks/test_cloud_formation.py
|
daemon-demon/airflow
|
6f96e81f0123b30750fb68ec496246023bf63f35
|
[
"Apache-2.0"
] | 2
|
2020-10-23T18:55:05.000Z
|
2022-02-16T21:53:10.000Z
|
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
#
import json
import unittest
from airflow.providers.amazon.aws.hooks.cloud_formation import AWSCloudFormationHook
try:
from moto import mock_cloudformation
except ImportError:
mock_cloudformation = None
@unittest.skipIf(mock_cloudformation is None, 'moto package not present')
class TestAWSCloudFormationHook(unittest.TestCase):
    """Tests for AWSCloudFormationHook, backed by moto's mocked CloudFormation."""

    def setUp(self):
        self.hook = AWSCloudFormationHook(aws_conn_id='aws_default')

    def create_stack(self, stack_name):
        """Create a small fixture stack under *stack_name* via the hook."""
        template = {
            'Resources': {"myResource": {"Type": "emr", "Properties": {"myProperty": "myPropertyValue"}}}
        }
        stack_params = {
            'TimeoutInMinutes': 15,
            'TemplateBody': json.dumps(template),
            'Parameters': [{'ParameterKey': 'myParam', 'ParameterValue': 'myParamValue'}],
        }
        self.hook.create_stack(stack_name=stack_name, params=stack_params)

    @mock_cloudformation
    def test_get_conn_returns_a_boto3_connection(self):
        conn = self.hook.get_conn()
        self.assertIsNotNone(conn.describe_stacks())

    @mock_cloudformation
    def test_get_stack_status(self):
        stack_name = 'my_test_get_stack_status_stack'
        # Unknown stack -> no status.
        self.assertIsNone(self.hook.get_stack_status(stack_name=stack_name))
        self.create_stack(stack_name)
        status = self.hook.get_stack_status(stack_name=stack_name)
        self.assertEqual(status, 'CREATE_COMPLETE', 'Incorrect stack status returned.')

    @mock_cloudformation
    def test_create_stack(self):
        stack_name = 'my_test_create_stack_stack'
        self.create_stack(stack_name)
        all_stacks = self.hook.get_conn().describe_stacks()['Stacks']
        self.assertGreater(len(all_stacks), 0, 'CloudFormation should have stacks')
        matches = [s for s in all_stacks if s['StackName'] == stack_name]
        self.assertEqual(len(matches), 1, f'stack with name {stack_name} should exist')
        self.assertEqual(matches[0]['StackStatus'], 'CREATE_COMPLETE', 'Stack should be in status CREATE_COMPLETE')

    @mock_cloudformation
    def test_delete_stack(self):
        stack_name = 'my_test_delete_stack_stack'
        self.create_stack(stack_name)
        self.hook.delete_stack(stack_name=stack_name)
        remaining = self.hook.get_conn().describe_stacks()['Stacks']
        matches = [s for s in remaining if s['StackName'] == stack_name]
        self.assertEqual(len(matches), 0, f'stack with name {stack_name} should not exist')
| 38.05618
| 110
| 0.708001
|
import json
import unittest
from airflow.providers.amazon.aws.hooks.cloud_formation import AWSCloudFormationHook
try:
from moto import mock_cloudformation
except ImportError:
mock_cloudformation = None
@unittest.skipIf(mock_cloudformation is None, 'moto package not present')
class TestAWSCloudFormationHook(unittest.TestCase):
def setUp(self):
self.hook = AWSCloudFormationHook(aws_conn_id='aws_default')
def create_stack(self, stack_name):
timeout = 15
template_body = json.dumps(
{'Resources': {"myResource": {"Type": "emr", "Properties": {"myProperty": "myPropertyValue"}}}}
)
self.hook.create_stack(
stack_name=stack_name,
params={
'TimeoutInMinutes': timeout,
'TemplateBody': template_body,
'Parameters': [{'ParameterKey': 'myParam', 'ParameterValue': 'myParamValue'}],
},
)
@mock_cloudformation
def test_get_conn_returns_a_boto3_connection(self):
self.assertIsNotNone(self.hook.get_conn().describe_stacks())
@mock_cloudformation
def test_get_stack_status(self):
stack_name = 'my_test_get_stack_status_stack'
stack_status = self.hook.get_stack_status(stack_name=stack_name)
self.assertIsNone(stack_status)
self.create_stack(stack_name)
stack_status = self.hook.get_stack_status(stack_name=stack_name)
self.assertEqual(stack_status, 'CREATE_COMPLETE', 'Incorrect stack status returned.')
@mock_cloudformation
def test_create_stack(self):
stack_name = 'my_test_create_stack_stack'
self.create_stack(stack_name)
stacks = self.hook.get_conn().describe_stacks()['Stacks']
self.assertGreater(len(stacks), 0, 'CloudFormation should have stacks')
matching_stacks = [x for x in stacks if x['StackName'] == stack_name]
self.assertEqual(len(matching_stacks), 1, f'stack with name {stack_name} should exist')
stack = matching_stacks[0]
self.assertEqual(stack['StackStatus'], 'CREATE_COMPLETE', 'Stack should be in status CREATE_COMPLETE')
@mock_cloudformation
def test_delete_stack(self):
stack_name = 'my_test_delete_stack_stack'
self.create_stack(stack_name)
self.hook.delete_stack(stack_name=stack_name)
stacks = self.hook.get_conn().describe_stacks()['Stacks']
matching_stacks = [x for x in stacks if x['StackName'] == stack_name]
self.assertEqual(len(matching_stacks), 0, f'stack with name {stack_name} should not exist')
| true
| true
|
7906416b851a8d07e9cbc674852b1cde9e66fe35
| 680
|
py
|
Python
|
manage.py
|
eferos93/ODMC_exam_implementation
|
14fd113b00dc45d2b99b2a83c662d6a70171b5e5
|
[
"CC-BY-4.0"
] | null | null | null |
manage.py
|
eferos93/ODMC_exam_implementation
|
14fd113b00dc45d2b99b2a83c662d6a70171b5e5
|
[
"CC-BY-4.0"
] | null | null | null |
manage.py
|
eferos93/ODMC_exam_implementation
|
14fd113b00dc45d2b99b2a83c662d6a70171b5e5
|
[
"CC-BY-4.0"
] | null | null | null |
#!/usr/bin/env python
"""Django's command-line utility for administrative tasks."""
import os
import sys
def main():
"""Run administrative tasks."""
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'ODMC_exam_implementation.settings')
try:
from django.core.management import execute_from_command_line
except ImportError as exc:
raise ImportError(
"Couldn't import Django. Are you sure it's installed and "
"available on your PYTHONPATH environment variable? Did you "
"forget to activate a virtual environment?"
) from exc
execute_from_command_line(sys.argv)
if __name__ == '__main__':
main()
| 29.565217
| 88
| 0.686765
|
import os
import sys
def main():
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'ODMC_exam_implementation.settings')
try:
from django.core.management import execute_from_command_line
except ImportError as exc:
raise ImportError(
"Couldn't import Django. Are you sure it's installed and "
"available on your PYTHONPATH environment variable? Did you "
"forget to activate a virtual environment?"
) from exc
execute_from_command_line(sys.argv)
if __name__ == '__main__':
main()
| true
| true
|
790641e28b00c1411b38806eeef70efe573c3db3
| 2,657
|
py
|
Python
|
weights.py
|
FireRedz/HFR-Resampler
|
1246e40c836f2e6e5cb0cf1aeaafd03bc7bf5d48
|
[
"MIT"
] | null | null | null |
weights.py
|
FireRedz/HFR-Resampler
|
1246e40c836f2e6e5cb0cf1aeaafd03bc7bf5d48
|
[
"MIT"
] | null | null | null |
weights.py
|
FireRedz/HFR-Resampler
|
1246e40c836f2e6e5cb0cf1aeaafd03bc7bf5d48
|
[
"MIT"
] | null | null | null |
import math
import numpy as np
from enum import IntEnum
class Mode(IntEnum):
CUSTOM = 0
EQUAL = 1
GAUSS = 2
GAUSS_SYM = 3
PYRAMID = 4
PYRAMID_SYM = 5
SIVEROO_1 = 6
SIVEROO_2 = 7
#This function will return an list of value, like below:
# [0,1,2,3,...,n] -> [a,...,b]
def scaleRange(n: int, a: int, b: int):
return [(x*(b-a)/(n-1))+a for x in range(0,n)]
def equal(n: int):
return [1/n]*n
def gauss(n: int):
r = range(n,0,-1)
val = [math.exp(-(2.0*x/n)**2) for x in r]
val = val/np.sum(val)
return val
def gauss_sym(n: int):
n = n/2
r = range(int(n),-math.ceil(n),-1)
val = ([math.exp(-(2.0*x/(n))**2) for x in r])
val = val/np.sum(val)
return val
def pyramid(n: int):
r = range(1,n+1)
val = [x/n for x in r]
val = val/np.sum(val)
return val
def pyramid_sym(n: int):
r = range(0,n)
val = [(n/2)-abs(x-(n-1)/2) for x in r]
val = val/np.sum(val)
return val
def siveroo1(n: int):
r = scaleRange(n,-3,0.1)
val = [math.floor(3*math.exp(-(x/1.9)**2))/3+0.1 for x in r]
val = val/np.sum(val)
return val
# this function will stretch the given array (w) to a specific length (n)
# example : n = 10, w = [1,2]
# result : val = [1,1,1,1,1,2,2,2,2,2] , flip it, and then normalize it so its sum is equal to 1
def stretch(n: int, w: int):
r = scaleRange(n,0,len(w)-0.1)
val = []
idx = [math.floor(x) for x in r]
for x in range(0,n):
index = int(idx[x])
val.append(w[index])
val = val/np.sum(val)
return val
def null(n: int):
return [0]*n
def get_weight(mode: Mode, count: int):
if count == 1:
return [1.0]
else:
return {
Mode.EQUAL : equal(count),
Mode.GAUSS : gauss(count),
Mode.GAUSS_SYM : gauss_sym(count),
Mode.PYRAMID : pyramid(count),
Mode.PYRAMID_SYM : pyramid_sym(count),
Mode.SIVEROO_1 : siveroo1(count),
Mode.SIVEROO_2 : stretch(count,[1,3,3,2,2])
}.get(mode, [1, 0]) # fallback to [1,0] if fucked up
def modeName(mode: Mode):
return {
Mode.EQUAL : "[1] Equal",
Mode.GAUSS : "[2] Gaussian Asymmetric",
Mode.GAUSS_SYM : "[3] Gaussian Symmetric",
Mode.PYRAMID : "[4] Pyramid Asymmetric",
Mode.PYRAMID_SYM : "[5] Pyramid Symmetric",
Mode.SIVEROO_1 : "[6] Siveroo's Preset I",
Mode.SIVEROO_2 : "[7] Siveroo's Preset II"
}[mode]
| 27.968421
| 96
| 0.515995
|
import math
import numpy as np
from enum import IntEnum
class Mode(IntEnum):
CUSTOM = 0
EQUAL = 1
GAUSS = 2
GAUSS_SYM = 3
PYRAMID = 4
PYRAMID_SYM = 5
SIVEROO_1 = 6
SIVEROO_2 = 7
def scaleRange(n: int, a: int, b: int):
return [(x*(b-a)/(n-1))+a for x in range(0,n)]
def equal(n: int):
return [1/n]*n
def gauss(n: int):
r = range(n,0,-1)
val = [math.exp(-(2.0*x/n)**2) for x in r]
val = val/np.sum(val)
return val
def gauss_sym(n: int):
n = n/2
r = range(int(n),-math.ceil(n),-1)
val = ([math.exp(-(2.0*x/(n))**2) for x in r])
val = val/np.sum(val)
return val
def pyramid(n: int):
r = range(1,n+1)
val = [x/n for x in r]
val = val/np.sum(val)
return val
def pyramid_sym(n: int):
r = range(0,n)
val = [(n/2)-abs(x-(n-1)/2) for x in r]
val = val/np.sum(val)
return val
def siveroo1(n: int):
r = scaleRange(n,-3,0.1)
val = [math.floor(3*math.exp(-(x/1.9)**2))/3+0.1 for x in r]
val = val/np.sum(val)
return val
def stretch(n: int, w: int):
r = scaleRange(n,0,len(w)-0.1)
val = []
idx = [math.floor(x) for x in r]
for x in range(0,n):
index = int(idx[x])
val.append(w[index])
val = val/np.sum(val)
return val
def null(n: int):
return [0]*n
def get_weight(mode: Mode, count: int):
if count == 1:
return [1.0]
else:
return {
Mode.EQUAL : equal(count),
Mode.GAUSS : gauss(count),
Mode.GAUSS_SYM : gauss_sym(count),
Mode.PYRAMID : pyramid(count),
Mode.PYRAMID_SYM : pyramid_sym(count),
Mode.SIVEROO_1 : siveroo1(count),
Mode.SIVEROO_2 : stretch(count,[1,3,3,2,2])
}.get(mode, [1, 0])
def modeName(mode: Mode):
return {
Mode.EQUAL : "[1] Equal",
Mode.GAUSS : "[2] Gaussian Asymmetric",
Mode.GAUSS_SYM : "[3] Gaussian Symmetric",
Mode.PYRAMID : "[4] Pyramid Asymmetric",
Mode.PYRAMID_SYM : "[5] Pyramid Symmetric",
Mode.SIVEROO_1 : "[6] Siveroo's Preset I",
Mode.SIVEROO_2 : "[7] Siveroo's Preset II"
}[mode]
| true
| true
|
790641fac07b70ce14f042decb16966503edbc03
| 19,826
|
py
|
Python
|
google/ads/googleads/v10/services/services/feed_service/client.py
|
JakobSteixner/google-ads-python
|
df2b802cc7e78295a4ece21cc7ef3787cd35dab0
|
[
"Apache-2.0"
] | null | null | null |
google/ads/googleads/v10/services/services/feed_service/client.py
|
JakobSteixner/google-ads-python
|
df2b802cc7e78295a4ece21cc7ef3787cd35dab0
|
[
"Apache-2.0"
] | null | null | null |
google/ads/googleads/v10/services/services/feed_service/client.py
|
JakobSteixner/google-ads-python
|
df2b802cc7e78295a4ece21cc7ef3787cd35dab0
|
[
"Apache-2.0"
] | null | null | null |
# -*- coding: utf-8 -*-
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from collections import OrderedDict
import os
import re
from typing import Dict, Optional, Sequence, Tuple, Type, Union
import pkg_resources
from google.api_core import client_options as client_options_lib
from google.api_core import gapic_v1
from google.api_core import retry as retries
from google.auth import credentials as ga_credentials # type: ignore
from google.auth.transport import mtls # type: ignore
from google.auth.transport.grpc import SslCredentials # type: ignore
from google.auth.exceptions import MutualTLSChannelError # type: ignore
from google.oauth2 import service_account # type: ignore
try:
OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault]
except AttributeError: # pragma: NO COVER
OptionalRetry = Union[retries.Retry, object] # type: ignore
from google.ads.googleads.v10.services.types import feed_service
from google.rpc import status_pb2 # type: ignore
from .transports.base import FeedServiceTransport, DEFAULT_CLIENT_INFO
from .transports.grpc import FeedServiceGrpcTransport
class FeedServiceClientMeta(type):
"""Metaclass for the FeedService client.
This provides class-level methods for building and retrieving
support objects (e.g. transport) without polluting the client instance
objects.
"""
_transport_registry = (
OrderedDict()
) # type: Dict[str, Type[FeedServiceTransport]]
_transport_registry["grpc"] = FeedServiceGrpcTransport
def get_transport_class(
cls, label: str = None,
) -> Type[FeedServiceTransport]:
"""Returns an appropriate transport class.
Args:
label: The name of the desired transport. If none is
provided, then the first transport in the registry is used.
Returns:
The transport class to use.
"""
# If a specific transport is requested, return that one.
if label:
return cls._transport_registry[label]
# No transport is requested; return the default (that is, the first one
# in the dictionary).
return next(iter(cls._transport_registry.values()))
class FeedServiceClient(metaclass=FeedServiceClientMeta):
"""Service to manage feeds."""
@staticmethod
def _get_default_mtls_endpoint(api_endpoint):
"""Converts api endpoint to mTLS endpoint.
Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to
"*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively.
Args:
api_endpoint (Optional[str]): the api endpoint to convert.
Returns:
str: converted mTLS api endpoint.
"""
if not api_endpoint:
return api_endpoint
mtls_endpoint_re = re.compile(
r"(?P<name>[^.]+)(?P<mtls>\.mtls)?(?P<sandbox>\.sandbox)?(?P<googledomain>\.googleapis\.com)?"
)
m = mtls_endpoint_re.match(api_endpoint)
name, mtls, sandbox, googledomain = m.groups()
if mtls or not googledomain:
return api_endpoint
if sandbox:
return api_endpoint.replace(
"sandbox.googleapis.com", "mtls.sandbox.googleapis.com"
)
return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com")
DEFAULT_ENDPOINT = "googleads.googleapis.com"
DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore
DEFAULT_ENDPOINT
)
@classmethod
def from_service_account_info(cls, info: dict, *args, **kwargs):
"""Creates an instance of this client using the provided credentials
info.
Args:
info (dict): The service account private key info.
args: Additional arguments to pass to the constructor.
kwargs: Additional arguments to pass to the constructor.
Returns:
FeedServiceClient: The constructed client.
"""
credentials = service_account.Credentials.from_service_account_info(
info
)
kwargs["credentials"] = credentials
return cls(*args, **kwargs)
@classmethod
def from_service_account_file(cls, filename: str, *args, **kwargs):
"""Creates an instance of this client using the provided credentials
file.
Args:
filename (str): The path to the service account private key json
file.
args: Additional arguments to pass to the constructor.
kwargs: Additional arguments to pass to the constructor.
Returns:
FeedServiceClient: The constructed client.
"""
credentials = service_account.Credentials.from_service_account_file(
filename
)
kwargs["credentials"] = credentials
return cls(*args, **kwargs)
from_service_account_json = from_service_account_file
@property
def transport(self) -> FeedServiceTransport:
"""Returns the transport used by the client instance.
Returns:
FeedServiceTransport: The transport used by the client
instance.
"""
return self._transport
def __enter__(self):
return self
def __exit__(self, type, value, traceback):
"""Releases underlying transport's resources.
.. warning::
ONLY use as a context manager if the transport is NOT shared
with other clients! Exiting the with block will CLOSE the transport
and may cause errors in other clients!
"""
self.transport.close()
@staticmethod
def feed_path(customer_id: str, feed_id: str,) -> str:
"""Returns a fully-qualified feed string."""
return "customers/{customer_id}/feeds/{feed_id}".format(
customer_id=customer_id, feed_id=feed_id,
)
@staticmethod
def parse_feed_path(path: str) -> Dict[str, str]:
"""Parses a feed path into its component segments."""
m = re.match(
r"^customers/(?P<customer_id>.+?)/feeds/(?P<feed_id>.+?)$", path
)
return m.groupdict() if m else {}
@staticmethod
def common_billing_account_path(billing_account: str,) -> str:
"""Returns a fully-qualified billing_account string."""
return "billingAccounts/{billing_account}".format(
billing_account=billing_account,
)
@staticmethod
def parse_common_billing_account_path(path: str) -> Dict[str, str]:
"""Parse a billing_account path into its component segments."""
m = re.match(r"^billingAccounts/(?P<billing_account>.+?)$", path)
return m.groupdict() if m else {}
@staticmethod
def common_folder_path(folder: str,) -> str:
"""Returns a fully-qualified folder string."""
return "folders/{folder}".format(folder=folder,)
@staticmethod
def parse_common_folder_path(path: str) -> Dict[str, str]:
"""Parse a folder path into its component segments."""
m = re.match(r"^folders/(?P<folder>.+?)$", path)
return m.groupdict() if m else {}
@staticmethod
def common_organization_path(organization: str,) -> str:
"""Returns a fully-qualified organization string."""
return "organizations/{organization}".format(organization=organization,)
@staticmethod
def parse_common_organization_path(path: str) -> Dict[str, str]:
"""Parse a organization path into its component segments."""
m = re.match(r"^organizations/(?P<organization>.+?)$", path)
return m.groupdict() if m else {}
@staticmethod
def common_project_path(project: str,) -> str:
"""Returns a fully-qualified project string."""
return "projects/{project}".format(project=project,)
@staticmethod
def parse_common_project_path(path: str) -> Dict[str, str]:
"""Parse a project path into its component segments."""
m = re.match(r"^projects/(?P<project>.+?)$", path)
return m.groupdict() if m else {}
@staticmethod
def common_location_path(project: str, location: str,) -> str:
"""Returns a fully-qualified location string."""
return "projects/{project}/locations/{location}".format(
project=project, location=location,
)
@staticmethod
def parse_common_location_path(path: str) -> Dict[str, str]:
"""Parse a location path into its component segments."""
m = re.match(
r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)$", path
)
return m.groupdict() if m else {}
def __init__(
self,
*,
credentials: Optional[ga_credentials.Credentials] = None,
transport: Union[str, FeedServiceTransport, None] = None,
client_options: Optional[client_options_lib.ClientOptions] = None,
client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
) -> None:
"""Instantiates the feed service client.
Args:
credentials (Optional[google.auth.credentials.Credentials]): The
authorization credentials to attach to requests. These
credentials identify the application to the service; if none
are specified, the client will attempt to ascertain the
credentials from the environment.
transport (Union[str, FeedServiceTransport]): The
transport to use. If set to None, a transport is chosen
automatically.
client_options (google.api_core.client_options.ClientOptions): Custom options for the
client. It won't take effect if a ``transport`` instance is provided.
(1) The ``api_endpoint`` property can be used to override the
default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT
environment variable can also be used to override the endpoint:
"always" (always use the default mTLS endpoint), "never" (always
use the default regular endpoint) and "auto" (auto switch to the
default mTLS endpoint if client certificate is present, this is
the default value). However, the ``api_endpoint`` property takes
precedence if provided.
(2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable
is "true", then the ``client_cert_source`` property can be used
to provide client certificate for mutual TLS transport. If
not provided, the default SSL client certificate will be used if
present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not
set, no client certificate will be used.
client_info (google.api_core.gapic_v1.client_info.ClientInfo):
The client info used to send a user-agent string along with
API requests. If ``None``, then default info will be used.
Generally, you only need to set this if you're developing
your own client library.
Raises:
google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport
creation failed for any reason.
"""
if isinstance(client_options, dict):
client_options = client_options_lib.from_dict(client_options)
if client_options is None:
client_options = client_options_lib.ClientOptions()
# Create SSL credentials for mutual TLS if needed.
if os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") not in (
"true",
"false",
):
raise ValueError(
"Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`"
)
use_client_cert = (
os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") == "true"
)
client_cert_source_func = None
is_mtls = False
if use_client_cert:
if client_options.client_cert_source:
is_mtls = True
client_cert_source_func = client_options.client_cert_source
else:
is_mtls = mtls.has_default_client_cert_source()
if is_mtls:
client_cert_source_func = mtls.default_client_cert_source()
else:
client_cert_source_func = None
# Figure out which api endpoint to use.
if client_options.api_endpoint is not None:
api_endpoint = client_options.api_endpoint
else:
use_mtls_env = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto")
if use_mtls_env == "never":
api_endpoint = self.DEFAULT_ENDPOINT
elif use_mtls_env == "always":
api_endpoint = self.DEFAULT_MTLS_ENDPOINT
elif use_mtls_env == "auto":
api_endpoint = (
self.DEFAULT_MTLS_ENDPOINT
if is_mtls
else self.DEFAULT_ENDPOINT
)
else:
raise MutualTLSChannelError(
"Unsupported GOOGLE_API_USE_MTLS_ENDPOINT value. Accepted "
"values: never, auto, always"
)
# Save or instantiate the transport.
# Ordinarily, we provide the transport, but allowing a custom transport
# instance provides an extensibility point for unusual situations.
if isinstance(transport, FeedServiceTransport):
# transport is a FeedServiceTransport instance.
if credentials or client_options.credentials_file:
raise ValueError(
"When providing a transport instance, "
"provide its credentials directly."
)
if client_options.scopes:
raise ValueError(
"When providing a transport instance, provide its scopes "
"directly."
)
self._transport = transport
else:
Transport = type(self).get_transport_class(transport)
self._transport = Transport(
credentials=credentials,
credentials_file=client_options.credentials_file,
host=api_endpoint,
scopes=client_options.scopes,
client_cert_source_for_mtls=client_cert_source_func,
quota_project_id=client_options.quota_project_id,
client_info=client_info,
always_use_jwt_access=True,
)
def mutate_feeds(
self,
request: Union[feed_service.MutateFeedsRequest, dict] = None,
*,
customer_id: str = None,
operations: Sequence[feed_service.FeedOperation] = None,
retry: OptionalRetry = gapic_v1.method.DEFAULT,
timeout: float = None,
metadata: Sequence[Tuple[str, str]] = (),
) -> feed_service.MutateFeedsResponse:
r"""Creates, updates, or removes feeds. Operation statuses are
returned.
List of thrown errors: `AuthenticationError <>`__
`AuthorizationError <>`__ `CollectionSizeError <>`__
`DatabaseError <>`__ `DistinctError <>`__ `FeedError <>`__
`FieldError <>`__ `FieldMaskError <>`__ `HeaderError <>`__
`IdError <>`__ `InternalError <>`__ `ListOperationError <>`__
`MutateError <>`__ `NewResourceCreationError <>`__
`NotEmptyError <>`__ `NullError <>`__ `OperatorError <>`__
`QuotaError <>`__ `RangeError <>`__ `RequestError <>`__
`ResourceCountLimitExceededError <>`__ `SizeLimitError <>`__
`StringFormatError <>`__ `StringLengthError <>`__
Args:
request (Union[google.ads.googleads.v10.services.types.MutateFeedsRequest, dict]):
The request object. Request message for
[FeedService.MutateFeeds][google.ads.googleads.v10.services.FeedService.MutateFeeds].
customer_id (str):
Required. The ID of the customer
whose feeds are being modified.
This corresponds to the ``customer_id`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
operations (Sequence[google.ads.googleads.v10.services.types.FeedOperation]):
Required. The list of operations to
perform on individual feeds.
This corresponds to the ``operations`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
metadata (Sequence[Tuple[str, str]]): Strings which should be
sent along with the request as metadata.
Returns:
google.ads.googleads.v10.services.types.MutateFeedsResponse:
Response message for an feed mutate.
"""
# Create or coerce a protobuf request object.
# Quick check: If we got a request object, we should *not* have
# gotten any keyword arguments that map to the request.
has_flattened_params = any([customer_id, operations])
if request is not None and has_flattened_params:
raise ValueError(
"If the `request` argument is set, then none of "
"the individual field arguments should be set."
)
# Minor optimization to avoid making a copy if the user passes
# in a feed_service.MutateFeedsRequest.
# There's no risk of modifying the input as we've already verified
# there are no flattened fields.
if not isinstance(request, feed_service.MutateFeedsRequest):
request = feed_service.MutateFeedsRequest(request)
# If we have keyword arguments corresponding to fields on the
# request, apply these.
if customer_id is not None:
request.customer_id = customer_id
if operations is not None:
request.operations = operations
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
rpc = self._transport._wrapped_methods[self._transport.mutate_feeds]
# Certain fields should be provided within the metadata header;
# add these here.
metadata = tuple(metadata) + (
gapic_v1.routing_header.to_grpc_metadata(
(("customer_id", request.customer_id),)
),
)
# Send the request.
response = rpc(
request, retry=retry, timeout=timeout, metadata=metadata,
)
# Done; return the response.
return response
try:
DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(
gapic_version=pkg_resources.get_distribution("google-ads",).version,
)
except pkg_resources.DistributionNotFound:
DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo()
__all__ = ("FeedServiceClient",)
| 40.96281
| 107
| 0.632705
|
from collections import OrderedDict
import os
import re
from typing import Dict, Optional, Sequence, Tuple, Type, Union
import pkg_resources
from google.api_core import client_options as client_options_lib
from google.api_core import gapic_v1
from google.api_core import retry as retries
from google.auth import credentials as ga_credentials
from google.auth.transport import mtls
from google.auth.transport.grpc import SslCredentials
from google.auth.exceptions import MutualTLSChannelError
from google.oauth2 import service_account
try:
OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault]
except AttributeError:
OptionalRetry = Union[retries.Retry, object]
from google.ads.googleads.v10.services.types import feed_service
from google.rpc import status_pb2
from .transports.base import FeedServiceTransport, DEFAULT_CLIENT_INFO
from .transports.grpc import FeedServiceGrpcTransport
class FeedServiceClientMeta(type):
_transport_registry = (
OrderedDict()
)
_transport_registry["grpc"] = FeedServiceGrpcTransport
def get_transport_class(
cls, label: str = None,
) -> Type[FeedServiceTransport]:
if label:
return cls._transport_registry[label]
return next(iter(cls._transport_registry.values()))
class FeedServiceClient(metaclass=FeedServiceClientMeta):
@staticmethod
def _get_default_mtls_endpoint(api_endpoint):
if not api_endpoint:
return api_endpoint
mtls_endpoint_re = re.compile(
r"(?P<name>[^.]+)(?P<mtls>\.mtls)?(?P<sandbox>\.sandbox)?(?P<googledomain>\.googleapis\.com)?"
)
m = mtls_endpoint_re.match(api_endpoint)
name, mtls, sandbox, googledomain = m.groups()
if mtls or not googledomain:
return api_endpoint
if sandbox:
return api_endpoint.replace(
"sandbox.googleapis.com", "mtls.sandbox.googleapis.com"
)
return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com")
DEFAULT_ENDPOINT = "googleads.googleapis.com"
DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__(
DEFAULT_ENDPOINT
)
@classmethod
def from_service_account_info(cls, info: dict, *args, **kwargs):
credentials = service_account.Credentials.from_service_account_info(
info
)
kwargs["credentials"] = credentials
return cls(*args, **kwargs)
@classmethod
def from_service_account_file(cls, filename: str, *args, **kwargs):
credentials = service_account.Credentials.from_service_account_file(
filename
)
kwargs["credentials"] = credentials
return cls(*args, **kwargs)
from_service_account_json = from_service_account_file
@property
def transport(self) -> FeedServiceTransport:
return self._transport
def __enter__(self):
return self
def __exit__(self, type, value, traceback):
self.transport.close()
@staticmethod
def feed_path(customer_id: str, feed_id: str,) -> str:
return "customers/{customer_id}/feeds/{feed_id}".format(
customer_id=customer_id, feed_id=feed_id,
)
@staticmethod
def parse_feed_path(path: str) -> Dict[str, str]:
m = re.match(
r"^customers/(?P<customer_id>.+?)/feeds/(?P<feed_id>.+?)$", path
)
return m.groupdict() if m else {}
@staticmethod
def common_billing_account_path(billing_account: str,) -> str:
return "billingAccounts/{billing_account}".format(
billing_account=billing_account,
)
@staticmethod
def parse_common_billing_account_path(path: str) -> Dict[str, str]:
m = re.match(r"^billingAccounts/(?P<billing_account>.+?)$", path)
return m.groupdict() if m else {}
@staticmethod
def common_folder_path(folder: str,) -> str:
return "folders/{folder}".format(folder=folder,)
@staticmethod
def parse_common_folder_path(path: str) -> Dict[str, str]:
m = re.match(r"^folders/(?P<folder>.+?)$", path)
return m.groupdict() if m else {}
@staticmethod
def common_organization_path(organization: str,) -> str:
return "organizations/{organization}".format(organization=organization,)
@staticmethod
def parse_common_organization_path(path: str) -> Dict[str, str]:
m = re.match(r"^organizations/(?P<organization>.+?)$", path)
return m.groupdict() if m else {}
@staticmethod
def common_project_path(project: str,) -> str:
return "projects/{project}".format(project=project,)
@staticmethod
def parse_common_project_path(path: str) -> Dict[str, str]:
m = re.match(r"^projects/(?P<project>.+?)$", path)
return m.groupdict() if m else {}
@staticmethod
def common_location_path(project: str, location: str,) -> str:
return "projects/{project}/locations/{location}".format(
project=project, location=location,
)
@staticmethod
def parse_common_location_path(path: str) -> Dict[str, str]:
m = re.match(
r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)$", path
)
return m.groupdict() if m else {}
def __init__(
self,
*,
credentials: Optional[ga_credentials.Credentials] = None,
transport: Union[str, FeedServiceTransport, None] = None,
client_options: Optional[client_options_lib.ClientOptions] = None,
client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
) -> None:
if isinstance(client_options, dict):
client_options = client_options_lib.from_dict(client_options)
if client_options is None:
client_options = client_options_lib.ClientOptions()
if os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") not in (
"true",
"false",
):
raise ValueError(
"Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`"
)
use_client_cert = (
os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") == "true"
)
client_cert_source_func = None
is_mtls = False
if use_client_cert:
if client_options.client_cert_source:
is_mtls = True
client_cert_source_func = client_options.client_cert_source
else:
is_mtls = mtls.has_default_client_cert_source()
if is_mtls:
client_cert_source_func = mtls.default_client_cert_source()
else:
client_cert_source_func = None
if client_options.api_endpoint is not None:
api_endpoint = client_options.api_endpoint
else:
use_mtls_env = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto")
if use_mtls_env == "never":
api_endpoint = self.DEFAULT_ENDPOINT
elif use_mtls_env == "always":
api_endpoint = self.DEFAULT_MTLS_ENDPOINT
elif use_mtls_env == "auto":
api_endpoint = (
self.DEFAULT_MTLS_ENDPOINT
if is_mtls
else self.DEFAULT_ENDPOINT
)
else:
raise MutualTLSChannelError(
"Unsupported GOOGLE_API_USE_MTLS_ENDPOINT value. Accepted "
"values: never, auto, always"
)
if isinstance(transport, FeedServiceTransport):
if credentials or client_options.credentials_file:
raise ValueError(
"When providing a transport instance, "
"provide its credentials directly."
)
if client_options.scopes:
raise ValueError(
"When providing a transport instance, provide its scopes "
"directly."
)
self._transport = transport
else:
Transport = type(self).get_transport_class(transport)
self._transport = Transport(
credentials=credentials,
credentials_file=client_options.credentials_file,
host=api_endpoint,
scopes=client_options.scopes,
client_cert_source_for_mtls=client_cert_source_func,
quota_project_id=client_options.quota_project_id,
client_info=client_info,
always_use_jwt_access=True,
)
    def mutate_feeds(
        self,
        request: Union[feed_service.MutateFeedsRequest, dict] = None,
        *,
        customer_id: str = None,
        operations: Sequence[feed_service.FeedOperation] = None,
        retry: OptionalRetry = gapic_v1.method.DEFAULT,
        timeout: float = None,
        metadata: Sequence[Tuple[str, str]] = (),
    ) -> feed_service.MutateFeedsResponse:
        """Send a MutateFeeds RPC through the configured transport.

        Args:
            request: A ``MutateFeedsRequest`` (or a dict coercible to one).
                Mutually exclusive with the flattened arguments below.
            customer_id: ID of the customer whose feeds are being modified.
            operations: Operations to perform on individual feeds.
            retry: Retry policy applied to the RPC.
            timeout: RPC timeout in seconds.
            metadata: Extra metadata forwarded with the gRPC request.

        Returns:
            The ``MutateFeedsResponse`` returned by the service.

        Raises:
            ValueError: If ``request`` is combined with a flattened argument.
        """
        # A pre-built request and the flattened convenience arguments are
        # mutually exclusive; accepting both would be ambiguous.
        has_flattened_params = any([customer_id, operations])
        if request is not None and has_flattened_params:
            raise ValueError(
                "If the `request` argument is set, then none of "
                "the individual field arguments should be set."
            )
        # Coerce a plain dict (or None) into the request type.
        if not isinstance(request, feed_service.MutateFeedsRequest):
            request = feed_service.MutateFeedsRequest(request)
        if customer_id is not None:
            request.customer_id = customer_id
        if operations is not None:
            request.operations = operations
        # Use the transport's wrapped method so the defaults (retry,
        # timeout, client info headers) configured on the transport apply.
        rpc = self._transport._wrapped_methods[self._transport.mutate_feeds]
        # Routing header: the backend routes the call by customer_id.
        metadata = tuple(metadata) + (
            gapic_v1.routing_header.to_grpc_metadata(
                (("customer_id", request.customer_id),)
            ),
        )
        response = rpc(
            request, retry=retry, timeout=timeout, metadata=metadata,
        )
        return response
try:
    # Advertise the installed google-ads distribution version in the
    # client info sent with each request.
    _pkg_version = pkg_resources.get_distribution("google-ads").version
    DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(
        gapic_version=_pkg_version,
    )
except pkg_resources.DistributionNotFound:
    # Package metadata unavailable (e.g. vendored source): omit the version.
    DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo()

__all__ = ("FeedServiceClient",)
| true
| true
|
7906447f2dbea215a62effae1c0161bccfc62e6a
| 538
|
py
|
Python
|
LeetcodeAlgorithms/116. Populating Next Right Pointers in Each Node/populating-next-right-pointers-in-each-node.py
|
Fenghuapiao/PyLeetcode
|
d804a62643fe935eb61808196a2c093ea9583654
|
[
"MIT"
] | 3
|
2019-08-20T06:54:38.000Z
|
2022-01-07T12:56:46.000Z
|
LeetcodeAlgorithms/116. Populating Next Right Pointers in Each Node/populating-next-right-pointers-in-each-node.py
|
Fenghuapiao/PyLeetcode
|
d804a62643fe935eb61808196a2c093ea9583654
|
[
"MIT"
] | null | null | null |
LeetcodeAlgorithms/116. Populating Next Right Pointers in Each Node/populating-next-right-pointers-in-each-node.py
|
Fenghuapiao/PyLeetcode
|
d804a62643fe935eb61808196a2c093ea9583654
|
[
"MIT"
] | 2
|
2018-06-07T02:56:39.000Z
|
2018-08-01T15:27:55.000Z
|
# Definition for binary tree with next pointer.
# class TreeLinkNode:
#     def __init__(self, x):
#         self.val = x
#         self.left = None
#         self.right = None
#         self.next = None

class Solution:
    # @param root, a tree link node
    # @return nothing
    def connect(self, root):
        """Wire each internal node's children: the left child's `next` is
        the right child, and the right child's `next` is the left child of
        the parent's `next` neighbour (None at a level's right edge).
        Assumes a perfect binary tree.
        """
        # Leaves (and empty trees) need no wiring.
        if not (root and root.left and root.right):
            return
        root.left.next = root.right
        # `root.next and root.next.left` evaluates to None at the edge.
        root.right.next = root.next and root.next.left
        self.connect(root.left)
        self.connect(root.right)
| 33.625
| 70
| 0.585502
|
class Solution:
def connect(self, root):
if root and root.left and root.right:
root.left.next = root.right
root.right.next = root.next and root.next.left
return self.connect(root.left) or self.connect(root.right)
| true
| true
|
790644a1ad87fae9a4d2498a6d2cad85fa73d67d
| 869
|
py
|
Python
|
tests_python/tests/test_many_bakers.py
|
noclouds/tezos-master
|
6c31af7ab3ea782f89f988e389e9b0860207a8d6
|
[
"MIT"
] | null | null | null |
tests_python/tests/test_many_bakers.py
|
noclouds/tezos-master
|
6c31af7ab3ea782f89f988e389e9b0860207a8d6
|
[
"MIT"
] | null | null | null |
tests_python/tests/test_many_bakers.py
|
noclouds/tezos-master
|
6c31af7ab3ea782f89f988e389e9b0860207a8d6
|
[
"MIT"
] | 1
|
2021-06-04T09:31:48.000Z
|
2021-06-04T09:31:48.000Z
|
import time
import pytest
from tools import utils, constants
PARAMS = ['--connections', '500']
# TODO parameterize test (node/baker counts are hard-coded below)
@pytest.mark.baker
@pytest.mark.multinode
@pytest.mark.slow
@pytest.mark.incremental
class TestManyBakers:
    """Run 5 bakers over 10 nodes, wait, and check the node logs."""
    def test_init(self, sandbox):
        # Start 10 nodes, each with the shared connection parameters.
        for i in range(10):
            sandbox.add_node(i, params=PARAMS)
        utils.activate_alpha(sandbox.client(0))
        # Attach one baker per bootstrap account (bootstrap1..bootstrap5).
        for i in range(5):
            sandbox.add_baker(i, f'bootstrap{i + 1}',
                              proto=constants.ALPHA_DEAMON)
    def test_wait(self):
        # Give the network time to bake and propagate some blocks.
        time.sleep(5)
    def test_check_logs(self, sandbox):
        # Log scanning only applies when the sandbox captured log files.
        if not sandbox.log_dir:
            pytest.skip()
        assert sandbox.logs
        # Fail if any node logged a canceled/crashed event.
        error_pattern = r"canceled|crashed"
        assert utils.check_logs(sandbox.logs, error_pattern)
| 24.828571
| 60
| 0.638665
|
import time
import pytest
from tools import utils, constants
PARAMS = ['--connections', '500']
@pytest.mark.baker
@pytest.mark.multinode
@pytest.mark.slow
@pytest.mark.incremental
class TestManyBakers:
def test_init(self, sandbox):
for i in range(10):
sandbox.add_node(i, params=PARAMS)
utils.activate_alpha(sandbox.client(0))
for i in range(5):
sandbox.add_baker(i, f'bootstrap{i + 1}',
proto=constants.ALPHA_DEAMON)
def test_wait(self):
time.sleep(5)
def test_check_logs(self, sandbox):
if not sandbox.log_dir:
pytest.skip()
assert sandbox.logs
error_pattern = r"canceled|crashed"
assert utils.check_logs(sandbox.logs, error_pattern)
| true
| true
|
79064628611c6e9502d07eef23a9c0d64f14b2fc
| 315
|
py
|
Python
|
Chapter10/default_dict_list_of_tuples.py
|
LuisPereda/Learning_Python
|
e89e69346c5584be10d991010f39b59329793ba5
|
[
"MIT"
] | null | null | null |
Chapter10/default_dict_list_of_tuples.py
|
LuisPereda/Learning_Python
|
e89e69346c5584be10d991010f39b59329793ba5
|
[
"MIT"
] | null | null | null |
Chapter10/default_dict_list_of_tuples.py
|
LuisPereda/Learning_Python
|
e89e69346c5584be10d991010f39b59329793ba5
|
[
"MIT"
] | null | null | null |
# Demo of collections.defaultdict: a missing key yields the factory default
# ([]), so grouping country -> states needs no key-existence checks.
# Fixed: the original used Python 2 `print` statements, which are a
# SyntaxError under Python 3; converted to print() calls.
from collections import defaultdict

game = defaultdict(list)
tuple_list_county = [('US', 'Visconsin'), ('Germany', 'Bavaria'), ('UK', 'Bradfordshire'), ('India', 'punjab'), ('China', 'Shandong'), ('Canada', 'Nova Scotia')]
# Accessing any missing key inserts and returns an empty list.
print(game["any_value"])
for country, state in tuple_list_county:
    game[country].append(state)
print(game)
| 26.25
| 162
| 0.669841
|
from collections import defaultdict
game = defaultdict(list)
tuple_list_county = [('US', 'Visconsin'), ('Germany', 'Bavaria'), ('UK', 'Bradfordshire'), ('India', 'punjab'), ('China', 'Shandong'), ('Canada', 'Nova Scotia')]
print game["any_value"]
for k,v in tuple_list_county:
game[k].append(v)
print game
| false
| true
|
79064688870260d6a5b4eb36b1cf009bb9eb49b9
| 12,593
|
py
|
Python
|
splunk_eventgen/splunk_app/lib/mod_input/fields.py
|
technimad/eventgen
|
f33c07227182603d532db065a917a8faba1f01c9
|
[
"Apache-2.0"
] | null | null | null |
splunk_eventgen/splunk_app/lib/mod_input/fields.py
|
technimad/eventgen
|
f33c07227182603d532db065a917a8faba1f01c9
|
[
"Apache-2.0"
] | 5
|
2021-03-31T20:15:06.000Z
|
2022-02-26T10:40:39.000Z
|
splunk_eventgen/splunk_app/lib/mod_input/fields.py
|
technimad/eventgen
|
f33c07227182603d532db065a917a8faba1f01c9
|
[
"Apache-2.0"
] | null | null | null |
import json
import re
class FieldValidationException(Exception):
    """Raised when a field value fails validation or conversion."""
class Field(object):
    """
    Base class for modular-input field validators. Sub-class this and
    override ``to_python`` if you need custom validation.
    """

    DATA_TYPE_STRING = 'string'
    DATA_TYPE_NUMBER = 'number'
    DATA_TYPE_BOOLEAN = 'boolean'

    def get_data_type(self):
        """Return the declared data type of the field."""
        return Field.DATA_TYPE_STRING

    def __init__(self, name, title, description, required_on_create=True, required_on_edit=False):
        """
        Create the field.

        Arguments:
        name -- machine name of the field (e.g. "database_server")
        title -- human readable title (e.g. "Database server")
        description -- human-readable description of the field
                       (e.g. "The IP or domain name of the database server")
        required_on_create -- if True, the parameter is required when an
                              input stanza is created
        required_on_edit -- if True, the parameter is required when an
                            input stanza is modified

        The defaults for required_on_create/required_on_edit match the
        documented modular-input behavior.
        """
        # Modular input UIs make no distinction between None and blank,
        # so both are rejected identically for each text parameter.
        for param_name, text in (('name', name), ('title', title),
                                 ('description', description)):
            if text is None or len(text.strip()) == 0:
                raise ValueError(
                    "The %s parameter cannot be empty." % param_name)

        self.name = name
        self.title = title
        self.description = description
        self.required_on_create = required_on_create
        self.required_on_edit = required_on_edit

    def to_python(self, value):
        """
        Convert the raw value to a Python object; subclasses raise
        FieldValidationException on invalid data. The base implementation
        performs no validation (the framework handles empty values).
        """
        return value

    def to_string(self, value):
        """Convert the value to its string representation."""
        return str(value)
class BooleanField(Field):
    """Field accepting booleans or boolean-ish strings ("true"/"t"/"1",
    "false"/"f"/"0", case-insensitive)."""

    def to_python(self, value):
        """Convert *value* to a bool, raising on anything unrecognized."""
        Field.to_python(self, value)
        # Real booleans (and, by equality, 0/1) pass straight through.
        if value in (True, False):
            return value
        text = str(value).strip().lower()
        if text in ("true", "t", "1"):
            return True
        if text in ("false", "f", "0"):
            return False
        raise FieldValidationException(
            "The value of '%s' for the '%s' parameter is not a valid boolean" % (str(value), self.name))

    def to_string(self, value):
        """Render True/False as "1"/"0"; everything else via str()."""
        if value is True:
            return "1"
        if value is False:
            return "0"
        return str(value)

    def get_data_type(self):
        return Field.DATA_TYPE_BOOLEAN
class DelimitedField(Field):
    """Field whose string value is split into a list on a fixed delimiter."""

    def __init__(self, name, title, description, delim, required_on_create=True, required_on_edit=False):
        super(DelimitedField, self).__init__(name, title, description, required_on_create, required_on_edit)
        # Delimiter used to split incoming values.
        self._delim = delim

    def to_python(self, value):
        """Split *value* on the delimiter; None passes through as None."""
        Field.to_python(self, value)
        if value is None:
            return None
        try:
            return value.split(self._delim)
        except ValueError as e:
            raise FieldValidationException(str(e))

    def to_string(self, value):
        """Render the value as a string; None becomes the empty string."""
        return "" if value is None else str(value)

    def get_data_type(self):
        return Field.DATA_TYPE_STRING
class DurationField(Field):
    """Field representing a duration string such as "1d" (a 24-hour period).

    The string is converted to an integer number of seconds.
    """

    DURATION_RE = re.compile(r"(?P<duration>[0-9]+)\s*(?P<units>[a-z]*)", re.IGNORECASE)

    MINUTE = 60
    HOUR = 3600
    DAY = 86400
    WEEK = 604800

    UNITS = {
        'w': WEEK, 'week': WEEK, 'd': DAY, 'day': DAY, 'h': HOUR, 'hour': HOUR, 'm': MINUTE, 'min': MINUTE, 'minute':
        MINUTE, 's': 1}

    def to_python(self, value):
        """Convert a duration string ("90", "2h", "1 day") to seconds."""
        Field.to_python(self, value)
        parsed = DurationField.DURATION_RE.match(value)
        if parsed is None:
            raise FieldValidationException(
                "The value of '%s' for the '%s' parameter is not a valid duration" % (str(value), self.name))
        groups = parsed.groupdict()
        unit = groups['units']
        try:
            amount = int(groups['duration'])
        except ValueError:
            raise FieldValidationException(
                "The duration '%s' for the '%s' parameter is not a valid number" % (groups['duration'], self.name))
        # Validate the unit when one was supplied.
        if len(unit) > 0 and unit not in DurationField.UNITS:
            raise FieldValidationException(
                "The unit '%s' for the '%s' parameter is not a valid unit of duration" % (unit, self.name))
        # No unit means the number is already in seconds.
        return amount * DurationField.UNITS[unit] if unit else amount

    def to_string(self, value):
        return str(value)
class FloatField(Field):
    """Field holding a floating-point number."""

    def to_python(self, value):
        """Parse *value* as a float; None passes through unchanged."""
        Field.to_python(self, value)
        if value is None:
            return None
        try:
            return float(value)
        except ValueError as e:
            raise FieldValidationException(str(e))

    def to_string(self, value):
        """Render the value as a string; None becomes the empty string."""
        return "" if value is None else str(value)

    def get_data_type(self):
        return Field.DATA_TYPE_NUMBER
class IntegerField(Field):
    """Field holding an integer."""

    def to_python(self, value):
        """Parse *value* as an int; None passes through unchanged."""
        Field.to_python(self, value)
        if value is None:
            return None
        try:
            return int(value)
        except ValueError as e:
            raise FieldValidationException(str(e))

    def to_string(self, value):
        """Render the value as a string; None becomes the empty string."""
        return "" if value is None else str(value)

    def get_data_type(self):
        return Field.DATA_TYPE_NUMBER
class IntervalField(Field):
    '''Class for handling Splunk's "interval" field, which typically accepts
    an integer value OR a cron-style string. Note that this means that the
    data type returned is a string, so the modular input must handle conversion
    of this string to an integer at runtime.'''

    # Accepted cron field formats:
    #     Asterisk: * (equivalent to first-last range)
    #     Lists: 1,2,3,4,5
    #     Ranges: 1-60
    #
    # and combinations of the above:
    #
    #     Ranges followed by steps: 0-23/2
    #     Asterisks followed by steps: */2
    #
    # Note that we don't check explicitly for correct numeric values for each
    # cron field.
    #
    # Fixed: the pattern is anchored with \A...\Z so a field must match in
    # FULL. The unanchored version, used via re.match (a prefix match),
    # wrongly accepted malformed fields such as "1-" or "*/" (matching only
    # the "1" or "*" prefix).
    cron_rx = re.compile(
        r'''
        \A
        (
            \d{1,2}                  # A digit.
            |\d{1,2}-\d{1,2}         # A range.
            |(\d{1,2},)+\d{1,2}      # A list of digits.
            |\d{1,2}-\d{1,2}/\d{1,2} # A range followed by a step.
            |\*                      # The asterisk character.
            |\*/\d{1,2}              # An asterisk followed by a step.
        )
        \Z
        ''', re.VERBOSE)

    def to_python(self, value):
        """Return int(value) for integral intervals, else the validated
        cron schedule string; raise FieldValidationException otherwise."""
        try:
            # Try parsing the string as an integer.
            return int(value)
        except ValueError:
            # Try parsing the string as a cron schedule.
            if self.parse_cron(value):
                return value
            raise FieldValidationException("The value of '{}' for the '{}' parameter is not a valid value".format(
                value, self.name))

    def get_data_type(self):
        return Field.DATA_TYPE_STRING

    def parse_cron(self, value):
        '''Return True if *value* is a well-formed five-field cron string.'''
        fields = value.split()
        if len(fields) == 5 and all([self.cron_rx.match(i) for i in fields]):
            return True
        return False
class JsonField(Field):
    """Field whose value is a JSON document, returned parsed."""

    def to_python(self, value):
        """Parse *value* as JSON, raising FieldValidationException on
        anything json.loads cannot handle."""
        Field.to_python(self, value)
        try:
            return json.loads(value)
        except (TypeError, ValueError):
            pass
        raise FieldValidationException(
            "The value of '%s' for the '%s' parameter is not a valid JSON object" % (str(value), self.name))

    def to_string(self, value):
        return str(value)

    def get_data_type(self):
        return Field.DATA_TYPE_STRING
class ListField(Field):
    """Field whose value is a comma-separated list."""

    def to_python(self, value):
        """Split a comma-separated string into a list; None yields []."""
        Field.to_python(self, value)
        return [] if value is None else value.split(",")

    def to_string(self, value):
        """Join a list back into a comma-separated string; None yields ""."""
        return "" if value is None else ",".join(value)
class RangeField(Field):
    """Integer field constrained to an inclusive [low, high] range."""

    def __init__(self, name, title, description, low, high, required_on_create=True, required_on_edit=False):
        super(RangeField, self).__init__(name, title, description, required_on_create, required_on_edit)
        # Inclusive bounds for accepted values.
        self.low = low
        self.high = high

    def to_python(self, value):
        """Parse *value* as an int and verify it lies within the range."""
        Field.to_python(self, value)
        if value is None:
            return None
        try:
            number = int(value)
            if self.low <= number <= self.high:
                return number
            # Note: this propagates past the ValueError handler below.
            raise FieldValidationException("Value out of range.")
        except ValueError as e:
            raise FieldValidationException(str(e))

    def to_string(self, value):
        """Render the value as a string; None becomes the empty string."""
        return "" if value is None else str(value)

    def get_data_type(self):
        return Field.DATA_TYPE_NUMBER
class RegexField(Field):
    """Field whose value is compiled into a regular expression object."""

    def to_python(self, value):
        """Compile *value* as a regex; None passes through unchanged."""
        Field.to_python(self, value)
        if value is None:
            return None
        try:
            return re.compile(value)
        except Exception as e:
            raise FieldValidationException(str(e))

    def to_string(self, value):
        """Return the original pattern text; None yields the empty string."""
        return "" if value is None else value.pattern
class SeverityField(Field):
    """Field mapping severity names (e.g. "INFO") to numeric log levels."""

    # Note: We ignore "FATAL" severity since Python's logging assigns it the
    # same value as "CRITICAL".
    SEVERITIES = {'DEBUG': 10, 'INFO': 20, 'WARN': 30, 'ERROR': 40, 'CRITICAL': 50}

    SEVERITIES_BY_INT = {v: k for k, v in SEVERITIES.items()}

    def to_python(self, value):
        """Convert a severity name to its numeric level."""
        try:
            if value in SeverityField.SEVERITIES:
                return SeverityField.SEVERITIES[value]
        except TypeError:
            # Fixed: a dict membership test raises TypeError (not
            # AttributeError, which the original caught and could never
            # occur) for unhashable values; treat those as invalid below.
            pass
        raise FieldValidationException("The value of '{}' for the '{}' parameter is not a valid value".format(
            value, self.name))

    def to_string(self, value):
        """Convert a numeric level back to its severity name."""
        if value in SeverityField.SEVERITIES_BY_INT:
            return SeverityField.SEVERITIES_BY_INT[value]
        else:
            raise ValueError('Invalid value provided for severity.')

    def get_data_type(self):
        return Field.DATA_TYPE_NUMBER
class VerbosityField(Field):
    """Field accepting a logging verbosity level (10/20/30/40/50)."""

    def to_python(self, value):
        """Convert *value* to one of the accepted int levels, or None."""
        Field.to_python(self, value)
        # Fixed: check for None BEFORE converting. The original called
        # int(value) first, which raises TypeError on None and made the
        # `return None` branch unreachable.
        if value is None:
            return None
        value = int(value)
        if value in (10, 20, 30, 40, 50):
            return value
        raise FieldValidationException('Invalid value provided for verbosity, must be one of the following: ' +
                                       '{10, 20, 30, 40, 50}')

    def to_string(self, value):
        """Render the value as a string; None becomes the empty string."""
        if value is not None:
            return str(value)
        return ""

    def get_data_type(self):
        return Field.DATA_TYPE_NUMBER
| 28.426637
| 120
| 0.58334
|
import json
import re
class FieldValidationException(Exception):
pass
class Field(object):
DATA_TYPE_STRING = 'string'
DATA_TYPE_NUMBER = 'number'
DATA_TYPE_BOOLEAN = 'boolean'
def get_data_type(self):
return Field.DATA_TYPE_STRING
def __init__(self, name, title, description, required_on_create=True, required_on_edit=False):
if name is None or len(name.strip()) == 0:
raise ValueError("The name parameter cannot be empty.")
if title is None or len(title.strip()) == 0:
raise ValueError("The title parameter cannot be empty.")
if description is None or len(description.strip()) == 0:
raise ValueError("The description parameter cannot be empty.")
self.name = name
self.title = title
self.description = description
self.required_on_create = required_on_create
self.required_on_edit = required_on_edit
def to_python(self, value):
return value
def to_string(self, value):
return str(value)
class BooleanField(Field):
def to_python(self, value):
Field.to_python(self, value)
if value in [True, False]:
return value
elif str(value).strip().lower() in ["true", "t", "1"]:
return True
elif str(value).strip().lower() in ["false", "f", "0"]:
return False
raise FieldValidationException(
"The value of '%s' for the '%s' parameter is not a valid boolean" % (str(value), self.name))
def to_string(self, value):
if value is True:
return "1"
elif value is False:
return "0"
return str(value)
def get_data_type(self):
return Field.DATA_TYPE_BOOLEAN
class DelimitedField(Field):
def __init__(self, name, title, description, delim, required_on_create=True, required_on_edit=False):
super(DelimitedField, self).__init__(name, title, description, required_on_create, required_on_edit)
self._delim = delim
def to_python(self, value):
Field.to_python(self, value)
if value is not None:
try:
tmp = value.split(self._delim)
return tmp
except ValueError as e:
raise FieldValidationException(str(e))
else:
return None
def to_string(self, value):
if value is not None:
return str(value)
return ""
def get_data_type(self):
return Field.DATA_TYPE_STRING
class DurationField(Field):
DURATION_RE = re.compile(r"(?P<duration>[0-9]+)\s*(?P<units>[a-z]*)", re.IGNORECASE)
MINUTE = 60
HOUR = 3600
DAY = 86400
WEEK = 604800
UNITS = {
'w': WEEK, 'week': WEEK, 'd': DAY, 'day': DAY, 'h': HOUR, 'hour': HOUR, 'm': MINUTE, 'min': MINUTE, 'minute':
MINUTE, 's': 1}
def to_python(self, value):
Field.to_python(self, value)
m = DurationField.DURATION_RE.match(value)
if m is None:
raise FieldValidationException(
"The value of '%s' for the '%s' parameter is not a valid duration" % (str(value), self.name))
d = m.groupdict()
units = d['units']
try:
duration = int(d['duration'])
except ValueError:
raise FieldValidationException(
"The duration '%s' for the '%s' parameter is not a valid number" % (d['duration'], self.name))
if len(units) > 0 and units not in DurationField.UNITS:
raise FieldValidationException(
"The unit '%s' for the '%s' parameter is not a valid unit of duration" % (units, self.name))
if len(units) > 0:
return duration * DurationField.UNITS[units]
else:
return duration
def to_string(self, value):
return str(value)
class FloatField(Field):
def to_python(self, value):
Field.to_python(self, value)
if value is not None:
try:
return float(value)
except ValueError as e:
raise FieldValidationException(str(e))
else:
return None
def to_string(self, value):
if value is not None:
return str(value)
return ""
def get_data_type(self):
return Field.DATA_TYPE_NUMBER
class IntegerField(Field):
def to_python(self, value):
Field.to_python(self, value)
if value is not None:
try:
return int(value)
except ValueError as e:
raise FieldValidationException(str(e))
else:
return None
def to_string(self, value):
if value is not None:
return str(value)
return ""
def get_data_type(self):
return Field.DATA_TYPE_NUMBER
class IntervalField(Field):
# cron field.
cron_rx = re.compile(
r'''
(
\d{1,2} # A digit.
|\d{1,2}-\d{1,2} # A range.
|(\d{1,2},)+\d{1,2} # A list of digits.
|\d{1,2}-\d{1,2}/\d{1,2} # A range followed by a step.
|\* # The asterisk character.
|\*/\d{1,2} # An asterisk followed by a step.
)
''', re.VERBOSE)
def to_python(self, value):
try:
# Try parsing the string as an integer.
return int(value)
except ValueError:
# Try parsing the string as a cron schedule.
if self.parse_cron(value):
return value
raise FieldValidationException("The value of '{}' for the '{}' parameter is not a valid value".format(
value, self.name))
def get_data_type(self):
return Field.DATA_TYPE_STRING
def parse_cron(self, value):
fields = value.split()
if len(fields) == 5 and all([self.cron_rx.match(i) for i in fields]):
return True
return False
class JsonField(Field):
def to_python(self, value):
Field.to_python(self, value)
try:
return json.loads(value)
except (TypeError, ValueError):
raise FieldValidationException(
"The value of '%s' for the '%s' parameter is not a valid JSON object" % (str(value), self.name))
def to_string(self, value):
return str(value)
def get_data_type(self):
return Field.DATA_TYPE_STRING
class ListField(Field):
def to_python(self, value):
Field.to_python(self, value)
if value is not None:
return value.split(",")
else:
return []
def to_string(self, value):
if value is not None:
return ",".join(value)
return ""
class RangeField(Field):
def __init__(self, name, title, description, low, high, required_on_create=True, required_on_edit=False):
super(RangeField, self).__init__(name, title, description, required_on_create, required_on_edit)
self.low = low
self.high = high
def to_python(self, value):
Field.to_python(self, value)
if value is not None:
try:
tmp = int(value)
if tmp >= self.low and tmp <= self.high:
return tmp
else:
raise FieldValidationException("Value out of range.")
except ValueError as e:
raise FieldValidationException(str(e))
else:
return None
def to_string(self, value):
if value is not None:
return str(value)
return ""
def get_data_type(self):
return Field.DATA_TYPE_NUMBER
class RegexField(Field):
def to_python(self, value):
Field.to_python(self, value)
if value is not None:
try:
return re.compile(value)
except Exception as e:
raise FieldValidationException(str(e))
else:
return None
def to_string(self, value):
if value is not None:
return value.pattern
return ""
class SeverityField(Field):
# Note: We ignore "FATAL" severity since Python's logging assigns it the
SEVERITIES = {'DEBUG': 10, 'INFO': 20, 'WARN': 30, 'ERROR': 40, 'CRITICAL': 50}
SEVERITIES_BY_INT = {v: k for k, v in SEVERITIES.items()}
def to_python(self, value):
try:
if value in SeverityField.SEVERITIES:
return SeverityField.SEVERITIES[value]
except AttributeError:
pass
raise FieldValidationException("The value of '{}' for the '{}' parameter is not a valid value".format(
value, self.name))
def to_string(self, value):
if value in SeverityField.SEVERITIES_BY_INT:
return SeverityField.SEVERITIES_BY_INT[value]
else:
raise ValueError('Invalid value provided for severity.')
def get_data_type(self):
return Field.DATA_TYPE_NUMBER
class VerbosityField(Field):
def to_python(self, value):
Field.to_python(self, value)
value = int(value)
if value is not None:
if value in [10, 20, 30, 40, 50]:
return value
else:
raise FieldValidationException('Invalid value provided for verbosity, must be one of the following: ' +
'{10, 20, 30, 40, 50}')
else:
return None
def to_string(self, value):
if value is not None:
return str(value)
return ""
def get_data_type(self):
return Field.DATA_TYPE_NUMBER
| true
| true
|
790647080e90218c847c23aa214848a351882c4a
| 326
|
py
|
Python
|
simple_task_python/rename.py
|
sagorbrur/sagorbrur.github.io
|
4e510029a9d56754a80dfc897cf477e62a530c01
|
[
"CC0-1.0"
] | null | null | null |
simple_task_python/rename.py
|
sagorbrur/sagorbrur.github.io
|
4e510029a9d56754a80dfc897cf477e62a530c01
|
[
"CC0-1.0"
] | null | null | null |
simple_task_python/rename.py
|
sagorbrur/sagorbrur.github.io
|
4e510029a9d56754a80dfc897cf477e62a530c01
|
[
"CC0-1.0"
] | null | null | null |
# a simple script to rename multiple files
import os
import re
# Directory holding the images to rename (relative to the CWD).
path = 'myimages/'
files = os.listdir(path)
# Natural sort: split each name into digit and non-digit runs so that
# e.g. "img2" sorts before "img10".
files.sort(key=lambda var:[int(x) if x.isdigit() else x for x in re.findall(r'[^0-9]|[0-9]+', var)])
for i, file in enumerate(files):
    # Rename every entry to rename_<index>.jpg. NOTE(review): assumes the
    # directory contains only regular image files -- confirm before running.
    os.rename(path + file, path + "rename_{}".format(i)+".jpg")
print('done!')
| 27.166667
| 100
| 0.653374
|
import os
import re
path = 'myimages/'
files = os.listdir(path)
files.sort(key=lambda var:[int(x) if x.isdigit() else x for x in re.findall(r'[^0-9]|[0-9]+', var)])
for i, file in enumerate(files):
os.rename(path + file, path + "rename_{}".format(i)+".jpg")
print('done!')
| true
| true
|
79064728f1c2ca0961b36d99d98adda990fc58fc
| 16,175
|
py
|
Python
|
eval.py
|
FLyingLSJ/ssd.pytorch
|
9caca0788f0bebab345f969a7d3c1f8b2081b809
|
[
"MIT"
] | 1
|
2020-07-09T12:56:20.000Z
|
2020-07-09T12:56:20.000Z
|
eval.py
|
FLyingLSJ/ssd.pytorch
|
9caca0788f0bebab345f969a7d3c1f8b2081b809
|
[
"MIT"
] | null | null | null |
eval.py
|
FLyingLSJ/ssd.pytorch
|
9caca0788f0bebab345f969a7d3c1f8b2081b809
|
[
"MIT"
] | null | null | null |
"""Adapted from:
@longcw faster_rcnn_pytorch: https://github.com/longcw/faster_rcnn_pytorch
@rbgirshick py-faster-rcnn https://github.com/rbgirshick/py-faster-rcnn
Licensed under The MIT License [see LICENSE for details]
"""
from __future__ import print_function
import torch
import torch.nn as nn
import torch.backends.cudnn as cudnn
from torch.autograd import Variable
from data import VOC_ROOT, VOCAnnotationTransform, VOCDetection, BaseTransform
from data import VOC_CLASSES as labelmap
import torch.utils.data as data
from ssd import build_ssd
import sys
import os
import time
import argparse
import numpy as np
import pickle
import cv2
if sys.version_info[0] == 2:
import xml.etree.cElementTree as ET
else:
import xml.etree.ElementTree as ET
def str2bool(v):
    """Map a command-line flag string to a bool (truthy: yes/true/t/1)."""
    truthy = ("yes", "true", "t", "1")
    return v.lower() in truthy
# Command-line interface for the evaluation script.
parser = argparse.ArgumentParser(
    description='Single Shot MultiBox Detector Evaluation')
parser.add_argument('--trained_model',
                    default='weights/ssd300_mAP_77.43_v2.pth', type=str,
                    help='Trained state_dict file path to open')
parser.add_argument('--save_folder', default='eval/', type=str,
                    help='File path to save results')
parser.add_argument('--confidence_threshold', default=0.5, type=float,
                    help='Detection confidence threshold')
parser.add_argument('--top_k', default=5, type=int,
                    help='Further restrict the number of predictions to parse')
parser.add_argument('--cuda', default=False, type=str2bool,
                    help='Use cuda to train model')
parser.add_argument('--voc_root', default=VOC_ROOT,
                    help='Location of VOC root directory')
parser.add_argument('--cleanup', default=True, type=str2bool,
                    help='Cleanup and remove results files following eval')

args = parser.parse_args()

# Make sure the output directory exists before any results are written.
if not os.path.exists(args.save_folder):
    os.mkdir(args.save_folder)

# Select the default tensor type: CUDA tensors only when a device exists
# AND the user asked for it; warn when a device is present but unused.
if torch.cuda.is_available():
    if args.cuda:
        torch.set_default_tensor_type('torch.cuda.FloatTensor')
    if not args.cuda:
        print("WARNING: It looks like you have a CUDA device, but aren't using \
CUDA.  Run with --cuda for optimal eval speed.")
        torch.set_default_tensor_type('torch.FloatTensor')
else:
    torch.set_default_tensor_type('torch.FloatTensor')
# Path templates for PASCAL VOC 2007 annotations and images ('%s' is the
# image id).
annopath = os.path.join(args.voc_root, 'VOC2007', 'Annotations', '%s.xml')
imgpath = os.path.join(args.voc_root, 'VOC2007', 'JPEGImages', '%s.jpg')
# Image-set list template. Fixed: the original assigned imgsetpath only on
# platforms reported as "linux" or "win", leaving the name undefined (a
# NameError later) on e.g. macOS; default to the '{:s}' form instead.
if sys.platform.startswith("win"):
    imgsetpath = os.path.join(args.voc_root, 'VOC2007', 'ImageSets', 'Main', '{}.txt')
else:
    imgsetpath = os.path.join(args.voc_root, 'VOC2007', 'ImageSets', 'Main', '{:s}.txt')
YEAR = '2007'
devkit_path = args.voc_root + 'VOC' + YEAR
# Per-channel mean subtracted by the input transform.
dataset_mean = (104, 117, 123)
set_type = 'test'
class Timer(object):
    """A simple timer."""

    def __init__(self):
        # Aggregate statistics across tic/toc cycles.
        self.total_time = 0.
        self.calls = 0
        self.start_time = 0.
        self.diff = 0.
        self.average_time = 0.

    def tic(self):
        # time.time instead of time.clock: clock does not normalize for
        # multithreading.
        self.start_time = time.time()

    def toc(self, average=True):
        """Stop timing and return the average elapsed time per call (or
        just this call's elapsed time when average is False)."""
        self.diff = time.time() - self.start_time
        self.total_time += self.diff
        self.calls += 1
        self.average_time = self.total_time / self.calls
        return self.average_time if average else self.diff
def parse_rec(filename):
    """Parse a PASCAL VOC xml annotation file into a list of object dicts,
    converting the 1-based VOC box coordinates to 0-based."""
    tree = ET.parse(filename)
    parsed = []
    for obj in tree.findall('object'):
        bndbox = obj.find('bndbox')
        parsed.append({
            'name': obj.find('name').text,
            'pose': obj.find('pose').text,
            'truncated': int(obj.find('truncated').text),
            'difficult': int(obj.find('difficult').text),
            # VOC stores 1-based pixel coordinates; shift to 0-based.
            'bbox': [int(bndbox.find(tag).text) - 1
                     for tag in ('xmin', 'ymin', 'xmax', 'ymax')],
        })
    return parsed
def get_output_dir(name, phase):
    """Return the directory <name>/<phase> where experimental artifacts are
    placed, creating it first if it does not exist."""
    out_dir = os.path.join(name, phase)
    if not os.path.exists(out_dir):
        os.makedirs(out_dir)
    return out_dir
def get_voc_results_file_template(image_set, cls):
    """Return the per-class results file path under devkit_path/results,
    e.g. VOCdevkit/VOC2007/results/det_test_aeroplane.txt, creating the
    results directory if needed."""
    results_dir = os.path.join(devkit_path, 'results')
    if not os.path.exists(results_dir):
        os.makedirs(results_dir)
    return os.path.join(results_dir, 'det_%s_%s.txt' % (image_set, cls))
def write_voc_results_file(all_boxes, dataset):
    """Write one results file per class in VOCdevkit format.

    Each line is ``<image_id> <score> <xmin> <ymin> <xmax> <ymax>`` with
    box coordinates converted back to the devkit's 1-based convention.
    """
    for cls_ind, cls in enumerate(labelmap):
        print('Writing {:s} VOC results file'.format(cls))
        filename = get_voc_results_file_template(set_type, cls)
        with open(filename, 'wt') as f:
            for im_ind, index in enumerate(dataset.ids):
                dets = all_boxes[cls_ind+1][im_ind]
                # Fixed: `dets` may be an empty list OR an ndarray of
                # detections; `dets == []` is ambiguous/deprecated for
                # ndarrays, so test emptiness by length instead.
                if len(dets) == 0:
                    continue
                # the VOCdevkit expects 1-based indices
                for k in range(dets.shape[0]):
                    f.write('{:s} {:.3f} {:.1f} {:.1f} {:.1f} {:.1f}\n'.
                            format(index[1], dets[k, -1],
                                   dets[k, 0] + 1, dets[k, 1] + 1,
                                   dets[k, 2] + 1, dets[k, 3] + 1))
def do_python_eval(output_dir='output', use_07=True):
    """Evaluate the written per-class result files with voc_eval, print the
    per-class and mean AP, and pickle each class's PR curve to output_dir.

    Arguments:
    output_dir -- directory receiving the <class>_pr.pkl files
    use_07 -- use the VOC-07 11-point AP metric instead of the exact one
    """
    # Parsed annotations are cached here between runs.
    cachedir = os.path.join(devkit_path, 'annotations_cache')
    aps = []
    # The PASCAL VOC metric changed in 2010
    use_07_metric = use_07
    print('VOC07 metric? ' + ('Yes' if use_07_metric else 'No'))
    if not os.path.isdir(output_dir):
        os.mkdir(output_dir)
    for i, cls in enumerate(labelmap):
        # Each class was written to its own detection results file.
        filename = get_voc_results_file_template(set_type, cls)
        rec, prec, ap = voc_eval(
            filename, annopath, imgsetpath.format(set_type), cls, cachedir,
            ovthresh=0.5, use_07_metric=use_07_metric)
        aps += [ap]
        print('AP for {} = {:.4f}'.format(cls, ap))
        # Persist the precision/recall curve for later inspection.
        with open(os.path.join(output_dir, cls + '_pr.pkl'), 'wb') as f:
            pickle.dump({'rec': rec, 'prec': prec, 'ap': ap}, f)
    print('Mean AP = {:.4f}'.format(np.mean(aps)))
    print('~~~~~~~~')
    print('Results:')
    for ap in aps:
        print('{:.3f}'.format(ap))
    print('{:.3f}'.format(np.mean(aps)))
    print('~~~~~~~~')
    print('')
    print('--------------------------------------------------------------')
    print('Results computed with the **unofficial** Python eval code.')
    print('Results should be very close to the official MATLAB eval code.')
    print('--------------------------------------------------------------')
def voc_ap(rec, prec, use_07_metric=True):
    """Compute VOC AP from recall/precision arrays.

    With use_07_metric (default True) the VOC-07 11-point interpolation is
    used; otherwise the exact area under the interpolated PR curve.
    """
    if use_07_metric:
        # 11-point metric: average the interpolated precision at recall
        # thresholds 0.0, 0.1, ..., 1.0.
        ap = 0.
        for threshold in np.arange(0., 1.1, 0.1):
            above = rec >= threshold
            p = np.max(prec[above]) if np.sum(above) > 0 else 0
            ap = ap + p / 11.
        return ap
    # Exact AP: append sentinel values at both ends of the curve.
    mrec = np.concatenate(([0.], rec, [1.]))
    mpre = np.concatenate(([0.], prec, [0.]))
    # Precision envelope: make precision monotonically non-increasing
    # going from right to left.
    for i in range(mpre.size - 1, 0, -1):
        mpre[i - 1] = np.maximum(mpre[i - 1], mpre[i])
    # Area under the curve, summed over the recall steps where the
    # recall value actually changes.
    changed = np.where(mrec[1:] != mrec[:-1])[0]
    return np.sum((mrec[changed + 1] - mrec[changed]) * mpre[changed + 1])
def voc_eval(detpath,
             annopath,
             imagesetfile,
             classname,
             cachedir,
             ovthresh=0.5,
             use_07_metric=True):
    """rec, prec, ap = voc_eval(detpath, annopath, imagesetfile, classname,
                                cachedir, [ovthresh], [use_07_metric])

    Top level function that does the PASCAL VOC evaluation.

    detpath: Path to detections;
        detpath.format(classname) should produce the detection results file.
    annopath: Path to annotations;
        annopath % imagename should be the xml annotations file.
    imagesetfile: Text file containing the list of images, one per line.
    classname: Category name to evaluate.
    cachedir: Directory for caching the parsed annotations (pickle).
    [ovthresh]: IoU threshold for a detection to count as a TP (default 0.5).
    [use_07_metric]: Whether to use VOC07's 11 point AP computation
        (default True).

    Returns (rec, prec, ap); all three are -1 when there are no detections.
    """
    # Load (or build and cache) the ground-truth annotations.
    if not os.path.isdir(cachedir):
        os.mkdir(cachedir)
    cachefile = os.path.join(cachedir, 'annots.pkl')
    with open(imagesetfile, 'r') as f:
        lines = f.readlines()
    imagenames = [x.strip() for x in lines]
    if not os.path.isfile(cachefile):
        recs = {}
        for i, imagename in enumerate(imagenames):
            recs[imagename] = parse_rec(annopath % (imagename))
            if i % 100 == 0:
                print('Reading annotation for {:d}/{:d}'.format(
                    i + 1, len(imagenames)))
        print('Saving cached annotations to {:s}'.format(cachefile))
        with open(cachefile, 'wb') as f:
            pickle.dump(recs, f)
    else:
        with open(cachefile, 'rb') as f:
            recs = pickle.load(f)

    # Extract the ground-truth objects for this class.
    class_recs = {}
    npos = 0
    for imagename in imagenames:
        R = [obj for obj in recs[imagename] if obj['name'] == classname]
        bbox = np.array([x['bbox'] for x in R])
        # FIX: np.bool was deprecated and removed in NumPy >= 1.24; the
        # builtin bool is the documented replacement and is equivalent here.
        difficult = np.array([x['difficult'] for x in R]).astype(bool)
        det = [False] * len(R)
        # 'difficult' objects do not count towards the positive total.
        npos = npos + sum(~difficult)
        class_recs[imagename] = {'bbox': bbox,
                                 'difficult': difficult,
                                 'det': det}

    # Read the detections for this class.
    detfile = detpath.format(classname)
    with open(detfile, 'r') as f:
        lines = f.readlines()
    # FIX: 'any(lines) == 1' compared a bool to an int; an empty results
    # file is simply an empty list of lines.
    if lines:
        splitlines = [x.strip().split(' ') for x in lines]
        image_ids = [x[0] for x in splitlines]
        confidence = np.array([float(x[1]) for x in splitlines])
        BB = np.array([[float(z) for z in x[2:]] for x in splitlines])
        # Process detections in decreasing confidence order.
        sorted_ind = np.argsort(-confidence)
        BB = BB[sorted_ind, :]
        image_ids = [image_ids[x] for x in sorted_ind]

        # Go down dets and mark TPs and FPs.
        nd = len(image_ids)
        tp = np.zeros(nd)
        fp = np.zeros(nd)
        for d in range(nd):
            R = class_recs[image_ids[d]]
            bb = BB[d, :].astype(float)
            ovmax = -np.inf
            BBGT = R['bbox'].astype(float)
            if BBGT.size > 0:
                # IoU between this detection and every GT box of the image.
                ixmin = np.maximum(BBGT[:, 0], bb[0])
                iymin = np.maximum(BBGT[:, 1], bb[1])
                ixmax = np.minimum(BBGT[:, 2], bb[2])
                iymax = np.minimum(BBGT[:, 3], bb[3])
                iw = np.maximum(ixmax - ixmin, 0.)
                ih = np.maximum(iymax - iymin, 0.)
                inters = iw * ih
                uni = ((bb[2] - bb[0]) * (bb[3] - bb[1]) +
                       (BBGT[:, 2] - BBGT[:, 0]) *
                       (BBGT[:, 3] - BBGT[:, 1]) - inters)
                overlaps = inters / uni
                ovmax = np.max(overlaps)
                jmax = np.argmax(overlaps)
            if ovmax > ovthresh:
                if not R['difficult'][jmax]:
                    # Only the first match against a GT box is a TP;
                    # duplicate detections of the same box are FPs.
                    if not R['det'][jmax]:
                        tp[d] = 1.
                        R['det'][jmax] = 1
                    else:
                        fp[d] = 1.
            else:
                fp[d] = 1.

        # Compute precision / recall curves.
        fp = np.cumsum(fp)
        tp = np.cumsum(tp)
        rec = tp / float(npos)
        # avoid divide by zero in case the first detection matches a difficult
        # ground truth
        prec = tp / np.maximum(tp + fp, np.finfo(np.float64).eps)
        ap = voc_ap(rec, prec, use_07_metric)
    else:
        rec = -1.
        prec = -1.
        ap = -1.
    return rec, prec, ap
def test_net(save_folder, net, cuda, dataset, transform, top_k,
             im_size=300, thresh=0.05):
    """Run *net* over every image in *dataset*, collect per-class detections,
    dump them to detections.pkl, then invoke evaluate_detections().

    NOTE(review): save_folder, cuda, transform, top_k, im_size and thresh
    are accepted but never read in this body; device placement uses the
    global args.cuda instead of the *cuda* parameter — confirm intent.
    """
    num_images = len(dataset)
    # all detections are collected into:
    #   all_boxes[cls][image] = N x 5 array of detections in
    #   (x1, y1, x2, y2, score); index 0 on the class axis is background.
    all_boxes = [[[] for _ in range(num_images)]
                 for _ in range(len(labelmap)+1)]
    # timers
    _t = {'im_detect': Timer(), 'misc': Timer()}
    output_dir = get_output_dir('ssd300_120000', set_type)
    det_file = os.path.join(output_dir, 'detections.pkl')
    for i in range(num_images):
        im, gt, h, w = dataset.pull_item(i)
        # add a leading batch dimension before the forward pass
        x = Variable(im.unsqueeze(0))
        if args.cuda:
            x = x.cuda()
        _t['im_detect'].tic()
        detections = net(x).data
        detect_time = _t['im_detect'].toc(average=False)
        # skip j = 0, because it's the background class
        for j in range(1, detections.size(1)):
            dets = detections[0, j, :]
            # keep only rows whose score (column 0 of each 5-vector) is > 0
            mask = dets[:, 0].gt(0.).expand(5, dets.size(0)).t()
            dets = torch.masked_select(dets, mask).view(-1, 5)
            if dets.size(0) == 0:
                continue
            # box coordinates are normalized to [0, 1]; rescale to pixels
            boxes = dets[:, 1:]
            boxes[:, 0] *= w
            boxes[:, 2] *= w
            boxes[:, 1] *= h
            boxes[:, 3] *= h
            scores = dets[:, 0].cpu().numpy()
            cls_dets = np.hstack((boxes.cpu().numpy(),
                                  scores[:, np.newaxis])).astype(np.float32,
                                                                 copy=False)
            all_boxes[j][i] = cls_dets
        print('im_detect: {:d}/{:d} {:.3f}s'.format(i + 1,
                                                    num_images, detect_time))
    with open(det_file, 'wb') as f:
        pickle.dump(all_boxes, f, pickle.HIGHEST_PROTOCOL)
    print('Evaluating detections')
    evaluate_detections(all_boxes, output_dir, dataset)
def evaluate_detections(box_list, output_dir, dataset):
    """Write per-class VOC results files, then compute and print the APs."""
    write_voc_results_file(box_list, dataset)
    do_python_eval(output_dir)
if __name__ == '__main__':
    # load net
    num_classes = len(labelmap) + 1  # +1 for the background class
    net = build_ssd('test', 300, num_classes)  # initialize SSD
    # map_location='cpu' lets the checkpoint load on a CPU-only machine;
    # the model is moved to the GPU below when args.cuda is set.
    net.load_state_dict(torch.load(args.trained_model, map_location='cpu'))
    net.eval()
    print('Finished loading model!')
    # load data
    dataset = VOCDetection(args.voc_root,
                           [('2007', set_type)],
                           BaseTransform(300, dataset_mean),
                           VOCAnnotationTransform())
    if args.cuda:
        net = net.cuda()
        cudnn.benchmark = True
    # evaluation
    test_net(args.save_folder, net, args.cuda, dataset,
             BaseTransform(net.size, dataset_mean), args.top_k, 300,
             thresh=args.confidence_threshold)
| 36.43018
| 109
| 0.563277
|
from __future__ import print_function
import torch
import torch.nn as nn
import torch.backends.cudnn as cudnn
from torch.autograd import Variable
from data import VOC_ROOT, VOCAnnotationTransform, VOCDetection, BaseTransform
from data import VOC_CLASSES as labelmap
import torch.utils.data as data
from ssd import build_ssd
import sys
import os
import time
import argparse
import numpy as np
import pickle
import cv2
if sys.version_info[0] == 2:
import xml.etree.cElementTree as ET
else:
import xml.etree.ElementTree as ET
def str2bool(v):
    """Interpret common truthy strings ('yes', 'true', 't', '1') as True."""
    truthy = ("yes", "true", "t", "1")
    return v.lower() in truthy
# Command-line interface and module-level evaluation configuration.
parser = argparse.ArgumentParser(
    description='Single Shot MultiBox Detector Evaluation')
parser.add_argument('--trained_model',
                    default='weights/ssd300_mAP_77.43_v2.pth', type=str,
                    help='Trained state_dict file path to open')
parser.add_argument('--save_folder', default='eval/', type=str,
                    help='File path to save results')
parser.add_argument('--confidence_threshold', default=0.5, type=float,
                    help='Detection confidence threshold')
parser.add_argument('--top_k', default=5, type=int,
                    help='Further restrict the number of predictions to parse')
parser.add_argument('--cuda', default=False, type=str2bool,
                    help='Use cuda to train model')
parser.add_argument('--voc_root', default=VOC_ROOT,
                    help='Location of VOC root directory')
parser.add_argument('--cleanup', default=True, type=str2bool,
                    help='Cleanup and remove results files following eval')
args = parser.parse_args()

if not os.path.exists(args.save_folder):
    os.mkdir(args.save_folder)

# Choose the default tensor type once, based on hardware and --cuda.
if torch.cuda.is_available():
    if args.cuda:
        torch.set_default_tensor_type('torch.cuda.FloatTensor')
    else:
        print("WARNING: It looks like you have a CUDA device, but aren't using \
CUDA. Run with --cuda for optimal eval speed.")
        torch.set_default_tensor_type('torch.FloatTensor')
else:
    torch.set_default_tensor_type('torch.FloatTensor')

# Path templates inside the VOC devkit.
annopath = os.path.join(args.voc_root, 'VOC2007', 'Annotations', '%s.xml')
imgpath = os.path.join(args.voc_root, 'VOC2007', 'JPEGImages', '%s.jpg')
# FIX: '{:s}' and '{}' format a str argument identically, so a single
# template works on every platform.  The previous per-platform split left
# imgsetpath undefined on macOS and other non-linux/non-win systems.
imgsetpath = os.path.join(args.voc_root, 'VOC2007', 'ImageSets', 'Main', '{:s}.txt')
YEAR = '2007'
devkit_path = args.voc_root + 'VOC' + YEAR
dataset_mean = (104, 117, 123)
set_type = 'test'
class Timer(object):
    """Accumulating wall-clock timer with tic()/toc() semantics."""

    def __init__(self):
        self.total_time = 0.
        self.calls = 0
        self.start_time = 0.
        self.diff = 0.
        self.average_time = 0.

    def tic(self):
        # time.time() rather than time.clock(): clock() did not normalize
        # across threads and was removed in Python 3.8.
        self.start_time = time.time()

    def toc(self, average=True):
        """Stop timing; return the running average (default) or last diff."""
        self.diff = time.time() - self.start_time
        self.total_time += self.diff
        self.calls += 1
        self.average_time = self.total_time / self.calls
        return self.average_time if average else self.diff
def parse_rec(filename):
    """Parse a PASCAL VOC XML annotation file into a list of object dicts."""
    objects = []
    for obj in ET.parse(filename).findall('object'):
        bndbox = obj.find('bndbox')
        # VOC coordinates are 1-based; shift them to 0-based pixel indices.
        objects.append({
            'name': obj.find('name').text,
            'pose': obj.find('pose').text,
            'truncated': int(obj.find('truncated').text),
            'difficult': int(obj.find('difficult').text),
            'bbox': [int(bndbox.find('xmin').text) - 1,
                     int(bndbox.find('ymin').text) - 1,
                     int(bndbox.find('xmax').text) - 1,
                     int(bndbox.find('ymax').text) - 1],
        })
    return objects
def get_output_dir(name, phase):
    """Return the directory <name>/<phase>, creating it when absent."""
    out_dir = os.path.join(name, phase)
    if not os.path.exists(out_dir):
        os.makedirs(out_dir)
    return out_dir
def get_voc_results_file_template(image_set, cls):
    """Return the per-class VOC results path, e.g.
    <devkit_path>/results/det_test_aeroplane.txt, creating results/ if needed.
    """
    results_dir = os.path.join(devkit_path, 'results')
    if not os.path.exists(results_dir):
        os.makedirs(results_dir)
    return os.path.join(results_dir, 'det_' + image_set + '_%s.txt' % (cls))
def write_voc_results_file(all_boxes, dataset):
    """Dump detections to one VOC-format results file per class.

    all_boxes[cls][image] is an N x 5 array (x1, y1, x2, y2, score);
    index 0 of the class axis is the background class and is skipped.
    """
    for cls_ind, cls in enumerate(labelmap):
        print('Writing {:s} VOC results file'.format(cls))
        filename = get_voc_results_file_template(set_type, cls)
        with open(filename, 'wt') as f:
            for im_ind, index in enumerate(dataset.ids):
                dets = all_boxes[cls_ind + 1][im_ind]
                # FIX: 'dets == []' is an elementwise (and deprecated)
                # comparison once dets is a numpy array; test emptiness by
                # length, which works for both the initial [] and arrays.
                if len(dets) == 0:
                    continue
                # the VOCdevkit expects 1-based indices
                for k in range(dets.shape[0]):
                    f.write('{:s} {:.3f} {:.1f} {:.1f} {:.1f} {:.1f}\n'.
                            format(index[1], dets[k, -1],
                                   dets[k, 0] + 1, dets[k, 1] + 1,
                                   dets[k, 2] + 1, dets[k, 3] + 1))
def do_python_eval(output_dir='output', use_07=True):
    """Run the per-class PASCAL VOC evaluation and print AP / mean AP.

    Relies on module-level globals: devkit_path, labelmap, set_type,
    annopath and imgsetpath.  Per-class PR curves are pickled into
    *output_dir* as <class>_pr.pkl.
    """
    cachedir = os.path.join(devkit_path, 'annotations_cache')
    use_07_metric = use_07
    print('VOC07 metric? ' + ('Yes' if use_07_metric else 'No'))
    if not os.path.isdir(output_dir):
        os.mkdir(output_dir)
    aps = []
    for cls in labelmap:
        det_filename = get_voc_results_file_template(set_type, cls)
        rec, prec, ap = voc_eval(
            det_filename, annopath, imgsetpath.format(set_type), cls,
            cachedir, ovthresh=0.5, use_07_metric=use_07_metric)
        aps.append(ap)
        print('AP for {} = {:.4f}'.format(cls, ap))
        # Persist the raw PR curve so plots can be regenerated later.
        with open(os.path.join(output_dir, cls + '_pr.pkl'), 'wb') as f:
            pickle.dump({'rec': rec, 'prec': prec, 'ap': ap}, f)
    print('Mean AP = {:.4f}'.format(np.mean(aps)))
    print('~~~~~~~~')
    print('Results:')
    for ap in aps:
        print('{:.3f}'.format(ap))
    print('{:.3f}'.format(np.mean(aps)))
    print('~~~~~~~~')
    print('')
    print('--------------------------------------------------------------')
    print('Results computed with the **unofficial** Python eval code.')
    print('Results should be very close to the official MATLAB eval code.')
    print('--------------------------------------------------------------')
def voc_ap(rec, prec, use_07_metric=True):
    """Compute VOC average precision from recall/precision arrays.

    When *use_07_metric* is True the VOC07 11-point interpolation is used
    (default: True); otherwise the exact area under the interpolated
    precision/recall curve is returned.
    """
    if use_07_metric:
        # VOC07: average the interpolated precision at 11 recall levels.
        ap = 0.
        for t in np.arange(0., 1.1, 0.1):
            above = rec >= t
            p = np.max(prec[above]) if np.sum(above) > 0 else 0
            ap += p / 11.
    else:
        # Exact AP: pad the curve with sentinel points ...
        mrec = np.concatenate(([0.], rec, [1.]))
        mpre = np.concatenate(([0.], prec, [0.]))
        # ... make precision monotonically non-increasing (envelope) ...
        for k in range(mpre.size - 1, 0, -1):
            mpre[k - 1] = np.maximum(mpre[k - 1], mpre[k])
        # ... and integrate precision over the recall steps.
        idx = np.where(mrec[1:] != mrec[:-1])[0]
        ap = np.sum((mrec[idx + 1] - mrec[idx]) * mpre[idx + 1])
    return ap
def voc_eval(detpath,
             annopath,
             imagesetfile,
             classname,
             cachedir,
             ovthresh=0.5,
             use_07_metric=True):
    """rec, prec, ap = voc_eval(detpath, annopath, imagesetfile, classname,
                                cachedir, [ovthresh], [use_07_metric])

    Top level function that does the PASCAL VOC evaluation.

    detpath: Path to detections;
        detpath.format(classname) should produce the detection results file.
    annopath: Path to annotations;
        annopath % imagename should be the xml annotations file.
    imagesetfile: Text file containing the list of images, one per line.
    classname: Category name to evaluate.
    cachedir: Directory for caching the parsed annotations (pickle).
    [ovthresh]: IoU threshold for a detection to count as a TP (default 0.5).
    [use_07_metric]: Whether to use VOC07's 11 point AP computation
        (default True).

    Returns (rec, prec, ap); all three are -1 when there are no detections.
    """
    # Load (or build and cache) the ground-truth annotations.
    if not os.path.isdir(cachedir):
        os.mkdir(cachedir)
    cachefile = os.path.join(cachedir, 'annots.pkl')
    with open(imagesetfile, 'r') as f:
        lines = f.readlines()
    imagenames = [x.strip() for x in lines]
    if not os.path.isfile(cachefile):
        recs = {}
        for i, imagename in enumerate(imagenames):
            recs[imagename] = parse_rec(annopath % (imagename))
            if i % 100 == 0:
                print('Reading annotation for {:d}/{:d}'.format(
                    i + 1, len(imagenames)))
        print('Saving cached annotations to {:s}'.format(cachefile))
        with open(cachefile, 'wb') as f:
            pickle.dump(recs, f)
    else:
        with open(cachefile, 'rb') as f:
            recs = pickle.load(f)

    # Extract the ground-truth objects for this class.
    class_recs = {}
    npos = 0
    for imagename in imagenames:
        R = [obj for obj in recs[imagename] if obj['name'] == classname]
        bbox = np.array([x['bbox'] for x in R])
        # FIX: np.bool was deprecated and removed in NumPy >= 1.24; the
        # builtin bool is the documented replacement and is equivalent here.
        difficult = np.array([x['difficult'] for x in R]).astype(bool)
        det = [False] * len(R)
        # 'difficult' objects do not count towards the positive total.
        npos = npos + sum(~difficult)
        class_recs[imagename] = {'bbox': bbox,
                                 'difficult': difficult,
                                 'det': det}

    # Read the detections for this class.
    detfile = detpath.format(classname)
    with open(detfile, 'r') as f:
        lines = f.readlines()
    # FIX: 'any(lines) == 1' compared a bool to an int; an empty results
    # file is simply an empty list of lines.
    if lines:
        splitlines = [x.strip().split(' ') for x in lines]
        image_ids = [x[0] for x in splitlines]
        confidence = np.array([float(x[1]) for x in splitlines])
        BB = np.array([[float(z) for z in x[2:]] for x in splitlines])
        # Process detections in decreasing confidence order.
        sorted_ind = np.argsort(-confidence)
        BB = BB[sorted_ind, :]
        image_ids = [image_ids[x] for x in sorted_ind]

        # Go down dets and mark TPs and FPs.
        nd = len(image_ids)
        tp = np.zeros(nd)
        fp = np.zeros(nd)
        for d in range(nd):
            R = class_recs[image_ids[d]]
            bb = BB[d, :].astype(float)
            ovmax = -np.inf
            BBGT = R['bbox'].astype(float)
            if BBGT.size > 0:
                # IoU between this detection and every GT box of the image.
                ixmin = np.maximum(BBGT[:, 0], bb[0])
                iymin = np.maximum(BBGT[:, 1], bb[1])
                ixmax = np.minimum(BBGT[:, 2], bb[2])
                iymax = np.minimum(BBGT[:, 3], bb[3])
                iw = np.maximum(ixmax - ixmin, 0.)
                ih = np.maximum(iymax - iymin, 0.)
                inters = iw * ih
                uni = ((bb[2] - bb[0]) * (bb[3] - bb[1]) +
                       (BBGT[:, 2] - BBGT[:, 0]) *
                       (BBGT[:, 3] - BBGT[:, 1]) - inters)
                overlaps = inters / uni
                ovmax = np.max(overlaps)
                jmax = np.argmax(overlaps)
            if ovmax > ovthresh:
                if not R['difficult'][jmax]:
                    # Only the first match against a GT box is a TP;
                    # duplicate detections of the same box are FPs.
                    if not R['det'][jmax]:
                        tp[d] = 1.
                        R['det'][jmax] = 1
                    else:
                        fp[d] = 1.
            else:
                fp[d] = 1.

        # Compute precision / recall curves.
        fp = np.cumsum(fp)
        tp = np.cumsum(tp)
        rec = tp / float(npos)
        # avoid divide by zero in case the first detection matches a difficult
        # ground truth
        prec = tp / np.maximum(tp + fp, np.finfo(np.float64).eps)
        ap = voc_ap(rec, prec, use_07_metric)
    else:
        rec = -1.
        prec = -1.
        ap = -1.
    return rec, prec, ap
def test_net(save_folder, net, cuda, dataset, transform, top_k,
             im_size=300, thresh=0.05):
    """Run *net* over every image in *dataset*, collect per-class detections,
    dump them to detections.pkl, then invoke evaluate_detections().

    NOTE(review): save_folder, cuda, transform, top_k, im_size and thresh
    are accepted but never read in this body; device placement uses the
    global args.cuda instead of the *cuda* parameter — confirm intent.
    """
    num_images = len(dataset)
    # all detections are collected into:
    #   all_boxes[cls][image] = N x 5 array of detections in
    #   (x1, y1, x2, y2, score); index 0 on the class axis is background.
    all_boxes = [[[] for _ in range(num_images)]
                 for _ in range(len(labelmap)+1)]
    # timers
    _t = {'im_detect': Timer(), 'misc': Timer()}
    output_dir = get_output_dir('ssd300_120000', set_type)
    det_file = os.path.join(output_dir, 'detections.pkl')
    for i in range(num_images):
        im, gt, h, w = dataset.pull_item(i)
        # add a leading batch dimension before the forward pass
        x = Variable(im.unsqueeze(0))
        if args.cuda:
            x = x.cuda()
        _t['im_detect'].tic()
        detections = net(x).data
        detect_time = _t['im_detect'].toc(average=False)
        # skip j = 0, because it's the background class
        for j in range(1, detections.size(1)):
            dets = detections[0, j, :]
            # keep only rows whose score (column 0 of each 5-vector) is > 0
            mask = dets[:, 0].gt(0.).expand(5, dets.size(0)).t()
            dets = torch.masked_select(dets, mask).view(-1, 5)
            if dets.size(0) == 0:
                continue
            # box coordinates are normalized to [0, 1]; rescale to pixels
            boxes = dets[:, 1:]
            boxes[:, 0] *= w
            boxes[:, 2] *= w
            boxes[:, 1] *= h
            boxes[:, 3] *= h
            scores = dets[:, 0].cpu().numpy()
            cls_dets = np.hstack((boxes.cpu().numpy(),
                                  scores[:, np.newaxis])).astype(np.float32,
                                                                 copy=False)
            all_boxes[j][i] = cls_dets
        print('im_detect: {:d}/{:d} {:.3f}s'.format(i + 1,
                                                    num_images, detect_time))
    with open(det_file, 'wb') as f:
        pickle.dump(all_boxes, f, pickle.HIGHEST_PROTOCOL)
    print('Evaluating detections')
    evaluate_detections(all_boxes, output_dir, dataset)
def evaluate_detections(box_list, output_dir, dataset):
    """Write per-class VOC results files, then compute and print the APs."""
    write_voc_results_file(box_list, dataset)
    do_python_eval(output_dir)
if __name__ == '__main__':
    # load net
    num_classes = len(labelmap) + 1  # +1 for the background class
    net = build_ssd('test', 300, num_classes)  # initialize SSD
    # map_location='cpu' lets the checkpoint load on a CPU-only machine;
    # the model is moved to the GPU below when args.cuda is set.
    net.load_state_dict(torch.load(args.trained_model, map_location='cpu'))
    net.eval()
    print('Finished loading model!')
    # load data
    dataset = VOCDetection(args.voc_root,
                           [('2007', set_type)],
                           BaseTransform(300, dataset_mean),
                           VOCAnnotationTransform())
    if args.cuda:
        net = net.cuda()
        cudnn.benchmark = True
    # evaluation
    test_net(args.save_folder, net, args.cuda, dataset,
             BaseTransform(net.size, dataset_mean), args.top_k, 300,
             thresh=args.confidence_threshold)
| true
| true
|
7906473a9cd846f2a116b74c8b89d57712d64dc9
| 759
|
py
|
Python
|
setup.py
|
aroberge/nonstandard
|
b415e4b5360a44ee2e1927a2b5b45c4d74ca9803
|
[
"MIT"
] | 2
|
2017-04-30T23:27:06.000Z
|
2017-05-01T18:30:54.000Z
|
setup.py
|
aroberge/nonstandard
|
b415e4b5360a44ee2e1927a2b5b45c4d74ca9803
|
[
"MIT"
] | null | null | null |
setup.py
|
aroberge/nonstandard
|
b415e4b5360a44ee2e1927a2b5b45c4d74ca9803
|
[
"MIT"
] | null | null | null |
#pylint: skip-file
# Setuptools packaging script for the (obsolete) *nonstandard* package.
from setuptools import setup, find_packages
from pypandoc import convert
def convert_md(filename):
    """Convert *filename* from Markdown to reST for PyPI's long_description."""
    return convert(filename, 'rst')
# NOTE(review): building requires pypandoc (and the pandoc binary) installed.
setup(name='nonstandard',
      version='0.9.3',
      description="Obsolete; see package *experimental*.",
      long_description = convert_md('README.md'),
      classifiers=[
        'Development Status :: 4 - Beta',
        'License :: OSI Approved :: MIT License',
        'Programming Language :: Python :: 3.4',
        'Topic :: Software Development :: Interpreters',
      ],
      url='https://github.com/aroberge/nonstandard',
      author='André Roberge',
      author_email='Andre.Roberge@gmail.com',
      license='MIT',
      packages=find_packages(exclude=['dist', 'build', 'tools']),
      zip_safe=False)
| 29.192308
| 63
| 0.661397
|
# Setuptools packaging script for the (obsolete) *nonstandard* package.
from setuptools import setup, find_packages
from pypandoc import convert
def convert_md(filename):
    """Convert *filename* from Markdown to reST for PyPI's long_description."""
    return convert(filename, 'rst')
# NOTE(review): building requires pypandoc (and the pandoc binary) installed.
setup(name='nonstandard',
      version='0.9.3',
      description="Obsolete; see package *experimental*.",
      long_description = convert_md('README.md'),
      classifiers=[
        'Development Status :: 4 - Beta',
        'License :: OSI Approved :: MIT License',
        'Programming Language :: Python :: 3.4',
        'Topic :: Software Development :: Interpreters',
      ],
      url='https://github.com/aroberge/nonstandard',
      author='André Roberge',
      author_email='Andre.Roberge@gmail.com',
      license='MIT',
      packages=find_packages(exclude=['dist', 'build', 'tools']),
      zip_safe=False)
| true
| true
|
79064849dfdc57c635437ee4fce0592a06c34f62
| 9,993
|
py
|
Python
|
torchvision/models/densenet.py
|
NunoEdgarGFlowHub/vision
|
86001a871d3335046e2dca7715d9babf73e6956f
|
[
"BSD-3-Clause"
] | 19
|
2018-10-30T22:24:54.000Z
|
2022-01-11T05:14:38.000Z
|
torchvision/models/densenet.py
|
NunoEdgarGFlowHub/vision
|
86001a871d3335046e2dca7715d9babf73e6956f
|
[
"BSD-3-Clause"
] | 4
|
2018-11-21T06:09:13.000Z
|
2019-04-14T15:09:37.000Z
|
torchvision/models/densenet.py
|
NunoEdgarGFlowHub/vision
|
86001a871d3335046e2dca7715d9babf73e6956f
|
[
"BSD-3-Clause"
] | 4
|
2018-11-06T00:31:25.000Z
|
2021-01-30T12:37:35.000Z
|
import re
import torch
import torch.nn as nn
import torch.nn.functional as F
import torch.utils.model_zoo as model_zoo
from collections import OrderedDict
# Public API of this module.
__all__ = ['DenseNet', 'densenet121', 'densenet169', 'densenet201', 'densenet161']
# Download URLs for the ImageNet-pretrained checkpoints (pretrained=True).
model_urls = {
    'densenet121': 'https://download.pytorch.org/models/densenet121-a639ec97.pth',
    'densenet169': 'https://download.pytorch.org/models/densenet169-b2777c0a.pth',
    'densenet201': 'https://download.pytorch.org/models/densenet201-c1103571.pth',
    'densenet161': 'https://download.pytorch.org/models/densenet161-8d451a50.pth',
}
def densenet121(pretrained=False, **kwargs):
    r"""Densenet-121 model from
    `"Densely Connected Convolutional Networks" <https://arxiv.org/pdf/1608.06993.pdf>`_

    Args:
        pretrained (bool): If True, returns a model pre-trained on ImageNet
    """
    model = DenseNet(num_init_features=64, growth_rate=32,
                     block_config=(6, 12, 24, 16), **kwargs)
    if not pretrained:
        return model
    # '.'s are no longer allowed in module names, but the previous
    # _DenseLayer used keys such as 'norm.1' / 'conv.2', and the checkpoints
    # in model_urls still contain them.  Rewrite 'norm.1' -> 'norm1' (etc.)
    # before loading the state dict.
    key_pattern = re.compile(
        r'^(.*denselayer\d+\.(?:norm|relu|conv))\.((?:[12])\.(?:weight|bias|running_mean|running_var))$')
    state_dict = model_zoo.load_url(model_urls['densenet121'])
    for old_key in list(state_dict.keys()):
        match = key_pattern.match(old_key)
        if match is None:
            continue
        state_dict[match.group(1) + match.group(2)] = state_dict.pop(old_key)
    model.load_state_dict(state_dict)
    return model
def densenet169(pretrained=False, **kwargs):
    r"""Densenet-169 model from
    `"Densely Connected Convolutional Networks" <https://arxiv.org/pdf/1608.06993.pdf>`_

    Args:
        pretrained (bool): If True, returns a model pre-trained on ImageNet
    """
    model = DenseNet(num_init_features=64, growth_rate=32,
                     block_config=(6, 12, 32, 32), **kwargs)
    if not pretrained:
        return model
    # '.'s are no longer allowed in module names, but the previous
    # _DenseLayer used keys such as 'norm.1' / 'conv.2', and the checkpoints
    # in model_urls still contain them.  Rewrite 'norm.1' -> 'norm1' (etc.)
    # before loading the state dict.
    key_pattern = re.compile(
        r'^(.*denselayer\d+\.(?:norm|relu|conv))\.((?:[12])\.(?:weight|bias|running_mean|running_var))$')
    state_dict = model_zoo.load_url(model_urls['densenet169'])
    for old_key in list(state_dict.keys()):
        match = key_pattern.match(old_key)
        if match is None:
            continue
        state_dict[match.group(1) + match.group(2)] = state_dict.pop(old_key)
    model.load_state_dict(state_dict)
    return model
def densenet201(pretrained=False, **kwargs):
    r"""Densenet-201 model from
    `"Densely Connected Convolutional Networks" <https://arxiv.org/pdf/1608.06993.pdf>`_

    Args:
        pretrained (bool): If True, returns a model pre-trained on ImageNet
    """
    model = DenseNet(num_init_features=64, growth_rate=32,
                     block_config=(6, 12, 48, 32), **kwargs)
    if not pretrained:
        return model
    # '.'s are no longer allowed in module names, but the previous
    # _DenseLayer used keys such as 'norm.1' / 'conv.2', and the checkpoints
    # in model_urls still contain them.  Rewrite 'norm.1' -> 'norm1' (etc.)
    # before loading the state dict.
    key_pattern = re.compile(
        r'^(.*denselayer\d+\.(?:norm|relu|conv))\.((?:[12])\.(?:weight|bias|running_mean|running_var))$')
    state_dict = model_zoo.load_url(model_urls['densenet201'])
    for old_key in list(state_dict.keys()):
        match = key_pattern.match(old_key)
        if match is None:
            continue
        state_dict[match.group(1) + match.group(2)] = state_dict.pop(old_key)
    model.load_state_dict(state_dict)
    return model
def densenet161(pretrained=False, **kwargs):
    r"""Densenet-161 model from
    `"Densely Connected Convolutional Networks" <https://arxiv.org/pdf/1608.06993.pdf>`_

    Args:
        pretrained (bool): If True, returns a model pre-trained on ImageNet
    """
    model = DenseNet(num_init_features=96, growth_rate=48,
                     block_config=(6, 12, 36, 24), **kwargs)
    if not pretrained:
        return model
    # '.'s are no longer allowed in module names, but the previous
    # _DenseLayer used keys such as 'norm.1' / 'conv.2', and the checkpoints
    # in model_urls still contain them.  Rewrite 'norm.1' -> 'norm1' (etc.)
    # before loading the state dict.
    key_pattern = re.compile(
        r'^(.*denselayer\d+\.(?:norm|relu|conv))\.((?:[12])\.(?:weight|bias|running_mean|running_var))$')
    state_dict = model_zoo.load_url(model_urls['densenet161'])
    for old_key in list(state_dict.keys()):
        match = key_pattern.match(old_key)
        if match is None:
            continue
        state_dict[match.group(1) + match.group(2)] = state_dict.pop(old_key)
    model.load_state_dict(state_dict)
    return model
class _DenseLayer(nn.Sequential):
    """Bottleneck dense layer: BN-ReLU-Conv(1x1) then BN-ReLU-Conv(3x3).

    The forward pass concatenates the layer's new feature maps onto its
    input along the channel axis (the DenseNet connectivity pattern).
    """

    def __init__(self, num_input_features, growth_rate, bn_size, drop_rate):
        super(_DenseLayer, self).__init__()
        inter_channels = bn_size * growth_rate
        # Module names ('norm1', 'conv2', ...) must stay fixed: the
        # pretrained checkpoints are keyed on them.
        self.add_module('norm1', nn.BatchNorm2d(num_input_features))
        self.add_module('relu1', nn.ReLU(inplace=True))
        self.add_module('conv1', nn.Conv2d(num_input_features, inter_channels,
                                           kernel_size=1, stride=1,
                                           bias=False))
        self.add_module('norm2', nn.BatchNorm2d(inter_channels))
        self.add_module('relu2', nn.ReLU(inplace=True))
        self.add_module('conv2', nn.Conv2d(inter_channels, growth_rate,
                                           kernel_size=3, stride=1, padding=1,
                                           bias=False))
        self.drop_rate = drop_rate

    def forward(self, x):
        new_features = super(_DenseLayer, self).forward(x)
        if self.drop_rate > 0:
            new_features = F.dropout(new_features, p=self.drop_rate,
                                     training=self.training)
        # Dense connectivity: forward the input alongside the new features.
        return torch.cat([x, new_features], 1)
class _DenseBlock(nn.Sequential):
    """A stack of dense layers; layer i receives all earlier feature maps."""

    def __init__(self, num_layers, num_input_features, bn_size, growth_rate, drop_rate):
        super(_DenseBlock, self).__init__()
        for idx in range(num_layers):
            # Each layer's input widens by growth_rate channels per layer.
            in_channels = num_input_features + idx * growth_rate
            self.add_module('denselayer%d' % (idx + 1),
                            _DenseLayer(in_channels, growth_rate, bn_size,
                                        drop_rate))
class _Transition(nn.Sequential):
    """Between-block downsampler: BN-ReLU-Conv(1x1), then 2x2 average pool."""

    def __init__(self, num_input_features, num_output_features):
        super(_Transition, self).__init__()
        stages = [
            ('norm', nn.BatchNorm2d(num_input_features)),
            ('relu', nn.ReLU(inplace=True)),
            ('conv', nn.Conv2d(num_input_features, num_output_features,
                               kernel_size=1, stride=1, bias=False)),
            ('pool', nn.AvgPool2d(kernel_size=2, stride=2)),
        ]
        for stage_name, module in stages:
            self.add_module(stage_name, module)
class DenseNet(nn.Module):
    r"""Densenet-BC model class, based on
    `"Densely Connected Convolutional Networks" <https://arxiv.org/pdf/1608.06993.pdf>`_

    Args:
        growth_rate (int) - how many filters to add each layer (`k` in paper)
        block_config (list of 4 ints) - how many layers in each pooling block
        num_init_features (int) - the number of filters to learn in the first convolution layer
        bn_size (int) - multiplicative factor for number of bottle neck layers
          (i.e. bn_size * k features in the bottleneck layer)
        drop_rate (float) - dropout rate after each dense layer
        num_classes (int) - number of classification classes
    """
    def __init__(self, growth_rate=32, block_config=(6, 12, 24, 16),
                 num_init_features=64, bn_size=4, drop_rate=0, num_classes=1000):
        super(DenseNet, self).__init__()
        # Stem: 7x7/2 convolution, BN, ReLU, then 3x3/2 max pool.
        self.features = nn.Sequential(OrderedDict([
            ('conv0', nn.Conv2d(3, num_init_features, kernel_size=7, stride=2, padding=3, bias=False)),
            ('norm0', nn.BatchNorm2d(num_init_features)),
            ('relu0', nn.ReLU(inplace=True)),
            ('pool0', nn.MaxPool2d(kernel_size=3, stride=2, padding=1)),
        ]))
        # Dense blocks, with a transition (halving both channel count and
        # spatial size) after every block except the last.
        num_features = num_init_features
        for i, num_layers in enumerate(block_config):
            block = _DenseBlock(num_layers=num_layers, num_input_features=num_features,
                                bn_size=bn_size, growth_rate=growth_rate, drop_rate=drop_rate)
            self.features.add_module('denseblock%d' % (i + 1), block)
            # Each dense layer appends growth_rate channels.
            num_features = num_features + num_layers * growth_rate
            if i != len(block_config) - 1:
                trans = _Transition(num_input_features=num_features, num_output_features=num_features // 2)
                self.features.add_module('transition%d' % (i + 1), trans)
                num_features = num_features // 2
        # Final batch norm
        self.features.add_module('norm5', nn.BatchNorm2d(num_features))
        # Linear classifier head
        self.classifier = nn.Linear(num_features, num_classes)
        # Official init from torch repo.
        for m in self.modules():
            if isinstance(m, nn.Conv2d):
                nn.init.kaiming_normal_(m.weight)
            elif isinstance(m, nn.BatchNorm2d):
                nn.init.constant_(m.weight, 1)
                nn.init.constant_(m.bias, 0)
            elif isinstance(m, nn.Linear):
                nn.init.constant_(m.bias, 0)

    def forward(self, x):
        features = self.features(x)
        out = F.relu(features, inplace=True)
        # NOTE(review): kernel_size=7 global pool matches 224x224 inputs
        # (7x7 final feature map) — other input sizes will break here.
        out = F.avg_pool2d(out, kernel_size=7, stride=1).view(features.size(0), -1)
        out = self.classifier(out)
        return out
| 44.216814
| 109
| 0.625638
|
import re
import torch
import torch.nn as nn
import torch.nn.functional as F
import torch.utils.model_zoo as model_zoo
from collections import OrderedDict
# Public API of this module.
__all__ = ['DenseNet', 'densenet121', 'densenet169', 'densenet201', 'densenet161']
# Download URLs for the ImageNet-pretrained checkpoints (pretrained=True).
model_urls = {
    'densenet121': 'https://download.pytorch.org/models/densenet121-a639ec97.pth',
    'densenet169': 'https://download.pytorch.org/models/densenet169-b2777c0a.pth',
    'densenet201': 'https://download.pytorch.org/models/densenet201-c1103571.pth',
    'densenet161': 'https://download.pytorch.org/models/densenet161-8d451a50.pth',
}
def densenet121(pretrained=False, **kwargs):
    r"""Densenet-121 model from
    `"Densely Connected Convolutional Networks" <https://arxiv.org/pdf/1608.06993.pdf>`_

    Args:
        pretrained (bool): If True, returns a model pre-trained on ImageNet
    """
    model = DenseNet(num_init_features=64, growth_rate=32,
                     block_config=(6, 12, 24, 16), **kwargs)
    if not pretrained:
        return model
    # The pretrained checkpoints use legacy keys such as 'norm.1' / 'conv.2';
    # rewrite them to 'norm1' / 'conv2' before loading the state dict.
    key_pattern = re.compile(
        r'^(.*denselayer\d+\.(?:norm|relu|conv))\.((?:[12])\.(?:weight|bias|running_mean|running_var))$')
    state_dict = model_zoo.load_url(model_urls['densenet121'])
    for old_key in list(state_dict.keys()):
        match = key_pattern.match(old_key)
        if match is None:
            continue
        state_dict[match.group(1) + match.group(2)] = state_dict.pop(old_key)
    model.load_state_dict(state_dict)
    return model
def densenet169(pretrained=False, **kwargs):
    r"""Densenet-169 model from
    `"Densely Connected Convolutional Networks" <https://arxiv.org/pdf/1608.06993.pdf>`_

    Args:
        pretrained (bool): If True, returns a model pre-trained on ImageNet
    """
    model = DenseNet(num_init_features=64, growth_rate=32,
                     block_config=(6, 12, 32, 32), **kwargs)
    if not pretrained:
        return model
    # The pretrained checkpoints use legacy keys such as 'norm.1' / 'conv.2';
    # rewrite them to 'norm1' / 'conv2' before loading the state dict.
    key_pattern = re.compile(
        r'^(.*denselayer\d+\.(?:norm|relu|conv))\.((?:[12])\.(?:weight|bias|running_mean|running_var))$')
    state_dict = model_zoo.load_url(model_urls['densenet169'])
    for old_key in list(state_dict.keys()):
        match = key_pattern.match(old_key)
        if match is None:
            continue
        state_dict[match.group(1) + match.group(2)] = state_dict.pop(old_key)
    model.load_state_dict(state_dict)
    return model
def densenet201(pretrained=False, **kwargs):
    r"""Densenet-201 model from
    `"Densely Connected Convolutional Networks" <https://arxiv.org/pdf/1608.06993.pdf>`_

    Args:
        pretrained (bool): If True, returns a model pre-trained on ImageNet
    """
    model = DenseNet(num_init_features=64, growth_rate=32,
                     block_config=(6, 12, 48, 32), **kwargs)
    if not pretrained:
        return model
    # The pretrained checkpoints use legacy keys such as 'norm.1' / 'conv.2';
    # rewrite them to 'norm1' / 'conv2' before loading the state dict.
    key_pattern = re.compile(
        r'^(.*denselayer\d+\.(?:norm|relu|conv))\.((?:[12])\.(?:weight|bias|running_mean|running_var))$')
    state_dict = model_zoo.load_url(model_urls['densenet201'])
    for old_key in list(state_dict.keys()):
        match = key_pattern.match(old_key)
        if match is None:
            continue
        state_dict[match.group(1) + match.group(2)] = state_dict.pop(old_key)
    model.load_state_dict(state_dict)
    return model
def densenet161(pretrained=False, **kwargs):
    r"""Densenet-161 model from
    `"Densely Connected Convolutional Networks" <https://arxiv.org/pdf/1608.06993.pdf>`_

    Args:
        pretrained (bool): If True, returns a model pre-trained on ImageNet
    """
    model = DenseNet(num_init_features=96, growth_rate=48,
                     block_config=(6, 12, 36, 24), **kwargs)
    if not pretrained:
        return model
    # The pretrained checkpoints use legacy keys such as 'norm.1' / 'conv.2';
    # rewrite them to 'norm1' / 'conv2' before loading the state dict.
    key_pattern = re.compile(
        r'^(.*denselayer\d+\.(?:norm|relu|conv))\.((?:[12])\.(?:weight|bias|running_mean|running_var))$')
    state_dict = model_zoo.load_url(model_urls['densenet161'])
    for old_key in list(state_dict.keys()):
        match = key_pattern.match(old_key)
        if match is None:
            continue
        state_dict[match.group(1) + match.group(2)] = state_dict.pop(old_key)
    model.load_state_dict(state_dict)
    return model
class _DenseLayer(nn.Sequential):
    """Bottleneck dense layer: BN-ReLU-Conv(1x1) then BN-ReLU-Conv(3x3).

    The forward pass concatenates the layer's new feature maps onto its
    input along the channel axis (the DenseNet connectivity pattern).
    """

    def __init__(self, num_input_features, growth_rate, bn_size, drop_rate):
        super(_DenseLayer, self).__init__()
        inter_channels = bn_size * growth_rate
        # Module names ('norm1', 'conv2', ...) must stay fixed: the
        # pretrained checkpoints are keyed on them.
        self.add_module('norm1', nn.BatchNorm2d(num_input_features))
        self.add_module('relu1', nn.ReLU(inplace=True))
        self.add_module('conv1', nn.Conv2d(num_input_features, inter_channels,
                                           kernel_size=1, stride=1,
                                           bias=False))
        self.add_module('norm2', nn.BatchNorm2d(inter_channels))
        self.add_module('relu2', nn.ReLU(inplace=True))
        self.add_module('conv2', nn.Conv2d(inter_channels, growth_rate,
                                           kernel_size=3, stride=1, padding=1,
                                           bias=False))
        self.drop_rate = drop_rate

    def forward(self, x):
        new_features = super(_DenseLayer, self).forward(x)
        if self.drop_rate > 0:
            new_features = F.dropout(new_features, p=self.drop_rate,
                                     training=self.training)
        # Dense connectivity: forward the input alongside the new features.
        return torch.cat([x, new_features], 1)
class _DenseBlock(nn.Sequential):
    """A stack of dense layers; layer i receives all earlier feature maps."""

    def __init__(self, num_layers, num_input_features, bn_size, growth_rate, drop_rate):
        super(_DenseBlock, self).__init__()
        for idx in range(num_layers):
            # Each layer's input widens by growth_rate channels per layer.
            in_channels = num_input_features + idx * growth_rate
            self.add_module('denselayer%d' % (idx + 1),
                            _DenseLayer(in_channels, growth_rate, bn_size,
                                        drop_rate))
class _Transition(nn.Sequential):
def __init__(self, num_input_features, num_output_features):
super(_Transition, self).__init__()
self.add_module('norm', nn.BatchNorm2d(num_input_features))
self.add_module('relu', nn.ReLU(inplace=True))
self.add_module('conv', nn.Conv2d(num_input_features, num_output_features,
kernel_size=1, stride=1, bias=False))
self.add_module('pool', nn.AvgPool2d(kernel_size=2, stride=2))
class DenseNet(nn.Module):
    """Densely connected convolutional network (DenseNet).

    Args:
        growth_rate: feature maps added by each dense layer.
        block_config: number of layers in each dense block.
        num_init_features: channels produced by the stem convolution.
        bn_size: bottleneck multiplier (1x1 conv emits bn_size * growth_rate).
        drop_rate: dropout probability after each dense layer.
        num_classes: output size of the final linear classifier.
    """

    def __init__(self, growth_rate=32, block_config=(6, 12, 24, 16),
                 num_init_features=64, bn_size=4, drop_rate=0, num_classes=1000):
        super(DenseNet, self).__init__()

        # Stem: strided 7x7 conv + BN/ReLU + strided max pool.
        self.features = nn.Sequential(OrderedDict([
            ('conv0', nn.Conv2d(3, num_init_features, kernel_size=7, stride=2, padding=3, bias=False)),
            ('norm0', nn.BatchNorm2d(num_init_features)),
            ('relu0', nn.ReLU(inplace=True)),
            ('pool0', nn.MaxPool2d(kernel_size=3, stride=2, padding=1)),
        ]))

        # Alternate dense blocks with channel-halving transition layers;
        # no transition follows the last block.
        channels = num_init_features
        last = len(block_config) - 1
        for index, num_layers in enumerate(block_config):
            block = _DenseBlock(num_layers=num_layers, num_input_features=channels,
                                bn_size=bn_size, growth_rate=growth_rate,
                                drop_rate=drop_rate)
            self.features.add_module('denseblock%d' % (index + 1), block)
            channels += num_layers * growth_rate
            if index != last:
                trans = _Transition(num_input_features=channels,
                                    num_output_features=channels // 2)
                self.features.add_module('transition%d' % (index + 1), trans)
                channels //= 2

        # Final batch norm before the classifier head.
        self.features.add_module('norm5', nn.BatchNorm2d(channels))
        self.classifier = nn.Linear(channels, num_classes)

        # Weight initialisation: Kaiming for convs, (1, 0) for BN, zero bias
        # for the linear layer (its weight keeps the default init).
        for module in self.modules():
            if isinstance(module, nn.Conv2d):
                nn.init.kaiming_normal_(module.weight)
            elif isinstance(module, nn.BatchNorm2d):
                nn.init.constant_(module.weight, 1)
                nn.init.constant_(module.bias, 0)
            elif isinstance(module, nn.Linear):
                nn.init.constant_(module.bias, 0)

    def forward(self, x):
        features = self.features(x)
        # Global 7x7 average pooling (assumes 224x224 input -> 7x7 maps),
        # then flatten and classify.
        activated = F.relu(features, inplace=True)
        pooled = F.avg_pool2d(activated, kernel_size=7, stride=1)
        return self.classifier(pooled.view(features.size(0), -1))
| true
| true
|
79064a24a2f2ea1b8ed6da5d1f4966737bba642e
| 1,871
|
py
|
Python
|
examples/archive_experiments.py
|
Honzys/clearml-agent
|
f58071fc74e2df9bdba8ca91569e0bfd70ad4f46
|
[
"Apache-2.0"
] | 112
|
2019-10-29T10:36:20.000Z
|
2020-12-19T08:08:27.000Z
|
examples/archive_experiments.py
|
Honzys/clearml-agent
|
f58071fc74e2df9bdba8ca91569e0bfd70ad4f46
|
[
"Apache-2.0"
] | 53
|
2020-12-29T07:52:40.000Z
|
2022-03-31T10:11:54.000Z
|
examples/archive_experiments.py
|
Honzys/clearml-agent
|
f58071fc74e2df9bdba8ca91569e0bfd70ad4f46
|
[
"Apache-2.0"
] | 29
|
2019-10-29T13:06:29.000Z
|
2020-12-19T08:09:24.000Z
|
#!/usr/bin/python3
"""
An example script that cleans up failed experiments by moving them to the archive
"""
import argparse
from datetime import datetime

from clearml_agent import APIClient

parser = argparse.ArgumentParser(description=__doc__)
parser.add_argument("--project", "-P", help="Project ID. Only clean up experiments from this project")
parser.add_argument("--user", "-U", help="User ID. Only clean up experiments assigned to this user")
parser.add_argument(
    "--status", "-S", default="failed",
    help="Experiment status. Only clean up experiments with this status (default %(default)s)"
)
parser.add_argument(
    "--iterations", "-I", type=int,
    help="Number of iterations. Only clean up experiments with less or equal number of iterations"
)
parser.add_argument(
    "--sec-from-start", "-T", type=int,
    help="Seconds from start time. "
         "Only clean up experiments if less or equal number of seconds have elapsed since started"
)
args = parser.parse_args()

client = APIClient()

# Fetch candidate tasks matching the filters; "-archived" excludes tasks
# that are already archived.
tasks = client.tasks.get_all(
    project=[args.project] if args.project else None,
    user=[args.user] if args.user else None,
    status=[args.status] if args.status else None,
    system_tags=["-archived"]
)

count = 0
for task in tasks:
    # Compare against None explicitly so that "--iterations 0" (archive only
    # experiments that never iterated) is honored instead of being ignored
    # by truthiness.
    if args.iterations is not None and (task.last_iteration or 0) > args.iterations:
        continue
    if args.sec_from_start is not None:
        # Tasks that never started have no start time to compare against.
        if not task.started:
            continue
        if (datetime.utcnow() - task.started.replace(tzinfo=None)).total_seconds() > args.sec_from_start:
            continue
    try:
        # Archiving is performed by appending the "archived" system tag.
        client.tasks.edit(
            task=task.id,
            system_tags=(task.system_tags or []) + ["archived"],
            force=True
        )
        count += 1
    except Exception as ex:
        # Best-effort: report and continue with the remaining tasks.
        print("Failed editing experiment: {}".format(ex))

print("Cleaned up {} experiments".format(count))
| 31.183333
| 105
| 0.676644
|
import argparse
from datetime import datetime
from clearml_agent import APIClient
parser = argparse.ArgumentParser(description=__doc__)
parser.add_argument("--project", "-P", help="Project ID. Only clean up experiments from this project")
parser.add_argument("--user", "-U", help="User ID. Only clean up experiments assigned to this user")
parser.add_argument(
"--status", "-S", default="failed",
help="Experiment status. Only clean up experiments with this status (default %(default)s)"
)
parser.add_argument(
"--iterations", "-I", type=int,
help="Number of iterations. Only clean up experiments with less or equal number of iterations"
)
parser.add_argument(
"--sec-from-start", "-T", type=int,
help="Seconds from start time. "
"Only clean up experiments if less or equal number of seconds have elapsed since started"
)
args = parser.parse_args()
client = APIClient()
tasks = client.tasks.get_all(
project=[args.project] if args.project else None,
user=[args.user] if args.user else None,
status=[args.status] if args.status else None,
system_tags=["-archived"]
)
count = 0
for task in tasks:
if args.iterations and (task.last_iteration or 0) > args.iterations:
continue
if args.sec_from_start:
if not task.started:
continue
if (datetime.utcnow() - task.started.replace(tzinfo=None)).total_seconds() > args.sec_from_start:
continue
try:
client.tasks.edit(
task=task.id,
system_tags=(task.system_tags or []) + ["archived"],
force=True
)
count += 1
except Exception as ex:
print("Failed editing experiment: {}".format(ex))
print("Cleaned up {} experiments".format(count))
| true
| true
|
79064b1fe249f52e1762ce79eb150d0770eeded4
| 2,386
|
py
|
Python
|
demo/demoproject/settings.py
|
atharvapj/django-webcam
|
a24ad4453cb55afe7f60729765b6601c11682d5a
|
[
"BSD-1-Clause"
] | 11
|
2015-01-09T16:46:04.000Z
|
2021-02-12T17:45:15.000Z
|
demo/demoproject/settings.py
|
atharvapj/django-webcam
|
a24ad4453cb55afe7f60729765b6601c11682d5a
|
[
"BSD-1-Clause"
] | 1
|
2017-01-17T04:11:53.000Z
|
2017-07-16T01:06:29.000Z
|
demo/demoproject/settings.py
|
atharvapj/django-webcam
|
a24ad4453cb55afe7f60729765b6601c11682d5a
|
[
"BSD-1-Clause"
] | 24
|
2015-08-18T16:55:28.000Z
|
2020-10-01T17:53:06.000Z
|
"""
Django settings for the django-webcam demo project.

Demo-only configuration: DEBUG is enabled, SQLite is used, and the
SECRET_KEY is checked in — none of this is suitable for production.
"""
import os

# Directory containing this settings module; anchors media/static paths.
HERE = os.path.dirname(__file__)

DEBUG = True
TEMPLATE_DEBUG = DEBUG

ADMINS = ()
MANAGERS = ADMINS

DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.sqlite3',
        'NAME': 'webcam_test.sqlite',
    }
}

ALLOWED_HOSTS = []

TIME_ZONE = 'America/Chicago'
LANGUAGE_CODE = 'en-us'
SITE_ID = 1
USE_I18N = True
USE_L10N = True
USE_TZ = True

# Media and static files live next to this module in the demo layout.
MEDIA_ROOT = os.path.join(HERE, 'media')
MEDIA_URL = ''
STATIC_ROOT = os.path.join(HERE, 'static')
STATIC_URL = '/static/'

# Demo-only auth backend; presumably accepts any user — confirm in
# demoproject/backends.py.
AUTHENTICATION_BACKENDS = ('demoproject.backends.AnyUserBackend',)

# STATICFILES_DIRS = (
# )
# STATICFILES_FINDERS = (
#     'django.contrib.staticfiles.finders.FileSystemFinder',
#     'django.contrib.staticfiles.finders.AppDirectoriesFinder',
# )

# Make this unique, and don't share it with anybody.
SECRET_KEY = 'l@y%zd0gn+7(yz(s2jx!_f!7u4jexrj396g($2b_#^_d%6*xfc'

# List of callables that know how to import templates from various sources.
# TEMPLATE_LOADERS = (
#     'django.template.loaders.filesystem.Loader',
#     'django.template.loaders.app_directories.Loader',
# )

MIDDLEWARE_CLASSES = (
    'django.middleware.common.CommonMiddleware',
    'django.contrib.sessions.middleware.SessionMiddleware',
    'django.middleware.csrf.CsrfViewMiddleware',
    'django.contrib.auth.middleware.AuthenticationMiddleware',
    'django.contrib.messages.middleware.MessageMiddleware',
)

ROOT_URLCONF = 'demoproject.urls'

# Python dotted path to the WSGI application used by Django's runserver.
WSGI_APPLICATION = 'demoproject.wsgi.application'

# TEMPLATE_DIRS = (
# )

INSTALLED_APPS = (
    'django.contrib.auth',
    'django.contrib.contenttypes',
    'django.contrib.sessions',
    'django.contrib.sites',
    'django.contrib.messages',
    'django.contrib.staticfiles',
    'django.contrib.admin',
    'webcam',  # the app being demonstrated
    'demoproject',
)

# Email unhandled request errors to ADMINS when DEBUG is off.
LOGGING = {
    'version': 1,
    'disable_existing_loggers': False,
    'filters': {
        'require_debug_false': {
            '()': 'django.utils.log.RequireDebugFalse'
        }
    },
    'handlers': {
        'mail_admins': {
            'level': 'ERROR',
            'filters': ['require_debug_false'],
            'class': 'django.utils.log.AdminEmailHandler'
        }
    },
    'loggers': {
        'django.request': {
            'handlers': ['mail_admins'],
            'level': 'ERROR',
            'propagate': True,
        },
    }
}
| 25.115789
| 75
| 0.657167
|
import os
HERE = os.path.dirname(__file__)
DEBUG = True
TEMPLATE_DEBUG = DEBUG
ADMINS = ()
MANAGERS = ADMINS
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': 'webcam_test.sqlite',
}
}
ALLOWED_HOSTS = []
TIME_ZONE = 'America/Chicago'
LANGUAGE_CODE = 'en-us'
SITE_ID = 1
USE_I18N = True
USE_L10N = True
USE_TZ = True
MEDIA_ROOT = os.path.join(HERE, 'media')
MEDIA_URL = ''
STATIC_ROOT = os.path.join(HERE, 'static')
STATIC_URL = '/static/'
AUTHENTICATION_BACKENDS = ('demoproject.backends.AnyUserBackend',)
SECRET_KEY = 'l@y%zd0gn+7(yz(s2jx!_f!7u4jexrj396g($2b_
# List of callables that know how to import templates from various sources.
# TEMPLATE_LOADERS = (
# 'django.template.loaders.filesystem.Loader',
# 'django.template.loaders.app_directories.Loader',
# )
MIDDLEWARE_CLASSES = (
'django.middleware.common.CommonMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
)
ROOT_URLCONF = 'demoproject.urls'
# Python dotted path to the WSGI application used by Django's runserver.
WSGI_APPLICATION = 'demoproject.wsgi.application'
INSTALLED_APPS = (
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.sites',
'django.contrib.messages',
'django.contrib.staticfiles',
'django.contrib.admin',
'webcam',
'demoproject',
)
LOGGING = {
'version': 1,
'disable_existing_loggers': False,
'filters': {
'require_debug_false': {
'()': 'django.utils.log.RequireDebugFalse'
}
},
'handlers': {
'mail_admins': {
'level': 'ERROR',
'filters': ['require_debug_false'],
'class': 'django.utils.log.AdminEmailHandler'
}
},
'loggers': {
'django.request': {
'handlers': ['mail_admins'],
'level': 'ERROR',
'propagate': True,
},
}
}
| true
| true
|
79064b5bb5deec657dfe19a602df8c39e5f4df11
| 3,039
|
py
|
Python
|
ambari-agent/src/main/python/ambari_agent/Heartbeat.py
|
zhanganha/ambari
|
c99dbff12a6b180c74f14e2fda06a204181e6e2c
|
[
"Apache-2.0"
] | null | null | null |
ambari-agent/src/main/python/ambari_agent/Heartbeat.py
|
zhanganha/ambari
|
c99dbff12a6b180c74f14e2fda06a204181e6e2c
|
[
"Apache-2.0"
] | null | null | null |
ambari-agent/src/main/python/ambari_agent/Heartbeat.py
|
zhanganha/ambari
|
c99dbff12a6b180c74f14e2fda06a204181e6e2c
|
[
"Apache-2.0"
] | 2
|
2020-11-04T06:30:31.000Z
|
2020-11-06T11:02:33.000Z
|
#!/usr/bin/env python2.6
'''
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
'''
import json
import logging
import time
from pprint import pformat
from ActionQueue import ActionQueue
import AmbariConfig
import hostname
from HostInfo import HostInfo
logger = logging.getLogger()
firstContact = True
class Heartbeat:
  """Builds the periodic heartbeat payload the agent sends to the Ambari server."""

  def __init__(self, actionQueue, config=None):
    # actionQueue supplies queued-command status/results; config is the
    # agent configuration (also passed on to HostInfo).
    self.actionQueue = actionQueue
    self.config = config
    self.reports = []

  def build(self, id='-1', state_interval=-1, componentsMapped=False):
    """
    Assemble one heartbeat dict.

    :param id: response id echoed back to the server (stringified int)
    :param state_interval: every state_interval-th heartbeat also carries the
        full host environment under 'agentEnv'; <= 0 disables this
    :param componentsMapped: whether the server has mapped components to host
    :return: heartbeat payload dict
    """
    global clusterId, clusterDefinitionRevision, firstContact
    # Server expects the timestamp in milliseconds.
    timestamp = int(time.time()*1000)
    queueResult = self.actionQueue.result()

    nodeStatus = { "status" : "HEALTHY",
                   "cause" : "NONE"}
    heartbeat = { 'responseId' : int(id),
                  'timestamp' : timestamp,
                  'hostname' : hostname.hostname(),
                  'nodeStatus' : nodeStatus
                }
    # Commands are "in progress" if anything is still queued, or if the
    # collected reports may contain IN_PROGRESS tasks.
    commandsInProgress = False
    if self.actionQueue.commandQueue.empty() == False:
      commandsInProgress = True
    if len(queueResult) != 0:
      heartbeat['reports'] = queueResult['reports']
      heartbeat['componentStatus'] = queueResult['componentStatus']
      if len(heartbeat['reports']) > 0:
        # There may be IN_PROGRESS tasks
        commandsInProgress = True
      pass
    logger.info("Sending heartbeat with response id: " + str(id) + " and "
                "timestamp: " + str(timestamp) +
                ". Command(s) in progress: " + repr(commandsInProgress) +
                ". Components mapped: " + repr(componentsMapped))
    logger.debug("Heartbeat : " + pformat(heartbeat))

    # Periodically (every state_interval heartbeats) attach host environment
    # details, mirroring what is sent at registration time.
    if (int(id) >= 0) and state_interval > 0 and (int(id) % state_interval) == 0:
      hostInfo = HostInfo(self.config)
      nodeInfo = { }
      # for now, just do the same work as registration
      # this must be the last step before returning heartbeat
      hostInfo.register(nodeInfo, componentsMapped, commandsInProgress)
      heartbeat['agentEnv'] = nodeInfo
      logger.debug("agentEnv : " + str(nodeInfo))

    return heartbeat
def main(argv=None):
  """Smoke test: build one heartbeat from a fresh action queue and print it."""
  actionQueue = ActionQueue(AmbariConfig.config)
  heartbeat = Heartbeat(actionQueue)
  # print(...) with a single argument works identically under Python 2 and 3,
  # unlike the former bare "print x" statement (a SyntaxError on Python 3).
  print(json.dumps(heartbeat.build('3', 3)))

if __name__ == '__main__':
  main()
| 33.395604
| 81
| 0.676867
|
'''
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
'''
import json
import logging
import time
from pprint import pformat
from ActionQueue import ActionQueue
import AmbariConfig
import hostname
from HostInfo import HostInfo
logger = logging.getLogger()
firstContact = True
class Heartbeat:
def __init__(self, actionQueue, config=None):
self.actionQueue = actionQueue
self.config = config
self.reports = []
def build(self, id='-1', state_interval=-1, componentsMapped=False):
global clusterId, clusterDefinitionRevision, firstContact
timestamp = int(time.time()*1000)
queueResult = self.actionQueue.result()
nodeStatus = { "status" : "HEALTHY",
"cause" : "NONE"}
heartbeat = { 'responseId' : int(id),
'timestamp' : timestamp,
'hostname' : hostname.hostname(),
'nodeStatus' : nodeStatus
}
commandsInProgress = False
if self.actionQueue.commandQueue.empty() == False:
commandsInProgress = True
if len(queueResult) != 0:
heartbeat['reports'] = queueResult['reports']
heartbeat['componentStatus'] = queueResult['componentStatus']
if len(heartbeat['reports']) > 0:
commandsInProgress = True
pass
logger.info("Sending heartbeat with response id: " + str(id) + " and "
"timestamp: " + str(timestamp) +
". Command(s) in progress: " + repr(commandsInProgress) +
". Components mapped: " + repr(componentsMapped))
logger.debug("Heartbeat : " + pformat(heartbeat))
if (int(id) >= 0) and state_interval > 0 and (int(id) % state_interval) == 0:
hostInfo = HostInfo(self.config)
nodeInfo = { }
hostInfo.register(nodeInfo, componentsMapped, commandsInProgress)
heartbeat['agentEnv'] = nodeInfo
logger.debug("agentEnv : " + str(nodeInfo))
return heartbeat
def main(argv=None):
actionQueue = ActionQueue(AmbariConfig.config)
heartbeat = Heartbeat(actionQueue)
print json.dumps(heartbeat.build('3',3))
if __name__ == '__main__':
main()
| false
| true
|
79064d978e8556f453a514dbe1eca9b9bc5463b1
| 1,228
|
bzl
|
Python
|
src/third_party/mtools/mtools_repositories.bzl
|
rkolchmeyer/cos-customizer
|
57712f0f5b4bfdbce8d4b8aac3e9e1a685e531d3
|
[
"Apache-2.0"
] | 39
|
2019-04-01T18:00:44.000Z
|
2021-10-09T00:40:49.000Z
|
src/third_party/mtools/mtools_repositories.bzl
|
rkolchmeyer/cos-customizer
|
57712f0f5b4bfdbce8d4b8aac3e9e1a685e531d3
|
[
"Apache-2.0"
] | 24
|
2019-04-02T22:45:14.000Z
|
2021-08-16T09:25:22.000Z
|
src/third_party/mtools/mtools_repositories.bzl
|
rkolchmeyer/cos-customizer
|
57712f0f5b4bfdbce8d4b8aac3e9e1a685e531d3
|
[
"Apache-2.0"
] | 18
|
2019-05-07T22:24:27.000Z
|
2021-07-30T00:56:35.000Z
|
# Copyright 2021 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the License);
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an AS IS BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
load("@bazel_tools//tools/build_defs/repo:http.bzl", "http_archive")
load("@bazel_tools//tools/build_defs/repo:utils.bzl", "maybe")
def mtools_repositories():
    """Load all repositories needed for mtools."""
    # maybe() makes the declaration a no-op if the workspace has already
    # defined a repository named "mtools".
    maybe(
        http_archive,
        name = "mtools",
        # Overlay BUILD file: the upstream tarball is not a Bazel package.
        build_file = Label("//src/third_party/mtools:BUILD.mtools.bazel"),
        strip_prefix = "mtools-4.0.26",
        urls = [
            # Bazel mirror first, GNU FTP as fallback.
            "https://mirror.bazel.build/ftp.gnu.org/gnu/mtools/mtools-4.0.26.tar.gz",
            "http://ftp.gnu.org/gnu/mtools/mtools-4.0.26.tar.gz",
        ],
        sha256 = "b1adb6973d52b3b70b16047e682f96ef1b669d6b16894c9056a55f407e71cd0f",
    )
| 38.375
| 85
| 0.699511
|
load("@bazel_tools//tools/build_defs/repo:http.bzl", "http_archive")
load("@bazel_tools//tools/build_defs/repo:utils.bzl", "maybe")
def mtools_repositories():
maybe(
http_archive,
name = "mtools",
build_file = Label("//src/third_party/mtools:BUILD.mtools.bazel"),
strip_prefix = "mtools-4.0.26",
urls = [
"https://mirror.bazel.build/ftp.gnu.org/gnu/mtools/mtools-4.0.26.tar.gz",
"http://ftp.gnu.org/gnu/mtools/mtools-4.0.26.tar.gz",
],
sha256 = "b1adb6973d52b3b70b16047e682f96ef1b669d6b16894c9056a55f407e71cd0f",
)
| true
| true
|
79064df0e2e2cd614f0700ccfc7fabd3de24ae63
| 288
|
py
|
Python
|
onepanman_api/admin/code.py
|
Capstone-onepanman/api-server
|
1a5174fbc441d2718f3963863590f634ba2014e1
|
[
"MIT"
] | null | null | null |
onepanman_api/admin/code.py
|
Capstone-onepanman/api-server
|
1a5174fbc441d2718f3963863590f634ba2014e1
|
[
"MIT"
] | 12
|
2020-03-24T18:09:30.000Z
|
2022-03-12T00:15:07.000Z
|
onepanman_api/admin/code.py
|
Capstone-onepanman/api-server
|
1a5174fbc441d2718f3963863590f634ba2014e1
|
[
"MIT"
] | null | null | null |
from django.contrib import admin
from .. import models
@admin.register(models.Code)
class CodeAdmin(admin.ModelAdmin):
    """
    Admin configuration for user-submitted Code entries.
    """
    # Columns shown on the admin changelist page.
    list_display = ['id', 'author', 'problem', 'language', 'name', 'available_game', 'date']

    # The former inner ``class Meta`` was removed: unlike ModelForm,
    # ModelAdmin does not read an inner Meta class — the model is bound by
    # the @admin.register decorator above, so the Meta was dead code.
| 18
| 93
| 0.628472
|
from django.contrib import admin
from .. import models
@admin.register(models.Code)
class CodeAdmin(admin.ModelAdmin):
list_display = ['id', 'author', 'problem', 'language', 'name', 'available_game', 'date' ]
class Meta:
model = models.Code
| true
| true
|
79064e0dd9a36aac9346be63f116f0bfd30fa81a
| 6,592
|
py
|
Python
|
netbox/secrets/forms.py
|
Megzo/netbox
|
f8a21da9f034b31d7b91587cc6a295bbc4d9edea
|
[
"Apache-2.0"
] | null | null | null |
netbox/secrets/forms.py
|
Megzo/netbox
|
f8a21da9f034b31d7b91587cc6a295bbc4d9edea
|
[
"Apache-2.0"
] | null | null | null |
netbox/secrets/forms.py
|
Megzo/netbox
|
f8a21da9f034b31d7b91587cc6a295bbc4d9edea
|
[
"Apache-2.0"
] | null | null | null |
from Crypto.Cipher import PKCS1_OAEP
from Crypto.PublicKey import RSA
from django import forms
from taggit.forms import TagField
from dcim.models import Device
from extras.forms import (
AddRemoveTagsForm, CustomFieldBulkEditForm, CustomFieldFilterForm, CustomFieldModelForm, CustomFieldModelCSVForm,
)
from utilities.forms import (
APISelect, APISelectMultiple, BootstrapMixin, DynamicModelChoiceField, DynamicModelMultipleChoiceField,
FlexibleModelChoiceField, SlugField, StaticSelect2Multiple, TagFilterField,
)
from .constants import *
from .models import Secret, SecretRole, UserKey
def validate_rsa_key(key, is_secret=True):
    """
    Validate the format and type of an RSA key.

    :param key: PEM-encoded RSA key material
    :param is_secret: If True, require a private key; if False, require a public key
    :raises forms.ValidationError: If the key is malformed, of the wrong type,
        or unusable with PKCS#1 OAEP
    """
    if key.startswith('ssh-rsa '):
        # Fixed error message: "your public is" -> "your public key is".
        raise forms.ValidationError("OpenSSH line format is not supported. Please ensure that your public key is in PEM (base64) format.")
    try:
        key = RSA.importKey(key)
    except ValueError:
        raise forms.ValidationError("Invalid RSA key. Please ensure that your key is in PEM (base64) format.")
    except Exception as e:
        raise forms.ValidationError("Invalid key detected: {}".format(e))
    if is_secret and not key.has_private():
        raise forms.ValidationError("This looks like a public key. Please provide your private RSA key.")
    elif not is_secret and key.has_private():
        raise forms.ValidationError("This looks like a private key. Please provide your public RSA key.")
    try:
        # Secrets are encrypted/decrypted with PKCS#1 OAEP; reject keys the
        # cipher cannot be constructed from.
        PKCS1_OAEP.new(key)
    except Exception:
        raise forms.ValidationError("Error validating RSA key. Please ensure that your key supports PKCS#1 OAEP.")
#
# Secret roles
#
class SecretRoleForm(BootstrapMixin, forms.ModelForm):
    """Create/edit form for a SecretRole, including per-user/group access lists."""
    slug = SlugField()

    class Meta:
        model = SecretRole
        fields = [
            'name', 'slug', 'description', 'users', 'groups',
        ]
        widgets = {
            # Select2-backed multi-selects for the access-control relations.
            'users': StaticSelect2Multiple(),
            'groups': StaticSelect2Multiple(),
        }
class SecretRoleCSVForm(forms.ModelForm):
    """Form used for bulk import of SecretRoles from CSV rows."""
    slug = SlugField()

    class Meta:
        model = SecretRole
        # Column order mirrors the model's CSV export headers.
        fields = SecretRole.csv_headers
        help_texts = {
            'name': 'Name of secret role',
        }
#
# Secrets
#
class SecretForm(BootstrapMixin, CustomFieldModelForm):
    """
    Create/edit form for a Secret. The plaintext is entered twice and the two
    values must match; a plaintext is mandatory only when creating a new Secret.
    """
    plaintext = forms.CharField(
        max_length=SECRET_PLAINTEXT_MAX_LENGTH,
        required=False,
        label='Plaintext',
        widget=forms.PasswordInput(
            attrs={
                # Front-end hook: this field needs the session key to submit.
                'class': 'requires-session-key',
            }
        )
    )
    plaintext2 = forms.CharField(
        max_length=SECRET_PLAINTEXT_MAX_LENGTH,
        required=False,
        label='Plaintext (verify)',
        widget=forms.PasswordInput()
    )
    role = DynamicModelChoiceField(
        queryset=SecretRole.objects.all(),
        widget=APISelect(
            api_url="/api/secrets/secret-roles/"
        )
    )
    tags = TagField(
        required=False
    )

    class Meta:
        model = Secret
        fields = [
            'role', 'name', 'plaintext', 'plaintext2', 'tags',
        ]

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)

        # A plaintext value is required when creating a new Secret
        if not self.instance.pk:
            self.fields['plaintext'].required = True

    def clean(self):
        # Run the parent class's validation first (model/custom-field checks).
        super().clean()

        # Verify that the provided plaintext values match. Use .get() so a
        # field-level validation error (which removes the key from
        # cleaned_data) does not raise a KeyError here.
        if self.cleaned_data.get('plaintext') != self.cleaned_data.get('plaintext2'):
            raise forms.ValidationError({
                'plaintext2': "The two given plaintext values do not match. Please check your input."
            })

        return self.cleaned_data
class SecretCSVForm(CustomFieldModelCSVForm):
    """Form used for bulk import of Secrets from CSV rows."""
    device = FlexibleModelChoiceField(
        queryset=Device.objects.all(),
        to_field_name='name',
        help_text='Device name or ID',
        error_messages={
            'invalid_choice': 'Device not found.',
        }
    )
    role = forms.ModelChoiceField(
        queryset=SecretRole.objects.all(),
        to_field_name='name',
        help_text='Name of assigned role',
        error_messages={
            'invalid_choice': 'Invalid secret role.',
        }
    )
    plaintext = forms.CharField(
        help_text='Plaintext secret data'
    )

    class Meta:
        model = Secret
        fields = Secret.csv_headers
        help_texts = {
            'name': 'Name or username',
        }

    def save(self, *args, **kwargs):
        # Attach the plaintext to the returned instance; presumably the caller
        # encrypts it before persisting — confirm against the import view.
        s = super().save(*args, **kwargs)
        s.plaintext = str(self.cleaned_data['plaintext'])
        return s
class SecretBulkEditForm(BootstrapMixin, AddRemoveTagsForm, CustomFieldBulkEditForm):
    """Form for editing several Secrets at once."""
    # Selected Secret PKs are carried across the request in hidden inputs.
    pk = forms.ModelMultipleChoiceField(
        queryset=Secret.objects.all(),
        widget=forms.MultipleHiddenInput()
    )
    role = DynamicModelChoiceField(
        queryset=SecretRole.objects.all(),
        required=False,
        widget=APISelect(
            api_url="/api/secrets/secret-roles/"
        )
    )
    name = forms.CharField(
        max_length=100,
        required=False
    )

    class Meta:
        # Fields that may be explicitly blanked out in a bulk edit.
        nullable_fields = [
            'name',
        ]
class SecretFilterForm(BootstrapMixin, CustomFieldFilterForm):
    """Filter form for the Secret list view."""
    model = Secret
    q = forms.CharField(
        required=False,
        label='Search'
    )
    # NOTE(review): required=True is unusual for a filter field — sibling
    # filter fields here are optional; confirm this is intentional.
    role = DynamicModelMultipleChoiceField(
        queryset=SecretRole.objects.all(),
        to_field_name='slug',
        required=True,
        widget=APISelectMultiple(
            api_url="/api/secrets/secret-roles/",
            value_field="slug",
        )
    )
    tag = TagFilterField(model)
#
# UserKeys
#
class UserKeyForm(BootstrapMixin, forms.ModelForm):
    """Form for a user to submit their public RSA key."""

    class Meta:
        model = UserKey
        fields = ['public_key']
        help_texts = {
            'public_key': "Enter your public RSA key. Keep the private one with you; you'll need it for decryption. "
                          "Please note that passphrase-protected keys are not supported.",
        }
        labels = {
            'public_key': ''
        }

    def clean_public_key(self):
        key = self.cleaned_data['public_key']

        # Validate the RSA key format (must be a PEM public key usable with
        # PKCS#1 OAEP); raises ValidationError otherwise.
        validate_rsa_key(key, is_secret=False)

        return key
class ActivateUserKeyForm(forms.Form):
    """Admin action form to activate selected UserKeys; the operator pastes
    their private RSA key to authorize the activation."""
    # Hidden field name follows the Django admin action convention.
    _selected_action = forms.ModelMultipleChoiceField(
        queryset=UserKey.objects.all(),
        label='User Keys'
    )
    secret_key = forms.CharField(
        widget=forms.Textarea(
            attrs={
                'class': 'vLargeTextField',
            }
        ),
        label='Your private key'
    )
| 27.932203
| 134
| 0.622118
|
from Crypto.Cipher import PKCS1_OAEP
from Crypto.PublicKey import RSA
from django import forms
from taggit.forms import TagField
from dcim.models import Device
from extras.forms import (
AddRemoveTagsForm, CustomFieldBulkEditForm, CustomFieldFilterForm, CustomFieldModelForm, CustomFieldModelCSVForm,
)
from utilities.forms import (
APISelect, APISelectMultiple, BootstrapMixin, DynamicModelChoiceField, DynamicModelMultipleChoiceField,
FlexibleModelChoiceField, SlugField, StaticSelect2Multiple, TagFilterField,
)
from .constants import *
from .models import Secret, SecretRole, UserKey
def validate_rsa_key(key, is_secret=True):
if key.startswith('ssh-rsa '):
raise forms.ValidationError("OpenSSH line format is not supported. Please ensure that your public is in PEM (base64) format.")
try:
key = RSA.importKey(key)
except ValueError:
raise forms.ValidationError("Invalid RSA key. Please ensure that your key is in PEM (base64) format.")
except Exception as e:
raise forms.ValidationError("Invalid key detected: {}".format(e))
if is_secret and not key.has_private():
raise forms.ValidationError("This looks like a public key. Please provide your private RSA key.")
elif not is_secret and key.has_private():
raise forms.ValidationError("This looks like a private key. Please provide your public RSA key.")
try:
PKCS1_OAEP.new(key)
except Exception:
raise forms.ValidationError("Error validating RSA key. Please ensure that your key supports PKCS#1 OAEP.")
class SecretRoleForm(BootstrapMixin, forms.ModelForm):
slug = SlugField()
class Meta:
model = SecretRole
fields = [
'name', 'slug', 'description', 'users', 'groups',
]
widgets = {
'users': StaticSelect2Multiple(),
'groups': StaticSelect2Multiple(),
}
class SecretRoleCSVForm(forms.ModelForm):
slug = SlugField()
class Meta:
model = SecretRole
fields = SecretRole.csv_headers
help_texts = {
'name': 'Name of secret role',
}
class SecretForm(BootstrapMixin, CustomFieldModelForm):
plaintext = forms.CharField(
max_length=SECRET_PLAINTEXT_MAX_LENGTH,
required=False,
label='Plaintext',
widget=forms.PasswordInput(
attrs={
'class': 'requires-session-key',
}
)
)
plaintext2 = forms.CharField(
max_length=SECRET_PLAINTEXT_MAX_LENGTH,
required=False,
label='Plaintext (verify)',
widget=forms.PasswordInput()
)
role = DynamicModelChoiceField(
queryset=SecretRole.objects.all(),
widget=APISelect(
api_url="/api/secrets/secret-roles/"
)
)
tags = TagField(
required=False
)
class Meta:
model = Secret
fields = [
'role', 'name', 'plaintext', 'plaintext2', 'tags',
]
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
if not self.instance.pk:
self.fields['plaintext'].required = True
def clean(self):
if self.cleaned_data['plaintext'] != self.cleaned_data['plaintext2']:
raise forms.ValidationError({
'plaintext2': "The two given plaintext values do not match. Please check your input."
})
class SecretCSVForm(CustomFieldModelCSVForm):
device = FlexibleModelChoiceField(
queryset=Device.objects.all(),
to_field_name='name',
help_text='Device name or ID',
error_messages={
'invalid_choice': 'Device not found.',
}
)
role = forms.ModelChoiceField(
queryset=SecretRole.objects.all(),
to_field_name='name',
help_text='Name of assigned role',
error_messages={
'invalid_choice': 'Invalid secret role.',
}
)
plaintext = forms.CharField(
help_text='Plaintext secret data'
)
class Meta:
model = Secret
fields = Secret.csv_headers
help_texts = {
'name': 'Name or username',
}
def save(self, *args, **kwargs):
s = super().save(*args, **kwargs)
s.plaintext = str(self.cleaned_data['plaintext'])
return s
class SecretBulkEditForm(BootstrapMixin, AddRemoveTagsForm, CustomFieldBulkEditForm):
pk = forms.ModelMultipleChoiceField(
queryset=Secret.objects.all(),
widget=forms.MultipleHiddenInput()
)
role = DynamicModelChoiceField(
queryset=SecretRole.objects.all(),
required=False,
widget=APISelect(
api_url="/api/secrets/secret-roles/"
)
)
name = forms.CharField(
max_length=100,
required=False
)
class Meta:
nullable_fields = [
'name',
]
class SecretFilterForm(BootstrapMixin, CustomFieldFilterForm):
model = Secret
q = forms.CharField(
required=False,
label='Search'
)
role = DynamicModelMultipleChoiceField(
queryset=SecretRole.objects.all(),
to_field_name='slug',
required=True,
widget=APISelectMultiple(
api_url="/api/secrets/secret-roles/",
value_field="slug",
)
)
tag = TagFilterField(model)
class UserKeyForm(BootstrapMixin, forms.ModelForm):
class Meta:
model = UserKey
fields = ['public_key']
help_texts = {
'public_key': "Enter your public RSA key. Keep the private one with you; you'll need it for decryption. "
"Please note that passphrase-protected keys are not supported.",
}
labels = {
'public_key': ''
}
def clean_public_key(self):
key = self.cleaned_data['public_key']
# Validate the RSA key format.
validate_rsa_key(key, is_secret=False)
return key
class ActivateUserKeyForm(forms.Form):
_selected_action = forms.ModelMultipleChoiceField(
queryset=UserKey.objects.all(),
label='User Keys'
)
secret_key = forms.CharField(
widget=forms.Textarea(
attrs={
'class': 'vLargeTextField',
}
),
label='Your private key'
)
| true
| true
|
79064e6afedde8610b1ef693125b360838dd1d59
| 20,897
|
py
|
Python
|
pygbif/occurrences/search.py
|
livatras/pygbif
|
ede8a66e824f030c13b6e9feaeb8f25c9e6ef923
|
[
"MIT"
] | null | null | null |
pygbif/occurrences/search.py
|
livatras/pygbif
|
ede8a66e824f030c13b6e9feaeb8f25c9e6ef923
|
[
"MIT"
] | null | null | null |
pygbif/occurrences/search.py
|
livatras/pygbif
|
ede8a66e824f030c13b6e9feaeb8f25c9e6ef923
|
[
"MIT"
] | null | null | null |
import re
from pygbif.gbifutils import gbif_baseurl, bool2str, requests_argset, gbif_GET
def search(
    taxonKey=None,
    repatriated=None,
    kingdomKey=None,
    phylumKey=None,
    classKey=None,
    orderKey=None,
    familyKey=None,
    genusKey=None,
    subgenusKey=None,
    scientificName=None,
    country=None,
    publishingCountry=None,
    hasCoordinate=None,
    typeStatus=None,
    recordNumber=None,
    lastInterpreted=None,
    continent=None,
    geometry=None,
    recordedBy=None,
    recordedByID=None,
    identifiedByID=None,
    basisOfRecord=None,
    datasetKey=None,
    eventDate=None,
    catalogNumber=None,
    year=None,
    month=None,
    decimalLatitude=None,
    decimalLongitude=None,
    elevation=None,
    depth=None,
    institutionCode=None,
    collectionCode=None,
    hasGeospatialIssue=None,
    issue=None,
    q=None,
    spellCheck=None,
    mediatype=None,
    limit=300,
    offset=0,
    establishmentMeans=None,
    facet=None,
    facetMincount=None,
    facetMultiselect=None,
    timeout=60,
    **kwargs
):
    """
    Search GBIF occurrences

    :param taxonKey: [int] A GBIF occurrence identifier
    :param q: [str] Simple search parameter. The value for this parameter can be a simple word or a phrase.
    :param spellCheck: [bool] If ``True`` ask GBIF to check your spelling of the value passed to the ``search`` parameter.
        IMPORTANT: This only checks the input to the ``search`` parameter, and no others. Default: ``False``
    :param repatriated: [str] Searches for records whose publishing country is different to the country where the record was recorded in
    :param kingdomKey: [int] Kingdom classification key
    :param phylumKey: [int] Phylum classification key
    :param classKey: [int] Class classification key
    :param orderKey: [int] Order classification key
    :param familyKey: [int] Family classification key
    :param genusKey: [int] Genus classification key
    :param subgenusKey: [int] Subgenus classification key
    :param scientificName: [str] A scientific name from the GBIF backbone. All included and synonym taxa are included in the search.
    :param datasetKey: [str] The occurrence dataset key (a uuid)
    :param catalogNumber: [str] An identifier of any form assigned by the source within a physical collection or digital dataset for the record which may not be unique, but should be fairly unique in combination with the institution and collection code.
    :param recordedBy: [str] The person who recorded the occurrence.
    :param recordedByID: [str] Identifier (e.g. ORCID) for the person who recorded the occurrence
    :param identifiedByID: [str] Identifier (e.g. ORCID) for the person who provided the taxonomic identification of the occurrence.
    :param collectionCode: [str] An identifier of any form assigned by the source to identify the physical collection or digital dataset uniquely within the text of an institution.
    :param institutionCode: [str] An identifier of any form assigned by the source to identify the institution the record belongs to. Not guaranteed to be unique.
    :param country: [str] The 2-letter country code (as per ISO-3166-1) of the country in which the occurrence was recorded. See here http://en.wikipedia.org/wiki/ISO_3166-1_alpha-2
    :param basisOfRecord: [str] Basis of record, as defined in our BasisOfRecord enum here http://gbif.github.io/gbif-api/apidocs/org/gbif/api/vocabulary/BasisOfRecord.html Acceptable values are:

        - ``FOSSIL_SPECIMEN`` An occurrence record describing a fossilized specimen.
        - ``HUMAN_OBSERVATION`` An occurrence record describing an observation made by one or more people.
        - ``LIVING_SPECIMEN`` An occurrence record describing a living specimen.
        - ``MACHINE_OBSERVATION`` An occurrence record describing an observation made by a machine.
        - ``MATERIAL_CITATION`` An occurrence record based on a reference to a scholarly publication.
        - ``OBSERVATION`` An occurrence record describing an observation.
        - ``OCCURRENCE`` An existence of an organism at a particular place and time. No more specific basis.
        - ``PRESERVED_SPECIMEN`` An occurrence record describing a preserved specimen.
    :param eventDate: [date] Occurrence date in ISO 8601 format: yyyy, yyyy-MM, yyyy-MM-dd, or
        MM-dd. Supports range queries, smaller,larger (e.g., ``1990,1991``, whereas ``1991,1990``
        wouldn't work)
    :param year: [int] The 4 digit year. A year of 98 will be interpreted as AD 98. Supports range queries,
        smaller,larger (e.g., ``1990,1991``, whereas ``1991,1990`` wouldn't work)
    :param month: [int] The month of the year, starting with 1 for January. Supports range queries,
        smaller,larger (e.g., ``1,2``, whereas ``2,1`` wouldn't work)
    :param decimalLatitude: [float] Latitude in decimals between -90 and 90 based on WGS 84.
        Supports range queries, smaller,larger (e.g., ``25,30``, whereas ``30,25`` wouldn't work)
    :param decimalLongitude: [float] Longitude in decimals between -180 and 180 based on WGS 84.
        Supports range queries (e.g., ``-0.4,-0.2``, whereas ``-0.2,-0.4`` wouldn't work).
    :param publishingCountry: [str] The 2-letter country code (as per ISO-3166-1) of the
        country in which the occurrence was recorded.
    :param elevation: [int/str] Elevation in meters above sea level. Supports range queries, smaller,larger
        (e.g., ``5,30``, whereas ``30,5`` wouldn't work)
    :param depth: [int/str] Depth in meters relative to elevation. For example 10 meters below a
        lake surface with given elevation. Supports range queries, smaller,larger (e.g., ``5,30``,
        whereas ``30,5`` wouldn't work)
    :param geometry: [str] Searches for occurrences inside a polygon described in Well Known
        Text (WKT) format. A WKT shape written as either POINT, LINESTRING, LINEARRING
        POLYGON, or MULTIPOLYGON. Example of a polygon: ``((30.1 10.1, 20, 20 40, 40 40, 30.1 10.1))`` would be queried as http://bit.ly/1BzNwDq.
        Polygons must have counter-clockwise ordering of points.
    :param hasGeospatialIssue: [bool] Includes/excludes occurrence records which contain spatial
        issues (as determined in our record interpretation), i.e. ``hasGeospatialIssue=TRUE``
        returns only those records with spatial issues while ``hasGeospatialIssue=FALSE`` includes
        only records without spatial issues. The absence of this parameter returns any
        record with or without spatial issues.
    :param issue: [str] One or more of many possible issues with each occurrence record. See
        Details. Issues passed to this parameter filter results by the issue.
    :param hasCoordinate: [bool] Return only occurrence records with lat/long data (``True``) or
        all records (``False``, default).
    :param typeStatus: [str] Type status of the specimen. One of many options. See ?typestatus
    :param recordNumber: [int] Number recorded by collector of the data, different from GBIF record
        number. See http://rs.tdwg.org/dwc/terms/#recordNumber} for more info
    :param lastInterpreted: [date] Date the record was last modified in GBIF, in ISO 8601 format:
        yyyy, yyyy-MM, yyyy-MM-dd, or MM-dd. Supports range queries, smaller,larger (e.g.,
        ``1990,1991``, whereas ``1991,1990`` wouldn't work)
    :param continent: [str] Continent. One of ``africa``, ``antarctica``, ``asia``, ``europe``, ``north_america``
        (North America includes the Caribbean and reaches down and includes Panama), ``oceania``,
        or ``south_america``
    :param fields: [str] Default (``all``) returns all fields. ``minimal`` returns just taxon name,
        key, latitude, and longitude. Or specify each field you want returned by name, e.g.
        ``fields = c('name','latitude','elevation')``.
    :param mediatype: [str] Media type. Default is ``NULL``, so no filtering on mediatype. Options:
        ``NULL``, ``MovingImage``, ``Sound``, and ``StillImage``
    :param limit: [int] Number of results to return. Default: ``300``
    :param offset: [int] Record to start at. Default: ``0``
    :param facet: [str] a character vector of length 1 or greater
    :param establishmentMeans: [str] EstablishmentMeans, possible values include: INTRODUCED,
        INVASIVE, MANAGED, NATIVE, NATURALISED, UNCERTAIN
    :param facetMincount: [int] minimum number of records to be included in the faceting results
    :param facetMultiselect: [bool] Set to ``True`` to still return counts for values that are not currently
        filtered. See examples. Default: ``False``
    :param timeout: [int] Seconds before the underlying HTTP request times out. Default: ``60``

    :return: A dictionary

    Usage::

        from pygbif import occurrences
        occurrences.search(taxonKey = 3329049)

        # Return 2 results, this is the default by the way
        occurrences.search(taxonKey=3329049, limit=2)

        # Instead of getting a taxon key first, you can search for a name directly
        # However, note that using this approach (with `scientificName="..."`)
        # you are getting synonyms too. The results for using `scientificName` and
        # `taxonKey` parameters are the same in this case, but I wouldn't be surprised if for some
        # names they return different results
        occurrences.search(scientificName = 'Ursus americanus')
        from pygbif import species
        key = species.name_backbone(name = 'Ursus americanus', rank='species')['usageKey']
        occurrences.search(taxonKey = key)

        # Search by dataset key
        occurrences.search(datasetKey='7b5d6a48-f762-11e1-a439-00145eb45e9a', limit=20)

        # Search by catalog number
        occurrences.search(catalogNumber="49366", limit=20)
        # occurrences.search(catalogNumber=["49366","Bird.27847588"], limit=20)

        # Use paging parameters (limit and offset) to page. Note the different results
        # for the two queries below.
        occurrences.search(datasetKey='7b5d6a48-f762-11e1-a439-00145eb45e9a', offset=10, limit=5)
        occurrences.search(datasetKey='7b5d6a48-f762-11e1-a439-00145eb45e9a', offset=20, limit=5)

        # Many dataset keys
        # occurrences.search(datasetKey=["50c9509d-22c7-4a22-a47d-8c48425ef4a7", "7b5d6a48-f762-11e1-a439-00145eb45e9a"], limit=20)

        # Search by collector name
        res = occurrences.search(recordedBy="smith", limit=20)
        [ x['recordedBy'] for x in res['results'] ]

        # Many collector names
        # occurrences.search(recordedBy=["smith","BJ Stacey"], limit=20)

        # recordedByID
        occurrences.search(recordedByID="https://orcid.org/0000-0003-1691-239X", limit = 3)

        # identifiedByID
        occurrences.search(identifiedByID="https://orcid.org/0000-0003-1691-239X", limit = 3)

        # Search for many species
        splist = ['Cyanocitta stelleri', 'Junco hyemalis', 'Aix sponsa']
        keys = [ species.name_suggest(x)[0]['key'] for x in splist ]
        out = [ occurrences.search(taxonKey = x, limit=1) for x in keys ]
        [ x['results'][0]['speciesKey'] for x in out ]

        # Search - q parameter
        occurrences.search(q = "kingfisher", limit=20)
        ## spell check - only works with the `search` parameter
        ### spelled correctly - same result as above call
        occurrences.search(q = "kingfisher", limit=20, spellCheck = True)
        ### spelled incorrectly - stops with suggested spelling
        occurrences.search(q = "kajsdkla", limit=20, spellCheck = True)
        ### spelled incorrectly - stops with many suggested spellings
        ### and number of results for each
        occurrences.search(q = "helir", limit=20, spellCheck = True)

        # Search on latitude and longitude
        occurrences.search(decimalLatitude=50, decimalLongitude=10, limit=2)

        # Search on a bounding box
        ## in well known text format
        occurrences.search(geometry='POLYGON((30.1 10.1, 10 20, 20 40, 40 40, 30.1 10.1))', limit=20)
        from pygbif import species
        key = species.name_suggest(q='Aesculus hippocastanum')[0]['key']
        occurrences.search(taxonKey=key, geometry='POLYGON((30.1 10.1, 10 20, 20 40, 40 40, 30.1 10.1))', limit=20)
        ## multipolygon
        wkt = 'MULTIPOLYGON(((-123 38, -123 43, -116 43, -116 38, -123 38)),((-97 41, -97 45, -93 45, -93 41, -97 41)))'
        occurrences.search(geometry = wkt, limit = 20)

        # Search on country
        occurrences.search(country='US', limit=20)
        occurrences.search(country='FR', limit=20)
        occurrences.search(country='DE', limit=20)

        # Get only occurrences with lat/long data
        occurrences.search(taxonKey=key, hasCoordinate=True, limit=20)

        # Get only occurrences that were recorded as living specimens
        occurrences.search(taxonKey=key, basisOfRecord="LIVING_SPECIMEN", hasCoordinate=True, limit=20)

        # Get occurrences for a particular eventDate
        occurrences.search(taxonKey=key, eventDate="2013", limit=20)
        occurrences.search(taxonKey=key, year="2013", limit=20)
        occurrences.search(taxonKey=key, month="6", limit=20)

        # Get occurrences based on depth
        key = species.name_backbone(name='Salmo salar', kingdom='animals')['usageKey']
        occurrences.search(taxonKey=key, depth="5", limit=20)

        # Get occurrences based on elevation
        key = species.name_backbone(name='Puma concolor', kingdom='animals')['usageKey']
        occurrences.search(taxonKey=key, elevation=50, hasCoordinate=True, limit=20)

        # Get occurrences based on institutionCode
        occurrences.search(institutionCode="TLMF", limit=20)

        # Get occurrences based on collectionCode
        occurrences.search(collectionCode="Floristic Databases MV - Higher Plants", limit=20)

        # Get only those occurrences with spatial issues
        occurrences.search(taxonKey=key, hasGeospatialIssue=True, limit=20)

        # Search using a query string
        occurrences.search(q="kingfisher", limit=20)

        # Range queries
        ## See Detail for parameters that support range queries
        ### this is a range depth, with lower/upper limits in character string
        occurrences.search(depth='50,100')

        ## Range search with year
        occurrences.search(year='1999,2000', limit=20)

        ## Range search with latitude
        occurrences.search(decimalLatitude='29.59,29.6')

        # Search by specimen type status
        ## Look for possible values of the typeStatus parameter looking at the typestatus dataset
        occurrences.search(typeStatus = 'allotype')

        # Search by specimen record number
        ## This is the record number of the person/group that submitted the data, not GBIF's numbers
        ## You can see that many different groups have record number 1, so not super helpful
        occurrences.search(recordNumber = 1)

        # Search by last time interpreted: Date the record was last modified in GBIF
        ## The lastInterpreted parameter accepts ISO 8601 format dates, including
        ## yyyy, yyyy-MM, yyyy-MM-dd, or MM-dd. Range queries are accepted for lastInterpreted
        occurrences.search(lastInterpreted = '2014-04-01')

        # Search by continent
        ## One of africa, antarctica, asia, europe, north_america, oceania, or south_america
        occurrences.search(continent = 'south_america')
        occurrences.search(continent = 'africa')
        occurrences.search(continent = 'oceania')
        occurrences.search(continent = 'antarctica')

        # Search for occurrences with images
        occurrences.search(mediatype = 'StillImage')
        occurrences.search(mediatype = 'MovingImage')
        x = occurrences.search(mediatype = 'Sound')
        [z['media'] for z in x['results']]

        # Query based on issues
        occurrences.search(taxonKey=1, issue='DEPTH_UNLIKELY')
        occurrences.search(taxonKey=1, issue=['DEPTH_UNLIKELY','COORDINATE_ROUNDED'])
        # Show all records in the Arizona State Lichen Collection that cant be matched to the GBIF
        # backbone properly:
        occurrences.search(datasetKey='84c0e1a0-f762-11e1-a439-00145eb45e9a', issue=['TAXON_MATCH_NONE','TAXON_MATCH_HIGHERRANK'])

        # If you pass in an invalid polygon you get hopefully informative errors
        ### the WKT string is fine, but GBIF says bad polygon
        wkt = 'POLYGON((-178.59375 64.83258989321493,-165.9375 59.24622380205539,
        -147.3046875 59.065977905449806,-130.78125 51.04484764446178,-125.859375 36.70806354647625,
        -112.1484375 23.367471303759686,-105.1171875 16.093320185359257,-86.8359375 9.23767076398516,
        -82.96875 2.9485268155066175,-82.6171875 -14.812060061226388,-74.8828125 -18.849111862023985,
        -77.34375 -47.661687803329166,-84.375 -49.975955187343295,174.7265625 -50.649460483096114,
        179.296875 -42.19189902447192,-176.8359375 -35.634976650677295,176.8359375 -31.835565983656227,
        163.4765625 -6.528187613695323,152.578125 1.894796132058301,135.703125 4.702353722559447,
        127.96875 15.077427674847987,127.96875 23.689804541429606,139.921875 32.06861069132688,
        149.4140625 42.65416193033991,159.2578125 48.3160811030533,168.3984375 57.019804336633165,
        178.2421875 59.95776046458139,-179.6484375 61.16708631440347,-178.59375 64.83258989321493))'
        occurrences.search(geometry = wkt)

        # Faceting
        ## return no occurrence records with limit=0
        x = occurrences.search(facet = "country", limit = 0)
        x['facets']

        ## also return occurrence records
        x = occurrences.search(facet = "establishmentMeans", limit = 10)
        x['facets']
        x['results']

        ## multiple facet variables
        x = occurrences.search(facet = ["country", "basisOfRecord"], limit = 10)
        x['results']
        x['facets']
        x['facets']['country']
        x['facets']['basisOfRecord']
        x['facets']['basisOfRecord']['count']

        ## set a minimum facet count
        x = occurrences.search(facet = "country", facetMincount = 30000000, limit = 0)
        x['facets']

        ## paging per each faceted variable
        ### do so by passing in variables like "country" + "_facetLimit" = "country_facetLimit"
        ### or "country" + "_facetOffset" = "country_facetOffset"
        x = occurrences.search(
            facet = ["country", "basisOfRecord", "hasCoordinate"],
            country_facetLimit = 3,
            basisOfRecord_facetLimit = 6,
            limit = 0
        )
        x['facets']

        # requests package options
        ## There's an acceptable set of requests options (['timeout', 'cookies', 'auth',
        ## 'allow_redirects', 'proxies', 'verify', 'stream', 'cert']) you can pass
        ## in via **kwargs, e.g., set a timeout. Default timeout set to 60 seconds.
        x = occurrences.search(timeout = 1)
    """
    url = gbif_baseurl + "occurrence/search"
    args = {
        "taxonKey": taxonKey,
        "repatriated": repatriated,
        "kingdomKey": kingdomKey,
        "phylumKey": phylumKey,
        "classKey": classKey,
        "orderKey": orderKey,
        "familyKey": familyKey,
        "genusKey": genusKey,
        "subgenusKey": subgenusKey,
        "scientificName": scientificName,
        "country": country,
        "publishingCountry": publishingCountry,
        "hasCoordinate": bool2str(hasCoordinate),
        "typeStatus": typeStatus,
        "recordNumber": recordNumber,
        "lastInterpreted": lastInterpreted,
        "continent": continent,
        "geometry": geometry,
        "recordedBy": recordedBy,
        "recordedByID": recordedByID,
        "identifiedByID": identifiedByID,
        "basisOfRecord": basisOfRecord,
        "datasetKey": datasetKey,
        "eventDate": eventDate,
        "catalogNumber": catalogNumber,
        "year": year,
        "month": month,
        "decimalLatitude": decimalLatitude,
        "decimalLongitude": decimalLongitude,
        "elevation": elevation,
        "depth": depth,
        "institutionCode": institutionCode,
        "collectionCode": collectionCode,
        "hasGeospatialIssue": bool2str(hasGeospatialIssue),
        "issue": issue,
        "q": q,
        "spellCheck": bool2str(spellCheck),
        "mediatype": mediatype,
        "limit": limit,
        "offset": offset,
        "establishmentMeans": establishmentMeans,
        "facetMincount": facetMincount,
        "facet": facet,
        "facetMultiselect": bool2str(facetMultiselect),
    }
    # Any **kwargs entry that is not a requests option is an extra GBIF query
    # parameter; underscores are mapped to dots so that facet paging params
    # like country_facetLimit become country.facetLimit.
    gbif_kwargs = {key: kwargs[key] for key in kwargs if key not in requests_argset}
    if gbif_kwargs:  # a dict comprehension is never None; test for non-empty
        xx = dict(
            zip([re.sub("_", ".", x) for x in gbif_kwargs.keys()], gbif_kwargs.values())
        )
        args.update(xx)
    # Keep only genuine requests options to forward to the HTTP layer.
    kwargs = {key: kwargs[key] for key in kwargs if key in requests_argset}
    # Bug fix: `timeout` is an explicit parameter, so it never appears in
    # **kwargs and was previously dropped entirely. Forward it here.
    kwargs.setdefault("timeout", timeout)
    out = gbif_GET(url, args, **kwargs)
    return out
| 50.844282
| 250
| 0.676748
|
import re
from pygbif.gbifutils import gbif_baseurl, bool2str, requests_argset, gbif_GET
def search(
    taxonKey=None,
    repatriated=None,
    kingdomKey=None,
    phylumKey=None,
    classKey=None,
    orderKey=None,
    familyKey=None,
    genusKey=None,
    subgenusKey=None,
    scientificName=None,
    country=None,
    publishingCountry=None,
    hasCoordinate=None,
    typeStatus=None,
    recordNumber=None,
    lastInterpreted=None,
    continent=None,
    geometry=None,
    recordedBy=None,
    recordedByID=None,
    identifiedByID=None,
    basisOfRecord=None,
    datasetKey=None,
    eventDate=None,
    catalogNumber=None,
    year=None,
    month=None,
    decimalLatitude=None,
    decimalLongitude=None,
    elevation=None,
    depth=None,
    institutionCode=None,
    collectionCode=None,
    hasGeospatialIssue=None,
    issue=None,
    q=None,
    spellCheck=None,
    mediatype=None,
    limit=300,
    offset=0,
    establishmentMeans=None,
    facet=None,
    facetMincount=None,
    facetMultiselect=None,
    timeout=60,
    **kwargs
):
    """
    Search GBIF occurrences.

    Every keyword argument maps 1:1 to a query parameter of the GBIF
    ``/occurrence/search`` endpoint; ``None`` values are simply not sent.
    Booleans are serialized via ``bool2str``. Additional **kwargs that are
    not requests options are forwarded as extra GBIF parameters (with
    underscores mapped to dots, e.g. ``country_facetLimit`` ->
    ``country.facetLimit``); recognized requests options (``timeout``,
    ``proxies``, ...) go to the HTTP layer.

    :param limit: [int] number of results to return (default 300)
    :param offset: [int] record to start at (default 0)
    :param timeout: [int] HTTP timeout in seconds (default 60)
    :return: decoded JSON response as a dictionary
    """
    url = gbif_baseurl + "occurrence/search"
    args = {
        "taxonKey": taxonKey,
        "repatriated": repatriated,
        "kingdomKey": kingdomKey,
        "phylumKey": phylumKey,
        "classKey": classKey,
        "orderKey": orderKey,
        "familyKey": familyKey,
        "genusKey": genusKey,
        "subgenusKey": subgenusKey,
        "scientificName": scientificName,
        "country": country,
        "publishingCountry": publishingCountry,
        "hasCoordinate": bool2str(hasCoordinate),
        "typeStatus": typeStatus,
        "recordNumber": recordNumber,
        "lastInterpreted": lastInterpreted,
        "continent": continent,
        "geometry": geometry,
        "recordedBy": recordedBy,
        "recordedByID": recordedByID,
        "identifiedByID": identifiedByID,
        "basisOfRecord": basisOfRecord,
        "datasetKey": datasetKey,
        "eventDate": eventDate,
        "catalogNumber": catalogNumber,
        "year": year,
        "month": month,
        "decimalLatitude": decimalLatitude,
        "decimalLongitude": decimalLongitude,
        "elevation": elevation,
        "depth": depth,
        "institutionCode": institutionCode,
        "collectionCode": collectionCode,
        "hasGeospatialIssue": bool2str(hasGeospatialIssue),
        "issue": issue,
        "q": q,
        "spellCheck": bool2str(spellCheck),
        "mediatype": mediatype,
        "limit": limit,
        "offset": offset,
        "establishmentMeans": establishmentMeans,
        "facetMincount": facetMincount,
        "facet": facet,
        "facetMultiselect": bool2str(facetMultiselect),
    }
    # Non-requests kwargs are extra GBIF query params (underscores -> dots).
    gbif_kwargs = {key: kwargs[key] for key in kwargs if key not in requests_argset}
    if gbif_kwargs:  # a dict comprehension is never None; test for non-empty
        xx = dict(
            zip([re.sub("_", ".", x) for x in gbif_kwargs.keys()], gbif_kwargs.values())
        )
        args.update(xx)
    kwargs = {key: kwargs[key] for key in kwargs if key in requests_argset}
    # Bug fix: the explicit `timeout` parameter never lands in **kwargs and
    # was previously ignored; forward it to the HTTP layer.
    kwargs.setdefault("timeout", timeout)
    out = gbif_GET(url, args, **kwargs)
    return out
| true
| true
|
79065068ec60bff32a4fab5b386f44567c3f3a7e
| 52
|
py
|
Python
|
python-example/local.py
|
DavidLeoni/prova-qcb
|
38520ab66b34a145e43ffb0ee808562eae31c325
|
[
"Apache-2.0"
] | null | null | null |
python-example/local.py
|
DavidLeoni/prova-qcb
|
38520ab66b34a145e43ffb0ee808562eae31c325
|
[
"Apache-2.0"
] | 85
|
2017-09-20T12:29:11.000Z
|
2022-02-22T09:42:33.000Z
|
python-example/local.py
|
DavidLeoni/prova-qcb
|
38520ab66b34a145e43ffb0ee808562eae31c325
|
[
"Apache-2.0"
] | 3
|
2020-09-11T11:59:22.000Z
|
2021-07-28T09:09:36.000Z
|
def gimme(x):
    """Print a confirmation that *x* was indeed the thing."""
    message = "It was a %s indeed" % x
    print(message)
| 10.4
| 35
| 0.538462
|
def gimme(x):
    # Announce what x turned out to be.
    text = "It was a %s indeed" % x
    print(text)
| true
| true
|
790652187b5cf6041a3ae071dce885a612225a0f
| 3,309
|
py
|
Python
|
scannerKH/scannerKH/settings.py
|
JanGut/scannerKH
|
1814d2f110af3abdde75e253cab96300701dd531
|
[
"MIT"
] | 1
|
2020-05-27T16:18:43.000Z
|
2020-05-27T16:18:43.000Z
|
scannerKH/scannerKH/settings.py
|
JanGut/scannerKH
|
1814d2f110af3abdde75e253cab96300701dd531
|
[
"MIT"
] | null | null | null |
scannerKH/scannerKH/settings.py
|
JanGut/scannerKH
|
1814d2f110af3abdde75e253cab96300701dd531
|
[
"MIT"
] | null | null | null |
"""
Django settings for scannerKH project.
Generated by 'django-admin startproject' using Django 3.0.5.
For more information on this file, see
https://docs.djangoproject.com/en/3.0/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/3.0/ref/settings/
"""
import os
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/3.0/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = '74lbuyy!_ihecg*uh8i9^j!wq3gc_)vv$55!h&0yon03f2%c$$'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = [
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'scanner.apps.ScannerConfig',
'user.apps.UserConfig',
'grosshaendler.apps.GrosshaendlerConfig',
'artikel.apps.ArtikelConfig',
'bestellung.apps.BestellungConfig',
]
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'scannerKH.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'scannerKH.wsgi.application'
# Database
# https://docs.djangoproject.com/en/3.0/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
}
}
# Password validation
# https://docs.djangoproject.com/en/3.0/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
AUTH_USER_MODEL = 'user.User'
# Internationalization
# https://docs.djangoproject.com/en/3.0/topics/i18n/
LANGUAGE_CODE = 'de'
TIME_ZONE = 'CEST'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/3.0/howto/static-files/
STATIC_URL = '/static/'
| 25.651163
| 91
| 0.699003
|
import os

# Project root (two levels above this settings module).
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))

# SECURITY WARNING: committed key — rotate and load from the environment
# before any production deployment.
SECRET_KEY = '74lbuyy!_ihecg*uh8i9^j!wq3gc_)vv$55!h&0yon03f2%c$$'

# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True

ALLOWED_HOSTS = []

# Application definition

INSTALLED_APPS = [
    'django.contrib.admin',
    'django.contrib.auth',
    'django.contrib.contenttypes',
    'django.contrib.sessions',
    'django.contrib.messages',
    'django.contrib.staticfiles',
    'scanner.apps.ScannerConfig',
    'user.apps.UserConfig',
    'grosshaendler.apps.GrosshaendlerConfig',
    'artikel.apps.ArtikelConfig',
    'bestellung.apps.BestellungConfig',
]

MIDDLEWARE = [
    'django.middleware.security.SecurityMiddleware',
    'django.contrib.sessions.middleware.SessionMiddleware',
    'django.middleware.common.CommonMiddleware',
    'django.middleware.csrf.CsrfViewMiddleware',
    'django.contrib.auth.middleware.AuthenticationMiddleware',
    'django.contrib.messages.middleware.MessageMiddleware',
    'django.middleware.clickjacking.XFrameOptionsMiddleware',
]

ROOT_URLCONF = 'scannerKH.urls'

TEMPLATES = [
    {
        'BACKEND': 'django.template.backends.django.DjangoTemplates',
        'DIRS': [],
        'APP_DIRS': True,
        'OPTIONS': {
            'context_processors': [
                'django.template.context_processors.debug',
                'django.template.context_processors.request',
                'django.contrib.auth.context_processors.auth',
                'django.contrib.messages.context_processors.messages',
            ],
        },
    },
]

WSGI_APPLICATION = 'scannerKH.wsgi.application'

# Database
# https://docs.djangoproject.com/en/3.0/ref/settings/#databases

DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.sqlite3',
        'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
    }
}

# Password validation
# https://docs.djangoproject.com/en/3.0/ref/settings/#auth-password-validators

AUTH_PASSWORD_VALIDATORS = [
    {
        'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
    },
]

AUTH_USER_MODEL = 'user.User'

# Internationalization
# https://docs.djangoproject.com/en/3.0/topics/i18n/

LANGUAGE_CODE = 'de'

# Bug fix: 'CEST' is a DST abbreviation, not a valid IANA time zone key;
# Django raises on first time-zone use with USE_TZ = True.
TIME_ZONE = 'Europe/Berlin'

USE_I18N = True

USE_L10N = True

USE_TZ = True

# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/3.0/howto/static-files/

STATIC_URL = '/static/'
| true
| true
|
79065290e0d3103c497382cd8b322890f0dd3eb1
| 11,196
|
py
|
Python
|
app.py
|
jtsuchiyama/hawaii-covid-tracker
|
456f63728f6e17208477e7b585e997e8f8b35657
|
[
"MIT"
] | null | null | null |
app.py
|
jtsuchiyama/hawaii-covid-tracker
|
456f63728f6e17208477e7b585e997e8f8b35657
|
[
"MIT"
] | null | null | null |
app.py
|
jtsuchiyama/hawaii-covid-tracker
|
456f63728f6e17208477e7b585e997e8f8b35657
|
[
"MIT"
] | null | null | null |
import os
import time
import requests
from bs4 import BeautifulSoup
import datetime
from twilio.rest import Client
import pandas as pd
import matplotlib
import matplotlib.pyplot as plt
import math
class Data():
    # Base scraper: fetches a URL once and keeps the parsed HTML around.
    def __init__(self,link): # Automatically stores the data from parsed link as the object's attribute
        self.data = self.update(link)
        # Previous run's values; left None here — presumably populated by a
        # subclass (e.g. Hawaii.check_update) before it is read. Verify.
        self.prev = None
    def update(self,link): # Parses the site's HTML code
        # Fetch the page and parse it with the lxml backend.
        result = requests.get(link)
        soup = BeautifulSoup(result.content,'lxml')
        return soup
class Hawaii(Data):
def __init__(self,link): # Automatically stores the data from parsed link as the object's attribute (Same constructor as Class Data)
super().__init__(link)
def do_sms(self,numbers): # Creates SMS notification with the COVID data for each island
for number in numbers:
smsNotification.notify(number,self.get_data())
smsNotification.save_number(numbers)
def get_data(self): # Returns the data from today:
# Gathering all the data
today = self.get_dataframe()
order = ["Total cases","Hawai’i","Oahu","Kaua’i","Maui","Pending","Residents diagnosed outside of Hawai‘i","Required Hospitalization","Hawaii deaths","Lanai","Molokai"]
data = today.to_numpy()[0]
message = ""
for index in range(len(order)):
diff = int(data[index+1]) - int(self.prev[index])
if diff >= 0:
diff = "+" + str(diff)
else:
diff = "-" + str(diff)
line = order[index] + ": " + str(data[index+1]) + " (" + diff + ") \n"
message = message + line
return message
def get_dataframe(self): # Returns the data structure for today's data
date = self.get_date()
names = self.data.find_all('span',{'class': 'label'})
values = self.data.find_all('span',{'class': 'value'})
df = pd.DataFrame()
# Formats the names and values
for i in range(len(names)):
names[i] = names[i].text.replace(":","")
values[i] = int(values[i].text.replace("§","").replace("†","").replace("‡","").replace("*","").split(" ")[0])
# Orders the names and values in the order of the .csv
order = ["Total cases","Hawai’i","Oahu","Kaua’i","Maui","Pending","Residents diagnosed outside of Hawai‘i","Required Hospitalization","Hawaii deaths","Lanai","Molokai"]
namesOrdered = ["","","","","","","","","","",""]
valuesOrdered = ["","","","","","","","","","",""]
for i in range(len(order)):
for j in range(len(names)):
if order[i] == names[j]:
namesOrdered[i] = names[j]
valuesOrdered[i] = values[j]
dfNew = pd.DataFrame({
"Date": date,
namesOrdered[0]: valuesOrdered[0],
namesOrdered[1]: valuesOrdered[1],
namesOrdered[2]: valuesOrdered[2],
namesOrdered[3]: valuesOrdered[3],
namesOrdered[4]: valuesOrdered[4],
namesOrdered[5]: valuesOrdered[5],
namesOrdered[6]: valuesOrdered[6],
namesOrdered[7]: valuesOrdered[7],
namesOrdered[8]: valuesOrdered[8],
namesOrdered[9]: valuesOrdered[9],
namesOrdered[10]: valuesOrdered[10],
}, index = [0])
return dfNew
def get_date(self): # Returns the update date of the data in the datetime format
# Formatting
date = self.data.find_all('dd',{'class': 'small'})
date = date[0].text[33:]
date = datetime.datetime.strptime(date, '%B %d, %Y')
date = str(date.date())
return date
def do_update(self): # Does an update if the history.txt is not updated
# If the history.txt is not updated relevant to the available data, the update proceeds
if self.check_update() == False:
# Checks if the data on the website is updated; Loops the program until the data is updated
if self.get_date() != str(datetime.date.today()):
print("Data not updated. Sleeping for 1 minute.\n")
time.sleep(60)
print("Rechecking.\n")
self.do_update()
return
dfOld = pd.read_csv('data.csv', index_col = False)
dfOld = dfOld.append(self.get_dataframe())
dfOld.to_csv('data.csv', index=False)
file = "phoneNumbers.txt"
numbers = open(file,"r")
# Checks if there are any recently saved numbers
if(os.stat(file).st_size) == 0:
print("No recent phone numbers found. Please enter your phone numbers including area code and no dashes into the phoneNumbers.txt file, with each phone number tabbed.")
return
else:
paste=[]
for line in numbers:
paste.append(line.replace("\n",""))
self.do_sms(paste)
def check_update(self):
    """Check whether data.csv already holds today's row.

    Returns True if the last CSV row carries the site's current date.
    Otherwise stores the previous row's values (date column dropped) in
    ``self.prev`` for computing day-over-day deltas, and returns False.
    An empty file is initialized with the CSV header first.
    """
    file = "data.csv"
    # Empty file: write the header row before any data rows can exist.
    if os.stat(file).st_size == 0:
        File.append_file(file, "Date,Total cases,Hawai’i,Oahu,Kaua’i,Maui,Pending,Residents diagnosed outside of Hawai‘i,Required Hospitalization,Hawaii deaths,Lanai,Molokai")
        return False
    # Read only the final line; the context manager closes the handle
    # (the original leaked it, and even left it open while appending).
    with open(file, 'r') as history:
        lastLine = None
        for line in history:
            lastLine = line
    # Already updated today?
    if self.get_date() in lastLine:
        return True
    # Convert the CSV row to a list of values, dropping the date column.
    lastLine = lastLine.split(",")
    lastLine.pop(0)
    self.prev = lastLine
    return False
class smsNotification:
    """Twilio-backed SMS helpers plus persistence of recently used numbers."""

    @staticmethod
    def notify(toNumber, message):
        """Send *message* to *toNumber* via Twilio.

        Credentials are read from token.txt: account SID, auth token and the
        sending phone number, one per line.
        """
        # Context manager ensures the credentials file is closed
        # (the original never closed it).
        with open('token.txt', 'r') as f:
            accountSid, authToken, fromNumber = f.readlines()
        accountSid = accountSid.replace("\n", "")
        authToken = authToken.replace("\n", "")
        fromNumber = fromNumber.replace("\n", "")
        client = Client(accountSid, authToken)
        client.messages.create(to=toNumber, from_=fromNumber, body=message)
        print("SMS sent")

    @staticmethod
    def save_number(paste):
        """Overwrite phoneNumbers.txt with the numbers in *paste*, one per line."""
        # Context manager guarantees the file is flushed and closed; the
        # original left the write handle open, risking lost/partial writes.
        with open("phoneNumbers.txt", "w") as numbers:
            for number in paste:
                numbers.write(str(number) + "\n")
class Graph:
    """Static plotting helpers for the COVID case history."""

    @staticmethod
    def display_graph(islands, scope=None, graphType='Cases'):
        """Plot case counts (or day-over-day changes) over time.

        islands   -- list of column names from data.csv to plot.
        scope     -- optional [start_index, end_index] window into the data;
                     None or empty plots the full history.
        graphType -- 'Cases' for raw counts, anything else for daily change.
        """
        # BUG FIX: the original used a mutable default (scope=[]) and mutated
        # it in place (scope[0] = 1), so the adjustment leaked across calls;
        # it also indexed scope[0] on an empty list in the change branch.
        scope = list(scope) if scope else []
        if graphType == 'Cases':  # Raw cumulative counts
            df = pd.read_csv('data.csv', index_col=False)
        else:  # Day-over-day change
            df = App.get_df_change()
            # The diff dataframe has no row 0, so clamp the window start.
            if scope and scope[0] == 0:
                scope[0] = 1
        plt.figure(figsize=(8, 8))
        min_ = -1
        max_ = -1
        for island in islands:  # One line per requested statistic
            plt.plot(df["Date"], df[island], label=island)
            if graphType == 'Cases' and scope:
                # Track the y-range across all plotted islands.
                if min_ == -1 and max_ == -1:
                    min_ = df[island].get(scope[0])
                    max_ = df[island].get(scope[1])
                else:
                    minNow = df[island].get(scope[0])
                    maxNow = df[island].get(scope[1])
                    if minNow < min_:
                        min_ = minNow
                    elif maxNow > max_:
                        max_ = maxNow
                plt.ylim(min_, max_)
        title = "COVID Cases vs Time"
        if scope:  # Scale the x tick interval to the window size
            intervals = (scope[1] - scope[0]) / 4
            if intervals < 1:
                intervals = 1
            plt.gca().xaxis.set_major_locator(matplotlib.dates.DayLocator(interval=math.floor(intervals)))
            plt.xlim(scope[0], scope[1])
            title = title + " (" + df["Date"].get(scope[0]) + " to " + df["Date"].get(scope[1]) + ")"
        else:
            # Default tick spacing when plotting the full history.
            plt.gca().xaxis.set_major_locator(matplotlib.dates.DayLocator(interval=30))
        plt.xlabel("Date")
        if graphType == 'Cases':
            plt.ylabel("# of Cases")
        else:
            plt.ylabel("Change in Cases")
            title = title.replace("COVID Cases", "Change in COVID Cases")
        plt.title(title)
        plt.grid()
        plt.legend()
        plt.show()
class File:
    """File-system helpers."""

    @staticmethod
    def append_file(file, text):
        """Append *text* to *file*, creating the file if necessary."""
        # Context manager replaces the manual open/close pair, guaranteeing
        # the handle is closed even if write() raises.
        with open(file, 'a') as history:
            history.write(text)
class App:
    """Read-side helpers over the data.csv history."""

    @staticmethod
    def get_df():
        """Return the full history as a DataFrame."""
        return pd.read_csv('data.csv', index_col=False)

    @staticmethod
    def format_date(date):
        """Return the row index whose Date column equals *date*, or None."""
        # CONSISTENCY FIX: the original decorated only get_df with
        # @staticmethod; all four methods take no self and belong static.
        df = pd.read_csv('data.csv', index_col=False)
        for x in range(len(df["Date"])):
            if df["Date"][x] == date:
                return x
        return None

    @staticmethod
    def get_last_index():
        """Return the index of the last row in the history (-1 if empty)."""
        # Direct arithmetic replaces the original's empty pass-loop, which
        # also raised NameError on an empty frame.
        df = pd.read_csv('data.csv', index_col=False)
        return len(df["Date"]) - 1

    @staticmethod
    def get_df_change():
        """Return a DataFrame of day-over-day changes (first row dropped)."""
        df = pd.read_csv('data.csv', index_col=False)
        dates = pd.DataFrame(df['Date'])      # keep the dates aside
        df = df.drop(columns=['Date'])        # diff() must not see date strings
        dfDiff = dates.join(df.diff())        # recombine dates with the deltas
        return dfDiff.iloc[1:]                # first diff() row is all-NaN
if __name__ == "__main__":
    # Scrape the Hawaii DOH COVID page, append today's numbers to data.csv
    # (sending SMS notifications) if the history is not yet up to date.
    data=Hawaii("https://health.hawaii.gov/coronavirusdisease2019/")
    data.do_update()
    lastIndex = App.get_last_index()
    firstIndex = lastIndex - 6 # The scope is automatically set to the past 7 days
    Graph.display_graph(["Total cases"],[firstIndex,lastIndex],"Change") # Displays total cases over the past seven days
| 36.828947
| 264
| 0.567256
|
import os
import time
import requests
from bs4 import BeautifulSoup
import datetime
from twilio.rest import Client
import pandas as pd
import matplotlib
import matplotlib.pyplot as plt
import math
class Data():
    """Base wrapper around a scraped web page."""
    def __init__(self,link):
        # Parsed BeautifulSoup document for the page at `link`.
        self.data = self.update(link)
        # Previous day's values; populated later by subclasses.
        self.prev = None
    def update(self,link):
        """Fetch `link` and return its parsed BeautifulSoup tree (lxml parser)."""
        result = requests.get(link)
        soup = BeautifulSoup(result.content,'lxml')
        return soup
class Hawaii(Data):
def __init__(self,link):
super().__init__(link)
def do_sms(self,numbers): # Creates SMS notification with the COVID data for each island
for number in numbers:
smsNotification.notify(number,self.get_data())
smsNotification.save_number(numbers)
def get_data(self): # Returns the data from today:
# Gathering all the data
today = self.get_dataframe()
order = ["Total cases","Hawai’i","Oahu","Kaua’i","Maui","Pending","Residents diagnosed outside of Hawai‘i","Required Hospitalization","Hawaii deaths","Lanai","Molokai"]
data = today.to_numpy()[0]
message = ""
for index in range(len(order)):
diff = int(data[index+1]) - int(self.prev[index])
if diff >= 0:
diff = "+" + str(diff)
else:
diff = "-" + str(diff)
line = order[index] + ": " + str(data[index+1]) + " (" + diff + ") \n"
message = message + line
return message
def get_dataframe(self): # Returns the data structure for today's data
date = self.get_date()
names = self.data.find_all('span',{'class': 'label'})
values = self.data.find_all('span',{'class': 'value'})
df = pd.DataFrame()
for i in range(len(names)):
names[i] = names[i].text.replace(":","")
values[i] = int(values[i].text.replace("§","").replace("†","").replace("‡","").replace("*","").split(" ")[0])
order = ["Total cases","Hawai’i","Oahu","Kaua’i","Maui","Pending","Residents diagnosed outside of Hawai‘i","Required Hospitalization","Hawaii deaths","Lanai","Molokai"]
namesOrdered = ["","","","","","","","","","",""]
valuesOrdered = ["","","","","","","","","","",""]
for i in range(len(order)):
for j in range(len(names)):
if order[i] == names[j]:
namesOrdered[i] = names[j]
valuesOrdered[i] = values[j]
dfNew = pd.DataFrame({
"Date": date,
namesOrdered[0]: valuesOrdered[0],
namesOrdered[1]: valuesOrdered[1],
namesOrdered[2]: valuesOrdered[2],
namesOrdered[3]: valuesOrdered[3],
namesOrdered[4]: valuesOrdered[4],
namesOrdered[5]: valuesOrdered[5],
namesOrdered[6]: valuesOrdered[6],
namesOrdered[7]: valuesOrdered[7],
namesOrdered[8]: valuesOrdered[8],
namesOrdered[9]: valuesOrdered[9],
namesOrdered[10]: valuesOrdered[10],
}, index = [0])
return dfNew
def get_date(self):
date = self.data.find_all('dd',{'class': 'small'})
date = date[0].text[33:]
date = datetime.datetime.strptime(date, '%B %d, %Y')
date = str(date.date())
return date
def do_update(self):
if self.check_update() == False:
if self.get_date() != str(datetime.date.today()):
print("Data not updated. Sleeping for 1 minute.\n")
time.sleep(60)
print("Rechecking.\n")
self.do_update()
return
dfOld = pd.read_csv('data.csv', index_col = False)
dfOld = dfOld.append(self.get_dataframe())
dfOld.to_csv('data.csv', index=False)
file = "phoneNumbers.txt"
numbers = open(file,"r")
if(os.stat(file).st_size) == 0:
print("No recent phone numbers found. Please enter your phone numbers including area code and no dashes into the phoneNumbers.txt file, with each phone number tabbed.")
return
else:
paste=[]
for line in numbers:
paste.append(line.replace("\n",""))
self.do_sms(paste)
def check_update(self):
file = "data.csv"
history = open(file,'r')
if(os.stat(file).st_size) == 0:
File.append_file(file, "Date,Total cases,Hawai’i,Oahu,Kaua’i,Maui,Pending,Residents diagnosed outside of Hawai‘i,Required Hospitalization,Hawaii deaths,Lanai,Molokai")
return False
for line in history:
pass
lastLine = line
history.close()
if self.get_date() in lastLine:
return True
lastLine = lastLine.split(",")
lastLine.pop(0)
self.prev = lastLine
return False
class smsNotification:
    """Twilio-backed SMS helpers plus persistence of used numbers."""
    @staticmethod
    def notify(toNumber,message):
        """Send `message` to `toNumber` via Twilio.

        token.txt must hold the account SID, auth token and sending phone
        number, one per line.
        """
        f = open('token.txt','r')
        accountSid, authToken, fromNumber = f.readlines()
        accountSid = accountSid.replace("\n","")
        authToken = authToken.replace("\n","")
        fromNumber = fromNumber.replace("\n","")
        client = Client(accountSid, authToken)
        client.messages.create(to=toNumber,from_=fromNumber,body=message)
        print("SMS sent")
    @staticmethod
    def save_number(paste):
        """Overwrite phoneNumbers.txt with the numbers in `paste`, one per line."""
        numbers = open("phoneNumbers.txt","w")
        for number in paste:
            numbers.write(str(number) + "\n")
class Graph:
@staticmethod
def display_graph(islands,scope=[],graphType='Cases'):
if graphType == 'Cases':
df = pd.read_csv('data.csv', index_col = False)
else:
df = App.get_df_change()
if scope[0] == 0:
scope[0] = 1
plt.figure(figsize=(8,8))
min_ = -1
max_ = -1
for island in islands:
plt.plot(df["Date"], df[island], label = island)
if graphType == 'Cases':
if scope != []:
if min_ == - 1 and max_ == -1:
min_ = df[island].get(scope[0])
max_ = df[island].get(scope[1])
else:
minNow = df[island].get(scope[0])
maxNow = df[island].get(scope[1])
if minNow < min_:
min_ = minNow
elif maxNow > max_:
max_ = maxNow
plt.ylim(min_,max_)
title = "COVID Cases vs Time"
if scope != []:
intervals = (scope[1]-scope[0])/4
if intervals < 1:
intervals = 1
plt.gca().xaxis.set_major_locator(matplotlib.dates.DayLocator(interval=math.floor(intervals)))
plt.xlim(scope[0],scope[1])
title = title + " (" + df["Date"].get(scope[0]) + " to " + df["Date"].get(scope[1]) + ")"
else:
plt.gca().xaxis.set_major_locator(matplotlib.dates.DayLocator(interval=30))
plt.xlabel("Date")
if graphType == 'Cases':
plt.ylabel("# of Cases")
else:
plt.ylabel("Change in Cases")
title = title.replace("COVID Cases","Change in COVID Cases")
plt.title(title)
plt.grid()
plt.legend()
plt.show()
class File:
    """File-system helpers."""
    @staticmethod
    def append_file(file,text):
        """Append `text` to `file`, creating the file if necessary."""
        history = open(file,'a')
        history.write(text)
        history.close()
class App:
    """Read-side helpers over the data.csv history."""
    @staticmethod
    def get_df():
        """Return the full history as a DataFrame."""
        return pd.read_csv('data.csv', index_col = False)
    def format_date(date):
        """Return the row index whose Date column equals `date` (None if absent)."""
        df = pd.read_csv('data.csv', index_col = False)
        for x in range(len(df["Date"])):
            if df["Date"][x] == date:
                return x
    def get_last_index():
        """Return the index of the last row of the history."""
        df = pd.read_csv('data.csv', index_col = False)
        # NOTE(review): this loop merely walks `index` to its final value —
        # equivalent to len(df["Date"]) - 1 for non-empty data.
        for index in range(len(df["Date"])):
            pass
        return index
    def get_df_change():
        """Return the day-over-day change DataFrame (first row dropped)."""
        df = pd.read_csv('data.csv', index_col = False)
        dates = df['Date']
        dates = pd.DataFrame(dates) # Keep the dates aside
        df = df.drop(columns=['Date']) # diff() must not see the date strings
        dfDiff = df.diff()
        dfDiff = dates.join(dfDiff) # Recombine dates with the deltas
        dfDiff = dfDiff.iloc[1:] # Drop the first row (diff() yields NaN there)
        return dfDiff
if __name__ == "__main__":
data=Hawaii("https://health.hawaii.gov/coronavirusdisease2019/")
data.do_update()
lastIndex = App.get_last_index()
firstIndex = lastIndex - 6
Graph.display_graph(["Total cases"],[firstIndex,lastIndex],"Change")
| true
| true
|
790653625db722369992146051dd80a8d2df7b86
| 10,465
|
py
|
Python
|
voc_classifier/metrics_for_multilabel.py
|
myeonghak/kobert-multi-label-VOC-classifier
|
983524e8331b5e833d85779dfe7521c21bf2d1cd
|
[
"Apache-2.0"
] | 6
|
2021-08-18T00:52:38.000Z
|
2021-12-03T12:37:18.000Z
|
voc_classifier/metrics_for_multilabel.py
|
myeonghak/kobert-multi-label-VOC-classifier
|
983524e8331b5e833d85779dfe7521c21bf2d1cd
|
[
"Apache-2.0"
] | null | null | null |
voc_classifier/metrics_for_multilabel.py
|
myeonghak/kobert-multi-label-VOC-classifier
|
983524e8331b5e833d85779dfe7521c21bf2d1cd
|
[
"Apache-2.0"
] | 1
|
2022-03-24T08:02:44.000Z
|
2022-03-24T08:02:44.000Z
|
# https://github.com/iliaschalkidis/lmtc-eurlex57k/blob/master/metrics.py
from sklearn.metrics import accuracy_score
from sklearn.metrics import precision_score
from sklearn.metrics import recall_score
from sklearn.metrics import f1_score
import numpy as np
def mean_precision_k(y_true, y_score, k=10):
    """Mean precision at rank k.

    Averages ranking_precision_score over every sample that contains at
    least one positive label.

    Parameters
    ----------
    y_true : array-like, shape = [n_samples]
        Ground truth (true relevance labels).
    y_score : array-like, shape = [n_samples]
        Predicted scores.
    k : int
        Rank.

    Returns
    -------
    mean precision @k : float
    """
    per_sample = [
        ranking_precision_score(t, s, k=k)
        for t, s in zip(y_true, y_score)
        if np.sum(t == 1)
    ]
    return np.mean(per_sample)
def mean_recall_k(y_true, y_score, k=10):
    """Mean recall at rank k.

    Averages ranking_recall_score over every sample that contains at least
    one positive label.

    Parameters
    ----------
    y_true : array-like, shape = [n_samples]
        Ground truth (true relevance labels).
    y_score : array-like, shape = [n_samples]
        Predicted scores.
    k : int
        Rank.

    Returns
    -------
    mean recall @k : float
    """
    per_sample = [
        ranking_recall_score(t, s, k=k)
        for t, s in zip(y_true, y_score)
        if np.sum(t == 1)
    ]
    return np.mean(per_sample)
def mean_ndcg_score(y_true, y_score, k=10, gains="exponential"):
    """Mean normalized discounted cumulative gain (NDCG) at rank k.

    Averages ndcg_score over every sample that contains at least one
    positive label.

    Parameters
    ----------
    y_true : array-like, shape = [n_samples]
        Ground truth (true relevance labels).
    y_score : array-like, shape = [n_samples]
        Predicted scores.
    k : int
        Rank.
    gains : str
        Whether gains should be "exponential" (default) or "linear".

    Returns
    -------
    Mean NDCG @k : float
    """
    per_sample = [
        ndcg_score(t, s, k=k, gains=gains)
        for t, s in zip(y_true, y_score)
        if np.sum(t == 1)
    ]
    return np.mean(per_sample)
def mean_rprecision_k(y_true, y_score, k=10):
    """Mean R-precision at rank k.

    Averages ranking_rprecision_score over every sample that contains at
    least one positive label.

    Parameters
    ----------
    y_true : array-like, shape = [n_samples]
        Ground truth (true relevance labels).
    y_score : array-like, shape = [n_samples]
        Predicted scores.
    k : int
        Rank.

    Returns
    -------
    mean precision @k : float
    """
    per_sample = [
        ranking_rprecision_score(t, s, k=k)
        for t, s in zip(y_true, y_score)
        if np.sum(t == 1)
    ]
    return np.mean(per_sample)
def ranking_recall_score(y_true, y_score, k=10):
    """Recall at rank k.

    Parameters
    ----------
    y_true : array-like, shape = [n_samples]
        Ground truth (true relevance labels, exactly two levels).
    y_score : array-like, shape = [n_samples]
        Predicted scores.
    k : int
        Rank.

    Returns
    -------
    recall @k : float

    Raises
    ------
    ValueError
        If y_true has a single relevance level (recall undefined) or more
        than two levels.
    """
    unique_y = np.unique(y_true)
    if len(unique_y) == 1:
        # BUG FIX: the original *returned* the ValueError instance instead
        # of raising it, silently handing callers an exception object.
        raise ValueError("The score cannot be approximated.")
    elif len(unique_y) > 2:
        raise ValueError("Only supported for two relevance levels.")
    pos_label = unique_y[1]
    n_pos = np.sum(y_true == pos_label)
    # Labels of the k highest-scoring items.
    order = np.argsort(y_score)[::-1]
    y_true = np.take(y_true, order[:k])
    n_relevant = np.sum(y_true == pos_label)
    return float(n_relevant) / n_pos
def ranking_precision_score(y_true, y_score, k=10):
    """Precision at rank k.

    Parameters
    ----------
    y_true : array-like, shape = [n_samples]
        Ground truth (true relevance labels, exactly two levels).
    y_score : array-like, shape = [n_samples]
        Predicted scores.
    k : int
        Rank.

    Returns
    -------
    precision @k : float

    Raises
    ------
    ValueError
        If y_true has a single relevance level or more than two levels.
    """
    unique_y = np.unique(y_true)
    if len(unique_y) == 1:
        # BUG FIX: the original *returned* the ValueError instance instead
        # of raising it.
        raise ValueError("The score cannot be approximated.")
    elif len(unique_y) > 2:
        raise ValueError("Only supported for two relevance levels.")
    pos_label = unique_y[1]
    # Labels of the k highest-scoring items.
    order = np.argsort(y_score)[::-1]
    y_true = np.take(y_true, order[:k])
    n_relevant = np.sum(y_true == pos_label)
    return float(n_relevant) / k
def ranking_rprecision_score(y_true, y_score, k=10):
    """R-precision at rank k: relevant@k divided by min(k, n_pos).

    Parameters
    ----------
    y_true : array-like, shape = [n_samples]
        Ground truth (true relevance labels, exactly two levels).
    y_score : array-like, shape = [n_samples]
        Predicted scores.
    k : int
        Rank.

    Returns
    -------
    precision @k : float

    Raises
    ------
    ValueError
        If y_true has a single relevance level or more than two levels.
    """
    unique_y = np.unique(y_true)
    if len(unique_y) == 1:
        # BUG FIX: the original *returned* the ValueError instance instead
        # of raising it.
        raise ValueError("The score cannot be approximated.")
    elif len(unique_y) > 2:
        raise ValueError("Only supported for two relevance levels.")
    pos_label = unique_y[1]
    n_pos = np.sum(y_true == pos_label)
    order = np.argsort(y_score)[::-1]
    y_true = np.take(y_true, order[:k])
    n_relevant = np.sum(y_true == pos_label)
    # Divide by min(n_pos, k) so the best achievable score is always 1.0.
    return float(n_relevant) / min(k, n_pos)
def average_precision_score(y_true, y_score, k=10):
    """Average precision at rank k.

    Parameters
    ----------
    y_true : array-like, shape = [n_samples]
        Ground truth (true relevance labels, exactly two levels).
    y_score : array-like, shape = [n_samples]
        Predicted scores.
    k : int
        Rank.

    Returns
    -------
    average precision @k : float

    Raises
    ------
    ValueError
        If y_true has a single relevance level or more than two levels.
    """
    unique_y = np.unique(y_true)
    if len(unique_y) == 1:
        # BUG FIX: the original *returned* the ValueError instance instead
        # of raising it.
        raise ValueError("The score cannot be approximated.")
    elif len(unique_y) > 2:
        raise ValueError("Only supported for two relevance levels.")
    pos_label = unique_y[1]
    # Two relevance levels guarantee pos_label occurs, so n_pos >= 1.
    n_pos = np.sum(y_true == pos_label)
    order = np.argsort(y_score)[::-1][:min(n_pos, k)]
    y_true = np.asarray(y_true)[order]
    score = 0.0
    n_seen_pos = 0
    for i in range(len(y_true)):
        if y_true[i] == pos_label:
            # Precision up to and including position i, via a running count
            # of positives (the original recounted from scratch in O(n^2)).
            n_seen_pos += 1
            score += n_seen_pos / (i + 1.0)
    return score / n_pos
def dcg_score(y_true, y_score, k=10, gains="exponential"):
    """Discounted cumulative gain (DCG) at rank k.

    Parameters
    ----------
    y_true : array-like, shape = [n_samples]
        Ground truth (true relevance labels).
    y_score : array-like, shape = [n_samples]
        Predicted scores.
    k : int
        Rank.
    gains : str
        Whether gains should be "exponential" (default) or "linear".

    Returns
    -------
    DCG @k : float
    """
    # Relevance labels of the k highest-scoring items.
    top = np.take(y_true, np.argsort(y_score)[::-1][:k])
    if gains == "exponential":
        gain_values = 2 ** top - 1
    elif gains == "linear":
        gain_values = top
    else:
        raise ValueError("Invalid gains option.")
    # Ranks are 1-based, hence the log argument starts at 2.
    discounts = np.log2(np.arange(len(top)) + 2)
    return np.sum(gain_values / discounts)
def ndcg_score(y_true, y_score, k=10, gains="exponential"):
    """Normalized discounted cumulative gain (NDCG) at rank k.

    Parameters
    ----------
    y_true : array-like, shape = [n_samples]
        Ground truth (true relevance labels).
    y_score : array-like, shape = [n_samples]
        Predicted scores.
    k : int
        Rank.
    gains : str
        Whether gains should be "exponential" (default) or "linear".

    Returns
    -------
    NDCG @k : float
    """
    # Ideal DCG: rank the items by their true relevance.
    ideal = dcg_score(y_true, y_true, k, gains)
    return dcg_score(y_true, y_score, k, gains) / ideal
# Alternative API.
def dcg_from_ranking(y_true, ranking):
    """Discounted cumulative gain (DCG) of an explicit ranking.

    Parameters
    ----------
    y_true : array-like, shape = [n_samples]
        Ground truth (true relevance labels).
    ranking : array-like, shape = [k]
        Document indices: ranking[0] is the top-ranked document,
        ranking[1] the second-ranked, and so on.

    Returns
    -------
    DCG @k : float
    """
    # Relevance of each ranked document, in rank order.
    rel = np.asarray(y_true)[np.asarray(ranking)]
    # Exponential gains; ranks are 1-based so the log argument starts at 2.
    return np.sum((2 ** rel - 1) / np.log2(np.arange(len(ranking)) + 2))
def ndcg_from_ranking(y_true, ranking):
    """Normalized discounted cumulative gain (NDCG) of an explicit ranking.

    Parameters
    ----------
    y_true : array-like, shape = [n_samples]
        Ground truth (true relevance labels).
    ranking : array-like, shape = [k]
        Document indices: ranking[0] is the top-ranked document,
        ranking[1] the second-ranked, and so on.

    Returns
    -------
    NDCG @k : float
    """
    k = len(ranking)
    # Ideal ordering: documents sorted by true relevance, truncated to k.
    ideal_order = np.argsort(y_true)[::-1][:k]
    best = dcg_from_ranking(y_true, ideal_order)
    return dcg_from_ranking(y_true, ranking) / best
def colwise_accuracy(y_true, y_pred):
    """Mean per-label (column-wise) accuracy for multi-label predictions.

    Transposes both arrays so each row is one label's column across all
    samples, scores each label with accuracy_score, and averages.
    """
    per_label = [accuracy_score(p, t) for p, t in zip(y_pred.T, y_true.T)]
    return sum(per_label) / len(per_label)
def calculate_metrics(pred, target, threshold=0.5):
    """Binarize `pred` at `threshold` and compute multi-label metrics.

    Returns a dict with subset accuracy, column-wise accuracy, and
    precision/recall/F1 under micro, macro and samples averaging.
    """
    # Scores above the threshold become 1.0, everything else 0.0.
    pred = np.array(pred > threshold, dtype=float)
    return {'Accuracy': accuracy_score(y_true=target, y_pred=pred),
            'Column-wise Accuracy': colwise_accuracy(y_true=target, y_pred=pred),
            'micro/precision': precision_score(y_true=target, y_pred=pred, average='micro'),
            'micro/recall': recall_score(y_true=target, y_pred=pred, average='micro'),
            'micro/f1': f1_score(y_true=target, y_pred=pred, average='micro'),
            'macro/precision': precision_score(y_true=target, y_pred=pred, average='macro'),
            'macro/recall': recall_score(y_true=target, y_pred=pred, average='macro'),
            'macro/f1': f1_score(y_true=target, y_pred=pred, average='macro'),
            'samples/precision': precision_score(y_true=target, y_pred=pred, average='samples'),
            'samples/recall': recall_score(y_true=target, y_pred=pred, average='samples'),
            'samples/f1': f1_score(y_true=target, y_pred=pred, average='samples'),
            }
| 28.360434
| 96
| 0.602389
|
from sklearn.metrics import accuracy_score
from sklearn.metrics import precision_score
from sklearn.metrics import recall_score
from sklearn.metrics import f1_score
import numpy as np
def mean_precision_k(y_true, y_score, k=10):
p_ks = []
for y_t, y_s in zip(y_true, y_score):
if np.sum(y_t == 1):
p_ks.append(ranking_precision_score(y_t, y_s, k=k))
return np.mean(p_ks)
def mean_recall_k(y_true, y_score, k=10):
r_ks = []
for y_t, y_s in zip(y_true, y_score):
if np.sum(y_t == 1):
r_ks.append(ranking_recall_score(y_t, y_s, k=k))
return np.mean(r_ks)
def mean_ndcg_score(y_true, y_score, k=10, gains="exponential"):
ndcg_s = []
for y_t, y_s in zip(y_true, y_score):
if np.sum(y_t == 1):
ndcg_s.append(ndcg_score(y_t, y_s, k=k, gains=gains))
return np.mean(ndcg_s)
def mean_rprecision_k(y_true, y_score, k=10):
p_ks = []
for y_t, y_s in zip(y_true, y_score):
if np.sum(y_t == 1):
p_ks.append(ranking_rprecision_score(y_t, y_s, k=k))
return np.mean(p_ks)
def ranking_recall_score(y_true, y_score, k=10):
unique_y = np.unique(y_true)
if len(unique_y) == 1:
return ValueError("The score cannot be approximated.")
elif len(unique_y) > 2:
raise ValueError("Only supported for two relevance levels.")
pos_label = unique_y[1]
n_pos = np.sum(y_true == pos_label)
order = np.argsort(y_score)[::-1]
y_true = np.take(y_true, order[:k])
n_relevant = np.sum(y_true == pos_label)
return float(n_relevant) / n_pos
def ranking_precision_score(y_true, y_score, k=10):
unique_y = np.unique(y_true)
if len(unique_y) == 1:
return ValueError("The score cannot be approximated.")
elif len(unique_y) > 2:
raise ValueError("Only supported for two relevance levels.")
pos_label = unique_y[1]
order = np.argsort(y_score)[::-1]
y_true = np.take(y_true, order[:k])
n_relevant = np.sum(y_true == pos_label)
return float(n_relevant) / k
def ranking_rprecision_score(y_true, y_score, k=10):
unique_y = np.unique(y_true)
if len(unique_y) == 1:
return ValueError("The score cannot be approximated.")
elif len(unique_y) > 2:
raise ValueError("Only supported for two relevance levels.")
pos_label = unique_y[1]
n_pos = np.sum(y_true == pos_label)
order = np.argsort(y_score)[::-1]
y_true = np.take(y_true, order[:k])
n_relevant = np.sum(y_true == pos_label)
return float(n_relevant) / min(k, n_pos)
def average_precision_score(y_true, y_score, k=10):
unique_y = np.unique(y_true)
if len(unique_y) == 1:
return ValueError("The score cannot be approximated.")
elif len(unique_y) > 2:
raise ValueError("Only supported for two relevance levels.")
pos_label = unique_y[1]
n_pos = np.sum(y_true == pos_label)
order = np.argsort(y_score)[::-1][:min(n_pos, k)]
y_true = np.asarray(y_true)[order]
score = 0
for i in range(len(y_true)):
if y_true[i] == pos_label:
prec = 0
for j in range(0, i + 1):
if y_true[j] == pos_label:
prec += 1.0
prec /= (i + 1.0)
score += prec
if n_pos == 0:
return 0
return score / n_pos
def dcg_score(y_true, y_score, k=10, gains="exponential"):
order = np.argsort(y_score)[::-1]
y_true = np.take(y_true, order[:k])
if gains == "exponential":
gains = 2 ** y_true - 1
elif gains == "linear":
gains = y_true
else:
raise ValueError("Invalid gains option.")
discounts = np.log2(np.arange(len(y_true)) + 2)
return np.sum(gains / discounts)
def ndcg_score(y_true, y_score, k=10, gains="exponential"):
best = dcg_score(y_true, y_true, k, gains)
actual = dcg_score(y_true, y_score, k, gains)
return actual / best
def dcg_from_ranking(y_true, ranking):
y_true = np.asarray(y_true)
ranking = np.asarray(ranking)
rel = y_true[ranking]
gains = 2 ** rel - 1
discounts = np.log2(np.arange(len(ranking)) + 2)
return np.sum(gains / discounts)
def ndcg_from_ranking(y_true, ranking):
k = len(ranking)
best_ranking = np.argsort(y_true)[::-1]
best = dcg_from_ranking(y_true, best_ranking[:k])
return dcg_from_ranking(y_true, ranking) / best
def colwise_accuracy(y_true,y_pred):
y_pred=y_pred.T
y_true=y_true.T
acc_list=[]
for cate in range(0,y_pred.shape[0]):
acc_list.append(accuracy_score(y_pred[cate],y_true[cate]))
return sum(acc_list)/len(acc_list)
def calculate_metrics(pred, target, threshold=0.5):
pred = np.array(pred > threshold, dtype=float)
return {'Accuracy': accuracy_score(y_true=target, y_pred=pred),
'Column-wise Accuracy': colwise_accuracy(y_true=target, y_pred=pred),
'micro/precision': precision_score(y_true=target, y_pred=pred, average='micro'),
'micro/recall': recall_score(y_true=target, y_pred=pred, average='micro'),
'micro/f1': f1_score(y_true=target, y_pred=pred, average='micro'),
'macro/precision': precision_score(y_true=target, y_pred=pred, average='macro'),
'macro/recall': recall_score(y_true=target, y_pred=pred, average='macro'),
'macro/f1': f1_score(y_true=target, y_pred=pred, average='macro'),
'samples/precision': precision_score(y_true=target, y_pred=pred, average='samples'),
'samples/recall': recall_score(y_true=target, y_pred=pred, average='samples'),
'samples/f1': f1_score(y_true=target, y_pred=pred, average='samples'),
}
| true
| true
|
79065452ef9d0ced2e62447f9f620825073dedd2
| 154
|
py
|
Python
|
src/models/HostsModel.py
|
xrzhev/abysswatcher-api
|
e387127c6ecffb8ff820ca7657e0e25a9ba98ca8
|
[
"MIT"
] | null | null | null |
src/models/HostsModel.py
|
xrzhev/abysswatcher-api
|
e387127c6ecffb8ff820ca7657e0e25a9ba98ca8
|
[
"MIT"
] | null | null | null |
src/models/HostsModel.py
|
xrzhev/abysswatcher-api
|
e387127c6ecffb8ff820ca7657e0e25a9ba98ca8
|
[
"MIT"
] | null | null | null |
from pydantic import BaseModel, HttpUrl
from typing import List
class RegisterHostModel(BaseModel):
    """Request body for registering a host to watch.

    Pydantic validates the fields on construction:
    # name  -- human-readable label for the host
    # url   -- base URL; HttpUrl enforces a well-formed http(s) address
    # ports -- list of TCP port numbers to monitor
    """
    name: str
    url: HttpUrl
    ports: List[int]
| 19.25
| 39
| 0.74026
|
from pydantic import BaseModel, HttpUrl
from typing import List
class RegisterHostModel(BaseModel):
name: str
url: HttpUrl
ports: List[int]
| true
| true
|
790654c3c78d2d88e919b53b5edf4132bdfff07f
| 1,271
|
py
|
Python
|
pysc2/bin/battle_net_maps.py
|
rainwangphy/pysc2
|
9e3e5af7ba02b65ade3b902d830770cfa24d7c74
|
[
"Apache-2.0"
] | null | null | null |
pysc2/bin/battle_net_maps.py
|
rainwangphy/pysc2
|
9e3e5af7ba02b65ade3b902d830770cfa24d7c74
|
[
"Apache-2.0"
] | null | null | null |
pysc2/bin/battle_net_maps.py
|
rainwangphy/pysc2
|
9e3e5af7ba02b65ade3b902d830770cfa24d7c74
|
[
"Apache-2.0"
] | null | null | null |
#!/usr/bin/python
# Copyright 2019 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS-IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Print the list of available maps according to the game."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from absl import app
from pysc2 import run_configs
def main(unused_argv):
    """Launch StarCraft II (no rendering) and print the maps it reports."""
    # Start a game instance just long enough to query the map lists.
    with run_configs.get().start(want_rgb=False) as controller:
        available_maps = controller.available_maps()
    # NOTE(review): indentation was lost in extraction; upstream pysc2 prints
    # after the `with` block closes the game — confirm against the repo.
    print("\n")
    print("Local map paths:")
    for m in sorted(available_maps.local_map_paths):
        print(" ", m)
    print()
    print("Battle.net maps:")
    for m in sorted(available_maps.battlenet_map_names):
        print(" ", m)

if __name__ == "__main__":
    app.run(main)
| 31
| 74
| 0.730921
|
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from absl import app
from pysc2 import run_configs
def main(unused_argv):
with run_configs.get().start(want_rgb=False) as controller:
available_maps = controller.available_maps()
print("\n")
print("Local map paths:")
for m in sorted(available_maps.local_map_paths):
print(" ", m)
print()
print("Battle.net maps:")
for m in sorted(available_maps.battlenet_map_names):
print(" ", m)
if __name__ == "__main__":
app.run(main)
| true
| true
|
79065532d452f966f3cc40595e26fdfe54efe0f5
| 7,380
|
py
|
Python
|
imageLoader.py
|
gitpharm01/Parapose
|
220f3af30011e1dd7c0d5f20660a1dd01eab63db
|
[
"Apache-2.0"
] | null | null | null |
imageLoader.py
|
gitpharm01/Parapose
|
220f3af30011e1dd7c0d5f20660a1dd01eab63db
|
[
"Apache-2.0"
] | null | null | null |
imageLoader.py
|
gitpharm01/Parapose
|
220f3af30011e1dd7c0d5f20660a1dd01eab63db
|
[
"Apache-2.0"
] | null | null | null |
import numpy as np
import random
import os
import json
import math
import cv2
def getPaddedROI(img, center_x, center_y, width, height):
    """Return a width x height region of *img* centered at (center_x, center_y).

    Regions extending past the image border are padded with black
    (cv2.BORDER_CONSTANT) so the result always has the requested size.
    Removes the original's large blocks of commented-out debug prints and
    replaces np.size(img, axis) with the idiomatic img.shape.
    """
    paddingColor = [0, 0, 0]  # black fill for out-of-image areas
    # Inclusive-exclusive ROI bounds; the -1 keeps the original's centering.
    top_left_x = center_x - int(width / 2) - 1
    top_left_y = center_y - int(height / 2) - 1
    bottom_right_x = center_x + int(width / 2)
    bottom_right_y = center_y + int(height / 2)
    img_height, img_width = img.shape[0], img.shape[1]
    if top_left_x < 0 or top_left_y < 0 or bottom_right_x > img_width or bottom_right_y > img_height:
        # ROI sticks out of the image: crop what is available, then pad the
        # missing sides back up to the requested size.
        border_left = border_right = border_top = border_bottom = 0
        if top_left_x < 0:
            width = width + top_left_x
            border_left = -top_left_x
            top_left_x = 0
        if top_left_y < 0:
            height = height + top_left_y
            border_top = -top_left_y
            top_left_y = 0
        if bottom_right_x > img_width:
            width = width - (bottom_right_x - img_width)
            border_right = bottom_right_x - img_width
        if bottom_right_y > img_height:
            height = height - (bottom_right_y - img_height)
            border_bottom = bottom_right_y - img_height
        img_roi = img[top_left_y:bottom_right_y, top_left_x:bottom_right_x]
        img_roi = cv2.copyMakeBorder(img_roi, border_top, border_bottom, border_left, border_right, cv2.BORDER_CONSTANT, value=paddingColor)
    else:
        # Fully interior ROI: a plain slice suffices.
        img_roi = img[top_left_y:bottom_right_y, top_left_x:bottom_right_x]
    return img_roi
#similarity map converter
#convert 16 target ground truth label(coordinates) into 16 Distance maps
#Each map have value '0' on the kepoint and '32'(according to the length of the generated Hash codes) on non-keypoint areas
def make_heatmap(emptymap, joint_idx, point, sigma):
    """Write an unnormalized Gaussian bump centered at *point* into channel
    *joint_idx* of *emptymap*, in place.

    Pixels whose exponent exceeds the cutoff (exp(-4.605) ~ 1%) are left
    untouched; existing values are kept when larger, and the result is
    clamped to 1.0.
    """
    cx, cy = point
    _, height, width = emptymap.shape[:3]
    cutoff = 4.605  # -log(0.01): ignore contributions below ~1%
    radius = math.sqrt(cutoff * 2) * sigma
    # Bounding box of the affected region, clipped to the map extent.
    x_lo, x_hi = int(max(0, cx - radius)), int(min(width, cx + radius))
    y_lo, y_hi = int(max(0, cy - radius)), int(min(height, cy + radius))
    for y in range(y_lo, y_hi):
        for x in range(x_lo, x_hi):
            d = (x - cx) ** 2 + (y - cy) ** 2
            arg = d / 2.0 / sigma / sigma
            if arg > cutoff:
                continue
            value = max(emptymap[joint_idx][y][x], math.exp(-arg))
            emptymap[joint_idx][y][x] = min(value, 1.0)
def _crop_hint_rois(img, labels, resize_ratio, roi_size):
    """Scale joint coordinates to the resized image (in place, matching the
    original behaviour) and crop one padded ROI per joint."""
    for joint in labels:
        joint[0] = joint[0] * resize_ratio[0]
        joint[1] = joint[1] * resize_ratio[1]
    return [
        getPaddedROI(img, int(joint[0]), int(joint[1]), roi_size, roi_size)
        for joint in labels
    ]


def training_data_feeder(joint_data_path, train_val_path, imgpath, input_size, hint_roi_size):
    """Assemble one random training sample.

    Picks a random group from the train split, samples two "hint" images and
    one "target" image from that group's directory, crops per-joint ROIs from
    the hint images and renders inverted per-joint distance maps (16 x 76 x 76)
    for the target.

    Returns (hintSet01, hintSet02, t_img, heatmap).
    """
    # FIX: the original leaked both open() handles; close them deterministically.
    with open(train_val_path) as split_file:
        train_groups = json.loads(split_file.readline().strip())["train_set"]
    index = random.choice(train_groups)

    # Image directories use a zero-padded, 1-based group index ("001", ...);
    # zfill replaces the original manual padding branches.
    dir_name = str(index + 1).zfill(3)
    dir_path = imgpath + dir_name + "/"

    # Candidate image files (names longer than 5 chars, per the original
    # filter); draw 3 distinct names: two hint images, one target image.
    file_list = [name for name in os.listdir(dir_path) if len(name) > 5]
    sample_name = random.sample(file_list, 3)
    h_img1 = cv2.imread(dir_path + sample_name[0])
    h_img2 = cv2.imread(dir_path + sample_name[1])
    t_img = cv2.imread(dir_path + sample_name[2])

    # Load the joint coordinates (one JSON record per line) for each sample.
    h_label1 = []
    h_label2 = []
    t_label = []
    with open(joint_data_path) as label_file:
        for line in label_file:
            datum = json.loads(line.strip())
            if datum["filename"] == sample_name[0]:
                for joint in datum["joint_pos"]:
                    h_label1.append(joint[1])
            elif datum["filename"] == sample_name[1]:
                for joint in datum["joint_pos"]:
                    h_label2.append(joint[1])
            elif datum["filename"] == sample_name[2]:
                for joint in datum["joint_pos"]:
                    t_label.append(joint[1])

    # Ratios mapping original pixel coords to the resized input; the target
    # ratio is normalised to [0, 1] and scaled to the heatmap size below.
    resize_ratioh1 = (input_size / h_img1.shape[1], input_size / h_img1.shape[0])
    resize_ratioh2 = (input_size / h_img2.shape[1], input_size / h_img2.shape[0])
    resize_ratiot = (1 / t_img.shape[1], 1 / t_img.shape[0])
    h_img1 = cv2.resize(h_img1, (input_size, input_size))
    h_img2 = cv2.resize(h_img2, (input_size, input_size))
    t_img = cv2.resize(t_img, (input_size, input_size))

    # One padded ROI per joint from each hint image.
    hintSet01 = _crop_hint_rois(h_img1, h_label1, resize_ratioh1, hint_roi_size)
    hintSet02 = _crop_hint_rois(h_img2, h_label2, resize_ratioh2, hint_roi_size)

    # Render one Gaussian map per joint at heatmap resolution (76 x 76),
    # then invert so keypoints are 0 and background is 1.
    heatmap = np.zeros((16, 76, 76), dtype=np.float32)
    for joint_idx, joint in enumerate(t_label):
        point = [joint[0] * resize_ratiot[0] * 76, joint[1] * resize_ratiot[1] * 76]
        make_heatmap(heatmap, joint_idx, point, 1)  # sigma = 1
    heatmap = 1 - heatmap
    return hintSet01, hintSet02, t_img, heatmap
#cv2.imshow("img_point",img_point)
#cv2.waitKey(0)
#cv2.imshow("h_img1",h_img1)
#cv2.imshow("h_img2",h_img2)
#cv2.imshow("t_img",t_img)
#cv2.waitKey(0)
#define sub function crop roi
#return roi*16
#crop rois x 2 times to get 2 hintsets
#return hintset01,hintset02,target image, target label
#joint_data_path = "./custom_data.json"
#train_val_path = "./train_val_indices.json"
#imgpath = "./000/"
#input_size = 400
#hint_roi = 14
#hintSet01,hintSet02,t_img, heatmap = training_data_feeder(joint_data_path, train_val_path, imgpath, input_size, hint_roi )
#print(np.shape(heatmap))
#cv2.imshow('target_image',t_img)
#for i in range(16):
# cv2.imshow('heat map',heatmap[i])
# cv2.waitKey(0)
| 35.311005
| 137
| 0.628862
|
import numpy as np
import random
import os
import json
import math
import cv2
def getPaddedROI(img, center_x, center_y, width, height):
    """Crop a region of interest centred on (center_x, center_y) from img.

    If the requested window extends past the image border, the in-image part
    is cropped and the missing area is filled with black by
    cv2.copyMakeBorder, so the result always has the full window size.
    NOTE(review): the window span is bottom_right - top_left
    = 2*int(width/2) + 1, which equals `width` only for odd sizes; for even
    `width`/`height` the ROI is one pixel larger — confirm this is intended.
    """
    # Fill colour for out-of-image area (black; one value per channel).
    paddingColor = [0,0,0]
    # Window corners around the centre point.
    top_left_x = center_x - int(width/2)-1
    top_left_y = center_y - int(height/2)-1
    bottom_right_x = center_x + int(width/2)
    bottom_right_y = center_y + int(height/2)
    img_height = np.size(img, 0)
    img_width = np.size(img, 1)
    if(top_left_x <0 or top_left_y <0 or bottom_right_x >img_width or bottom_right_y > img_height):
        # Window sticks out of the image: clip each side and remember how
        # much constant-colour border must be added back on that side.
        border_left = 0
        border_right = 0
        border_top= 0
        border_bottom= 0
        if(top_left_x < 0):
            width = width + top_left_x
            border_left = -1 * top_left_x
            top_left_x = 0
        if(top_left_y < 0):
            height = height + top_left_y
            border_top = -1 * top_left_y
            top_left_y = 0
        if(bottom_right_x > img_width):
            width = width -(bottom_right_x - img_width)
            border_right = bottom_right_x - img_width
        if(bottom_right_y> img_height):
            height = height -(bottom_right_y - img_height)
            border_bottom = bottom_right_y - img_height
        # Crop the in-image part, then pad it back up to the window size.
        img_roi = img[top_left_y : bottom_right_y ,top_left_x : bottom_right_x ]
        img_roi = cv2.copyMakeBorder(img_roi, border_top,border_bottom,border_left, border_right, cv2.BORDER_CONSTANT,value=paddingColor)
    else:
        # Window lies fully inside the image: plain slice.
        img_roi = img[top_left_y : bottom_right_y ,top_left_x : bottom_right_x ]
    return img_roi
def make_heatmap(emptymap ,joint_idx, point, sigma):
    """Draw an unnormalised Gaussian peak for one joint into emptymap[joint_idx].

    Modifies the map in place: each pixel inside the cutoff radius gets
    max(existing, exp(-d^2 / (2*sigma^2))), clamped to 1.0; pixels outside
    the cutoff are left untouched.
    """
    point_x,point_y = point
    _, height, width = emptymap.shape[:3]
    # exp(-4.605) ~ 0.01: contributions below ~1% of the peak are skipped.
    th= 4.605
    # Radius (in sigma units) at which the exponent reaches the threshold.
    delta = math.sqrt(th * 2)
    # Bounding box of affected pixels, clipped to the map extents.
    x0 = int(max(0, point_x - delta * sigma))
    y0 = int(max(0, point_y - delta * sigma))
    x1 = int(min(width, point_x + delta * sigma))
    y1 = int(min(height, point_y + delta * sigma))
    for y in range(y0,y1):
        for x in range(x0,x1):
            d = (x - point_x)**2 + (y - point_y)**2
            exp = d / 2.0 / sigma / sigma
            if exp > th:
                # Corner of the bounding box outside the circular cutoff.
                continue
            # Keep the strongest response seen so far, capped at 1.0.
            emptymap[joint_idx][y][x] = max (emptymap[joint_idx][y][x], math.exp(-exp))
            emptymap[joint_idx][y][x] = min (emptymap[joint_idx][y][x], 1.0)
def training_data_feeder(joint_data_path, train_val_path, imgpath, input_size, hint_roi_size):
    """Assemble one random training sample.

    Picks a random group from the train split, samples two "hint" images and
    one "target" image from that group's directory, crops per-joint ROIs from
    the hint images, and renders inverted per-joint distance maps
    (16 x 76 x 76) for the target.

    Returns (hintSet01, hintSet02, t_img, heatmap).
    NOTE(review): the two open() handles below are never closed.
    """
    # First line of the split file is a JSON object holding the train indices.
    train_val = open(train_val_path).readlines()
    train_groups = json.loads(train_val[0].strip())["train_set"]
    index = random.choice(train_groups)
    # Directories are named with the zero-padded 1-based group index ("001", ...).
    dir_name = str(index+1)
    if((index+1) < 100):
        dir_name ="0"+ dir_name
    if((index+1) < 10):
        dir_name = "0" + dir_name
    dir_path = imgpath + dir_name + "/"
    # Candidate image files (names longer than 5 chars); sample 3 distinct
    # names: two hint images and one target image.
    file_list = []
    for file in os.listdir(dir_path):
        if len(file) > 5:
            file_list.append(file)
    sample_name = random.sample(file_list, 3)
    h_img1 = cv2.imread(dir_path + sample_name[0])
    h_img2 = cv2.imread(dir_path + sample_name[1])
    t_img = cv2.imread(dir_path + sample_name[2])
    # Load joint coordinates (one JSON record per line) for the three samples.
    h_label1 = []
    h_label2 = []
    t_label = []
    label_data = open(joint_data_path).readlines()
    for i in range( len(label_data)):
        datum = json.loads(label_data[i].strip())
        if(datum["filename"] == sample_name[0]):
            for joint in datum["joint_pos"]:
                h_label1.append(joint[1])
        elif(datum["filename"] == sample_name[1]):
            for joint in datum["joint_pos"]:
                h_label2.append(joint[1])
        elif(datum["filename"] == sample_name[2]):
            for joint in datum["joint_pos"]:
                t_label.append(joint[1])
    # Ratios mapping original pixel coords to the resized input; the target
    # ratio is normalised to [0, 1] and scaled to the heatmap size below.
    resize_ratioh1 = (input_size / h_img1.shape[1] , input_size / h_img1.shape[0])
    resize_ratioh2 = (input_size / h_img2.shape[1] , input_size / h_img2.shape[0])
    resize_ratiot = (1 / t_img.shape[1] , 1 / t_img.shape[0])
    h_img1= cv2.resize(h_img1,(input_size,input_size))
    h_img2= cv2.resize(h_img2,(input_size,input_size))
    t_img = cv2.resize(t_img,(input_size,input_size))
    # Scale joint coords to the resized hint images (in place), then crop one
    # padded ROI per joint from each hint image.
    hintSet01 = []
    hintSet02 = []
    for joint in h_label1:
        joint[0] = joint[0]*resize_ratioh1[0]
        joint[1] = joint[1]*resize_ratioh1[1]
    for i in range(len(h_label1)):
        tmp = getPaddedROI(h_img1, int(h_label1[i][0]), int(h_label1[i][1]), hint_roi_size, hint_roi_size)
        hintSet01.append(tmp)
    for joint in h_label2:
        joint[0] = joint[0]*resize_ratioh2[0]
        joint[1] = joint[1]*resize_ratioh2[1]
    for i in range(len(h_label2)):
        tmp = getPaddedROI(h_img2, int(h_label2[i][0]), int(h_label2[i][1]), hint_roi_size, hint_roi_size)
        hintSet02.append(tmp)
    # Render one Gaussian map per target joint at heatmap resolution (76x76),
    # then invert so keypoints are 0 and background is 1.
    joint_idx = 0
    heatmap = np.zeros((16, 76, 76) , dtype = np.float32)
    for joint in t_label:
        point =[ joint[0]*resize_ratiot[0] * 76, joint[1]*resize_ratiot[1] *76 ]
        make_heatmap(heatmap, joint_idx, point, 1)  # sigma = 1
        joint_idx +=1
    heatmap = 1 - heatmap
    return hintSet01, hintSet02, t_img, heatmap
| true
| true
|
790655962fe5f0dedfca81c1496d202aa334601e
| 47
|
py
|
Python
|
hello_world.py
|
jzfarmer/learning_python
|
279fc19d4405625b49f853575252bf1dee3cbb99
|
[
"MIT"
] | null | null | null |
hello_world.py
|
jzfarmer/learning_python
|
279fc19d4405625b49f853575252bf1dee3cbb99
|
[
"MIT"
] | null | null | null |
hello_world.py
|
jzfarmer/learning_python
|
279fc19d4405625b49f853575252bf1dee3cbb99
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python3
# Classic first program: write the greeting to stdout.
greeting = 'hello world'
print(greeting)
| 11.75
| 22
| 0.659574
|
# Write the canonical greeting to stdout.
greeting = 'hello world'
print(greeting)
| true
| true
|
790655ea6c80cb158eb146494c56364a9049bf42
| 2,918
|
py
|
Python
|
align/compiler/create_database.py
|
mabrains/ALIGN-public
|
9a6c14310de13df369a8340f465911b629f15a3f
|
[
"BSD-3-Clause"
] | null | null | null |
align/compiler/create_database.py
|
mabrains/ALIGN-public
|
9a6c14310de13df369a8340f465911b629f15a3f
|
[
"BSD-3-Clause"
] | null | null | null |
align/compiler/create_database.py
|
mabrains/ALIGN-public
|
9a6c14310de13df369a8340f465911b629f15a3f
|
[
"BSD-3-Clause"
] | null | null | null |
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Fri Jan 15 10:38:14 2021
@author: kunal001
"""
import logging
logger = logging.getLogger(__name__)
class CreateDatabase:
    """Flattens a hierarchical circuit graph into a dictionary of hierarchies.

    Wraps a circuit graph (networkx-style API, presumably — verify against
    callers) plus a user-constraint parser, and records every hierarchy level
    in ``hier_graph_dict``.
    """
    def __init__(self,hier_graph,const_parse):
        # name -> {"graph", "ports", "ports_weight", "const"} per hierarchy.
        self.hier_graph_dict = {}
        self.const_parse = const_parse
        self.G = hier_graph
    def read_inputs(self,name:str):
        """Read the top-level circuit graph.

        Collects the top-level external ports (and the edge weight to each
        neighbour, per port), stores them under ``name`` together with the
        user constraints, then recursively registers all nested sub-graphs.
        Returns the accumulated ``hier_graph_dict``.
        """
        top_ports = []
        ports_weight = {}
        for node, attr in self.G.nodes(data=True):
            if 'source' in attr['inst_type']:
                # Source instances: every neighbouring net is a top-level port.
                for source_nets in self.G.neighbors(node):
                    top_ports.append(source_nets)
            elif 'net_type' in attr:
                if attr['net_type'] == "external":
                    # External nets are ports; record each incident edge weight.
                    top_ports.append(node)
                    ports_weight[node]=[]
                    for nbr in list(self.G.neighbors(node)):
                        ports_weight[node].append(self.G.get_edge_data(node, nbr)['weight'])
        logger.debug("Merging nested graph hierarchies to dictionary: ")
        const = self.const_parse.read_user_const(name)
        self.hier_graph_dict[name] = {
            "graph": self.G,
            "ports": top_ports,
            "ports_weight": ports_weight,
            "const": const
        }
        self._traverse_hier_in_graph(self.G)
        logger.debug(f"read graph {self.hier_graph_dict}")
        return self.hier_graph_dict
    def _traverse_hier_in_graph(self,G):
        """Recursively read all hierarchies in the graph into the dictionary.

        Every node carrying a non-empty "sub_graph" attribute is registered in
        ``hier_graph_dict`` under its "inst_type" (with its external sub-ports
        and their edge weights), then descended into.
        """
        for node, attr in G.nodes(data=True):
            if "sub_graph" in attr and attr["sub_graph"]:
                logger.debug(f'Traversing sub graph: {node} {attr["inst_type"]} {attr["ports"]}')
                sub_ports = []
                ports_weight = {}
                for sub_node, sub_attr in attr["sub_graph"].nodes(data=True):
                    if 'net_type' in sub_attr:
                        if sub_attr['net_type'] == "external":
                            # External nets of the sub-graph become its ports.
                            sub_ports.append(sub_node)
                            ports_weight[sub_node] = []
                            for nbr in list(attr["sub_graph"].neighbors(sub_node)):
                                ports_weight[sub_node].append(attr["sub_graph"].get_edge_data(sub_node, nbr)['weight'])
                logger.debug(f'external ports: {sub_ports}, {attr["connection"]}, {ports_weight}')
                const = self.const_parse.read_user_const(attr["inst_type"])
                self.hier_graph_dict[attr["inst_type"]] = {
                    "graph": attr["sub_graph"],
                    "ports": sub_ports,
                    "const": const,
                    "ports_weight": ports_weight
                }
                self._traverse_hier_in_graph(attr["sub_graph"])
| 38.906667
| 119
| 0.532557
|
import logging
logger = logging.getLogger(__name__)
class CreateDatabase:
    """Flattens a hierarchical circuit graph into a dictionary of hierarchies."""
    def __init__(self,hier_graph,const_parse):
        # name -> {"graph", "ports", "ports_weight", "const"} per hierarchy.
        self.hier_graph_dict = {}
        self.const_parse = const_parse
        self.G = hier_graph
    def read_inputs(self,name:str):
        """Collect top-level external ports and per-port edge weights of
        ``self.G``, store them under ``name`` with the user constraints, then
        recursively register nested sub-graphs. Returns ``hier_graph_dict``."""
        top_ports = []
        ports_weight = {}
        for node, attr in self.G.nodes(data=True):
            if 'source' in attr['inst_type']:
                # Source instances: every neighbouring net is a top-level port.
                for source_nets in self.G.neighbors(node):
                    top_ports.append(source_nets)
            elif 'net_type' in attr:
                if attr['net_type'] == "external":
                    # External nets are ports; record each incident edge weight.
                    top_ports.append(node)
                    ports_weight[node]=[]
                    for nbr in list(self.G.neighbors(node)):
                        ports_weight[node].append(self.G.get_edge_data(node, nbr)['weight'])
        logger.debug("Merging nested graph hierarchies to dictionary: ")
        const = self.const_parse.read_user_const(name)
        self.hier_graph_dict[name] = {
            "graph": self.G,
            "ports": top_ports,
            "ports_weight": ports_weight,
            "const": const
        }
        self._traverse_hier_in_graph(self.G)
        logger.debug(f"read graph {self.hier_graph_dict}")
        return self.hier_graph_dict
    def _traverse_hier_in_graph(self,G):
        """Recursively register every node with a non-empty "sub_graph"
        attribute under its "inst_type", then descend into the sub-graph."""
        for node, attr in G.nodes(data=True):
            if "sub_graph" in attr and attr["sub_graph"]:
                logger.debug(f'Traversing sub graph: {node} {attr["inst_type"]} {attr["ports"]}')
                sub_ports = []
                ports_weight = {}
                for sub_node, sub_attr in attr["sub_graph"].nodes(data=True):
                    if 'net_type' in sub_attr:
                        if sub_attr['net_type'] == "external":
                            # External nets of the sub-graph become its ports.
                            sub_ports.append(sub_node)
                            ports_weight[sub_node] = []
                            for nbr in list(attr["sub_graph"].neighbors(sub_node)):
                                ports_weight[sub_node].append(attr["sub_graph"].get_edge_data(sub_node, nbr)['weight'])
                logger.debug(f'external ports: {sub_ports}, {attr["connection"]}, {ports_weight}')
                const = self.const_parse.read_user_const(attr["inst_type"])
                self.hier_graph_dict[attr["inst_type"]] = {
                    "graph": attr["sub_graph"],
                    "ports": sub_ports,
                    "const": const,
                    "ports_weight": ports_weight
                }
                self._traverse_hier_in_graph(attr["sub_graph"])
| true
| true
|
790656792cf1a06755ecb98d07fc56893c0250cc
| 3,151
|
py
|
Python
|
tests/test_docs/test_standalone_transaction/test_standalone_transaction.py
|
valory-xyz/agents-aea
|
8f38efa96041b0156ed1ae328178e395dbabf2fc
|
[
"Apache-2.0"
] | 28
|
2021-10-31T18:54:14.000Z
|
2022-03-17T13:10:43.000Z
|
tests/test_docs/test_standalone_transaction/test_standalone_transaction.py
|
valory-xyz/agents-aea
|
8f38efa96041b0156ed1ae328178e395dbabf2fc
|
[
"Apache-2.0"
] | 66
|
2021-10-31T11:55:48.000Z
|
2022-03-31T06:26:23.000Z
|
tests/test_docs/test_standalone_transaction/test_standalone_transaction.py
|
valory-xyz/agents-aea
|
8f38efa96041b0156ed1ae328178e395dbabf2fc
|
[
"Apache-2.0"
] | null | null | null |
# -*- coding: utf-8 -*-
# ------------------------------------------------------------------------------
#
# Copyright 2022 Valory AG
# Copyright 2018-2021 Fetch.AI Limited
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# ------------------------------------------------------------------------------
"""This module contains the tests for the code-blocks in the standalone-transaction.md file."""
import logging
import os
from unittest.mock import patch
import pytest
from aea.test_tools.test_cases import BaseAEATestCase
from tests.conftest import CUR_PATH, MAX_FLAKY_RERUNS_INTEGRATION, ROOT_DIR
from tests.test_docs.helper import extract_code_blocks, extract_python_code
from tests.test_docs.test_standalone_transaction.standalone_transaction import (
logger,
run,
)
MD_FILE = "docs/standalone-transaction.md"
PY_FILE = "test_docs/test_standalone_transaction/standalone_transaction.py"
test_logger = logging.getLogger(__name__)
class TestStandaloneTransaction(BaseAEATestCase):
    """This class contains the tests for the code-blocks in the standalone-transaction.md file."""

    @classmethod
    def _patch_logger(cls):
        """Start patching ``logger.info``; keep the patcher so it can be stopped."""
        cls.patch_logger_info = patch.object(logger, "info")
        cls.mocked_logger_info = cls.patch_logger_info.__enter__()

    @classmethod
    def _unpatch_logger(cls):
        """Stop patching ``logger.info``.

        FIX: the original called ``__exit__()`` on the MagicMock returned by
        ``__enter__()``, which only records a mock call and never restores
        the real ``logger.info``. Stopping the patcher itself does.
        """
        cls.patch_logger_info.stop()

    @classmethod
    def setup_class(cls):
        """Setup the test class: patch the logger and load doc/code fixtures."""
        super().setup_class()
        cls._patch_logger()
        doc_path = os.path.join(ROOT_DIR, MD_FILE)
        cls.code_blocks = extract_code_blocks(filepath=doc_path, filter_="python")
        test_code_path = os.path.join(CUR_PATH, PY_FILE)
        cls.python_file = extract_python_code(test_code_path)

    def test_read_md_file(self):
        """Test the last code block, that is the full listing of the demo from the Markdown."""
        assert (
            self.code_blocks[-1] == self.python_file
        ), "Files must be exactly the same."

    @pytest.mark.integration(reruns=MAX_FLAKY_RERUNS_INTEGRATION)
    def test_run_end_to_end(self):
        """Run the transaction from the file."""
        try:
            run()
            self.mocked_logger_info.assert_any_call("Transaction complete.")
        except RuntimeError:
            test_logger.info("RuntimeError: Some transactions have failed")

    def test_code_blocks_exist(self):
        """Test that all the code-blocks exist in the python file."""
        for blocks in self.code_blocks:
            assert (
                blocks in self.python_file
            ), "Code-block doesn't exist in the python file."
| 35.404494
| 95
| 0.671215
|
import logging
import os
from unittest.mock import patch
import pytest
from aea.test_tools.test_cases import BaseAEATestCase
from tests.conftest import CUR_PATH, MAX_FLAKY_RERUNS_INTEGRATION, ROOT_DIR
from tests.test_docs.helper import extract_code_blocks, extract_python_code
from tests.test_docs.test_standalone_transaction.standalone_transaction import (
logger,
run,
)
MD_FILE = "docs/standalone-transaction.md"
PY_FILE = "test_docs/test_standalone_transaction/standalone_transaction.py"
test_logger = logging.getLogger(__name__)
class TestStandaloneTransaction(BaseAEATestCase):
    """Tests for the code-blocks in the standalone-transaction.md file."""

    @classmethod
    def _patch_logger(cls):
        # Keep the patcher so it can be stopped later.
        cls.patch_logger_info = patch.object(logger, "info")
        cls.mocked_logger_info = cls.patch_logger_info.__enter__()

    @classmethod
    def _unpatch_logger(cls):
        # FIX: the original called __exit__() on the MagicMock returned by
        # __enter__(), which never removes the patch; stop the patcher itself.
        cls.patch_logger_info.stop()

    @classmethod
    def setup_class(cls):
        """Patch the logger and load the doc/code fixtures."""
        super().setup_class()
        cls._patch_logger()
        doc_path = os.path.join(ROOT_DIR, MD_FILE)
        cls.code_blocks = extract_code_blocks(filepath=doc_path, filter_="python")
        test_code_path = os.path.join(CUR_PATH, PY_FILE)
        cls.python_file = extract_python_code(test_code_path)

    def test_read_md_file(self):
        """The last code block is the full listing from the Markdown."""
        assert (
            self.code_blocks[-1] == self.python_file
        ), "Files must be exactly the same."

    @pytest.mark.integration(reruns=MAX_FLAKY_RERUNS_INTEGRATION)
    def test_run_end_to_end(self):
        """Run the transaction from the file."""
        try:
            run()
            self.mocked_logger_info.assert_any_call("Transaction complete.")
        except RuntimeError:
            test_logger.info("RuntimeError: Some transactions have failed")

    def test_code_blocks_exist(self):
        """Every code-block from the docs exists in the python file."""
        for blocks in self.code_blocks:
            assert (
                blocks in self.python_file
            ), "Code-block doesn't exist in the python file."
| true
| true
|
790656dfc67283942aa59d35297a0f07c93f209a
| 346
|
py
|
Python
|
piprot/test/test_piprot.py
|
sedrubal/piprot
|
53a9f33556168b137df328a928400ea787c0ca7f
|
[
"MIT"
] | 199
|
2015-01-14T11:09:54.000Z
|
2022-01-12T16:36:06.000Z
|
piprot/test/test_piprot.py
|
sedrubal/piprot
|
53a9f33556168b137df328a928400ea787c0ca7f
|
[
"MIT"
] | 49
|
2015-01-14T21:26:31.000Z
|
2020-06-12T10:51:30.000Z
|
piprot/test/test_piprot.py
|
sedrubal/piprot
|
53a9f33556168b137df328a928400ea787c0ca7f
|
[
"MIT"
] | 35
|
2015-02-05T04:37:26.000Z
|
2021-07-31T10:28:51.000Z
|
#!/usr/bin/env python
import unittest

from piprot.piprot import main


class TestRequirementsParser(unittest.TestCase):
    """Exercise piprot's CLI entry point against a pinned requirements file."""

    def setUp(self):
        pass

    def test_requirement_exact(self):
        # piprot exits via SystemExit after reporting; open the requirements
        # file in a context manager so the handle is always closed
        # (the original leaked the file object passed to main()).
        with open('piprot/test/files/pytz_req.txt') as req_file:
            with self.assertRaises(SystemExit):
                main([req_file])


if __name__ == '__main__':
    unittest.main()
| 19.222222
| 58
| 0.684971
|
import unittest

from piprot.piprot import main


class TestRequirementsParser(unittest.TestCase):
    """Exercise piprot's CLI entry point against a pinned requirements file."""

    def setUp(self):
        pass

    def test_requirement_exact(self):
        # FIX: open the requirements file in a context manager so the handle
        # is closed even when main() raises SystemExit (original leaked it).
        with open('piprot/test/files/pytz_req.txt') as req_file:
            with self.assertRaises(SystemExit):
                main([req_file])


if __name__ == '__main__':
    unittest.main()
| true
| true
|
790656ed3b45cbb39f2ccbb5be021d26bc38a0db
| 312
|
py
|
Python
|
exerc27/27.py
|
WilliamSampaio/ExerciciosPython
|
4317d242d2944b91b5d455da8a4ac3a33e154385
|
[
"MIT"
] | null | null | null |
exerc27/27.py
|
WilliamSampaio/ExerciciosPython
|
4317d242d2944b91b5d455da8a4ac3a33e154385
|
[
"MIT"
] | null | null | null |
exerc27/27.py
|
WilliamSampaio/ExerciciosPython
|
4317d242d2944b91b5d455da8a4ac3a33e154385
|
[
"MIT"
] | null | null | null |
import os

# Ask for the study shift and greet accordingly, classifying by the first
# letter of the answer (case-insensitive).
turno = input('Qual turno você estuda? ')
# FIX: turno[:1] (instead of turno[0]) avoids an IndexError on empty input;
# an empty answer now falls through to the invalid branch.
inicial = turno[:1].lower()
if inicial == 'm':
    print('Bom Dia!')
elif inicial == 'v':
    print('Boa Tarde')
elif inicial == 'n':
    print('Boa Noite')
else:
    print('Valor Inválido!')
# NOTE(review): "pause" is a Windows-only shell command.
os.system("pause")
| 20.8
| 42
| 0.535256
|
import os

# Ask for the study shift and greet accordingly (first letter, case-insensitive).
turno = input('Qual turno você estuda? ')
# FIX: turno[:1] avoids an IndexError on empty input; empty answers now reach
# the invalid branch instead of crashing.
inicial = turno[:1].lower()
if inicial == 'm':
    print('Bom Dia!')
elif inicial == 'v':
    print('Boa Tarde')
elif inicial == 'n':
    print('Boa Noite')
else:
    print('Valor Inválido!')
os.system("pause")
| true
| true
|
790657d1965476ce384e27b1371c9e99996e94c1
| 3,947
|
py
|
Python
|
TextMagic/models/reopen_chats_bulk_input_object.py
|
imissyouso/textmagic-rest-python
|
172c4c2bd588119eb97cdfaab3d8ba24bf3f3e09
|
[
"MIT"
] | null | null | null |
TextMagic/models/reopen_chats_bulk_input_object.py
|
imissyouso/textmagic-rest-python
|
172c4c2bd588119eb97cdfaab3d8ba24bf3f3e09
|
[
"MIT"
] | null | null | null |
TextMagic/models/reopen_chats_bulk_input_object.py
|
imissyouso/textmagic-rest-python
|
172c4c2bd588119eb97cdfaab3d8ba24bf3f3e09
|
[
"MIT"
] | null | null | null |
# coding: utf-8
"""
TextMagic API
No description provided (generated by Swagger Codegen https://github.com/swagger-api/swagger-codegen) # noqa: E501
OpenAPI spec version: 2
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
import pprint
import re # noqa: F401
import six
class ReopenChatsBulkInputObject(object):
    """Swagger-generated model for the "reopen chats in bulk" request body."""

    # Attribute name -> declared Swagger type.
    swagger_types = {
        'ids': 'str',
        'all': 'bool'
    }

    # Attribute name -> JSON key in the API definition.
    attribute_map = {
        'ids': 'ids',
        'all': 'all'
    }

    def __init__(self, ids=None, all=None):  # noqa: E501
        """ReopenChatsBulkInputObject - a model defined in Swagger"""
        self._ids = None
        self._all = None
        self.discriminator = None
        if ids is not None:
            self.ids = ids
        if all is not None:
            self.all = all

    @property
    def ids(self):
        """Entity ID(s), separated by comma.

        :rtype: str
        """
        return self._ids

    @ids.setter
    def ids(self, ids):
        """Set the entity ID(s), separated by comma."""
        self._ids = ids

    @property
    def all(self):
        """Flag selecting all entities instead of an explicit ID list.

        :rtype: bool
        """
        return self._all

    @all.setter
    def all(self, all):
        """Set the all-entities flag."""
        self._all = all

    def to_dict(self):
        """Returns the model properties as a dict"""
        result = {}
        for attr in self.swagger_types:
            value = getattr(self, attr)
            if isinstance(value, list):
                result[attr] = [
                    item.to_dict() if hasattr(item, "to_dict") else item
                    for item in value
                ]
            elif hasattr(value, "to_dict"):
                result[attr] = value.to_dict()
            elif isinstance(value, dict):
                result[attr] = {
                    key: val.to_dict() if hasattr(val, "to_dict") else val
                    for key, val in value.items()
                }
            else:
                result[attr] = value
        if issubclass(ReopenChatsBulkInputObject, dict):
            for key, value in self.items():
                result[key] = value
        return result

    def to_str(self):
        """Returns the string representation of the model"""
        return pprint.pformat(self.to_dict())

    def __repr__(self):
        """For `print` and `pprint`"""
        return self.to_str()

    def __eq__(self, other):
        """Returns true if both objects are equal"""
        if not isinstance(other, ReopenChatsBulkInputObject):
            return False
        return self.__dict__ == other.__dict__

    def __ne__(self, other):
        """Returns true if both objects are not equal"""
        return not self == other
| 27.034247
| 119
| 0.558905
|
import pprint
import re
import six
class ReopenChatsBulkInputObject(object):
    """Swagger model for the bulk chat-reopen request body."""

    # Attribute name -> declared Swagger type.
    swagger_types = {
        'ids': 'str',
        'all': 'bool'
    }

    # Attribute name -> JSON key in the API definition.
    attribute_map = {
        'ids': 'ids',
        'all': 'all'
    }

    def __init__(self, ids=None, all=None):
        """Initialise the model; unset fields stay None."""
        self._ids = None
        self._all = None
        self.discriminator = None
        if ids is not None:
            self.ids = ids
        if all is not None:
            self.all = all

    @property
    def ids(self):
        """Entity ID(s), separated by comma (str)."""
        return self._ids

    @ids.setter
    def ids(self, ids):
        self._ids = ids

    @property
    def all(self):
        """Flag selecting all entities (bool)."""
        return self._all

    @all.setter
    def all(self, all):
        self._all = all

    def to_dict(self):
        """Return the model properties as a dict."""
        result = {}
        for name in self.swagger_types:
            field = getattr(self, name)
            if isinstance(field, list):
                result[name] = [
                    entry.to_dict() if hasattr(entry, "to_dict") else entry
                    for entry in field
                ]
            elif hasattr(field, "to_dict"):
                result[name] = field.to_dict()
            elif isinstance(field, dict):
                result[name] = {
                    k: v.to_dict() if hasattr(v, "to_dict") else v
                    for k, v in field.items()
                }
            else:
                result[name] = field
        if issubclass(ReopenChatsBulkInputObject, dict):
            for key, value in self.items():
                result[key] = value
        return result

    def to_str(self):
        """Return the pretty-printed string representation of the model."""
        return pprint.pformat(self.to_dict())

    def __repr__(self):
        return self.to_str()

    def __eq__(self, other):
        if not isinstance(other, ReopenChatsBulkInputObject):
            return False
        return self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not self == other
| true
| true
|
7906586313260e60731734c5a3175fce5e81afdf
| 59,084
|
py
|
Python
|
keystone-moon/keystone/tests/unit/test_associate_project_endpoint_extension.py
|
hashnfv/hashnfv-moon
|
daaba34fa2ed4426bc0fde359e54a5e1b872208c
|
[
"Apache-2.0"
] | null | null | null |
keystone-moon/keystone/tests/unit/test_associate_project_endpoint_extension.py
|
hashnfv/hashnfv-moon
|
daaba34fa2ed4426bc0fde359e54a5e1b872208c
|
[
"Apache-2.0"
] | null | null | null |
keystone-moon/keystone/tests/unit/test_associate_project_endpoint_extension.py
|
hashnfv/hashnfv-moon
|
daaba34fa2ed4426bc0fde359e54a5e1b872208c
|
[
"Apache-2.0"
] | null | null | null |
# Copyright 2013 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import copy
import uuid
import mock
from oslo_log import versionutils
from six.moves import http_client
from testtools import matchers
from keystone.contrib.endpoint_filter import routers
from keystone.tests import unit
from keystone.tests.unit import test_v3
class EndpointFilterTestCase(test_v3.RestfulTestCase):
    """Base fixture for endpoint-filter tests.

    Switches the catalog driver to the endpoint_filter SQL backend and builds
    the default project/endpoint association URL used by the subclasses.
    """
    def config_overrides(self):
        super(EndpointFilterTestCase, self).config_overrides()
        # Use the endpoint-filter-aware catalog driver for these tests.
        self.config_fixture.config(
            group='catalog', driver='endpoint_filter.sql')
    def setUp(self):
        super(EndpointFilterTestCase, self).setUp()
        # Association URL for the default domain project and default endpoint.
        self.default_request_url = (
            '/OS-EP-FILTER/projects/%(project_id)s'
            '/endpoints/%(endpoint_id)s' % {
                'project_id': self.default_domain_project_id,
                'endpoint_id': self.endpoint_id})
class EndpointFilterDeprecateTestCase(test_v3.RestfulTestCase):
    """Verify that loading the endpoint_filter extension emits a deprecation."""
    @mock.patch.object(versionutils, 'report_deprecated_feature')
    def test_exception_happens(self, mock_deprecator):
        # Instantiating the router must report the deprecation exactly once,
        # with a message telling operators to remove the extension config.
        routers.EndpointFilterExtension(mock.ANY)
        mock_deprecator.assert_called_once_with(mock.ANY, mock.ANY)
        args, _kwargs = mock_deprecator.call_args
        self.assertIn("Remove endpoint_filter_extension from", args[1])
class EndpointFilterCRUDTestCase(EndpointFilterTestCase):
def test_create_endpoint_project_association(self):
"""PUT /OS-EP-FILTER/projects/{project_id}/endpoints/{endpoint_id}
Valid endpoint and project id test case.
"""
self.put(self.default_request_url)
def test_create_endpoint_project_association_with_invalid_project(self):
"""PUT OS-EP-FILTER/projects/{project_id}/endpoints/{endpoint_id}
Invalid project id test case.
"""
self.put('/OS-EP-FILTER/projects/%(project_id)s'
'/endpoints/%(endpoint_id)s' % {
'project_id': uuid.uuid4().hex,
'endpoint_id': self.endpoint_id},
expected_status=http_client.NOT_FOUND)
def test_create_endpoint_project_association_with_invalid_endpoint(self):
"""PUT /OS-EP-FILTER/projects/{project_id}/endpoints/{endpoint_id}
Invalid endpoint id test case.
"""
self.put('/OS-EP-FILTER/projects/%(project_id)s'
'/endpoints/%(endpoint_id)s' % {
'project_id': self.default_domain_project_id,
'endpoint_id': uuid.uuid4().hex},
expected_status=http_client.NOT_FOUND)
def test_create_endpoint_project_association_with_unexpected_body(self):
"""PUT /OS-EP-FILTER/projects/{project_id}/endpoints/{endpoint_id}
Unexpected body in request. The body should be ignored.
"""
self.put(self.default_request_url,
body={'project_id': self.default_domain_project_id})
def test_check_endpoint_project_association(self):
"""HEAD /OS-EP-FILTER/projects/{project_id}/endpoints/{endpoint_id}
Valid project and endpoint id test case.
"""
self.put(self.default_request_url)
self.head('/OS-EP-FILTER/projects/%(project_id)s'
'/endpoints/%(endpoint_id)s' % {
'project_id': self.default_domain_project_id,
'endpoint_id': self.endpoint_id})
def test_check_endpoint_project_association_with_invalid_project(self):
"""HEAD /OS-EP-FILTER/projects/{project_id}/endpoints/{endpoint_id}
Invalid project id test case.
"""
self.put(self.default_request_url)
self.head('/OS-EP-FILTER/projects/%(project_id)s'
'/endpoints/%(endpoint_id)s' % {
'project_id': uuid.uuid4().hex,
'endpoint_id': self.endpoint_id},
expected_status=http_client.NOT_FOUND)
def test_check_endpoint_project_association_with_invalid_endpoint(self):
"""HEAD /OS-EP-FILTER/projects/{project_id}/endpoints/{endpoint_id}
Invalid endpoint id test case.
"""
self.put(self.default_request_url)
self.head('/OS-EP-FILTER/projects/%(project_id)s'
'/endpoints/%(endpoint_id)s' % {
'project_id': self.default_domain_project_id,
'endpoint_id': uuid.uuid4().hex},
expected_status=http_client.NOT_FOUND)
def test_list_endpoints_associated_with_valid_project(self):
"""GET /OS-EP-FILTER/projects/{project_id}/endpoints
Valid project and endpoint id test case.
"""
self.put(self.default_request_url)
resource_url = '/OS-EP-FILTER/projects/%(project_id)s/endpoints' % {
'project_id': self.default_domain_project_id}
r = self.get(resource_url)
self.assertValidEndpointListResponse(r, self.endpoint,
resource_url=resource_url)
def test_list_endpoints_associated_with_invalid_project(self):
"""GET /OS-EP-FILTER/projects/{project_id}/endpoints
Invalid project id test case.
"""
self.put(self.default_request_url)
self.get('/OS-EP-FILTER/projects/%(project_id)s/endpoints' % {
'project_id': uuid.uuid4().hex},
expected_status=http_client.NOT_FOUND)
def test_list_projects_associated_with_endpoint(self):
"""GET /OS-EP-FILTER/endpoints/{endpoint_id}/projects
Valid endpoint-project association test case.
"""
self.put(self.default_request_url)
resource_url = '/OS-EP-FILTER/endpoints/%(endpoint_id)s/projects' % {
'endpoint_id': self.endpoint_id}
r = self.get(resource_url)
self.assertValidProjectListResponse(r, self.default_domain_project,
resource_url=resource_url)
def test_list_projects_with_no_endpoint_project_association(self):
"""GET /OS-EP-FILTER/endpoints/{endpoint_id}/projects
Valid endpoint id but no endpoint-project associations test case.
"""
r = self.get('/OS-EP-FILTER/endpoints/%(endpoint_id)s/projects' %
{'endpoint_id': self.endpoint_id})
self.assertValidProjectListResponse(r, expected_length=0)
def test_list_projects_associated_with_invalid_endpoint(self):
"""GET /OS-EP-FILTER/endpoints/{endpoint_id}/projects
Invalid endpoint id test case.
"""
self.get('/OS-EP-FILTER/endpoints/%(endpoint_id)s/projects' %
{'endpoint_id': uuid.uuid4().hex},
expected_status=http_client.NOT_FOUND)
def test_remove_endpoint_project_association(self):
"""DELETE /OS-EP-FILTER/projects/{project_id}/endpoints/{endpoint_id}
Valid project id and endpoint id test case.
"""
self.put(self.default_request_url)
self.delete('/OS-EP-FILTER/projects/%(project_id)s'
'/endpoints/%(endpoint_id)s' % {
'project_id': self.default_domain_project_id,
'endpoint_id': self.endpoint_id})
def test_remove_endpoint_project_association_with_invalid_project(self):
"""DELETE /OS-EP-FILTER/projects/{project_id}/endpoints/{endpoint_id}
Invalid project id test case.
"""
self.put(self.default_request_url)
self.delete('/OS-EP-FILTER/projects/%(project_id)s'
'/endpoints/%(endpoint_id)s' % {
'project_id': uuid.uuid4().hex,
'endpoint_id': self.endpoint_id},
expected_status=http_client.NOT_FOUND)
def test_remove_endpoint_project_association_with_invalid_endpoint(self):
"""DELETE /OS-EP-FILTER/projects/{project_id}/endpoints/{endpoint_id}
Invalid endpoint id test case.
"""
self.put(self.default_request_url)
self.delete('/OS-EP-FILTER/projects/%(project_id)s'
'/endpoints/%(endpoint_id)s' % {
'project_id': self.default_domain_project_id,
'endpoint_id': uuid.uuid4().hex},
expected_status=http_client.NOT_FOUND)
def test_endpoint_project_association_cleanup_when_project_deleted(self):
self.put(self.default_request_url)
association_url = ('/OS-EP-FILTER/endpoints/%(endpoint_id)s/projects' %
{'endpoint_id': self.endpoint_id})
r = self.get(association_url)
self.assertValidProjectListResponse(r, expected_length=1)
self.delete('/projects/%(project_id)s' % {
'project_id': self.default_domain_project_id})
r = self.get(association_url)
self.assertValidProjectListResponse(r, expected_length=0)
def test_endpoint_project_association_cleanup_when_endpoint_deleted(self):
self.put(self.default_request_url)
association_url = '/OS-EP-FILTER/projects/%(project_id)s/endpoints' % {
'project_id': self.default_domain_project_id}
r = self.get(association_url)
self.assertValidEndpointListResponse(r, expected_length=1)
self.delete('/endpoints/%(endpoint_id)s' % {
'endpoint_id': self.endpoint_id})
r = self.get(association_url)
self.assertValidEndpointListResponse(r, expected_length=0)
    @unit.skip_if_cache_disabled('catalog')
    def test_create_endpoint_project_association_invalidates_cache(self):
        """Adding a project-endpoint association invalidates the cache.

        Associations added behind the catalog_api manager's back are not
        visible until something else (here, a REST association call)
        invalidates the cached catalog.
        """
        # NOTE(davechen): create another endpoint which will be added to
        # default project, this should be done at first since
        # `create_endpoint` will also invalidate cache.
        endpoint_id2 = uuid.uuid4().hex
        endpoint2 = unit.new_endpoint_ref(service_id=self.service_id,
                                          region_id=self.region_id,
                                          interface='public',
                                          id=endpoint_id2)
        self.catalog_api.create_endpoint(endpoint_id2, endpoint2.copy())
        # create endpoint project association.
        self.put(self.default_request_url)
        # should get back only one endpoint that was just created.
        user_id = uuid.uuid4().hex
        catalog = self.catalog_api.get_v3_catalog(
            user_id,
            self.default_domain_project_id)
        # there is only one endpoint associated with the default project.
        self.assertEqual(1, len(catalog[0]['endpoints']))
        self.assertEqual(self.endpoint_id, catalog[0]['endpoints'][0]['id'])
        # add the second endpoint to default project, bypassing
        # catalog_api API manager.
        self.catalog_api.driver.add_endpoint_to_project(
            endpoint_id2,
            self.default_domain_project_id)
        # but, we still get back just one endpoint, since the catalog is
        # pulled out from cache and the cache hasn't been invalidated.
        catalog = self.catalog_api.get_v3_catalog(
            user_id,
            self.default_domain_project_id)
        self.assertEqual(1, len(catalog[0]['endpoints']))
        # remove the endpoint2 from the default project, and add it again via
        # catalog_api API manager.
        self.catalog_api.driver.remove_endpoint_from_project(
            endpoint_id2,
            self.default_domain_project_id)
        # add second endpoint to default project, this can be done by calling
        # the catalog_api API manager directly but call the REST API
        # instead for consistency.
        self.put('/OS-EP-FILTER/projects/%(project_id)s'
                 '/endpoints/%(endpoint_id)s' % {
                     'project_id': self.default_domain_project_id,
                     'endpoint_id': endpoint_id2})
        # should get back two endpoints since the cache has been
        # invalidated when the second endpoint was added to default project.
        catalog = self.catalog_api.get_v3_catalog(
            user_id,
            self.default_domain_project_id)
        self.assertEqual(2, len(catalog[0]['endpoints']))
        ep_id_list = [catalog[0]['endpoints'][0]['id'],
                      catalog[0]['endpoints'][1]['id']]
        self.assertItemsEqual([self.endpoint_id, endpoint_id2], ep_id_list)
    @unit.skip_if_cache_disabled('catalog')
    def test_remove_endpoint_from_project_invalidates_cache(self):
        """Removing a project-endpoint association invalidates the cache.

        Mirror image of the creation test above: removals done behind the
        catalog_api manager's back stay invisible until a REST removal
        invalidates the cached catalog.
        """
        endpoint_id2 = uuid.uuid4().hex
        endpoint2 = unit.new_endpoint_ref(service_id=self.service_id,
                                          region_id=self.region_id,
                                          interface='public',
                                          id=endpoint_id2)
        self.catalog_api.create_endpoint(endpoint_id2, endpoint2.copy())
        # create endpoint project association.
        self.put(self.default_request_url)
        # add second endpoint to default project.
        self.put('/OS-EP-FILTER/projects/%(project_id)s'
                 '/endpoints/%(endpoint_id)s' % {
                     'project_id': self.default_domain_project_id,
                     'endpoint_id': endpoint_id2})
        # should get back the two endpoints that were just associated.
        user_id = uuid.uuid4().hex
        catalog = self.catalog_api.get_v3_catalog(
            user_id,
            self.default_domain_project_id)
        # there are two endpoints associated with the default project.
        ep_id_list = [catalog[0]['endpoints'][0]['id'],
                      catalog[0]['endpoints'][1]['id']]
        self.assertEqual(2, len(catalog[0]['endpoints']))
        self.assertItemsEqual([self.endpoint_id, endpoint_id2], ep_id_list)
        # remove the endpoint2 from the default project, bypassing
        # catalog_api API manager.
        self.catalog_api.driver.remove_endpoint_from_project(
            endpoint_id2,
            self.default_domain_project_id)
        # but, we still get back two endpoints from the cache,
        # since the catalog is pulled out from cache and the cache
        # hasn't been invalidated.
        catalog = self.catalog_api.get_v3_catalog(
            user_id,
            self.default_domain_project_id)
        self.assertEqual(2, len(catalog[0]['endpoints']))
        # add back the endpoint2 to the default project, and remove it by
        # catalog_api API manager.
        self.catalog_api.driver.add_endpoint_to_project(
            endpoint_id2,
            self.default_domain_project_id)
        # remove the endpoint2 from the default project, this can be done
        # by calling the catalog_api API manager directly but call
        # the REST API instead for consistency.
        self.delete('/OS-EP-FILTER/projects/%(project_id)s'
                    '/endpoints/%(endpoint_id)s' % {
                        'project_id': self.default_domain_project_id,
                        'endpoint_id': endpoint_id2})
        # should only get back one endpoint since the cache has been
        # invalidated after the endpoint project association was removed.
        catalog = self.catalog_api.get_v3_catalog(
            user_id,
            self.default_domain_project_id)
        self.assertEqual(1, len(catalog[0]['endpoints']))
        self.assertEqual(self.endpoint_id, catalog[0]['endpoints'][0]['id'])
class EndpointFilterTokenRequestTestCase(EndpointFilterTestCase):
    """Verify that issued tokens honor OS-EP-FILTER associations.

    Each test creates project-endpoint associations, authenticates, and
    checks that the token catalog only contains the associated endpoints.
    """

    def test_project_scoped_token_using_endpoint_filter(self):
        """Verify endpoints from project scoped token filtered."""
        # create a project to work with
        ref = unit.new_project_ref(domain_id=self.domain_id)
        r = self.post('/projects', body={'project': ref})
        project = self.assertValidProjectResponse(r, ref)

        # grant the user a role on the project
        self.put(
            '/projects/%(project_id)s/users/%(user_id)s/roles/%(role_id)s' % {
                'user_id': self.user['id'],
                'project_id': project['id'],
                'role_id': self.role['id']})

        # set the user's preferred project
        body = {'user': {'default_project_id': project['id']}}
        r = self.patch('/users/%(user_id)s' % {
            'user_id': self.user['id']},
            body=body)
        self.assertValidUserResponse(r)

        # add one endpoint to the project
        self.put('/OS-EP-FILTER/projects/%(project_id)s'
                 '/endpoints/%(endpoint_id)s' % {
                     'project_id': project['id'],
                     'endpoint_id': self.endpoint_id})

        # attempt to authenticate without requesting a project; the user's
        # default project is used, so a project-scoped token comes back.
        auth_data = self.build_authentication_request(
            user_id=self.user['id'],
            password=self.user['password'])
        r = self.post('/auth/tokens', body=auth_data)
        self.assertValidProjectScopedTokenResponse(
            r,
            require_catalog=True,
            endpoint_filter=True,
            ep_filter_assoc=1)
        self.assertEqual(project['id'], r.result['token']['project']['id'])

    def test_default_scoped_token_using_endpoint_filter(self):
        """Verify endpoints from default scoped token filtered."""
        # add one endpoint to default project
        self.put('/OS-EP-FILTER/projects/%(project_id)s'
                 '/endpoints/%(endpoint_id)s' % {
                     'project_id': self.project['id'],
                     'endpoint_id': self.endpoint_id})

        auth_data = self.build_authentication_request(
            user_id=self.user['id'],
            password=self.user['password'],
            project_id=self.project['id'])
        r = self.post('/auth/tokens', body=auth_data)
        self.assertValidProjectScopedTokenResponse(
            r,
            require_catalog=True,
            endpoint_filter=True,
            ep_filter_assoc=1)
        self.assertEqual(self.project['id'],
                         r.result['token']['project']['id'])

        # Ensure name of the service exists
        self.assertIn('name', r.result['token']['catalog'][0])

        # region and region_id should be the same in endpoints
        endpoint = r.result['token']['catalog'][0]['endpoints'][0]
        self.assertIn('region', endpoint)
        self.assertIn('region_id', endpoint)
        self.assertEqual(endpoint['region'], endpoint['region_id'])

    def test_scoped_token_with_no_catalog_using_endpoint_filter(self):
        """Verify endpoint filter does not affect no catalog."""
        self.put('/OS-EP-FILTER/projects/%(project_id)s'
                 '/endpoints/%(endpoint_id)s' % {
                     'project_id': self.project['id'],
                     'endpoint_id': self.endpoint_id})

        auth_data = self.build_authentication_request(
            user_id=self.user['id'],
            password=self.user['password'],
            project_id=self.project['id'])
        # ?nocatalog suppresses the catalog entirely, filter or not.
        r = self.post('/auth/tokens?nocatalog', body=auth_data)
        self.assertValidProjectScopedTokenResponse(
            r,
            require_catalog=False)
        self.assertEqual(self.project['id'],
                         r.result['token']['project']['id'])

    def test_invalid_endpoint_project_association(self):
        """Verify an invalid endpoint-project association is handled."""
        # add first endpoint to default project
        self.put('/OS-EP-FILTER/projects/%(project_id)s'
                 '/endpoints/%(endpoint_id)s' % {
                     'project_id': self.project['id'],
                     'endpoint_id': self.endpoint_id})

        # create a second temporary endpoint
        endpoint_id2 = uuid.uuid4().hex
        endpoint2 = unit.new_endpoint_ref(service_id=self.service_id,
                                          region_id=self.region_id,
                                          interface='public',
                                          id=endpoint_id2)
        self.catalog_api.create_endpoint(endpoint_id2, endpoint2.copy())

        # add second endpoint to default project
        self.put('/OS-EP-FILTER/projects/%(project_id)s'
                 '/endpoints/%(endpoint_id)s' % {
                     'project_id': self.project['id'],
                     'endpoint_id': endpoint_id2})

        # remove the temporary reference
        # this will create inconsistency in the endpoint filter table
        # which is fixed during the catalog creation for token request
        self.catalog_api.delete_endpoint(endpoint_id2)

        auth_data = self.build_authentication_request(
            user_id=self.user['id'],
            password=self.user['password'],
            project_id=self.project['id'])
        r = self.post('/auth/tokens', body=auth_data)
        self.assertValidProjectScopedTokenResponse(
            r,
            require_catalog=True,
            endpoint_filter=True,
            ep_filter_assoc=1)
        self.assertEqual(self.project['id'],
                         r.result['token']['project']['id'])

    def test_disabled_endpoint(self):
        """Test that a disabled endpoint is handled."""
        # Add an enabled endpoint to the default project
        self.put('/OS-EP-FILTER/projects/%(project_id)s'
                 '/endpoints/%(endpoint_id)s' % {
                     'project_id': self.project['id'],
                     'endpoint_id': self.endpoint_id})

        # Add a disabled endpoint to the default project.
        # Create a disabled endpoint that's like the enabled one.
        disabled_endpoint_ref = copy.copy(self.endpoint)
        disabled_endpoint_id = uuid.uuid4().hex
        disabled_endpoint_ref.update({
            'id': disabled_endpoint_id,
            'enabled': False,
            'interface': 'internal'
        })
        self.catalog_api.create_endpoint(disabled_endpoint_id,
                                         disabled_endpoint_ref)
        self.put('/OS-EP-FILTER/projects/%(project_id)s'
                 '/endpoints/%(endpoint_id)s' % {
                     'project_id': self.project['id'],
                     'endpoint_id': disabled_endpoint_id})

        # Authenticate to get token with catalog
        auth_data = self.build_authentication_request(
            user_id=self.user['id'],
            password=self.user['password'],
            project_id=self.project['id'])
        r = self.post('/auth/tokens', body=auth_data)

        # Only the enabled endpoint may appear in the catalog.
        endpoints = r.result['token']['catalog'][0]['endpoints']
        endpoint_ids = [ep['id'] for ep in endpoints]
        self.assertEqual([self.endpoint_id], endpoint_ids)

    def test_multiple_endpoint_project_associations(self):
        """Only associated endpoints appear in the token catalog."""

        def _create_an_endpoint():
            # helper: POST a new public endpoint and return its id.
            endpoint_ref = unit.new_endpoint_ref(service_id=self.service_id,
                                                 interface='public',
                                                 region_id=self.region_id)
            r = self.post('/endpoints', body={'endpoint': endpoint_ref})
            return r.result['endpoint']['id']

        # create three endpoints
        endpoint_id1 = _create_an_endpoint()
        endpoint_id2 = _create_an_endpoint()
        _create_an_endpoint()

        # only associate two endpoints with project
        self.put('/OS-EP-FILTER/projects/%(project_id)s'
                 '/endpoints/%(endpoint_id)s' % {
                     'project_id': self.project['id'],
                     'endpoint_id': endpoint_id1})
        self.put('/OS-EP-FILTER/projects/%(project_id)s'
                 '/endpoints/%(endpoint_id)s' % {
                     'project_id': self.project['id'],
                     'endpoint_id': endpoint_id2})

        # there should be only two endpoints in token catalog
        auth_data = self.build_authentication_request(
            user_id=self.user['id'],
            password=self.user['password'],
            project_id=self.project['id'])
        r = self.post('/auth/tokens', body=auth_data)
        self.assertValidProjectScopedTokenResponse(
            r,
            require_catalog=True,
            endpoint_filter=True,
            ep_filter_assoc=2)

    def test_get_auth_catalog_using_endpoint_filter(self):
        """GET /auth/catalog matches the filtered token catalog."""
        # add one endpoint to default project
        self.put('/OS-EP-FILTER/projects/%(project_id)s'
                 '/endpoints/%(endpoint_id)s' % {
                     'project_id': self.project['id'],
                     'endpoint_id': self.endpoint_id})

        auth_data = self.build_authentication_request(
            user_id=self.user['id'],
            password=self.user['password'],
            project_id=self.project['id'])
        token_data = self.post('/auth/tokens', body=auth_data)
        self.assertValidProjectScopedTokenResponse(
            token_data,
            require_catalog=True,
            endpoint_filter=True,
            ep_filter_assoc=1)

        # /auth/catalog must return the same catalog embedded in the token.
        auth_catalog = self.get('/auth/catalog',
                                token=token_data.headers['X-Subject-Token'])
        self.assertEqual(token_data.result['token']['catalog'],
                         auth_catalog.result['catalog'])
class JsonHomeTests(EndpointFilterTestCase, test_v3.JsonHomeTestMixin):
    """Validate JSON home document entries for OS-EP-FILTER resources.

    The mixin checks that each relationship URL below appears in the JSON
    home document with the given href/href-template and href-vars.
    """

    # Expected JSON home relationships for the OS-EP-FILTER 1.0 extension.
    JSON_HOME_DATA = {
        'http://docs.openstack.org/api/openstack-identity/3/ext/OS-EP-FILTER/'
        '1.0/rel/endpoint_projects': {
            'href-template': '/OS-EP-FILTER/endpoints/{endpoint_id}/projects',
            'href-vars': {
                'endpoint_id':
                'http://docs.openstack.org/api/openstack-identity/3/param/'
                'endpoint_id',
            },
        },
        'http://docs.openstack.org/api/openstack-identity/3/ext/OS-EP-FILTER/'
        '1.0/rel/endpoint_groups': {
            'href': '/OS-EP-FILTER/endpoint_groups',
        },
        'http://docs.openstack.org/api/openstack-identity/3/ext/OS-EP-FILTER/'
        '1.0/rel/endpoint_group': {
            'href-template': '/OS-EP-FILTER/endpoint_groups/'
                             '{endpoint_group_id}',
            'href-vars': {
                'endpoint_group_id':
                'http://docs.openstack.org/api/openstack-identity/3/'
                'ext/OS-EP-FILTER/1.0/param/endpoint_group_id',
            },
        },
        'http://docs.openstack.org/api/openstack-identity/3/ext/OS-EP-FILTER/'
        '1.0/rel/endpoint_group_to_project_association': {
            'href-template': '/OS-EP-FILTER/endpoint_groups/'
                             '{endpoint_group_id}/projects/{project_id}',
            'href-vars': {
                'project_id':
                'http://docs.openstack.org/api/openstack-identity/3/param/'
                'project_id',
                'endpoint_group_id':
                'http://docs.openstack.org/api/openstack-identity/3/'
                'ext/OS-EP-FILTER/1.0/param/endpoint_group_id',
            },
        },
        'http://docs.openstack.org/api/openstack-identity/3/ext/OS-EP-FILTER/'
        '1.0/rel/projects_associated_with_endpoint_group': {
            'href-template': '/OS-EP-FILTER/endpoint_groups/'
                             '{endpoint_group_id}/projects',
            'href-vars': {
                'endpoint_group_id':
                'http://docs.openstack.org/api/openstack-identity/3/'
                'ext/OS-EP-FILTER/1.0/param/endpoint_group_id',
            },
        },
        'http://docs.openstack.org/api/openstack-identity/3/ext/OS-EP-FILTER/'
        '1.0/rel/endpoints_in_endpoint_group': {
            'href-template': '/OS-EP-FILTER/endpoint_groups/'
                             '{endpoint_group_id}/endpoints',
            'href-vars': {
                'endpoint_group_id':
                'http://docs.openstack.org/api/openstack-identity/3/'
                'ext/OS-EP-FILTER/1.0/param/endpoint_group_id',
            },
        },
        'http://docs.openstack.org/api/openstack-identity/3/ext/OS-EP-FILTER/'
        '1.0/rel/project_endpoint_groups': {
            'href-template': '/OS-EP-FILTER/projects/{project_id}/'
                             'endpoint_groups',
            'href-vars': {
                'project_id':
                'http://docs.openstack.org/api/openstack-identity/3/param/'
                'project_id',
            },
        },
    }
class EndpointGroupCRUDTestCase(EndpointFilterTestCase):
DEFAULT_ENDPOINT_GROUP_BODY = {
'endpoint_group': {
'description': 'endpoint group description',
'filters': {
'interface': 'admin'
},
'name': 'endpoint_group_name'
}
}
DEFAULT_ENDPOINT_GROUP_URL = '/OS-EP-FILTER/endpoint_groups'
def test_create_endpoint_group(self):
"""POST /OS-EP-FILTER/endpoint_groups
Valid endpoint group test case.
"""
r = self.post(self.DEFAULT_ENDPOINT_GROUP_URL,
body=self.DEFAULT_ENDPOINT_GROUP_BODY)
expected_filters = (self.DEFAULT_ENDPOINT_GROUP_BODY
['endpoint_group']['filters'])
expected_name = (self.DEFAULT_ENDPOINT_GROUP_BODY
['endpoint_group']['name'])
self.assertEqual(expected_filters,
r.result['endpoint_group']['filters'])
self.assertEqual(expected_name, r.result['endpoint_group']['name'])
self.assertThat(
r.result['endpoint_group']['links']['self'],
matchers.EndsWith(
'/OS-EP-FILTER/endpoint_groups/%(endpoint_group_id)s' % {
'endpoint_group_id': r.result['endpoint_group']['id']}))
def test_create_invalid_endpoint_group(self):
"""POST /OS-EP-FILTER/endpoint_groups
Invalid endpoint group creation test case.
"""
invalid_body = copy.deepcopy(self.DEFAULT_ENDPOINT_GROUP_BODY)
invalid_body['endpoint_group']['filters'] = {'foobar': 'admin'}
self.post(self.DEFAULT_ENDPOINT_GROUP_URL,
body=invalid_body,
expected_status=http_client.BAD_REQUEST)
def test_get_endpoint_group(self):
"""GET /OS-EP-FILTER/endpoint_groups/{endpoint_group}
Valid endpoint group test case.
"""
# create an endpoint group to work with
response = self.post(self.DEFAULT_ENDPOINT_GROUP_URL,
body=self.DEFAULT_ENDPOINT_GROUP_BODY)
endpoint_group_id = response.result['endpoint_group']['id']
endpoint_group_filters = response.result['endpoint_group']['filters']
endpoint_group_name = response.result['endpoint_group']['name']
url = '/OS-EP-FILTER/endpoint_groups/%(endpoint_group_id)s' % {
'endpoint_group_id': endpoint_group_id}
self.get(url)
self.assertEqual(endpoint_group_id,
response.result['endpoint_group']['id'])
self.assertEqual(endpoint_group_filters,
response.result['endpoint_group']['filters'])
self.assertEqual(endpoint_group_name,
response.result['endpoint_group']['name'])
self.assertThat(response.result['endpoint_group']['links']['self'],
matchers.EndsWith(url))
def test_get_invalid_endpoint_group(self):
"""GET /OS-EP-FILTER/endpoint_groups/{endpoint_group}
Invalid endpoint group test case.
"""
endpoint_group_id = 'foobar'
url = '/OS-EP-FILTER/endpoint_groups/%(endpoint_group_id)s' % {
'endpoint_group_id': endpoint_group_id}
self.get(url, expected_status=http_client.NOT_FOUND)
def test_check_endpoint_group(self):
"""HEAD /OS-EP-FILTER/endpoint_groups/{endpoint_group_id}
Valid endpoint_group_id test case.
"""
# create an endpoint group to work with
endpoint_group_id = self._create_valid_endpoint_group(
self.DEFAULT_ENDPOINT_GROUP_URL, self.DEFAULT_ENDPOINT_GROUP_BODY)
url = '/OS-EP-FILTER/endpoint_groups/%(endpoint_group_id)s' % {
'endpoint_group_id': endpoint_group_id}
self.head(url, expected_status=http_client.OK)
def test_check_invalid_endpoint_group(self):
"""HEAD /OS-EP-FILTER/endpoint_groups/{endpoint_group_id}
Invalid endpoint_group_id test case.
"""
endpoint_group_id = 'foobar'
url = '/OS-EP-FILTER/endpoint_groups/%(endpoint_group_id)s' % {
'endpoint_group_id': endpoint_group_id}
self.head(url, expected_status=http_client.NOT_FOUND)
def test_patch_endpoint_group(self):
"""PATCH /OS-EP-FILTER/endpoint_groups/{endpoint_group}
Valid endpoint group patch test case.
"""
body = copy.deepcopy(self.DEFAULT_ENDPOINT_GROUP_BODY)
body['endpoint_group']['filters'] = {'region_id': 'UK'}
body['endpoint_group']['name'] = 'patch_test'
# create an endpoint group to work with
endpoint_group_id = self._create_valid_endpoint_group(
self.DEFAULT_ENDPOINT_GROUP_URL, self.DEFAULT_ENDPOINT_GROUP_BODY)
url = '/OS-EP-FILTER/endpoint_groups/%(endpoint_group_id)s' % {
'endpoint_group_id': endpoint_group_id}
r = self.patch(url, body=body)
self.assertEqual(endpoint_group_id,
r.result['endpoint_group']['id'])
self.assertEqual(body['endpoint_group']['filters'],
r.result['endpoint_group']['filters'])
self.assertThat(r.result['endpoint_group']['links']['self'],
matchers.EndsWith(url))
def test_patch_nonexistent_endpoint_group(self):
"""PATCH /OS-EP-FILTER/endpoint_groups/{endpoint_group}
Invalid endpoint group patch test case.
"""
body = {
'endpoint_group': {
'name': 'patch_test'
}
}
url = '/OS-EP-FILTER/endpoint_groups/%(endpoint_group_id)s' % {
'endpoint_group_id': 'ABC'}
self.patch(url, body=body, expected_status=http_client.NOT_FOUND)
def test_patch_invalid_endpoint_group(self):
"""PATCH /OS-EP-FILTER/endpoint_groups/{endpoint_group}
Valid endpoint group patch test case.
"""
body = {
'endpoint_group': {
'description': 'endpoint group description',
'filters': {
'region': 'UK'
},
'name': 'patch_test'
}
}
# create an endpoint group to work with
endpoint_group_id = self._create_valid_endpoint_group(
self.DEFAULT_ENDPOINT_GROUP_URL, self.DEFAULT_ENDPOINT_GROUP_BODY)
url = '/OS-EP-FILTER/endpoint_groups/%(endpoint_group_id)s' % {
'endpoint_group_id': endpoint_group_id}
self.patch(url, body=body, expected_status=http_client.BAD_REQUEST)
# Perform a GET call to ensure that the content remains
# the same (as DEFAULT_ENDPOINT_GROUP_BODY) after attempting to update
# with an invalid filter
url = '/OS-EP-FILTER/endpoint_groups/%(endpoint_group_id)s' % {
'endpoint_group_id': endpoint_group_id}
r = self.get(url)
del r.result['endpoint_group']['id']
del r.result['endpoint_group']['links']
self.assertDictEqual(self.DEFAULT_ENDPOINT_GROUP_BODY, r.result)
def test_delete_endpoint_group(self):
"""GET /OS-EP-FILTER/endpoint_groups/{endpoint_group}
Valid endpoint group test case.
"""
# create an endpoint group to work with
endpoint_group_id = self._create_valid_endpoint_group(
self.DEFAULT_ENDPOINT_GROUP_URL, self.DEFAULT_ENDPOINT_GROUP_BODY)
url = '/OS-EP-FILTER/endpoint_groups/%(endpoint_group_id)s' % {
'endpoint_group_id': endpoint_group_id}
self.delete(url)
self.get(url, expected_status=http_client.NOT_FOUND)
def test_delete_invalid_endpoint_group(self):
"""GET /OS-EP-FILTER/endpoint_groups/{endpoint_group}
Invalid endpoint group test case.
"""
endpoint_group_id = 'foobar'
url = '/OS-EP-FILTER/endpoint_groups/%(endpoint_group_id)s' % {
'endpoint_group_id': endpoint_group_id}
self.delete(url, expected_status=http_client.NOT_FOUND)
def test_add_endpoint_group_to_project(self):
"""Create a valid endpoint group and project association."""
endpoint_group_id = self._create_valid_endpoint_group(
self.DEFAULT_ENDPOINT_GROUP_URL, self.DEFAULT_ENDPOINT_GROUP_BODY)
self._create_endpoint_group_project_association(endpoint_group_id,
self.project_id)
def test_add_endpoint_group_to_project_with_invalid_project_id(self):
"""Create an invalid endpoint group and project association."""
# create an endpoint group to work with
endpoint_group_id = self._create_valid_endpoint_group(
self.DEFAULT_ENDPOINT_GROUP_URL, self.DEFAULT_ENDPOINT_GROUP_BODY)
# associate endpoint group with project
project_id = uuid.uuid4().hex
url = self._get_project_endpoint_group_url(
endpoint_group_id, project_id)
self.put(url, expected_status=http_client.NOT_FOUND)
def test_get_endpoint_group_in_project(self):
"""Test retrieving project endpoint group association."""
# create an endpoint group to work with
endpoint_group_id = self._create_valid_endpoint_group(
self.DEFAULT_ENDPOINT_GROUP_URL, self.DEFAULT_ENDPOINT_GROUP_BODY)
# associate endpoint group with project
url = self._get_project_endpoint_group_url(
endpoint_group_id, self.project_id)
self.put(url)
response = self.get(url)
self.assertEqual(
endpoint_group_id,
response.result['project_endpoint_group']['endpoint_group_id'])
self.assertEqual(
self.project_id,
response.result['project_endpoint_group']['project_id'])
def test_get_invalid_endpoint_group_in_project(self):
"""Test retrieving project endpoint group association."""
endpoint_group_id = uuid.uuid4().hex
project_id = uuid.uuid4().hex
url = self._get_project_endpoint_group_url(
endpoint_group_id, project_id)
self.get(url, expected_status=http_client.NOT_FOUND)
def test_list_endpoint_groups_in_project(self):
"""GET /OS-EP-FILTER/projects/{project_id}/endpoint_groups."""
# create an endpoint group to work with
endpoint_group_id = self._create_valid_endpoint_group(
self.DEFAULT_ENDPOINT_GROUP_URL, self.DEFAULT_ENDPOINT_GROUP_BODY)
# associate endpoint group with project
url = self._get_project_endpoint_group_url(
endpoint_group_id, self.project_id)
self.put(url)
url = ('/OS-EP-FILTER/projects/%(project_id)s/endpoint_groups' %
{'project_id': self.project_id})
response = self.get(url)
self.assertEqual(
endpoint_group_id,
response.result['endpoint_groups'][0]['id'])
def test_list_endpoint_groups_in_invalid_project(self):
"""Test retrieving from invalid project."""
project_id = uuid.uuid4().hex
url = ('/OS-EP-FILTER/projects/%(project_id)s/endpoint_groups' %
{'project_id': project_id})
self.get(url, expected_status=http_client.NOT_FOUND)
def test_empty_endpoint_groups_in_project(self):
"""Test when no endpoint groups associated with the project."""
url = ('/OS-EP-FILTER/projects/%(project_id)s/endpoint_groups' %
{'project_id': self.project_id})
response = self.get(url)
self.assertEqual(0, len(response.result['endpoint_groups']))
def test_check_endpoint_group_to_project(self):
"""Test HEAD with a valid endpoint group and project association."""
endpoint_group_id = self._create_valid_endpoint_group(
self.DEFAULT_ENDPOINT_GROUP_URL, self.DEFAULT_ENDPOINT_GROUP_BODY)
self._create_endpoint_group_project_association(endpoint_group_id,
self.project_id)
url = self._get_project_endpoint_group_url(
endpoint_group_id, self.project_id)
self.head(url, expected_status=http_client.OK)
def test_check_endpoint_group_to_project_with_invalid_project_id(self):
"""Test HEAD with an invalid endpoint group and project association."""
# create an endpoint group to work with
endpoint_group_id = self._create_valid_endpoint_group(
self.DEFAULT_ENDPOINT_GROUP_URL, self.DEFAULT_ENDPOINT_GROUP_BODY)
# create an endpoint group to project association
url = self._get_project_endpoint_group_url(
endpoint_group_id, self.project_id)
self.put(url)
# send a head request with an invalid project id
project_id = uuid.uuid4().hex
url = self._get_project_endpoint_group_url(
endpoint_group_id, project_id)
self.head(url, expected_status=http_client.NOT_FOUND)
def test_list_endpoint_groups(self):
"""GET /OS-EP-FILTER/endpoint_groups."""
# create an endpoint group to work with
endpoint_group_id = self._create_valid_endpoint_group(
self.DEFAULT_ENDPOINT_GROUP_URL, self.DEFAULT_ENDPOINT_GROUP_BODY)
# recover all endpoint groups
url = '/OS-EP-FILTER/endpoint_groups'
r = self.get(url)
self.assertNotEmpty(r.result['endpoint_groups'])
self.assertEqual(endpoint_group_id,
r.result['endpoint_groups'][0].get('id'))
def test_list_projects_associated_with_endpoint_group(self):
"""GET /OS-EP-FILTER/endpoint_groups/{endpoint_group}/projects
Valid endpoint group test case.
"""
# create an endpoint group to work with
endpoint_group_id = self._create_valid_endpoint_group(
self.DEFAULT_ENDPOINT_GROUP_URL, self.DEFAULT_ENDPOINT_GROUP_BODY)
# associate endpoint group with project
self._create_endpoint_group_project_association(endpoint_group_id,
self.project_id)
# recover list of projects associated with endpoint group
url = ('/OS-EP-FILTER/endpoint_groups/%(endpoint_group_id)s'
'/projects' %
{'endpoint_group_id': endpoint_group_id})
self.get(url)
def test_list_endpoints_associated_with_endpoint_group(self):
"""GET /OS-EP-FILTER/endpoint_groups/{endpoint_group}/endpoints
Valid endpoint group test case.
"""
# create a service
service_ref = unit.new_service_ref()
response = self.post(
'/services',
body={'service': service_ref})
service_id = response.result['service']['id']
# create an endpoint
endpoint_ref = unit.new_endpoint_ref(service_id=service_id,
interface='public',
region_id=self.region_id)
response = self.post('/endpoints', body={'endpoint': endpoint_ref})
endpoint_id = response.result['endpoint']['id']
# create an endpoint group
body = copy.deepcopy(self.DEFAULT_ENDPOINT_GROUP_BODY)
body['endpoint_group']['filters'] = {'service_id': service_id}
endpoint_group_id = self._create_valid_endpoint_group(
self.DEFAULT_ENDPOINT_GROUP_URL, body)
# create association
self._create_endpoint_group_project_association(endpoint_group_id,
self.project_id)
# recover list of endpoints associated with endpoint group
url = ('/OS-EP-FILTER/endpoint_groups/%(endpoint_group_id)s'
'/endpoints' % {'endpoint_group_id': endpoint_group_id})
r = self.get(url)
self.assertNotEmpty(r.result['endpoints'])
self.assertEqual(endpoint_id, r.result['endpoints'][0].get('id'))
    def test_list_endpoints_associated_with_project_endpoint_group(self):
        """GET /OS-EP-FILTER/projects/{project_id}/endpoints

        Valid project, endpoint id, and endpoint group test case.
        """
        # create a temporary service
        service_ref = unit.new_service_ref()
        response = self.post('/services', body={'service': service_ref})
        service_id2 = response.result['service']['id']
        # create additional endpoints
        self._create_endpoint_and_associations(
            self.default_domain_project_id, service_id2)
        self._create_endpoint_and_associations(
            self.default_domain_project_id)
        # create project and endpoint association with default endpoint:
        self.put(self.default_request_url)
        # create an endpoint group that contains a different endpoint
        body = copy.deepcopy(self.DEFAULT_ENDPOINT_GROUP_BODY)
        body['endpoint_group']['filters'] = {'service_id': service_id2}
        endpoint_group_id = self._create_valid_endpoint_group(
            self.DEFAULT_ENDPOINT_GROUP_URL, body)
        # associate endpoint group with project
        self._create_endpoint_group_project_association(
            endpoint_group_id, self.default_domain_project_id)
        # Now get a list of the filtered endpoints: one from the direct
        # project association plus one via the endpoint group.
        endpoints_url = '/OS-EP-FILTER/projects/%(project_id)s/endpoints' % {
            'project_id': self.default_domain_project_id}
        r = self.get(endpoints_url)
        endpoints = self.assertValidEndpointListResponse(r)
        self.assertEqual(2, len(endpoints))
        # Ensure catalog includes the endpoints from endpoint_group project
        # association, this is needed when a project scoped token is issued
        # and "endpoint_filter.sql" backend driver is in place.
        user_id = uuid.uuid4().hex
        catalog_list = self.catalog_api.get_v3_catalog(
            user_id,
            self.default_domain_project_id)
        self.assertEqual(2, len(catalog_list))
        # Now remove project endpoint group association
        url = self._get_project_endpoint_group_url(
            endpoint_group_id, self.default_domain_project_id)
        self.delete(url)
        # Now remove endpoint group
        url = '/OS-EP-FILTER/endpoint_groups/%(endpoint_group_id)s' % {
            'endpoint_group_id': endpoint_group_id}
        self.delete(url)
        # Only the directly associated endpoint remains, in both the
        # filtered listing and the catalog.
        r = self.get(endpoints_url)
        endpoints = self.assertValidEndpointListResponse(r)
        self.assertEqual(1, len(endpoints))
        catalog_list = self.catalog_api.get_v3_catalog(
            user_id,
            self.default_domain_project_id)
        self.assertEqual(1, len(catalog_list))
def test_endpoint_group_project_cleanup_with_project(self):
# create endpoint group
endpoint_group_id = self._create_valid_endpoint_group(
self.DEFAULT_ENDPOINT_GROUP_URL, self.DEFAULT_ENDPOINT_GROUP_BODY)
# create new project and associate with endpoint_group
project_ref = unit.new_project_ref(domain_id=self.domain_id)
r = self.post('/projects', body={'project': project_ref})
project = self.assertValidProjectResponse(r, project_ref)
url = self._get_project_endpoint_group_url(endpoint_group_id,
project['id'])
self.put(url)
# check that we can recover the project endpoint group association
self.get(url)
# Now delete the project and then try and retrieve the project
# endpoint group association again
self.delete('/projects/%(project_id)s' % {
'project_id': project['id']})
self.get(url, expected_status=http_client.NOT_FOUND)
    def test_endpoint_group_project_cleanup_with_endpoint_group(self):
        """A project/endpoint-group association can be deleted explicitly."""
        # create endpoint group
        endpoint_group_id = self._create_valid_endpoint_group(
            self.DEFAULT_ENDPOINT_GROUP_URL, self.DEFAULT_ENDPOINT_GROUP_BODY)
        # create new project and associate with endpoint_group
        project_ref = unit.new_project_ref(domain_id=self.domain_id)
        r = self.post('/projects', body={'project': project_ref})
        project = self.assertValidProjectResponse(r, project_ref)
        url = self._get_project_endpoint_group_url(endpoint_group_id,
                                                   project['id'])
        self.put(url)
        # check that we can recover the project endpoint group association
        self.get(url)
        # now remove the project endpoint group association
        self.delete(url)
        self.get(url, expected_status=http_client.NOT_FOUND)
    def test_removing_an_endpoint_group_project(self):
        """After DELETE on an association, a subsequent GET returns 404."""
        # create an endpoint group
        endpoint_group_id = self._create_valid_endpoint_group(
            self.DEFAULT_ENDPOINT_GROUP_URL, self.DEFAULT_ENDPOINT_GROUP_BODY)
        # create an endpoint_group project
        url = self._get_project_endpoint_group_url(
            endpoint_group_id, self.default_domain_project_id)
        self.put(url)
        # remove the endpoint group project
        self.delete(url)
        self.get(url, expected_status=http_client.NOT_FOUND)
    def test_remove_endpoint_group_with_project_association(self):
        """Deleting an endpoint group cascades to its project associations."""
        # create an endpoint group
        endpoint_group_id = self._create_valid_endpoint_group(
            self.DEFAULT_ENDPOINT_GROUP_URL, self.DEFAULT_ENDPOINT_GROUP_BODY)
        # create an endpoint_group project
        project_endpoint_group_url = self._get_project_endpoint_group_url(
            endpoint_group_id, self.default_domain_project_id)
        self.put(project_endpoint_group_url)
        # remove endpoint group, the associated endpoint_group project will
        # be removed as well.
        endpoint_group_url = ('/OS-EP-FILTER/endpoint_groups/'
                              '%(endpoint_group_id)s'
                              % {'endpoint_group_id': endpoint_group_id})
        self.delete(endpoint_group_url)
        self.get(endpoint_group_url, expected_status=http_client.NOT_FOUND)
        self.get(project_endpoint_group_url,
                 expected_status=http_client.NOT_FOUND)
    @unit.skip_if_cache_disabled('catalog')
    def test_add_endpoint_group_to_project_invalidates_catalog_cache(self):
        """Associating a group via the manager invalidates the catalog cache.

        Adding the association through the bare driver must NOT invalidate
        the cached catalog, while adding it through the catalog_api manager
        must.
        """
        # create another endpoint with 'admin' interface which matches
        # 'filters' definition in endpoint group, then there should be two
        # endpoints returned when retrieving v3 catalog if cache works as
        # expected.
        # this should be done at first since `create_endpoint` will also
        # invalidate cache.
        endpoint_id2 = uuid.uuid4().hex
        endpoint2 = unit.new_endpoint_ref(service_id=self.service_id,
                                          region_id=self.region_id,
                                          interface='admin',
                                          id=endpoint_id2)
        self.catalog_api.create_endpoint(endpoint_id2, endpoint2)
        # create a project and endpoint association.
        self.put(self.default_request_url)
        # there is only one endpoint associated with the default project.
        user_id = uuid.uuid4().hex
        catalog = self.catalog_api.get_v3_catalog(
            user_id,
            self.default_domain_project_id)
        self.assertThat(catalog[0]['endpoints'], matchers.HasLength(1))
        # create an endpoint group.
        endpoint_group_id = self._create_valid_endpoint_group(
            self.DEFAULT_ENDPOINT_GROUP_URL, self.DEFAULT_ENDPOINT_GROUP_BODY)
        # add the endpoint group to default project, bypassing
        # catalog_api API manager (so the cache is NOT invalidated).
        self.catalog_api.driver.add_endpoint_group_to_project(
            endpoint_group_id,
            self.default_domain_project_id)
        # can get back only one endpoint from the cache, since the catalog
        # is pulled out from cache.
        invalid_catalog = self.catalog_api.get_v3_catalog(
            user_id,
            self.default_domain_project_id)
        self.assertThat(invalid_catalog[0]['endpoints'],
                        matchers.HasLength(1))
        self.assertEqual(catalog, invalid_catalog)
        # remove the endpoint group from default project, and add it again via
        # catalog_api API manager.
        self.catalog_api.driver.remove_endpoint_group_from_project(
            endpoint_group_id,
            self.default_domain_project_id)
        # add the endpoint group to default project.
        self.catalog_api.add_endpoint_group_to_project(
            endpoint_group_id,
            self.default_domain_project_id)
        catalog = self.catalog_api.get_v3_catalog(
            user_id,
            self.default_domain_project_id)
        # now, it will return 2 endpoints since the cache has been
        # invalidated.
        self.assertThat(catalog[0]['endpoints'], matchers.HasLength(2))
        ep_id_list = [catalog[0]['endpoints'][0]['id'],
                      catalog[0]['endpoints'][1]['id']]
        self.assertItemsEqual([self.endpoint_id, endpoint_id2], ep_id_list)
    @unit.skip_if_cache_disabled('catalog')
    def test_remove_endpoint_group_from_project_invalidates_cache(self):
        """Removing a group via the manager invalidates the catalog cache.

        Removal through the bare driver must NOT invalidate the cached
        catalog, while removal through the catalog_api manager must.
        """
        # create another endpoint with 'admin' interface which matches
        # 'filters' definition in endpoint group, then there should be two
        # endpoints returned when retrieving v3 catalog. But only one
        # endpoint will return after the endpoint group's deletion if cache
        # works as expected.
        # this should be done at first since `create_endpoint` will also
        # invalidate cache.
        endpoint_id2 = uuid.uuid4().hex
        endpoint2 = unit.new_endpoint_ref(service_id=self.service_id,
                                          region_id=self.region_id,
                                          interface='admin',
                                          id=endpoint_id2)
        self.catalog_api.create_endpoint(endpoint_id2, endpoint2)
        # create project and endpoint association.
        self.put(self.default_request_url)
        # create an endpoint group.
        endpoint_group_id = self._create_valid_endpoint_group(
            self.DEFAULT_ENDPOINT_GROUP_URL, self.DEFAULT_ENDPOINT_GROUP_BODY)
        # add the endpoint group to default project.
        self.catalog_api.add_endpoint_group_to_project(
            endpoint_group_id,
            self.default_domain_project_id)
        # should get back two endpoints, one from endpoint project
        # association, the other one is from endpoint_group project
        # association.
        user_id = uuid.uuid4().hex
        catalog = self.catalog_api.get_v3_catalog(
            user_id,
            self.default_domain_project_id)
        self.assertThat(catalog[0]['endpoints'], matchers.HasLength(2))
        ep_id_list = [catalog[0]['endpoints'][0]['id'],
                      catalog[0]['endpoints'][1]['id']]
        self.assertItemsEqual([self.endpoint_id, endpoint_id2], ep_id_list)
        # remove endpoint_group project association, bypassing
        # catalog_api API manager.
        self.catalog_api.driver.remove_endpoint_group_from_project(
            endpoint_group_id,
            self.default_domain_project_id)
        # still get back two endpoints, since the catalog is pulled out
        # from cache and the cache hasn't been invalidated.
        invalid_catalog = self.catalog_api.get_v3_catalog(
            user_id,
            self.default_domain_project_id)
        self.assertThat(invalid_catalog[0]['endpoints'],
                        matchers.HasLength(2))
        self.assertEqual(catalog, invalid_catalog)
        # add back the endpoint_group project association and remove it from
        # manager.
        self.catalog_api.driver.add_endpoint_group_to_project(
            endpoint_group_id,
            self.default_domain_project_id)
        self.catalog_api.remove_endpoint_group_from_project(
            endpoint_group_id,
            self.default_domain_project_id)
        # should only get back one endpoint since the cache has been
        # invalidated after the endpoint_group project association was
        # removed.
        catalog = self.catalog_api.get_v3_catalog(
            user_id,
            self.default_domain_project_id)
        self.assertThat(catalog[0]['endpoints'], matchers.HasLength(1))
        self.assertEqual(self.endpoint_id, catalog[0]['endpoints'][0]['id'])
def _create_valid_endpoint_group(self, url, body):
r = self.post(url, body=body)
return r.result['endpoint_group']['id']
def _create_endpoint_group_project_association(self,
endpoint_group_id,
project_id):
url = self._get_project_endpoint_group_url(endpoint_group_id,
project_id)
self.put(url)
def _get_project_endpoint_group_url(self,
endpoint_group_id,
project_id):
return ('/OS-EP-FILTER/endpoint_groups/%(endpoint_group_id)s'
'/projects/%(project_id)s' %
{'endpoint_group_id': endpoint_group_id,
'project_id': project_id})
def _create_endpoint_and_associations(self, project_id, service_id=None):
"""Creates an endpoint associated with service and project."""
if not service_id:
# create a new service
service_ref = unit.new_service_ref()
response = self.post(
'/services', body={'service': service_ref})
service_id = response.result['service']['id']
# create endpoint
endpoint_ref = unit.new_endpoint_ref(service_id=service_id,
interface='public',
region_id=self.region_id)
response = self.post('/endpoints', body={'endpoint': endpoint_ref})
endpoint = response.result['endpoint']
# now add endpoint to project
self.put('/OS-EP-FILTER/projects/%(project_id)s'
'/endpoints/%(endpoint_id)s' % {
'project_id': self.project['id'],
'endpoint_id': endpoint['id']})
return endpoint
| 42.445402
| 79
| 0.627243
|
import copy
import uuid
import mock
from oslo_log import versionutils
from six.moves import http_client
from testtools import matchers
from keystone.contrib.endpoint_filter import routers
from keystone.tests import unit
from keystone.tests.unit import test_v3
class EndpointFilterTestCase(test_v3.RestfulTestCase):
    """Base test case enabling the endpoint-filter-aware catalog driver."""

    def config_overrides(self):
        # Swap the default catalog driver for the endpoint_filter.sql one so
        # project/endpoint associations affect the returned catalog.
        super(EndpointFilterTestCase, self).config_overrides()
        self.config_fixture.config(
            group='catalog', driver='endpoint_filter.sql')

    def setUp(self):
        super(EndpointFilterTestCase, self).setUp()
        # URL associating the default domain project with the test endpoint;
        # reused by most tests in the subclasses below.
        self.default_request_url = (
            '/OS-EP-FILTER/projects/%(project_id)s'
            '/endpoints/%(endpoint_id)s' % {
                'project_id': self.default_domain_project_id,
                'endpoint_id': self.endpoint_id})
class EndpointFilterDeprecateTestCase(test_v3.RestfulTestCase):
    """Verify that loading the legacy extension emits a deprecation."""

    @mock.patch.object(versionutils, 'report_deprecated_feature')
    def test_exception_happens(self, mock_deprecator):
        # Instantiating the legacy router must report the feature as
        # deprecated exactly once, with the expected removal message.
        routers.EndpointFilterExtension(mock.ANY)
        mock_deprecator.assert_called_once_with(mock.ANY, mock.ANY)
        args, _kwargs = mock_deprecator.call_args
        self.assertIn("Remove endpoint_filter_extension from", args[1])
class EndpointFilterCRUDTestCase(EndpointFilterTestCase):
    """CRUD tests for project <-> endpoint associations (OS-EP-FILTER)."""

    def test_create_endpoint_project_association(self):
        """PUT on a valid project/endpoint pair succeeds."""
        self.put(self.default_request_url)

    def test_create_endpoint_project_association_with_invalid_project(self):
        """PUT with an unknown project id returns 404."""
        self.put('/OS-EP-FILTER/projects/%(project_id)s'
                 '/endpoints/%(endpoint_id)s' % {
                     'project_id': uuid.uuid4().hex,
                     'endpoint_id': self.endpoint_id},
                 expected_status=http_client.NOT_FOUND)

    def test_create_endpoint_project_association_with_invalid_endpoint(self):
        """PUT with an unknown endpoint id returns 404."""
        self.put('/OS-EP-FILTER/projects/%(project_id)s'
                 '/endpoints/%(endpoint_id)s' % {
                     'project_id': self.default_domain_project_id,
                     'endpoint_id': uuid.uuid4().hex},
                 expected_status=http_client.NOT_FOUND)

    def test_create_endpoint_project_association_with_unexpected_body(self):
        """An unexpected request body does not make the PUT fail."""
        self.put(self.default_request_url,
                 body={'project_id': self.default_domain_project_id})

    def test_check_endpoint_project_association(self):
        """HEAD on an existing association succeeds."""
        self.put(self.default_request_url)
        self.head('/OS-EP-FILTER/projects/%(project_id)s'
                  '/endpoints/%(endpoint_id)s' % {
                      'project_id': self.default_domain_project_id,
                      'endpoint_id': self.endpoint_id})

    def test_check_endpoint_project_association_with_invalid_project(self):
        """HEAD with an unknown project id returns 404."""
        self.put(self.default_request_url)
        self.head('/OS-EP-FILTER/projects/%(project_id)s'
                  '/endpoints/%(endpoint_id)s' % {
                      'project_id': uuid.uuid4().hex,
                      'endpoint_id': self.endpoint_id},
                  expected_status=http_client.NOT_FOUND)

    def test_check_endpoint_project_association_with_invalid_endpoint(self):
        """HEAD with an unknown endpoint id returns 404."""
        self.put(self.default_request_url)
        self.head('/OS-EP-FILTER/projects/%(project_id)s'
                  '/endpoints/%(endpoint_id)s' % {
                      'project_id': self.default_domain_project_id,
                      'endpoint_id': uuid.uuid4().hex},
                  expected_status=http_client.NOT_FOUND)

    def test_list_endpoints_associated_with_valid_project(self):
        """GET lists the endpoints associated with a project."""
        self.put(self.default_request_url)
        resource_url = '/OS-EP-FILTER/projects/%(project_id)s/endpoints' % {
            'project_id': self.default_domain_project_id}
        r = self.get(resource_url)
        self.assertValidEndpointListResponse(r, self.endpoint,
                                             resource_url=resource_url)

    def test_list_endpoints_associated_with_invalid_project(self):
        """Listing endpoints of an unknown project returns 404."""
        self.put(self.default_request_url)
        self.get('/OS-EP-FILTER/projects/%(project_id)s/endpoints' % {
            'project_id': uuid.uuid4().hex},
            expected_status=http_client.NOT_FOUND)

    def test_list_projects_associated_with_endpoint(self):
        """GET lists the projects associated with an endpoint."""
        self.put(self.default_request_url)
        resource_url = '/OS-EP-FILTER/endpoints/%(endpoint_id)s/projects' % {
            'endpoint_id': self.endpoint_id}
        r = self.get(resource_url)
        self.assertValidProjectListResponse(r, self.default_domain_project,
                                            resource_url=resource_url)

    def test_list_projects_with_no_endpoint_project_association(self):
        """An endpoint with no associations lists zero projects."""
        r = self.get('/OS-EP-FILTER/endpoints/%(endpoint_id)s/projects' %
                     {'endpoint_id': self.endpoint_id})
        self.assertValidProjectListResponse(r, expected_length=0)

    def test_list_projects_associated_with_invalid_endpoint(self):
        """Listing projects of an unknown endpoint returns 404."""
        self.get('/OS-EP-FILTER/endpoints/%(endpoint_id)s/projects' %
                 {'endpoint_id': uuid.uuid4().hex},
                 expected_status=http_client.NOT_FOUND)

    def test_remove_endpoint_project_association(self):
        """DELETE removes an existing association."""
        self.put(self.default_request_url)
        self.delete('/OS-EP-FILTER/projects/%(project_id)s'
                    '/endpoints/%(endpoint_id)s' % {
                        'project_id': self.default_domain_project_id,
                        'endpoint_id': self.endpoint_id})

    def test_remove_endpoint_project_association_with_invalid_project(self):
        """DELETE with an unknown project id returns 404."""
        self.put(self.default_request_url)
        self.delete('/OS-EP-FILTER/projects/%(project_id)s'
                    '/endpoints/%(endpoint_id)s' % {
                        'project_id': uuid.uuid4().hex,
                        'endpoint_id': self.endpoint_id},
                    expected_status=http_client.NOT_FOUND)

    def test_remove_endpoint_project_association_with_invalid_endpoint(self):
        """DELETE with an unknown endpoint id returns 404."""
        self.put(self.default_request_url)
        self.delete('/OS-EP-FILTER/projects/%(project_id)s'
                    '/endpoints/%(endpoint_id)s' % {
                        'project_id': self.default_domain_project_id,
                        'endpoint_id': uuid.uuid4().hex},
                    expected_status=http_client.NOT_FOUND)

    def test_endpoint_project_association_cleanup_when_project_deleted(self):
        """Deleting a project removes its endpoint associations."""
        self.put(self.default_request_url)
        association_url = ('/OS-EP-FILTER/endpoints/%(endpoint_id)s/projects' %
                           {'endpoint_id': self.endpoint_id})
        r = self.get(association_url)
        self.assertValidProjectListResponse(r, expected_length=1)
        self.delete('/projects/%(project_id)s' % {
            'project_id': self.default_domain_project_id})
        r = self.get(association_url)
        self.assertValidProjectListResponse(r, expected_length=0)

    def test_endpoint_project_association_cleanup_when_endpoint_deleted(self):
        """Deleting an endpoint removes its project associations."""
        self.put(self.default_request_url)
        association_url = '/OS-EP-FILTER/projects/%(project_id)s/endpoints' % {
            'project_id': self.default_domain_project_id}
        r = self.get(association_url)
        self.assertValidEndpointListResponse(r, expected_length=1)
        self.delete('/endpoints/%(endpoint_id)s' % {
            'endpoint_id': self.endpoint_id})
        r = self.get(association_url)
        self.assertValidEndpointListResponse(r, expected_length=0)

    @unit.skip_if_cache_disabled('catalog')
    def test_create_endpoint_project_association_invalidates_cache(self):
        """Adding an association via the REST API invalidates the cache."""
        # create a second endpoint up front, since `create_endpoint` would
        # itself invalidate the cache if done later.
        endpoint_id2 = uuid.uuid4().hex
        endpoint2 = unit.new_endpoint_ref(service_id=self.service_id,
                                          region_id=self.region_id,
                                          interface='public',
                                          id=endpoint_id2)
        self.catalog_api.create_endpoint(endpoint_id2, endpoint2.copy())
        # create endpoint project association for the first endpoint.
        self.put(self.default_request_url)
        # the catalog contains only the first endpoint.
        user_id = uuid.uuid4().hex
        catalog = self.catalog_api.get_v3_catalog(
            user_id,
            self.default_domain_project_id)
        self.assertEqual(1, len(catalog[0]['endpoints']))
        self.assertEqual(self.endpoint_id, catalog[0]['endpoints'][0]['id'])
        # add endpoint2 bypassing the catalog_api manager, so the cache is
        # NOT invalidated and the stale catalog still shows one endpoint.
        self.catalog_api.driver.add_endpoint_to_project(
            endpoint_id2,
            self.default_domain_project_id)
        catalog = self.catalog_api.get_v3_catalog(
            user_id,
            self.default_domain_project_id)
        self.assertEqual(1, len(catalog[0]['endpoints']))
        # remove the endpoint2 from the default project, and add it again via
        # catalog_api API manager.
        self.catalog_api.driver.remove_endpoint_from_project(
            endpoint_id2,
            self.default_domain_project_id)
        # add second endpoint to default project, this can be done by calling
        # the catalog_api API manager directly but call the REST API
        # instead for consistency.
        self.put('/OS-EP-FILTER/projects/%(project_id)s'
                 '/endpoints/%(endpoint_id)s' % {
                     'project_id': self.default_domain_project_id,
                     'endpoint_id': endpoint_id2})
        # should get back two endpoints since the cache has been
        # invalidated when the second endpoint was added to default project.
        catalog = self.catalog_api.get_v3_catalog(
            user_id,
            self.default_domain_project_id)
        self.assertEqual(2, len(catalog[0]['endpoints']))
        ep_id_list = [catalog[0]['endpoints'][0]['id'],
                      catalog[0]['endpoints'][1]['id']]
        self.assertItemsEqual([self.endpoint_id, endpoint_id2], ep_id_list)

    @unit.skip_if_cache_disabled('catalog')
    def test_remove_endpoint_from_project_invalidates_cache(self):
        """Removing an association via the REST API invalidates the cache."""
        # create a second endpoint up front, since `create_endpoint` would
        # itself invalidate the cache if done later.
        endpoint_id2 = uuid.uuid4().hex
        endpoint2 = unit.new_endpoint_ref(service_id=self.service_id,
                                          region_id=self.region_id,
                                          interface='public',
                                          id=endpoint_id2)
        self.catalog_api.create_endpoint(endpoint_id2, endpoint2.copy())
        # create endpoint project association.
        self.put(self.default_request_url)
        # add second endpoint to default project.
        self.put('/OS-EP-FILTER/projects/%(project_id)s'
                 '/endpoints/%(endpoint_id)s' % {
                     'project_id': self.default_domain_project_id,
                     'endpoint_id': endpoint_id2})
        # fetch the catalog for the default project.
        user_id = uuid.uuid4().hex
        catalog = self.catalog_api.get_v3_catalog(
            user_id,
            self.default_domain_project_id)
        # there are two endpoints associated with the default project.
        ep_id_list = [catalog[0]['endpoints'][0]['id'],
                      catalog[0]['endpoints'][1]['id']]
        self.assertEqual(2, len(catalog[0]['endpoints']))
        self.assertItemsEqual([self.endpoint_id, endpoint_id2], ep_id_list)
        # remove the endpoint2 from the default project, bypassing
        # catalog_api API manager.
        self.catalog_api.driver.remove_endpoint_from_project(
            endpoint_id2,
            self.default_domain_project_id)
        # but, we can still get back two endpoints, since the catalog is
        # pulled out from cache and it hasn't been invalidated.
        catalog = self.catalog_api.get_v3_catalog(
            user_id,
            self.default_domain_project_id)
        self.assertEqual(2, len(catalog[0]['endpoints']))
        # restore the association, then remove it through the REST API so
        # the cache IS invalidated this time.
        self.catalog_api.driver.add_endpoint_to_project(
            endpoint_id2,
            self.default_domain_project_id)
        self.delete('/OS-EP-FILTER/projects/%(project_id)s'
                    '/endpoints/%(endpoint_id)s' % {
                        'project_id': self.default_domain_project_id,
                        'endpoint_id': endpoint_id2})
        catalog = self.catalog_api.get_v3_catalog(
            user_id,
            self.default_domain_project_id)
        self.assertEqual(1, len(catalog[0]['endpoints']))
        self.assertEqual(self.endpoint_id, catalog[0]['endpoints'][0]['id'])
class EndpointFilterTokenRequestTestCase(EndpointFilterTestCase):
    """Token and catalog behaviour when endpoint filtering is active."""

    def test_project_scoped_token_using_endpoint_filter(self):
        """Unscoped auth falls back to the user's default project."""
        # create a project to scope to
        ref = unit.new_project_ref(domain_id=self.domain_id)
        r = self.post('/projects', body={'project': ref})
        project = self.assertValidProjectResponse(r, ref)
        # grant the user a role on the new project
        self.put(
            '/projects/%(project_id)s/users/%(user_id)s/roles/%(role_id)s' % {
                'user_id': self.user['id'],
                'project_id': project['id'],
                'role_id': self.role['id']})
        # make the new project the user's default project
        body = {'user': {'default_project_id': project['id']}}
        r = self.patch('/users/%(user_id)s' % {
            'user_id': self.user['id']},
            body=body)
        self.assertValidUserResponse(r)
        # add one endpoint to the project
        self.put('/OS-EP-FILTER/projects/%(project_id)s'
                 '/endpoints/%(endpoint_id)s' % {
                     'project_id': project['id'],
                     'endpoint_id': self.endpoint_id})
        # attempt to authenticate without requesting a project
        auth_data = self.build_authentication_request(
            user_id=self.user['id'],
            password=self.user['password'])
        r = self.post('/auth/tokens', body=auth_data)
        self.assertValidProjectScopedTokenResponse(
            r,
            require_catalog=True,
            endpoint_filter=True,
            ep_filter_assoc=1)
        self.assertEqual(project['id'], r.result['token']['project']['id'])

    def test_default_scoped_token_using_endpoint_filter(self):
        """A project-scoped token carries only the associated endpoints."""
        # add one endpoint to default project
        self.put('/OS-EP-FILTER/projects/%(project_id)s'
                 '/endpoints/%(endpoint_id)s' % {
                     'project_id': self.project['id'],
                     'endpoint_id': self.endpoint_id})
        auth_data = self.build_authentication_request(
            user_id=self.user['id'],
            password=self.user['password'],
            project_id=self.project['id'])
        r = self.post('/auth/tokens', body=auth_data)
        self.assertValidProjectScopedTokenResponse(
            r,
            require_catalog=True,
            endpoint_filter=True,
            ep_filter_assoc=1)
        self.assertEqual(self.project['id'],
                         r.result['token']['project']['id'])
        # Ensure name of the service exists
        self.assertIn('name', r.result['token']['catalog'][0])
        # region and region_id should be the same in endpoints
        endpoint = r.result['token']['catalog'][0]['endpoints'][0]
        self.assertIn('region', endpoint)
        self.assertIn('region_id', endpoint)
        self.assertEqual(endpoint['region'], endpoint['region_id'])

    def test_scoped_token_with_no_catalog_using_endpoint_filter(self):
        """?nocatalog suppresses the catalog even with filters in place."""
        # create a project/endpoint association
        self.put('/OS-EP-FILTER/projects/%(project_id)s'
                 '/endpoints/%(endpoint_id)s' % {
                     'project_id': self.project['id'],
                     'endpoint_id': self.endpoint_id})
        auth_data = self.build_authentication_request(
            user_id=self.user['id'],
            password=self.user['password'],
            project_id=self.project['id'])
        r = self.post('/auth/tokens?nocatalog', body=auth_data)
        self.assertValidProjectScopedTokenResponse(
            r,
            require_catalog=False)
        self.assertEqual(self.project['id'],
                         r.result['token']['project']['id'])

    def test_invalid_endpoint_project_association(self):
        """Stale associations to deleted endpoints are ignored."""
        # add first endpoint to default project
        self.put('/OS-EP-FILTER/projects/%(project_id)s'
                 '/endpoints/%(endpoint_id)s' % {
                     'project_id': self.project['id'],
                     'endpoint_id': self.endpoint_id})
        # create a second temporary endpoint
        endpoint_id2 = uuid.uuid4().hex
        endpoint2 = unit.new_endpoint_ref(service_id=self.service_id,
                                          region_id=self.region_id,
                                          interface='public',
                                          id=endpoint_id2)
        self.catalog_api.create_endpoint(endpoint_id2, endpoint2.copy())
        # add second endpoint to default project
        self.put('/OS-EP-FILTER/projects/%(project_id)s'
                 '/endpoints/%(endpoint_id)s' % {
                     'project_id': self.project['id'],
                     'endpoint_id': endpoint_id2})
        # remove the temporary reference
        # this will create inconsistency in the endpoint filter table
        # which is fixed during the catalog creation for token request
        self.catalog_api.delete_endpoint(endpoint_id2)
        auth_data = self.build_authentication_request(
            user_id=self.user['id'],
            password=self.user['password'],
            project_id=self.project['id'])
        r = self.post('/auth/tokens', body=auth_data)
        self.assertValidProjectScopedTokenResponse(
            r,
            require_catalog=True,
            endpoint_filter=True,
            ep_filter_assoc=1)
        self.assertEqual(self.project['id'],
                         r.result['token']['project']['id'])

    def test_disabled_endpoint(self):
        """Disabled endpoints are excluded from the filtered catalog."""
        # Add an enabled endpoint to the default project
        self.put('/OS-EP-FILTER/projects/%(project_id)s'
                 '/endpoints/%(endpoint_id)s' % {
                     'project_id': self.project['id'],
                     'endpoint_id': self.endpoint_id})
        # Add a disabled endpoint to the default project.
        # Create a disabled endpoint that's like the enabled one.
        disabled_endpoint_ref = copy.copy(self.endpoint)
        disabled_endpoint_id = uuid.uuid4().hex
        disabled_endpoint_ref.update({
            'id': disabled_endpoint_id,
            'enabled': False,
            'interface': 'internal'
        })
        self.catalog_api.create_endpoint(disabled_endpoint_id,
                                         disabled_endpoint_ref)
        self.put('/OS-EP-FILTER/projects/%(project_id)s'
                 '/endpoints/%(endpoint_id)s' % {
                     'project_id': self.project['id'],
                     'endpoint_id': disabled_endpoint_id})
        # authenticate and inspect the catalog in the token
        auth_data = self.build_authentication_request(
            user_id=self.user['id'],
            password=self.user['password'],
            project_id=self.project['id'])
        r = self.post('/auth/tokens', body=auth_data)
        endpoints = r.result['token']['catalog'][0]['endpoints']
        endpoint_ids = [ep['id'] for ep in endpoints]
        # only the enabled endpoint may appear
        self.assertEqual([self.endpoint_id], endpoint_ids)

    def test_multiple_endpoint_project_associations(self):
        """The token catalog lists one entry per associated endpoint."""

        def _create_an_endpoint():
            # helper: create one public endpoint and return its id
            endpoint_ref = unit.new_endpoint_ref(service_id=self.service_id,
                                                 interface='public',
                                                 region_id=self.region_id)
            r = self.post('/endpoints', body={'endpoint': endpoint_ref})
            return r.result['endpoint']['id']

        # create three endpoints but associate only the first two
        endpoint_id1 = _create_an_endpoint()
        endpoint_id2 = _create_an_endpoint()
        _create_an_endpoint()
        self.put('/OS-EP-FILTER/projects/%(project_id)s'
                 '/endpoints/%(endpoint_id)s' % {
                     'project_id': self.project['id'],
                     'endpoint_id': endpoint_id1})
        self.put('/OS-EP-FILTER/projects/%(project_id)s'
                 '/endpoints/%(endpoint_id)s' % {
                     'project_id': self.project['id'],
                     'endpoint_id': endpoint_id2})
        # the token catalog must report exactly the two associations
        auth_data = self.build_authentication_request(
            user_id=self.user['id'],
            password=self.user['password'],
            project_id=self.project['id'])
        r = self.post('/auth/tokens', body=auth_data)
        self.assertValidProjectScopedTokenResponse(
            r,
            require_catalog=True,
            endpoint_filter=True,
            ep_filter_assoc=2)

    def test_get_auth_catalog_using_endpoint_filter(self):
        """GET /auth/catalog matches the catalog in the scoped token."""
        # associate one endpoint with the default project
        self.put('/OS-EP-FILTER/projects/%(project_id)s'
                 '/endpoints/%(endpoint_id)s' % {
                     'project_id': self.project['id'],
                     'endpoint_id': self.endpoint_id})
        auth_data = self.build_authentication_request(
            user_id=self.user['id'],
            password=self.user['password'],
            project_id=self.project['id'])
        token_data = self.post('/auth/tokens', body=auth_data)
        self.assertValidProjectScopedTokenResponse(
            token_data,
            require_catalog=True,
            endpoint_filter=True,
            ep_filter_assoc=1)
        auth_catalog = self.get('/auth/catalog',
                                token=token_data.headers['X-Subject-Token'])
        self.assertEqual(token_data.result['token']['catalog'],
                         auth_catalog.result['catalog'])
class JsonHomeTests(EndpointFilterTestCase, test_v3.JsonHomeTestMixin):
    """Check the OS-EP-FILTER relationships advertised in JSON home."""

    # Expected JSON home relationships; consumed by JsonHomeTestMixin.
    JSON_HOME_DATA = {
        'http://docs.openstack.org/api/openstack-identity/3/ext/OS-EP-FILTER/'
        '1.0/rel/endpoint_projects': {
            'href-template': '/OS-EP-FILTER/endpoints/{endpoint_id}/projects',
            'href-vars': {
                'endpoint_id':
                'http://docs.openstack.org/api/openstack-identity/3/param/'
                'endpoint_id',
            },
        },
        'http://docs.openstack.org/api/openstack-identity/3/ext/OS-EP-FILTER/'
        '1.0/rel/endpoint_groups': {
            'href': '/OS-EP-FILTER/endpoint_groups',
        },
        'http://docs.openstack.org/api/openstack-identity/3/ext/OS-EP-FILTER/'
        '1.0/rel/endpoint_group': {
            'href-template': '/OS-EP-FILTER/endpoint_groups/'
                             '{endpoint_group_id}',
            'href-vars': {
                'endpoint_group_id':
                'http://docs.openstack.org/api/openstack-identity/3/'
                'ext/OS-EP-FILTER/1.0/param/endpoint_group_id',
            },
        },
        'http://docs.openstack.org/api/openstack-identity/3/ext/OS-EP-FILTER/'
        '1.0/rel/endpoint_group_to_project_association': {
            'href-template': '/OS-EP-FILTER/endpoint_groups/'
                             '{endpoint_group_id}/projects/{project_id}',
            'href-vars': {
                'project_id':
                'http://docs.openstack.org/api/openstack-identity/3/param/'
                'project_id',
                'endpoint_group_id':
                'http://docs.openstack.org/api/openstack-identity/3/'
                'ext/OS-EP-FILTER/1.0/param/endpoint_group_id',
            },
        },
        'http://docs.openstack.org/api/openstack-identity/3/ext/OS-EP-FILTER/'
        '1.0/rel/projects_associated_with_endpoint_group': {
            'href-template': '/OS-EP-FILTER/endpoint_groups/'
                             '{endpoint_group_id}/projects',
            'href-vars': {
                'endpoint_group_id':
                'http://docs.openstack.org/api/openstack-identity/3/'
                'ext/OS-EP-FILTER/1.0/param/endpoint_group_id',
            },
        },
        'http://docs.openstack.org/api/openstack-identity/3/ext/OS-EP-FILTER/'
        '1.0/rel/endpoints_in_endpoint_group': {
            'href-template': '/OS-EP-FILTER/endpoint_groups/'
                             '{endpoint_group_id}/endpoints',
            'href-vars': {
                'endpoint_group_id':
                'http://docs.openstack.org/api/openstack-identity/3/'
                'ext/OS-EP-FILTER/1.0/param/endpoint_group_id',
            },
        },
        'http://docs.openstack.org/api/openstack-identity/3/ext/OS-EP-FILTER/'
        '1.0/rel/project_endpoint_groups': {
            'href-template': '/OS-EP-FILTER/projects/{project_id}/'
                             'endpoint_groups',
            'href-vars': {
                'project_id':
                'http://docs.openstack.org/api/openstack-identity/3/param/'
                'project_id',
            },
        },
    }
class EndpointGroupCRUDTestCase(EndpointFilterTestCase):
    """CRUD tests for endpoint groups and their project associations."""

    # Endpoint group matching every endpoint whose interface is 'admin'.
    DEFAULT_ENDPOINT_GROUP_BODY = {
        'endpoint_group': {
            'description': 'endpoint group description',
            'filters': {
                'interface': 'admin'
            },
            'name': 'endpoint_group_name'
        }
    }

    # Collection URL for endpoint groups.
    DEFAULT_ENDPOINT_GROUP_URL = '/OS-EP-FILTER/endpoint_groups'
    def test_create_endpoint_group(self):
        """POST creates a group echoing back its filters and name."""
        r = self.post(self.DEFAULT_ENDPOINT_GROUP_URL,
                      body=self.DEFAULT_ENDPOINT_GROUP_BODY)
        expected_filters = (self.DEFAULT_ENDPOINT_GROUP_BODY
                            ['endpoint_group']['filters'])
        expected_name = (self.DEFAULT_ENDPOINT_GROUP_BODY
                         ['endpoint_group']['name'])
        self.assertEqual(expected_filters,
                         r.result['endpoint_group']['filters'])
        self.assertEqual(expected_name, r.result['endpoint_group']['name'])
        # the self link must point at the newly created endpoint group
        self.assertThat(
            r.result['endpoint_group']['links']['self'],
            matchers.EndsWith(
                '/OS-EP-FILTER/endpoint_groups/%(endpoint_group_id)s' % {
                    'endpoint_group_id': r.result['endpoint_group']['id']}))
    def test_create_invalid_endpoint_group(self):
        """Creating a group with an unsupported filter key returns 400."""
        invalid_body = copy.deepcopy(self.DEFAULT_ENDPOINT_GROUP_BODY)
        # 'foobar' is not a recognized endpoint filter attribute
        invalid_body['endpoint_group']['filters'] = {'foobar': 'admin'}
        self.post(self.DEFAULT_ENDPOINT_GROUP_URL,
                  body=invalid_body,
                  expected_status=http_client.BAD_REQUEST)
def test_get_endpoint_group(self):
response = self.post(self.DEFAULT_ENDPOINT_GROUP_URL,
body=self.DEFAULT_ENDPOINT_GROUP_BODY)
endpoint_group_id = response.result['endpoint_group']['id']
endpoint_group_filters = response.result['endpoint_group']['filters']
endpoint_group_name = response.result['endpoint_group']['name']
url = '/OS-EP-FILTER/endpoint_groups/%(endpoint_group_id)s' % {
'endpoint_group_id': endpoint_group_id}
self.get(url)
self.assertEqual(endpoint_group_id,
response.result['endpoint_group']['id'])
self.assertEqual(endpoint_group_filters,
response.result['endpoint_group']['filters'])
self.assertEqual(endpoint_group_name,
response.result['endpoint_group']['name'])
self.assertThat(response.result['endpoint_group']['links']['self'],
matchers.EndsWith(url))
def test_get_invalid_endpoint_group(self):
endpoint_group_id = 'foobar'
url = '/OS-EP-FILTER/endpoint_groups/%(endpoint_group_id)s' % {
'endpoint_group_id': endpoint_group_id}
self.get(url, expected_status=http_client.NOT_FOUND)
    def test_check_endpoint_group(self):
        """HEAD on an existing endpoint group returns 200."""
        endpoint_group_id = self._create_valid_endpoint_group(
            self.DEFAULT_ENDPOINT_GROUP_URL, self.DEFAULT_ENDPOINT_GROUP_BODY)
        url = '/OS-EP-FILTER/endpoint_groups/%(endpoint_group_id)s' % {
            'endpoint_group_id': endpoint_group_id}
        self.head(url, expected_status=http_client.OK)
def test_check_invalid_endpoint_group(self):
endpoint_group_id = 'foobar'
url = '/OS-EP-FILTER/endpoint_groups/%(endpoint_group_id)s' % {
'endpoint_group_id': endpoint_group_id}
self.head(url, expected_status=http_client.NOT_FOUND)
    def test_patch_endpoint_group(self):
        """PATCH updates the filters and name of an endpoint group."""
        body = copy.deepcopy(self.DEFAULT_ENDPOINT_GROUP_BODY)
        body['endpoint_group']['filters'] = {'region_id': 'UK'}
        body['endpoint_group']['name'] = 'patch_test'
        endpoint_group_id = self._create_valid_endpoint_group(
            self.DEFAULT_ENDPOINT_GROUP_URL, self.DEFAULT_ENDPOINT_GROUP_BODY)
        url = '/OS-EP-FILTER/endpoint_groups/%(endpoint_group_id)s' % {
            'endpoint_group_id': endpoint_group_id}
        r = self.patch(url, body=body)
        # id is unchanged, filters reflect the update
        self.assertEqual(endpoint_group_id,
                         r.result['endpoint_group']['id'])
        self.assertEqual(body['endpoint_group']['filters'],
                         r.result['endpoint_group']['filters'])
        self.assertThat(r.result['endpoint_group']['links']['self'],
                        matchers.EndsWith(url))
def test_patch_nonexistent_endpoint_group(self):
body = {
'endpoint_group': {
'name': 'patch_test'
}
}
url = '/OS-EP-FILTER/endpoint_groups/%(endpoint_group_id)s' % {
'endpoint_group_id': 'ABC'}
self.patch(url, body=body, expected_status=http_client.NOT_FOUND)
def test_patch_invalid_endpoint_group(self):
body = {
'endpoint_group': {
'description': 'endpoint group description',
'filters': {
'region': 'UK'
},
'name': 'patch_test'
}
}
endpoint_group_id = self._create_valid_endpoint_group(
self.DEFAULT_ENDPOINT_GROUP_URL, self.DEFAULT_ENDPOINT_GROUP_BODY)
url = '/OS-EP-FILTER/endpoint_groups/%(endpoint_group_id)s' % {
'endpoint_group_id': endpoint_group_id}
self.patch(url, body=body, expected_status=http_client.BAD_REQUEST)
url = '/OS-EP-FILTER/endpoint_groups/%(endpoint_group_id)s' % {
'endpoint_group_id': endpoint_group_id}
r = self.get(url)
del r.result['endpoint_group']['id']
del r.result['endpoint_group']['links']
self.assertDictEqual(self.DEFAULT_ENDPOINT_GROUP_BODY, r.result)
    def test_delete_endpoint_group(self):
        """DELETE removes the group; a subsequent GET returns 404."""
        endpoint_group_id = self._create_valid_endpoint_group(
            self.DEFAULT_ENDPOINT_GROUP_URL, self.DEFAULT_ENDPOINT_GROUP_BODY)
        url = '/OS-EP-FILTER/endpoint_groups/%(endpoint_group_id)s' % {
            'endpoint_group_id': endpoint_group_id}
        self.delete(url)
        self.get(url, expected_status=http_client.NOT_FOUND)
def test_delete_invalid_endpoint_group(self):
endpoint_group_id = 'foobar'
url = '/OS-EP-FILTER/endpoint_groups/%(endpoint_group_id)s' % {
'endpoint_group_id': endpoint_group_id}
self.delete(url, expected_status=http_client.NOT_FOUND)
    def test_add_endpoint_group_to_project(self):
        """A valid endpoint group can be associated with a project."""
        endpoint_group_id = self._create_valid_endpoint_group(
            self.DEFAULT_ENDPOINT_GROUP_URL, self.DEFAULT_ENDPOINT_GROUP_BODY)
        self._create_endpoint_group_project_association(endpoint_group_id,
                                                        self.project_id)
    def test_add_endpoint_group_to_project_with_invalid_project_id(self):
        """Associating a group with an unknown project returns 404."""
        endpoint_group_id = self._create_valid_endpoint_group(
            self.DEFAULT_ENDPOINT_GROUP_URL, self.DEFAULT_ENDPOINT_GROUP_BODY)
        # Random hex stands in for a project that was never created.
        project_id = uuid.uuid4().hex
        url = self._get_project_endpoint_group_url(
            endpoint_group_id, project_id)
        self.put(url, expected_status=http_client.NOT_FOUND)
    def test_get_endpoint_group_in_project(self):
        """GET on a project/endpoint-group association echoes both IDs."""
        endpoint_group_id = self._create_valid_endpoint_group(
            self.DEFAULT_ENDPOINT_GROUP_URL, self.DEFAULT_ENDPOINT_GROUP_BODY)
        url = self._get_project_endpoint_group_url(
            endpoint_group_id, self.project_id)
        self.put(url)
        response = self.get(url)
        self.assertEqual(
            endpoint_group_id,
            response.result['project_endpoint_group']['endpoint_group_id'])
        self.assertEqual(
            self.project_id,
            response.result['project_endpoint_group']['project_id'])
    def test_get_invalid_endpoint_group_in_project(self):
        """GET on an association that was never created returns 404."""
        endpoint_group_id = uuid.uuid4().hex
        project_id = uuid.uuid4().hex
        url = self._get_project_endpoint_group_url(
            endpoint_group_id, project_id)
        self.get(url, expected_status=http_client.NOT_FOUND)
    def test_list_endpoint_groups_in_project(self):
        """Listing a project's endpoint groups includes an associated one."""
        endpoint_group_id = self._create_valid_endpoint_group(
            self.DEFAULT_ENDPOINT_GROUP_URL, self.DEFAULT_ENDPOINT_GROUP_BODY)
        url = self._get_project_endpoint_group_url(
            endpoint_group_id, self.project_id)
        self.put(url)
        url = ('/OS-EP-FILTER/projects/%(project_id)s/endpoint_groups' %
               {'project_id': self.project_id})
        response = self.get(url)
        self.assertEqual(
            endpoint_group_id,
            response.result['endpoint_groups'][0]['id'])
    def test_list_endpoint_groups_in_invalid_project(self):
        """Listing endpoint groups for an unknown project returns 404."""
        project_id = uuid.uuid4().hex
        url = ('/OS-EP-FILTER/projects/%(project_id)s/endpoint_groups' %
               {'project_id': project_id})
        self.get(url, expected_status=http_client.NOT_FOUND)
    def test_empty_endpoint_groups_in_project(self):
        """A project with no associations lists zero endpoint groups."""
        url = ('/OS-EP-FILTER/projects/%(project_id)s/endpoint_groups' %
               {'project_id': self.project_id})
        response = self.get(url)
        self.assertEqual(0, len(response.result['endpoint_groups']))
    def test_check_endpoint_group_to_project(self):
        """HEAD on an existing project association returns 200 OK."""
        endpoint_group_id = self._create_valid_endpoint_group(
            self.DEFAULT_ENDPOINT_GROUP_URL, self.DEFAULT_ENDPOINT_GROUP_BODY)
        self._create_endpoint_group_project_association(endpoint_group_id,
                                                        self.project_id)
        url = self._get_project_endpoint_group_url(
            endpoint_group_id, self.project_id)
        self.head(url, expected_status=http_client.OK)
    def test_check_endpoint_group_to_project_with_invalid_project_id(self):
        """HEAD with a valid group but unknown project returns 404."""
        endpoint_group_id = self._create_valid_endpoint_group(
            self.DEFAULT_ENDPOINT_GROUP_URL, self.DEFAULT_ENDPOINT_GROUP_BODY)
        # Associate the group with the real test project first...
        url = self._get_project_endpoint_group_url(
            endpoint_group_id, self.project_id)
        self.put(url)
        # ...then probe the same group against a random, unknown project.
        project_id = uuid.uuid4().hex
        url = self._get_project_endpoint_group_url(
            endpoint_group_id, project_id)
        self.head(url, expected_status=http_client.NOT_FOUND)
    def test_list_endpoint_groups(self):
        """GET /endpoint_groups lists a freshly created group."""
        endpoint_group_id = self._create_valid_endpoint_group(
            self.DEFAULT_ENDPOINT_GROUP_URL, self.DEFAULT_ENDPOINT_GROUP_BODY)
        url = '/OS-EP-FILTER/endpoint_groups'
        r = self.get(url)
        self.assertNotEmpty(r.result['endpoint_groups'])
        self.assertEqual(endpoint_group_id,
                         r.result['endpoint_groups'][0].get('id'))
    def test_list_projects_associated_with_endpoint_group(self):
        """Listing a group's projects succeeds once an association exists.

        Only checks that the GET does not error; the response body is not
        inspected.
        """
        endpoint_group_id = self._create_valid_endpoint_group(
            self.DEFAULT_ENDPOINT_GROUP_URL, self.DEFAULT_ENDPOINT_GROUP_BODY)
        self._create_endpoint_group_project_association(endpoint_group_id,
                                                        self.project_id)
        url = ('/OS-EP-FILTER/endpoint_groups/%(endpoint_group_id)s'
               '/projects' %
               {'endpoint_group_id': endpoint_group_id})
        self.get(url)
    def test_list_endpoints_associated_with_endpoint_group(self):
        """A group filtering on service_id lists that service's endpoint."""
        # Create a service and a public endpoint for it.
        service_ref = unit.new_service_ref()
        response = self.post(
            '/services',
            body={'service': service_ref})
        service_id = response.result['service']['id']
        endpoint_ref = unit.new_endpoint_ref(service_id=service_id,
                                             interface='public',
                                             region_id=self.region_id)
        response = self.post('/endpoints', body={'endpoint': endpoint_ref})
        endpoint_id = response.result['endpoint']['id']
        # Build an endpoint group whose filter selects that service only.
        body = copy.deepcopy(self.DEFAULT_ENDPOINT_GROUP_BODY)
        body['endpoint_group']['filters'] = {'service_id': service_id}
        endpoint_group_id = self._create_valid_endpoint_group(
            self.DEFAULT_ENDPOINT_GROUP_URL, body)
        self._create_endpoint_group_project_association(endpoint_group_id,
                                                        self.project_id)
        # The group's endpoint list should contain exactly that endpoint.
        url = ('/OS-EP-FILTER/endpoint_groups/%(endpoint_group_id)s'
               '/endpoints' % {'endpoint_group_id': endpoint_group_id})
        r = self.get(url)
        self.assertNotEmpty(r.result['endpoints'])
        self.assertEqual(endpoint_id, r.result['endpoints'][0].get('id'))
    def test_list_endpoints_associated_with_project_endpoint_group(self):
        """Project endpoint list and catalog shrink when a group is removed.

        Sets up one direct project-endpoint association plus one
        endpoint-group association (filtered on a second service), verifies
        both endpoints are visible, then deletes the group association and
        the group itself and verifies only the direct association remains.
        """
        # Second service, with endpoints associated to the default project.
        service_ref = unit.new_service_ref()
        response = self.post('/services', body={'service': service_ref})
        service_id2 = response.result['service']['id']
        self._create_endpoint_and_associations(
            self.default_domain_project_id, service_id2)
        self._create_endpoint_and_associations(
            self.default_domain_project_id)
        # Direct project-endpoint association from the test fixture.
        self.put(self.default_request_url)
        # Endpoint group filtered on the second service, tied to the project.
        body = copy.deepcopy(self.DEFAULT_ENDPOINT_GROUP_BODY)
        body['endpoint_group']['filters'] = {'service_id': service_id2}
        endpoint_group_id = self._create_valid_endpoint_group(
            self.DEFAULT_ENDPOINT_GROUP_URL, body)
        self._create_endpoint_group_project_association(
            endpoint_group_id, self.default_domain_project_id)
        # Both association paths contribute: expect two endpoints.
        endpoints_url = '/OS-EP-FILTER/projects/%(project_id)s/endpoints' % {
            'project_id': self.default_domain_project_id}
        r = self.get(endpoints_url)
        endpoints = self.assertValidEndpointListResponse(r)
        self.assertEqual(2, len(endpoints))
        user_id = uuid.uuid4().hex
        catalog_list = self.catalog_api.get_v3_catalog(
            user_id,
            self.default_domain_project_id)
        self.assertEqual(2, len(catalog_list))
        # Remove the group association, then the group itself.
        url = self._get_project_endpoint_group_url(
            endpoint_group_id, self.default_domain_project_id)
        self.delete(url)
        url = '/OS-EP-FILTER/endpoint_groups/%(endpoint_group_id)s' % {
            'endpoint_group_id': endpoint_group_id}
        self.delete(url)
        # Only the direct project-endpoint association should remain.
        r = self.get(endpoints_url)
        endpoints = self.assertValidEndpointListResponse(r)
        self.assertEqual(1, len(endpoints))
        catalog_list = self.catalog_api.get_v3_catalog(
            user_id,
            self.default_domain_project_id)
        self.assertEqual(1, len(catalog_list))
    def test_endpoint_group_project_cleanup_with_project(self):
        """Deleting a project also removes its endpoint-group association."""
        endpoint_group_id = self._create_valid_endpoint_group(
            self.DEFAULT_ENDPOINT_GROUP_URL, self.DEFAULT_ENDPOINT_GROUP_BODY)
        project_ref = unit.new_project_ref(domain_id=self.domain_id)
        r = self.post('/projects', body={'project': project_ref})
        project = self.assertValidProjectResponse(r, project_ref)
        url = self._get_project_endpoint_group_url(endpoint_group_id,
                                                   project['id'])
        self.put(url)
        self.get(url)
        # Deleting the project must cascade to the association.
        self.delete('/projects/%(project_id)s' % {
            'project_id': project['id']})
        self.get(url, expected_status=http_client.NOT_FOUND)
    def test_endpoint_group_project_cleanup_with_endpoint_group(self):
        """Deleting the association URL itself removes the association."""
        endpoint_group_id = self._create_valid_endpoint_group(
            self.DEFAULT_ENDPOINT_GROUP_URL, self.DEFAULT_ENDPOINT_GROUP_BODY)
        project_ref = unit.new_project_ref(domain_id=self.domain_id)
        r = self.post('/projects', body={'project': project_ref})
        project = self.assertValidProjectResponse(r, project_ref)
        url = self._get_project_endpoint_group_url(endpoint_group_id,
                                                   project['id'])
        self.put(url)
        self.get(url)
        self.delete(url)
        self.get(url, expected_status=http_client.NOT_FOUND)
    def test_removing_an_endpoint_group_project(self):
        """PUT then DELETE on an association leaves it gone (GET -> 404)."""
        endpoint_group_id = self._create_valid_endpoint_group(
            self.DEFAULT_ENDPOINT_GROUP_URL, self.DEFAULT_ENDPOINT_GROUP_BODY)
        url = self._get_project_endpoint_group_url(
            endpoint_group_id, self.default_domain_project_id)
        self.put(url)
        self.delete(url)
        self.get(url, expected_status=http_client.NOT_FOUND)
    def test_remove_endpoint_group_with_project_association(self):
        """Deleting an endpoint group also deletes its project association."""
        endpoint_group_id = self._create_valid_endpoint_group(
            self.DEFAULT_ENDPOINT_GROUP_URL, self.DEFAULT_ENDPOINT_GROUP_BODY)
        project_endpoint_group_url = self._get_project_endpoint_group_url(
            endpoint_group_id, self.default_domain_project_id)
        self.put(project_endpoint_group_url)
        endpoint_group_url = ('/OS-EP-FILTER/endpoint_groups/'
                              '%(endpoint_group_id)s'
                              % {'endpoint_group_id': endpoint_group_id})
        self.delete(endpoint_group_url)
        # Both the group and its association must now 404.
        self.get(endpoint_group_url, expected_status=http_client.NOT_FOUND)
        self.get(project_endpoint_group_url,
                 expected_status=http_client.NOT_FOUND)
    @unit.skip_if_cache_disabled('catalog')
    def test_add_endpoint_group_to_project_invalidates_catalog_cache(self):
        """Adding a group via the manager API invalidates the catalog cache.

        Adding the association through the raw driver (bypassing the
        manager) must NOT invalidate the cache -- the catalog stays stale.
        Re-adding it through the manager (catalog_api) must invalidate,
        after which the catalog shows both endpoints.
        """
        # Second endpoint on the same service, admin interface.
        endpoint_id2 = uuid.uuid4().hex
        endpoint2 = unit.new_endpoint_ref(service_id=self.service_id,
                                          region_id=self.region_id,
                                          interface='admin',
                                          id=endpoint_id2)
        self.catalog_api.create_endpoint(endpoint_id2, endpoint2)
        # Direct project-endpoint association; warms the catalog cache.
        self.put(self.default_request_url)
        user_id = uuid.uuid4().hex
        catalog = self.catalog_api.get_v3_catalog(
            user_id,
            self.default_domain_project_id)
        self.assertThat(catalog[0]['endpoints'], matchers.HasLength(1))
        endpoint_group_id = self._create_valid_endpoint_group(
            self.DEFAULT_ENDPOINT_GROUP_URL, self.DEFAULT_ENDPOINT_GROUP_BODY)
        # Driver-level add bypasses cache invalidation on purpose...
        self.catalog_api.driver.add_endpoint_group_to_project(
            endpoint_group_id,
            self.default_domain_project_id)
        # ...so the catalog is still the cached (stale) one endpoint.
        invalid_catalog = self.catalog_api.get_v3_catalog(
            user_id,
            self.default_domain_project_id)
        self.assertThat(invalid_catalog[0]['endpoints'],
                        matchers.HasLength(1))
        self.assertEqual(catalog, invalid_catalog)
        # Undo the driver add, then add through the manager, which must
        # invalidate the cache.
        self.catalog_api.driver.remove_endpoint_group_from_project(
            endpoint_group_id,
            self.default_domain_project_id)
        self.catalog_api.add_endpoint_group_to_project(
            endpoint_group_id,
            self.default_domain_project_id)
        catalog = self.catalog_api.get_v3_catalog(
            user_id,
            self.default_domain_project_id)
        self.assertThat(catalog[0]['endpoints'], matchers.HasLength(2))
        ep_id_list = [catalog[0]['endpoints'][0]['id'],
                      catalog[0]['endpoints'][1]['id']]
        self.assertItemsEqual([self.endpoint_id, endpoint_id2], ep_id_list)
    @unit.skip_if_cache_disabled('catalog')
    def test_remove_endpoint_group_from_project_invalidates_cache(self):
        """Removing a group via the manager API invalidates the cache.

        Mirror image of the add-side test: a driver-level remove leaves the
        cached two-endpoint catalog stale; a manager-level remove
        invalidates it, dropping the catalog back to one endpoint.
        """
        # works as expected.
        # this should be done at first since `create_endpoint` will also
        # invalidate cache.
        endpoint_id2 = uuid.uuid4().hex
        endpoint2 = unit.new_endpoint_ref(service_id=self.service_id,
                                          region_id=self.region_id,
                                          interface='admin',
                                          id=endpoint_id2)
        self.catalog_api.create_endpoint(endpoint_id2, endpoint2)
        # create project and endpoint association.
        self.put(self.default_request_url)
        # create an endpoint group.
        endpoint_group_id = self._create_valid_endpoint_group(
            self.DEFAULT_ENDPOINT_GROUP_URL, self.DEFAULT_ENDPOINT_GROUP_BODY)
        # add the endpoint group to default project.
        self.catalog_api.add_endpoint_group_to_project(
            endpoint_group_id,
            self.default_domain_project_id)
        # should get back two endpoints, one from endpoint project
        # association, the other one is from endpoint_group project
        # association.
        user_id = uuid.uuid4().hex
        catalog = self.catalog_api.get_v3_catalog(
            user_id,
            self.default_domain_project_id)
        self.assertThat(catalog[0]['endpoints'], matchers.HasLength(2))
        ep_id_list = [catalog[0]['endpoints'][0]['id'],
                      catalog[0]['endpoints'][1]['id']]
        self.assertItemsEqual([self.endpoint_id, endpoint_id2], ep_id_list)
        # remove endpoint_group project association, bypassing
        # catalog_api API manager.
        self.catalog_api.driver.remove_endpoint_group_from_project(
            endpoint_group_id,
            self.default_domain_project_id)
        # still get back two endpoints, since the catalog is pulled out
        # from cache and the cache haven't been invalidated.
        invalid_catalog = self.catalog_api.get_v3_catalog(
            user_id,
            self.default_domain_project_id)
        self.assertThat(invalid_catalog[0]['endpoints'],
                        matchers.HasLength(2))
        self.assertEqual(catalog, invalid_catalog)
        # Restore the association at driver level, then remove through the
        # manager so the cache is properly invalidated.
        self.catalog_api.driver.add_endpoint_group_to_project(
            endpoint_group_id,
            self.default_domain_project_id)
        self.catalog_api.remove_endpoint_group_from_project(
            endpoint_group_id,
            self.default_domain_project_id)
        catalog = self.catalog_api.get_v3_catalog(
            user_id,
            self.default_domain_project_id)
        self.assertThat(catalog[0]['endpoints'], matchers.HasLength(1))
        self.assertEqual(self.endpoint_id, catalog[0]['endpoints'][0]['id'])
def _create_valid_endpoint_group(self, url, body):
r = self.post(url, body=body)
return r.result['endpoint_group']['id']
def _create_endpoint_group_project_association(self,
endpoint_group_id,
project_id):
url = self._get_project_endpoint_group_url(endpoint_group_id,
project_id)
self.put(url)
def _get_project_endpoint_group_url(self,
endpoint_group_id,
project_id):
return ('/OS-EP-FILTER/endpoint_groups/%(endpoint_group_id)s'
'/projects/%(project_id)s' %
{'endpoint_group_id': endpoint_group_id,
'project_id': project_id})
    def _create_endpoint_and_associations(self, project_id, service_id=None):
        """Create a public endpoint (and a service if needed) and associate
        it with the test project; return the endpoint dict.

        NOTE(review): the *project_id* parameter is not used for the
        association below -- the PUT hard-codes self.project['id'].
        Looks like a latent bug or a deliberate fixture shortcut; confirm
        against callers before changing.
        """
        if not service_id:
            service_ref = unit.new_service_ref()
            response = self.post(
                '/services', body={'service': service_ref})
            service_id = response.result['service']['id']
        endpoint_ref = unit.new_endpoint_ref(service_id=service_id,
                                             interface='public',
                                             region_id=self.region_id)
        response = self.post('/endpoints', body={'endpoint': endpoint_ref})
        endpoint = response.result['endpoint']
        self.put('/OS-EP-FILTER/projects/%(project_id)s'
                 '/endpoints/%(endpoint_id)s' % {
                     'project_id': self.project['id'],
                     'endpoint_id': endpoint['id']})
        return endpoint
| true
| true
|
790658778885073a7b7f1ed07c9ef5cb057d811e
| 7,924
|
py
|
Python
|
py_scripts/preprocessing/prep_shard.py
|
usc-isi-i2/dig-text-similarity-search
|
88109725a44837c9c5f3ae15c630bd8c5901f1ea
|
[
"MIT"
] | 7
|
2018-08-28T22:02:34.000Z
|
2020-07-21T07:44:19.000Z
|
py_scripts/preprocessing/prep_shard.py
|
usc-isi-i2/dig-text-similarity-search
|
88109725a44837c9c5f3ae15c630bd8c5901f1ea
|
[
"MIT"
] | 11
|
2018-08-23T19:25:27.000Z
|
2019-03-01T20:34:04.000Z
|
py_scripts/preprocessing/prep_shard.py
|
usc-isi-i2/dig-text-similarity-search
|
88109725a44837c9c5f3ae15c630bd8c5901f1ea
|
[
"MIT"
] | 3
|
2020-02-18T13:24:02.000Z
|
2021-12-25T22:17:25.000Z
|
# <editor-fold desc="Basic Imports">
import os
import os.path as p
import requests
from time import time
from argparse import ArgumentParser
import sys
sys.path.append(p.join(p.dirname(__file__), '..'))
sys.path.append(p.join(p.dirname(__file__), '../..'))
# </editor-fold>
# <editor-fold desc="Parse Command Line Args">
# <editor-fold desc="Parse Command Line Args">
# Defaults resolved relative to this script's location.
prog_file_path = p.join(p.dirname(__file__), 'progress.txt')
relative_base_path = '../../base_indexes/USE_lite_base_IVF16K.index'
base_index_path = p.abspath(p.join(p.dirname(__file__), relative_base_path))
arp = ArgumentParser(description='Vectorize Sentences for Searchable Index.')
arp.add_argument('input_dir', help='Path to raw news dir.')
arp.add_argument('output_dir', help='Path to saved index dir.')
arp.add_argument('-p', '--progress_file', default=prog_file_path,
                 help='For keeping track of news that has been preprocessed. '
                      'Default: dig-text-similarity-search/progress.txt')
arp.add_argument('-b', '--base_index_path', default=base_index_path,
                 help='Path to pre-trained empty faiss index. '
                      'Default: dig-text-similarity-search/base_indexes/*.index')
arp.add_argument('-l', '--large', action='store_true',
                 help='Toggle large Universal Sentence Encoder (Transformer NN).')
arp.add_argument('-m', '--m_per_batch', type=int, default=512*128,
                 help='Sentences per batch.')
arp.add_argument('-n', '--n_per_minibatch', type=int, default=64,
                 help='Sentences per mini-batch.')
arp.add_argument('-v', '--verbose', action='store_true',
                 help='Shows progress of batch vectorization.')
arp.add_argument('-t', '--num_threads', default='2',
                 help='Set CPU thread budget for numpy.')
# NOTE: store_false means passing -d sets no_delete=False; the default
# (True) deletes sub.index files after merging -- see main().
arp.add_argument('-d', '--no_delete', action='store_false', default=True,
                 help='Keeps faiss indexes for each batch after merging on-disk.')
arp.add_argument('-a', '--add_shard', action='store_true',
                 help='Adds shard to running similarity server.')
arp.add_argument('-u', '--url', default='http://localhost:5954/faiss',
                 help='Port handling similarity server.')
arp.add_argument('-T', '--TF_logging', action='store_false', default=True,
                 help='Increase verbosity of TensorFlow.')
opts = arp.parse_args()
# </editor-fold>
# Thread caps are exported BEFORE the dt_sim imports below -- presumably so
# numpy (imported inside dt_sim) honors them; confirm before reordering.
if opts.num_threads:
    print(f'\nRestricting numpy to {opts.num_threads} thread(s)\n')
    os.environ['OPENBLAS_NUM_THREADS'] = opts.num_threads
    os.environ['NUMEXPR_NUM_THREADS'] = opts.num_threads
    os.environ['MKL_NUM_THREADS'] = opts.num_threads
    os.environ['OMP_NUM_THREADS'] = opts.num_threads
from dt_sim.data_reader.jl_io_funcs import check_all_docs, get_all_docs
from dt_sim.data_reader.misc_io_funcs import check_unique, clear_dir
from dt_sim.vectorizer.sentence_vectorizer import SentenceVectorizer
from dt_sim.indexer.index_builder import OnDiskIVFBuilder
from dt_sim.processor.corpus_processor import CorpusProcessor
# Suppress TF logging
if opts.TF_logging:
    os.environ['TF_CPP_MIN_LOG_LEVEL'] = '3'
# Init: vectorizer + on-disk IVF builder wrapped by a corpus processor.
sv = SentenceVectorizer(large=opts.large)
idx_bdr = OnDiskIVFBuilder(path_to_base_index=opts.base_index_path)
cp = CorpusProcessor(vectorizer=sv, index_builder=idx_bdr,
                     progress_file=opts.progress_file)
# Track progress: candidates = raw news files not yet recorded as done.
prepped_news = cp.track_preprocessing(cp.progress_file, verbose=opts.verbose)
raw_news = cp.get_news_paths(opts.input_dir, verbose=opts.verbose)
candidates = cp.candidate_files(prepped_news, raw_news, verbose=opts.verbose)
file_to_process = candidates[:1]    # Preprocesses one news.jl per call
def main(raw_jl, output_dir: str = opts.output_dir,
         m_per_batch: int = opts.m_per_batch, n_per_minibatch: int = opts.n_per_minibatch,
         no_delete: bool = opts.no_delete, verbose: bool = opts.verbose,
         add_shard: bool = opts.add_shard, url: str = opts.url):
    """Vectorize one raw news .jl file into per-batch faiss subindexes,
    then merge them into a single on-disk IVF shard.

    :param raw_jl: path of the raw news .jl file to process
    :param output_dir: directory receiving the merged .index/.ivfdata pair
    :param m_per_batch: sentences per vectorization batch
    :param n_per_minibatch: sentences per TF mini-batch within a batch
    :param no_delete: despite the name, True (the default) DELETES the
        per-batch sub.index files after merging; the -d flag sets it False
    :param verbose: print per-stage timings and progress
    :param add_shard: if True, PUT the merged shard path to the server
    :param url: similarity-server endpoint for the add_shard PUT
    """
    subidx_dir, shard_date = cp.init_paths(raw_jl)
    if verbose:
        print(f'Will process: {raw_jl}\n')
    # Check file content: count documents/sentences and derive batch count.
    if verbose:
        print(f'\nReading file: {raw_jl}')
    jl_stats = check_all_docs(raw_jl, batch_size=m_per_batch)
    (doc_count, line_count, junk, n_batches) = jl_stats
    if verbose:
        print(f'* Found {doc_count} good documents with {line_count} total sentences\n'
              f'* Will skip {junk} junk documents\n'
              f'* Processing {n_batches} batches\n')
    # Preprocess: one faiss subindex per document batch.
    t_start = time()
    doc_batch_gen = get_all_docs(raw_jl, batch_size=m_per_batch)
    for i, (batched_sents, batched_ids) in enumerate(doc_batch_gen):
        t_0 = time()
        if verbose:
            print(f' Starting doc batch: {i+1:3d}')
        subidx = str(raw_jl.split('/')[-1]).replace('.jl', f'_{i:03d}_sub.index')
        subidx_path = p.join(subidx_dir, subidx)
        if p.exists(subidx_path):
            # Resume support: reuse a subindex left over from a prior run.
            print(f' File exists: {subidx_path} \n Skipping... ')
            cp.index_builder.include_subidx_path(subidx_path)
        else:
            # Vectorize
            emb_batch, id_batch = cp.batch_vectorize(
                text_batch=batched_sents, id_batch=batched_ids,
                n_minibatch=n_per_minibatch, very_verbose=False
            )
            t_vect = time()
            if verbose:
                print(f' * Vectorized in {t_vect - t_0:6.2f}s')
            # Make faiss subindex
            subidx_path = check_unique(subidx_path)
            cp.index_builder.generate_subindex(subidx_path, emb_batch, id_batch)
            t_subidx = time()
            if verbose:
                print(f' * Subindexed in {t_subidx - t_vect:6.2f}s')
            # Clear graph to release memory between batches.
            del emb_batch, batched_sents, id_batch
            cp.vectorizer.close_session()
            t_reset = time()
            if verbose:
                print(f' * Cleared TF in {t_reset - t_subidx:6.2f}s')
            # Restart TF session unless this was the last batch.
            if i < n_batches - 1:
                cp.vectorizer.start_session()
                if verbose:
                    print(f' * Started TF in {time() - t_reset:6.2f}s')
        if verbose:
            mp, sp = divmod(time() - t_start, 60)
            print(f' Completed doc batch: {i+1:3d}/{n_batches} '
                  f' Total time passed: {int(mp):3d}m{sp:0.2f}s\n')
    # Merge all subindexes into one on-disk IVF shard.
    # TODO: Title indexes
    t_merge = time()
    merged_index_path = shard_date + '_all.index'
    merged_index_path = p.join(output_dir, merged_index_path)
    merged_index_path = check_unique(merged_index_path)
    merged_ivfdata_path = shard_date + '_all.ivfdata'
    merged_ivfdata_path = p.join(output_dir, merged_ivfdata_path)
    merged_ivfdata_path = check_unique(merged_ivfdata_path)
    if verbose:
        print(f'\n Merging {merged_index_path.split("/")[-1]} on-disk')
    assert cp.index_builder.index_path_clear(merged_index_path)
    assert cp.index_builder.index_path_clear(merged_ivfdata_path, '.ivfdata')
    n_vect = cp.index_builder.merge_IVFs(index_path=merged_index_path,
                                         ivfdata_path=merged_ivfdata_path)
    if verbose:
        mm, sm = divmod(time() - t_merge, 60)
        print(f' Merged subindexes ({n_vect} vectors) in: {int(mm):3d}m{sm:0.2f}s')
    # Record progress so this file is skipped on subsequent runs.
    cp.record_progress(raw_jl)
    # Clear sub.index files after merge (default behavior; see no_delete).
    if no_delete:
        clear_dir(subidx_dir)
        if verbose:
            print('\n Cleared sub.index files')
    if add_shard:
        # Best-effort notification of the running similarity server;
        # a failure here must not undo the completed merge.
        try:
            payload = {'path': merged_index_path}
            r = requests.put(url, params=payload)
            print(r.text)
        except Exception as e:
            print(f'Shard was not added because an exception occurred: {e}')
if __name__ == '__main__':
    # Process at most one candidate file per invocation (see file_to_process).
    if file_to_process:  # idiomatic truthiness instead of len(...)
        main(raw_jl=file_to_process[0])
    else:
        print('Nothing to process.')
| 40.428571
| 90
| 0.652196
|
import os
import os.path as p
import requests
from time import time
from argparse import ArgumentParser
import sys
sys.path.append(p.join(p.dirname(__file__), '..'))
sys.path.append(p.join(p.dirname(__file__), '../..'))
prog_file_path = p.join(p.dirname(__file__), 'progress.txt')
relative_base_path = '../../base_indexes/USE_lite_base_IVF16K.index'
base_index_path = p.abspath(p.join(p.dirname(__file__), relative_base_path))
arp = ArgumentParser(description='Vectorize Sentences for Searchable Index.')
arp.add_argument('input_dir', help='Path to raw news dir.')
arp.add_argument('output_dir', help='Path to saved index dir.')
arp.add_argument('-p', '--progress_file', default=prog_file_path,
help='For keeping track of news that has been preprocessed. '
'Default: dig-text-similarity-search/progress.txt')
arp.add_argument('-b', '--base_index_path', default=base_index_path,
help='Path to pre-trained empty faiss index. '
'Default: dig-text-similarity-search/base_indexes/*.index')
arp.add_argument('-l', '--large', action='store_true',
help='Toggle large Universal Sentence Encoder (Transformer NN).')
arp.add_argument('-m', '--m_per_batch', type=int, default=512*128,
help='Sentences per batch.')
arp.add_argument('-n', '--n_per_minibatch', type=int, default=64,
help='Sentences per mini-batch.')
arp.add_argument('-v', '--verbose', action='store_true',
help='Shows progress of batch vectorization.')
arp.add_argument('-t', '--num_threads', default='2',
help='Set CPU thread budget for numpy.')
arp.add_argument('-d', '--no_delete', action='store_false', default=True,
help='Keeps faiss indexes for each batch after merging on-disk.')
arp.add_argument('-a', '--add_shard', action='store_true',
help='Adds shard to running similarity server.')
arp.add_argument('-u', '--url', default='http://localhost:5954/faiss',
help='Port handling similarity server.')
arp.add_argument('-T', '--TF_logging', action='store_false', default=True,
help='Increase verbosity of TensorFlow.')
opts = arp.parse_args()
if opts.num_threads:
print(f'\nRestricting numpy to {opts.num_threads} thread(s)\n')
os.environ['OPENBLAS_NUM_THREADS'] = opts.num_threads
os.environ['NUMEXPR_NUM_THREADS'] = opts.num_threads
os.environ['MKL_NUM_THREADS'] = opts.num_threads
os.environ['OMP_NUM_THREADS'] = opts.num_threads
from dt_sim.data_reader.jl_io_funcs import check_all_docs, get_all_docs
from dt_sim.data_reader.misc_io_funcs import check_unique, clear_dir
from dt_sim.vectorizer.sentence_vectorizer import SentenceVectorizer
from dt_sim.indexer.index_builder import OnDiskIVFBuilder
from dt_sim.processor.corpus_processor import CorpusProcessor
if opts.TF_logging:
os.environ['TF_CPP_MIN_LOG_LEVEL'] = '3'
sv = SentenceVectorizer(large=opts.large)
idx_bdr = OnDiskIVFBuilder(path_to_base_index=opts.base_index_path)
cp = CorpusProcessor(vectorizer=sv, index_builder=idx_bdr,
progress_file=opts.progress_file)
prepped_news = cp.track_preprocessing(cp.progress_file, verbose=opts.verbose)
raw_news = cp.get_news_paths(opts.input_dir, verbose=opts.verbose)
candidates = cp.candidate_files(prepped_news, raw_news, verbose=opts.verbose)
file_to_process = candidates[:1]
def main(raw_jl, output_dir: str = opts.output_dir,
m_per_batch: int = opts.m_per_batch, n_per_minibatch: int = opts.n_per_minibatch,
no_delete: bool = opts.no_delete, verbose: bool = opts.verbose,
add_shard: bool = opts.add_shard, url: str = opts.url):
subidx_dir, shard_date = cp.init_paths(raw_jl)
if verbose:
print(f'Will process: {raw_jl}\n')
if verbose:
print(f'\nReading file: {raw_jl}')
jl_stats = check_all_docs(raw_jl, batch_size=m_per_batch)
(doc_count, line_count, junk, n_batches) = jl_stats
if verbose:
print(f'* Found {doc_count} good documents with {line_count} total sentences\n'
f'* Will skip {junk} junk documents\n'
f'* Processing {n_batches} batches\n')
t_start = time()
doc_batch_gen = get_all_docs(raw_jl, batch_size=m_per_batch)
for i, (batched_sents, batched_ids) in enumerate(doc_batch_gen):
t_0 = time()
if verbose:
print(f' Starting doc batch: {i+1:3d}')
subidx = str(raw_jl.split('/')[-1]).replace('.jl', f'_{i:03d}_sub.index')
subidx_path = p.join(subidx_dir, subidx)
if p.exists(subidx_path):
print(f' File exists: {subidx_path} \n Skipping... ')
cp.index_builder.include_subidx_path(subidx_path)
else:
emb_batch, id_batch = cp.batch_vectorize(
text_batch=batched_sents, id_batch=batched_ids,
n_minibatch=n_per_minibatch, very_verbose=False
)
t_vect = time()
if verbose:
print(f' * Vectorized in {t_vect - t_0:6.2f}s')
subidx_path = check_unique(subidx_path)
cp.index_builder.generate_subindex(subidx_path, emb_batch, id_batch)
t_subidx = time()
if verbose:
print(f' * Subindexed in {t_subidx - t_vect:6.2f}s')
del emb_batch, batched_sents, id_batch
cp.vectorizer.close_session()
t_reset = time()
if verbose:
print(f' * Cleared TF in {t_reset - t_subidx:6.2f}s')
if i < n_batches - 1:
cp.vectorizer.start_session()
if verbose:
print(f' * Started TF in {time() - t_reset:6.2f}s')
if verbose:
mp, sp = divmod(time() - t_start, 60)
print(f' Completed doc batch: {i+1:3d}/{n_batches} '
f' Total time passed: {int(mp):3d}m{sp:0.2f}s\n')
t_merge = time()
merged_index_path = shard_date + '_all.index'
merged_index_path = p.join(output_dir, merged_index_path)
merged_index_path = check_unique(merged_index_path)
merged_ivfdata_path = shard_date + '_all.ivfdata'
merged_ivfdata_path = p.join(output_dir, merged_ivfdata_path)
merged_ivfdata_path = check_unique(merged_ivfdata_path)
if verbose:
print(f'\n Merging {merged_index_path.split("/")[-1]} on-disk')
assert cp.index_builder.index_path_clear(merged_index_path)
assert cp.index_builder.index_path_clear(merged_ivfdata_path, '.ivfdata')
n_vect = cp.index_builder.merge_IVFs(index_path=merged_index_path,
ivfdata_path=merged_ivfdata_path)
if verbose:
mm, sm = divmod(time() - t_merge, 60)
print(f' Merged subindexes ({n_vect} vectors) in: {int(mm):3d}m{sm:0.2f}s')
cp.record_progress(raw_jl)
if no_delete:
clear_dir(subidx_dir)
if verbose:
print('\n Cleared sub.index files')
if add_shard:
try:
url = url
payload = {'path': merged_index_path}
r = requests.put(url, params=payload)
print(r.text)
except Exception as e:
print(f'Shard was not added because an exception occurred: {e}')
if __name__ == '__main__':
if len(file_to_process):
jl = file_to_process[0]
main(raw_jl=jl)
else:
print('Nothing to process.')
| true
| true
|
7906590296d72f430ee23d3c9699c4e26c77d3ba
| 8,783
|
py
|
Python
|
elasticsearch/_sync/client/text_structure.py
|
rwcanand/elasticsearch-py
|
b090faa967afb6e426a0d6060e0c92fbf6dcbea0
|
[
"Apache-2.0"
] | null | null | null |
elasticsearch/_sync/client/text_structure.py
|
rwcanand/elasticsearch-py
|
b090faa967afb6e426a0d6060e0c92fbf6dcbea0
|
[
"Apache-2.0"
] | null | null | null |
elasticsearch/_sync/client/text_structure.py
|
rwcanand/elasticsearch-py
|
b090faa967afb6e426a0d6060e0c92fbf6dcbea0
|
[
"Apache-2.0"
] | null | null | null |
# Licensed to Elasticsearch B.V. under one or more contributor
# license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright
# ownership. Elasticsearch B.V. licenses this file to you under
# the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import typing as t
from elastic_transport import ObjectApiResponse
from ._base import NamespacedClient
from .utils import _rewrite_parameters
class TextStructureClient(NamespacedClient):
    @_rewrite_parameters(
        body_name="text_files",
    )
    def find_structure(
        self,
        *,
        text_files: t.Union[t.List[t.Any], t.Tuple[t.Any, ...]],
        charset: t.Optional[str] = None,
        column_names: t.Optional[str] = None,
        delimiter: t.Optional[str] = None,
        explain: t.Optional[bool] = None,
        format: t.Optional[str] = None,
        grok_pattern: t.Optional[str] = None,
        has_header_row: t.Optional[bool] = None,
        line_merge_size_limit: t.Optional[int] = None,
        lines_to_sample: t.Optional[int] = None,
        quote: t.Optional[str] = None,
        should_trim_fields: t.Optional[bool] = None,
        timeout: t.Optional[t.Union[int, str]] = None,
        timestamp_field: t.Optional[str] = None,
        timestamp_format: t.Optional[str] = None,
    ) -> ObjectApiResponse[t.Any]:
        """
        Finds the structure of a text file. The text file must contain data that is
        suitable to be ingested into Elasticsearch.

        `<https://www.elastic.co/guide/en/elasticsearch/reference/current/find-structure.html>`_

        :param text_files: Lines of text to analyze, sent as an NDJSON body.
        :param charset: Character set of the text (e.g. UTF-8, UTF-16LE,
            windows-1252, EUC-JP); auto-detected when omitted.
        :param column_names: Comma-separated column names for delimited format;
            otherwise taken from the header row or named "column1", "column2", ...
        :param delimiter: Single delimiter character for delimited format; by
            default comma, tab, semi-colon, and pipe are considered.
        :param explain: If true, the response includes an `explanation` array
            describing how the structure finder produced its result.
        :param format: One of ndjson, xml, delimited, or semi_structured_text;
            auto-detected when omitted.
        :param grok_pattern: Grok pattern used to extract fields from every
            message when format is semi_structured_text; its timestamp field name
            must match `timestamp_field` (or "timestamp" when that is unset).
        :param has_header_row: Whether the first row of delimited text holds the
            column names; guessed from row similarity when omitted.
        :param line_merge_size_limit: Maximum characters in a message when lines
            are merged during semi-structured text analysis.
        :param lines_to_sample: Number of lines (minimum 2) included in the
            structural analysis, starting from the beginning of the text.
        :param quote: Single quote character for delimited format; defaults to a
            double quote (").
        :param should_trim_fields: Whether delimited values should have
            whitespace trimmed; defaults to true for pipe delimiters, else false.
        :param timeout: Maximum time the structure analysis may take before it
            is aborted.
        :param timestamp_field: Name of the timestamp field in the file.
        :param timestamp_format: Java time format of the timestamp field.
        """
        if text_files is None:
            raise ValueError("Empty value passed for parameter 'text_files'")
        __path = "/_text_structure/find_structure"
        __query: t.Dict[str, t.Any] = {}
        # Collect every optional query parameter that was supplied; insertion
        # order matches the keyword order above.
        optional_params = (
            ("charset", charset),
            ("column_names", column_names),
            ("delimiter", delimiter),
            ("explain", explain),
            ("format", format),
            ("grok_pattern", grok_pattern),
            ("has_header_row", has_header_row),
            ("line_merge_size_limit", line_merge_size_limit),
            ("lines_to_sample", lines_to_sample),
            ("quote", quote),
            ("should_trim_fields", should_trim_fields),
            ("timeout", timeout),
            ("timestamp_field", timestamp_field),
            ("timestamp_format", timestamp_format),
        )
        for name, value in optional_params:
            if value is not None:
                __query[name] = value
        __body = text_files
        __headers = {
            "accept": "application/json",
            "content-type": "application/x-ndjson",
        }
        return self.perform_request(  # type: ignore[return-value]
            "POST", __path, params=__query, headers=__headers, body=__body
        )
| 55.238994
| 96
| 0.669703
|
import typing as t
from elastic_transport import ObjectApiResponse
from ._base import NamespacedClient
from .utils import _rewrite_parameters
class TextStructureClient(NamespacedClient):
@_rewrite_parameters(
body_name="text_files",
)
def find_structure(
self,
*,
text_files: t.Union[t.List[t.Any], t.Tuple[t.Any, ...]],
charset: t.Optional[str] = None,
column_names: t.Optional[str] = None,
delimiter: t.Optional[str] = None,
explain: t.Optional[bool] = None,
format: t.Optional[str] = None,
grok_pattern: t.Optional[str] = None,
has_header_row: t.Optional[bool] = None,
line_merge_size_limit: t.Optional[int] = None,
lines_to_sample: t.Optional[int] = None,
quote: t.Optional[str] = None,
should_trim_fields: t.Optional[bool] = None,
timeout: t.Optional[t.Union[int, str]] = None,
timestamp_field: t.Optional[str] = None,
timestamp_format: t.Optional[str] = None,
) -> ObjectApiResponse[t.Any]:
if text_files is None:
raise ValueError("Empty value passed for parameter 'text_files'")
__path = "/_text_structure/find_structure"
__query: t.Dict[str, t.Any] = {}
if charset is not None:
__query["charset"] = charset
if column_names is not None:
__query["column_names"] = column_names
if delimiter is not None:
__query["delimiter"] = delimiter
if explain is not None:
__query["explain"] = explain
if format is not None:
__query["format"] = format
if grok_pattern is not None:
__query["grok_pattern"] = grok_pattern
if has_header_row is not None:
__query["has_header_row"] = has_header_row
if line_merge_size_limit is not None:
__query["line_merge_size_limit"] = line_merge_size_limit
if lines_to_sample is not None:
__query["lines_to_sample"] = lines_to_sample
if quote is not None:
__query["quote"] = quote
if should_trim_fields is not None:
__query["should_trim_fields"] = should_trim_fields
if timeout is not None:
__query["timeout"] = timeout
if timestamp_field is not None:
__query["timestamp_field"] = timestamp_field
if timestamp_format is not None:
__query["timestamp_format"] = timestamp_format
__body = text_files
__headers = {
"accept": "application/json",
"content-type": "application/x-ndjson",
}
return self.perform_request(
"POST", __path, params=__query, headers=__headers, body=__body
)
| true
| true
|
7906591fea522eb12da131c6f3bcc4260ebd1c0c
| 989
|
py
|
Python
|
python/example_code/greengrass/snippets/connector_modbus_rtu_usage.py
|
iconara/aws-doc-sdk-examples
|
52706b31b4fce8fb89468e56743edf5369e69628
|
[
"Apache-2.0"
] | 5,166
|
2016-09-02T08:48:38.000Z
|
2022-03-31T19:12:43.000Z
|
python/example_code/greengrass/snippets/connector_modbus_rtu_usage.py
|
iconara/aws-doc-sdk-examples
|
52706b31b4fce8fb89468e56743edf5369e69628
|
[
"Apache-2.0"
] | 1,186
|
2016-09-28T23:05:19.000Z
|
2022-03-31T18:07:47.000Z
|
python/example_code/greengrass/snippets/connector_modbus_rtu_usage.py
|
iconara/aws-doc-sdk-examples
|
52706b31b4fce8fb89468e56743edf5369e69628
|
[
"Apache-2.0"
] | 4,003
|
2016-08-29T19:51:40.000Z
|
2022-03-31T16:40:02.000Z
|
# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
# SPDX-License-Identifier: Apache-2.0
"""
Purpose
Shows how to implement an AWS Lambda function that publishes messages to an
AWS IoT Greengrass connector.
"""
# snippet-start:[greengrass.python.connector-modbus-rtu-usage.complete]
import json
import greengrasssdk
iot_client = greengrasssdk.client('iot-data')
send_topic = 'modbus/adapter/request'
def create_read_coils_request():
    """Build a sample Modbus 'read coils' request payload for device 1."""
    request = {
        "operation": "ReadCoilsRequest",
        "device": 1,
        "address": 0x01,
        "count": 1,
    }
    return {"request": request, "id": "TestRequest"}
def publish_basic_message():
    """Publish a single read-coils request to the Modbus request topic."""
    payload = json.dumps(create_read_coils_request())
    iot_client.publish(topic=send_topic, payload=payload)


publish_basic_message()
# In this example, the required AWS Lambda handler is never called.
def function_handler(event, context):
    """Required Lambda entry point; intentionally a no-op because all work
    in this example happens at module load time."""
    return
# snippet-end:[greengrass.python.connector-modbus-rtu-usage.complete]
| 24.121951
| 75
| 0.706775
|
import json
import greengrasssdk
iot_client = greengrasssdk.client('iot-data')
send_topic = 'modbus/adapter/request'
def create_read_coils_request():
return {
"request": {
"operation": "ReadCoilsRequest",
"device": 1,
"address": 0x01,
"count": 1},
"id": "TestRequest"}
def publish_basic_message():
iot_client.publish(
topic=send_topic, payload=json.dumps(create_read_coils_request()))
publish_basic_message()
def function_handler(event, context):
return
| true
| true
|
7906593965714f1263e201b03c96797d1677290a
| 1,133
|
py
|
Python
|
tools/convert_json_to_mat.py
|
MichaelGoodale/opensauce-python
|
cafad071fa1ed675b4e7177b37ed41af94b39c5f
|
[
"Apache-2.0"
] | 38
|
2015-02-10T08:35:50.000Z
|
2022-03-15T10:56:40.000Z
|
tools/convert_json_to_mat.py
|
MichaelGoodale/opensauce-python
|
cafad071fa1ed675b4e7177b37ed41af94b39c5f
|
[
"Apache-2.0"
] | 37
|
2015-09-23T00:17:07.000Z
|
2022-02-24T17:52:56.000Z
|
tools/convert_json_to_mat.py
|
CobiELF/opensauce-python
|
03c278ca92b150188821dadfc9702ff9f939aa4e
|
[
"Apache-2.0"
] | 11
|
2018-08-28T06:41:41.000Z
|
2022-01-21T05:07:40.000Z
|
# Script to convert json into proprietary .mat files
# Licensed under Apache v2 (see LICENSE)
import sys
import os
import glob
import json
from scipy.io import savemat
def main(json_dir, out_dir):
    """Convert all .json files in json_dir into corresponding .mat files
    in out_dir.

    .mat files have the same basename as the .json files. This script is
    meant for data files that contain data from OpenSauce / VoiceSauce
    variables.

    :param json_dir: directory containing the input .json files
    :param out_dir: directory the .mat files are written to (created if
        it does not exist and there is at least one input file)
    """
    # Find all .json files in json_dir
    json_files = glob.glob(os.path.join(json_dir, '*.json'))
    # Create the output directory once, outside the loop; exist_ok avoids a
    # race between checking and creating. Only create it when there is work,
    # matching the original behavior.
    if json_files:
        os.makedirs(out_dir, exist_ok=True)
    for json_file in json_files:
        with open(json_file) as f:
            json_dict = json.load(f)
        # Write json dict to a .mat file with the same basename.
        fn = os.path.join(out_dir, os.path.splitext(os.path.basename(json_file))[0]) + '.mat'
        savemat(fn, json_dict)
        print('Wrote data in {} to {}'.format(json_file, fn))
if __name__ == '__main__':
    # Usage: convert_json_to_mat.py <json_dir> <out_dir>
    main(sys.argv[1], sys.argv[2])
| 29.815789
| 93
| 0.661077
|
import sys
import os
import glob
import json
from scipy.io import savemat
def main(json_dir, out_dir):
json_files = glob.glob(os.path.join(json_dir, '*.json'))
for json_file in json_files:
with open(json_file) as f:
json_dict = json.load(f)
if not os.path.isdir(out_dir):
os.makedirs(out_dir)
fn = os.path.join(out_dir, os.path.splitext(os.path.basename(json_file))[0]) + '.mat'
savemat(fn, json_dict)
print('Wrote data in {} to {}'.format(json_file, fn))
if __name__ == '__main__':
main(sys.argv[1], sys.argv[2])
| true
| true
|
790659b5400546f61617455b3c2bc812fc214d2f
| 19,791
|
py
|
Python
|
plotting.py
|
HanMeh/dreamerv2
|
ec9904613cb70dded51017f724f05eb688c1bc3d
|
[
"MIT"
] | 1
|
2021-04-13T11:00:11.000Z
|
2021-04-13T11:00:11.000Z
|
plotting.py
|
HanMeh/dreamerv2
|
ec9904613cb70dded51017f724f05eb688c1bc3d
|
[
"MIT"
] | null | null | null |
plotting.py
|
HanMeh/dreamerv2
|
ec9904613cb70dded51017f724f05eb688c1bc3d
|
[
"MIT"
] | null | null | null |
import argparse
import collections
import functools
import itertools
import json
import multiprocessing as mp
import os
import pathlib
import re
import subprocess
import warnings
os.environ['NO_AT_BRIDGE'] = '1' # Hide X org false warning.
import matplotlib
matplotlib.use('Agg')
import matplotlib.pyplot as plt
import matplotlib.ticker as ticker
import numpy as np
import pandas as pd
# Compact repr for arrays so debug/log output stays readable.
np.set_string_function(lambda x: f'<np.array shape={x.shape} dtype={x.dtype}>')
# One training run: curve (xs, ys) for a task/method/seed combination.
# Baselines use xs=None and a scalar ys score.
Run = collections.namedtuple('Run', 'task method seed xs ys')
# Named color palettes; 'baselines' holds grayscale tones for reference lines.
PALETTES = dict(
    discrete=(
        '#377eb8', '#4daf4a', '#984ea3', '#e41a1c', '#ff7f00', '#a65628',
        '#f781bf', '#888888', '#a6cee3', '#b2df8a', '#cab2d6', '#fb9a99',
    ),
    contrast=(
        '#0022ff', '#33aa00', '#ff0011', '#ddaa00', '#cc44dd', '#0088aa',
        '#001177', '#117700', '#990022', '#885500', '#553366', '#006666',
    ),
    gradient=(
        '#fde725', '#a0da39', '#4ac16d', '#1fa187', '#277f8e', '#365c8d',
        '#46327e', '#440154',
    ),
    baselines=(
        '#222222', '#666666', '#aaaaaa', '#cccccc',
    ),
)
# Shared keyword arguments for the figure-level legend.
LEGEND = dict(
    fontsize='medium', numpoints=1, labelspacing=0, columnspacing=1.2,
    handlelength=1.5, handletextpad=0.5, ncol=4, loc='lower center')
# Baseline method patterns plotted by default ('human$' is a regex).
DEFAULT_BASELINES = [
    'd4pg', 'dqn_sticky', 'rainbow_sticky', 'human$', 'impala']
# Per-domain x-axis bin width; defaults to 0 (no binning) for other domains.
BINS = collections.defaultdict(int)
BINS.update(dmc=1e5, atari=1e6, particle=1e5)
def find_keys(args):
  """Print the union of metric keys found by sampling one run per method.

  Only the first task directory of each indir and the first seed directory
  of each method are inspected, which is enough to discover logged keys.
  """
  filenames = []
  for indir in args.indir:
    task = next(indir.iterdir())  # First only.
    for method in task.iterdir():
      # Bug fix: sample the first seed of *this method* directory, not the
      # first entry of the whole input directory.
      seed = next(method.iterdir())  # First only.
      filenames += list(seed.glob('**/*.jsonl'))
  keys = set()
  for filename in filenames:
    keys |= set(load_jsonl(filename).columns)
  print(f'Keys ({len(keys)}):', ', '.join(keys), flush=True)
def load_runs(args):
  """Load every run matching the task/method filters from all input dirs."""
  total, toload = [], []
  for indir in args.indir:
    found = list(indir.glob('**/*.jsonl'))
    total += found
    for filename in found:
      task, method, seed = filename.relative_to(indir).parts[:-1]
      task_ok = any(p.search(task) for p in args.tasks)
      method_ok = any(p.search(method) for p in args.methods)
      if task_ok and method_ok:
        toload.append((filename, indir))
  print(f'Loading {len(toload)} of {len(total)} runs...')
  jobs = [functools.partial(load_run, f, i, args) for f, i in toload]
  # Load files in parallel worker processes.
  with mp.Pool(10) as pool:
    promises = [pool.apply_async(j) for j in jobs]
    runs = [p.get() for p in promises]
  # Drop runs that failed to load or were empty.
  return [r for r in runs if r is not None]
def load_run(filename, indir, args):
  """Load a single run curve from a .jsonl file, or None if unusable."""
  task, method, seed = filename.relative_to(indir).parts[:-1]
  # Prefix disambiguates runs coming from different input directories.
  prefix = f'indir{args.indir.index(indir)+1}_'
  if task == 'atari_jamesbond':
    task = 'atari_james_bond'
  seed = prefix + seed
  if args.prefix:
    method = prefix + method
  df = load_jsonl(filename)
  if df is None:
    print('Skipping empty run')
    return
  try:
    df = df[[args.xaxis, args.yaxis]].dropna()
    # Clip infinities and extreme values to +-maxval if requested.
    if args.maxval:
      df = df.replace([+np.inf], +args.maxval)
      df = df.replace([-np.inf], -args.maxval)
      df[args.yaxis] = df[args.yaxis].clip(-args.maxval, +args.maxval)
  except KeyError:
    # Requested columns not logged in this run.
    return
  xs = df[args.xaxis].to_numpy()
  ys = df[args.yaxis].to_numpy()
  # bins == -1 selects the per-domain default bin width from BINS.
  bins = BINS[task.split('_')[0]] if args.bins == -1 else args.bins
  if bins:
    borders = np.arange(0, xs.max() + 1e-8, bins)
    xs, ys = bin_scores(xs, ys, borders)
  if not len(xs):
    print('Skipping empty run', task, method, seed)
    return
  return Run(task, method, seed, xs, ys)
def load_baselines(patterns, prefix=False):
  """Load published baseline scores whose method matches any pattern.

  Scores are read from the JSON files under the 'scores' directory next to
  this script. Baseline runs carry xs=None and a scalar ys.
  """
  runs = []
  scores_dir = pathlib.Path(__file__).parent / 'scores'
  for path in scores_dir.glob('**/*_baselines.json'):
    data = json.loads(path.read_text())
    for task, methods in data.items():
      for method, score in methods.items():
        if prefix:
          method = f'baseline_{method}'
        if any(p.search(method) for p in patterns):
          runs.append(Run(task, method, None, None, score))
  return runs
def stats(runs, baselines):
  """Print summary counts of the loaded runs and baselines."""
  def uniq(values):
    return sorted(set(values))
  tasks = uniq(r.task for r in runs)
  methods = uniq(r.method for r in runs)
  seeds = uniq(r.seed for r in runs)
  baseline = uniq(r.method for r in baselines)
  print('Loaded', len(runs), 'runs.')
  print(f'Tasks ({len(tasks)}):', ', '.join(tasks))
  print(f'Methods ({len(methods)}):', ', '.join(methods))
  print(f'Seeds ({len(seeds)}):', ', '.join(seeds))
  print(f'Baselines ({len(baseline)}):', ', '.join(baseline))
def order_methods(runs, baselines, args):
  """Return methods ordered by the CLI patterns and assign each a color.

  Experiment methods take colors from the chosen palette; baseline methods
  cycle through the grayscale 'baselines' palette. args.colors is mutated
  in place so explicit CLI color assignments are respected.
  """
  methods = []
  for pattern in args.methods:
    for method in sorted(set(r.method for r in runs)):
      if pattern.search(method):
        if method not in methods:
          methods.append(method)
        if method not in args.colors:
          index = len(args.colors) % len(args.palette)
          args.colors[method] = args.palette[index]
  # Remember how many colors were assigned to experiments so baseline colors
  # start at the beginning of their own palette.
  non_baseline_colors = len(args.colors)
  for pattern in args.baselines:
    for method in sorted(set(r.method for r in baselines)):
      if pattern.search(method):
        if method not in methods:
          methods.append(method)
        if method not in args.colors:
          index = len(args.colors) - non_baseline_colors
          index = index % len(PALETTES['baselines'])
          args.colors[method] = PALETTES['baselines'][index]
  return methods
def figure(runs, methods, args):
  """Lay out one subplot per task plus the requested aggregate panels."""
  tasks = sorted(set(r.task for r in runs if r.xs is not None))
  rows = int(np.ceil((len(tasks) + len(args.add)) / args.cols))
  figsize = args.size[0] * args.cols, args.size[1] * rows
  fig, axes = plt.subplots(rows, args.cols, figsize=figsize)
  for task, ax in zip(tasks, axes.flatten()):
    relevant = [r for r in runs if r.task == task]
    plot(task, ax, relevant, methods, args)
  # Aggregate panels (gray background) follow the per-task plots.
  for name, ax in zip(args.add, axes.flatten()[len(tasks):]):
    ax.set_facecolor((0.9, 0.9, 0.9))
    if name == 'median':
      plot_combined(
          'combined_median', ax, runs, methods, args,
          lo='random', hi='human$',
          agg=lambda x: np.nanmedian(x, -1))
    elif name == 'mean':
      plot_combined(
          'combined_mean', ax, runs, methods, args,
          lo='random', hi='human$',
          agg=lambda x: np.nanmean(x, -1))
    elif name == 'gamer_median':
      plot_combined(
          'combined_gamer_median', ax, runs, methods, args,
          lo='random', hi='human$',
          agg=lambda x: np.nanmedian(x, -1))
    elif name == 'gamer_mean':
      plot_combined(
          'combined_gamer_mean', ax, runs, methods, args,
          lo='random', hi='human$',
          agg=lambda x: np.nanmean(x, -1))
    elif name == 'record_mean':
      plot_combined(
          'combined_record_mean', ax, runs, methods, args,
          lo='random', hi='record',
          agg=lambda x: np.nanmean(x, -1))
    elif name == 'clipped_record_mean':
      plot_combined(
          'combined_clipped_record_mean', ax, runs, methods, args,
          lo='random', hi='record', clip=True,
          agg=lambda x: np.nanmean(x, -1))
    elif name == 'num_seeds':
      # Counts available seeds per step rather than aggregating scores.
      plot_combined(
          'combined_num_seeds', ax, runs, methods, args,
          agg=lambda x: np.isfinite(x).sum(-1))
    elif name == 'human_above':
      plot_combined(
          'combined_above_human$', ax, runs, methods, args,
          agg=lambda y: (y >= 1.0).astype(float).sum(-1))
    elif name == 'human_below':
      plot_combined(
          'combined_below_human$', ax, runs, methods, args,
          agg=lambda y: (y <= 1.0).astype(float).sum(-1))
    else:
      raise NotImplementedError(name)
  if args.xlim:
    # Hide the scientific-notation offset text on all but the bottom row.
    for ax in axes[:-1].flatten():
      ax.xaxis.get_offset_text().set_visible(False)
  if args.xlabel:
    for ax in axes[-1]:
      ax.set_xlabel(args.xlabel)
  if args.ylabel:
    for ax in axes[:, 0]:
      ax.set_ylabel(args.ylabel)
  # Turn off any unused trailing axes in the grid.
  for ax in axes.flatten()[len(tasks) + len(args.add):]:
    ax.axis('off')
  legend(fig, args.labels, **LEGEND)
  return fig
def plot(task, ax, runs, methods, args):
  """Draw all methods for a single task onto the given axes."""
  assert runs
  try:
    # 'domain_task_name' -> 'Task Name'.
    title = task.split('_', 1)[1].replace('_', ' ').title()
  except IndexError:
    title = task.title()
  ax.set_title(title)
  xlim = [+np.inf, -np.inf]
  for index, method in enumerate(methods):
    relevant = [r for r in runs if r.method == method]
    if not relevant:
      continue
    if any(r.xs is None for r in relevant):
      # Scalar baselines are drawn as horizontal reference lines.
      baseline(index, method, ax, relevant, args)
    else:
      if args.aggregate == 'none':
        xs, ys = curve_lines(index, task, method, ax, relevant, args)
      else:
        xs, ys = curve_area(index, task, method, ax, relevant, args)
      if len(xs) == len(ys) == 0:
        print(f'Skipping empty: {task} {method}')
        continue
      xlim = [min(xlim[0], xs.min()), max(xlim[1], xs.max())]
  ax.ticklabel_format(axis='x', style='sci', scilimits=(0, 0))
  steps = [1, 2, 2.5, 5, 10]
  ax.xaxis.set_major_locator(ticker.MaxNLocator(args.xticks, steps=steps))
  ax.yaxis.set_major_locator(ticker.MaxNLocator(args.yticks, steps=steps))
  if np.isfinite(xlim).all():
    ax.set_xlim(args.xlim or xlim)
  if args.xlim:
    # Force tick marks at the requested x limits.
    ticks = sorted({*ax.get_xticks(), *args.xlim})
    ticks = [x for x in ticks if args.xlim[0] <= x <= args.xlim[1]]
    ax.set_xticks(ticks)
  if args.ylim:
    ax.set_ylim(args.ylim)
    if args.ylimticks:
      # Force tick marks at the requested y limits.
      ticks = sorted({*ax.get_yticks(), *args.ylim})
      ticks = [x for x in ticks if args.ylim[0] <= x <= args.ylim[1]]
      ax.set_yticks(ticks)
def plot_combined(
    name, ax, runs, methods, args, agg, lo=None, hi=None, clip=False):
  """Aggregate scores across all tasks into a single summary panel.

  agg reduces the per-seed score matrix along its last axis. When lo/hi
  baseline patterns are given, each task is first normalized between those
  baseline scores (optionally clipped at 1.0).
  """
  tasks = sorted(set(run.task for run in runs if run.xs is not None))
  seeds = list(set(run.seed for run in runs))
  runs = [r for r in runs if r.task in tasks]  # Discard unused baselines.
  # Bin all runs onto the same X steps.
  borders = sorted(
      [r.xs for r in runs if r.xs is not None],
      key=lambda x: np.nanmax(x))[-1]
  for index, run in enumerate(runs):
    if run.xs is None:
      continue
    xs, ys = bin_scores(run.xs, run.ys, borders)
    runs[index] = run._replace(xs=xs, ys=ys)
  # Per-task normalization by low and high baseline.
  if lo or hi:
    mins = collections.defaultdict(list)
    maxs = collections.defaultdict(list)
    [mins[r.task].append(r.ys) for r in load_baselines([re.compile(lo)])]
    [maxs[r.task].append(r.ys) for r in load_baselines([re.compile(hi)])]
    mins = {task: min(ys) for task, ys in mins.items() if task in tasks}
    maxs = {task: max(ys) for task, ys in maxs.items() if task in tasks}
    missing_baselines = []
    for task in tasks:
      if task not in mins or task not in maxs:
        missing_baselines.append(task)
    if set(missing_baselines) == set(tasks):
      print(f'No baselines found to normalize any tasks in {name} plot.')
    else:
      for task in missing_baselines:
        print(f'No baselines found to normalize {task} in {name} plot.')
    for index, run in enumerate(runs):
      if run.task not in mins or run.task not in maxs:
        continue
      ys = (run.ys - mins[run.task]) / (maxs[run.task] - mins[run.task])
      if clip:
        ys = np.minimum(ys, 1.0)
      runs[index] = run._replace(ys=ys)
  # Aggregate across tasks but not methods or seeds.
  combined = []
  for method, seed in itertools.product(methods, seeds):
    relevant = [r for r in runs if r.method == method and r.seed == seed]
    if not relevant:
      continue
    if relevant[0].xs is None:
      xs, ys = None, np.array([r.ys for r in relevant])
    else:
      xs, ys = stack_scores(*zip(*[(r.xs, r.ys) for r in relevant]))
    with warnings.catch_warnings():  # Ignore empty slice warnings.
      warnings.simplefilter('ignore', category=RuntimeWarning)
      combined.append(Run('combined', method, seed, xs, agg(ys)))
  plot(name, ax, combined, methods, args)
def curve_lines(index, task, method, ax, runs, args):
  """Draw one line per run (no aggregation across seeds)."""
  zorder = 10000 - 10 * index - 1
  color = args.colors[method]
  for run in runs:
    ax.plot(run.xs, run.ys, label=method, color=color, zorder=zorder)
  first = runs[0]
  return first.xs, first.ys
def curve_area(index, task, method, ax, runs, args):
  """Plot the aggregate of multiple seeds as a line with a shaded band."""
  xs, ys = stack_scores(*zip(*[(r.xs, r.ys) for r in runs]))
  with warnings.catch_warnings():  # NaN buckets remain NaN.
    warnings.simplefilter('ignore', category=RuntimeWarning)
    if args.aggregate == 'std1':
      # Mean line with a one-standard-deviation band.
      mean, std = np.nanmean(ys, -1), np.nanstd(ys, -1)
      lo, mi, hi = mean - std, mean, mean + std
    elif args.aggregate == 'per0':
      # Median line with a min/max band.
      lo, mi, hi = [np.nanpercentile(ys, k, -1) for k in (0, 50, 100)]
    elif args.aggregate == 'per5':
      lo, mi, hi = [np.nanpercentile(ys, k, -1) for k in (5, 50, 95)]
    elif args.aggregate == 'per25':
      lo, mi, hi = [np.nanpercentile(ys, k, -1) for k in (25, 50, 75)]
    else:
      raise NotImplementedError(args.aggregate)
  color = args.colors[method]
  kw = dict(color=color, zorder=1000 - 10 * index, alpha=0.1, linewidths=0)
  ax.fill_between(xs, lo, hi, **kw)
  ax.plot(xs, mi, label=method, color=color, zorder=10000 - 10 * index - 1)
  return xs, mi
def baseline(index, method, ax, runs, args):
  """Draw a horizontal baseline (mean with a std band) across the plot."""
  assert all(run.xs is None for run in runs)
  scores = np.array([run.ys for run in runs])
  mean, std = scores.mean(), scores.std()
  color = args.colors[method]
  band_kw = dict(
      color=color, zorder=500 - 20 * index - 1, alpha=0.1, linewidths=0)
  ax.fill_between([-np.inf, np.inf], [mean - std] * 2, [mean + std] * 2,
                  **band_kw)
  line_kw = dict(ls='--', color=color, zorder=5000 - 10 * index - 1)
  ax.axhline(mean, label=method, **line_kw)
def legend(fig, mapping=None, **kwargs):
  """Build a single deduplicated figure-level legend and reserve space.

  mapping optionally renames labels. The tight_layout rectangle is shrunk
  on the side given by kwargs['loc'] so the legend does not overlap axes.
  """
  entries = {}
  for ax in fig.axes:
    for handle, label in zip(*ax.get_legend_handles_labels()):
      if mapping and label in mapping:
        label = mapping[label]
      entries[label] = handle  # Last handle wins for duplicate labels.
  leg = fig.legend(entries.values(), entries.keys(), **kwargs)
  leg.get_frame().set_edgecolor('white')
  # Convert the legend bounding box into figure coordinates.
  extent = leg.get_window_extent(fig.canvas.get_renderer())
  extent = extent.transformed(fig.transFigure.inverted())
  yloc, xloc = kwargs['loc'].split()
  y0 = dict(lower=extent.y1, center=0, upper=0)[yloc]
  y1 = dict(lower=1, center=1, upper=extent.y0)[yloc]
  x0 = dict(left=extent.x1, center=0, right=0)[xloc]
  x1 = dict(left=1, center=1, right=extent.x0)[xloc]
  fig.tight_layout(rect=[x0, y0, x1, y1], h_pad=0.5, w_pad=0.5)
def save(fig, args):
  """Write the figure as PNG and as a (cropped, if possible) PDF."""
  args.outdir.mkdir(parents=True, exist_ok=True)
  png = args.outdir / 'curves.png'
  fig.savefig(png, dpi=args.dpi)
  print('Saved to', png)
  pdf = args.outdir / 'curves.pdf'
  fig.savefig(pdf)
  try:
    # Trim whitespace around the PDF in place when pdfcrop is installed.
    subprocess.call(['pdfcrop', str(pdf), str(pdf)])
  except FileNotFoundError:
    print('Install texlive-extra-utils to crop PDF outputs.')
def bin_scores(xs, ys, borders, reducer=np.nanmean):
  """Aggregate (xs, ys) into buckets delimited by `borders`.

  Returns the right edge of each bucket and the reduced y value per bucket
  (NaN for empty buckets).
  """
  order = np.argsort(xs)
  xs, ys = xs[order], ys[order]
  with warnings.catch_warnings():  # Empty buckets become NaN.
    warnings.simplefilter('ignore', category=RuntimeWarning)
    binned = [
        reducer(ys[(xs <= start).sum():(xs <= stop).sum()])
        for start, stop in zip(borders[:-1], borders[1:])]
  return borders[1:], np.array(binned)
def stack_scores(multiple_xs, multiple_ys):
  """Stack several curves into one (xs, ys-matrix), padding short curves.

  Every xs must be a prefix of the longest xs; missing tail values are
  padded with inf so nan-aware reducers can ignore them.
  """
  longest_xs = max(multiple_xs, key=len)
  padded = []
  for xs, ys in zip(multiple_xs, multiple_ys):
    assert (longest_xs[:len(xs)] == xs).all(), (list(xs), list(longest_xs))
    tail = [np.inf] * (len(longest_xs) - len(xs))
    padded.append(np.concatenate([ys, tail]))
  return longest_xs, np.stack(padded, -1)
def load_jsonl(filename):
  """Parse a .jsonl file into a DataFrame; returns None on invalid input.

  A truncated final line (e.g. from a run killed mid-write) is silently
  skipped; any other malformed line invalidates the whole file.
  """
  try:
    with filename.open() as f:
      lines = f.readlines()
    records = []
    for index, line in enumerate(lines):
      try:
        records.append(json.loads(line))
      except Exception:
        if index == len(lines) - 1:
          continue  # Silently skip last line if it is incomplete.
        # Bug fix: report the correct total line count and the offending
        # file instead of the misleading 'Skipping ... (unknown)' message.
        raise ValueError(
            f'Invalid JSON line ({index+1}/{len(lines)}) in '
            f'{filename}: {line}')
    return pd.DataFrame(records)
  except ValueError as e:
    print('Invalid', filename, e)
    return None
def save_runs(runs, filename):
  """Serialize all loaded run curves (excluding baselines) to JSON."""
  filename.parent.mkdir(parents=True, exist_ok=True)
  records = [
      dict(task=r.task, method=r.method, seed=r.seed,
           xs=r.xs.tolist(), ys=r.ys.tolist())
      for r in runs if r.xs is not None]
  filename.write_text(json.dumps(records))
  print('Saved', filename)
def main(args):
  """Entry point: load runs and baselines, then render and save the figure."""
  find_keys(args)
  runs = load_runs(args)
  save_runs(runs, args.outdir / 'runs.jsonl')
  baselines = load_baselines(args.baselines, args.prefix)
  stats(runs, baselines)
  methods = order_methods(runs, baselines, args)
  if not runs:
    # Typo fix: message previously read 'Noting to plot.'.
    print('Nothing to plot.')
    return
  print('Plotting...')
  fig = figure(runs + baselines, methods, args)
  save(fig, args)
def parse_args():
  """Define the CLI and post-process the parsed arguments."""
  # Maps the strings 'False'/'True' to booleans (raises on anything else).
  boolean = lambda x: bool(['False', 'True'].index(x))
  parser = argparse.ArgumentParser()
  parser.add_argument('--indir', nargs='+', type=pathlib.Path, required=True)
  parser.add_argument('--outdir', type=pathlib.Path, required=True)
  parser.add_argument('--subdir', type=boolean, default=True)
  parser.add_argument('--xaxis', type=str, required=True)
  parser.add_argument('--yaxis', type=str, required=True)
  parser.add_argument('--tasks', nargs='+', default=[r'.*'])
  parser.add_argument('--methods', nargs='+', default=[r'.*'])
  parser.add_argument('--baselines', nargs='+', default=DEFAULT_BASELINES)
  parser.add_argument('--prefix', type=boolean, default=False)
  parser.add_argument('--bins', type=float, default=-1)
  parser.add_argument('--aggregate', type=str, default='std1')
  parser.add_argument('--size', nargs=2, type=float, default=[2.5, 2.3])
  parser.add_argument('--dpi', type=int, default=80)
  parser.add_argument('--cols', type=int, default=6)
  parser.add_argument('--xlim', nargs=2, type=float, default=None)
  parser.add_argument('--ylim', nargs=2, type=float, default=None)
  parser.add_argument('--ylimticks', type=boolean, default=True)
  parser.add_argument('--xlabel', type=str, default=None)
  parser.add_argument('--ylabel', type=str, default=None)
  parser.add_argument('--xticks', type=int, default=6)
  parser.add_argument('--yticks', type=int, default=5)
  parser.add_argument('--labels', nargs='+', default=None)
  parser.add_argument('--palette', nargs='+', default=['contrast'])
  parser.add_argument('--colors', nargs='+', default={})
  parser.add_argument('--maxval', type=float, default=0)
  parser.add_argument('--add', nargs='+', type=str, default=[
      'gamer_median', 'gamer_mean', 'record_mean',
      'clipped_record_mean', 'num_seeds'])
  args = parser.parse_args()
  if args.subdir:
    args.outdir /= args.indir[0].stem
  args.indir = [d.expanduser() for d in args.indir]
  args.outdir = args.outdir.expanduser()
  if args.labels:
    assert len(args.labels) % 2 == 0
    # NOTE(review): zipping overlapping neighbors also creates spurious
    # value->value entries; verify labels are meant as flat key/value pairs.
    args.labels = {k: v for k, v in zip(args.labels[:-1], args.labels[1:])}
  if args.colors:
    assert len(args.colors) % 2 == 0
    # NOTE(review): same overlapping-pair concern as --labels above.
    args.colors = {k: v for k, v in zip(args.colors[:-1], args.colors[1:])}
  args.tasks = [re.compile(p) for p in args.tasks]
  args.methods = [re.compile(p) for p in args.methods]
  args.baselines = [re.compile(p) for p in args.baselines]
  # Baseline scores are returns; skip them when plotting other metrics.
  if 'return' not in args.yaxis:
    args.baselines = []
  if args.prefix is None:
    args.prefix = len(args.indir) > 1
  if len(args.palette) == 1 and args.palette[0] in PALETTES:
    # Repeat the named palette so long method lists never run out of colors.
    args.palette = 10 * PALETTES[args.palette[0]]
  if len(args.add) == 1 and args.add[0] == 'none':
    args.add = []
  return args
if __name__ == '__main__':
  # Parse CLI flags and generate the figure.
  main(parse_args())
| 36.582255
| 79
| 0.637411
|
import argparse
import collections
import functools
import itertools
import json
import multiprocessing as mp
import os
import pathlib
import re
import subprocess
import warnings
os.environ['NO_AT_BRIDGE'] = '1'
import matplotlib
matplotlib.use('Agg')
import matplotlib.pyplot as plt
import matplotlib.ticker as ticker
import numpy as np
import pandas as pd
np.set_string_function(lambda x: f'<np.array shape={x.shape} dtype={x.dtype}>')
# A single curve: `xs` are env steps, `ys` metric values. Baselines are
# stored with `xs=None` and a scalar `ys`.
Run = collections.namedtuple('Run', 'task method seed xs ys')
# Named color palettes selectable via --palette.
PALETTES = dict(
    discrete=(
        '#377eb8', '#4daf4a', '#984ea3', '#e41a1c', '#ff7f00', '#a65628',
        '#f781bf', '#888888', '#a6cee3', '#b2df8a', '#cab2d6', '#fb9a99',
    ),
    contrast=(
        '#0022ff', '#33aa00', '#ff0011', '#ddaa00', '#cc44dd', '#0088aa',
        '#001177', '#117700', '#990022', '#885500', '#553366', '#006666',
    ),
    gradient=(
        '#fde725', '#a0da39', '#4ac16d', '#1fa187', '#277f8e', '#365c8d',
        '#46327e', '#440154',
    ),
    # Grays reserved for baseline (horizontal) lines.
    baselines=(
        '#222222', '#666666', '#aaaaaa', '#cccccc',
    ),
)
# Keyword arguments forwarded to fig.legend() in legend().
LEGEND = dict(
    fontsize='medium', numpoints=1, labelspacing=0, columnspacing=1.2,
    handlelength=1.5, handletextpad=0.5, ncol=4, loc='lower center')
DEFAULT_BASELINES = [
    'd4pg', 'dqn_sticky', 'rainbow_sticky', 'human$', 'impala']
# Per-domain default bin width for the x axis; 0 means no binning.
BINS = collections.defaultdict(int)
BINS.update(dmc=1e5, atari=1e6, particle=1e5)
def find_keys(args):
  """Print the union of metric keys found in sampled JSONL run files.

  Only the first task per input directory and the first seed per method are
  sampled, which is enough to discover the available columns cheaply.
  """
  filenames = []
  for indir in args.indir:
    task = next(indir.iterdir())
    for method in task.iterdir():
      # Sample the first seed of each method. This previously called
      # `next(indir.iterdir())`, re-reading the top-level directory and
      # globbing the wrong folder.
      seed = next(method.iterdir())
      filenames += list(seed.glob('**/*.jsonl'))
  keys = set()
  for filename in filenames:
    keys |= set(load_jsonl(filename).columns)
  print(f'Keys ({len(keys)}):', ', '.join(keys), flush=True)
def load_runs(args):
  """Discover JSONL run files under the input dirs and load them in parallel.

  Files are filtered by the --tasks and --methods regex patterns; empty or
  unreadable runs are dropped. Returns a list of Run tuples.
  """
  total, toload = [], []
  for indir in args.indir:
    filenames = list(indir.glob('**/*.jsonl'))
    total += filenames
    for filename in filenames:
      # Directory layout is <indir>/<task>/<method>/<seed>/<file>.jsonl.
      task, method, seed = filename.relative_to(indir).parts[:-1]
      if not any(p.search(task) for p in args.tasks):
        continue
      if not any(p.search(method) for p in args.methods):
        continue
      toload.append((filename, indir))
  print(f'Loading {len(toload)} of {len(total)} runs...')
  jobs = [functools.partial(load_run, f, i, args) for f, i in toload]
  # Parse files in a process pool; JSON decoding dominates the cost.
  with mp.Pool(10) as pool:
    promises = [pool.apply_async(j) for j in jobs]
    runs = [p.get() for p in promises]
  runs = [r for r in runs if r is not None]
  return runs
def load_run(filename, indir, args):
  """Load one JSONL run file into a Run tuple, or None if it is unusable."""
  task, method, seed = filename.relative_to(indir).parts[:-1]
  # Prefix disambiguates runs when several input directories are given.
  prefix = f'indir{args.indir.index(indir)+1}_'
  if task == 'atari_jamesbond':
    task = 'atari_james_bond'
  seed = prefix + seed
  if args.prefix:
    method = prefix + method
  df = load_jsonl(filename)
  if df is None:
    print('Skipping empty run')
    return
  try:
    df = df[[args.xaxis, args.yaxis]].dropna()
    if args.maxval:
      # Clamp infinities and outliers into [-maxval, +maxval].
      df = df.replace([+np.inf], +args.maxval)
      df = df.replace([-np.inf], -args.maxval)
      df[args.yaxis] = df[args.yaxis].clip(-args.maxval, +args.maxval)
  except KeyError:
    return
  xs = df[args.xaxis].to_numpy()
  ys = df[args.yaxis].to_numpy()
  # --bins -1 selects the per-domain default from BINS; 0 disables binning.
  bins = BINS[task.split('_')[0]] if args.bins == -1 else args.bins
  if bins:
    borders = np.arange(0, xs.max() + 1e-8, bins)
    xs, ys = bin_scores(xs, ys, borders)
  if not len(xs):
    print('Skipping empty run', task, method, seed)
    return
  return Run(task, method, seed, xs, ys)
def load_baselines(patterns, prefix=False):
  """Load scalar baseline scores from the bundled `scores` directory.

  Returns Run tuples with `xs=None` so they render as horizontal lines.
  """
  runs = []
  directory = pathlib.Path(__file__).parent / 'scores'
  for filename in directory.glob('**/*_baselines.json'):
    scores = json.loads(filename.read_text())
    for task, methods in scores.items():
      for method, score in methods.items():
        name = f'baseline_{method}' if prefix else method
        if any(pattern.search(name) for pattern in patterns):
          runs.append(Run(task, name, None, None, score))
  return runs
def stats(runs, baselines):
  """Print a summary of the loaded runs and baselines."""
  def uniq(items, key):
    return sorted({key(item) for item in items})
  tasks = uniq(runs, lambda r: r.task)
  methods = uniq(runs, lambda r: r.method)
  seeds = uniq(runs, lambda r: r.seed)
  baseline = uniq(baselines, lambda r: r.method)
  print('Loaded', len(runs), 'runs.')
  print(f'Tasks ({len(tasks)}):', ', '.join(tasks))
  print(f'Methods ({len(methods)}):', ', '.join(methods))
  print(f'Seeds ({len(seeds)}):', ', '.join(seeds))
  print(f'Baselines ({len(baseline)}):', ', '.join(baseline))
def order_methods(runs, baselines, args):
  """Return methods ordered by the CLI patterns and assign each a color.

  Mutates args.colors in place: experiment methods draw from args.palette,
  baseline methods from the gray PALETTES['baselines'] cycle.
  """
  methods = []
  for pattern in args.methods:
    for method in sorted(set(r.method for r in runs)):
      if pattern.search(method):
        if method not in methods:
          methods.append(method)
        if method not in args.colors:
          index = len(args.colors) % len(args.palette)
          args.colors[method] = args.palette[index]
  # Remember how many colors were used so baseline colors restart at gray 0.
  non_baseline_colors = len(args.colors)
  for pattern in args.baselines:
    for method in sorted(set(r.method for r in baselines)):
      if pattern.search(method):
        if method not in methods:
          methods.append(method)
        if method not in args.colors:
          index = len(args.colors) - non_baseline_colors
          index = index % len(PALETTES['baselines'])
          args.colors[method] = PALETTES['baselines'][index]
  return methods
def figure(runs, methods, args):
  """Build the full grid of per-task plots plus requested summary panels.

  One subplot per task, followed by one subplot per entry in args.add
  (aggregate panels such as gamer-normalized medians); unused axes are
  hidden and a shared legend is placed below the grid.
  """
  tasks = sorted(set(r.task for r in runs if r.xs is not None))
  rows = int(np.ceil((len(tasks) + len(args.add)) / args.cols))
  figsize = args.size[0] * args.cols, args.size[1] * rows
  fig, axes = plt.subplots(rows, args.cols, figsize=figsize)
  for task, ax in zip(tasks, axes.flatten()):
    relevant = [r for r in runs if r.task == task]
    plot(task, ax, relevant, methods, args)
  # Summary panels get a gray background to stand out from task panels.
  for name, ax in zip(args.add, axes.flatten()[len(tasks):]):
    ax.set_facecolor((0.9, 0.9, 0.9))
    if name == 'median':
      plot_combined(
          'combined_median', ax, runs, methods, args,
          lo='random', hi='human$',
          agg=lambda x: np.nanmedian(x, -1))
    elif name == 'mean':
      plot_combined(
          'combined_mean', ax, runs, methods, args,
          lo='random', hi='human$',
          agg=lambda x: np.nanmean(x, -1))
    elif name == 'gamer_median':
      plot_combined(
          'combined_gamer_median', ax, runs, methods, args,
          lo='random', hi='human$',
          agg=lambda x: np.nanmedian(x, -1))
    elif name == 'gamer_mean':
      plot_combined(
          'combined_gamer_mean', ax, runs, methods, args,
          lo='random', hi='human$',
          agg=lambda x: np.nanmean(x, -1))
    elif name == 'record_mean':
      plot_combined(
          'combined_record_mean', ax, runs, methods, args,
          lo='random', hi='record',
          agg=lambda x: np.nanmean(x, -1))
    elif name == 'clipped_record_mean':
      plot_combined(
          'combined_clipped_record_mean', ax, runs, methods, args,
          lo='random', hi='record', clip=True,
          agg=lambda x: np.nanmean(x, -1))
    elif name == 'num_seeds':
      plot_combined(
          'combined_num_seeds', ax, runs, methods, args,
          agg=lambda x: np.isfinite(x).sum(-1))
    elif name == 'human_above':
      plot_combined(
          'combined_above_human$', ax, runs, methods, args,
          agg=lambda y: (y >= 1.0).astype(float).sum(-1))
    elif name == 'human_below':
      plot_combined(
          'combined_below_human$', ax, runs, methods, args,
          agg=lambda y: (y <= 1.0).astype(float).sum(-1))
    else:
      raise NotImplementedError(name)
  # Only the bottom row keeps the scientific-notation offset text.
  if args.xlim:
    for ax in axes[:-1].flatten():
      ax.xaxis.get_offset_text().set_visible(False)
  if args.xlabel:
    for ax in axes[-1]:
      ax.set_xlabel(args.xlabel)
  if args.ylabel:
    for ax in axes[:, 0]:
      ax.set_ylabel(args.ylabel)
  for ax in axes.flatten()[len(tasks) + len(args.add):]:
    ax.axis('off')
  legend(fig, args.labels, **LEGEND)
  return fig
def plot(task, ax, runs, methods, args):
  """Draw all methods for one task onto an axis.

  Curve runs are drawn as lines or aggregate areas; baseline runs
  (xs is None) as horizontal lines. Also configures ticks and limits.
  """
  assert runs
  try:
    # 'atari_james_bond' -> 'James Bond'; tasks without a domain prefix
    # fall back to title-casing the whole name.
    title = task.split('_', 1)[1].replace('_', ' ').title()
  except IndexError:
    title = task.title()
  ax.set_title(title)
  xlim = [+np.inf, -np.inf]
  for index, method in enumerate(methods):
    relevant = [r for r in runs if r.method == method]
    if not relevant:
      continue
    if any(r.xs is None for r in relevant):
      baseline(index, method, ax, relevant, args)
    else:
      if args.aggregate == 'none':
        xs, ys = curve_lines(index, task, method, ax, relevant, args)
      else:
        xs, ys = curve_area(index, task, method, ax, relevant, args)
      if len(xs) == len(ys) == 0:
        print(f'Skipping empty: {task} {method}')
        continue
      # Track the union of x ranges across methods for the axis limits.
      xlim = [min(xlim[0], xs.min()), max(xlim[1], xs.max())]
  ax.ticklabel_format(axis='x', style='sci', scilimits=(0, 0))
  steps = [1, 2, 2.5, 5, 10]
  ax.xaxis.set_major_locator(ticker.MaxNLocator(args.xticks, steps=steps))
  ax.yaxis.set_major_locator(ticker.MaxNLocator(args.yticks, steps=steps))
  if np.isfinite(xlim).all():
    ax.set_xlim(args.xlim or xlim)
  if args.xlim:
    # Force ticks exactly at the requested limits.
    ticks = sorted({*ax.get_xticks(), *args.xlim})
    ticks = [x for x in ticks if args.xlim[0] <= x <= args.xlim[1]]
    ax.set_xticks(ticks)
  if args.ylim:
    ax.set_ylim(args.ylim)
    if args.ylimticks:
      ticks = sorted({*ax.get_yticks(), *args.ylim})
      ticks = [x for x in ticks if args.ylim[0] <= x <= args.ylim[1]]
      ax.set_yticks(ticks)
def plot_combined(
    name, ax, runs, methods, args, agg, lo=None, hi=None, clip=False):
  """Plot an aggregate panel that combines scores across all tasks.

  Runs are re-binned onto a shared x grid, optionally normalized between
  the `lo` and `hi` baseline scores per task, stacked per (method, seed),
  and reduced over tasks with `agg` before being plotted like a task.
  """
  tasks = sorted(set(run.task for run in runs if run.xs is not None))
  seeds = list(set(run.seed for run in runs))
  runs = [r for r in runs if r.task in tasks]
  # Use the x grid of the run reaching the furthest as the shared borders.
  borders = sorted(
      [r.xs for r in runs if r.xs is not None],
      key=lambda x: np.nanmax(x))[-1]
  for index, run in enumerate(runs):
    if run.xs is None:
      continue
    xs, ys = bin_scores(run.xs, run.ys, borders)
    runs[index] = run._replace(xs=xs, ys=ys)
  if lo or hi:
    # Normalize each task's scores to [0, 1] between its lo/hi baselines.
    mins = collections.defaultdict(list)
    maxs = collections.defaultdict(list)
    [mins[r.task].append(r.ys) for r in load_baselines([re.compile(lo)])]
    [maxs[r.task].append(r.ys) for r in load_baselines([re.compile(hi)])]
    mins = {task: min(ys) for task, ys in mins.items() if task in tasks}
    maxs = {task: max(ys) for task, ys in maxs.items() if task in tasks}
    missing_baselines = []
    for task in tasks:
      if task not in mins or task not in maxs:
        missing_baselines.append(task)
    if set(missing_baselines) == set(tasks):
      print(f'No baselines found to normalize any tasks in {name} plot.')
    else:
      for task in missing_baselines:
        print(f'No baselines found to normalize {task} in {name} plot.')
    for index, run in enumerate(runs):
      if run.task not in mins or run.task not in maxs:
        continue
      ys = (run.ys - mins[run.task]) / (maxs[run.task] - mins[run.task])
      if clip:
        ys = np.minimum(ys, 1.0)
      runs[index] = run._replace(ys=ys)
  combined = []
  for method, seed in itertools.product(methods, seeds):
    relevant = [r for r in runs if r.method == method and r.seed == seed]
    if not relevant:
      continue
    if relevant[0].xs is None:
      xs, ys = None, np.array([r.ys for r in relevant])
    else:
      xs, ys = stack_scores(*zip(*[(r.xs, r.ys) for r in relevant]))
    with warnings.catch_warnings():
      warnings.simplefilter('ignore', category=RuntimeWarning)
      combined.append(Run('combined', method, seed, xs, agg(ys)))
  plot(name, ax, combined, methods, args)
def curve_lines(index, task, method, ax, runs, args):
  """Draw one line per seed for a method; return the first run's curve."""
  depth = 10000 - 10 * index - 1
  color = args.colors[method]
  for run in runs:
    ax.plot(run.xs, run.ys, label=method, color=color, zorder=depth)
  return runs[0].xs, runs[0].ys
def curve_area(index, task, method, ax, runs, args):
  """Draw a mean/median line with a shaded aggregate band across seeds.

  The --aggregate flag selects the band: mean +/- std ('std1') or
  percentile ranges ('per0', 'per5', 'per25'). Returns (xs, center line).
  """
  xs, ys = stack_scores(*zip(*[(r.xs, r.ys) for r in runs]))
  with warnings.catch_warnings():
    # Silence all-NaN slice warnings from the nan-aware reducers.
    warnings.simplefilter('ignore', category=RuntimeWarning)
    if args.aggregate == 'std1':
      mean, std = np.nanmean(ys, -1), np.nanstd(ys, -1)
      lo, mi, hi = mean - std, mean, mean + std
    elif args.aggregate == 'per0':
      lo, mi, hi = [np.nanpercentile(ys, k, -1) for k in (0, 50, 100)]
    elif args.aggregate == 'per5':
      lo, mi, hi = [np.nanpercentile(ys, k, -1) for k in (5, 50, 95)]
    elif args.aggregate == 'per25':
      lo, mi, hi = [np.nanpercentile(ys, k, -1) for k in (25, 50, 75)]
    else:
      raise NotImplementedError(args.aggregate)
  color = args.colors[method]
  kw = dict(color=color, zorder=1000 - 10 * index, alpha=0.1, linewidths=0)
  ax.fill_between(xs, lo, hi, **kw)
  ax.plot(xs, mi, label=method, color=color, zorder=10000 - 10 * index - 1)
  return xs, mi
def baseline(index, method, ax, runs, args):
  """Draw a scalar baseline as a dashed horizontal line with a std band."""
  assert all(run.xs is None for run in runs)
  ys = np.array([run.ys for run in runs])
  mean, std = ys.mean(), ys.std()
  color = args.colors[method]
  # Infinite x extent keeps the band covering any later xlim choice.
  kw = dict(color=color, zorder=500 - 20 * index - 1, alpha=0.1, linewidths=0)
  ax.fill_between([-np.inf, np.inf], [mean - std] * 2, [mean + std] * 2, **kw)
  kw = dict(ls='--', color=color, zorder=5000 - 10 * index - 1)
  ax.axhline(mean, label=method, **kw)
def legend(fig, mapping=None, **kwargs):
  """Create a single deduplicated figure legend and reserve space for it.

  Collects handles from all axes (later axes win on duplicate labels),
  optionally renames labels via `mapping`, then shrinks the tight_layout
  rect on the side given by kwargs['loc'] so plots don't overlap it.
  """
  entries = {}
  for ax in fig.axes:
    for handle, label in zip(*ax.get_legend_handles_labels()):
      if mapping and label in mapping:
        label = mapping[label]
      entries[label] = handle
  leg = fig.legend(entries.values(), entries.keys(), **kwargs)
  leg.get_frame().set_edgecolor('white')
  # Convert the legend's pixel extent into figure coordinates.
  extent = leg.get_window_extent(fig.canvas.get_renderer())
  extent = extent.transformed(fig.transFigure.inverted())
  yloc, xloc = kwargs['loc'].split()
  y0 = dict(lower=extent.y1, center=0, upper=0)[yloc]
  y1 = dict(lower=1, center=1, upper=extent.y0)[yloc]
  x0 = dict(left=extent.x1, center=0, right=0)[xloc]
  x1 = dict(left=1, center=1, right=extent.x0)[xloc]
  fig.tight_layout(rect=[x0, y0, x1, y1], h_pad=0.5, w_pad=0.5)
def save(fig, args):
  """Save the figure as PNG and PDF, cropping the PDF when pdfcrop exists."""
  args.outdir.mkdir(parents=True, exist_ok=True)
  png_path = args.outdir / 'curves.png'
  fig.savefig(png_path, dpi=args.dpi)
  print('Saved to', png_path)
  pdf_path = args.outdir / 'curves.pdf'
  fig.savefig(pdf_path)
  try:
    subprocess.call(['pdfcrop', str(pdf_path), str(pdf_path)])
  except FileNotFoundError:
    print('Install texlive-extra-utils to crop PDF outputs.')
def bin_scores(xs, ys, borders, reducer=np.nanmean):
  """Aggregate ys into half-open bins (start, stop] defined by borders."""
  # Sort points by x so every bin maps to one contiguous slice.
  order = np.argsort(xs)
  sorted_xs = xs[order]
  sorted_ys = ys[order]
  values = []
  with warnings.catch_warnings():
    # Empty bins produce all-NaN reductions; ignore those warnings.
    warnings.simplefilter('ignore', category=RuntimeWarning)
    for start, stop in zip(borders[:-1], borders[1:]):
      left = np.searchsorted(sorted_xs, start, side='right')
      right = np.searchsorted(sorted_xs, stop, side='right')
      values.append(reducer(sorted_ys[left:right]))
  return borders[1:], np.array(values)
def stack_scores(multiple_xs, multiple_ys):
  """Align runs of different lengths onto the longest x grid.

  Shorter runs are padded with NaN so downstream nan-aware reducers
  (np.nanmean, np.nanstd, np.nanpercentile) ignore the missing tail.
  The previous implementation padded with inf, which nan-reducers do NOT
  ignore and which therefore corrupted the aggregated curves.
  """
  longest_xs = sorted(multiple_xs, key=lambda x: len(x))[-1]
  multiple_padded_ys = []
  for xs, ys in zip(multiple_xs, multiple_ys):
    # Every run must be a prefix of the longest x grid.
    assert (longest_xs[:len(xs)] == xs).all(), (list(xs), list(longest_xs))
    padding = [np.nan] * (len(longest_xs) - len(xs))
    padded_ys = np.concatenate([ys, padding])
    multiple_padded_ys.append(padded_ys)
  stacked_ys = np.stack(multiple_padded_ys, -1)
  return longest_xs, stacked_ys
def load_jsonl(filename):
  """Load a JSONL file into a DataFrame, tolerating a truncated last line.

  A malformed final line (a run still being written) is silently skipped;
  a malformed line anywhere else invalidates the whole file, which is
  reported and loaded as None.
  """
  try:
    with filename.open() as f:
      lines = list(f.readlines())
    records = []
    for index, line in enumerate(lines):
      try:
        records.append(json.loads(line))
      except Exception:
        if index == len(lines) - 1:
          continue
        # Fixed message: it previously printed a '(unknown)' placeholder
        # instead of the filename, lacked a separating space, and
        # overcounted the total lines by one.
        raise ValueError(
            f'Skipping invalid JSON line ({index+1}/{len(lines)}) in '
            f'{filename}: {line}')
    return pd.DataFrame(records)
  except ValueError as e:
    print('Invalid', filename, e)
    return None
def save_runs(runs, filename):
  """Serialize all runs that contain curve data to a JSON file."""
  filename.parent.mkdir(parents=True, exist_ok=True)
  # Baselines (xs is None) carry no curve and are not persisted.
  records = [
      dict(
          task=run.task, method=run.method, seed=run.seed,
          xs=run.xs.tolist(), ys=run.ys.tolist())
      for run in runs if run.xs is not None]
  filename.write_text(json.dumps(records))
  print('Saved', filename)
def main(args):
  """Load runs and baselines, then render and save the figure of curves."""
  find_keys(args)
  runs = load_runs(args)
  save_runs(runs, args.outdir / 'runs.jsonl')
  baselines = load_baselines(args.baselines, args.prefix)
  stats(runs, baselines)
  methods = order_methods(runs, baselines, args)
  if not runs:
    # Fixed typo: message previously read 'Noting to plot.'.
    print('Nothing to plot.')
    return
  print('Plotting...')
  fig = figure(runs + baselines, methods, args)
  save(fig, args)
def parse_args():
  """Parse and post-process the command line flags for the plotter."""
  # argparse cannot parse 'False' as falsy, hence this converter.
  boolean = lambda x: bool(['False', 'True'].index(x))
  parser = argparse.ArgumentParser()
  parser.add_argument('--indir', nargs='+', type=pathlib.Path, required=True)
  parser.add_argument('--outdir', type=pathlib.Path, required=True)
  parser.add_argument('--subdir', type=boolean, default=True)
  parser.add_argument('--xaxis', type=str, required=True)
  parser.add_argument('--yaxis', type=str, required=True)
  parser.add_argument('--tasks', nargs='+', default=[r'.*'])
  parser.add_argument('--methods', nargs='+', default=[r'.*'])
  parser.add_argument('--baselines', nargs='+', default=DEFAULT_BASELINES)
  parser.add_argument('--prefix', type=boolean, default=False)
  parser.add_argument('--bins', type=float, default=-1)
  parser.add_argument('--aggregate', type=str, default='std1')
  parser.add_argument('--size', nargs=2, type=float, default=[2.5, 2.3])
  parser.add_argument('--dpi', type=int, default=80)
  parser.add_argument('--cols', type=int, default=6)
  parser.add_argument('--xlim', nargs=2, type=float, default=None)
  parser.add_argument('--ylim', nargs=2, type=float, default=None)
  parser.add_argument('--ylimticks', type=boolean, default=True)
  parser.add_argument('--xlabel', type=str, default=None)
  parser.add_argument('--ylabel', type=str, default=None)
  parser.add_argument('--xticks', type=int, default=6)
  parser.add_argument('--yticks', type=int, default=5)
  parser.add_argument('--labels', nargs='+', default=None)
  parser.add_argument('--palette', nargs='+', default=['contrast'])
  parser.add_argument('--colors', nargs='+', default={})
  parser.add_argument('--maxval', type=float, default=0)
  parser.add_argument('--add', nargs='+', type=str, default=[
      'gamer_median', 'gamer_mean', 'record_mean',
      'clipped_record_mean', 'num_seeds'])
  args = parser.parse_args()
  if args.subdir:
    args.outdir /= args.indir[0].stem
  args.indir = [d.expanduser() for d in args.indir]
  args.outdir = args.outdir.expanduser()
  # --labels and --colors are flat key value lists; fold them into dicts.
  if args.labels:
    assert len(args.labels) % 2 == 0
    args.labels = {k: v for k, v in zip(args.labels[:-1], args.labels[1:])}
  if args.colors:
    assert len(args.colors) % 2 == 0
    args.colors = {k: v for k, v in zip(args.colors[:-1], args.colors[1:])}
  args.tasks = [re.compile(p) for p in args.tasks]
  args.methods = [re.compile(p) for p in args.methods]
  args.baselines = [re.compile(p) for p in args.baselines]
  # Baselines are return scores; drop them for other metrics.
  if 'return' not in args.yaxis:
    args.baselines = []
  # NOTE(review): --prefix defaults to False, so this None branch is dead;
  # it looks like the default was meant to be None for auto-detection.
  if args.prefix is None:
    args.prefix = len(args.indir) > 1
  if len(args.palette) == 1 and args.palette[0] in PALETTES:
    args.palette = 10 * PALETTES[args.palette[0]]
  if len(args.add) == 1 and args.add[0] == 'none':
    args.add = []
  return args
# Script entry point.
if __name__ == '__main__':
  main(parse_args())
| true
| true
|
790659ba52b92a5a6517cff35b202a27924f483b
| 56,165
|
py
|
Python
|
flopy/mbase.py
|
andrewcalderwood/flopy
|
0432ce96a0a5eec4d20adb4d384505632a2db3dc
|
[
"CC0-1.0",
"BSD-3-Clause"
] | 351
|
2015-01-03T15:18:48.000Z
|
2022-03-31T09:46:43.000Z
|
flopy/mbase.py
|
andrewcalderwood/flopy
|
0432ce96a0a5eec4d20adb4d384505632a2db3dc
|
[
"CC0-1.0",
"BSD-3-Clause"
] | 1,256
|
2015-01-15T21:10:42.000Z
|
2022-03-31T22:43:06.000Z
|
flopy/mbase.py
|
andrewcalderwood/flopy
|
0432ce96a0a5eec4d20adb4d384505632a2db3dc
|
[
"CC0-1.0",
"BSD-3-Clause"
] | 553
|
2015-01-31T22:46:48.000Z
|
2022-03-31T17:43:35.000Z
|
"""
mbase module
This module contains the base model class from which
all of the other models inherit from.
"""
import abc
import os
import shutil
import threading
import warnings
import queue as Queue
from datetime import datetime
from shutil import which
from subprocess import Popen, PIPE, STDOUT
import copy
import numpy as np
from flopy import utils, discretization
from .version import __version__
from .discretization.grid import Grid
## Global variables
# Multiplier for individual array elements in integer and real arrays read by
# MODFLOW's U2DREL, U1DREL and U2DINT.
iconst = 1
# Printout flag. If >= 0 then array values read are printed in listing file.
iprn = -1
# external exceptions for users
class PackageLoadException(Exception):
    """Raised when a FloPy package fails to load."""
    def __init__(self, error, location=""):
        """Store the raw error text and build an '<error> (<location>)' message."""
        self.message = error
        formatted = f"{error} ({location})"
        super().__init__(formatted)
class FileDataEntry:
    """Record describing a single external file reference of a model."""
    def __init__(self, fname, unit, binflag=False, output=False, package=None):
        """Store the file name, unit number, and flags for one file entry."""
        self.fname = fname
        self.unit = unit
        # True if the file is binary rather than ASCII.
        self.binflag = binflag
        # True if the file is a model output file.
        self.output = output
        # Name of the package this file belongs to, if any.
        self.package = package
class FileData:
    """Registry of external file entries, unique by file name and unit."""
    def __init__(self):
        # Ordered list of FileDataEntry records.
        self.file_data = []
        return
    def add_file(self, fname, unit, binflag=False, output=False, package=None):
        """Add a file entry, replacing any entry with the same name or unit.

        The previous implementation collected the indices of clashing
        entries in ``ipop`` but never removed them, so duplicates silently
        accumulated; clashing entries are now popped before appending.
        """
        ipop = []
        for idx, file_data in enumerate(self.file_data):
            if file_data.fname == fname or file_data.unit == unit:
                ipop.append(idx)
        # Pop in reverse so earlier indices remain valid while removing.
        for idx in reversed(ipop):
            self.file_data.pop(idx)
        self.file_data.append(
            FileDataEntry(
                fname, unit, binflag=binflag, output=output, package=package
            )
        )
        return
class ModelInterface:
    """Abstract interface shared by concrete FloPy model classes.

    Each abstract property/method raises ``NotImplementedError`` so that a
    subclass missing an override fails loudly on first access.
    """
    def __init__(self):
        # Cached grid bookkeeping: _mg_resync marks _modelgrid as stale.
        self._mg_resync = True
        self._modelgrid = None
    def update_modelgrid(self):
        """Rebuild the cached grid keeping only its georeference, and mark it stale."""
        if self._modelgrid is not None:
            self._modelgrid = Grid(
                proj4=self._modelgrid.proj4,
                xoff=self._modelgrid.xoffset,
                yoff=self._modelgrid.yoffset,
                angrot=self._modelgrid.angrot,
            )
        self._mg_resync = True
    @property
    @abc.abstractmethod
    def modelgrid(self):
        raise NotImplementedError(
            "must define modelgrid in child class to use this base class"
        )
    @property
    @abc.abstractmethod
    def packagelist(self):
        raise NotImplementedError(
            "must define packagelist in child class to use this base class"
        )
    @property
    @abc.abstractmethod
    def namefile(self):
        raise NotImplementedError(
            "must define namefile in child class to use this base class"
        )
    @property
    @abc.abstractmethod
    def model_ws(self):
        raise NotImplementedError(
            "must define model_ws in child class to use this base class"
        )
    @property
    @abc.abstractmethod
    def exename(self):
        raise NotImplementedError(
            "must define exename in child class to use this base class"
        )
    @property
    @abc.abstractmethod
    def version(self):
        raise NotImplementedError(
            "must define version in child class to use this base class"
        )
    @property
    @abc.abstractmethod
    def solver_tols(self):
        # Message fixed: it previously said "version" (copy-paste error).
        raise NotImplementedError(
            "must define solver_tols in child class to use this base class"
        )
    @abc.abstractmethod
    def export(self, f, **kwargs):
        raise NotImplementedError(
            "must define export in child class to use this base class"
        )
    @property
    @abc.abstractmethod
    def laytyp(self):
        raise NotImplementedError(
            "must define laytyp in child class to use this base class"
        )
    @property
    @abc.abstractmethod
    def hdry(self):
        raise NotImplementedError(
            "must define hdry in child class to use this base class"
        )
    @property
    @abc.abstractmethod
    def hnoflo(self):
        raise NotImplementedError(
            "must define hnoflo in child class to use this base class"
        )
    @property
    @abc.abstractmethod
    def laycbd(self):
        raise NotImplementedError(
            "must define laycbd in child class to use this base class"
        )
    @property
    @abc.abstractmethod
    def verbose(self):
        raise NotImplementedError(
            "must define verbose in child class to use this base class"
        )
    @abc.abstractmethod
    def check(self, f=None, verbose=True, level=1):
        raise NotImplementedError(
            "must define check in child class to use this base class"
        )
    def get_package_list(self, ftype=None):
        """
        Get a list of all the package names.
        Parameters
        ----------
        ftype : str
            Type of package, 'RIV', 'LPF', etc.
        Returns
        -------
        val : list of strings
            Can be used to see what packages are in the model, and can then
            be used with get_package to pull out individual packages.
        """
        val = []
        for pp in self.packagelist:
            if ftype is None:
                val.append(pp.name[0].upper())
            elif pp.package_type.lower() == ftype:
                val.append(pp.name[0].upper())
        return val
    def _check(self, chk, level=1):
        """
        Check model data for common errors.
        Parameters
        ----------
        f : str or file handle
            String defining file name or file handle for summary file
            of check method output. If a string is passed a file handle
            is created. If f is None, check method does not write
            results to a summary file. (default is None)
        verbose : bool
            Boolean flag used to determine if check method results are
            written to the screen
        level : int
            Check method analysis level. If level=0, summary checks are
            performed. If level=1, full checks are performed.
        summarize : bool
            Boolean flag used to determine if summary of results is written
            to the screen
        Returns
        -------
        None
        Examples
        --------
        >>> import flopy
        >>> m = flopy.modflow.Modflow.load('model.nam')
        >>> m.check()
        """
        # check instance for model-level check
        results = {}
        for p in self.packagelist:
            if chk.package_check_levels.get(p.name[0].lower(), 0) <= level:
                results[p.name[0]] = p.check(
                    f=None,
                    verbose=False,
                    level=level - 1,
                    checktype=chk.__class__,
                )
        # model level checks
        # solver check
        if self.version in chk.solver_packages.keys():
            solvers = set(chk.solver_packages[self.version]).intersection(
                set(self.get_package_list())
            )
            if not solvers:
                chk._add_to_summary(
                    "Error", desc="\r    No solver package", package="model"
                )
            elif len(list(solvers)) > 1:
                for s in solvers:
                    chk._add_to_summary(
                        "Error",
                        desc="\r    Multiple solver packages",
                        package=s,
                    )
            else:
                chk.passed.append("Compatible solver package")
        # add package check results to model level check summary
        for r in results.values():
            if (
                r is not None and r.summary_array is not None
            ):  # currently SFR doesn't have one
                chk.summary_array = np.append(
                    chk.summary_array, r.summary_array
                ).view(np.recarray)
                chk.passed += [
                    f"{r.package.name[0]} package: {psd}" for psd in r.passed
                ]
        chk.summarize()
        return chk
class BaseModel(ModelInterface):
"""
MODFLOW-based models base class.
Parameters
----------
modelname : str, default "modflowtest"
Name of the model, which is also used for model file names.
namefile_ext : str, default "nam"
Name file extension, without "."
exe_name : str, default "mf2k.exe"
Name of the modflow executable.
model_ws : str, optional
Path to the model workspace. Model files will be created in this
directory. Default is None, in which case model_ws is assigned
to the current working directory.
structured : bool, default True
Specify if model grid is structured (default) or unstructured.
verbose : bool, default False
Print additional information to the screen.
**kwargs : dict, optional
Used to define: ``xll``/``yll`` for the x- and y-coordinates of
the lower-left corner of the grid, ``xul``/``yul`` for the
x- and y-coordinates of the upper-left corner of the grid
(deprecated), ``rotation`` for the grid rotation (default 0.0),
``proj4_str`` for a PROJ string, and ``start_datetime`` for
model start date (default "1-1-1970").
"""
def __init__(
self,
modelname="modflowtest",
namefile_ext="nam",
exe_name="mf2k.exe",
model_ws=None,
structured=True,
verbose=False,
**kwargs,
):
"""Initialize BaseModel."""
super().__init__()
self.__name = modelname
self.namefile_ext = namefile_ext or ""
self._namefile = self.__name + "." + self.namefile_ext
self._packagelist = []
self.heading = ""
self.exe_name = exe_name
self._verbose = verbose
self.external_path = None
self.external_extension = "ref"
if model_ws is None:
model_ws = os.getcwd()
if not os.path.exists(model_ws):
try:
os.makedirs(model_ws)
except:
print(
f"\n{model_ws} not valid, "
f"workspace-folder was changed to {os.getcwd()}\n"
)
model_ws = os.getcwd()
self._model_ws = model_ws
self.structured = structured
self.pop_key_list = []
self.cl_params = ""
# check for reference info in kwargs
# we are just carrying these until a dis package is added
xll = kwargs.pop("xll", None)
yll = kwargs.pop("yll", None)
self._xul = kwargs.pop("xul", None)
self._yul = kwargs.pop("yul", None)
self._rotation = kwargs.pop("rotation", 0.0)
self._proj4_str = kwargs.pop("proj4_str", None)
self._start_datetime = kwargs.pop("start_datetime", "1-1-1970")
# build model discretization objects
self._modelgrid = Grid(
proj4=self._proj4_str,
xoff=xll,
yoff=yll,
angrot=self._rotation,
)
self._modeltime = None
# Model file information
self.__onunit__ = 10
# external option stuff
self.array_free_format = True
self.free_format_input = True
self.parameter_load = False
self.array_format = None
self.external_fnames = []
self.external_units = []
self.external_binflag = []
self.external_output = []
self.package_units = []
self._next_ext_unit = None
# output files
self.output_fnames = []
self.output_units = []
self.output_binflag = []
self.output_packages = []
return
    # modeltime/modelgrid must be provided by concrete subclasses.
    @property
    def modeltime(self):
        raise NotImplementedError(
            "must define modeltime in child class to use this base class"
        )
    @property
    def modelgrid(self):
        raise NotImplementedError(
            "must define modelgrid in child class to use this base class"
        )
    # Plain accessor pairs delegating to private backing attributes.
    @property
    def packagelist(self):
        return self._packagelist
    @packagelist.setter
    def packagelist(self, packagelist):
        self._packagelist = packagelist
    @property
    def namefile(self):
        return self._namefile
    @namefile.setter
    def namefile(self, namefile):
        self._namefile = namefile
    @property
    def model_ws(self):
        return self._model_ws
    @model_ws.setter
    def model_ws(self, model_ws):
        self._model_ws = model_ws
    @property
    def exename(self):
        return self._exename
    @exename.setter
    def exename(self, exename):
        self._exename = exename
    @property
    def version(self):
        return self._version
    @version.setter
    def version(self, version):
        self._version = version
    @property
    def verbose(self):
        return self._verbose
    @verbose.setter
    def verbose(self, verbose):
        self._verbose = verbose
@property
def laytyp(self):
if self.get_package("LPF") is not None:
return self.get_package("LPF").laytyp.array
if self.get_package("BCF6") is not None:
return self.get_package("BCF6").laycon.array
if self.get_package("UPW") is not None:
return self.get_package("UPW").laytyp.array
return None
@property
def hdry(self):
if self.get_package("LPF") is not None:
return self.get_package("LPF").hdry
if self.get_package("BCF6") is not None:
return self.get_package("BCF6").hdry
if self.get_package("UPW") is not None:
return self.get_package("UPW").hdry
return None
@property
def hnoflo(self):
try:
bas6 = self.get_package("BAS6")
return bas6.hnoflo
except AttributeError:
return None
@property
def laycbd(self):
try:
dis = self.get_package("DIS")
return dis.laycbd.array
except AttributeError:
return None
# we don't need these - no need for controlled access to array_free_format
# def set_free_format(self, value=True):
# """
# Set the free format flag for the model instance
#
# Parameters
# ----------
# value : bool
# Boolean value to set free format flag for model. (default is True)
#
# Returns
# -------
#
# """
# if not isinstance(value, bool):
# print('Error: set_free_format passed value must be a boolean')
# return False
# self.array_free_format = value
#
# def get_free_format(self):
# """
# Return the free format flag for the model
#
# Returns
# -------
# out : bool
# Free format flag for the model
#
# """
# return self.array_free_format
    def next_unit(self, i=None):
        # Advance and return the internal unit counter.
        # NOTE(review): when ``i`` is given the counter is reset to ``i - 1``
        # and ``i - 1`` is returned, so the NEXT parameterless call yields
        # ``i``; confirm callers expect this off-by-one reset semantics.
        if i is not None:
            self.__onunit__ = i - 1
        else:
            self.__onunit__ += 1
        return self.__onunit__
def next_ext_unit(self):
"""
Function to encapsulate next_ext_unit attribute
"""
next_unit = self._next_ext_unit + 1
self._next_ext_unit += 1
return next_unit
    def export(self, f, **kwargs):
        """
        Method to export a model to netcdf or shapefile based on the
        extension of the file name (.shp for shapefile, .nc for netcdf)
        Parameters
        ----------
        f : str
            filename
        kwargs : keyword arguments
            modelgrid : flopy.discretization.Grid instance
                user supplied modelgrid which can be used for exporting
                in lieu of the modelgrid associated with the model object
        Returns
        -------
        None or Netcdf object
        """
        # Deferred import (presumably to avoid a circular import at module
        # load time - confirm before moving to the top of the file).
        from .export import utils
        return utils.model_export(f, self, **kwargs)
    def add_package(self, p):
        """
        Add a package.
        Parameters
        ----------
        p : Package object
        """
        for idx, u in enumerate(p.unit_number):
            if u != 0:
                # Warn (but still register) when a unit number collides with
                # an already registered package or external file.
                if u in self.package_units or u in self.external_units:
                    try:
                        pn = p.name[idx]
                    except:
                        pn = p.name
                    if self.verbose:
                        print(
                            f"\nWARNING:\n unit {u} of package {pn} already in use."
                        )
            self.package_units.append(u)
        for i, pp in enumerate(self.packagelist):
            if pp.allowDuplicates:
                continue
            # A second package of the same type replaces the existing one
            # in place, unless the package type allows duplicates.
            elif isinstance(p, type(pp)):
                if self.verbose:
                    print(
                        "\nWARNING:\n Two packages of the same type, "
                        f"Replacing existing '{p.name[0]}' package."
                    )
                self.packagelist[i] = p
                return
        if self.verbose:
            print("adding Package: ", p.name[0])
        self.packagelist.append(p)
    def remove_package(self, pname):
        """
        Remove a package from this model
        Parameters
        ----------
        pname : string
            Name of the package, such as 'RIV', 'BAS6', etc.
        """
        for i, pp in enumerate(self.packagelist):
            if pname.upper() in pp.name:
                if self.verbose:
                    print("removing Package: ", pp.name)
                # Remove the package object from the model's packagelist
                p = self.packagelist.pop(i)
                # Remove the package unit number from the list of package
                # units stored with the model
                for iu in p.unit_number:
                    if iu in self.package_units:
                        self.package_units.remove(iu)
                return
        # NOTE(review): StopIteration for "not found" is unusual (KeyError
        # would be conventional), but existing callers may catch it.
        raise StopIteration(
            "Package name " + pname + " not found in Package list"
        )
    def __getattr__(self, item):
        """
        __getattr__ - syntactic sugar
        Parameters
        ----------
        item : str
            3 character package name (case insensitive) or "sr" to access
            the SpatialReference instance of the ModflowDis object
        Returns
        -------
        sr : SpatialReference instance
        pp : Package object
            Package object of type :class:`flopy.pakbase.Package`
        Note
        ----
        if self.dis is not None, then the spatial reference instance is updated
        using self.dis.delr, self.dis.delc, and self.dis.lenuni before being
        returned
        """
        # Guard against recursion during unpickling/partial construction,
        # before instance attributes exist.
        if item == "output_packages" or not hasattr(self, "output_packages"):
            raise AttributeError(item)
        # Convenience shortcuts delegating to the DIS package when present.
        if item == "tr":
            if self.dis is not None:
                return self.dis.tr
            else:
                return None
        if item == "nper":
            if self.dis is not None:
                return self.dis.nper
            else:
                return 0
        if item == "start_datetime":
            if self.dis is not None:
                return self.dis.start_datetime
            else:
                return None
        # return self.get_package(item)
        # to avoid infinite recursion
        if item == "_packagelist" or item == "packagelist":
            raise AttributeError(item)
        pckg = self.get_package(item)
        if pckg is not None or item in self.mfnam_packages:
            return pckg
        if item == "modelgrid":
            return
        raise AttributeError(item)
def get_ext_dict_attr(
    self, ext_unit_dict=None, unit=None, filetype=None, pop_key=True
):
    """Look up a file entry in ``ext_unit_dict`` by unit number or by
    file type.

    Returns
    -------
    (unit, basename) of the first match, or (None, None) when nothing
    matches or no dictionary is given.
    """
    found_unit = None
    found_name = None
    if ext_unit_dict is None:
        return found_unit, found_name
    for key, value in ext_unit_dict.items():
        if key == unit:
            found_unit = key
            found_name = os.path.basename(value.filename)
            break
        if value.filetype == filetype:
            found_unit = key
            found_name = os.path.basename(value.filename)
            if pop_key:
                # schedule this unit for later removal from ext_unit_dict
                self.add_pop_key_list(found_unit)
            break
    return found_unit, found_name
def _output_msg(self, i, add=True):
if add:
txt1 = "Adding"
txt2 = "to"
else:
txt1 = "Removing"
txt2 = "from"
print(
f"{txt1} {self.output_fnames[i]} (unit={self.output_units[i]}) "
f"{txt2} the output list."
)
def add_output_file(
    self, unit, fname=None, extension="cbc", binflag=True, package=None
):
    """
    Add an ascii or binary output file for a package.

    Parameters
    ----------
    unit : int
        unit number of external array
    fname : str
        filename of external array. (default is None)
    extension : str
        extension to use for the cell-by-cell file. Only used if fname
        is None. (default is cbc)
    binflag : bool
        boolean flag indicating if the output file is a binary file.
        Default is True
    package : str
        string that defines the package the output file is attached to.
        Default is None
    """
    add_cbc = False
    if unit > 0:
        add_cbc = True
        # if the unit is registered as an external file, adopt its
        # name/binary flag and drop the external entry
        if abs(unit) in self.external_units:
            idx = self.external_units.index(abs(unit))
            if fname is None:
                fname = os.path.basename(self.external_fnames[idx])
            binflag = self.external_binflag[idx]
            self.remove_external(unit=abs(unit))
        # determine if the unit exists in the output data
        if abs(unit) in self.output_units:
            add_cbc = False
            idx = self.output_units.index(abs(unit))
            # determine if binflag has changed; if so, re-register
            if binflag is not self.output_binflag[idx]:
                add_cbc = True
            if add_cbc:
                self.remove_output(unit=abs(unit))
            else:
                if package is not None:
                    self.output_packages[idx].append(package)
    if add_cbc:
        if fname is None:
            fname = f"{self.name}.{extension}"
            # check if this file name exists for a different unit number
            if fname in self.output_fnames:
                idx = self.output_fnames.index(fname)
                iut = self.output_units[idx]
                if iut != unit:
                    # include unit number in fname if package has
                    # not been passed
                    if package is None:
                        fname = f"{self.name}.{unit}.{extension}"
                    # include package name in fname
                    else:
                        fname = f"{self.name}.{package}.{extension}"
        else:
            fname = os.path.basename(fname)
        self.add_output(fname, unit, binflag=binflag, package=package)
    return
def add_output(self, fname, unit, binflag=False, package=None):
    """
    Register an output file so that it is listed as a DATA or
    DATA(BINARY) entry in the name file and outside packages can
    refer to it.

    Parameters
    ----------
    fname : str
        filename of the output file
    unit : int
        unit number of the output file
    binflag : boolean
        binary or not. (default is False)
    package : str, optional
        name of the package that owns this output file
    """
    if fname in self.output_fnames:
        # A file with the same name is already registered: replace it.
        if self.verbose:
            print(
                "BaseModel.add_output() warning: "
                f"replacing existing filename {fname}"
            )
        pos = self.output_fnames.index(fname)
        if self.verbose:
            self._output_msg(pos, add=False)
        for tracker in (
            self.output_fnames,
            self.output_units,
            self.output_binflag,
            self.output_packages,
        ):
            tracker.pop(pos)
    self.output_fnames.append(fname)
    self.output_units.append(unit)
    self.output_binflag.append(binflag)
    self.output_packages.append([] if package is None else [package])
    if self.verbose:
        self._output_msg(-1, add=True)
    return
def remove_output(self, fname=None, unit=None):
    """
    Remove an output file from the model by specifying either the
    file name or the unit number.

    Parameters
    ----------
    fname : str
        filename of output array (matched as a substring of the
        stored file name)
    unit : int
        unit number of output array

    Raises
    ------
    Exception
        If neither fname nor unit is given.
    """
    if fname is not None:
        idxs = [i for i, e in enumerate(self.output_fnames) if fname in e]
    elif unit is not None:
        idxs = [i for i, u in enumerate(self.output_units) if u == unit]
    else:
        msg = " either fname or unit must be passed to remove_output()"
        raise Exception(msg)
    # bug fix: the original popped while iterating the same lists
    # forward, which skipped entries and shifted indices when more than
    # one entry matched. Pop in reverse so earlier indices stay valid.
    for i in reversed(idxs):
        if self.verbose:
            self._output_msg(i, add=False)
        self.output_fnames.pop(i)
        self.output_units.pop(i)
        self.output_binflag.pop(i)
        self.output_packages.pop(i)
    return
def get_output(self, fname=None, unit=None):
    """
    Look up a registered output file either by file name or by unit
    number.

    Given ``fname``, return the matching unit number; given ``unit``,
    return the matching file name; None if no entry matches.

    Raises
    ------
    Exception
        If neither fname nor unit is given.
    """
    if fname is not None:
        for name, iu in zip(self.output_fnames, self.output_units):
            if fname in name:
                return iu
        return None
    if unit is not None:
        for name, iu in zip(self.output_fnames, self.output_units):
            if iu == unit:
                return name
        return None
    msg = " either fname or unit must be passed to get_output()"
    raise Exception(msg)
def set_output_attribute(self, fname=None, unit=None, attr=None):
    """
    Modify the stored attributes of a registered output file, located
    either by file name or by unit number.

    Parameters
    ----------
    fname : str
        filename of output array (matched as a substring)
    unit : int
        unit number of output array
    attr : dict
        mapping of attribute name ('binflag', 'fname' or 'unit') to
        the new value

    Raises
    ------
    Exception
        If neither fname nor unit is given.
    """
    idx = None
    if fname is not None:
        for i, e in enumerate(self.output_fnames):
            if fname in e:
                idx = i
                break
        # bug fix: the original returned None unconditionally here,
        # so attributes were never updated when locating by fname
    elif unit is not None:
        for i, u in enumerate(self.output_units):
            if u == unit:
                idx = i
                break
    else:
        msg = (
            " either fname or unit must be passed "
            "to set_output_attribute()"
        )
        raise Exception(msg)
    if attr is not None and idx is not None:
        # bug fix: attr.items was missing its call parentheses and
        # raised TypeError whenever attr was provided
        for key, value in attr.items():
            if key == "binflag":
                self.output_binflag[idx] = value
            elif key == "fname":
                self.output_fnames[idx] = value
            elif key == "unit":
                self.output_units[idx] = value
    return
def get_output_attribute(self, fname=None, unit=None, attr=None):
    """
    Return one attribute of a registered output file, located either
    by file name or by unit number.

    Parameters
    ----------
    fname : str
        filename of output array (matched as a substring)
    unit : int
        unit number of output array
    attr : str
        attribute to read: 'binflag', 'fname' or 'unit'

    Returns
    -------
    The requested attribute value, or None when no entry matches or
    ``attr`` is not recognized.

    Raises
    ------
    Exception
        If neither fname nor unit is given.
    """
    idx = None
    if fname is not None:
        for i, e in enumerate(self.output_fnames):
            if fname in e:
                idx = i
                break
        # bug fix: the original returned None unconditionally here,
        # before the attribute could be read
    elif unit is not None:
        for i, u in enumerate(self.output_units):
            if u == unit:
                idx = i
                break
    else:
        # bug fix: error message previously named set_output_attribute()
        raise Exception(
            " either fname or unit must be passed "
            "to get_output_attribute()"
        )
    v = None
    if attr is not None and idx is not None:
        if attr == "binflag":
            v = self.output_binflag[idx]
        elif attr == "fname":
            v = self.output_fnames[idx]
        elif attr == "unit":
            v = self.output_units[idx]
    return v
def add_external(self, fname, unit, binflag=False, output=False):
    """
    Register an external array file so it is listed as a DATA or
    DATA(BINARY) entry in the name file and outside packages can
    refer to it. An existing entry with the same file name or unit
    number is replaced.

    Parameters
    ----------
    fname : str
        filename of external array
    unit : int
        unit number of external array
    binflag : boolean
        binary or not. (default is False)
    output : boolean
        whether the file is model output. (default is False)
    """

    def _drop(pos):
        # remove the entry at pos from all four parallel trackers
        self.external_fnames.pop(pos)
        self.external_units.pop(pos)
        self.external_binflag.pop(pos)
        self.external_output.pop(pos)

    if fname in self.external_fnames:
        if self.verbose:
            print(
                "BaseModel.add_external() warning: "
                f"replacing existing filename {fname}"
            )
        _drop(self.external_fnames.index(fname))
    if unit in self.external_units:
        if self.verbose:
            msg = f"BaseModel.add_external() warning: replacing existing unit {unit}"
            print(msg)
        _drop(self.external_units.index(unit))
    self.external_fnames.append(fname)
    self.external_units.append(unit)
    self.external_binflag.append(binflag)
    self.external_output.append(output)
    return
def remove_external(self, fname=None, unit=None):
    """
    Remove an external file from the model by specifying either the
    file name or the unit number.

    Parameters
    ----------
    fname : str
        filename of external array (matched as a substring)
    unit : int
        unit number of external array

    Raises
    ------
    Exception
        If neither fname nor unit is given.
    """
    if fname is not None:
        matches = [
            i for i, e in enumerate(self.external_fnames) if fname in e
        ]
    elif unit is not None:
        matches = [
            i for i, u in enumerate(self.external_units) if u == unit
        ]
    else:
        msg = " either fname or unit must be passed to remove_external()"
        raise Exception(msg)
    # pop in reverse so earlier indices remain valid
    for pos in reversed(matches):
        self.external_fnames.pop(pos)
        self.external_units.pop(pos)
        self.external_binflag.pop(pos)
        self.external_output.pop(pos)
    return
def add_existing_package(
    self, filename, ptype=None, copy_to_model_ws=True
):
    """
    Add an existing package file to a model instance.

    Parameters
    ----------
    filename : str
        the name of the file to add as a package
    ptype : optional
        the model package type (e.g. "lpf", "wel", etc). If None,
        then the file extension of the filename arg is used
    copy_to_model_ws : bool
        flag to copy the package file into the model_ws directory.

    Returns
    -------
    None
    """
    if ptype is None:
        ptype = filename.split(".")[-1]
    ptype = str(ptype).upper()

    # minimal stand-in object that quacks like a Package just enough
    # for add_package() and name-file writing (no-op write_file)
    class Obj:
        pass

    fake_package = Obj()
    fake_package.write_file = lambda: None
    fake_package.name = [ptype]
    fake_package.extension = [filename.split(".")[-1]]
    fake_package.unit_number = [self.next_ext_unit()]
    if copy_to_model_ws:
        base_filename = os.path.split(filename)[-1]
        fake_package.file_name = [base_filename]
        # copy the file (with metadata) into the workspace
        shutil.copy2(filename, os.path.join(self.model_ws, base_filename))
    else:
        fake_package.file_name = [filename]
    # allow the same ptype to appear more than once in the name file
    fake_package.allowDuplicates = True
    self.add_package(fake_package)
def get_name_file_entries(self):
    """Return the package entries of the name file as a single
    newline-terminated string; entries with unit number 0 are skipped."""
    entries = [
        f"{pkg.name[i]:14s} {pkg.unit_number[i]:5d} {pkg.file_name[i]}"
        for pkg in self.packagelist
        for i in range(len(pkg.name))
        if pkg.unit_number[i] != 0
    ]
    return "\n".join(entries) + "\n"
def has_package(self, name):
    """
    Check whether a package with the given name is attached.

    Parameters
    ----------
    name : str
        Name of the package, 'DIS', 'BAS6', etc. (case-insensitive).

    Returns
    -------
    bool
        True if the package name exists, otherwise False.

    Raises
    ------
    ValueError
        If ``name`` is empty or None.
    """
    if not name:
        raise ValueError("invalid package name")
    wanted = name.upper()
    return any(
        pn.upper() == wanted
        for p in self.packagelist
        for pn in p.name
    )
def get_package(self, name):
    """
    Return the attached package whose primary name matches ``name``.

    Parameters
    ----------
    name : str
        Name of the package, 'RIV', 'LPF', etc. (case-insensitive).

    Returns
    -------
    pp : Package object or None
        The matching package, or None when nothing matches.

    Raises
    ------
    ValueError
        If ``name`` is empty or None.
    """
    if not name:
        raise ValueError("invalid package name")
    wanted = name.upper()
    return next(
        (p for p in self.packagelist if p.name[0].upper() == wanted),
        None,
    )
def set_version(self, version):
    """Set the model version and refresh version-dependent headings.

    Parameters
    ----------
    version : str
        One of the keys of ``self.version_types`` (case-insensitive).

    Raises
    ------
    Exception
        If ``version`` is not a supported model version.
    """
    self.version = version.lower()
    # check that this is a valid model version
    if self.version not in list(self.version_types.keys()):
        err = (
            f"Error: Unsupported model version ({self.version}). "
            "Valid model versions are:"
        )
        for v in list(self.version_types.keys()):
            err += f" {v}"
        raise Exception(err)
    # set namefile heading
    self.heading = (
        f"# Name file for {self.version_types[self.version]}, "
        f"generated by Flopy version {__version__}."
    )
    # regenerate the heading of every attached package
    for p in self.get_package_list():
        pak = self.get_package(p)
        if hasattr(pak, "heading"):
            pak._generate_heading()
    return None
def change_model_ws(self, new_pth=None, reset_external=False):
    """
    Change the model workspace.

    Parameters
    ----------
    new_pth : str
        Location of new model workspace. If this path does not exist,
        it will be created. (default is None, which will be assigned to
        the present working directory).
    reset_external : bool
        If True, recompute external file paths relative to the new
        workspace.

    Returns
    -------
    None
    """
    if new_pth is None:
        new_pth = os.getcwd()
    if not os.path.exists(new_pth):
        try:
            print(f"\ncreating model workspace...\n {new_pth}")
            os.makedirs(new_pth)
        except:
            # NOTE(review): bare except — any makedirs failure (including
            # permission errors) is re-raised as this generic OSError
            raise OSError(f"{new_pth} not valid, workspace-folder")
    # reset the model workspace, remembering the old one for
    # external-path re-relativization below
    old_pth = self._model_ws
    self._model_ws = new_pth
    if self.verbose:
        print(f"\nchanging model workspace...\n {new_pth}")
    # reset the paths for each package
    for pp in self.packagelist:
        pp.fn_path = os.path.join(self.model_ws, pp.file_name[0])
    # create the external path (if needed)
    if (
        hasattr(self, "external_path")
        and self.external_path is not None
        and not os.path.exists(
            os.path.join(self._model_ws, self.external_path)
        )
    ):
        pth = os.path.join(self._model_ws, self.external_path)
        os.makedirs(pth)
        if reset_external:
            self._reset_external(pth, old_pth)
    elif reset_external:
        self._reset_external(self._model_ws, old_pth)
    return None
def _reset_external(self, pth, old_pth):
    """Recompute stored external-file paths relative to workspace ``pth``.

    Output files keep their recorded name as-is; input files are
    re-expressed relative to the new workspace so the model can still
    find them after a workspace change.
    """
    new_ext_fnames = []
    for ext_file, output in zip(
        self.external_fnames, self.external_output
    ):
        if output:
            # output files will be rewritten on the next run, so the
            # stored name can be kept unchanged
            new_ext_file = ext_file
        else:
            # input file: resolve its absolute location (bare names are
            # taken relative to the old workspace), then re-relativize
            # its directory against the new workspace
            fdir = os.path.dirname(ext_file)
            if fdir == "":
                fpth = os.path.abspath(os.path.join(old_pth, ext_file))
            else:
                fpth = ext_file
            ao = os.path.abspath(os.path.dirname(fpth))
            ep = os.path.abspath(pth)
            relp = os.path.relpath(ao, ep)
            new_ext_file = os.path.join(relp, os.path.basename(ext_file))
        new_ext_fnames.append(new_ext_file)
    self.external_fnames = new_ext_fnames
@property
def model_ws(self):
    """str: the model workspace path (returned as a defensive copy;
    assign via ``model.model_ws = ...`` to change it)."""
    return copy.deepcopy(self._model_ws)
def _set_name(self, value):
    """
    Set the model name and rename the name file and all package files
    to match.

    Parameters
    ----------
    value : str
        Name to assign to model.
    """
    self.__name = str(value)
    self.namefile = self.__name + "." + self.namefile_ext
    # keep every package's file names and path in sync with the new name
    for p in self.packagelist:
        for i in range(len(p.extension)):
            p.file_name[i] = self.__name + "." + p.extension[i]
        p.fn_path = os.path.join(self.model_ws, p.file_name[0])
def __setattr__(self, key, value):
    """Intercept assignment of special model attributes.

    'name', 'model_ws', 'sr', 'tr' and 'start_datetime' are routed to
    the appropriate setter or DIS package; everything else is stored
    normally.
    """
    if key == "free_format_input":
        super().__setattr__(key, value)
    elif key == "name":
        self._set_name(value)
    elif key == "model_ws":
        self.change_model_ws(value)
    elif key == "sr" and value.__class__.__name__ == "SpatialReference":
        # legacy spatial reference support; forwarded to the DIS package
        warnings.warn(
            "SpatialReference has been deprecated.",
            category=DeprecationWarning,
        )
        if self.dis is not None:
            self.dis.sr = value
        else:
            raise Exception(
                "cannot set SpatialReference - ModflowDis not found"
            )
    elif key == "tr":
        assert isinstance(
            value, discretization.reference.TemporalReference
        )
        if self.dis is not None:
            self.dis.tr = value
        else:
            raise Exception(
                "cannot set TemporalReference - ModflowDis not found"
            )
    elif key == "start_datetime":
        if self.dis is not None:
            # keep the DIS package and the temporal reference in sync
            self.dis.start_datetime = value
            self.tr.start_datetime = value
        else:
            raise Exception(
                "cannot set start_datetime - ModflowDis not found"
            )
    else:
        super().__setattr__(key, value)
def run_model(
    self,
    silent=False,
    pause=False,
    report=False,
    normal_msg="normal termination",
):
    """
    Run the model using subprocess.Popen (delegates to the
    module-level ``run_model`` with this model's executable, name file
    and workspace).

    Parameters
    ----------
    silent : boolean
        Echo run information to screen (default is True).
    pause : boolean, optional
        Pause upon completion (default is False).
    report : boolean, optional
        Save stdout lines to a list (buff) which is returned
        by the method . (default is False).
    normal_msg : str
        Normal termination message used to determine if the
        run terminated normally. (default is 'normal termination')

    Returns
    -------
    (success, buff)
    success : boolean
    buff : list of lines of stdout
    """
    return run_model(
        self.exe_name,
        self.namefile,
        model_ws=self.model_ws,
        silent=silent,
        pause=pause,
        report=report,
        normal_msg=normal_msg,
    )
def load_results(self):
    """Stub — concrete model classes override this to load results."""
    msg = "load_results not implemented"
    print(msg)
    return None
def write_input(self, SelPackList=False, check=False):
    """
    Write the model input files.

    Parameters
    ----------
    SelPackList : False or list of packages
        If False, write every attached package; otherwise write only
        the packages whose names appear in the list.
    check : bool
        If True, run the model-level check before writing.
    """
    if check:
        # run check prior to writing input
        self.check(f=f"{self.name}.chk", verbose=self.verbose, level=1)
    # reset the model to free_format if parameter substitution was
    # performed on a model load
    if self.parameter_load and not self.free_format_input:
        if self.verbose:
            print(
                "\nResetting free_format_input to True to "
                "preserve the precision of the parameter data."
            )
        self.free_format_input = True
    if self.verbose:
        print("\nWriting packages:")
    if SelPackList == False:
        for p in self.packagelist:
            if self.verbose:
                print(" Package: ", p.name[0])
            # prevent individual package checks from running after the
            # model-level check above — otherwise checks would run twice
            try:
                p.write_file(check=False)
            except TypeError:
                # some packages do not accept a check argument
                p.write_file()
    else:
        for pon in SelPackList:
            for i, p in enumerate(self.packagelist):
                if pon in p.name:
                    if self.verbose:
                        print(" Package: ", p.name[0])
                    try:
                        p.write_file(check=False)
                    except TypeError:
                        p.write_file()
                    break
    if self.verbose:
        print(" ")
    # write name file
    self.write_name_file()
    return
def write_name_file(self):
    """Subclasses must implement writing of their own name file."""
    msg = "IMPLEMENTATION ERROR: writenamefile must be overloaded"
    raise Exception(msg)
def set_model_units(self):
    """Subclasses must implement assignment of model units."""
    msg = "IMPLEMENTATION ERROR: set_model_units must be overloaded"
    raise Exception(msg)
@property
def name(self):
    """
    Get the model name.

    Returns
    -------
    name : str
        Copy of the model name (assign via ``model.name = ...`` to
        rename; direct mutation of the returned value has no effect).
    """
    return copy.deepcopy(self.__name)
def add_pop_key_list(self, key):
    """
    Remember an external-file unit number so the corresponding
    (typically binary) output entry can later be removed from
    ext_unit_dict. Duplicate keys are ignored.

    Parameters
    ----------
    key : int
        file unit number
    """
    if key in self.pop_key_list:
        return
    self.pop_key_list.append(key)
def check(self, f=None, verbose=True, level=1):
    """
    Check model data for common errors.

    Parameters
    ----------
    f : str or file handle
        String defining file name or file handle for summary file
        of check method output. If a string is passed a file handle
        is created. If f is None, check method does not write
        results to a summary file. (default is None)
    verbose : bool
        Boolean flag used to determine if check method results are
        written to the screen
    level : int
        Check method analysis level. If level=0, summary checks are
        performed. If level=1, full checks are performed.

    Returns
    -------
    None

    Examples
    --------
    >>> import flopy
    >>> m = flopy.modflow.Modflow.load('model.nam')
    >>> m.check()
    """
    # check instance for model-level check
    chk = utils.check(self, f=f, verbose=verbose, level=level)
    # check for unit number conflicts
    package_units = {}
    duplicate_units = {}
    for p in self.packagelist:
        for i in range(len(p.name)):
            if p.unit_number[i] != 0:
                if p.unit_number[i] in package_units.values():
                    duplicate_units[p.name[i]] = p.unit_number[i]
                    otherpackage = [
                        k
                        for k, v in package_units.items()
                        if v == p.unit_number[i]
                    ][0]
                    duplicate_units[otherpackage] = p.unit_number[i]
                # bug fix: record every used unit number — the original
                # never populated package_units, so unit-number
                # conflicts could never be detected
                package_units[p.name[i]] = p.unit_number[i]
    if len(duplicate_units) > 0:
        for k, v in duplicate_units.items():
            chk._add_to_summary(
                "Error", package=k, value=v, desc="unit number conflict"
            )
    else:
        chk.passed.append("Unit number conflicts")
    return self._check(chk, level)
def plot(self, SelPackList=None, **kwargs):
    """
    Plot 2-D, 3-D, transient 2-D, and stress period list (MfList)
    model input data.

    Parameters
    ----------
    SelPackList : bool or list
        List of packages to plot. If SelPackList=None all packages
        are plotted. (default is None)
    **kwargs : dict
        filename_base : str
            Base file name used to automatically generate output image
            file names; plots are exported when not None. (default is
            None)
        file_extension : str
            Valid matplotlib.pyplot file extension for savefig(). Only
            used if filename_base is not None. (default is 'png')
        mflay : int
            MODFLOW zero-based layer number to return. If None, all
            layers are included. (default is None)
        kper : int
            MODFLOW zero-based stress period number to return.
            (default is zero)
        key : str
            MfList dictionary key. (default is None)

    Returns
    -------
    axes : list
        Empty list if filename_base was given, otherwise a list of
        matplotlib.pyplot axes.

    Examples
    --------
    >>> import flopy
    >>> ml = flopy.modflow.Modflow.load('test.nam')
    >>> ml.plot()
    """
    # imported lazily to avoid requiring matplotlib at module import
    from flopy.plot import PlotUtilities

    axes = PlotUtilities._plot_model_helper(
        self, SelPackList=SelPackList, **kwargs
    )
    return axes
def to_shapefile(self, filename, package_names=None, **kwargs):
    """
    Deprecated wrapper for writing a shapefile of the model grid; use
    :meth:`export` instead. If package_names is not None, arrays from
    the requested packages are added to the shapefile as attributes.

    Parameters
    ----------
    filename : string
        name of the shapefile to write
    package_names : list of package names (e.g. ["dis","lpf"])
        Packages to export data arrays to shapefile. (default is None)

    Returns
    -------
    None

    Examples
    --------
    >>> import flopy
    >>> m = flopy.modflow.Modflow()
    >>> m.to_shapefile('model.shp', SelPackList)
    """
    warnings.warn("to_shapefile() is deprecated. use .export()")
    self.export(filename, package_names=package_names)
    return
def run_model(
    exe_name,
    namefile,
    model_ws="./",
    silent=False,
    pause=False,
    report=False,
    normal_msg="normal termination",
    use_async=False,
    cargs=None,
):
    """
    Run a model using subprocess.Popen, optionally reading the model's
    stdout asynchronously and reporting progress with timestamps.

    Parameters
    ----------
    exe_name : str
        Executable name (with path, if necessary) to run.
    namefile : str
        Namefile of model to run. The namefile must be the
        filename of the namefile without the path. Namefile can be None
        to allow programs that do not require a control file (name file)
        to be passed as a command line argument.
    model_ws : str
        Path to the location of the namefile. (default is the
        current working directory - './')
    silent : boolean
        Echo run information to screen (default is True).
    pause : boolean, optional
        Pause upon completion (default is False).
    report : boolean, optional
        Save stdout lines to a list (buff) which is returned
        by the method . (default is False).
    normal_msg : str or list
        Normal termination message used to determine if the
        run terminated normally. More than one message can be provided
        using a list. (Default is 'normal termination')
    use_async : boolean
        asynchronously read model stdout and report with timestamps.
        good for models that take long time to run. not good for
        models that run really fast
    cargs : str or list of strings
        additional command line arguments to pass to the executable.
        Default is None

    Returns
    -------
    (success, buff)
    success : boolean
    buff : list of lines of stdout
    """
    success = False
    buff = []
    # convert normal_msg to a list of lower case str for comparison
    if isinstance(normal_msg, str):
        normal_msg = [normal_msg]
    for idx, s in enumerate(normal_msg):
        normal_msg[idx] = s.lower()
    # Check to make sure that program and namefile exist
    exe = which(exe_name)
    if exe is None:
        import platform

        # on Windows, retry the lookup with/without the .exe suffix
        if platform.system() in "Windows":
            if not exe_name.lower().endswith(".exe"):
                exe = which(exe_name + ".exe")
            elif exe_name.lower().endswith(".exe"):
                exe = which(exe_name[:-4])
    if exe is None:
        raise Exception(
            f"The program {exe_name} does not exist or is not executable."
        )
    else:
        if not silent:
            print(
                f"FloPy is using the following executable to run the model: {exe}"
            )
    if namefile is not None:
        if not os.path.isfile(os.path.join(model_ws, namefile)):
            raise Exception(
                f"The namefile for this model does not exists: {namefile}"
            )

    # simple little function for the reader thread to target: forward
    # raw stdout lines (bytes) into the queue until EOF
    def q_output(output, q):
        for line in iter(output.readline, b""):
            q.put(line)

    # create a list of arguments to pass to Popen
    argv = [exe_name]
    if namefile is not None:
        argv.append(namefile)
    # add additional arguments to Popen arguments
    if cargs is not None:
        if isinstance(cargs, str):
            cargs = [cargs]
        for t in cargs:
            argv.append(t)
    # run the model with Popen (stderr merged into stdout)
    proc = Popen(argv, stdout=PIPE, stderr=STDOUT, cwd=model_ws)
    if not use_async:
        # synchronous path: read stdout line by line until the process
        # exits, scanning each line for a normal-termination message
        while True:
            line = proc.stdout.readline().decode("utf-8")
            if line == "" and proc.poll() is not None:
                break
            if line:
                for msg in normal_msg:
                    if msg in line.lower():
                        success = True
                        break
                line = line.rstrip("\r\n")
                if not silent:
                    print(line)
                if report:
                    buff.append(line)
            else:
                break
        return success, buff
    # asynchronous path: a daemon thread feeds stdout lines into a
    # queue, which is drained here with elapsed-time annotations
    q = Queue.Queue()
    thread = threading.Thread(target=q_output, args=(proc.stdout, q))
    thread.daemon = True
    thread.start()
    failed_words = ["fail", "error"]
    last = datetime.now()
    lastsec = 0.0
    while True:
        try:
            line = q.get_nowait()
        except Queue.Empty:
            pass
        else:
            # NOTE(review): the queue holds bytes, so this str
            # comparison can never match — confirm intended sentinel
            if line == "":
                break
            line = line.decode().lower().strip()
            if line != "":
                now = datetime.now()
                dt = now - last
                tsecs = dt.total_seconds() - lastsec
                line = f"(elapsed:{tsecs})-->{line}"
                lastsec = tsecs + lastsec
                buff.append(line)
                if not silent:
                    print(line)
                for fword in failed_words:
                    if fword in line:
                        success = False
                        break
        if proc.poll() is not None:
            break
    proc.wait()
    thread.join(timeout=1)
    # NOTE(review): readlines() returns bytes; mixing them into buff
    # makes the str-in-bytes test below raise TypeError when any
    # trailing output remains — verify against the sync path
    buff.extend(proc.stdout.readlines())
    proc.stdout.close()
    for line in buff:
        for msg in normal_msg:
            if msg in line.lower():
                print("success")
                success = True
                break
    if pause:
        input("Press Enter to continue...")
    return success, buff
| 31.202778
| 89
| 0.531968
|
import abc
import os
import shutil
import threading
import warnings
import queue as Queue
from datetime import datetime
from shutil import which
from subprocess import Popen, PIPE, STDOUT
import copy
import numpy as np
from flopy import utils, discretization
from .version import __version__
from .discretization.grid import Grid
# Printout flag. If >= 0 then array values read are printed in listing file.
# default print flag passed to MODFLOW array readers (-1 = do not print)
iprn = -1
# external exceptions for users
class PackageLoadException(Exception):
    """User-facing error raised when a model package file cannot be
    loaded; combines the error text with the file location."""

    def __init__(self, error, location=""):
        # keep the raw error text available to callers
        self.message = error
        detail = f"{error} ({location})"
        super().__init__(detail)
class FileDataEntry:
    """Record describing one file registered with a model."""

    def __init__(self, fname, unit, binflag=False, output=False, package=None):
        self.fname = fname  # file name
        self.unit = unit  # unit number
        self.binflag = binflag  # True if the file is binary
        self.output = output  # True if the file is model output
        self.package = package  # owning package name, or None
class FileData:
    """Container of FileDataEntry records.

    Adding a file whose name or unit number is already present
    replaces the previous entry.
    """

    def __init__(self):
        # list of FileDataEntry records
        self.file_data = []
        return

    def add_file(self, fname, unit, binflag=False, output=False, package=None):
        """Register a file, replacing any entry with the same name or
        unit number."""
        ipop = []
        for idx, file_data in enumerate(self.file_data):
            if file_data.fname == fname or file_data.unit == unit:
                ipop.append(idx)
        # bug fix: the duplicate indices were collected but never
        # removed, so stale entries accumulated; pop in reverse order
        # to keep earlier indices valid
        for idx in reversed(ipop):
            self.file_data.pop(idx)
        self.file_data.append(
            FileDataEntry(
                fname, unit, binflag=binflag, output=output, package=package
            )
        )
        return
class ModelInterface:
    """Abstract interface shared by flopy model classes.

    Concrete models must provide the abstract properties declared here
    (modelgrid, packagelist, namefile, model_ws, exename, version,
    solver_tols, laytyp, hdry, hnoflo, laycbd, verbose) plus export()
    and check(); the concrete helpers get_package_list() and _check()
    are implemented on the interface itself.
    """

    def __init__(self):
        # force the model grid to be rebuilt on next access
        self._mg_resync = True
        self._modelgrid = None

    def update_modelgrid(self):
        """Recreate the cached grid (keeping its georeference data) and
        mark it for resynchronization."""
        if self._modelgrid is not None:
            self._modelgrid = Grid(
                proj4=self._modelgrid.proj4,
                xoff=self._modelgrid.xoffset,
                yoff=self._modelgrid.yoffset,
                angrot=self._modelgrid.angrot,
            )
        self._mg_resync = True

    @property
    @abc.abstractmethod
    def modelgrid(self):
        raise NotImplementedError(
            "must define modelgrid in child class to use this base class"
        )

    @property
    @abc.abstractmethod
    def packagelist(self):
        raise NotImplementedError(
            "must define packagelist in child class to use this base class"
        )

    @property
    @abc.abstractmethod
    def namefile(self):
        raise NotImplementedError(
            "must define namefile in child class to use this base class"
        )

    @property
    @abc.abstractmethod
    def model_ws(self):
        raise NotImplementedError(
            "must define model_ws in child class to use this base class"
        )

    @property
    @abc.abstractmethod
    def exename(self):
        raise NotImplementedError(
            "must define exename in child class to use this base class"
        )

    @property
    @abc.abstractmethod
    def version(self):
        raise NotImplementedError(
            "must define version in child class to use this base class"
        )

    @property
    @abc.abstractmethod
    def solver_tols(self):
        # bug fix: the message previously said 'version' (copy-paste)
        raise NotImplementedError(
            "must define solver_tols in child class to use this base class"
        )

    @abc.abstractmethod
    def export(self, f, **kwargs):
        raise NotImplementedError(
            "must define export in child class to use this base class"
        )

    @property
    @abc.abstractmethod
    def laytyp(self):
        raise NotImplementedError(
            "must define laytyp in child class to use this base class"
        )

    @property
    @abc.abstractmethod
    def hdry(self):
        raise NotImplementedError(
            "must define hdry in child class to use this base class"
        )

    @property
    @abc.abstractmethod
    def hnoflo(self):
        raise NotImplementedError(
            "must define hnoflo in child class to use this base class"
        )

    @property
    @abc.abstractmethod
    def laycbd(self):
        raise NotImplementedError(
            "must define laycbd in child class to use this base class"
        )

    @property
    @abc.abstractmethod
    def verbose(self):
        raise NotImplementedError(
            "must define verbose in child class to use this base class"
        )

    @abc.abstractmethod
    def check(self, f=None, verbose=True, level=1):
        raise NotImplementedError(
            "must define check in child class to use this base class"
        )

    def get_package_list(self, ftype=None):
        """Return the names of attached packages, optionally filtered
        by package type ``ftype``."""
        val = []
        for pp in self.packagelist:
            if ftype is None:
                val.append(pp.name[0].upper())
            elif pp.package_type.lower() == ftype:
                val.append(pp.name[0].upper())
        return val

    def _check(self, chk, level=1):
        """Run package-level checks plus model-level solver checks and
        fold the results into ``chk``."""
        # per-package checks, one level shallower than the model check
        results = {}
        for p in self.packagelist:
            if chk.package_check_levels.get(p.name[0].lower(), 0) <= level:
                results[p.name[0]] = p.check(
                    f=None,
                    verbose=False,
                    level=level - 1,
                    checktype=chk.__class__,
                )
        # model level checks
        # solver check: exactly one compatible solver must be attached
        if self.version in chk.solver_packages.keys():
            solvers = set(chk.solver_packages[self.version]).intersection(
                set(self.get_package_list())
            )
            if not solvers:
                chk._add_to_summary(
                    "Error", desc="\r No solver package", package="model"
                )
            elif len(list(solvers)) > 1:
                for s in solvers:
                    chk._add_to_summary(
                        "Error",
                        desc="\r Multiple solver packages",
                        package=s,
                    )
            else:
                chk.passed.append("Compatible solver package")
        # add package check results to model level check summary
        for r in results.values():
            if (
                r is not None and r.summary_array is not None
            ):  # currently SFR doesn't have one
                chk.summary_array = np.append(
                    chk.summary_array, r.summary_array
                ).view(np.recarray)
                chk.passed += [
                    f"{r.package.name[0]} package: {psd}" for psd in r.passed
                ]
        chk.summarize()
        return chk
class BaseModel(ModelInterface):
def __init__(
    self,
    modelname="modflowtest",
    namefile_ext="nam",
    exe_name="mf2k.exe",
    model_ws=None,
    structured=True,
    verbose=False,
    **kwargs,
):
    """Initialize BaseModel.

    Parameters
    ----------
    modelname : str
        Name of the model; used for the name file and package files.
    namefile_ext : str
        Extension of the name file.
    exe_name : str
        Executable used by run_model().
    model_ws : str or None
        Model workspace directory; created if missing, with a fallback
        to the current working directory when creation fails.
    structured : bool
        Whether the model grid is structured.
    verbose : bool
        Print diagnostic messages.
    **kwargs
        Optional georeference/temporal settings: xll, yll, xul, yul,
        rotation, proj4_str, start_datetime.
    """
    super().__init__()
    self.__name = modelname
    self.namefile_ext = namefile_ext or ""
    self._namefile = self.__name + "." + self.namefile_ext
    self._packagelist = []
    self.heading = ""
    self.exe_name = exe_name
    self._verbose = verbose
    self.external_path = None
    self.external_extension = "ref"
    if model_ws is None:
        model_ws = os.getcwd()
    if not os.path.exists(model_ws):
        try:
            os.makedirs(model_ws)
        except:
            # NOTE(review): bare except — any makedirs failure silently
            # falls back to the current working directory
            print(
                f"\n{model_ws} not valid, "
                f"workspace-folder was changed to {os.getcwd()}\n"
            )
            model_ws = os.getcwd()
    self._model_ws = model_ws
    self.structured = structured
    self.pop_key_list = []
    self.cl_params = ""
    # georeference keyword arguments
    xll = kwargs.pop("xll", None)
    yll = kwargs.pop("yll", None)
    self._xul = kwargs.pop("xul", None)
    self._yul = kwargs.pop("yul", None)
    self._rotation = kwargs.pop("rotation", 0.0)
    self._proj4_str = kwargs.pop("proj4_str", None)
    self._start_datetime = kwargs.pop("start_datetime", "1-1-1970")
    # build the (empty) model grid placeholder
    self._modelgrid = Grid(
        proj4=self._proj4_str,
        xoff=xll,
        yoff=yll,
        angrot=self._rotation,
    )
    self._modeltime = None
    # unit-number and external/output file bookkeeping
    self.__onunit__ = 10
    self.array_free_format = True
    self.free_format_input = True
    self.parameter_load = False
    self.array_format = None
    self.external_fnames = []
    self.external_units = []
    self.external_binflag = []
    self.external_output = []
    self.package_units = []
    self._next_ext_unit = None
    self.output_fnames = []
    self.output_units = []
    self.output_binflag = []
    self.output_packages = []
    return
@property
def modeltime(self):
    """Temporal discretization object; subclasses must provide it."""
    raise NotImplementedError(
        "must define modeltime in child class to use this base class"
    )
@property
def modelgrid(self):
    """Model grid object; subclasses must provide it."""
    raise NotImplementedError(
        "must define modelgrid in child class to use this base class"
    )
@property
def packagelist(self):
    """list: packages attached to this model."""
    return self._packagelist

@packagelist.setter
def packagelist(self, packagelist):
    self._packagelist = packagelist
@property
def namefile(self):
return self._namefile
@namefile.setter
def namefile(self, namefile):
self._namefile = namefile
@property
def model_ws(self):
return self._model_ws
@model_ws.setter
def model_ws(self, model_ws):
self._model_ws = model_ws
@property
def exename(self):
return self._exename
@exename.setter
def exename(self, exename):
self._exename = exename
@property
def version(self):
return self._version
@version.setter
def version(self, version):
self._version = version
@property
def verbose(self):
return self._verbose
@verbose.setter
def verbose(self, verbose):
self._verbose = verbose
@property
def laytyp(self):
if self.get_package("LPF") is not None:
return self.get_package("LPF").laytyp.array
if self.get_package("BCF6") is not None:
return self.get_package("BCF6").laycon.array
if self.get_package("UPW") is not None:
return self.get_package("UPW").laytyp.array
return None
@property
def hdry(self):
if self.get_package("LPF") is not None:
return self.get_package("LPF").hdry
if self.get_package("BCF6") is not None:
return self.get_package("BCF6").hdry
if self.get_package("UPW") is not None:
return self.get_package("UPW").hdry
return None
@property
def hnoflo(self):
try:
bas6 = self.get_package("BAS6")
return bas6.hnoflo
except AttributeError:
return None
@property
def laycbd(self):
try:
dis = self.get_package("DIS")
return dis.laycbd.array
except AttributeError:
return None
# def set_free_format(self, value=True):
# """
# Set the free format flag for the model instance
#
# Parameters
# ----------
# value : bool
# Boolean value to set free format flag for model. (default is True)
#
# Returns
# -------
#
# """
# if not isinstance(value, bool):
# print('Error: set_free_format passed value must be a boolean')
# return False
# self.array_free_format = value
#
# def get_free_format(self):
# """
# Return the free format flag for the model
#
# Returns
# -------
# out : bool
# Free format flag for the model
#
# """
# return self.array_free_format
def next_unit(self, i=None):
if i is not None:
self.__onunit__ = i - 1
else:
self.__onunit__ += 1
return self.__onunit__
def next_ext_unit(self):
next_unit = self._next_ext_unit + 1
self._next_ext_unit += 1
return next_unit
def export(self, f, **kwargs):
from .export import utils
return utils.model_export(f, self, **kwargs)
def add_package(self, p):
for idx, u in enumerate(p.unit_number):
if u != 0:
if u in self.package_units or u in self.external_units:
try:
pn = p.name[idx]
except:
pn = p.name
if self.verbose:
print(
f"\nWARNING:\n unit {u} of package {pn} already in use."
)
self.package_units.append(u)
for i, pp in enumerate(self.packagelist):
if pp.allowDuplicates:
continue
elif isinstance(p, type(pp)):
if self.verbose:
print(
"\nWARNING:\n Two packages of the same type, "
f"Replacing existing '{p.name[0]}' package."
)
self.packagelist[i] = p
return
if self.verbose:
print("adding Package: ", p.name[0])
self.packagelist.append(p)
def remove_package(self, pname):
for i, pp in enumerate(self.packagelist):
if pname.upper() in pp.name:
if self.verbose:
print("removing Package: ", pp.name)
# Remove the package object from the model's packagelist
p = self.packagelist.pop(i)
for iu in p.unit_number:
if iu in self.package_units:
self.package_units.remove(iu)
return
raise StopIteration(
"Package name " + pname + " not found in Package list"
)
def __getattr__(self, item):
if item == "output_packages" or not hasattr(self, "output_packages"):
raise AttributeError(item)
if item == "tr":
if self.dis is not None:
return self.dis.tr
else:
return None
if item == "nper":
if self.dis is not None:
return self.dis.nper
else:
return 0
if item == "start_datetime":
if self.dis is not None:
return self.dis.start_datetime
else:
return None
if item == "_packagelist" or item == "packagelist":
raise AttributeError(item)
pckg = self.get_package(item)
if pckg is not None or item in self.mfnam_packages:
return pckg
if item == "modelgrid":
return
raise AttributeError(item)
def get_ext_dict_attr(
self, ext_unit_dict=None, unit=None, filetype=None, pop_key=True
):
iu = None
fname = None
if ext_unit_dict is not None:
for key, value in ext_unit_dict.items():
if key == unit:
iu = key
fname = os.path.basename(value.filename)
break
elif value.filetype == filetype:
iu = key
fname = os.path.basename(value.filename)
if pop_key:
self.add_pop_key_list(iu)
break
return iu, fname
def _output_msg(self, i, add=True):
if add:
txt1 = "Adding"
txt2 = "to"
else:
txt1 = "Removing"
txt2 = "from"
print(
f"{txt1} {self.output_fnames[i]} (unit={self.output_units[i]}) "
f"{txt2} the output list."
)
def add_output_file(
self, unit, fname=None, extension="cbc", binflag=True, package=None
):
add_cbc = False
if unit > 0:
add_cbc = True
if abs(unit) in self.external_units:
idx = self.external_units.index(abs(unit))
if fname is None:
fname = os.path.basename(self.external_fnames[idx])
binflag = self.external_binflag[idx]
self.remove_external(unit=abs(unit))
if abs(unit) in self.output_units:
add_cbc = False
idx = self.output_units.index(abs(unit))
if binflag is not self.output_binflag[idx]:
add_cbc = True
if add_cbc:
self.remove_output(unit=abs(unit))
else:
if package is not None:
self.output_packages[idx].append(package)
if add_cbc:
if fname is None:
fname = f"{self.name}.{extension}"
if fname in self.output_fnames:
idx = self.output_fnames.index(fname)
iut = self.output_units[idx]
if iut != unit:
if package is None:
fname = f"{self.name}.{unit}.{extension}"
else:
fname = f"{self.name}.{package}.{extension}"
else:
fname = os.path.basename(fname)
self.add_output(fname, unit, binflag=binflag, package=package)
return
def add_output(self, fname, unit, binflag=False, package=None):
if fname in self.output_fnames:
if self.verbose:
print(
"BaseModel.add_output() warning: "
f"replacing existing filename {fname}"
)
idx = self.output_fnames.index(fname)
if self.verbose:
self._output_msg(idx, add=False)
self.output_fnames.pop(idx)
self.output_units.pop(idx)
self.output_binflag.pop(idx)
self.output_packages.pop(idx)
self.output_fnames.append(fname)
self.output_units.append(unit)
self.output_binflag.append(binflag)
if package is not None:
self.output_packages.append([package])
else:
self.output_packages.append([])
if self.verbose:
self._output_msg(-1, add=True)
return
def remove_output(self, fname=None, unit=None):
if fname is not None:
for i, e in enumerate(self.output_fnames):
if fname in e:
if self.verbose:
self._output_msg(i, add=False)
self.output_fnames.pop(i)
self.output_units.pop(i)
self.output_binflag.pop(i)
self.output_packages.pop(i)
elif unit is not None:
for i, u in enumerate(self.output_units):
if u == unit:
if self.verbose:
self._output_msg(i, add=False)
self.output_fnames.pop(i)
self.output_units.pop(i)
self.output_binflag.pop(i)
self.output_packages.pop(i)
else:
msg = " either fname or unit must be passed to remove_output()"
raise Exception(msg)
return
def get_output(self, fname=None, unit=None):
if fname is not None:
for i, e in enumerate(self.output_fnames):
if fname in e:
return self.output_units[i]
return None
elif unit is not None:
for i, u in enumerate(self.output_units):
if u == unit:
return self.output_fnames[i]
return None
else:
msg = " either fname or unit must be passed to get_output()"
raise Exception(msg)
return
def set_output_attribute(self, fname=None, unit=None, attr=None):
idx = None
if fname is not None:
for i, e in enumerate(self.output_fnames):
if fname in e:
idx = i
break
return None
elif unit is not None:
for i, u in enumerate(self.output_units):
if u == unit:
idx = i
break
else:
msg = (
" either fname or unit must be passed "
"to set_output_attribute()"
)
raise Exception(msg)
if attr is not None:
if idx is not None:
for key, value in attr.items:
if key == "binflag":
self.output_binflag[idx] = value
elif key == "fname":
self.output_fnames[idx] = value
elif key == "unit":
self.output_units[idx] = value
return
def get_output_attribute(self, fname=None, unit=None, attr=None):
idx = None
if fname is not None:
for i, e in enumerate(self.output_fnames):
if fname in e:
idx = i
break
return None
elif unit is not None:
for i, u in enumerate(self.output_units):
if u == unit:
idx = i
break
else:
raise Exception(
" either fname or unit must be passed "
"to set_output_attribute()"
)
v = None
if attr is not None:
if idx is not None:
if attr == "binflag":
v = self.output_binflag[idx]
elif attr == "fname":
v = self.output_fnames[idx]
elif attr == "unit":
v = self.output_units[idx]
return v
def add_external(self, fname, unit, binflag=False, output=False):
if fname in self.external_fnames:
if self.verbose:
print(
"BaseModel.add_external() warning: "
f"replacing existing filename {fname}"
)
idx = self.external_fnames.index(fname)
self.external_fnames.pop(idx)
self.external_units.pop(idx)
self.external_binflag.pop(idx)
self.external_output.pop(idx)
if unit in self.external_units:
if self.verbose:
msg = f"BaseModel.add_external() warning: replacing existing unit {unit}"
print(msg)
idx = self.external_units.index(unit)
self.external_fnames.pop(idx)
self.external_units.pop(idx)
self.external_binflag.pop(idx)
self.external_output.pop(idx)
self.external_fnames.append(fname)
self.external_units.append(unit)
self.external_binflag.append(binflag)
self.external_output.append(output)
return
def remove_external(self, fname=None, unit=None):
plist = []
if fname is not None:
for i, e in enumerate(self.external_fnames):
if fname in e:
plist.append(i)
elif unit is not None:
for i, u in enumerate(self.external_units):
if u == unit:
plist.append(i)
else:
msg = " either fname or unit must be passed to remove_external()"
raise Exception(msg)
j = 0
for i in plist:
ipos = i - j
self.external_fnames.pop(ipos)
self.external_units.pop(ipos)
self.external_binflag.pop(ipos)
self.external_output.pop(ipos)
j += 1
return
def add_existing_package(
self, filename, ptype=None, copy_to_model_ws=True
):
if ptype is None:
ptype = filename.split(".")[-1]
ptype = str(ptype).upper()
class Obj:
pass
fake_package = Obj()
fake_package.write_file = lambda: None
fake_package.name = [ptype]
fake_package.extension = [filename.split(".")[-1]]
fake_package.unit_number = [self.next_ext_unit()]
if copy_to_model_ws:
base_filename = os.path.split(filename)[-1]
fake_package.file_name = [base_filename]
shutil.copy2(filename, os.path.join(self.model_ws, base_filename))
else:
fake_package.file_name = [filename]
fake_package.allowDuplicates = True
self.add_package(fake_package)
def get_name_file_entries(self):
lines = []
for p in self.packagelist:
for i in range(len(p.name)):
if p.unit_number[i] == 0:
continue
s = f"{p.name[i]:14s} {p.unit_number[i]:5d} {p.file_name[i]}"
lines.append(s)
return "\n".join(lines) + "\n"
def has_package(self, name):
if not name:
raise ValueError("invalid package name")
name = name.upper()
for p in self.packagelist:
for pn in p.name:
if pn.upper() == name:
return True
return False
def get_package(self, name):
if not name:
raise ValueError("invalid package name")
name = name.upper()
for pp in self.packagelist:
if pp.name[0].upper() == name:
return pp
return None
def set_version(self, version):
self.version = version.lower()
if self.version not in list(self.version_types.keys()):
err = (
f"Error: Unsupported model version ({self.version}). "
"Valid model versions are:"
)
for v in list(self.version_types.keys()):
err += f" {v}"
raise Exception(err)
self.heading = (
f"# Name file for {self.version_types[self.version]}, "
f"generated by Flopy version {__version__}."
)
for p in self.get_package_list():
pak = self.get_package(p)
if hasattr(pak, "heading"):
pak._generate_heading()
return None
def change_model_ws(self, new_pth=None, reset_external=False):
if new_pth is None:
new_pth = os.getcwd()
if not os.path.exists(new_pth):
try:
print(f"\ncreating model workspace...\n {new_pth}")
os.makedirs(new_pth)
except:
raise OSError(f"{new_pth} not valid, workspace-folder")
old_pth = self._model_ws
self._model_ws = new_pth
if self.verbose:
print(f"\nchanging model workspace...\n {new_pth}")
for pp in self.packagelist:
pp.fn_path = os.path.join(self.model_ws, pp.file_name[0])
if (
hasattr(self, "external_path")
and self.external_path is not None
and not os.path.exists(
os.path.join(self._model_ws, self.external_path)
)
):
pth = os.path.join(self._model_ws, self.external_path)
os.makedirs(pth)
if reset_external:
self._reset_external(pth, old_pth)
elif reset_external:
self._reset_external(self._model_ws, old_pth)
return None
def _reset_external(self, pth, old_pth):
new_ext_fnames = []
for ext_file, output in zip(
self.external_fnames, self.external_output
):
if output:
new_ext_file = ext_file
else:
fdir = os.path.dirname(ext_file)
if fdir == "":
fpth = os.path.abspath(os.path.join(old_pth, ext_file))
else:
fpth = ext_file
ao = os.path.abspath(os.path.dirname(fpth))
ep = os.path.abspath(pth)
relp = os.path.relpath(ao, ep)
new_ext_file = os.path.join(relp, os.path.basename(ext_file))
new_ext_fnames.append(new_ext_file)
self.external_fnames = new_ext_fnames
@property
def model_ws(self):
return copy.deepcopy(self._model_ws)
def _set_name(self, value):
self.__name = str(value)
self.namefile = self.__name + "." + self.namefile_ext
for p in self.packagelist:
for i in range(len(p.extension)):
p.file_name[i] = self.__name + "." + p.extension[i]
p.fn_path = os.path.join(self.model_ws, p.file_name[0])
def __setattr__(self, key, value):
if key == "free_format_input":
super().__setattr__(key, value)
elif key == "name":
self._set_name(value)
elif key == "model_ws":
self.change_model_ws(value)
elif key == "sr" and value.__class__.__name__ == "SpatialReference":
warnings.warn(
"SpatialReference has been deprecated.",
category=DeprecationWarning,
)
if self.dis is not None:
self.dis.sr = value
else:
raise Exception(
"cannot set SpatialReference - ModflowDis not found"
)
elif key == "tr":
assert isinstance(
value, discretization.reference.TemporalReference
)
if self.dis is not None:
self.dis.tr = value
else:
raise Exception(
"cannot set TemporalReference - ModflowDis not found"
)
elif key == "start_datetime":
if self.dis is not None:
self.dis.start_datetime = value
self.tr.start_datetime = value
else:
raise Exception(
"cannot set start_datetime - ModflowDis not found"
)
else:
super().__setattr__(key, value)
def run_model(
self,
silent=False,
pause=False,
report=False,
normal_msg="normal termination",
):
return run_model(
self.exe_name,
self.namefile,
model_ws=self.model_ws,
silent=silent,
pause=pause,
report=report,
normal_msg=normal_msg,
)
def load_results(self):
print("load_results not implemented")
return None
def write_input(self, SelPackList=False, check=False):
if check:
self.check(f=f"{self.name}.chk", verbose=self.verbose, level=1)
if self.parameter_load and not self.free_format_input:
if self.verbose:
print(
"\nResetting free_format_input to True to "
"preserve the precision of the parameter data."
)
self.free_format_input = True
if self.verbose:
print("\nWriting packages:")
if SelPackList == False:
for p in self.packagelist:
if self.verbose:
print(" Package: ", p.name[0])
try:
p.write_file(check=False)
except TypeError:
p.write_file()
else:
for pon in SelPackList:
for i, p in enumerate(self.packagelist):
if pon in p.name:
if self.verbose:
print(" Package: ", p.name[0])
try:
p.write_file(check=False)
except TypeError:
p.write_file()
break
if self.verbose:
print(" ")
self.write_name_file()
return
def write_name_file(self):
raise Exception(
"IMPLEMENTATION ERROR: writenamefile must be overloaded"
)
def set_model_units(self):
raise Exception(
"IMPLEMENTATION ERROR: set_model_units must be overloaded"
)
@property
def name(self):
return copy.deepcopy(self.__name)
def add_pop_key_list(self, key):
if key not in self.pop_key_list:
self.pop_key_list.append(key)
def check(self, f=None, verbose=True, level=1):
chk = utils.check(self, f=f, verbose=verbose, level=level)
package_units = {}
duplicate_units = {}
for p in self.packagelist:
for i in range(len(p.name)):
if p.unit_number[i] != 0:
if p.unit_number[i] in package_units.values():
duplicate_units[p.name[i]] = p.unit_number[i]
otherpackage = [
k
for k, v in package_units.items()
if v == p.unit_number[i]
][0]
duplicate_units[otherpackage] = p.unit_number[i]
if len(duplicate_units) > 0:
for k, v in duplicate_units.items():
chk._add_to_summary(
"Error", package=k, value=v, desc="unit number conflict"
)
else:
chk.passed.append("Unit number conflicts")
return self._check(chk, level)
def plot(self, SelPackList=None, **kwargs):
from flopy.plot import PlotUtilities
axes = PlotUtilities._plot_model_helper(
self, SelPackList=SelPackList, **kwargs
)
return axes
def to_shapefile(self, filename, package_names=None, **kwargs):
warnings.warn("to_shapefile() is deprecated. use .export()")
self.export(filename, package_names=package_names)
return
def run_model(
    exe_name,
    namefile,
    model_ws="./",
    silent=False,
    pause=False,
    report=False,
    normal_msg="normal termination",
    use_async=False,
    cargs=None,
):
    """Run a model executable and scan its output for a success message.

    Parameters
    ----------
    exe_name : str
        Executable name; resolved on PATH via ``which`` (Windows ``.exe``
        suffix is tried both with and without).
    namefile : str or None
        Name file passed as the first argument; must exist in ``model_ws``.
    model_ws : str
        Working directory for the subprocess.
    silent, pause, report : bool
        Suppress echo / wait for Enter at the end / collect output lines.
    normal_msg : str or list of str
        Case-insensitive substring(s) whose presence marks success.
    use_async : bool
        Read output on a background thread (for models that buffer stdout).
    cargs : str or list of str, optional
        Extra command-line arguments.

    Returns
    -------
    (success, buff) : (bool, list of str)
    """
    success = False
    buff = []

    # normalize normal_msg to a list of lowercase substrings
    if isinstance(normal_msg, str):
        normal_msg = [normal_msg]
    for idx, s in enumerate(normal_msg):
        normal_msg[idx] = s.lower()

    # Check to make sure that program and namefile exist
    exe = which(exe_name)
    if exe is None:
        import platform

        # NOTE(review): "in" here is a substring test on the literal
        # "Windows", not an equality check — works for the expected values
        # but would also match e.g. "Win"; confirm intent.
        if platform.system() in "Windows":
            if not exe_name.lower().endswith(".exe"):
                exe = which(exe_name + ".exe")
            elif exe_name.lower().endswith(".exe"):
                exe = which(exe_name[:-4])
    if exe is None:
        raise Exception(
            f"The program {exe_name} does not exist or is not executable."
        )
    else:
        if not silent:
            print(
                f"FloPy is using the following executable to run the model: {exe}"
            )

    if namefile is not None:
        if not os.path.isfile(os.path.join(model_ws, namefile)):
            raise Exception(
                f"The namefile for this model does not exists: {namefile}"
            )

    # simple function for the thread to target
    def q_output(output, q):
        for line in iter(output.readline, b""):
            q.put(line)

    # create a list of arguments to pass to Popen
    argv = [exe_name]
    if namefile is not None:
        argv.append(namefile)

    # add additional arguments to Popen arguments
    if cargs is not None:
        if isinstance(cargs, str):
            cargs = [cargs]
        for t in cargs:
            argv.append(t)

    # run the model with Popen (stderr folded into stdout)
    proc = Popen(argv, stdout=PIPE, stderr=STDOUT, cwd=model_ws)

    if not use_async:
        # synchronous path: read stdout line by line until EOF
        while True:
            line = proc.stdout.readline().decode("utf-8")
            if line == "" and proc.poll() is not None:
                break
            if line:
                for msg in normal_msg:
                    if msg in line.lower():
                        success = True
                        break
                line = line.rstrip("\r\n")
                if not silent:
                    print(line)
                if report:
                    buff.append(line)
            else:
                break
        return success, buff

    # async path: drain stdout on a daemon thread through a queue
    # (Queue is presumably the stdlib queue module aliased at import time
    # — verify against the file header, which is outside this view)
    q = Queue.Queue()
    thread = threading.Thread(target=q_output, args=(proc.stdout, q))
    thread.daemon = True
    thread.start()

    failed_words = ["fail", "error"]
    last = datetime.now()
    lastsec = 0.0
    while True:
        try:
            line = q.get_nowait()
        except Queue.Empty:
            pass
        else:
            if line == "":
                break
            line = line.decode().lower().strip()
            if line != "":
                # annotate each line with the elapsed seconds since start
                now = datetime.now()
                dt = now - last
                tsecs = dt.total_seconds() - lastsec
                line = f"(elapsed:{tsecs})-->{line}"
                lastsec = tsecs + lastsec
                buff.append(line)
                if not silent:
                    print(line)
                for fword in failed_words:
                    if fword in line:
                        success = False
                        break
        if proc.poll() is not None:
            break
    proc.wait()
    thread.join(timeout=1)
    # collect any trailing output the thread did not consume
    buff.extend(proc.stdout.readlines())
    proc.stdout.close()

    # final success scan over everything collected
    for line in buff:
        for msg in normal_msg:
            if msg in line.lower():
                print("success")
                success = True
                break

    if pause:
        input("Press Enter to continue...")
    return success, buff
| true
| true
|
79065a35e9b373ad699a0edd6b2417b0ba72f4ea
| 6,770
|
py
|
Python
|
pennylane/ops/qubit/arithmetic_ops.py
|
QDaria/pennylane
|
5a28983fc7bd950cde8a4014e54261fef4b54293
|
[
"Apache-2.0"
] | null | null | null |
pennylane/ops/qubit/arithmetic_ops.py
|
QDaria/pennylane
|
5a28983fc7bd950cde8a4014e54261fef4b54293
|
[
"Apache-2.0"
] | null | null | null |
pennylane/ops/qubit/arithmetic_ops.py
|
QDaria/pennylane
|
5a28983fc7bd950cde8a4014e54261fef4b54293
|
[
"Apache-2.0"
] | null | null | null |
# Copyright 2018-2021 Xanadu Quantum Technologies Inc.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
This submodule contains the discrete-variable quantum operations that perform
arithmetic operations on their input states.
"""
# pylint:disable=abstract-method,arguments-differ,protected-access
import numpy as np
import pennylane as qml
from pennylane.operation import Operation
class QubitCarry(Operation):
    r"""QubitCarry(wires)
    Apply the ``QubitCarry`` operation to four input wires.

    This operation performs the transformation:

    .. math::
        |a\rangle |b\rangle |c\rangle |d\rangle \rightarrow |a\rangle |b\rangle |b\oplus c\rangle |bc \oplus d\oplus (b\oplus c)a\rangle

    .. figure:: ../../_static/ops/QubitCarry.svg
        :align: center
        :width: 60%
        :target: javascript:void(0);

    See `here <https://arxiv.org/abs/quant-ph/0008033v1>`__ for more information.

    .. note::
        The first wire should be used to input a carry bit from previous operations. The final wire
        holds the carry bit of this operation and the input state on this wire should be
        :math:`|0\rangle`.

    **Details:**

    * Number of wires: 4
    * Number of parameters: 0

    Args:
        wires (Sequence[int]): the wires the operation acts on

    **Example**

    The ``QubitCarry`` operation maps the state :math:`|0110\rangle` to :math:`|0101\rangle`, where
    the last qubit denotes the carry value:

    .. code-block::

        input_bitstring = (0, 1, 1, 0)

        @qml.qnode(dev)
        def circuit(basis_state):
            qml.BasisState(basis_state, wires=[0, 1, 2, 3])
            qml.QubitCarry(wires=[0, 1, 2, 3])
            return qml.probs(wires=[0, 1, 2, 3])

        probs = circuit(input_bitstring)
        probs_indx = np.argwhere(probs == 1).flatten()[0]
        bitstrings = list(itertools.product(range(2), repeat=4))
        output_bitstring = bitstrings[probs_indx]

    The output bitstring is

    >>> output_bitstring
    (0, 1, 0, 1)

    The action of ``QubitCarry`` is to add wires ``1`` and ``2``. The modulo-two result is output
    in wire ``2`` with a carry value output in wire ``3``. In this case, :math:`1 \oplus 1 = 0` with
    a carry, so we have:

    >>> bc_sum = output_bitstring[2]
    >>> bc_sum
    0
    >>> carry = output_bitstring[3]
    >>> carry
    1
    """
    num_wires = 4
    num_params = 0
    # 16x16 permutation matrix realizing the basis-state map stated in the
    # class docstring: |a b c d> -> |a, b, b^c, bc ^ d ^ (b^c)a>
    _mat = np.array(
        [
            [1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
            [0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
            [0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
            [0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
            [0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0],
            [0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0],
            [0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
            [0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
            [0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0],
            [0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0],
            [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0],
            [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0],
            [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1],
            [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0],
            [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0],
            [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0],
        ]
    )

    @classmethod
    def _matrix(cls, *params):
        # parameterless gate: always the fixed permutation matrix
        return QubitCarry._mat

    @staticmethod
    def decomposition(wires):
        # standard carry decomposition: Toffoli + CNOT + Toffoli
        # (see arXiv:quant-ph/0008033 referenced in the docstring)
        decomp_ops = [
            qml.Toffoli(wires=wires[1:]),
            qml.CNOT(wires=[wires[1], wires[2]]),
            qml.Toffoli(wires=[wires[0], wires[2], wires[3]]),
        ]
        return decomp_ops
class QubitSum(Operation):
    r"""QubitSum(wires)
    Apply a ``QubitSum`` operation on three input wires.

    This operation performs the transformation:

    .. math::
        |a\rangle |b\rangle |c\rangle \rightarrow |a\rangle |b\rangle |a\oplus b\oplus c\rangle

    .. figure:: ../../_static/ops/QubitSum.svg
        :align: center
        :width: 40%
        :target: javascript:void(0);

    See `here <https://arxiv.org/abs/quant-ph/0008033v1>`__ for more information.

    **Details:**

    * Number of wires: 3
    * Number of parameters: 0

    Args:
        wires (Sequence[int]): the wires the operation acts on

    **Example**

    The ``QubitSum`` operation maps the state :math:`|010\rangle` to :math:`|011\rangle`, with the
    final wire holding the modulo-two sum of the first two wires:

    .. code-block::

        input_bitstring = (0, 1, 0)

        @qml.qnode(dev)
        def circuit(basis_state):
            qml.BasisState(basis_state, wires = [0, 1, 2])
            qml.QubitSum(wires=[0, 1, 2])
            return qml.probs(wires=[0, 1, 2])

        probs = circuit(input_bitstring)
        probs_indx = np.argwhere(probs == 1).flatten()[0]
        bitstrings = list(itertools.product(range(2), repeat=3))
        output_bitstring = bitstrings[probs_indx]

    The output bitstring is

    >>> output_bitstring
    (0, 1, 1)

    The action of ``QubitSum`` is to add wires ``0``, ``1``, and ``2``. The modulo-two result is
    output in wire ``2``. In this case, :math:`0 \oplus 1 \oplus 0 = 1`, so we have:

    >>> abc_sum = output_bitstring[2]
    >>> abc_sum
    1
    """
    num_wires = 3
    num_params = 0
    # 8x8 permutation matrix realizing |a b c> -> |a, b, a^b^c>
    _mat = np.array(
        [
            [1, 0, 0, 0, 0, 0, 0, 0],
            [0, 1, 0, 0, 0, 0, 0, 0],
            [0, 0, 0, 1, 0, 0, 0, 0],
            [0, 0, 1, 0, 0, 0, 0, 0],
            [0, 0, 0, 0, 0, 1, 0, 0],
            [0, 0, 0, 0, 1, 0, 0, 0],
            [0, 0, 0, 0, 0, 0, 1, 0],
            [0, 0, 0, 0, 0, 0, 0, 1],
        ]
    )

    def label(self, decimals=None, base_label=None):
        # drawn as a sigma symbol in circuit diagrams unless overridden
        return super().label(decimals=decimals, base_label=base_label or "Σ")

    @classmethod
    def _matrix(cls, *params):
        # parameterless gate: always the fixed permutation matrix
        return QubitSum._mat

    @staticmethod
    def decomposition(wires):
        # two CNOTs XOR wires 0 and 1 into wire 2
        decomp_ops = [
            qml.CNOT(wires=[wires[1], wires[2]]),
            qml.CNOT(wires=[wires[0], wires[2]]),
        ]
        return decomp_ops

    def adjoint(self):
        # self-inverse (a permutation of order 2), so the adjoint is itself
        return QubitSum(wires=self.wires)
| 31.342593
| 136
| 0.550812
|
import numpy as np
import pennylane as qml
from pennylane.operation import Operation
class QubitCarry(Operation):
num_wires = 4
num_params = 0
_mat = np.array(
[
[1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1],
[0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0],
[0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0],
]
)
@classmethod
def _matrix(cls, *params):
return QubitCarry._mat
@staticmethod
def decomposition(wires):
decomp_ops = [
qml.Toffoli(wires=wires[1:]),
qml.CNOT(wires=[wires[1], wires[2]]),
qml.Toffoli(wires=[wires[0], wires[2], wires[3]]),
]
return decomp_ops
class QubitSum(Operation):
num_wires = 3
num_params = 0
_mat = np.array(
[
[1, 0, 0, 0, 0, 0, 0, 0],
[0, 1, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 1, 0, 0, 0, 0],
[0, 0, 1, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 1, 0, 0],
[0, 0, 0, 0, 1, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 1, 0],
[0, 0, 0, 0, 0, 0, 0, 1],
]
)
def label(self, decimals=None, base_label=None):
return super().label(decimals=decimals, base_label=base_label or "Σ")
@classmethod
def _matrix(cls, *params):
return QubitSum._mat
@staticmethod
def decomposition(wires):
decomp_ops = [
qml.CNOT(wires=[wires[1], wires[2]]),
qml.CNOT(wires=[wires[0], wires[2]]),
]
return decomp_ops
def adjoint(self):
return QubitSum(wires=self.wires)
| true
| true
|
79065aed458ad131937200dcf31582f0b7102f8a
| 2,644
|
py
|
Python
|
utilities/log_parser/parser_utility.py
|
cihuang123/Next-simulation
|
e8552a5804184b30022d103d47c8728fb242b5bc
|
[
"BSD-3-Clause"
] | null | null | null |
utilities/log_parser/parser_utility.py
|
cihuang123/Next-simulation
|
e8552a5804184b30022d103d47c8728fb242b5bc
|
[
"BSD-3-Clause"
] | null | null | null |
utilities/log_parser/parser_utility.py
|
cihuang123/Next-simulation
|
e8552a5804184b30022d103d47c8728fb242b5bc
|
[
"BSD-3-Clause"
] | 2
|
2021-05-05T14:59:37.000Z
|
2021-06-17T03:19:45.000Z
|
import csv
import itertools
import sys
import re
import math
def get_root_mean_square( mean_square, number):
    """Return the root mean square given an accumulated sum of squares and a sample count."""
    mean = mean_square / number
    return math.sqrt(mean)
def gpsr_tlm_compare(target_arr, answer_arr, lift_off_time, fileobj, csv_header):
    """Compare target GPSR telemetry rows against answer (reference) rows
    matched by sim time, writing one merged CSV row per match to fileobj.

    Both arrays are assumed to be CSV row lists whose row 0 is a header and
    whose column 0 is sim time; columns 2-7 are position/velocity vectors,
    and columns 18-21 carry length/speed error terms (inferred from usage —
    TODO confirm against the log format).

    Returns (flight_sample_count, mean_len_sq, mean_speed_sq) for use with
    get_root_mean_square().
    """
    cache_idx = 0
    sim_data_list = []
    start_flight_idx = 0
    iter_idx = 0
    mean_len_sq = 0.0
    mean_speed_sq = 0.0
    filecusor = csv.writer(fileobj)
    filecusor.writerow(csv_header)
    for target_elem in enumerate(target_arr):
        sim_data_list = []
        iter_idx += 1
        # skip the header row
        if target_elem[0] == 0:
            continue;
        # remember where powered flight begins for the RMS accumulation
        if float(target_elem[1][0]) == lift_off_time:
            start_flight_idx = iter_idx
        # resume the answer scan from the last matched position so the
        # overall pass stays linear in len(answer_arr)
        for answer_elem in enumerate(answer_arr , start = cache_idx):
            cache_idx = answer_elem[0]
            if answer_elem[0] == 0:
                continue;
            # exact sim-time match between target and answer rows
            if abs(float(target_elem[1][0]) - float(answer_elem[1][0])) == 0.0:
                # simtime
                sim_data_list.append(target_elem[1][0])
                # gps sow time
                sim_data_list.append(target_elem[1][1])
                # DM Length (position magnitude from columns 2-4)
                dm_length = math.sqrt(float(answer_elem[1][2])**2 + float(answer_elem[1][3])**2 + float(answer_elem[1][4])**2)
                sim_data_list.append(dm_length)
                # DM SPEED (velocity magnitude from columns 5-7)
                dm_speed = math.sqrt(float(answer_elem[1][5])**2 + float(answer_elem[1][6])**2 + float(answer_elem[1][7])**2)
                sim_data_list.append(dm_speed)
                # DM ABEE
                dm_abee = float(answer_elem[1][10])
                sim_data_list.append(dm_abee)
                # Target Benchmark (DM_GPSR_TLM - target_GPSR_TLM)
                target_length_err = float(answer_elem[1][18]) - float(target_elem[1][18])
                target_speed_err = float(answer_elem[1][19]) - float(target_elem[1][19])
                sim_data_list.append(target_length_err)
                sim_data_list.append(target_speed_err)
                # Answer DM-TLM
                sim_data_list.append(answer_elem[1][20])
                sim_data_list.append(answer_elem[1][21])
                # Target DM-TLM
                sim_data_list.append(target_elem[1][20])
                sim_data_list.append(target_elem[1][21])
                filecusor.writerow(sim_data_list)
                # Root Mean square: only accumulate from lift-off onward
                if iter_idx >= start_flight_idx:
                    mean_len_sq = mean_len_sq + target_length_err**2
                    mean_speed_sq = mean_speed_sq + target_speed_err**2
                break
    return (iter_idx - start_flight_idx), mean_len_sq, mean_speed_sq
| 41.968254
| 127
| 0.580938
|
import csv
import itertools
import sys
import re
import math
def get_root_mean_square( mean_square, number):
return math.sqrt(mean_square / number)
def gpsr_tlm_compare(target_arr, answer_arr, lift_off_time, fileobj, csv_header):
cache_idx = 0
sim_data_list = []
start_flight_idx = 0
iter_idx = 0
mean_len_sq = 0.0
mean_speed_sq = 0.0
filecusor = csv.writer(fileobj)
filecusor.writerow(csv_header)
for target_elem in enumerate(target_arr):
sim_data_list = []
iter_idx += 1
if target_elem[0] == 0:
continue;
if float(target_elem[1][0]) == lift_off_time:
start_flight_idx = iter_idx
for answer_elem in enumerate(answer_arr , start = cache_idx):
cache_idx = answer_elem[0]
if answer_elem[0] == 0:
continue;
if abs(float(target_elem[1][0]) - float(answer_elem[1][0])) == 0.0:
sim_data_list.append(target_elem[1][0])
sim_data_list.append(target_elem[1][1])
dm_length = math.sqrt(float(answer_elem[1][2])**2 + float(answer_elem[1][3])**2 + float(answer_elem[1][4])**2)
sim_data_list.append(dm_length)
dm_speed = math.sqrt(float(answer_elem[1][5])**2 + float(answer_elem[1][6])**2 + float(answer_elem[1][7])**2)
sim_data_list.append(dm_speed)
dm_abee = float(answer_elem[1][10])
sim_data_list.append(dm_abee)
target_length_err = float(answer_elem[1][18]) - float(target_elem[1][18])
target_speed_err = float(answer_elem[1][19]) - float(target_elem[1][19])
sim_data_list.append(target_length_err)
sim_data_list.append(target_speed_err)
sim_data_list.append(answer_elem[1][20])
sim_data_list.append(answer_elem[1][21])
sim_data_list.append(target_elem[1][20])
sim_data_list.append(target_elem[1][21])
filecusor.writerow(sim_data_list)
if iter_idx >= start_flight_idx:
mean_len_sq = mean_len_sq + target_length_err**2
mean_speed_sq = mean_speed_sq + target_speed_err**2
break
return (iter_idx - start_flight_idx), mean_len_sq, mean_speed_sq
| true
| true
|
79065b39026273c110873050fd38dd038152970a
| 1,085
|
py
|
Python
|
release/stubs.min/Autodesk/Revit/DB/__init___parts/Domain.py
|
htlcnn/ironpython-stubs
|
780d829e2104b2789d5f4d6f32b0ec9f2930ca03
|
[
"MIT"
] | 182
|
2017-06-27T02:26:15.000Z
|
2022-03-30T18:53:43.000Z
|
release/stubs.min/Autodesk/Revit/DB/__init___parts/Domain.py
|
htlcnn/ironpython-stubs
|
780d829e2104b2789d5f4d6f32b0ec9f2930ca03
|
[
"MIT"
] | 28
|
2017-06-27T13:38:23.000Z
|
2022-03-15T11:19:44.000Z
|
release/stubs.min/Autodesk/Revit/DB/__init___parts/Domain.py
|
htlcnn/ironpython-stubs
|
780d829e2104b2789d5f4d6f32b0ec9f2930ca03
|
[
"MIT"
] | 67
|
2017-06-28T09:43:59.000Z
|
2022-03-20T21:17:10.000Z
|
class Domain(Enum,IComparable,IFormattable,IConvertible):
    """Stub for the .NET ``Domain`` enumeration of connector domain types.

    enum Domain, values: DomainCableTrayConduit (4), DomainElectrical (2),
    DomainHvac (1), DomainPiping (3), DomainUndefined (0)

    All method bodies are empty: this class exists only so static analysers
    can resolve names; the real implementation lives in the .NET runtime.
    """

    def __eq__(self, *args):
        """x.__eq__(y) <==> x==y"""
        pass

    def __format__(self, *args):
        """__format__(formattable: IFormattable, format: str) -> str"""
        pass

    def __ge__(self, *args):
        pass

    def __gt__(self, *args):
        pass

    def __init__(self, *args):
        """x.__init__(...) initializes x; see x.__class__.__doc__ for signature"""
        pass

    def __le__(self, *args):
        pass

    def __lt__(self, *args):
        pass

    def __ne__(self, *args):
        pass

    def __reduce_ex__(self, *args):
        pass

    def __str__(self, *args):
        pass

    # Enumeration members and the raw backing value; populated by the CLR at
    # runtime, left as None placeholders in this stub.
    DomainCableTrayConduit = None
    DomainElectrical = None
    DomainHvac = None
    DomainPiping = None
    DomainUndefined = None
    value__ = None
| 27.820513
| 215
| 0.684793
|
class Domain(Enum,IComparable,IFormattable,IConvertible):
    """Analysis stub for the Autodesk Revit ``Domain`` .NET enum."""

    # Rich-comparison / formatting / pickling hooks inherited from System.Enum.
    # Bodies are intentionally empty — the stub only declares the interface.
    def __eq__(self, *args):
        pass

    def __format__(self, *args):
        pass

    def __ge__(self, *args):
        pass

    def __gt__(self, *args):
        pass

    def __init__(self, *args):
        pass

    def __le__(self, *args):
        pass

    def __lt__(self, *args):
        pass

    def __ne__(self, *args):
        pass

    def __reduce_ex__(self, *args):
        pass

    def __str__(self, *args):
        pass

    # Enum members plus the CLR backing field; None placeholders in the stub.
    DomainCableTrayConduit = None
    DomainElectrical = None
    DomainHvac = None
    DomainPiping = None
    DomainUndefined = None
    value__ = None
| true
| true
|
79065bcf6ad565741783848b2deb3f3fe40685e3
| 18,818
|
py
|
Python
|
log_complete/model_160.py
|
LoLab-VU/Bayesian_Inference_of_Network_Dynamics
|
54a5ef7e868be34289836bbbb024a2963c0c9c86
|
[
"MIT"
] | null | null | null |
log_complete/model_160.py
|
LoLab-VU/Bayesian_Inference_of_Network_Dynamics
|
54a5ef7e868be34289836bbbb024a2963c0c9c86
|
[
"MIT"
] | null | null | null |
log_complete/model_160.py
|
LoLab-VU/Bayesian_Inference_of_Network_Dynamics
|
54a5ef7e868be34289836bbbb024a2963c0c9c86
|
[
"MIT"
] | null | null | null |
# exported from PySB model 'model'
# Auto-generated PySB model definition. The declarations below attach, in
# order, to the implicit global Model() instance: species (Monomer), rate and
# initial-amount constants (Parameter), readouts (Observable), reaction rules
# (Rule) and starting species counts (Initial).
# NOTE(review): the monomer names (Ligand/Receptor/Fadd/C8/Bid/Bax/Smac/
# CytoC/Apaf/Xiap/C3/C6/C9/Parp) suggest an extrinsic-apoptosis cascade —
# inferred from naming only; confirm against the exporting model.
from pysb import Model, Monomer, Parameter, Expression, Compartment, Rule, Observable, Initial, MatchOnce, Annotation, ANY, WILD
# Create the model; subsequent component constructors register with it.
Model()
# Species definitions: the list argument names each monomer's binding sites.
Monomer('Ligand', ['Receptor'])
Monomer('ParpU', ['C3A'])
Monomer('C8A', ['BidU', 'C3pro'])
Monomer('SmacM', ['BaxA'])
Monomer('BaxM', ['BidM', 'BaxA'])
Monomer('Apop', ['C3pro', 'Xiap'])
Monomer('Fadd', ['Receptor', 'C8pro'])
Monomer('SmacC', ['Xiap'])
Monomer('ParpC')
Monomer('Xiap', ['SmacC', 'Apop', 'C3A'])
Monomer('C9')
Monomer('C3ub')
Monomer('C8pro', ['Fadd', 'C6A'])
Monomer('C6A', ['C8pro'])
Monomer('C3pro', ['Apop', 'C8A'])
Monomer('CytoCM', ['BaxA'])
Monomer('CytoCC')
# BaxA carries two self-association sites (BaxA_1/BaxA_2), used by the pore
# formation rules below to chain BaxA monomers into ring complexes.
Monomer('BaxA', ['BaxM', 'BaxA_1', 'BaxA_2', 'SmacM', 'CytoCM'])
Monomer('ApafI')
Monomer('BidU', ['C8A'])
Monomer('BidT')
Monomer('C3A', ['Xiap', 'ParpU', 'C6pro'])
Monomer('ApafA')
Monomer('BidM', ['BaxM'])
Monomer('Receptor', ['Ligand', 'Fadd'])
Monomer('C6pro', ['C3A'])
# Kinetic rate constants, one per rule step. The suffix encodes the role:
# _2kf = bimolecular forward, _1kr = unimolecular reverse, _1kc = catalytic.
# All values are placeholders (1.0) — presumably meant to be estimated/fit
# downstream; confirm with the calibration pipeline.
Parameter('bind_0_Ligand_binder_Receptor_binder_target_2kf', 1.0)
Parameter('bind_0_Ligand_binder_Receptor_binder_target_1kr', 1.0)
Parameter('bind_0_Receptor_binder_Fadd_binder_target_2kf', 1.0)
Parameter('bind_0_Receptor_binder_Fadd_binder_target_1kr', 1.0)
Parameter('substrate_binding_0_Fadd_catalyzer_C8pro_substrate_2kf', 1.0)
Parameter('substrate_binding_0_Fadd_catalyzer_C8pro_substrate_1kr', 1.0)
Parameter('catalytic_step_0_Fadd_catalyzer_C8pro_substrate_C8A_product_1kc', 1.0)
Parameter('catalysis_0_C8A_catalyzer_BidU_substrate_BidT_product_2kf', 1.0)
Parameter('catalysis_0_C8A_catalyzer_BidU_substrate_BidT_product_1kr', 1.0)
Parameter('catalysis_1_C8A_catalyzer_BidU_substrate_BidT_product_1kc', 1.0)
Parameter('conversion_0_CytoCC_subunit_d_ApafI_subunit_c_ApafA_complex_2kf', 1.0)
Parameter('conversion_0_CytoCC_subunit_d_ApafI_subunit_c_ApafA_complex_1kr', 1.0)
Parameter('inhibition_0_SmacC_inhibitor_Xiap_inh_target_2kf', 1.0)
Parameter('inhibition_0_SmacC_inhibitor_Xiap_inh_target_1kr', 1.0)
Parameter('conversion_0_C9_subunit_d_ApafA_subunit_c_Apop_complex_2kf', 1.0)
Parameter('conversion_0_C9_subunit_d_ApafA_subunit_c_Apop_complex_1kr', 1.0)
Parameter('catalysis_0_Apop_catalyzer_C3pro_substrate_C3A_product_2kf', 1.0)
Parameter('catalysis_0_Apop_catalyzer_C3pro_substrate_C3A_product_1kr', 1.0)
Parameter('catalysis_1_Apop_catalyzer_C3pro_substrate_C3A_product_1kc', 1.0)
Parameter('inhibition_0_Xiap_inhibitor_Apop_inh_target_2kf', 1.0)
Parameter('inhibition_0_Xiap_inhibitor_Apop_inh_target_1kr', 1.0)
Parameter('catalysis_0_Xiap_catalyzer_C3A_substrate_C3ub_product_2kf', 1.0)
Parameter('catalysis_0_Xiap_catalyzer_C3A_substrate_C3ub_product_1kr', 1.0)
Parameter('catalysis_1_Xiap_catalyzer_C3A_substrate_C3ub_product_1kc', 1.0)
Parameter('catalysis_0_C3A_catalyzer_ParpU_substrate_ParpC_product_2kf', 1.0)
Parameter('catalysis_0_C3A_catalyzer_ParpU_substrate_ParpC_product_1kr', 1.0)
Parameter('catalysis_1_C3A_catalyzer_ParpU_substrate_ParpC_product_1kc', 1.0)
Parameter('equilibration_0_BidT_equil_a_BidM_equil_b_1kf', 1.0)
Parameter('equilibration_0_BidT_equil_a_BidM_equil_b_1kr', 1.0)
Parameter('catalysis_0_BidM_catalyzer_BaxM_substrate_BaxA_product_2kf', 1.0)
Parameter('catalysis_0_BidM_catalyzer_BaxM_substrate_BaxA_product_1kr', 1.0)
Parameter('catalysis_1_BidM_catalyzer_BaxM_substrate_BaxA_product_1kc', 1.0)
Parameter('self_catalyze_0_BaxA_self_catalyzer_BaxM_self_substrate_2kf', 1.0)
Parameter('self_catalyze_0_BaxA_self_catalyzer_BaxM_self_substrate_1kr', 1.0)
Parameter('self_catalyze_1_BaxA_self_catalyzer_BaxM_self_substrate_1kc', 1.0)
Parameter('pore_formation_0_BaxA_pore_2kf', 1.0)
Parameter('pore_formation_0_BaxA_pore_1kr', 1.0)
Parameter('pore_formation_1_BaxA_pore_2kf', 1.0)
Parameter('pore_formation_1_BaxA_pore_1kr', 1.0)
Parameter('pore_formation_2_BaxA_pore_2kf', 1.0)
Parameter('pore_formation_2_BaxA_pore_1kr', 1.0)
Parameter('transport_0_BaxA_pore_SmacM_cargo_M_SmacC_cargo_C_2kf', 1.0)
Parameter('transport_0_BaxA_pore_SmacM_cargo_M_SmacC_cargo_C_1kr', 1.0)
Parameter('transport_1_BaxA_pore_SmacM_cargo_M_SmacC_cargo_C_1kc', 1.0)
Parameter('transport_0_BaxA_pore_CytoCM_cargo_M_CytoCC_cargo_C_2kf', 1.0)
Parameter('transport_0_BaxA_pore_CytoCM_cargo_M_CytoCC_cargo_C_1kr', 1.0)
Parameter('transport_1_BaxA_pore_CytoCM_cargo_M_CytoCC_cargo_C_1kc', 1.0)
Parameter('catalysis_0_C8A_catalyzer_C3pro_substrate_C3A_product_2kf', 1.0)
Parameter('catalysis_0_C8A_catalyzer_C3pro_substrate_C3A_product_1kr', 1.0)
Parameter('catalysis_1_C8A_catalyzer_C3pro_substrate_C3A_product_1kc', 1.0)
Parameter('catalysis_0_C3A_catalyzer_C6pro_substrate_C6A_product_2kf', 1.0)
Parameter('catalysis_0_C3A_catalyzer_C6pro_substrate_C6A_product_1kr', 1.0)
Parameter('catalysis_1_C3A_catalyzer_C6pro_substrate_C6A_product_1kc', 1.0)
Parameter('catalysis_0_C6A_catalyzer_C8pro_substrate_C8A_product_2kf', 1.0)
Parameter('catalysis_0_C6A_catalyzer_C8pro_substrate_C8A_product_1kr', 1.0)
Parameter('catalysis_1_C6A_catalyzer_C8pro_substrate_C8A_product_1kc', 1.0)
# Initial copy numbers, one '<species>_0' parameter per monomer; consumed by
# the Initial(...) declarations at the end of the file.
Parameter('Ligand_0', 1000.0)
Parameter('ParpU_0', 1000000.0)
Parameter('C8A_0', 0.0)
Parameter('SmacM_0', 100000.0)
Parameter('BaxM_0', 40000.0)
Parameter('Apop_0', 0.0)
Parameter('Fadd_0', 130000.0)
Parameter('SmacC_0', 0.0)
Parameter('ParpC_0', 0.0)
Parameter('Xiap_0', 40000.0)
Parameter('C9_0', 100000.0)
Parameter('C3ub_0', 0.0)
Parameter('C8pro_0', 130000.0)
Parameter('C6A_0', 0.0)
Parameter('C3pro_0', 21000.0)
Parameter('CytoCM_0', 500000.0)
Parameter('CytoCC_0', 0.0)
Parameter('BaxA_0', 0.0)
Parameter('ApafI_0', 100000.0)
Parameter('BidU_0', 171000.0)
Parameter('BidT_0', 0.0)
Parameter('C3A_0', 0.0)
Parameter('ApafA_0', 0.0)
Parameter('BidM_0', 0.0)
Parameter('Receptor_0', 100.0)
Parameter('C6pro_0', 100.0)
# One Observable per monomer; the bare pattern (e.g. Ligand()) matches the
# species in any binding state, so each readout is a total copy number.
Observable('Ligand_obs', Ligand())
Observable('ParpU_obs', ParpU())
Observable('C8A_obs', C8A())
Observable('SmacM_obs', SmacM())
Observable('BaxM_obs', BaxM())
Observable('Apop_obs', Apop())
Observable('Fadd_obs', Fadd())
Observable('SmacC_obs', SmacC())
Observable('ParpC_obs', ParpC())
Observable('Xiap_obs', Xiap())
Observable('C9_obs', C9())
Observable('C3ub_obs', C3ub())
Observable('C8pro_obs', C8pro())
Observable('C6A_obs', C6A())
Observable('C3pro_obs', C3pro())
Observable('CytoCM_obs', CytoCM())
Observable('CytoCC_obs', CytoCC())
Observable('BaxA_obs', BaxA())
Observable('ApafI_obs', ApafI())
Observable('BidU_obs', BidU())
Observable('BidT_obs', BidT())
Observable('C3A_obs', C3A())
Observable('ApafA_obs', ApafA())
Observable('BidM_obs', BidM())
Observable('Receptor_obs', Receptor())
Observable('C6pro_obs', C6pro())
# Reaction rules. PySB pattern syntax: '|' declares a reversible rule,
# '>>' an irreversible one; '%' joins monomers into a bound complex and a
# shared integer on two sites is a bond id; ANY requires the site be bound
# to something unspecified.
# Receptor engagement and caspase-8 activation chain.
Rule('bind_0_Ligand_binder_Receptor_binder_target', Ligand(Receptor=None) + Receptor(Ligand=None, Fadd=None) | Ligand(Receptor=1) % Receptor(Ligand=1, Fadd=None), bind_0_Ligand_binder_Receptor_binder_target_2kf, bind_0_Ligand_binder_Receptor_binder_target_1kr)
Rule('bind_0_Receptor_binder_Fadd_binder_target', Receptor(Ligand=ANY, Fadd=None) + Fadd(Receptor=None, C8pro=None) | Receptor(Ligand=ANY, Fadd=1) % Fadd(Receptor=1, C8pro=None), bind_0_Receptor_binder_Fadd_binder_target_2kf, bind_0_Receptor_binder_Fadd_binder_target_1kr)
Rule('substrate_binding_0_Fadd_catalyzer_C8pro_substrate', Fadd(Receptor=ANY, C8pro=None) + C8pro(Fadd=None, C6A=None) | Fadd(Receptor=ANY, C8pro=1) % C8pro(Fadd=1, C6A=None), substrate_binding_0_Fadd_catalyzer_C8pro_substrate_2kf, substrate_binding_0_Fadd_catalyzer_C8pro_substrate_1kr)
Rule('catalytic_step_0_Fadd_catalyzer_C8pro_substrate_C8A_product', Fadd(Receptor=ANY, C8pro=1) % C8pro(Fadd=1, C6A=None) >> Fadd(Receptor=ANY, C8pro=None) + C8A(BidU=None, C3pro=None), catalytic_step_0_Fadd_catalyzer_C8pro_substrate_C8A_product_1kc)
Rule('catalysis_0_C8A_catalyzer_BidU_substrate_BidT_product', C8A(BidU=None, C3pro=None) + BidU(C8A=None) | C8A(BidU=1, C3pro=None) % BidU(C8A=1), catalysis_0_C8A_catalyzer_BidU_substrate_BidT_product_2kf, catalysis_0_C8A_catalyzer_BidU_substrate_BidT_product_1kr)
Rule('catalysis_1_C8A_catalyzer_BidU_substrate_BidT_product', C8A(BidU=1, C3pro=None) % BidU(C8A=1) >> C8A(BidU=None, C3pro=None) + BidT(), catalysis_1_C8A_catalyzer_BidU_substrate_BidT_product_1kc)
# Apoptosome assembly and its regulation by Xiap/Smac.
Rule('conversion_0_CytoCC_subunit_d_ApafI_subunit_c_ApafA_complex', ApafI() + CytoCC() | ApafA(), conversion_0_CytoCC_subunit_d_ApafI_subunit_c_ApafA_complex_2kf, conversion_0_CytoCC_subunit_d_ApafI_subunit_c_ApafA_complex_1kr)
Rule('inhibition_0_SmacC_inhibitor_Xiap_inh_target', SmacC(Xiap=None) + Xiap(SmacC=None, Apop=None, C3A=None) | SmacC(Xiap=1) % Xiap(SmacC=1, Apop=None, C3A=None), inhibition_0_SmacC_inhibitor_Xiap_inh_target_2kf, inhibition_0_SmacC_inhibitor_Xiap_inh_target_1kr)
Rule('conversion_0_C9_subunit_d_ApafA_subunit_c_Apop_complex', ApafA() + C9() | Apop(C3pro=None, Xiap=None), conversion_0_C9_subunit_d_ApafA_subunit_c_Apop_complex_2kf, conversion_0_C9_subunit_d_ApafA_subunit_c_Apop_complex_1kr)
Rule('catalysis_0_Apop_catalyzer_C3pro_substrate_C3A_product', Apop(C3pro=None, Xiap=None) + C3pro(Apop=None, C8A=None) | Apop(C3pro=1, Xiap=None) % C3pro(Apop=1, C8A=None), catalysis_0_Apop_catalyzer_C3pro_substrate_C3A_product_2kf, catalysis_0_Apop_catalyzer_C3pro_substrate_C3A_product_1kr)
Rule('catalysis_1_Apop_catalyzer_C3pro_substrate_C3A_product', Apop(C3pro=1, Xiap=None) % C3pro(Apop=1, C8A=None) >> Apop(C3pro=None, Xiap=None) + C3A(Xiap=None, ParpU=None, C6pro=None), catalysis_1_Apop_catalyzer_C3pro_substrate_C3A_product_1kc)
Rule('inhibition_0_Xiap_inhibitor_Apop_inh_target', Xiap(SmacC=None, Apop=None, C3A=None) + Apop(C3pro=None, Xiap=None) | Xiap(SmacC=None, Apop=1, C3A=None) % Apop(C3pro=None, Xiap=1), inhibition_0_Xiap_inhibitor_Apop_inh_target_2kf, inhibition_0_Xiap_inhibitor_Apop_inh_target_1kr)
Rule('catalysis_0_Xiap_catalyzer_C3A_substrate_C3ub_product', Xiap(SmacC=None, Apop=None, C3A=None) + C3A(Xiap=None, ParpU=None, C6pro=None) | Xiap(SmacC=None, Apop=None, C3A=1) % C3A(Xiap=1, ParpU=None, C6pro=None), catalysis_0_Xiap_catalyzer_C3A_substrate_C3ub_product_2kf, catalysis_0_Xiap_catalyzer_C3A_substrate_C3ub_product_1kr)
Rule('catalysis_1_Xiap_catalyzer_C3A_substrate_C3ub_product', Xiap(SmacC=None, Apop=None, C3A=1) % C3A(Xiap=1, ParpU=None, C6pro=None) >> Xiap(SmacC=None, Apop=None, C3A=None) + C3ub(), catalysis_1_Xiap_catalyzer_C3A_substrate_C3ub_product_1kc)
Rule('catalysis_0_C3A_catalyzer_ParpU_substrate_ParpC_product', C3A(Xiap=None, ParpU=None, C6pro=None) + ParpU(C3A=None) | C3A(Xiap=None, ParpU=1, C6pro=None) % ParpU(C3A=1), catalysis_0_C3A_catalyzer_ParpU_substrate_ParpC_product_2kf, catalysis_0_C3A_catalyzer_ParpU_substrate_ParpC_product_1kr)
Rule('catalysis_1_C3A_catalyzer_ParpU_substrate_ParpC_product', C3A(Xiap=None, ParpU=1, C6pro=None) % ParpU(C3A=1) >> C3A(Xiap=None, ParpU=None, C6pro=None) + ParpC(), catalysis_1_C3A_catalyzer_ParpU_substrate_ParpC_product_1kc)
# Bid translocation, Bax activation and stepwise tetramer (pore) assembly.
Rule('equilibration_0_BidT_equil_a_BidM_equil_b', BidT() | BidM(BaxM=None), equilibration_0_BidT_equil_a_BidM_equil_b_1kf, equilibration_0_BidT_equil_a_BidM_equil_b_1kr)
Rule('catalysis_0_BidM_catalyzer_BaxM_substrate_BaxA_product', BidM(BaxM=None) + BaxM(BidM=None, BaxA=None) | BidM(BaxM=1) % BaxM(BidM=1, BaxA=None), catalysis_0_BidM_catalyzer_BaxM_substrate_BaxA_product_2kf, catalysis_0_BidM_catalyzer_BaxM_substrate_BaxA_product_1kr)
Rule('catalysis_1_BidM_catalyzer_BaxM_substrate_BaxA_product', BidM(BaxM=1) % BaxM(BidM=1, BaxA=None) >> BidM(BaxM=None) + BaxA(BaxM=None, BaxA_1=None, BaxA_2=None, SmacM=None, CytoCM=None), catalysis_1_BidM_catalyzer_BaxM_substrate_BaxA_product_1kc)
Rule('self_catalyze_0_BaxA_self_catalyzer_BaxM_self_substrate', BaxA(BaxM=None, BaxA_1=None, BaxA_2=None, SmacM=None, CytoCM=None) + BaxM(BidM=None, BaxA=None) | BaxA(BaxM=1, BaxA_1=None, BaxA_2=None, SmacM=None, CytoCM=None) % BaxM(BidM=None, BaxA=1), self_catalyze_0_BaxA_self_catalyzer_BaxM_self_substrate_2kf, self_catalyze_0_BaxA_self_catalyzer_BaxM_self_substrate_1kr)
Rule('self_catalyze_1_BaxA_self_catalyzer_BaxM_self_substrate', BaxA(BaxM=1, BaxA_1=None, BaxA_2=None, SmacM=None, CytoCM=None) % BaxM(BidM=None, BaxA=1) >> BaxA(BaxM=None, BaxA_1=None, BaxA_2=None, SmacM=None, CytoCM=None) + BaxA(BaxM=None, BaxA_1=None, BaxA_2=None, SmacM=None, CytoCM=None), self_catalyze_1_BaxA_self_catalyzer_BaxM_self_substrate_1kc)
Rule('pore_formation_0_BaxA_pore', BaxA(BaxM=None, BaxA_1=None, BaxA_2=None, SmacM=None, CytoCM=None) + BaxA(BaxM=None, BaxA_1=None, BaxA_2=None, SmacM=None, CytoCM=None) | BaxA(BaxM=None, BaxA_1=None, BaxA_2=1, SmacM=None, CytoCM=None) % BaxA(BaxM=None, BaxA_1=1, BaxA_2=None, SmacM=None, CytoCM=None), pore_formation_0_BaxA_pore_2kf, pore_formation_0_BaxA_pore_1kr)
Rule('pore_formation_1_BaxA_pore', BaxA(BaxM=None, BaxA_1=None, BaxA_2=None, SmacM=None, CytoCM=None) + BaxA(BaxM=None, BaxA_1=None, BaxA_2=1, SmacM=None, CytoCM=None) % BaxA(BaxM=None, BaxA_1=1, BaxA_2=None, SmacM=None, CytoCM=None) | BaxA(BaxM=None, BaxA_1=3, BaxA_2=1, SmacM=None, CytoCM=None) % BaxA(BaxM=None, BaxA_1=1, BaxA_2=2, SmacM=None, CytoCM=None) % BaxA(BaxM=None, BaxA_1=2, BaxA_2=3, SmacM=None, CytoCM=None), pore_formation_1_BaxA_pore_2kf, pore_formation_1_BaxA_pore_1kr)
Rule('pore_formation_2_BaxA_pore', BaxA(BaxM=None, BaxA_1=None, BaxA_2=None, SmacM=None, CytoCM=None) + BaxA(BaxM=None, BaxA_1=3, BaxA_2=1, SmacM=None, CytoCM=None) % BaxA(BaxM=None, BaxA_1=1, BaxA_2=2, SmacM=None, CytoCM=None) % BaxA(BaxM=None, BaxA_1=2, BaxA_2=3, SmacM=None, CytoCM=None) | BaxA(BaxM=None, BaxA_1=4, BaxA_2=1, SmacM=None, CytoCM=None) % BaxA(BaxM=None, BaxA_1=1, BaxA_2=2, SmacM=None, CytoCM=None) % BaxA(BaxM=None, BaxA_1=2, BaxA_2=3, SmacM=None, CytoCM=None) % BaxA(BaxM=None, BaxA_1=3, BaxA_2=4, SmacM=None, CytoCM=None), pore_formation_2_BaxA_pore_2kf, pore_formation_2_BaxA_pore_1kr)
# Cargo transport through the assembled 4-mer pore (SmacM->SmacC, CytoCM->CytoCC).
Rule('transport_0_BaxA_pore_SmacM_cargo_M_SmacC_cargo_C', BaxA(BaxM=None, BaxA_1=4, BaxA_2=1, SmacM=None, CytoCM=None) % BaxA(BaxM=None, BaxA_1=1, BaxA_2=2, SmacM=None, CytoCM=None) % BaxA(BaxM=None, BaxA_1=2, BaxA_2=3, SmacM=None, CytoCM=None) % BaxA(BaxM=None, BaxA_1=3, BaxA_2=4, SmacM=None, CytoCM=None) + SmacM(BaxA=None) | BaxA(BaxM=None, BaxA_1=4, BaxA_2=1, SmacM=None, CytoCM=None) % BaxA(BaxM=None, BaxA_1=1, BaxA_2=2, SmacM=None, CytoCM=None) % BaxA(BaxM=None, BaxA_1=2, BaxA_2=3, SmacM=None, CytoCM=None) % BaxA(BaxM=None, BaxA_1=3, BaxA_2=4, SmacM=5, CytoCM=None) % SmacM(BaxA=5), transport_0_BaxA_pore_SmacM_cargo_M_SmacC_cargo_C_2kf, transport_0_BaxA_pore_SmacM_cargo_M_SmacC_cargo_C_1kr)
Rule('transport_1_BaxA_pore_SmacM_cargo_M_SmacC_cargo_C', BaxA(BaxM=None, BaxA_1=4, BaxA_2=1, SmacM=None, CytoCM=None) % BaxA(BaxM=None, BaxA_1=1, BaxA_2=2, SmacM=None, CytoCM=None) % BaxA(BaxM=None, BaxA_1=2, BaxA_2=3, SmacM=None, CytoCM=None) % BaxA(BaxM=None, BaxA_1=3, BaxA_2=4, SmacM=5, CytoCM=None) % SmacM(BaxA=5) >> BaxA(BaxM=None, BaxA_1=4, BaxA_2=1, SmacM=None, CytoCM=None) % BaxA(BaxM=None, BaxA_1=1, BaxA_2=2, SmacM=None, CytoCM=None) % BaxA(BaxM=None, BaxA_1=2, BaxA_2=3, SmacM=None, CytoCM=None) % BaxA(BaxM=None, BaxA_1=3, BaxA_2=4, SmacM=None, CytoCM=None) + SmacC(Xiap=None), transport_1_BaxA_pore_SmacM_cargo_M_SmacC_cargo_C_1kc)
Rule('transport_0_BaxA_pore_CytoCM_cargo_M_CytoCC_cargo_C', BaxA(BaxM=None, BaxA_1=4, BaxA_2=1, SmacM=None, CytoCM=None) % BaxA(BaxM=None, BaxA_1=1, BaxA_2=2, SmacM=None, CytoCM=None) % BaxA(BaxM=None, BaxA_1=2, BaxA_2=3, SmacM=None, CytoCM=None) % BaxA(BaxM=None, BaxA_1=3, BaxA_2=4, SmacM=None, CytoCM=None) + CytoCM(BaxA=None) | BaxA(BaxM=None, BaxA_1=4, BaxA_2=1, SmacM=None, CytoCM=None) % BaxA(BaxM=None, BaxA_1=1, BaxA_2=2, SmacM=None, CytoCM=None) % BaxA(BaxM=None, BaxA_1=2, BaxA_2=3, SmacM=None, CytoCM=None) % BaxA(BaxM=None, BaxA_1=3, BaxA_2=4, SmacM=None, CytoCM=5) % CytoCM(BaxA=5), transport_0_BaxA_pore_CytoCM_cargo_M_CytoCC_cargo_C_2kf, transport_0_BaxA_pore_CytoCM_cargo_M_CytoCC_cargo_C_1kr)
Rule('transport_1_BaxA_pore_CytoCM_cargo_M_CytoCC_cargo_C', BaxA(BaxM=None, BaxA_1=4, BaxA_2=1, SmacM=None, CytoCM=None) % BaxA(BaxM=None, BaxA_1=1, BaxA_2=2, SmacM=None, CytoCM=None) % BaxA(BaxM=None, BaxA_1=2, BaxA_2=3, SmacM=None, CytoCM=None) % BaxA(BaxM=None, BaxA_1=3, BaxA_2=4, SmacM=None, CytoCM=5) % CytoCM(BaxA=5) >> BaxA(BaxM=None, BaxA_1=4, BaxA_2=1, SmacM=None, CytoCM=None) % BaxA(BaxM=None, BaxA_1=1, BaxA_2=2, SmacM=None, CytoCM=None) % BaxA(BaxM=None, BaxA_1=2, BaxA_2=3, SmacM=None, CytoCM=None) % BaxA(BaxM=None, BaxA_1=3, BaxA_2=4, SmacM=None, CytoCM=None) + CytoCC(), transport_1_BaxA_pore_CytoCM_cargo_M_CytoCC_cargo_C_1kc)
# Feedback/amplification loop: C8A -> C3A -> C6A -> C8A.
Rule('catalysis_0_C8A_catalyzer_C3pro_substrate_C3A_product', C8A(BidU=None, C3pro=None) + C3pro(Apop=None, C8A=None) | C8A(BidU=None, C3pro=1) % C3pro(Apop=None, C8A=1), catalysis_0_C8A_catalyzer_C3pro_substrate_C3A_product_2kf, catalysis_0_C8A_catalyzer_C3pro_substrate_C3A_product_1kr)
Rule('catalysis_1_C8A_catalyzer_C3pro_substrate_C3A_product', C8A(BidU=None, C3pro=1) % C3pro(Apop=None, C8A=1) >> C8A(BidU=None, C3pro=None) + C3A(Xiap=None, ParpU=None, C6pro=None), catalysis_1_C8A_catalyzer_C3pro_substrate_C3A_product_1kc)
Rule('catalysis_0_C3A_catalyzer_C6pro_substrate_C6A_product', C3A(Xiap=None, ParpU=None, C6pro=None) + C6pro(C3A=None) | C3A(Xiap=None, ParpU=None, C6pro=1) % C6pro(C3A=1), catalysis_0_C3A_catalyzer_C6pro_substrate_C6A_product_2kf, catalysis_0_C3A_catalyzer_C6pro_substrate_C6A_product_1kr)
Rule('catalysis_1_C3A_catalyzer_C6pro_substrate_C6A_product', C3A(Xiap=None, ParpU=None, C6pro=1) % C6pro(C3A=1) >> C3A(Xiap=None, ParpU=None, C6pro=None) + C6A(C8pro=None), catalysis_1_C3A_catalyzer_C6pro_substrate_C6A_product_1kc)
Rule('catalysis_0_C6A_catalyzer_C8pro_substrate_C8A_product', C6A(C8pro=None) + C8pro(Fadd=None, C6A=None) | C6A(C8pro=1) % C8pro(Fadd=None, C6A=1), catalysis_0_C6A_catalyzer_C8pro_substrate_C8A_product_2kf, catalysis_0_C6A_catalyzer_C8pro_substrate_C8A_product_1kr)
Rule('catalysis_1_C6A_catalyzer_C8pro_substrate_C8A_product', C6A(C8pro=1) % C8pro(Fadd=None, C6A=1) >> C6A(C8pro=None) + C8A(BidU=None, C3pro=None), catalysis_1_C6A_catalyzer_C8pro_substrate_C8A_product_1kc)
# Initial conditions: every monomer starts fully unbound, with the copy
# number given by its matching '<name>_0' parameter above.
Initial(Ligand(Receptor=None), Ligand_0)
Initial(ParpU(C3A=None), ParpU_0)
Initial(C8A(BidU=None, C3pro=None), C8A_0)
Initial(SmacM(BaxA=None), SmacM_0)
Initial(BaxM(BidM=None, BaxA=None), BaxM_0)
Initial(Apop(C3pro=None, Xiap=None), Apop_0)
Initial(Fadd(Receptor=None, C8pro=None), Fadd_0)
Initial(SmacC(Xiap=None), SmacC_0)
Initial(ParpC(), ParpC_0)
Initial(Xiap(SmacC=None, Apop=None, C3A=None), Xiap_0)
Initial(C9(), C9_0)
Initial(C3ub(), C3ub_0)
Initial(C8pro(Fadd=None, C6A=None), C8pro_0)
Initial(C6A(C8pro=None), C6A_0)
Initial(C3pro(Apop=None, C8A=None), C3pro_0)
Initial(CytoCM(BaxA=None), CytoCM_0)
Initial(CytoCC(), CytoCC_0)
Initial(BaxA(BaxM=None, BaxA_1=None, BaxA_2=None, SmacM=None, CytoCM=None), BaxA_0)
Initial(ApafI(), ApafI_0)
Initial(BidU(C8A=None), BidU_0)
Initial(BidT(), BidT_0)
Initial(C3A(Xiap=None, ParpU=None, C6pro=None), C3A_0)
Initial(ApafA(), ApafA_0)
Initial(BidM(BaxM=None), BidM_0)
Initial(Receptor(Ligand=None, Fadd=None), Receptor_0)
Initial(C6pro(C3A=None), C6pro_0)
| 91.349515
| 710
| 0.806515
|
from pysb import Model, Monomer, Parameter, Expression, Compartment, Rule, Observable, Initial, MatchOnce, Annotation, ANY, WILD
Model()
Monomer('Ligand', ['Receptor'])
Monomer('ParpU', ['C3A'])
Monomer('C8A', ['BidU', 'C3pro'])
Monomer('SmacM', ['BaxA'])
Monomer('BaxM', ['BidM', 'BaxA'])
Monomer('Apop', ['C3pro', 'Xiap'])
Monomer('Fadd', ['Receptor', 'C8pro'])
Monomer('SmacC', ['Xiap'])
Monomer('ParpC')
Monomer('Xiap', ['SmacC', 'Apop', 'C3A'])
Monomer('C9')
Monomer('C3ub')
Monomer('C8pro', ['Fadd', 'C6A'])
Monomer('C6A', ['C8pro'])
Monomer('C3pro', ['Apop', 'C8A'])
Monomer('CytoCM', ['BaxA'])
Monomer('CytoCC')
Monomer('BaxA', ['BaxM', 'BaxA_1', 'BaxA_2', 'SmacM', 'CytoCM'])
Monomer('ApafI')
Monomer('BidU', ['C8A'])
Monomer('BidT')
Monomer('C3A', ['Xiap', 'ParpU', 'C6pro'])
Monomer('ApafA')
Monomer('BidM', ['BaxM'])
Monomer('Receptor', ['Ligand', 'Fadd'])
Monomer('C6pro', ['C3A'])
Parameter('bind_0_Ligand_binder_Receptor_binder_target_2kf', 1.0)
Parameter('bind_0_Ligand_binder_Receptor_binder_target_1kr', 1.0)
Parameter('bind_0_Receptor_binder_Fadd_binder_target_2kf', 1.0)
Parameter('bind_0_Receptor_binder_Fadd_binder_target_1kr', 1.0)
Parameter('substrate_binding_0_Fadd_catalyzer_C8pro_substrate_2kf', 1.0)
Parameter('substrate_binding_0_Fadd_catalyzer_C8pro_substrate_1kr', 1.0)
Parameter('catalytic_step_0_Fadd_catalyzer_C8pro_substrate_C8A_product_1kc', 1.0)
Parameter('catalysis_0_C8A_catalyzer_BidU_substrate_BidT_product_2kf', 1.0)
Parameter('catalysis_0_C8A_catalyzer_BidU_substrate_BidT_product_1kr', 1.0)
Parameter('catalysis_1_C8A_catalyzer_BidU_substrate_BidT_product_1kc', 1.0)
Parameter('conversion_0_CytoCC_subunit_d_ApafI_subunit_c_ApafA_complex_2kf', 1.0)
Parameter('conversion_0_CytoCC_subunit_d_ApafI_subunit_c_ApafA_complex_1kr', 1.0)
Parameter('inhibition_0_SmacC_inhibitor_Xiap_inh_target_2kf', 1.0)
Parameter('inhibition_0_SmacC_inhibitor_Xiap_inh_target_1kr', 1.0)
Parameter('conversion_0_C9_subunit_d_ApafA_subunit_c_Apop_complex_2kf', 1.0)
Parameter('conversion_0_C9_subunit_d_ApafA_subunit_c_Apop_complex_1kr', 1.0)
Parameter('catalysis_0_Apop_catalyzer_C3pro_substrate_C3A_product_2kf', 1.0)
Parameter('catalysis_0_Apop_catalyzer_C3pro_substrate_C3A_product_1kr', 1.0)
Parameter('catalysis_1_Apop_catalyzer_C3pro_substrate_C3A_product_1kc', 1.0)
Parameter('inhibition_0_Xiap_inhibitor_Apop_inh_target_2kf', 1.0)
Parameter('inhibition_0_Xiap_inhibitor_Apop_inh_target_1kr', 1.0)
Parameter('catalysis_0_Xiap_catalyzer_C3A_substrate_C3ub_product_2kf', 1.0)
Parameter('catalysis_0_Xiap_catalyzer_C3A_substrate_C3ub_product_1kr', 1.0)
Parameter('catalysis_1_Xiap_catalyzer_C3A_substrate_C3ub_product_1kc', 1.0)
Parameter('catalysis_0_C3A_catalyzer_ParpU_substrate_ParpC_product_2kf', 1.0)
Parameter('catalysis_0_C3A_catalyzer_ParpU_substrate_ParpC_product_1kr', 1.0)
Parameter('catalysis_1_C3A_catalyzer_ParpU_substrate_ParpC_product_1kc', 1.0)
Parameter('equilibration_0_BidT_equil_a_BidM_equil_b_1kf', 1.0)
Parameter('equilibration_0_BidT_equil_a_BidM_equil_b_1kr', 1.0)
Parameter('catalysis_0_BidM_catalyzer_BaxM_substrate_BaxA_product_2kf', 1.0)
Parameter('catalysis_0_BidM_catalyzer_BaxM_substrate_BaxA_product_1kr', 1.0)
Parameter('catalysis_1_BidM_catalyzer_BaxM_substrate_BaxA_product_1kc', 1.0)
Parameter('self_catalyze_0_BaxA_self_catalyzer_BaxM_self_substrate_2kf', 1.0)
Parameter('self_catalyze_0_BaxA_self_catalyzer_BaxM_self_substrate_1kr', 1.0)
Parameter('self_catalyze_1_BaxA_self_catalyzer_BaxM_self_substrate_1kc', 1.0)
Parameter('pore_formation_0_BaxA_pore_2kf', 1.0)
Parameter('pore_formation_0_BaxA_pore_1kr', 1.0)
Parameter('pore_formation_1_BaxA_pore_2kf', 1.0)
Parameter('pore_formation_1_BaxA_pore_1kr', 1.0)
Parameter('pore_formation_2_BaxA_pore_2kf', 1.0)
Parameter('pore_formation_2_BaxA_pore_1kr', 1.0)
Parameter('transport_0_BaxA_pore_SmacM_cargo_M_SmacC_cargo_C_2kf', 1.0)
Parameter('transport_0_BaxA_pore_SmacM_cargo_M_SmacC_cargo_C_1kr', 1.0)
Parameter('transport_1_BaxA_pore_SmacM_cargo_M_SmacC_cargo_C_1kc', 1.0)
Parameter('transport_0_BaxA_pore_CytoCM_cargo_M_CytoCC_cargo_C_2kf', 1.0)
Parameter('transport_0_BaxA_pore_CytoCM_cargo_M_CytoCC_cargo_C_1kr', 1.0)
Parameter('transport_1_BaxA_pore_CytoCM_cargo_M_CytoCC_cargo_C_1kc', 1.0)
Parameter('catalysis_0_C8A_catalyzer_C3pro_substrate_C3A_product_2kf', 1.0)
Parameter('catalysis_0_C8A_catalyzer_C3pro_substrate_C3A_product_1kr', 1.0)
Parameter('catalysis_1_C8A_catalyzer_C3pro_substrate_C3A_product_1kc', 1.0)
Parameter('catalysis_0_C3A_catalyzer_C6pro_substrate_C6A_product_2kf', 1.0)
Parameter('catalysis_0_C3A_catalyzer_C6pro_substrate_C6A_product_1kr', 1.0)
Parameter('catalysis_1_C3A_catalyzer_C6pro_substrate_C6A_product_1kc', 1.0)
Parameter('catalysis_0_C6A_catalyzer_C8pro_substrate_C8A_product_2kf', 1.0)
Parameter('catalysis_0_C6A_catalyzer_C8pro_substrate_C8A_product_1kr', 1.0)
Parameter('catalysis_1_C6A_catalyzer_C8pro_substrate_C8A_product_1kc', 1.0)
Parameter('Ligand_0', 1000.0)
Parameter('ParpU_0', 1000000.0)
Parameter('C8A_0', 0.0)
Parameter('SmacM_0', 100000.0)
Parameter('BaxM_0', 40000.0)
Parameter('Apop_0', 0.0)
Parameter('Fadd_0', 130000.0)
Parameter('SmacC_0', 0.0)
Parameter('ParpC_0', 0.0)
Parameter('Xiap_0', 40000.0)
Parameter('C9_0', 100000.0)
Parameter('C3ub_0', 0.0)
Parameter('C8pro_0', 130000.0)
Parameter('C6A_0', 0.0)
Parameter('C3pro_0', 21000.0)
Parameter('CytoCM_0', 500000.0)
Parameter('CytoCC_0', 0.0)
Parameter('BaxA_0', 0.0)
Parameter('ApafI_0', 100000.0)
Parameter('BidU_0', 171000.0)
Parameter('BidT_0', 0.0)
Parameter('C3A_0', 0.0)
Parameter('ApafA_0', 0.0)
Parameter('BidM_0', 0.0)
Parameter('Receptor_0', 100.0)
Parameter('C6pro_0', 100.0)
Observable('Ligand_obs', Ligand())
Observable('ParpU_obs', ParpU())
Observable('C8A_obs', C8A())
Observable('SmacM_obs', SmacM())
Observable('BaxM_obs', BaxM())
Observable('Apop_obs', Apop())
Observable('Fadd_obs', Fadd())
Observable('SmacC_obs', SmacC())
Observable('ParpC_obs', ParpC())
Observable('Xiap_obs', Xiap())
Observable('C9_obs', C9())
Observable('C3ub_obs', C3ub())
Observable('C8pro_obs', C8pro())
Observable('C6A_obs', C6A())
Observable('C3pro_obs', C3pro())
Observable('CytoCM_obs', CytoCM())
Observable('CytoCC_obs', CytoCC())
Observable('BaxA_obs', BaxA())
Observable('ApafI_obs', ApafI())
Observable('BidU_obs', BidU())
Observable('BidT_obs', BidT())
Observable('C3A_obs', C3A())
Observable('ApafA_obs', ApafA())
Observable('BidM_obs', BidM())
Observable('Receptor_obs', Receptor())
Observable('C6pro_obs', C6pro())
Rule('bind_0_Ligand_binder_Receptor_binder_target', Ligand(Receptor=None) + Receptor(Ligand=None, Fadd=None) | Ligand(Receptor=1) % Receptor(Ligand=1, Fadd=None), bind_0_Ligand_binder_Receptor_binder_target_2kf, bind_0_Ligand_binder_Receptor_binder_target_1kr)
Rule('bind_0_Receptor_binder_Fadd_binder_target', Receptor(Ligand=ANY, Fadd=None) + Fadd(Receptor=None, C8pro=None) | Receptor(Ligand=ANY, Fadd=1) % Fadd(Receptor=1, C8pro=None), bind_0_Receptor_binder_Fadd_binder_target_2kf, bind_0_Receptor_binder_Fadd_binder_target_1kr)
Rule('substrate_binding_0_Fadd_catalyzer_C8pro_substrate', Fadd(Receptor=ANY, C8pro=None) + C8pro(Fadd=None, C6A=None) | Fadd(Receptor=ANY, C8pro=1) % C8pro(Fadd=1, C6A=None), substrate_binding_0_Fadd_catalyzer_C8pro_substrate_2kf, substrate_binding_0_Fadd_catalyzer_C8pro_substrate_1kr)
Rule('catalytic_step_0_Fadd_catalyzer_C8pro_substrate_C8A_product', Fadd(Receptor=ANY, C8pro=1) % C8pro(Fadd=1, C6A=None) >> Fadd(Receptor=ANY, C8pro=None) + C8A(BidU=None, C3pro=None), catalytic_step_0_Fadd_catalyzer_C8pro_substrate_C8A_product_1kc)
Rule('catalysis_0_C8A_catalyzer_BidU_substrate_BidT_product', C8A(BidU=None, C3pro=None) + BidU(C8A=None) | C8A(BidU=1, C3pro=None) % BidU(C8A=1), catalysis_0_C8A_catalyzer_BidU_substrate_BidT_product_2kf, catalysis_0_C8A_catalyzer_BidU_substrate_BidT_product_1kr)
Rule('catalysis_1_C8A_catalyzer_BidU_substrate_BidT_product', C8A(BidU=1, C3pro=None) % BidU(C8A=1) >> C8A(BidU=None, C3pro=None) + BidT(), catalysis_1_C8A_catalyzer_BidU_substrate_BidT_product_1kc)
Rule('conversion_0_CytoCC_subunit_d_ApafI_subunit_c_ApafA_complex', ApafI() + CytoCC() | ApafA(), conversion_0_CytoCC_subunit_d_ApafI_subunit_c_ApafA_complex_2kf, conversion_0_CytoCC_subunit_d_ApafI_subunit_c_ApafA_complex_1kr)
Rule('inhibition_0_SmacC_inhibitor_Xiap_inh_target', SmacC(Xiap=None) + Xiap(SmacC=None, Apop=None, C3A=None) | SmacC(Xiap=1) % Xiap(SmacC=1, Apop=None, C3A=None), inhibition_0_SmacC_inhibitor_Xiap_inh_target_2kf, inhibition_0_SmacC_inhibitor_Xiap_inh_target_1kr)
Rule('conversion_0_C9_subunit_d_ApafA_subunit_c_Apop_complex', ApafA() + C9() | Apop(C3pro=None, Xiap=None), conversion_0_C9_subunit_d_ApafA_subunit_c_Apop_complex_2kf, conversion_0_C9_subunit_d_ApafA_subunit_c_Apop_complex_1kr)
Rule('catalysis_0_Apop_catalyzer_C3pro_substrate_C3A_product', Apop(C3pro=None, Xiap=None) + C3pro(Apop=None, C8A=None) | Apop(C3pro=1, Xiap=None) % C3pro(Apop=1, C8A=None), catalysis_0_Apop_catalyzer_C3pro_substrate_C3A_product_2kf, catalysis_0_Apop_catalyzer_C3pro_substrate_C3A_product_1kr)
Rule('catalysis_1_Apop_catalyzer_C3pro_substrate_C3A_product', Apop(C3pro=1, Xiap=None) % C3pro(Apop=1, C8A=None) >> Apop(C3pro=None, Xiap=None) + C3A(Xiap=None, ParpU=None, C6pro=None), catalysis_1_Apop_catalyzer_C3pro_substrate_C3A_product_1kc)
# --- Auto-generated PySB fragment of an extrinsic-apoptosis model ---
# Each Rule pairs a reversible binding step (|) or irreversible conversion (>>)
# with its rate parameters; Initials set the starting copy numbers.

# XIAP reversibly sequesters the apoptosome (Apop), blocking its activity.
Rule('inhibition_0_Xiap_inhibitor_Apop_inh_target', Xiap(SmacC=None, Apop=None, C3A=None) + Apop(C3pro=None, Xiap=None) | Xiap(SmacC=None, Apop=1, C3A=None) % Apop(C3pro=None, Xiap=1), inhibition_0_Xiap_inhibitor_Apop_inh_target_2kf, inhibition_0_Xiap_inhibitor_Apop_inh_target_1kr)
# XIAP binds active caspase-3 (C3A) and ubiquitinates it (C3A -> C3ub), two-step catalysis.
Rule('catalysis_0_Xiap_catalyzer_C3A_substrate_C3ub_product', Xiap(SmacC=None, Apop=None, C3A=None) + C3A(Xiap=None, ParpU=None, C6pro=None) | Xiap(SmacC=None, Apop=None, C3A=1) % C3A(Xiap=1, ParpU=None, C6pro=None), catalysis_0_Xiap_catalyzer_C3A_substrate_C3ub_product_2kf, catalysis_0_Xiap_catalyzer_C3A_substrate_C3ub_product_1kr)
Rule('catalysis_1_Xiap_catalyzer_C3A_substrate_C3ub_product', Xiap(SmacC=None, Apop=None, C3A=1) % C3A(Xiap=1, ParpU=None, C6pro=None) >> Xiap(SmacC=None, Apop=None, C3A=None) + C3ub(), catalysis_1_Xiap_catalyzer_C3A_substrate_C3ub_product_1kc)
# Active caspase-3 cleaves PARP (ParpU -> ParpC), two-step catalysis.
Rule('catalysis_0_C3A_catalyzer_ParpU_substrate_ParpC_product', C3A(Xiap=None, ParpU=None, C6pro=None) + ParpU(C3A=None) | C3A(Xiap=None, ParpU=1, C6pro=None) % ParpU(C3A=1), catalysis_0_C3A_catalyzer_ParpU_substrate_ParpC_product_2kf, catalysis_0_C3A_catalyzer_ParpU_substrate_ParpC_product_1kr)
Rule('catalysis_1_C3A_catalyzer_ParpU_substrate_ParpC_product', C3A(Xiap=None, ParpU=1, C6pro=None) % ParpU(C3A=1) >> C3A(Xiap=None, ParpU=None, C6pro=None) + ParpC(), catalysis_1_C3A_catalyzer_ParpU_substrate_ParpC_product_1kc)
# Truncated Bid equilibrates between cytosolic (BidT) and mitochondrial (BidM) forms.
Rule('equilibration_0_BidT_equil_a_BidM_equil_b', BidT() | BidM(BaxM=None), equilibration_0_BidT_equil_a_BidM_equil_b_1kf, equilibration_0_BidT_equil_a_BidM_equil_b_1kr)
# Mitochondrial Bid activates Bax (BaxM -> BaxA); active Bax then self-catalyzes further activation.
Rule('catalysis_0_BidM_catalyzer_BaxM_substrate_BaxA_product', BidM(BaxM=None) + BaxM(BidM=None, BaxA=None) | BidM(BaxM=1) % BaxM(BidM=1, BaxA=None), catalysis_0_BidM_catalyzer_BaxM_substrate_BaxA_product_2kf, catalysis_0_BidM_catalyzer_BaxM_substrate_BaxA_product_1kr)
Rule('catalysis_1_BidM_catalyzer_BaxM_substrate_BaxA_product', BidM(BaxM=1) % BaxM(BidM=1, BaxA=None) >> BidM(BaxM=None) + BaxA(BaxM=None, BaxA_1=None, BaxA_2=None, SmacM=None, CytoCM=None), catalysis_1_BidM_catalyzer_BaxM_substrate_BaxA_product_1kc)
Rule('self_catalyze_0_BaxA_self_catalyzer_BaxM_self_substrate', BaxA(BaxM=None, BaxA_1=None, BaxA_2=None, SmacM=None, CytoCM=None) + BaxM(BidM=None, BaxA=None) | BaxA(BaxM=1, BaxA_1=None, BaxA_2=None, SmacM=None, CytoCM=None) % BaxM(BidM=None, BaxA=1), self_catalyze_0_BaxA_self_catalyzer_BaxM_self_substrate_2kf, self_catalyze_0_BaxA_self_catalyzer_BaxM_self_substrate_1kr)
Rule('self_catalyze_1_BaxA_self_catalyzer_BaxM_self_substrate', BaxA(BaxM=1, BaxA_1=None, BaxA_2=None, SmacM=None, CytoCM=None) % BaxM(BidM=None, BaxA=1) >> BaxA(BaxM=None, BaxA_1=None, BaxA_2=None, SmacM=None, CytoCM=None) + BaxA(BaxM=None, BaxA_1=None, BaxA_2=None, SmacM=None, CytoCM=None), self_catalyze_1_BaxA_self_catalyzer_BaxM_self_substrate_1kc)
# Active Bax assembles step-by-step (dimer -> trimer -> tetramer) into a membrane pore
# via the BaxA_1/BaxA_2 bond sites.
Rule('pore_formation_0_BaxA_pore', BaxA(BaxM=None, BaxA_1=None, BaxA_2=None, SmacM=None, CytoCM=None) + BaxA(BaxM=None, BaxA_1=None, BaxA_2=None, SmacM=None, CytoCM=None) | BaxA(BaxM=None, BaxA_1=None, BaxA_2=1, SmacM=None, CytoCM=None) % BaxA(BaxM=None, BaxA_1=1, BaxA_2=None, SmacM=None, CytoCM=None), pore_formation_0_BaxA_pore_2kf, pore_formation_0_BaxA_pore_1kr)
Rule('pore_formation_1_BaxA_pore', BaxA(BaxM=None, BaxA_1=None, BaxA_2=None, SmacM=None, CytoCM=None) + BaxA(BaxM=None, BaxA_1=None, BaxA_2=1, SmacM=None, CytoCM=None) % BaxA(BaxM=None, BaxA_1=1, BaxA_2=None, SmacM=None, CytoCM=None) | BaxA(BaxM=None, BaxA_1=3, BaxA_2=1, SmacM=None, CytoCM=None) % BaxA(BaxM=None, BaxA_1=1, BaxA_2=2, SmacM=None, CytoCM=None) % BaxA(BaxM=None, BaxA_1=2, BaxA_2=3, SmacM=None, CytoCM=None), pore_formation_1_BaxA_pore_2kf, pore_formation_1_BaxA_pore_1kr)
Rule('pore_formation_2_BaxA_pore', BaxA(BaxM=None, BaxA_1=None, BaxA_2=None, SmacM=None, CytoCM=None) + BaxA(BaxM=None, BaxA_1=3, BaxA_2=1, SmacM=None, CytoCM=None) % BaxA(BaxM=None, BaxA_1=1, BaxA_2=2, SmacM=None, CytoCM=None) % BaxA(BaxM=None, BaxA_1=2, BaxA_2=3, SmacM=None, CytoCM=None) | BaxA(BaxM=None, BaxA_1=4, BaxA_2=1, SmacM=None, CytoCM=None) % BaxA(BaxM=None, BaxA_1=1, BaxA_2=2, SmacM=None, CytoCM=None) % BaxA(BaxM=None, BaxA_1=2, BaxA_2=3, SmacM=None, CytoCM=None) % BaxA(BaxM=None, BaxA_1=3, BaxA_2=4, SmacM=None, CytoCM=None), pore_formation_2_BaxA_pore_2kf, pore_formation_2_BaxA_pore_1kr)
# The Bax tetramer pore transports Smac and cytochrome c from the mitochondrion
# (SmacM/CytoCM) to the cytosol (SmacC/CytoCC): bind cargo, then release it.
Rule('transport_0_BaxA_pore_SmacM_cargo_M_SmacC_cargo_C', BaxA(BaxM=None, BaxA_1=4, BaxA_2=1, SmacM=None, CytoCM=None) % BaxA(BaxM=None, BaxA_1=1, BaxA_2=2, SmacM=None, CytoCM=None) % BaxA(BaxM=None, BaxA_1=2, BaxA_2=3, SmacM=None, CytoCM=None) % BaxA(BaxM=None, BaxA_1=3, BaxA_2=4, SmacM=None, CytoCM=None) + SmacM(BaxA=None) | BaxA(BaxM=None, BaxA_1=4, BaxA_2=1, SmacM=None, CytoCM=None) % BaxA(BaxM=None, BaxA_1=1, BaxA_2=2, SmacM=None, CytoCM=None) % BaxA(BaxM=None, BaxA_1=2, BaxA_2=3, SmacM=None, CytoCM=None) % BaxA(BaxM=None, BaxA_1=3, BaxA_2=4, SmacM=5, CytoCM=None) % SmacM(BaxA=5), transport_0_BaxA_pore_SmacM_cargo_M_SmacC_cargo_C_2kf, transport_0_BaxA_pore_SmacM_cargo_M_SmacC_cargo_C_1kr)
Rule('transport_1_BaxA_pore_SmacM_cargo_M_SmacC_cargo_C', BaxA(BaxM=None, BaxA_1=4, BaxA_2=1, SmacM=None, CytoCM=None) % BaxA(BaxM=None, BaxA_1=1, BaxA_2=2, SmacM=None, CytoCM=None) % BaxA(BaxM=None, BaxA_1=2, BaxA_2=3, SmacM=None, CytoCM=None) % BaxA(BaxM=None, BaxA_1=3, BaxA_2=4, SmacM=5, CytoCM=None) % SmacM(BaxA=5) >> BaxA(BaxM=None, BaxA_1=4, BaxA_2=1, SmacM=None, CytoCM=None) % BaxA(BaxM=None, BaxA_1=1, BaxA_2=2, SmacM=None, CytoCM=None) % BaxA(BaxM=None, BaxA_1=2, BaxA_2=3, SmacM=None, CytoCM=None) % BaxA(BaxM=None, BaxA_1=3, BaxA_2=4, SmacM=None, CytoCM=None) + SmacC(Xiap=None), transport_1_BaxA_pore_SmacM_cargo_M_SmacC_cargo_C_1kc)
Rule('transport_0_BaxA_pore_CytoCM_cargo_M_CytoCC_cargo_C', BaxA(BaxM=None, BaxA_1=4, BaxA_2=1, SmacM=None, CytoCM=None) % BaxA(BaxM=None, BaxA_1=1, BaxA_2=2, SmacM=None, CytoCM=None) % BaxA(BaxM=None, BaxA_1=2, BaxA_2=3, SmacM=None, CytoCM=None) % BaxA(BaxM=None, BaxA_1=3, BaxA_2=4, SmacM=None, CytoCM=None) + CytoCM(BaxA=None) | BaxA(BaxM=None, BaxA_1=4, BaxA_2=1, SmacM=None, CytoCM=None) % BaxA(BaxM=None, BaxA_1=1, BaxA_2=2, SmacM=None, CytoCM=None) % BaxA(BaxM=None, BaxA_1=2, BaxA_2=3, SmacM=None, CytoCM=None) % BaxA(BaxM=None, BaxA_1=3, BaxA_2=4, SmacM=None, CytoCM=5) % CytoCM(BaxA=5), transport_0_BaxA_pore_CytoCM_cargo_M_CytoCC_cargo_C_2kf, transport_0_BaxA_pore_CytoCM_cargo_M_CytoCC_cargo_C_1kr)
Rule('transport_1_BaxA_pore_CytoCM_cargo_M_CytoCC_cargo_C', BaxA(BaxM=None, BaxA_1=4, BaxA_2=1, SmacM=None, CytoCM=None) % BaxA(BaxM=None, BaxA_1=1, BaxA_2=2, SmacM=None, CytoCM=None) % BaxA(BaxM=None, BaxA_1=2, BaxA_2=3, SmacM=None, CytoCM=None) % BaxA(BaxM=None, BaxA_1=3, BaxA_2=4, SmacM=None, CytoCM=5) % CytoCM(BaxA=5) >> BaxA(BaxM=None, BaxA_1=4, BaxA_2=1, SmacM=None, CytoCM=None) % BaxA(BaxM=None, BaxA_1=1, BaxA_2=2, SmacM=None, CytoCM=None) % BaxA(BaxM=None, BaxA_1=2, BaxA_2=3, SmacM=None, CytoCM=None) % BaxA(BaxM=None, BaxA_1=3, BaxA_2=4, SmacM=None, CytoCM=None) + CytoCC(), transport_1_BaxA_pore_CytoCM_cargo_M_CytoCC_cargo_C_1kc)
# Caspase cascade: C8A activates C3pro; C3A activates C6pro; C6A activates C8pro
# (closing the feedback loop back to caspase-8).
Rule('catalysis_0_C8A_catalyzer_C3pro_substrate_C3A_product', C8A(BidU=None, C3pro=None) + C3pro(Apop=None, C8A=None) | C8A(BidU=None, C3pro=1) % C3pro(Apop=None, C8A=1), catalysis_0_C8A_catalyzer_C3pro_substrate_C3A_product_2kf, catalysis_0_C8A_catalyzer_C3pro_substrate_C3A_product_1kr)
Rule('catalysis_1_C8A_catalyzer_C3pro_substrate_C3A_product', C8A(BidU=None, C3pro=1) % C3pro(Apop=None, C8A=1) >> C8A(BidU=None, C3pro=None) + C3A(Xiap=None, ParpU=None, C6pro=None), catalysis_1_C8A_catalyzer_C3pro_substrate_C3A_product_1kc)
Rule('catalysis_0_C3A_catalyzer_C6pro_substrate_C6A_product', C3A(Xiap=None, ParpU=None, C6pro=None) + C6pro(C3A=None) | C3A(Xiap=None, ParpU=None, C6pro=1) % C6pro(C3A=1), catalysis_0_C3A_catalyzer_C6pro_substrate_C6A_product_2kf, catalysis_0_C3A_catalyzer_C6pro_substrate_C6A_product_1kr)
Rule('catalysis_1_C3A_catalyzer_C6pro_substrate_C6A_product', C3A(Xiap=None, ParpU=None, C6pro=1) % C6pro(C3A=1) >> C3A(Xiap=None, ParpU=None, C6pro=None) + C6A(C8pro=None), catalysis_1_C3A_catalyzer_C6pro_substrate_C6A_product_1kc)
Rule('catalysis_0_C6A_catalyzer_C8pro_substrate_C8A_product', C6A(C8pro=None) + C8pro(Fadd=None, C6A=None) | C6A(C8pro=1) % C8pro(Fadd=None, C6A=1), catalysis_0_C6A_catalyzer_C8pro_substrate_C8A_product_2kf, catalysis_0_C6A_catalyzer_C8pro_substrate_C8A_product_1kr)
Rule('catalysis_1_C6A_catalyzer_C8pro_substrate_C8A_product', C6A(C8pro=1) % C8pro(Fadd=None, C6A=1) >> C6A(C8pro=None) + C8A(BidU=None, C3pro=None), catalysis_1_C6A_catalyzer_C8pro_substrate_C8A_product_1kc)
# Initial conditions: each species starts fully unbound at its *_0 parameter value.
Initial(Ligand(Receptor=None), Ligand_0)
Initial(ParpU(C3A=None), ParpU_0)
Initial(C8A(BidU=None, C3pro=None), C8A_0)
Initial(SmacM(BaxA=None), SmacM_0)
Initial(BaxM(BidM=None, BaxA=None), BaxM_0)
Initial(Apop(C3pro=None, Xiap=None), Apop_0)
Initial(Fadd(Receptor=None, C8pro=None), Fadd_0)
Initial(SmacC(Xiap=None), SmacC_0)
Initial(ParpC(), ParpC_0)
Initial(Xiap(SmacC=None, Apop=None, C3A=None), Xiap_0)
Initial(C9(), C9_0)
Initial(C3ub(), C3ub_0)
Initial(C8pro(Fadd=None, C6A=None), C8pro_0)
Initial(C6A(C8pro=None), C6A_0)
Initial(C3pro(Apop=None, C8A=None), C3pro_0)
Initial(CytoCM(BaxA=None), CytoCM_0)
Initial(CytoCC(), CytoCC_0)
Initial(BaxA(BaxM=None, BaxA_1=None, BaxA_2=None, SmacM=None, CytoCM=None), BaxA_0)
Initial(ApafI(), ApafI_0)
Initial(BidU(C8A=None), BidU_0)
Initial(BidT(), BidT_0)
Initial(C3A(Xiap=None, ParpU=None, C6pro=None), C3A_0)
Initial(ApafA(), ApafA_0)
Initial(BidM(BaxM=None), BidM_0)
Initial(Receptor(Ligand=None, Fadd=None), Receptor_0)
Initial(C6pro(C3A=None), C6pro_0)
| true
| true
|
79065cc2d8327872894a396a7ff96af30ded2d5b
| 1,494
|
py
|
Python
|
deepSI/systems/narendra_li_benchmark.py
|
csutakbalazs/deepSI
|
895030225937fb5fcbd4fc0eaba6c306ec0b5820
|
[
"BSD-3-Clause"
] | 1
|
2022-01-19T13:56:00.000Z
|
2022-01-19T13:56:00.000Z
|
deepSI/systems/narendra_li_benchmark.py
|
csutakbalazs/deepSI
|
895030225937fb5fcbd4fc0eaba6c306ec0b5820
|
[
"BSD-3-Clause"
] | null | null | null |
deepSI/systems/narendra_li_benchmark.py
|
csutakbalazs/deepSI
|
895030225937fb5fcbd4fc0eaba6c306ec0b5820
|
[
"BSD-3-Clause"
] | null | null | null |
import deepSI
from deepSI.systems.system import System_ss, System_data
import numpy as np
class NarendraLiBenchmark(System_ss):  # https://arxiv.org/pdf/2003.14162.pdf
    """Narendra-Li nonlinear benchmark system in state-space form.

    A two-state discrete-time system with a nonlinear state transition ``f``
    and a nonlinear output map ``h`` corrupted by additive Gaussian
    measurement noise (scale 0.1). Train and test data are simulated from a
    2000-sample i.i.d. uniform input signal on [-2.5, 2.5].
    """

    def __init__(self):
        """Set up the benchmark with two state variables (nx=2)."""
        super(NarendraLiBenchmark, self).__init__(nx=2)

    def f(self, x, u):
        """State transition: return the next state ``[x1', x2']`` for state ``x`` and input ``u``."""
        x1, x2 = x
        x1new = (x1/(1+x1**2)+1)*np.sin(x2)
        x2new = x2*np.cos(x2) + x1*np.exp(-(x1**2+x2**2)/8) + u**3/(1+u**2+0.5*np.cos(x1+x2))
        return [x1new, x2new]

    def h(self, x):
        """Output map: noisy scalar measurement of the state (additive N(0, 0.1) noise)."""
        x1, x2 = x
        return x1/(1+0.5*np.sin(x2)) + x2/(1+0.5*np.sin(x1)) + self.random.normal(scale=0.1)

    def _simulate_random_experiment(self, N=2000, umax=2.5):
        # Shared by get_train_data/get_test_data (previously duplicated verbatim):
        # simulate the system driven by an i.i.d. uniform input of length N.
        exp = System_data(u=self.random.uniform(low=-umax, high=umax, size=(N,)))
        return self.apply_experiment(exp)

    def get_train_data(self):
        """Simulate a 2000-sample training set with u ~ U(-2.5, 2.5)."""
        return self._simulate_random_experiment()

    def get_test_data(self):
        """Simulate a fresh 2000-sample test set with u ~ U(-2.5, 2.5)."""
        return self._simulate_random_experiment()
if __name__ == '__main__':
    # Demo: simulate the benchmark, grid-search a linear IO model, and plot the fit.
    from deepSI import fit_systems

    benchmark = NarendraLiBenchmark()
    train_data = benchmark.get_train_data()
    model_class = fit_systems.System_IO_fit_linear
    hyper_grid = dict(na=range(0, 7), nb=range(1, 7))
    score, fitted_model, kwargs, _ = fit_systems.grid_search(model_class, train_data, hyper_grid)
    predictions = fitted_model.apply_experiment(train_data)
    train_data.plot()
    predictions.plot(show=True)
| 37.35
| 109
| 0.655957
|
import deepSI
from deepSI.systems.system import System_ss, System_data
import numpy as np
class NarendraLiBenchmark(System_ss):
    """Narendra-Li nonlinear two-state benchmark system with noisy output."""
    def __init__(self):
        """Initialize the state-space system with two states (nx=2)."""
        super(NarendraLiBenchmark, self).__init__(nx=2)
    def f(self,x,u):
        """State transition: return the next state [x1', x2'] for state x and input u."""
        x1,x2 = x
        x1new = (x1/(1+x1**2)+1)*np.sin(x2)
        x2new = x2*np.cos(x2) + x1*np.exp(-(x1**2+x2**2)/8) + u**3/(1+u**2+0.5*np.cos(x1+x2))
        return [x1new,x2new]
    def h(self,x):
        """Output map: scalar measurement of the state plus N(0, 0.1) noise."""
        x1,x2 = x
        return x1/(1+0.5*np.sin(x2)) + x2/(1+0.5*np.sin(x1)) + self.random.normal(scale=0.1)
    def get_train_data(self):
        """Simulate a 2000-sample training set driven by u ~ U(-2.5, 2.5)."""
        exp = System_data(u=self.random.uniform(low=-2.5,high=2.5,size=(2000,)))
        return self.apply_experiment(exp)
    def get_test_data(self):
        """Simulate a 2000-sample test set driven by u ~ U(-2.5, 2.5)."""
        exp = System_data(u=self.random.uniform(low=-2.5,high=2.5,size=(2000,)))
        return self.apply_experiment(exp)
if __name__ == '__main__':
    # Demo: simulate the benchmark, fit a linear IO model by grid search, plot the fit.
    from deepSI import fit_systems
    sys = NarendraLiBenchmark()
    sys_data = sys.get_train_data()
    SYS = fit_systems.System_IO_fit_linear
    # Grid-search the model orders na (output lags) and nb (input lags).
    score, sys_fit, kwargs, _ = fit_systems.grid_search(SYS, sys_data, dict(na=range(0,7),nb=range(1,7)))
    # Re-apply the fitted model to the training data and compare visually.
    sys_data_predict = sys_fit.apply_experiment(sys_data)
    sys_data.plot()
    sys_data_predict.plot(show=True)
| true
| true
|
79065d9567f305e04b73dbf1a532c46c2739b9fe
| 530,439
|
py
|
Python
|
ns-3-dev-git/src/flow-monitor/bindings/modulegen__gcc_ILP32.py
|
rahul0324/Upgrade-AQM-Evaluation-Suite-of-ns-3
|
9d46441749da1059b2e9525d72fce61cb0e42150
|
[
"MIT"
] | 1
|
2022-03-23T13:55:42.000Z
|
2022-03-23T13:55:42.000Z
|
ns-3-dev-git/src/flow-monitor/bindings/modulegen__gcc_ILP32.py
|
rahulkumdas/Upgrade-AQM-Evaluation-Suite-of-ns-3
|
9d46441749da1059b2e9525d72fce61cb0e42150
|
[
"MIT"
] | null | null | null |
ns-3-dev-git/src/flow-monitor/bindings/modulegen__gcc_ILP32.py
|
rahulkumdas/Upgrade-AQM-Evaluation-Suite-of-ns-3
|
9d46441749da1059b2e9525d72fce61cb0e42150
|
[
"MIT"
] | null | null | null |
from pybindgen import Module, FileCodeSink, param, retval, cppclass, typehandlers
import pybindgen.settings
import warnings
class ErrorHandler(pybindgen.settings.ErrorHandler):
    """Lenient pybindgen error handler: report failures as warnings and continue."""

    def handle_error(self, wrapper, exception, traceback_):
        # Emit a warning identifying the failing wrapper; returning True tells
        # pybindgen the error was handled so code generation proceeds.
        message = "exception %r in wrapper %s" % (exception, wrapper)
        warnings.warn(message)
        return True
# Install the lenient handler so one failing wrapper does not abort generation.
pybindgen.settings.error_handler = ErrorHandler()
import sys
def module_init():
    """Create and return the root pybindgen module for the ns-3 flow-monitor bindings."""
    return Module('ns.flow_monitor', cpp_namespace='::ns3')
def register_types(module):
root_module = module.get_root()
## address.h (module 'network'): ns3::Address [class]
module.add_class('Address', import_from_module='ns.network')
## address.h (module 'network'): ns3::Address::MaxSize_e [enumeration]
module.add_enum('MaxSize_e', ['MAX_SIZE'], outer_class=root_module['ns3::Address'], import_from_module='ns.network')
## attribute-construction-list.h (module 'core'): ns3::AttributeConstructionList [class]
module.add_class('AttributeConstructionList', import_from_module='ns.core')
## attribute-construction-list.h (module 'core'): ns3::AttributeConstructionList::Item [struct]
module.add_class('Item', import_from_module='ns.core', outer_class=root_module['ns3::AttributeConstructionList'])
typehandlers.add_type_alias(u'std::list< ns3::AttributeConstructionList::Item > const_iterator', u'ns3::AttributeConstructionList::CIterator')
typehandlers.add_type_alias(u'std::list< ns3::AttributeConstructionList::Item > const_iterator*', u'ns3::AttributeConstructionList::CIterator*')
typehandlers.add_type_alias(u'std::list< ns3::AttributeConstructionList::Item > const_iterator&', u'ns3::AttributeConstructionList::CIterator&')
## buffer.h (module 'network'): ns3::Buffer [class]
module.add_class('Buffer', import_from_module='ns.network')
## buffer.h (module 'network'): ns3::Buffer::Iterator [class]
module.add_class('Iterator', import_from_module='ns.network', outer_class=root_module['ns3::Buffer'])
## packet.h (module 'network'): ns3::ByteTagIterator [class]
module.add_class('ByteTagIterator', import_from_module='ns.network')
## packet.h (module 'network'): ns3::ByteTagIterator::Item [class]
module.add_class('Item', import_from_module='ns.network', outer_class=root_module['ns3::ByteTagIterator'])
## byte-tag-list.h (module 'network'): ns3::ByteTagList [class]
module.add_class('ByteTagList', import_from_module='ns.network')
## byte-tag-list.h (module 'network'): ns3::ByteTagList::Iterator [class]
module.add_class('Iterator', import_from_module='ns.network', outer_class=root_module['ns3::ByteTagList'])
## byte-tag-list.h (module 'network'): ns3::ByteTagList::Iterator::Item [struct]
module.add_class('Item', import_from_module='ns.network', outer_class=root_module['ns3::ByteTagList::Iterator'])
## callback.h (module 'core'): ns3::CallbackBase [class]
module.add_class('CallbackBase', import_from_module='ns.core')
## default-deleter.h (module 'core'): ns3::DefaultDeleter<ns3::AttributeAccessor> [struct]
module.add_class('DefaultDeleter', import_from_module='ns.core', template_parameters=['ns3::AttributeAccessor'])
## default-deleter.h (module 'core'): ns3::DefaultDeleter<ns3::AttributeChecker> [struct]
module.add_class('DefaultDeleter', import_from_module='ns.core', template_parameters=['ns3::AttributeChecker'])
## default-deleter.h (module 'core'): ns3::DefaultDeleter<ns3::AttributeValue> [struct]
module.add_class('DefaultDeleter', import_from_module='ns.core', template_parameters=['ns3::AttributeValue'])
## default-deleter.h (module 'core'): ns3::DefaultDeleter<ns3::CallbackImplBase> [struct]
module.add_class('DefaultDeleter', import_from_module='ns.core', template_parameters=['ns3::CallbackImplBase'])
## default-deleter.h (module 'core'): ns3::DefaultDeleter<ns3::EventImpl> [struct]
module.add_class('DefaultDeleter', import_from_module='ns.core', template_parameters=['ns3::EventImpl'])
## default-deleter.h (module 'core'): ns3::DefaultDeleter<ns3::FlowClassifier> [struct]
module.add_class('DefaultDeleter', template_parameters=['ns3::FlowClassifier'])
## default-deleter.h (module 'core'): ns3::DefaultDeleter<ns3::Hash::Implementation> [struct]
module.add_class('DefaultDeleter', import_from_module='ns.core', template_parameters=['ns3::Hash::Implementation'])
## default-deleter.h (module 'core'): ns3::DefaultDeleter<ns3::NixVector> [struct]
module.add_class('DefaultDeleter', import_from_module='ns.core', template_parameters=['ns3::NixVector'])
## default-deleter.h (module 'core'): ns3::DefaultDeleter<ns3::Packet> [struct]
module.add_class('DefaultDeleter', import_from_module='ns.core', template_parameters=['ns3::Packet'])
## default-deleter.h (module 'core'): ns3::DefaultDeleter<ns3::TraceSourceAccessor> [struct]
module.add_class('DefaultDeleter', import_from_module='ns.core', template_parameters=['ns3::TraceSourceAccessor'])
## event-id.h (module 'core'): ns3::EventId [class]
module.add_class('EventId', import_from_module='ns.core')
## flow-monitor-helper.h (module 'flow-monitor'): ns3::FlowMonitorHelper [class]
module.add_class('FlowMonitorHelper')
## hash.h (module 'core'): ns3::Hasher [class]
module.add_class('Hasher', import_from_module='ns.core')
## histogram.h (module 'flow-monitor'): ns3::Histogram [class]
module.add_class('Histogram')
## inet6-socket-address.h (module 'network'): ns3::Inet6SocketAddress [class]
module.add_class('Inet6SocketAddress', import_from_module='ns.network')
## inet6-socket-address.h (module 'network'): ns3::Inet6SocketAddress [class]
root_module['ns3::Inet6SocketAddress'].implicitly_converts_to(root_module['ns3::Address'])
## inet-socket-address.h (module 'network'): ns3::InetSocketAddress [class]
module.add_class('InetSocketAddress', import_from_module='ns.network')
## inet-socket-address.h (module 'network'): ns3::InetSocketAddress [class]
root_module['ns3::InetSocketAddress'].implicitly_converts_to(root_module['ns3::Address'])
## ipv4-address.h (module 'network'): ns3::Ipv4Address [class]
module.add_class('Ipv4Address', import_from_module='ns.network')
## ipv4-address.h (module 'network'): ns3::Ipv4Address [class]
root_module['ns3::Ipv4Address'].implicitly_converts_to(root_module['ns3::Address'])
## ipv4-interface-address.h (module 'internet'): ns3::Ipv4InterfaceAddress [class]
module.add_class('Ipv4InterfaceAddress', import_from_module='ns.internet')
## ipv4-interface-address.h (module 'internet'): ns3::Ipv4InterfaceAddress::InterfaceAddressScope_e [enumeration]
module.add_enum('InterfaceAddressScope_e', ['HOST', 'LINK', 'GLOBAL'], outer_class=root_module['ns3::Ipv4InterfaceAddress'], import_from_module='ns.internet')
## ipv4-address.h (module 'network'): ns3::Ipv4Mask [class]
module.add_class('Ipv4Mask', import_from_module='ns.network')
## ipv6-address.h (module 'network'): ns3::Ipv6Address [class]
module.add_class('Ipv6Address', import_from_module='ns.network')
## ipv6-address.h (module 'network'): ns3::Ipv6Address [class]
root_module['ns3::Ipv6Address'].implicitly_converts_to(root_module['ns3::Address'])
## ipv6-interface-address.h (module 'internet'): ns3::Ipv6InterfaceAddress [class]
module.add_class('Ipv6InterfaceAddress', import_from_module='ns.internet')
## ipv6-interface-address.h (module 'internet'): ns3::Ipv6InterfaceAddress::State_e [enumeration]
module.add_enum('State_e', ['TENTATIVE', 'DEPRECATED', 'PREFERRED', 'PERMANENT', 'HOMEADDRESS', 'TENTATIVE_OPTIMISTIC', 'INVALID'], outer_class=root_module['ns3::Ipv6InterfaceAddress'], import_from_module='ns.internet')
## ipv6-interface-address.h (module 'internet'): ns3::Ipv6InterfaceAddress::Scope_e [enumeration]
module.add_enum('Scope_e', ['HOST', 'LINKLOCAL', 'GLOBAL'], outer_class=root_module['ns3::Ipv6InterfaceAddress'], import_from_module='ns.internet')
## ipv6-address.h (module 'network'): ns3::Ipv6Prefix [class]
module.add_class('Ipv6Prefix', import_from_module='ns.network')
## mac48-address.h (module 'network'): ns3::Mac48Address [class]
module.add_class('Mac48Address', import_from_module='ns.network')
typehandlers.add_type_alias(u'void ( * ) ( ns3::Mac48Address )', u'ns3::Mac48Address::TracedCallback')
typehandlers.add_type_alias(u'void ( * ) ( ns3::Mac48Address )*', u'ns3::Mac48Address::TracedCallback*')
typehandlers.add_type_alias(u'void ( * ) ( ns3::Mac48Address )&', u'ns3::Mac48Address::TracedCallback&')
## mac48-address.h (module 'network'): ns3::Mac48Address [class]
root_module['ns3::Mac48Address'].implicitly_converts_to(root_module['ns3::Address'])
## mac8-address.h (module 'network'): ns3::Mac8Address [class]
module.add_class('Mac8Address', import_from_module='ns.network')
## mac8-address.h (module 'network'): ns3::Mac8Address [class]
root_module['ns3::Mac8Address'].implicitly_converts_to(root_module['ns3::Address'])
## node-container.h (module 'network'): ns3::NodeContainer [class]
module.add_class('NodeContainer', import_from_module='ns.network')
typehandlers.add_type_alias(u'std::vector< ns3::Ptr< ns3::Node > > const_iterator', u'ns3::NodeContainer::Iterator')
typehandlers.add_type_alias(u'std::vector< ns3::Ptr< ns3::Node > > const_iterator*', u'ns3::NodeContainer::Iterator*')
typehandlers.add_type_alias(u'std::vector< ns3::Ptr< ns3::Node > > const_iterator&', u'ns3::NodeContainer::Iterator&')
## object-base.h (module 'core'): ns3::ObjectBase [class]
module.add_class('ObjectBase', allow_subclassing=True, import_from_module='ns.core')
## object.h (module 'core'): ns3::ObjectDeleter [struct]
module.add_class('ObjectDeleter', import_from_module='ns.core')
## object-factory.h (module 'core'): ns3::ObjectFactory [class]
module.add_class('ObjectFactory', import_from_module='ns.core')
## packet-metadata.h (module 'network'): ns3::PacketMetadata [class]
module.add_class('PacketMetadata', import_from_module='ns.network')
## packet-metadata.h (module 'network'): ns3::PacketMetadata::Item [struct]
module.add_class('Item', import_from_module='ns.network', outer_class=root_module['ns3::PacketMetadata'])
## packet-metadata.h (module 'network'): ns3::PacketMetadata::Item::ItemType [enumeration]
module.add_enum('ItemType', ['PAYLOAD', 'HEADER', 'TRAILER'], outer_class=root_module['ns3::PacketMetadata::Item'], import_from_module='ns.network')
## packet-metadata.h (module 'network'): ns3::PacketMetadata::ItemIterator [class]
module.add_class('ItemIterator', import_from_module='ns.network', outer_class=root_module['ns3::PacketMetadata'])
## packet.h (module 'network'): ns3::PacketTagIterator [class]
module.add_class('PacketTagIterator', import_from_module='ns.network')
## packet.h (module 'network'): ns3::PacketTagIterator::Item [class]
module.add_class('Item', import_from_module='ns.network', outer_class=root_module['ns3::PacketTagIterator'])
## packet-tag-list.h (module 'network'): ns3::PacketTagList [class]
module.add_class('PacketTagList', import_from_module='ns.network')
## packet-tag-list.h (module 'network'): ns3::PacketTagList::TagData [struct]
module.add_class('TagData', import_from_module='ns.network', outer_class=root_module['ns3::PacketTagList'])
## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::Object, ns3::ObjectBase, ns3::ObjectDeleter> [class]
module.add_class('SimpleRefCount', automatic_type_narrowing=True, import_from_module='ns.core', template_parameters=['ns3::Object', 'ns3::ObjectBase', 'ns3::ObjectDeleter'], parent=root_module['ns3::ObjectBase'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount'))
## simulator.h (module 'core'): ns3::Simulator [class]
module.add_class('Simulator', destructor_visibility='private', import_from_module='ns.core')
## simulator.h (module 'core'): ns3::Simulator [enumeration]
module.add_enum('', ['NO_CONTEXT'], outer_class=root_module['ns3::Simulator'], import_from_module='ns.core')
## tag.h (module 'network'): ns3::Tag [class]
module.add_class('Tag', import_from_module='ns.network', parent=root_module['ns3::ObjectBase'])
## tag-buffer.h (module 'network'): ns3::TagBuffer [class]
module.add_class('TagBuffer', import_from_module='ns.network')
## nstime.h (module 'core'): ns3::TimeWithUnit [class]
module.add_class('TimeWithUnit', import_from_module='ns.core')
## type-id.h (module 'core'): ns3::TypeId [class]
module.add_class('TypeId', import_from_module='ns.core')
## type-id.h (module 'core'): ns3::TypeId::AttributeFlag [enumeration]
module.add_enum('AttributeFlag', ['ATTR_GET', 'ATTR_SET', 'ATTR_CONSTRUCT', 'ATTR_SGC'], outer_class=root_module['ns3::TypeId'], import_from_module='ns.core')
## type-id.h (module 'core'): ns3::TypeId::SupportLevel [enumeration]
module.add_enum('SupportLevel', ['SUPPORTED', 'DEPRECATED', 'OBSOLETE'], outer_class=root_module['ns3::TypeId'], import_from_module='ns.core')
## type-id.h (module 'core'): ns3::TypeId::AttributeInformation [struct]
module.add_class('AttributeInformation', import_from_module='ns.core', outer_class=root_module['ns3::TypeId'])
## type-id.h (module 'core'): ns3::TypeId::TraceSourceInformation [struct]
module.add_class('TraceSourceInformation', import_from_module='ns.core', outer_class=root_module['ns3::TypeId'])
typehandlers.add_type_alias(u'uint32_t', u'ns3::TypeId::hash_t')
typehandlers.add_type_alias(u'uint32_t*', u'ns3::TypeId::hash_t*')
typehandlers.add_type_alias(u'uint32_t&', u'ns3::TypeId::hash_t&')
## empty.h (module 'core'): ns3::empty [class]
module.add_class('empty', import_from_module='ns.core')
## int64x64-128.h (module 'core'): ns3::int64x64_t [class]
module.add_class('int64x64_t', import_from_module='ns.core')
## int64x64-128.h (module 'core'): ns3::int64x64_t::impl_type [enumeration]
module.add_enum('impl_type', ['int128_impl', 'cairo_impl', 'ld_impl'], outer_class=root_module['ns3::int64x64_t'], import_from_module='ns.core')
## chunk.h (module 'network'): ns3::Chunk [class]
module.add_class('Chunk', import_from_module='ns.network', parent=root_module['ns3::ObjectBase'])
## header.h (module 'network'): ns3::Header [class]
module.add_class('Header', import_from_module='ns.network', parent=root_module['ns3::Chunk'])
## ipv4-header.h (module 'internet'): ns3::Ipv4Header [class]
module.add_class('Ipv4Header', import_from_module='ns.internet', parent=root_module['ns3::Header'])
## ipv4-header.h (module 'internet'): ns3::Ipv4Header::DscpType [enumeration]
module.add_enum('DscpType', ['DscpDefault', 'DSCP_CS1', 'DSCP_AF11', 'DSCP_AF12', 'DSCP_AF13', 'DSCP_CS2', 'DSCP_AF21', 'DSCP_AF22', 'DSCP_AF23', 'DSCP_CS3', 'DSCP_AF31', 'DSCP_AF32', 'DSCP_AF33', 'DSCP_CS4', 'DSCP_AF41', 'DSCP_AF42', 'DSCP_AF43', 'DSCP_CS5', 'DSCP_EF', 'DSCP_CS6', 'DSCP_CS7'], outer_class=root_module['ns3::Ipv4Header'], import_from_module='ns.internet')
## ipv4-header.h (module 'internet'): ns3::Ipv4Header::EcnType [enumeration]
module.add_enum('EcnType', ['ECN_NotECT', 'ECN_ECT1', 'ECN_ECT0', 'ECN_CE'], outer_class=root_module['ns3::Ipv4Header'], import_from_module='ns.internet')
## ipv6-header.h (module 'internet'): ns3::Ipv6Header [class]
module.add_class('Ipv6Header', import_from_module='ns.internet', parent=root_module['ns3::Header'])
## ipv6-header.h (module 'internet'): ns3::Ipv6Header::DscpType [enumeration]
module.add_enum('DscpType', ['DscpDefault', 'DSCP_CS1', 'DSCP_AF11', 'DSCP_AF12', 'DSCP_AF13', 'DSCP_CS2', 'DSCP_AF21', 'DSCP_AF22', 'DSCP_AF23', 'DSCP_CS3', 'DSCP_AF31', 'DSCP_AF32', 'DSCP_AF33', 'DSCP_CS4', 'DSCP_AF41', 'DSCP_AF42', 'DSCP_AF43', 'DSCP_CS5', 'DSCP_EF', 'DSCP_CS6', 'DSCP_CS7'], outer_class=root_module['ns3::Ipv6Header'], import_from_module='ns.internet')
## ipv6-header.h (module 'internet'): ns3::Ipv6Header::NextHeader_e [enumeration]
module.add_enum('NextHeader_e', ['IPV6_EXT_HOP_BY_HOP', 'IPV6_IPV4', 'IPV6_TCP', 'IPV6_UDP', 'IPV6_IPV6', 'IPV6_EXT_ROUTING', 'IPV6_EXT_FRAGMENTATION', 'IPV6_EXT_CONFIDENTIALITY', 'IPV6_EXT_AUTHENTIFICATION', 'IPV6_ICMPV6', 'IPV6_EXT_END', 'IPV6_EXT_DESTINATION', 'IPV6_SCTP', 'IPV6_EXT_MOBILITY', 'IPV6_UDP_LITE'], outer_class=root_module['ns3::Ipv6Header'], import_from_module='ns.internet')
## ipv6-header.h (module 'internet'): ns3::Ipv6Header::EcnType [enumeration]
module.add_enum('EcnType', ['ECN_NotECT', 'ECN_ECT1', 'ECN_ECT0', 'ECN_CE'], outer_class=root_module['ns3::Ipv6Header'], import_from_module='ns.internet')
## object.h (module 'core'): ns3::Object [class]
module.add_class('Object', import_from_module='ns.core', parent=root_module['ns3::SimpleRefCount< ns3::Object, ns3::ObjectBase, ns3::ObjectDeleter >'])
## object.h (module 'core'): ns3::Object::AggregateIterator [class]
module.add_class('AggregateIterator', import_from_module='ns.core', outer_class=root_module['ns3::Object'])
## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::AttributeAccessor, ns3::empty, ns3::DefaultDeleter<ns3::AttributeAccessor> > [class]
module.add_class('SimpleRefCount', automatic_type_narrowing=True, import_from_module='ns.core', template_parameters=['ns3::AttributeAccessor', 'ns3::empty', 'ns3::DefaultDeleter<ns3::AttributeAccessor>'], parent=root_module['ns3::empty'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount'))
## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::AttributeChecker, ns3::empty, ns3::DefaultDeleter<ns3::AttributeChecker> > [class]
module.add_class('SimpleRefCount', automatic_type_narrowing=True, import_from_module='ns.core', template_parameters=['ns3::AttributeChecker', 'ns3::empty', 'ns3::DefaultDeleter<ns3::AttributeChecker>'], parent=root_module['ns3::empty'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount'))
## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::AttributeValue, ns3::empty, ns3::DefaultDeleter<ns3::AttributeValue> > [class]
module.add_class('SimpleRefCount', automatic_type_narrowing=True, import_from_module='ns.core', template_parameters=['ns3::AttributeValue', 'ns3::empty', 'ns3::DefaultDeleter<ns3::AttributeValue>'], parent=root_module['ns3::empty'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount'))
## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::CallbackImplBase, ns3::empty, ns3::DefaultDeleter<ns3::CallbackImplBase> > [class]
module.add_class('SimpleRefCount', automatic_type_narrowing=True, import_from_module='ns.core', template_parameters=['ns3::CallbackImplBase', 'ns3::empty', 'ns3::DefaultDeleter<ns3::CallbackImplBase>'], parent=root_module['ns3::empty'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount'))
## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::EventImpl, ns3::empty, ns3::DefaultDeleter<ns3::EventImpl> > [class]
module.add_class('SimpleRefCount', automatic_type_narrowing=True, import_from_module='ns.core', template_parameters=['ns3::EventImpl', 'ns3::empty', 'ns3::DefaultDeleter<ns3::EventImpl>'], parent=root_module['ns3::empty'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount'))
## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::FlowClassifier, ns3::empty, ns3::DefaultDeleter<ns3::FlowClassifier> > [class]
module.add_class('SimpleRefCount', automatic_type_narrowing=True, template_parameters=['ns3::FlowClassifier', 'ns3::empty', 'ns3::DefaultDeleter<ns3::FlowClassifier>'], parent=root_module['ns3::empty'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount'))
## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::Hash::Implementation, ns3::empty, ns3::DefaultDeleter<ns3::Hash::Implementation> > [class]
module.add_class('SimpleRefCount', automatic_type_narrowing=True, import_from_module='ns.core', template_parameters=['ns3::Hash::Implementation', 'ns3::empty', 'ns3::DefaultDeleter<ns3::Hash::Implementation>'], parent=root_module['ns3::empty'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount'))
## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::Ipv4MulticastRoute, ns3::empty, ns3::DefaultDeleter<ns3::Ipv4MulticastRoute> > [class]
module.add_class('SimpleRefCount', automatic_type_narrowing=True, import_from_module='ns.core', template_parameters=['ns3::Ipv4MulticastRoute', 'ns3::empty', 'ns3::DefaultDeleter<ns3::Ipv4MulticastRoute>'], parent=root_module['ns3::empty'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount'))
## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::Ipv4Route, ns3::empty, ns3::DefaultDeleter<ns3::Ipv4Route> > [class]
module.add_class('SimpleRefCount', automatic_type_narrowing=True, import_from_module='ns.core', template_parameters=['ns3::Ipv4Route', 'ns3::empty', 'ns3::DefaultDeleter<ns3::Ipv4Route>'], parent=root_module['ns3::empty'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount'))
## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::NixVector, ns3::empty, ns3::DefaultDeleter<ns3::NixVector> > [class]
module.add_class('SimpleRefCount', automatic_type_narrowing=True, import_from_module='ns.core', template_parameters=['ns3::NixVector', 'ns3::empty', 'ns3::DefaultDeleter<ns3::NixVector>'], parent=root_module['ns3::empty'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount'))
## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::OutputStreamWrapper, ns3::empty, ns3::DefaultDeleter<ns3::OutputStreamWrapper> > [class]
module.add_class('SimpleRefCount', automatic_type_narrowing=True, import_from_module='ns.core', template_parameters=['ns3::OutputStreamWrapper', 'ns3::empty', 'ns3::DefaultDeleter<ns3::OutputStreamWrapper>'], parent=root_module['ns3::empty'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount'))
## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::Packet, ns3::empty, ns3::DefaultDeleter<ns3::Packet> > [class]
module.add_class('SimpleRefCount', automatic_type_narrowing=True, import_from_module='ns.core', template_parameters=['ns3::Packet', 'ns3::empty', 'ns3::DefaultDeleter<ns3::Packet>'], parent=root_module['ns3::empty'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount'))
## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::QueueItem, ns3::empty, ns3::DefaultDeleter<ns3::QueueItem> > [class]
module.add_class('SimpleRefCount', automatic_type_narrowing=True, import_from_module='ns.core', template_parameters=['ns3::QueueItem', 'ns3::empty', 'ns3::DefaultDeleter<ns3::QueueItem>'], parent=root_module['ns3::empty'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount'))
## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::TraceSourceAccessor, ns3::empty, ns3::DefaultDeleter<ns3::TraceSourceAccessor> > [class]
module.add_class('SimpleRefCount', automatic_type_narrowing=True, import_from_module='ns.core', template_parameters=['ns3::TraceSourceAccessor', 'ns3::empty', 'ns3::DefaultDeleter<ns3::TraceSourceAccessor>'], parent=root_module['ns3::empty'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount'))
## socket.h (module 'network'): ns3::Socket [class]
module.add_class('Socket', import_from_module='ns.network', parent=root_module['ns3::Object'])
## socket.h (module 'network'): ns3::Socket::SocketErrno [enumeration]
module.add_enum('SocketErrno', ['ERROR_NOTERROR', 'ERROR_ISCONN', 'ERROR_NOTCONN', 'ERROR_MSGSIZE', 'ERROR_AGAIN', 'ERROR_SHUTDOWN', 'ERROR_OPNOTSUPP', 'ERROR_AFNOSUPPORT', 'ERROR_INVAL', 'ERROR_BADF', 'ERROR_NOROUTETOHOST', 'ERROR_NODEV', 'ERROR_ADDRNOTAVAIL', 'ERROR_ADDRINUSE', 'SOCKET_ERRNO_LAST'], outer_class=root_module['ns3::Socket'], import_from_module='ns.network')
## socket.h (module 'network'): ns3::Socket::SocketType [enumeration]
module.add_enum('SocketType', ['NS3_SOCK_STREAM', 'NS3_SOCK_SEQPACKET', 'NS3_SOCK_DGRAM', 'NS3_SOCK_RAW'], outer_class=root_module['ns3::Socket'], import_from_module='ns.network')
## socket.h (module 'network'): ns3::Socket::SocketPriority [enumeration]
module.add_enum('SocketPriority', ['NS3_PRIO_BESTEFFORT', 'NS3_PRIO_FILLER', 'NS3_PRIO_BULK', 'NS3_PRIO_INTERACTIVE_BULK', 'NS3_PRIO_INTERACTIVE', 'NS3_PRIO_CONTROL'], outer_class=root_module['ns3::Socket'], import_from_module='ns.network')
## socket.h (module 'network'): ns3::Socket::Ipv6MulticastFilterMode [enumeration]
module.add_enum('Ipv6MulticastFilterMode', ['INCLUDE', 'EXCLUDE'], outer_class=root_module['ns3::Socket'], import_from_module='ns.network')
## socket.h (module 'network'): ns3::SocketIpTosTag [class]
module.add_class('SocketIpTosTag', import_from_module='ns.network', parent=root_module['ns3::Tag'])
## socket.h (module 'network'): ns3::SocketIpTtlTag [class]
module.add_class('SocketIpTtlTag', import_from_module='ns.network', parent=root_module['ns3::Tag'])
## socket.h (module 'network'): ns3::SocketIpv6HopLimitTag [class]
module.add_class('SocketIpv6HopLimitTag', import_from_module='ns.network', parent=root_module['ns3::Tag'])
## socket.h (module 'network'): ns3::SocketIpv6TclassTag [class]
module.add_class('SocketIpv6TclassTag', import_from_module='ns.network', parent=root_module['ns3::Tag'])
## socket.h (module 'network'): ns3::SocketPriorityTag [class]
module.add_class('SocketPriorityTag', import_from_module='ns.network', parent=root_module['ns3::Tag'])
## socket.h (module 'network'): ns3::SocketSetDontFragmentTag [class]
module.add_class('SocketSetDontFragmentTag', import_from_module='ns.network', parent=root_module['ns3::Tag'])
## nstime.h (module 'core'): ns3::Time [class]
module.add_class('Time', import_from_module='ns.core')
## nstime.h (module 'core'): ns3::Time::Unit [enumeration]
module.add_enum('Unit', ['Y', 'D', 'H', 'MIN', 'S', 'MS', 'US', 'NS', 'PS', 'FS', 'LAST'], outer_class=root_module['ns3::Time'], import_from_module='ns.core')
typehandlers.add_type_alias(u'void ( * ) ( ns3::Time )', u'ns3::Time::TracedCallback')
typehandlers.add_type_alias(u'void ( * ) ( ns3::Time )*', u'ns3::Time::TracedCallback*')
typehandlers.add_type_alias(u'void ( * ) ( ns3::Time )&', u'ns3::Time::TracedCallback&')
## nstime.h (module 'core'): ns3::Time [class]
root_module['ns3::Time'].implicitly_converts_to(root_module['ns3::int64x64_t'])
## trace-source-accessor.h (module 'core'): ns3::TraceSourceAccessor [class]
module.add_class('TraceSourceAccessor', import_from_module='ns.core', parent=root_module['ns3::SimpleRefCount< ns3::TraceSourceAccessor, ns3::empty, ns3::DefaultDeleter<ns3::TraceSourceAccessor> >'])
## trailer.h (module 'network'): ns3::Trailer [class]
module.add_class('Trailer', import_from_module='ns.network', parent=root_module['ns3::Chunk'])
## attribute.h (module 'core'): ns3::AttributeAccessor [class]
module.add_class('AttributeAccessor', import_from_module='ns.core', parent=root_module['ns3::SimpleRefCount< ns3::AttributeAccessor, ns3::empty, ns3::DefaultDeleter<ns3::AttributeAccessor> >'])
## attribute.h (module 'core'): ns3::AttributeChecker [class]
module.add_class('AttributeChecker', allow_subclassing=False, automatic_type_narrowing=True, import_from_module='ns.core', parent=root_module['ns3::SimpleRefCount< ns3::AttributeChecker, ns3::empty, ns3::DefaultDeleter<ns3::AttributeChecker> >'])
## attribute.h (module 'core'): ns3::AttributeValue [class]
module.add_class('AttributeValue', allow_subclassing=False, automatic_type_narrowing=True, import_from_module='ns.core', parent=root_module['ns3::SimpleRefCount< ns3::AttributeValue, ns3::empty, ns3::DefaultDeleter<ns3::AttributeValue> >'])
## callback.h (module 'core'): ns3::CallbackChecker [class]
module.add_class('CallbackChecker', import_from_module='ns.core', parent=root_module['ns3::AttributeChecker'])
## callback.h (module 'core'): ns3::CallbackImplBase [class]
module.add_class('CallbackImplBase', import_from_module='ns.core', parent=root_module['ns3::SimpleRefCount< ns3::CallbackImplBase, ns3::empty, ns3::DefaultDeleter<ns3::CallbackImplBase> >'])
## callback.h (module 'core'): ns3::CallbackValue [class]
module.add_class('CallbackValue', import_from_module='ns.core', parent=root_module['ns3::AttributeValue'])
## attribute.h (module 'core'): ns3::EmptyAttributeAccessor [class]
module.add_class('EmptyAttributeAccessor', import_from_module='ns.core', parent=root_module['ns3::AttributeAccessor'])
## attribute.h (module 'core'): ns3::EmptyAttributeChecker [class]
module.add_class('EmptyAttributeChecker', import_from_module='ns.core', parent=root_module['ns3::AttributeChecker'])
## attribute.h (module 'core'): ns3::EmptyAttributeValue [class]
module.add_class('EmptyAttributeValue', import_from_module='ns.core', parent=root_module['ns3::AttributeValue'])
## event-impl.h (module 'core'): ns3::EventImpl [class]
module.add_class('EventImpl', import_from_module='ns.core', parent=root_module['ns3::SimpleRefCount< ns3::EventImpl, ns3::empty, ns3::DefaultDeleter<ns3::EventImpl> >'])
## flow-classifier.h (module 'flow-monitor'): ns3::FlowClassifier [class]
module.add_class('FlowClassifier', parent=root_module['ns3::SimpleRefCount< ns3::FlowClassifier, ns3::empty, ns3::DefaultDeleter<ns3::FlowClassifier> >'])
## flow-monitor.h (module 'flow-monitor'): ns3::FlowMonitor [class]
module.add_class('FlowMonitor', parent=root_module['ns3::Object'])
## flow-monitor.h (module 'flow-monitor'): ns3::FlowMonitor::FlowStats [struct]
module.add_class('FlowStats', outer_class=root_module['ns3::FlowMonitor'])
typehandlers.add_type_alias(u'std::map< unsigned int, ns3::FlowMonitor::FlowStats >', u'ns3::FlowMonitor::FlowStatsContainer')
typehandlers.add_type_alias(u'std::map< unsigned int, ns3::FlowMonitor::FlowStats >*', u'ns3::FlowMonitor::FlowStatsContainer*')
typehandlers.add_type_alias(u'std::map< unsigned int, ns3::FlowMonitor::FlowStats >&', u'ns3::FlowMonitor::FlowStatsContainer&')
typehandlers.add_type_alias(u'std::map< unsigned int, ns3::FlowMonitor::FlowStats > iterator', u'ns3::FlowMonitor::FlowStatsContainerI')
typehandlers.add_type_alias(u'std::map< unsigned int, ns3::FlowMonitor::FlowStats > iterator*', u'ns3::FlowMonitor::FlowStatsContainerI*')
typehandlers.add_type_alias(u'std::map< unsigned int, ns3::FlowMonitor::FlowStats > iterator&', u'ns3::FlowMonitor::FlowStatsContainerI&')
typehandlers.add_type_alias(u'std::map< unsigned int, ns3::FlowMonitor::FlowStats > const_iterator', u'ns3::FlowMonitor::FlowStatsContainerCI')
typehandlers.add_type_alias(u'std::map< unsigned int, ns3::FlowMonitor::FlowStats > const_iterator*', u'ns3::FlowMonitor::FlowStatsContainerCI*')
typehandlers.add_type_alias(u'std::map< unsigned int, ns3::FlowMonitor::FlowStats > const_iterator&', u'ns3::FlowMonitor::FlowStatsContainerCI&')
typehandlers.add_type_alias(u'std::vector< ns3::Ptr< ns3::FlowProbe > >', u'ns3::FlowMonitor::FlowProbeContainer')
typehandlers.add_type_alias(u'std::vector< ns3::Ptr< ns3::FlowProbe > >*', u'ns3::FlowMonitor::FlowProbeContainer*')
typehandlers.add_type_alias(u'std::vector< ns3::Ptr< ns3::FlowProbe > >&', u'ns3::FlowMonitor::FlowProbeContainer&')
typehandlers.add_type_alias(u'std::vector< ns3::Ptr< ns3::FlowProbe > > iterator', u'ns3::FlowMonitor::FlowProbeContainerI')
typehandlers.add_type_alias(u'std::vector< ns3::Ptr< ns3::FlowProbe > > iterator*', u'ns3::FlowMonitor::FlowProbeContainerI*')
typehandlers.add_type_alias(u'std::vector< ns3::Ptr< ns3::FlowProbe > > iterator&', u'ns3::FlowMonitor::FlowProbeContainerI&')
typehandlers.add_type_alias(u'std::vector< ns3::Ptr< ns3::FlowProbe > > const_iterator', u'ns3::FlowMonitor::FlowProbeContainerCI')
typehandlers.add_type_alias(u'std::vector< ns3::Ptr< ns3::FlowProbe > > const_iterator*', u'ns3::FlowMonitor::FlowProbeContainerCI*')
typehandlers.add_type_alias(u'std::vector< ns3::Ptr< ns3::FlowProbe > > const_iterator&', u'ns3::FlowMonitor::FlowProbeContainerCI&')
## flow-probe.h (module 'flow-monitor'): ns3::FlowProbe [class]
module.add_class('FlowProbe', parent=root_module['ns3::Object'])
## flow-probe.h (module 'flow-monitor'): ns3::FlowProbe::FlowStats [struct]
module.add_class('FlowStats', outer_class=root_module['ns3::FlowProbe'])
typehandlers.add_type_alias(u'std::map< unsigned int, ns3::FlowProbe::FlowStats >', u'ns3::FlowProbe::Stats')
typehandlers.add_type_alias(u'std::map< unsigned int, ns3::FlowProbe::FlowStats >*', u'ns3::FlowProbe::Stats*')
typehandlers.add_type_alias(u'std::map< unsigned int, ns3::FlowProbe::FlowStats >&', u'ns3::FlowProbe::Stats&')
## ipv4.h (module 'internet'): ns3::Ipv4 [class]
module.add_class('Ipv4', import_from_module='ns.internet', parent=root_module['ns3::Object'])
## ipv4-address.h (module 'network'): ns3::Ipv4AddressChecker [class]
module.add_class('Ipv4AddressChecker', import_from_module='ns.network', parent=root_module['ns3::AttributeChecker'])
## ipv4-address.h (module 'network'): ns3::Ipv4AddressValue [class]
module.add_class('Ipv4AddressValue', import_from_module='ns.network', parent=root_module['ns3::AttributeValue'])
## ipv4-flow-classifier.h (module 'flow-monitor'): ns3::Ipv4FlowClassifier [class]
module.add_class('Ipv4FlowClassifier', parent=root_module['ns3::FlowClassifier'])
## ipv4-flow-classifier.h (module 'flow-monitor'): ns3::Ipv4FlowClassifier::FiveTuple [struct]
module.add_class('FiveTuple', outer_class=root_module['ns3::Ipv4FlowClassifier'])
## ipv4-flow-classifier.h (module 'flow-monitor'): ns3::Ipv4FlowClassifier::SortByCount [class]
module.add_class('SortByCount', outer_class=root_module['ns3::Ipv4FlowClassifier'])
## ipv4-flow-probe.h (module 'flow-monitor'): ns3::Ipv4FlowProbe [class]
module.add_class('Ipv4FlowProbe', parent=root_module['ns3::FlowProbe'])
## ipv4-flow-probe.h (module 'flow-monitor'): ns3::Ipv4FlowProbe::DropReason [enumeration]
module.add_enum('DropReason', ['DROP_NO_ROUTE', 'DROP_TTL_EXPIRE', 'DROP_BAD_CHECKSUM', 'DROP_QUEUE', 'DROP_QUEUE_DISC', 'DROP_INTERFACE_DOWN', 'DROP_ROUTE_ERROR', 'DROP_FRAGMENT_TIMEOUT', 'DROP_INVALID_REASON'], outer_class=root_module['ns3::Ipv4FlowProbe'])
## ipv4-l3-protocol.h (module 'internet'): ns3::Ipv4L3Protocol [class]
module.add_class('Ipv4L3Protocol', import_from_module='ns.internet', parent=root_module['ns3::Ipv4'])
## ipv4-l3-protocol.h (module 'internet'): ns3::Ipv4L3Protocol::DropReason [enumeration]
module.add_enum('DropReason', ['DROP_TTL_EXPIRED', 'DROP_NO_ROUTE', 'DROP_BAD_CHECKSUM', 'DROP_INTERFACE_DOWN', 'DROP_ROUTE_ERROR', 'DROP_FRAGMENT_TIMEOUT'], outer_class=root_module['ns3::Ipv4L3Protocol'], import_from_module='ns.internet')
typehandlers.add_type_alias(u'void ( * ) ( ns3::Ipv4Header const &, ns3::Ptr< ns3::Packet const >, uint32_t )', u'ns3::Ipv4L3Protocol::SentTracedCallback')
typehandlers.add_type_alias(u'void ( * ) ( ns3::Ipv4Header const &, ns3::Ptr< ns3::Packet const >, uint32_t )*', u'ns3::Ipv4L3Protocol::SentTracedCallback*')
typehandlers.add_type_alias(u'void ( * ) ( ns3::Ipv4Header const &, ns3::Ptr< ns3::Packet const >, uint32_t )&', u'ns3::Ipv4L3Protocol::SentTracedCallback&')
typehandlers.add_type_alias(u'void ( * ) ( ns3::Ptr< ns3::Packet const >, ns3::Ptr< ns3::Ipv4 >, uint32_t )', u'ns3::Ipv4L3Protocol::TxRxTracedCallback')
typehandlers.add_type_alias(u'void ( * ) ( ns3::Ptr< ns3::Packet const >, ns3::Ptr< ns3::Ipv4 >, uint32_t )*', u'ns3::Ipv4L3Protocol::TxRxTracedCallback*')
typehandlers.add_type_alias(u'void ( * ) ( ns3::Ptr< ns3::Packet const >, ns3::Ptr< ns3::Ipv4 >, uint32_t )&', u'ns3::Ipv4L3Protocol::TxRxTracedCallback&')
typehandlers.add_type_alias(u'void ( * ) ( ns3::Ipv4Header const &, ns3::Ptr< ns3::Packet const >, ns3::Ipv4L3Protocol::DropReason, ns3::Ptr< ns3::Ipv4 >, uint32_t )', u'ns3::Ipv4L3Protocol::DropTracedCallback')
typehandlers.add_type_alias(u'void ( * ) ( ns3::Ipv4Header const &, ns3::Ptr< ns3::Packet const >, ns3::Ipv4L3Protocol::DropReason, ns3::Ptr< ns3::Ipv4 >, uint32_t )*', u'ns3::Ipv4L3Protocol::DropTracedCallback*')
typehandlers.add_type_alias(u'void ( * ) ( ns3::Ipv4Header const &, ns3::Ptr< ns3::Packet const >, ns3::Ipv4L3Protocol::DropReason, ns3::Ptr< ns3::Ipv4 >, uint32_t )&', u'ns3::Ipv4L3Protocol::DropTracedCallback&')
## ipv4-address.h (module 'network'): ns3::Ipv4MaskChecker [class]
module.add_class('Ipv4MaskChecker', import_from_module='ns.network', parent=root_module['ns3::AttributeChecker'])
## ipv4-address.h (module 'network'): ns3::Ipv4MaskValue [class]
module.add_class('Ipv4MaskValue', import_from_module='ns.network', parent=root_module['ns3::AttributeValue'])
## ipv4-route.h (module 'internet'): ns3::Ipv4MulticastRoute [class]
module.add_class('Ipv4MulticastRoute', import_from_module='ns.internet', parent=root_module['ns3::SimpleRefCount< ns3::Ipv4MulticastRoute, ns3::empty, ns3::DefaultDeleter<ns3::Ipv4MulticastRoute> >'])
## ipv4-route.h (module 'internet'): ns3::Ipv4Route [class]
module.add_class('Ipv4Route', import_from_module='ns.internet', parent=root_module['ns3::SimpleRefCount< ns3::Ipv4Route, ns3::empty, ns3::DefaultDeleter<ns3::Ipv4Route> >'])
## ipv4-routing-protocol.h (module 'internet'): ns3::Ipv4RoutingProtocol [class]
module.add_class('Ipv4RoutingProtocol', import_from_module='ns.internet', parent=root_module['ns3::Object'])
typehandlers.add_type_alias(u'ns3::Callback< void, ns3::Ptr< ns3::Ipv4Route >, ns3::Ptr< ns3::Packet const >, ns3::Ipv4Header const &, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >', u'ns3::Ipv4RoutingProtocol::UnicastForwardCallback')
typehandlers.add_type_alias(u'ns3::Callback< void, ns3::Ptr< ns3::Ipv4Route >, ns3::Ptr< ns3::Packet const >, ns3::Ipv4Header const &, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >*', u'ns3::Ipv4RoutingProtocol::UnicastForwardCallback*')
typehandlers.add_type_alias(u'ns3::Callback< void, ns3::Ptr< ns3::Ipv4Route >, ns3::Ptr< ns3::Packet const >, ns3::Ipv4Header const &, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >&', u'ns3::Ipv4RoutingProtocol::UnicastForwardCallback&')
typehandlers.add_type_alias(u'ns3::Callback< void, ns3::Ptr< ns3::Ipv4MulticastRoute >, ns3::Ptr< ns3::Packet const >, ns3::Ipv4Header const &, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >', u'ns3::Ipv4RoutingProtocol::MulticastForwardCallback')
typehandlers.add_type_alias(u'ns3::Callback< void, ns3::Ptr< ns3::Ipv4MulticastRoute >, ns3::Ptr< ns3::Packet const >, ns3::Ipv4Header const &, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >*', u'ns3::Ipv4RoutingProtocol::MulticastForwardCallback*')
typehandlers.add_type_alias(u'ns3::Callback< void, ns3::Ptr< ns3::Ipv4MulticastRoute >, ns3::Ptr< ns3::Packet const >, ns3::Ipv4Header const &, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >&', u'ns3::Ipv4RoutingProtocol::MulticastForwardCallback&')
typehandlers.add_type_alias(u'ns3::Callback< void, ns3::Ptr< ns3::Packet const >, ns3::Ipv4Header const &, unsigned int, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >', u'ns3::Ipv4RoutingProtocol::LocalDeliverCallback')
typehandlers.add_type_alias(u'ns3::Callback< void, ns3::Ptr< ns3::Packet const >, ns3::Ipv4Header const &, unsigned int, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >*', u'ns3::Ipv4RoutingProtocol::LocalDeliverCallback*')
typehandlers.add_type_alias(u'ns3::Callback< void, ns3::Ptr< ns3::Packet const >, ns3::Ipv4Header const &, unsigned int, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >&', u'ns3::Ipv4RoutingProtocol::LocalDeliverCallback&')
typehandlers.add_type_alias(u'ns3::Callback< void, ns3::Ptr< ns3::Packet const >, ns3::Ipv4Header const &, ns3::Socket::SocketErrno, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >', u'ns3::Ipv4RoutingProtocol::ErrorCallback')
typehandlers.add_type_alias(u'ns3::Callback< void, ns3::Ptr< ns3::Packet const >, ns3::Ipv4Header const &, ns3::Socket::SocketErrno, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >*', u'ns3::Ipv4RoutingProtocol::ErrorCallback*')
typehandlers.add_type_alias(u'ns3::Callback< void, ns3::Ptr< ns3::Packet const >, ns3::Ipv4Header const &, ns3::Socket::SocketErrno, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >&', u'ns3::Ipv4RoutingProtocol::ErrorCallback&')
## ipv6.h (module 'internet'): ns3::Ipv6 [class]
module.add_class('Ipv6', import_from_module='ns.internet', parent=root_module['ns3::Object'])
## ipv6-address.h (module 'network'): ns3::Ipv6AddressChecker [class]
module.add_class('Ipv6AddressChecker', import_from_module='ns.network', parent=root_module['ns3::AttributeChecker'])
## ipv6-address.h (module 'network'): ns3::Ipv6AddressValue [class]
module.add_class('Ipv6AddressValue', import_from_module='ns.network', parent=root_module['ns3::AttributeValue'])
## ipv6-flow-classifier.h (module 'flow-monitor'): ns3::Ipv6FlowClassifier [class]
module.add_class('Ipv6FlowClassifier', parent=root_module['ns3::FlowClassifier'])
## ipv6-flow-classifier.h (module 'flow-monitor'): ns3::Ipv6FlowClassifier::FiveTuple [struct]
module.add_class('FiveTuple', outer_class=root_module['ns3::Ipv6FlowClassifier'])
## ipv6-flow-classifier.h (module 'flow-monitor'): ns3::Ipv6FlowClassifier::SortByCount [class]
module.add_class('SortByCount', outer_class=root_module['ns3::Ipv6FlowClassifier'])
## ipv6-flow-probe.h (module 'flow-monitor'): ns3::Ipv6FlowProbe [class]
module.add_class('Ipv6FlowProbe', parent=root_module['ns3::FlowProbe'])
## ipv6-flow-probe.h (module 'flow-monitor'): ns3::Ipv6FlowProbe::DropReason [enumeration]
module.add_enum('DropReason', ['DROP_NO_ROUTE', 'DROP_TTL_EXPIRE', 'DROP_BAD_CHECKSUM', 'DROP_QUEUE', 'DROP_QUEUE_DISC', 'DROP_INTERFACE_DOWN', 'DROP_ROUTE_ERROR', 'DROP_UNKNOWN_PROTOCOL', 'DROP_UNKNOWN_OPTION', 'DROP_MALFORMED_HEADER', 'DROP_FRAGMENT_TIMEOUT', 'DROP_INVALID_REASON'], outer_class=root_module['ns3::Ipv6FlowProbe'])
## ipv6-l3-protocol.h (module 'internet'): ns3::Ipv6L3Protocol [class]
module.add_class('Ipv6L3Protocol', import_from_module='ns.internet', parent=root_module['ns3::Ipv6'])
## ipv6-l3-protocol.h (module 'internet'): ns3::Ipv6L3Protocol::DropReason [enumeration]
module.add_enum('DropReason', ['DROP_TTL_EXPIRED', 'DROP_NO_ROUTE', 'DROP_INTERFACE_DOWN', 'DROP_ROUTE_ERROR', 'DROP_UNKNOWN_PROTOCOL', 'DROP_UNKNOWN_OPTION', 'DROP_MALFORMED_HEADER', 'DROP_FRAGMENT_TIMEOUT'], outer_class=root_module['ns3::Ipv6L3Protocol'], import_from_module='ns.internet')
typehandlers.add_type_alias(u'void ( * ) ( ns3::Ipv6Header const &, ns3::Ptr< ns3::Packet const >, uint32_t )', u'ns3::Ipv6L3Protocol::SentTracedCallback')
typehandlers.add_type_alias(u'void ( * ) ( ns3::Ipv6Header const &, ns3::Ptr< ns3::Packet const >, uint32_t )*', u'ns3::Ipv6L3Protocol::SentTracedCallback*')
typehandlers.add_type_alias(u'void ( * ) ( ns3::Ipv6Header const &, ns3::Ptr< ns3::Packet const >, uint32_t )&', u'ns3::Ipv6L3Protocol::SentTracedCallback&')
typehandlers.add_type_alias(u'void ( * ) ( ns3::Ptr< ns3::Packet const >, ns3::Ptr< ns3::Ipv6 >, uint32_t )', u'ns3::Ipv6L3Protocol::TxRxTracedCallback')
typehandlers.add_type_alias(u'void ( * ) ( ns3::Ptr< ns3::Packet const >, ns3::Ptr< ns3::Ipv6 >, uint32_t )*', u'ns3::Ipv6L3Protocol::TxRxTracedCallback*')
typehandlers.add_type_alias(u'void ( * ) ( ns3::Ptr< ns3::Packet const >, ns3::Ptr< ns3::Ipv6 >, uint32_t )&', u'ns3::Ipv6L3Protocol::TxRxTracedCallback&')
typehandlers.add_type_alias(u'void ( * ) ( ns3::Ipv6Header const &, ns3::Ptr< ns3::Packet const >, ns3::Ipv6L3Protocol::DropReason, ns3::Ptr< ns3::Ipv6 >, uint32_t )', u'ns3::Ipv6L3Protocol::DropTracedCallback')
typehandlers.add_type_alias(u'void ( * ) ( ns3::Ipv6Header const &, ns3::Ptr< ns3::Packet const >, ns3::Ipv6L3Protocol::DropReason, ns3::Ptr< ns3::Ipv6 >, uint32_t )*', u'ns3::Ipv6L3Protocol::DropTracedCallback*')
typehandlers.add_type_alias(u'void ( * ) ( ns3::Ipv6Header const &, ns3::Ptr< ns3::Packet const >, ns3::Ipv6L3Protocol::DropReason, ns3::Ptr< ns3::Ipv6 >, uint32_t )&', u'ns3::Ipv6L3Protocol::DropTracedCallback&')
## ipv6-pmtu-cache.h (module 'internet'): ns3::Ipv6PmtuCache [class]
module.add_class('Ipv6PmtuCache', import_from_module='ns.internet', parent=root_module['ns3::Object'])
## ipv6-address.h (module 'network'): ns3::Ipv6PrefixChecker [class]
module.add_class('Ipv6PrefixChecker', import_from_module='ns.network', parent=root_module['ns3::AttributeChecker'])
## ipv6-address.h (module 'network'): ns3::Ipv6PrefixValue [class]
module.add_class('Ipv6PrefixValue', import_from_module='ns.network', parent=root_module['ns3::AttributeValue'])
## mac48-address.h (module 'network'): ns3::Mac48AddressChecker [class]
module.add_class('Mac48AddressChecker', import_from_module='ns.network', parent=root_module['ns3::AttributeChecker'])
## mac48-address.h (module 'network'): ns3::Mac48AddressValue [class]
module.add_class('Mac48AddressValue', import_from_module='ns.network', parent=root_module['ns3::AttributeValue'])
## net-device.h (module 'network'): ns3::NetDevice [class]
module.add_class('NetDevice', import_from_module='ns.network', parent=root_module['ns3::Object'])
## net-device.h (module 'network'): ns3::NetDevice::PacketType [enumeration]
module.add_enum('PacketType', ['PACKET_HOST', 'NS3_PACKET_HOST', 'PACKET_BROADCAST', 'NS3_PACKET_BROADCAST', 'PACKET_MULTICAST', 'NS3_PACKET_MULTICAST', 'PACKET_OTHERHOST', 'NS3_PACKET_OTHERHOST'], outer_class=root_module['ns3::NetDevice'], import_from_module='ns.network')
typehandlers.add_type_alias(u'void ( * ) ( )', u'ns3::NetDevice::LinkChangeTracedCallback')
typehandlers.add_type_alias(u'void ( * ) ( )*', u'ns3::NetDevice::LinkChangeTracedCallback*')
typehandlers.add_type_alias(u'void ( * ) ( )&', u'ns3::NetDevice::LinkChangeTracedCallback&')
typehandlers.add_type_alias(u'ns3::Callback< bool, ns3::Ptr< ns3::NetDevice >, ns3::Ptr< ns3::Packet const >, unsigned short, ns3::Address const &, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >', u'ns3::NetDevice::ReceiveCallback')
typehandlers.add_type_alias(u'ns3::Callback< bool, ns3::Ptr< ns3::NetDevice >, ns3::Ptr< ns3::Packet const >, unsigned short, ns3::Address const &, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >*', u'ns3::NetDevice::ReceiveCallback*')
typehandlers.add_type_alias(u'ns3::Callback< bool, ns3::Ptr< ns3::NetDevice >, ns3::Ptr< ns3::Packet const >, unsigned short, ns3::Address const &, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >&', u'ns3::NetDevice::ReceiveCallback&')
typehandlers.add_type_alias(u'ns3::Callback< bool, ns3::Ptr< ns3::NetDevice >, ns3::Ptr< ns3::Packet const >, unsigned short, ns3::Address const &, ns3::Address const &, ns3::NetDevice::PacketType, ns3::empty, ns3::empty, ns3::empty >', u'ns3::NetDevice::PromiscReceiveCallback')
typehandlers.add_type_alias(u'ns3::Callback< bool, ns3::Ptr< ns3::NetDevice >, ns3::Ptr< ns3::Packet const >, unsigned short, ns3::Address const &, ns3::Address const &, ns3::NetDevice::PacketType, ns3::empty, ns3::empty, ns3::empty >*', u'ns3::NetDevice::PromiscReceiveCallback*')
typehandlers.add_type_alias(u'ns3::Callback< bool, ns3::Ptr< ns3::NetDevice >, ns3::Ptr< ns3::Packet const >, unsigned short, ns3::Address const &, ns3::Address const &, ns3::NetDevice::PacketType, ns3::empty, ns3::empty, ns3::empty >&', u'ns3::NetDevice::PromiscReceiveCallback&')
## nix-vector.h (module 'network'): ns3::NixVector [class]
module.add_class('NixVector', import_from_module='ns.network', parent=root_module['ns3::SimpleRefCount< ns3::NixVector, ns3::empty, ns3::DefaultDeleter<ns3::NixVector> >'])
## node.h (module 'network'): ns3::Node [class]
module.add_class('Node', import_from_module='ns.network', parent=root_module['ns3::Object'])
typehandlers.add_type_alias(u'ns3::Callback< void, ns3::Ptr< ns3::NetDevice >, ns3::Ptr< ns3::Packet const >, unsigned short, ns3::Address const &, ns3::Address const &, ns3::NetDevice::PacketType, ns3::empty, ns3::empty, ns3::empty >', u'ns3::Node::ProtocolHandler')
typehandlers.add_type_alias(u'ns3::Callback< void, ns3::Ptr< ns3::NetDevice >, ns3::Ptr< ns3::Packet const >, unsigned short, ns3::Address const &, ns3::Address const &, ns3::NetDevice::PacketType, ns3::empty, ns3::empty, ns3::empty >*', u'ns3::Node::ProtocolHandler*')
typehandlers.add_type_alias(u'ns3::Callback< void, ns3::Ptr< ns3::NetDevice >, ns3::Ptr< ns3::Packet const >, unsigned short, ns3::Address const &, ns3::Address const &, ns3::NetDevice::PacketType, ns3::empty, ns3::empty, ns3::empty >&', u'ns3::Node::ProtocolHandler&')
typehandlers.add_type_alias(u'ns3::Callback< void, ns3::Ptr< ns3::NetDevice >, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >', u'ns3::Node::DeviceAdditionListener')
typehandlers.add_type_alias(u'ns3::Callback< void, ns3::Ptr< ns3::NetDevice >, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >*', u'ns3::Node::DeviceAdditionListener*')
typehandlers.add_type_alias(u'ns3::Callback< void, ns3::Ptr< ns3::NetDevice >, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >&', u'ns3::Node::DeviceAdditionListener&')
## object-factory.h (module 'core'): ns3::ObjectFactoryChecker [class]
module.add_class('ObjectFactoryChecker', import_from_module='ns.core', parent=root_module['ns3::AttributeChecker'])
## object-factory.h (module 'core'): ns3::ObjectFactoryValue [class]
module.add_class('ObjectFactoryValue', import_from_module='ns.core', parent=root_module['ns3::AttributeValue'])
## output-stream-wrapper.h (module 'network'): ns3::OutputStreamWrapper [class]
module.add_class('OutputStreamWrapper', import_from_module='ns.network', parent=root_module['ns3::SimpleRefCount< ns3::OutputStreamWrapper, ns3::empty, ns3::DefaultDeleter<ns3::OutputStreamWrapper> >'])
## packet.h (module 'network'): ns3::Packet [class]
module.add_class('Packet', import_from_module='ns.network', parent=root_module['ns3::SimpleRefCount< ns3::Packet, ns3::empty, ns3::DefaultDeleter<ns3::Packet> >'])
typehandlers.add_type_alias(u'void ( * ) ( ns3::Ptr< ns3::Packet const > )', u'ns3::Packet::TracedCallback')
typehandlers.add_type_alias(u'void ( * ) ( ns3::Ptr< ns3::Packet const > )*', u'ns3::Packet::TracedCallback*')
typehandlers.add_type_alias(u'void ( * ) ( ns3::Ptr< ns3::Packet const > )&', u'ns3::Packet::TracedCallback&')
typehandlers.add_type_alias(u'void ( * ) ( ns3::Ptr< ns3::Packet const >, ns3::Address const & )', u'ns3::Packet::AddressTracedCallback')
typehandlers.add_type_alias(u'void ( * ) ( ns3::Ptr< ns3::Packet const >, ns3::Address const & )*', u'ns3::Packet::AddressTracedCallback*')
typehandlers.add_type_alias(u'void ( * ) ( ns3::Ptr< ns3::Packet const >, ns3::Address const & )&', u'ns3::Packet::AddressTracedCallback&')
typehandlers.add_type_alias(u'void ( * ) ( ns3::Ptr< ns3::Packet const > const, ns3::Address const &, ns3::Address const & )', u'ns3::Packet::TwoAddressTracedCallback')
typehandlers.add_type_alias(u'void ( * ) ( ns3::Ptr< ns3::Packet const > const, ns3::Address const &, ns3::Address const & )*', u'ns3::Packet::TwoAddressTracedCallback*')
typehandlers.add_type_alias(u'void ( * ) ( ns3::Ptr< ns3::Packet const > const, ns3::Address const &, ns3::Address const & )&', u'ns3::Packet::TwoAddressTracedCallback&')
typehandlers.add_type_alias(u'void ( * ) ( ns3::Ptr< ns3::Packet const >, ns3::Mac48Address )', u'ns3::Packet::Mac48AddressTracedCallback')
typehandlers.add_type_alias(u'void ( * ) ( ns3::Ptr< ns3::Packet const >, ns3::Mac48Address )*', u'ns3::Packet::Mac48AddressTracedCallback*')
typehandlers.add_type_alias(u'void ( * ) ( ns3::Ptr< ns3::Packet const >, ns3::Mac48Address )&', u'ns3::Packet::Mac48AddressTracedCallback&')
typehandlers.add_type_alias(u'void ( * ) ( uint32_t, uint32_t )', u'ns3::Packet::SizeTracedCallback')
typehandlers.add_type_alias(u'void ( * ) ( uint32_t, uint32_t )*', u'ns3::Packet::SizeTracedCallback*')
typehandlers.add_type_alias(u'void ( * ) ( uint32_t, uint32_t )&', u'ns3::Packet::SizeTracedCallback&')
typehandlers.add_type_alias(u'void ( * ) ( ns3::Ptr< ns3::Packet const >, double )', u'ns3::Packet::SinrTracedCallback')
typehandlers.add_type_alias(u'void ( * ) ( ns3::Ptr< ns3::Packet const >, double )*', u'ns3::Packet::SinrTracedCallback*')
typehandlers.add_type_alias(u'void ( * ) ( ns3::Ptr< ns3::Packet const >, double )&', u'ns3::Packet::SinrTracedCallback&')
## queue-item.h (module 'network'): ns3::QueueItem [class]
module.add_class('QueueItem', import_from_module='ns.network', parent=root_module['ns3::SimpleRefCount< ns3::QueueItem, ns3::empty, ns3::DefaultDeleter<ns3::QueueItem> >'])
## queue-item.h (module 'network'): ns3::QueueItem::Uint8Values [enumeration]
module.add_enum('Uint8Values', ['IP_DSFIELD'], outer_class=root_module['ns3::QueueItem'], import_from_module='ns.network')
typehandlers.add_type_alias(u'void ( * ) ( ns3::Ptr< ns3::QueueItem const > )', u'ns3::QueueItem::TracedCallback')
typehandlers.add_type_alias(u'void ( * ) ( ns3::Ptr< ns3::QueueItem const > )*', u'ns3::QueueItem::TracedCallback*')
typehandlers.add_type_alias(u'void ( * ) ( ns3::Ptr< ns3::QueueItem const > )&', u'ns3::QueueItem::TracedCallback&')
## nstime.h (module 'core'): ns3::TimeValue [class]
module.add_class('TimeValue', import_from_module='ns.core', parent=root_module['ns3::AttributeValue'])
## type-id.h (module 'core'): ns3::TypeIdChecker [class]
module.add_class('TypeIdChecker', import_from_module='ns.core', parent=root_module['ns3::AttributeChecker'])
## type-id.h (module 'core'): ns3::TypeIdValue [class]
module.add_class('TypeIdValue', import_from_module='ns.core', parent=root_module['ns3::AttributeValue'])
## address.h (module 'network'): ns3::AddressChecker [class]
module.add_class('AddressChecker', import_from_module='ns.network', parent=root_module['ns3::AttributeChecker'])
## address.h (module 'network'): ns3::AddressValue [class]
module.add_class('AddressValue', import_from_module='ns.network', parent=root_module['ns3::AttributeValue'])
## callback.h (module 'core'): ns3::CallbackImpl<bool, ns3::Ptr<ns3::Socket>, const ns3::Address &, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty> [class]
module.add_class('CallbackImpl', import_from_module='ns.core', template_parameters=['bool', 'ns3::Ptr<ns3::Socket>', 'const ns3::Address &', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty'], parent=root_module['ns3::CallbackImplBase'])
## callback.h (module 'core'): ns3::CallbackImpl<ns3::ObjectBase *, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty> [class]
module.add_class('CallbackImpl', import_from_module='ns.core', template_parameters=['ns3::ObjectBase *', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty'], parent=root_module['ns3::CallbackImplBase'])
## callback.h (module 'core'): ns3::CallbackImpl<void, const ns3::Ipv4Header &, ns3::Ptr<const ns3::Packet>, ns3::Ipv4L3Protocol::DropReason, ns3::Ptr<ns3::Ipv4>, unsigned int, ns3::empty, ns3::empty, ns3::empty, ns3::empty> [class]
module.add_class('CallbackImpl', import_from_module='ns.core', template_parameters=['void', 'const ns3::Ipv4Header &', 'ns3::Ptr<const ns3::Packet>', 'ns3::Ipv4L3Protocol::DropReason', 'ns3::Ptr<ns3::Ipv4>', 'unsigned int', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty'], parent=root_module['ns3::CallbackImplBase'])
## callback.h (module 'core'): ns3::CallbackImpl<void, const ns3::Ipv4Header &, ns3::Ptr<const ns3::Packet>, unsigned int, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty> [class]
module.add_class('CallbackImpl', import_from_module='ns.core', template_parameters=['void', 'const ns3::Ipv4Header &', 'ns3::Ptr<const ns3::Packet>', 'unsigned int', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty'], parent=root_module['ns3::CallbackImplBase'])
## callback.h (module 'core'): ns3::CallbackImpl<void, const ns3::Ipv6Header &, ns3::Ptr<const ns3::Packet>, ns3::Ipv6L3Protocol::DropReason, ns3::Ptr<ns3::Ipv6>, unsigned int, ns3::empty, ns3::empty, ns3::empty, ns3::empty> [class]
module.add_class('CallbackImpl', import_from_module='ns.core', template_parameters=['void', 'const ns3::Ipv6Header &', 'ns3::Ptr<const ns3::Packet>', 'ns3::Ipv6L3Protocol::DropReason', 'ns3::Ptr<ns3::Ipv6>', 'unsigned int', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty'], parent=root_module['ns3::CallbackImplBase'])
## callback.h (module 'core'): ns3::CallbackImpl<void, const ns3::Ipv6Header &, ns3::Ptr<const ns3::Packet>, unsigned int, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty> [class]
module.add_class('CallbackImpl', import_from_module='ns.core', template_parameters=['void', 'const ns3::Ipv6Header &', 'ns3::Ptr<const ns3::Packet>', 'unsigned int', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty'], parent=root_module['ns3::CallbackImplBase'])
## callback.h (module 'core'): ns3::CallbackImpl<void, ns3::Ptr<const ns3::Packet>, ns3::Ptr<ns3::Ipv4>, unsigned int, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty> [class]
module.add_class('CallbackImpl', import_from_module='ns.core', template_parameters=['void', 'ns3::Ptr<const ns3::Packet>', 'ns3::Ptr<ns3::Ipv4>', 'unsigned int', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty'], parent=root_module['ns3::CallbackImplBase'])
## callback.h (module 'core'): ns3::CallbackImpl<void, ns3::Ptr<const ns3::Packet>, ns3::Ptr<ns3::Ipv6>, unsigned int, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty> [class]
module.add_class('CallbackImpl', import_from_module='ns.core', template_parameters=['void', 'ns3::Ptr<const ns3::Packet>', 'ns3::Ptr<ns3::Ipv6>', 'unsigned int', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty'], parent=root_module['ns3::CallbackImplBase'])
## callback.h (module 'core'): ns3::CallbackImpl<void, ns3::Ptr<ns3::NetDevice>, ns3::Ptr<const ns3::Packet>, unsigned short, const ns3::Address &, const ns3::Address &, ns3::NetDevice::PacketType, ns3::empty, ns3::empty, ns3::empty> [class]
module.add_class('CallbackImpl', import_from_module='ns.core', template_parameters=['void', 'ns3::Ptr<ns3::NetDevice>', 'ns3::Ptr<const ns3::Packet>', 'unsigned short', 'const ns3::Address &', 'const ns3::Address &', 'ns3::NetDevice::PacketType', 'ns3::empty', 'ns3::empty', 'ns3::empty'], parent=root_module['ns3::CallbackImplBase'])
## callback.h (module 'core'): ns3::CallbackImpl<void, ns3::Ptr<ns3::NetDevice>, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty> [class]
module.add_class('CallbackImpl', import_from_module='ns.core', template_parameters=['void', 'ns3::Ptr<ns3::NetDevice>', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty'], parent=root_module['ns3::CallbackImplBase'])
## callback.h (module 'core'): ns3::CallbackImpl<void, ns3::Ptr<ns3::Socket>, const ns3::Address &, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty> [class]
module.add_class('CallbackImpl', import_from_module='ns.core', template_parameters=['void', 'ns3::Ptr<ns3::Socket>', 'const ns3::Address &', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty'], parent=root_module['ns3::CallbackImplBase'])
## callback.h (module 'core'): ns3::CallbackImpl<void, ns3::Ptr<ns3::Socket>, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty> [class]
module.add_class('CallbackImpl', import_from_module='ns.core', template_parameters=['void', 'ns3::Ptr<ns3::Socket>', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty'], parent=root_module['ns3::CallbackImplBase'])
## callback.h (module 'core'): ns3::CallbackImpl<void, ns3::Ptr<ns3::Socket>, unsigned int, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty> [class]
module.add_class('CallbackImpl', import_from_module='ns.core', template_parameters=['void', 'ns3::Ptr<ns3::Socket>', 'unsigned int', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty'], parent=root_module['ns3::CallbackImplBase'])
## queue-item.h (module 'network'): ns3::QueueDiscItem [class]
module.add_class('QueueDiscItem', import_from_module='ns.network', parent=root_module['ns3::QueueItem'])
module.add_container('std::vector< ns3::Ipv6Address >', 'ns3::Ipv6Address', container_type=u'vector')
module.add_container('std::vector< unsigned int >', 'unsigned int', container_type=u'vector')
module.add_container('std::vector< unsigned long long >', 'long unsigned int', container_type=u'vector')
module.add_container('std::map< unsigned int, ns3::FlowMonitor::FlowStats >', ('unsigned int', 'ns3::FlowMonitor::FlowStats'), container_type=u'map')
module.add_container('std::vector< ns3::Ptr< ns3::FlowProbe > >', 'ns3::Ptr< ns3::FlowProbe >', container_type=u'vector')
module.add_container('std::map< unsigned int, ns3::FlowProbe::FlowStats >', ('unsigned int', 'ns3::FlowProbe::FlowStats'), container_type=u'map')
module.add_container('ns3::FlowProbe::Stats', ('unsigned int', 'ns3::FlowProbe::FlowStats'), container_type=u'map')
module.add_container('std::vector< std::pair< ns3::Ipv4Header::DscpType, unsigned int > >', 'std::pair< ns3::Ipv4Header::DscpType, unsigned int >', container_type=u'vector')
module.add_container('std::map< unsigned int, unsigned int >', ('unsigned int', 'unsigned int'), container_type=u'map')
module.add_container('std::vector< std::pair< ns3::Ipv6Header::DscpType, unsigned int > >', 'std::pair< ns3::Ipv6Header::DscpType, unsigned int >', container_type=u'vector')
typehandlers.add_type_alias(u'uint32_t', u'ns3::FlowId')
typehandlers.add_type_alias(u'uint32_t*', u'ns3::FlowId*')
typehandlers.add_type_alias(u'uint32_t&', u'ns3::FlowId&')
typehandlers.add_type_alias(u'uint32_t', u'ns3::FlowPacketId')
typehandlers.add_type_alias(u'uint32_t*', u'ns3::FlowPacketId*')
typehandlers.add_type_alias(u'uint32_t&', u'ns3::FlowPacketId&')
## Register a nested module for the namespace FatalImpl
nested_module = module.add_cpp_namespace('FatalImpl')
register_types_ns3_FatalImpl(nested_module)
## Register a nested module for the namespace Hash
nested_module = module.add_cpp_namespace('Hash')
register_types_ns3_Hash(nested_module)
## Register a nested module for the namespace TracedValueCallback
nested_module = module.add_cpp_namespace('TracedValueCallback')
register_types_ns3_TracedValueCallback(nested_module)
def register_types_ns3_FatalImpl(module):
    """Register binding types for the ``ns3::FatalImpl`` C++ namespace.

    This namespace exposes no classes, enums or type aliases to the Python
    bindings, so the hook is intentionally empty.  The generated
    ``root_module = module.get_root()`` lookup was dead code (the local was
    never used) and has been removed.
    """
def register_types_ns3_Hash(module):
    """Register binding types that live in the ``ns3::Hash`` C++ namespace."""
    root_module = module.get_root()
    ## hash-function.h (module 'core'): ns3::Hash::Implementation [class]
    module.add_class('Implementation', import_from_module='ns.core', parent=root_module['ns3::SimpleRefCount< ns3::Hash::Implementation, ns3::empty, ns3::DefaultDeleter<ns3::Hash::Implementation> >'])
    # Hash-function pointer typedefs: each alias is registered in its value,
    # pointer and reference flavours, mirroring the generated triples.
    function_pointer_aliases = [
        (u'uint32_t ( * ) ( char const *, std::size_t const )', u'ns3::Hash::Hash32Function_ptr'),
        (u'uint64_t ( * ) ( char const *, std::size_t const )', u'ns3::Hash::Hash64Function_ptr'),
    ]
    for cpp_signature, alias_name in function_pointer_aliases:
        for decoration in (u'', u'*', u'&'):
            typehandlers.add_type_alias(cpp_signature + decoration, alias_name + decoration)
    ## The ns3::Hash::Function namespace gets its own nested binding module.
    function_namespace_module = module.add_cpp_namespace('Function')
    register_types_ns3_Hash_Function(function_namespace_module)
def register_types_ns3_Hash_Function(module):
    """Register the concrete hash implementations in ``ns3::Hash::Function``.

    Classes registered (all from ns.core, all deriving from
    ``ns3::Hash::Implementation``): Fnv1a (hash-fnv.h), Hash32 and Hash64
    (hash-function.h), Murmur3 (hash-murmur3.h).
    """
    root_module = module.get_root()
    implementation_parent = root_module['ns3::Hash::Implementation']
    for hash_class_name in ('Fnv1a', 'Hash32', 'Hash64', 'Murmur3'):
        module.add_class(hash_class_name, import_from_module='ns.core', parent=implementation_parent)
def register_types_ns3_TracedValueCallback(module):
    """Register type aliases in the ``ns3::TracedValueCallback`` namespace.

    Only the ``Time`` traced-value callback signature is exposed, in its
    value, pointer and reference forms.  The generated
    ``root_module = module.get_root()`` binding was unused and has been
    dropped.
    """
    for decoration in (u'', u'*', u'&'):
        typehandlers.add_type_alias(u'void ( * ) ( ns3::Time, ns3::Time )' + decoration,
                                    u'ns3::TracedValueCallback::Time' + decoration)
def register_methods(root_module):
    """Register the method bindings for every class in this module.

    Each ``register_*_methods`` helper receives the root module plus the
    class wrapper (looked up by its fully-qualified C++ name) that it
    populates.  The calls are emitted by the binding generator; their order
    is preserved as generated.
    """
    # Value/helper types (addresses, buffers, tags, containers, ids).
    register_Ns3Address_methods(root_module, root_module['ns3::Address'])
    register_Ns3AttributeConstructionList_methods(root_module, root_module['ns3::AttributeConstructionList'])
    register_Ns3AttributeConstructionListItem_methods(root_module, root_module['ns3::AttributeConstructionList::Item'])
    register_Ns3Buffer_methods(root_module, root_module['ns3::Buffer'])
    register_Ns3BufferIterator_methods(root_module, root_module['ns3::Buffer::Iterator'])
    register_Ns3ByteTagIterator_methods(root_module, root_module['ns3::ByteTagIterator'])
    register_Ns3ByteTagIteratorItem_methods(root_module, root_module['ns3::ByteTagIterator::Item'])
    register_Ns3ByteTagList_methods(root_module, root_module['ns3::ByteTagList'])
    register_Ns3ByteTagListIterator_methods(root_module, root_module['ns3::ByteTagList::Iterator'])
    register_Ns3ByteTagListIteratorItem_methods(root_module, root_module['ns3::ByteTagList::Iterator::Item'])
    register_Ns3CallbackBase_methods(root_module, root_module['ns3::CallbackBase'])
    # DefaultDeleter template instantiations.
    register_Ns3DefaultDeleter__Ns3AttributeAccessor_methods(root_module, root_module['ns3::DefaultDeleter< ns3::AttributeAccessor >'])
    register_Ns3DefaultDeleter__Ns3AttributeChecker_methods(root_module, root_module['ns3::DefaultDeleter< ns3::AttributeChecker >'])
    register_Ns3DefaultDeleter__Ns3AttributeValue_methods(root_module, root_module['ns3::DefaultDeleter< ns3::AttributeValue >'])
    register_Ns3DefaultDeleter__Ns3CallbackImplBase_methods(root_module, root_module['ns3::DefaultDeleter< ns3::CallbackImplBase >'])
    register_Ns3DefaultDeleter__Ns3EventImpl_methods(root_module, root_module['ns3::DefaultDeleter< ns3::EventImpl >'])
    register_Ns3DefaultDeleter__Ns3FlowClassifier_methods(root_module, root_module['ns3::DefaultDeleter< ns3::FlowClassifier >'])
    register_Ns3DefaultDeleter__Ns3HashImplementation_methods(root_module, root_module['ns3::DefaultDeleter< ns3::Hash::Implementation >'])
    register_Ns3DefaultDeleter__Ns3NixVector_methods(root_module, root_module['ns3::DefaultDeleter< ns3::NixVector >'])
    register_Ns3DefaultDeleter__Ns3Packet_methods(root_module, root_module['ns3::DefaultDeleter< ns3::Packet >'])
    register_Ns3DefaultDeleter__Ns3TraceSourceAccessor_methods(root_module, root_module['ns3::DefaultDeleter< ns3::TraceSourceAccessor >'])
    register_Ns3EventId_methods(root_module, root_module['ns3::EventId'])
    register_Ns3FlowMonitorHelper_methods(root_module, root_module['ns3::FlowMonitorHelper'])
    register_Ns3Hasher_methods(root_module, root_module['ns3::Hasher'])
    register_Ns3Histogram_methods(root_module, root_module['ns3::Histogram'])
    register_Ns3Inet6SocketAddress_methods(root_module, root_module['ns3::Inet6SocketAddress'])
    register_Ns3InetSocketAddress_methods(root_module, root_module['ns3::InetSocketAddress'])
    register_Ns3Ipv4Address_methods(root_module, root_module['ns3::Ipv4Address'])
    register_Ns3Ipv4InterfaceAddress_methods(root_module, root_module['ns3::Ipv4InterfaceAddress'])
    register_Ns3Ipv4Mask_methods(root_module, root_module['ns3::Ipv4Mask'])
    register_Ns3Ipv6Address_methods(root_module, root_module['ns3::Ipv6Address'])
    register_Ns3Ipv6InterfaceAddress_methods(root_module, root_module['ns3::Ipv6InterfaceAddress'])
    register_Ns3Ipv6Prefix_methods(root_module, root_module['ns3::Ipv6Prefix'])
    register_Ns3Mac48Address_methods(root_module, root_module['ns3::Mac48Address'])
    register_Ns3Mac8Address_methods(root_module, root_module['ns3::Mac8Address'])
    register_Ns3NodeContainer_methods(root_module, root_module['ns3::NodeContainer'])
    register_Ns3ObjectBase_methods(root_module, root_module['ns3::ObjectBase'])
    register_Ns3ObjectDeleter_methods(root_module, root_module['ns3::ObjectDeleter'])
    register_Ns3ObjectFactory_methods(root_module, root_module['ns3::ObjectFactory'])
    register_Ns3PacketMetadata_methods(root_module, root_module['ns3::PacketMetadata'])
    register_Ns3PacketMetadataItem_methods(root_module, root_module['ns3::PacketMetadata::Item'])
    register_Ns3PacketMetadataItemIterator_methods(root_module, root_module['ns3::PacketMetadata::ItemIterator'])
    register_Ns3PacketTagIterator_methods(root_module, root_module['ns3::PacketTagIterator'])
    register_Ns3PacketTagIteratorItem_methods(root_module, root_module['ns3::PacketTagIterator::Item'])
    register_Ns3PacketTagList_methods(root_module, root_module['ns3::PacketTagList'])
    register_Ns3PacketTagListTagData_methods(root_module, root_module['ns3::PacketTagList::TagData'])
    register_Ns3SimpleRefCount__Ns3Object_Ns3ObjectBase_Ns3ObjectDeleter_methods(root_module, root_module['ns3::SimpleRefCount< ns3::Object, ns3::ObjectBase, ns3::ObjectDeleter >'])
    register_Ns3Simulator_methods(root_module, root_module['ns3::Simulator'])
    register_Ns3Tag_methods(root_module, root_module['ns3::Tag'])
    register_Ns3TagBuffer_methods(root_module, root_module['ns3::TagBuffer'])
    register_Ns3TimeWithUnit_methods(root_module, root_module['ns3::TimeWithUnit'])
    register_Ns3TypeId_methods(root_module, root_module['ns3::TypeId'])
    register_Ns3TypeIdAttributeInformation_methods(root_module, root_module['ns3::TypeId::AttributeInformation'])
    register_Ns3TypeIdTraceSourceInformation_methods(root_module, root_module['ns3::TypeId::TraceSourceInformation'])
    register_Ns3Empty_methods(root_module, root_module['ns3::empty'])
    register_Ns3Int64x64_t_methods(root_module, root_module['ns3::int64x64_t'])
    # Header/chunk hierarchy and core Object machinery.
    register_Ns3Chunk_methods(root_module, root_module['ns3::Chunk'])
    register_Ns3Header_methods(root_module, root_module['ns3::Header'])
    register_Ns3Ipv4Header_methods(root_module, root_module['ns3::Ipv4Header'])
    register_Ns3Ipv6Header_methods(root_module, root_module['ns3::Ipv6Header'])
    register_Ns3Object_methods(root_module, root_module['ns3::Object'])
    register_Ns3ObjectAggregateIterator_methods(root_module, root_module['ns3::Object::AggregateIterator'])
    # SimpleRefCount template instantiations.
    register_Ns3SimpleRefCount__Ns3AttributeAccessor_Ns3Empty_Ns3DefaultDeleter__lt__ns3AttributeAccessor__gt___methods(root_module, root_module['ns3::SimpleRefCount< ns3::AttributeAccessor, ns3::empty, ns3::DefaultDeleter<ns3::AttributeAccessor> >'])
    register_Ns3SimpleRefCount__Ns3AttributeChecker_Ns3Empty_Ns3DefaultDeleter__lt__ns3AttributeChecker__gt___methods(root_module, root_module['ns3::SimpleRefCount< ns3::AttributeChecker, ns3::empty, ns3::DefaultDeleter<ns3::AttributeChecker> >'])
    register_Ns3SimpleRefCount__Ns3AttributeValue_Ns3Empty_Ns3DefaultDeleter__lt__ns3AttributeValue__gt___methods(root_module, root_module['ns3::SimpleRefCount< ns3::AttributeValue, ns3::empty, ns3::DefaultDeleter<ns3::AttributeValue> >'])
    register_Ns3SimpleRefCount__Ns3CallbackImplBase_Ns3Empty_Ns3DefaultDeleter__lt__ns3CallbackImplBase__gt___methods(root_module, root_module['ns3::SimpleRefCount< ns3::CallbackImplBase, ns3::empty, ns3::DefaultDeleter<ns3::CallbackImplBase> >'])
    register_Ns3SimpleRefCount__Ns3EventImpl_Ns3Empty_Ns3DefaultDeleter__lt__ns3EventImpl__gt___methods(root_module, root_module['ns3::SimpleRefCount< ns3::EventImpl, ns3::empty, ns3::DefaultDeleter<ns3::EventImpl> >'])
    register_Ns3SimpleRefCount__Ns3FlowClassifier_Ns3Empty_Ns3DefaultDeleter__lt__ns3FlowClassifier__gt___methods(root_module, root_module['ns3::SimpleRefCount< ns3::FlowClassifier, ns3::empty, ns3::DefaultDeleter<ns3::FlowClassifier> >'])
    register_Ns3SimpleRefCount__Ns3HashImplementation_Ns3Empty_Ns3DefaultDeleter__lt__ns3HashImplementation__gt___methods(root_module, root_module['ns3::SimpleRefCount< ns3::Hash::Implementation, ns3::empty, ns3::DefaultDeleter<ns3::Hash::Implementation> >'])
    register_Ns3SimpleRefCount__Ns3Ipv4MulticastRoute_Ns3Empty_Ns3DefaultDeleter__lt__ns3Ipv4MulticastRoute__gt___methods(root_module, root_module['ns3::SimpleRefCount< ns3::Ipv4MulticastRoute, ns3::empty, ns3::DefaultDeleter<ns3::Ipv4MulticastRoute> >'])
    register_Ns3SimpleRefCount__Ns3Ipv4Route_Ns3Empty_Ns3DefaultDeleter__lt__ns3Ipv4Route__gt___methods(root_module, root_module['ns3::SimpleRefCount< ns3::Ipv4Route, ns3::empty, ns3::DefaultDeleter<ns3::Ipv4Route> >'])
    register_Ns3SimpleRefCount__Ns3NixVector_Ns3Empty_Ns3DefaultDeleter__lt__ns3NixVector__gt___methods(root_module, root_module['ns3::SimpleRefCount< ns3::NixVector, ns3::empty, ns3::DefaultDeleter<ns3::NixVector> >'])
    register_Ns3SimpleRefCount__Ns3OutputStreamWrapper_Ns3Empty_Ns3DefaultDeleter__lt__ns3OutputStreamWrapper__gt___methods(root_module, root_module['ns3::SimpleRefCount< ns3::OutputStreamWrapper, ns3::empty, ns3::DefaultDeleter<ns3::OutputStreamWrapper> >'])
    register_Ns3SimpleRefCount__Ns3Packet_Ns3Empty_Ns3DefaultDeleter__lt__ns3Packet__gt___methods(root_module, root_module['ns3::SimpleRefCount< ns3::Packet, ns3::empty, ns3::DefaultDeleter<ns3::Packet> >'])
    register_Ns3SimpleRefCount__Ns3QueueItem_Ns3Empty_Ns3DefaultDeleter__lt__ns3QueueItem__gt___methods(root_module, root_module['ns3::SimpleRefCount< ns3::QueueItem, ns3::empty, ns3::DefaultDeleter<ns3::QueueItem> >'])
    register_Ns3SimpleRefCount__Ns3TraceSourceAccessor_Ns3Empty_Ns3DefaultDeleter__lt__ns3TraceSourceAccessor__gt___methods(root_module, root_module['ns3::SimpleRefCount< ns3::TraceSourceAccessor, ns3::empty, ns3::DefaultDeleter<ns3::TraceSourceAccessor> >'])
    # Sockets, socket tags and time.
    register_Ns3Socket_methods(root_module, root_module['ns3::Socket'])
    register_Ns3SocketIpTosTag_methods(root_module, root_module['ns3::SocketIpTosTag'])
    register_Ns3SocketIpTtlTag_methods(root_module, root_module['ns3::SocketIpTtlTag'])
    register_Ns3SocketIpv6HopLimitTag_methods(root_module, root_module['ns3::SocketIpv6HopLimitTag'])
    register_Ns3SocketIpv6TclassTag_methods(root_module, root_module['ns3::SocketIpv6TclassTag'])
    register_Ns3SocketPriorityTag_methods(root_module, root_module['ns3::SocketPriorityTag'])
    register_Ns3SocketSetDontFragmentTag_methods(root_module, root_module['ns3::SocketSetDontFragmentTag'])
    register_Ns3Time_methods(root_module, root_module['ns3::Time'])
    register_Ns3TraceSourceAccessor_methods(root_module, root_module['ns3::TraceSourceAccessor'])
    register_Ns3Trailer_methods(root_module, root_module['ns3::Trailer'])
    # Attribute system.
    register_Ns3AttributeAccessor_methods(root_module, root_module['ns3::AttributeAccessor'])
    register_Ns3AttributeChecker_methods(root_module, root_module['ns3::AttributeChecker'])
    register_Ns3AttributeValue_methods(root_module, root_module['ns3::AttributeValue'])
    register_Ns3CallbackChecker_methods(root_module, root_module['ns3::CallbackChecker'])
    register_Ns3CallbackImplBase_methods(root_module, root_module['ns3::CallbackImplBase'])
    register_Ns3CallbackValue_methods(root_module, root_module['ns3::CallbackValue'])
    register_Ns3EmptyAttributeAccessor_methods(root_module, root_module['ns3::EmptyAttributeAccessor'])
    register_Ns3EmptyAttributeChecker_methods(root_module, root_module['ns3::EmptyAttributeChecker'])
    register_Ns3EmptyAttributeValue_methods(root_module, root_module['ns3::EmptyAttributeValue'])
    register_Ns3EventImpl_methods(root_module, root_module['ns3::EventImpl'])
    # Flow-monitor core classes.
    register_Ns3FlowClassifier_methods(root_module, root_module['ns3::FlowClassifier'])
    register_Ns3FlowMonitor_methods(root_module, root_module['ns3::FlowMonitor'])
    register_Ns3FlowMonitorFlowStats_methods(root_module, root_module['ns3::FlowMonitor::FlowStats'])
    register_Ns3FlowProbe_methods(root_module, root_module['ns3::FlowProbe'])
    register_Ns3FlowProbeFlowStats_methods(root_module, root_module['ns3::FlowProbe::FlowStats'])
    # IPv4/IPv6 stacks, classifiers and probes.
    register_Ns3Ipv4_methods(root_module, root_module['ns3::Ipv4'])
    register_Ns3Ipv4AddressChecker_methods(root_module, root_module['ns3::Ipv4AddressChecker'])
    register_Ns3Ipv4AddressValue_methods(root_module, root_module['ns3::Ipv4AddressValue'])
    register_Ns3Ipv4FlowClassifier_methods(root_module, root_module['ns3::Ipv4FlowClassifier'])
    register_Ns3Ipv4FlowClassifierFiveTuple_methods(root_module, root_module['ns3::Ipv4FlowClassifier::FiveTuple'])
    register_Ns3Ipv4FlowClassifierSortByCount_methods(root_module, root_module['ns3::Ipv4FlowClassifier::SortByCount'])
    register_Ns3Ipv4FlowProbe_methods(root_module, root_module['ns3::Ipv4FlowProbe'])
    register_Ns3Ipv4L3Protocol_methods(root_module, root_module['ns3::Ipv4L3Protocol'])
    register_Ns3Ipv4MaskChecker_methods(root_module, root_module['ns3::Ipv4MaskChecker'])
    register_Ns3Ipv4MaskValue_methods(root_module, root_module['ns3::Ipv4MaskValue'])
    register_Ns3Ipv4MulticastRoute_methods(root_module, root_module['ns3::Ipv4MulticastRoute'])
    register_Ns3Ipv4Route_methods(root_module, root_module['ns3::Ipv4Route'])
    register_Ns3Ipv4RoutingProtocol_methods(root_module, root_module['ns3::Ipv4RoutingProtocol'])
    register_Ns3Ipv6_methods(root_module, root_module['ns3::Ipv6'])
    register_Ns3Ipv6AddressChecker_methods(root_module, root_module['ns3::Ipv6AddressChecker'])
    register_Ns3Ipv6AddressValue_methods(root_module, root_module['ns3::Ipv6AddressValue'])
    register_Ns3Ipv6FlowClassifier_methods(root_module, root_module['ns3::Ipv6FlowClassifier'])
    register_Ns3Ipv6FlowClassifierFiveTuple_methods(root_module, root_module['ns3::Ipv6FlowClassifier::FiveTuple'])
    register_Ns3Ipv6FlowClassifierSortByCount_methods(root_module, root_module['ns3::Ipv6FlowClassifier::SortByCount'])
    register_Ns3Ipv6FlowProbe_methods(root_module, root_module['ns3::Ipv6FlowProbe'])
    register_Ns3Ipv6L3Protocol_methods(root_module, root_module['ns3::Ipv6L3Protocol'])
    register_Ns3Ipv6PmtuCache_methods(root_module, root_module['ns3::Ipv6PmtuCache'])
    register_Ns3Ipv6PrefixChecker_methods(root_module, root_module['ns3::Ipv6PrefixChecker'])
    register_Ns3Ipv6PrefixValue_methods(root_module, root_module['ns3::Ipv6PrefixValue'])
    register_Ns3Mac48AddressChecker_methods(root_module, root_module['ns3::Mac48AddressChecker'])
    register_Ns3Mac48AddressValue_methods(root_module, root_module['ns3::Mac48AddressValue'])
    register_Ns3NetDevice_methods(root_module, root_module['ns3::NetDevice'])
    register_Ns3NixVector_methods(root_module, root_module['ns3::NixVector'])
    register_Ns3Node_methods(root_module, root_module['ns3::Node'])
    register_Ns3ObjectFactoryChecker_methods(root_module, root_module['ns3::ObjectFactoryChecker'])
    register_Ns3ObjectFactoryValue_methods(root_module, root_module['ns3::ObjectFactoryValue'])
    register_Ns3OutputStreamWrapper_methods(root_module, root_module['ns3::OutputStreamWrapper'])
    register_Ns3Packet_methods(root_module, root_module['ns3::Packet'])
    register_Ns3QueueItem_methods(root_module, root_module['ns3::QueueItem'])
    register_Ns3TimeValue_methods(root_module, root_module['ns3::TimeValue'])
    register_Ns3TypeIdChecker_methods(root_module, root_module['ns3::TypeIdChecker'])
    register_Ns3TypeIdValue_methods(root_module, root_module['ns3::TypeIdValue'])
    register_Ns3AddressChecker_methods(root_module, root_module['ns3::AddressChecker'])
    register_Ns3AddressValue_methods(root_module, root_module['ns3::AddressValue'])
    # CallbackImpl template instantiations.
    register_Ns3CallbackImpl__Bool_Ns3Ptr__lt__ns3Socket__gt___Const_ns3Address___amp___Ns3Empty_Ns3Empty_Ns3Empty_Ns3Empty_Ns3Empty_Ns3Empty_Ns3Empty_methods(root_module, root_module['ns3::CallbackImpl< bool, ns3::Ptr<ns3::Socket>, const ns3::Address &, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >'])
    register_Ns3CallbackImpl__Ns3ObjectBase___star___Ns3Empty_Ns3Empty_Ns3Empty_Ns3Empty_Ns3Empty_Ns3Empty_Ns3Empty_Ns3Empty_Ns3Empty_methods(root_module, root_module['ns3::CallbackImpl< ns3::ObjectBase *, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >'])
    register_Ns3CallbackImpl__Void_Const_ns3Ipv4Header___amp___Ns3Ptr__lt__const_ns3Packet__gt___Ns3Ipv4L3ProtocolDropReason_Ns3Ptr__lt__ns3Ipv4__gt___Unsigned_int_Ns3Empty_Ns3Empty_Ns3Empty_Ns3Empty_methods(root_module, root_module['ns3::CallbackImpl< void, const ns3::Ipv4Header &, ns3::Ptr<const ns3::Packet>, ns3::Ipv4L3Protocol::DropReason, ns3::Ptr<ns3::Ipv4>, unsigned int, ns3::empty, ns3::empty, ns3::empty, ns3::empty >'])
    register_Ns3CallbackImpl__Void_Const_ns3Ipv4Header___amp___Ns3Ptr__lt__const_ns3Packet__gt___Unsigned_int_Ns3Empty_Ns3Empty_Ns3Empty_Ns3Empty_Ns3Empty_Ns3Empty_methods(root_module, root_module['ns3::CallbackImpl< void, const ns3::Ipv4Header &, ns3::Ptr<const ns3::Packet>, unsigned int, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >'])
    register_Ns3CallbackImpl__Void_Const_ns3Ipv6Header___amp___Ns3Ptr__lt__const_ns3Packet__gt___Ns3Ipv6L3ProtocolDropReason_Ns3Ptr__lt__ns3Ipv6__gt___Unsigned_int_Ns3Empty_Ns3Empty_Ns3Empty_Ns3Empty_methods(root_module, root_module['ns3::CallbackImpl< void, const ns3::Ipv6Header &, ns3::Ptr<const ns3::Packet>, ns3::Ipv6L3Protocol::DropReason, ns3::Ptr<ns3::Ipv6>, unsigned int, ns3::empty, ns3::empty, ns3::empty, ns3::empty >'])
    register_Ns3CallbackImpl__Void_Const_ns3Ipv6Header___amp___Ns3Ptr__lt__const_ns3Packet__gt___Unsigned_int_Ns3Empty_Ns3Empty_Ns3Empty_Ns3Empty_Ns3Empty_Ns3Empty_methods(root_module, root_module['ns3::CallbackImpl< void, const ns3::Ipv6Header &, ns3::Ptr<const ns3::Packet>, unsigned int, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >'])
    register_Ns3CallbackImpl__Void_Ns3Ptr__lt__const_ns3Packet__gt___Ns3Ptr__lt__ns3Ipv4__gt___Unsigned_int_Ns3Empty_Ns3Empty_Ns3Empty_Ns3Empty_Ns3Empty_Ns3Empty_methods(root_module, root_module['ns3::CallbackImpl< void, ns3::Ptr<const ns3::Packet>, ns3::Ptr<ns3::Ipv4>, unsigned int, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >'])
    register_Ns3CallbackImpl__Void_Ns3Ptr__lt__const_ns3Packet__gt___Ns3Ptr__lt__ns3Ipv6__gt___Unsigned_int_Ns3Empty_Ns3Empty_Ns3Empty_Ns3Empty_Ns3Empty_Ns3Empty_methods(root_module, root_module['ns3::CallbackImpl< void, ns3::Ptr<const ns3::Packet>, ns3::Ptr<ns3::Ipv6>, unsigned int, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >'])
    register_Ns3CallbackImpl__Void_Ns3Ptr__lt__ns3NetDevice__gt___Ns3Ptr__lt__const_ns3Packet__gt___Unsigned_short_Const_ns3Address___amp___Const_ns3Address___amp___Ns3NetDevicePacketType_Ns3Empty_Ns3Empty_Ns3Empty_methods(root_module, root_module['ns3::CallbackImpl< void, ns3::Ptr<ns3::NetDevice>, ns3::Ptr<const ns3::Packet>, unsigned short, const ns3::Address &, const ns3::Address &, ns3::NetDevice::PacketType, ns3::empty, ns3::empty, ns3::empty >'])
    register_Ns3CallbackImpl__Void_Ns3Ptr__lt__ns3NetDevice__gt___Ns3Empty_Ns3Empty_Ns3Empty_Ns3Empty_Ns3Empty_Ns3Empty_Ns3Empty_Ns3Empty_methods(root_module, root_module['ns3::CallbackImpl< void, ns3::Ptr<ns3::NetDevice>, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >'])
    register_Ns3CallbackImpl__Void_Ns3Ptr__lt__ns3Socket__gt___Const_ns3Address___amp___Ns3Empty_Ns3Empty_Ns3Empty_Ns3Empty_Ns3Empty_Ns3Empty_Ns3Empty_methods(root_module, root_module['ns3::CallbackImpl< void, ns3::Ptr<ns3::Socket>, const ns3::Address &, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >'])
    register_Ns3CallbackImpl__Void_Ns3Ptr__lt__ns3Socket__gt___Ns3Empty_Ns3Empty_Ns3Empty_Ns3Empty_Ns3Empty_Ns3Empty_Ns3Empty_Ns3Empty_methods(root_module, root_module['ns3::CallbackImpl< void, ns3::Ptr<ns3::Socket>, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >'])
    register_Ns3CallbackImpl__Void_Ns3Ptr__lt__ns3Socket__gt___Unsigned_int_Ns3Empty_Ns3Empty_Ns3Empty_Ns3Empty_Ns3Empty_Ns3Empty_Ns3Empty_methods(root_module, root_module['ns3::CallbackImpl< void, ns3::Ptr<ns3::Socket>, unsigned int, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >'])
    # Queue items and hash implementations.
    register_Ns3QueueDiscItem_methods(root_module, root_module['ns3::QueueDiscItem'])
    register_Ns3HashImplementation_methods(root_module, root_module['ns3::Hash::Implementation'])
    register_Ns3HashFunctionFnv1a_methods(root_module, root_module['ns3::Hash::Function::Fnv1a'])
    register_Ns3HashFunctionHash32_methods(root_module, root_module['ns3::Hash::Function::Hash32'])
    register_Ns3HashFunctionHash64_methods(root_module, root_module['ns3::Hash::Function::Hash64'])
    register_Ns3HashFunctionMurmur3_methods(root_module, root_module['ns3::Hash::Function::Murmur3'])
    return
def register_Ns3Address_methods(root_module, cls):
    """Register pybindgen bindings for ns3::Address (address.h, 'network' module).

    Adds comparison/stream operators, constructors, and the member functions
    listed in the generated ## comments below.  Call order and the exact C++
    type strings are significant to pybindgen and must not be altered.
    """
    cls.add_binary_comparison_operator('==')
    cls.add_binary_comparison_operator('!=')
    cls.add_binary_comparison_operator('<')
    cls.add_output_stream_operator()
    ## address.h (module 'network'): ns3::Address::Address() [constructor]
    cls.add_constructor([])
    ## address.h (module 'network'): ns3::Address::Address(uint8_t type, uint8_t const * buffer, uint8_t len) [constructor]
    cls.add_constructor([param('uint8_t', 'type'), param('uint8_t const *', 'buffer'), param('uint8_t', 'len')])
    ## address.h (module 'network'): ns3::Address::Address(ns3::Address const & address) [constructor]
    cls.add_constructor([param('ns3::Address const &', 'address')])
    ## address.h (module 'network'): bool ns3::Address::CheckCompatible(uint8_t type, uint8_t len) const [member function]
    cls.add_method('CheckCompatible',
                   'bool',
                   [param('uint8_t', 'type'), param('uint8_t', 'len')],
                   is_const=True)
    ## address.h (module 'network'): uint32_t ns3::Address::CopyAllFrom(uint8_t const * buffer, uint8_t len) [member function]
    cls.add_method('CopyAllFrom',
                   'uint32_t',
                   [param('uint8_t const *', 'buffer'), param('uint8_t', 'len')])
    ## address.h (module 'network'): uint32_t ns3::Address::CopyAllTo(uint8_t * buffer, uint8_t len) const [member function]
    cls.add_method('CopyAllTo',
                   'uint32_t',
                   [param('uint8_t *', 'buffer'), param('uint8_t', 'len')],
                   is_const=True)
    ## address.h (module 'network'): uint32_t ns3::Address::CopyFrom(uint8_t const * buffer, uint8_t len) [member function]
    cls.add_method('CopyFrom',
                   'uint32_t',
                   [param('uint8_t const *', 'buffer'), param('uint8_t', 'len')])
    ## address.h (module 'network'): uint32_t ns3::Address::CopyTo(uint8_t * buffer) const [member function]
    cls.add_method('CopyTo',
                   'uint32_t',
                   [param('uint8_t *', 'buffer')],
                   is_const=True)
    ## address.h (module 'network'): void ns3::Address::Deserialize(ns3::TagBuffer buffer) [member function]
    cls.add_method('Deserialize',
                   'void',
                   [param('ns3::TagBuffer', 'buffer')])
    ## address.h (module 'network'): uint8_t ns3::Address::GetLength() const [member function]
    cls.add_method('GetLength',
                   'uint8_t',
                   [],
                   is_const=True)
    ## address.h (module 'network'): uint32_t ns3::Address::GetSerializedSize() const [member function]
    cls.add_method('GetSerializedSize',
                   'uint32_t',
                   [],
                   is_const=True)
    ## address.h (module 'network'): bool ns3::Address::IsInvalid() const [member function]
    cls.add_method('IsInvalid',
                   'bool',
                   [],
                   is_const=True)
    ## address.h (module 'network'): bool ns3::Address::IsMatchingType(uint8_t type) const [member function]
    cls.add_method('IsMatchingType',
                   'bool',
                   [param('uint8_t', 'type')],
                   is_const=True)
    ## address.h (module 'network'): static uint8_t ns3::Address::Register() [member function]
    cls.add_method('Register',
                   'uint8_t',
                   [],
                   is_static=True)
    ## address.h (module 'network'): void ns3::Address::Serialize(ns3::TagBuffer buffer) const [member function]
    cls.add_method('Serialize',
                   'void',
                   [param('ns3::TagBuffer', 'buffer')],
                   is_const=True)
    return
def register_Ns3AttributeConstructionList_methods(root_module, cls):
    """Register pybindgen bindings for ns3::AttributeConstructionList
    (attribute-construction-list.h, 'core' module): constructors plus
    Add/Begin/End/Find member functions.
    """
    ## attribute-construction-list.h (module 'core'): ns3::AttributeConstructionList::AttributeConstructionList(ns3::AttributeConstructionList const & arg0) [constructor]
    cls.add_constructor([param('ns3::AttributeConstructionList const &', 'arg0')])
    ## attribute-construction-list.h (module 'core'): ns3::AttributeConstructionList::AttributeConstructionList() [constructor]
    cls.add_constructor([])
    ## attribute-construction-list.h (module 'core'): void ns3::AttributeConstructionList::Add(std::string name, ns3::Ptr<const ns3::AttributeChecker> checker, ns3::Ptr<ns3::AttributeValue> value) [member function]
    cls.add_method('Add',
                   'void',
                   [param('std::string', 'name'), param('ns3::Ptr< ns3::AttributeChecker const >', 'checker'), param('ns3::Ptr< ns3::AttributeValue >', 'value')])
    ## attribute-construction-list.h (module 'core'): ns3::AttributeConstructionList::CIterator ns3::AttributeConstructionList::Begin() const [member function]
    cls.add_method('Begin',
                   'ns3::AttributeConstructionList::CIterator',
                   [],
                   is_const=True)
    ## attribute-construction-list.h (module 'core'): ns3::AttributeConstructionList::CIterator ns3::AttributeConstructionList::End() const [member function]
    cls.add_method('End',
                   'ns3::AttributeConstructionList::CIterator',
                   [],
                   is_const=True)
    ## attribute-construction-list.h (module 'core'): ns3::Ptr<ns3::AttributeValue> ns3::AttributeConstructionList::Find(ns3::Ptr<const ns3::AttributeChecker> checker) const [member function]
    cls.add_method('Find',
                   'ns3::Ptr< ns3::AttributeValue >',
                   [param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
                   is_const=True)
    return
def register_Ns3AttributeConstructionListItem_methods(root_module, cls):
    """Register pybindgen bindings for ns3::AttributeConstructionList::Item
    (attribute-construction-list.h, 'core' module).
    """
    # Default constructor and copy constructor.
    cls.add_constructor([])
    cls.add_constructor(
        [param('ns3::AttributeConstructionList::Item const &', 'arg0')])
    # Public data members, exposed as writable instance attributes.
    for member, cpp_type in (
            ('checker', 'ns3::Ptr< ns3::AttributeChecker const >'),
            ('name', 'std::string'),
            ('value', 'ns3::Ptr< ns3::AttributeValue >')):
        cls.add_instance_attribute(member, cpp_type, is_const=False)
    return
def register_Ns3Buffer_methods(root_module, cls):
    """Register pybindgen bindings for ns3::Buffer (buffer.h, 'network' module):
    constructors and buffer-manipulation member functions.  The exact C++
    signature strings below are consumed by pybindgen verbatim.
    """
    ## buffer.h (module 'network'): ns3::Buffer::Buffer(ns3::Buffer const & o) [constructor]
    cls.add_constructor([param('ns3::Buffer const &', 'o')])
    ## buffer.h (module 'network'): ns3::Buffer::Buffer() [constructor]
    cls.add_constructor([])
    ## buffer.h (module 'network'): ns3::Buffer::Buffer(uint32_t dataSize) [constructor]
    cls.add_constructor([param('uint32_t', 'dataSize')])
    ## buffer.h (module 'network'): ns3::Buffer::Buffer(uint32_t dataSize, bool initialize) [constructor]
    cls.add_constructor([param('uint32_t', 'dataSize'), param('bool', 'initialize')])
    ## buffer.h (module 'network'): void ns3::Buffer::AddAtEnd(uint32_t end) [member function]
    cls.add_method('AddAtEnd',
                   'void',
                   [param('uint32_t', 'end')])
    ## buffer.h (module 'network'): void ns3::Buffer::AddAtEnd(ns3::Buffer const & o) [member function]
    cls.add_method('AddAtEnd',
                   'void',
                   [param('ns3::Buffer const &', 'o')])
    ## buffer.h (module 'network'): void ns3::Buffer::AddAtStart(uint32_t start) [member function]
    cls.add_method('AddAtStart',
                   'void',
                   [param('uint32_t', 'start')])
    ## buffer.h (module 'network'): ns3::Buffer::Iterator ns3::Buffer::Begin() const [member function]
    cls.add_method('Begin',
                   'ns3::Buffer::Iterator',
                   [],
                   is_const=True)
    ## buffer.h (module 'network'): void ns3::Buffer::CopyData(std::ostream * os, uint32_t size) const [member function]
    cls.add_method('CopyData',
                   'void',
                   [param('std::ostream *', 'os'), param('uint32_t', 'size')],
                   is_const=True)
    ## buffer.h (module 'network'): uint32_t ns3::Buffer::CopyData(uint8_t * buffer, uint32_t size) const [member function]
    cls.add_method('CopyData',
                   'uint32_t',
                   [param('uint8_t *', 'buffer'), param('uint32_t', 'size')],
                   is_const=True)
    ## buffer.h (module 'network'): ns3::Buffer ns3::Buffer::CreateFragment(uint32_t start, uint32_t length) const [member function]
    cls.add_method('CreateFragment',
                   'ns3::Buffer',
                   [param('uint32_t', 'start'), param('uint32_t', 'length')],
                   is_const=True)
    ## buffer.h (module 'network'): uint32_t ns3::Buffer::Deserialize(uint8_t const * buffer, uint32_t size) [member function]
    cls.add_method('Deserialize',
                   'uint32_t',
                   [param('uint8_t const *', 'buffer'), param('uint32_t', 'size')])
    ## buffer.h (module 'network'): ns3::Buffer::Iterator ns3::Buffer::End() const [member function]
    cls.add_method('End',
                   'ns3::Buffer::Iterator',
                   [],
                   is_const=True)
    ## buffer.h (module 'network'): uint32_t ns3::Buffer::GetSerializedSize() const [member function]
    cls.add_method('GetSerializedSize',
                   'uint32_t',
                   [],
                   is_const=True)
    ## buffer.h (module 'network'): uint32_t ns3::Buffer::GetSize() const [member function]
    cls.add_method('GetSize',
                   'uint32_t',
                   [],
                   is_const=True)
    ## buffer.h (module 'network'): uint8_t const * ns3::Buffer::PeekData() const [member function]
    cls.add_method('PeekData',
                   'uint8_t const *',
                   [],
                   is_const=True)
    ## buffer.h (module 'network'): void ns3::Buffer::RemoveAtEnd(uint32_t end) [member function]
    cls.add_method('RemoveAtEnd',
                   'void',
                   [param('uint32_t', 'end')])
    ## buffer.h (module 'network'): void ns3::Buffer::RemoveAtStart(uint32_t start) [member function]
    cls.add_method('RemoveAtStart',
                   'void',
                   [param('uint32_t', 'start')])
    ## buffer.h (module 'network'): uint32_t ns3::Buffer::Serialize(uint8_t * buffer, uint32_t maxSize) const [member function]
    cls.add_method('Serialize',
                   'uint32_t',
                   [param('uint8_t *', 'buffer'), param('uint32_t', 'maxSize')],
                   is_const=True)
    return
def register_Ns3BufferIterator_methods(root_module, cls):
    """Register pybindgen bindings for ns3::Buffer::Iterator (buffer.h,
    'network' module): constructors, navigation (Next/Prev), checksum helpers,
    and the full family of Read*/Write* accessors in host, network (Hton/Ntoh)
    and little-endian (Htolsb/Lsbtoh) byte orders.
    """
    ## buffer.h (module 'network'): ns3::Buffer::Iterator::Iterator(ns3::Buffer::Iterator const & arg0) [constructor]
    cls.add_constructor([param('ns3::Buffer::Iterator const &', 'arg0')])
    ## buffer.h (module 'network'): ns3::Buffer::Iterator::Iterator() [constructor]
    cls.add_constructor([])
    ## buffer.h (module 'network'): uint16_t ns3::Buffer::Iterator::CalculateIpChecksum(uint16_t size) [member function]
    cls.add_method('CalculateIpChecksum',
                   'uint16_t',
                   [param('uint16_t', 'size')])
    ## buffer.h (module 'network'): uint16_t ns3::Buffer::Iterator::CalculateIpChecksum(uint16_t size, uint32_t initialChecksum) [member function]
    cls.add_method('CalculateIpChecksum',
                   'uint16_t',
                   [param('uint16_t', 'size'), param('uint32_t', 'initialChecksum')])
    ## buffer.h (module 'network'): uint32_t ns3::Buffer::Iterator::GetDistanceFrom(ns3::Buffer::Iterator const & o) const [member function]
    cls.add_method('GetDistanceFrom',
                   'uint32_t',
                   [param('ns3::Buffer::Iterator const &', 'o')],
                   is_const=True)
    ## buffer.h (module 'network'): uint32_t ns3::Buffer::Iterator::GetRemainingSize() const [member function]
    cls.add_method('GetRemainingSize',
                   'uint32_t',
                   [],
                   is_const=True)
    ## buffer.h (module 'network'): uint32_t ns3::Buffer::Iterator::GetSize() const [member function]
    cls.add_method('GetSize',
                   'uint32_t',
                   [],
                   is_const=True)
    ## buffer.h (module 'network'): bool ns3::Buffer::Iterator::IsEnd() const [member function]
    cls.add_method('IsEnd',
                   'bool',
                   [],
                   is_const=True)
    ## buffer.h (module 'network'): bool ns3::Buffer::Iterator::IsStart() const [member function]
    cls.add_method('IsStart',
                   'bool',
                   [],
                   is_const=True)
    ## buffer.h (module 'network'): void ns3::Buffer::Iterator::Next() [member function]
    cls.add_method('Next',
                   'void',
                   [])
    ## buffer.h (module 'network'): void ns3::Buffer::Iterator::Next(uint32_t delta) [member function]
    cls.add_method('Next',
                   'void',
                   [param('uint32_t', 'delta')])
    ## buffer.h (module 'network'): uint8_t ns3::Buffer::Iterator::PeekU8() [member function]
    cls.add_method('PeekU8',
                   'uint8_t',
                   [])
    ## buffer.h (module 'network'): void ns3::Buffer::Iterator::Prev() [member function]
    cls.add_method('Prev',
                   'void',
                   [])
    ## buffer.h (module 'network'): void ns3::Buffer::Iterator::Prev(uint32_t delta) [member function]
    cls.add_method('Prev',
                   'void',
                   [param('uint32_t', 'delta')])
    ## buffer.h (module 'network'): void ns3::Buffer::Iterator::Read(uint8_t * buffer, uint32_t size) [member function]
    cls.add_method('Read',
                   'void',
                   [param('uint8_t *', 'buffer'), param('uint32_t', 'size')])
    ## buffer.h (module 'network'): void ns3::Buffer::Iterator::Read(ns3::Buffer::Iterator start, uint32_t size) [member function]
    cls.add_method('Read',
                   'void',
                   [param('ns3::Buffer::Iterator', 'start'), param('uint32_t', 'size')])
    ## buffer.h (module 'network'): uint16_t ns3::Buffer::Iterator::ReadLsbtohU16() [member function]
    cls.add_method('ReadLsbtohU16',
                   'uint16_t',
                   [])
    ## buffer.h (module 'network'): uint32_t ns3::Buffer::Iterator::ReadLsbtohU32() [member function]
    cls.add_method('ReadLsbtohU32',
                   'uint32_t',
                   [])
    ## buffer.h (module 'network'): uint64_t ns3::Buffer::Iterator::ReadLsbtohU64() [member function]
    cls.add_method('ReadLsbtohU64',
                   'uint64_t',
                   [])
    ## buffer.h (module 'network'): uint16_t ns3::Buffer::Iterator::ReadNtohU16() [member function]
    cls.add_method('ReadNtohU16',
                   'uint16_t',
                   [])
    ## buffer.h (module 'network'): uint32_t ns3::Buffer::Iterator::ReadNtohU32() [member function]
    cls.add_method('ReadNtohU32',
                   'uint32_t',
                   [])
    ## buffer.h (module 'network'): uint64_t ns3::Buffer::Iterator::ReadNtohU64() [member function]
    cls.add_method('ReadNtohU64',
                   'uint64_t',
                   [])
    ## buffer.h (module 'network'): uint16_t ns3::Buffer::Iterator::ReadU16() [member function]
    cls.add_method('ReadU16',
                   'uint16_t',
                   [])
    ## buffer.h (module 'network'): uint32_t ns3::Buffer::Iterator::ReadU32() [member function]
    cls.add_method('ReadU32',
                   'uint32_t',
                   [])
    ## buffer.h (module 'network'): uint64_t ns3::Buffer::Iterator::ReadU64() [member function]
    cls.add_method('ReadU64',
                   'uint64_t',
                   [])
    ## buffer.h (module 'network'): uint8_t ns3::Buffer::Iterator::ReadU8() [member function]
    cls.add_method('ReadU8',
                   'uint8_t',
                   [])
    ## buffer.h (module 'network'): void ns3::Buffer::Iterator::Write(uint8_t const * buffer, uint32_t size) [member function]
    cls.add_method('Write',
                   'void',
                   [param('uint8_t const *', 'buffer'), param('uint32_t', 'size')])
    ## buffer.h (module 'network'): void ns3::Buffer::Iterator::Write(ns3::Buffer::Iterator start, ns3::Buffer::Iterator end) [member function]
    cls.add_method('Write',
                   'void',
                   [param('ns3::Buffer::Iterator', 'start'), param('ns3::Buffer::Iterator', 'end')])
    ## buffer.h (module 'network'): void ns3::Buffer::Iterator::WriteHtolsbU16(uint16_t data) [member function]
    cls.add_method('WriteHtolsbU16',
                   'void',
                   [param('uint16_t', 'data')])
    ## buffer.h (module 'network'): void ns3::Buffer::Iterator::WriteHtolsbU32(uint32_t data) [member function]
    cls.add_method('WriteHtolsbU32',
                   'void',
                   [param('uint32_t', 'data')])
    ## buffer.h (module 'network'): void ns3::Buffer::Iterator::WriteHtolsbU64(uint64_t data) [member function]
    cls.add_method('WriteHtolsbU64',
                   'void',
                   [param('uint64_t', 'data')])
    ## buffer.h (module 'network'): void ns3::Buffer::Iterator::WriteHtonU16(uint16_t data) [member function]
    cls.add_method('WriteHtonU16',
                   'void',
                   [param('uint16_t', 'data')])
    ## buffer.h (module 'network'): void ns3::Buffer::Iterator::WriteHtonU32(uint32_t data) [member function]
    cls.add_method('WriteHtonU32',
                   'void',
                   [param('uint32_t', 'data')])
    ## buffer.h (module 'network'): void ns3::Buffer::Iterator::WriteHtonU64(uint64_t data) [member function]
    cls.add_method('WriteHtonU64',
                   'void',
                   [param('uint64_t', 'data')])
    ## buffer.h (module 'network'): void ns3::Buffer::Iterator::WriteU16(uint16_t data) [member function]
    cls.add_method('WriteU16',
                   'void',
                   [param('uint16_t', 'data')])
    ## buffer.h (module 'network'): void ns3::Buffer::Iterator::WriteU32(uint32_t data) [member function]
    cls.add_method('WriteU32',
                   'void',
                   [param('uint32_t', 'data')])
    ## buffer.h (module 'network'): void ns3::Buffer::Iterator::WriteU64(uint64_t data) [member function]
    cls.add_method('WriteU64',
                   'void',
                   [param('uint64_t', 'data')])
    ## buffer.h (module 'network'): void ns3::Buffer::Iterator::WriteU8(uint8_t data) [member function]
    cls.add_method('WriteU8',
                   'void',
                   [param('uint8_t', 'data')])
    ## buffer.h (module 'network'): void ns3::Buffer::Iterator::WriteU8(uint8_t data, uint32_t len) [member function]
    cls.add_method('WriteU8',
                   'void',
                   [param('uint8_t', 'data'), param('uint32_t', 'len')])
    return
def register_Ns3ByteTagIterator_methods(root_module, cls):
    """Register pybindgen bindings for ns3::ByteTagIterator (packet.h,
    'network' module).
    """
    # Copy constructor.
    cls.add_constructor([param('ns3::ByteTagIterator const &', 'arg0')])
    # bool HasNext() const
    cls.add_method('HasNext', 'bool', [], is_const=True)
    # ns3::ByteTagIterator::Item Next()
    cls.add_method('Next', 'ns3::ByteTagIterator::Item', [])
    return
def register_Ns3ByteTagIteratorItem_methods(root_module, cls):
    """Register pybindgen bindings for ns3::ByteTagIterator::Item (packet.h,
    'network' module).
    """
    # Copy constructor.
    cls.add_constructor([param('ns3::ByteTagIterator::Item const &', 'arg0')])
    # Byte-range accessors: uint32_t GetEnd() const / uint32_t GetStart() const.
    cls.add_method('GetEnd', 'uint32_t', [], is_const=True)
    cls.add_method('GetStart', 'uint32_t', [], is_const=True)
    # void GetTag(ns3::Tag & tag) const
    cls.add_method('GetTag', 'void', [param('ns3::Tag &', 'tag')],
                   is_const=True)
    # ns3::TypeId GetTypeId() const
    cls.add_method('GetTypeId', 'ns3::TypeId', [], is_const=True)
    return
def register_Ns3ByteTagList_methods(root_module, cls):
    """Register pybindgen bindings for ns3::ByteTagList (byte-tag-list.h,
    'network' module): constructors, the two Add overloads, offset adjustment
    helpers, Begin and RemoveAll.
    """
    ## byte-tag-list.h (module 'network'): ns3::ByteTagList::ByteTagList() [constructor]
    cls.add_constructor([])
    ## byte-tag-list.h (module 'network'): ns3::ByteTagList::ByteTagList(ns3::ByteTagList const & o) [constructor]
    cls.add_constructor([param('ns3::ByteTagList const &', 'o')])
    ## byte-tag-list.h (module 'network'): ns3::TagBuffer ns3::ByteTagList::Add(ns3::TypeId tid, uint32_t bufferSize, int32_t start, int32_t end) [member function]
    cls.add_method('Add',
                   'ns3::TagBuffer',
                   [param('ns3::TypeId', 'tid'), param('uint32_t', 'bufferSize'), param('int32_t', 'start'), param('int32_t', 'end')])
    ## byte-tag-list.h (module 'network'): void ns3::ByteTagList::Add(ns3::ByteTagList const & o) [member function]
    cls.add_method('Add',
                   'void',
                   [param('ns3::ByteTagList const &', 'o')])
    ## byte-tag-list.h (module 'network'): void ns3::ByteTagList::AddAtEnd(int32_t appendOffset) [member function]
    cls.add_method('AddAtEnd',
                   'void',
                   [param('int32_t', 'appendOffset')])
    ## byte-tag-list.h (module 'network'): void ns3::ByteTagList::AddAtStart(int32_t prependOffset) [member function]
    cls.add_method('AddAtStart',
                   'void',
                   [param('int32_t', 'prependOffset')])
    ## byte-tag-list.h (module 'network'): void ns3::ByteTagList::Adjust(int32_t adjustment) [member function]
    cls.add_method('Adjust',
                   'void',
                   [param('int32_t', 'adjustment')])
    ## byte-tag-list.h (module 'network'): ns3::ByteTagList::Iterator ns3::ByteTagList::Begin(int32_t offsetStart, int32_t offsetEnd) const [member function]
    cls.add_method('Begin',
                   'ns3::ByteTagList::Iterator',
                   [param('int32_t', 'offsetStart'), param('int32_t', 'offsetEnd')],
                   is_const=True)
    ## byte-tag-list.h (module 'network'): void ns3::ByteTagList::RemoveAll() [member function]
    cls.add_method('RemoveAll',
                   'void',
                   [])
    return
def register_Ns3ByteTagListIterator_methods(root_module, cls):
    """Register pybindgen bindings for ns3::ByteTagList::Iterator
    (byte-tag-list.h, 'network' module).
    """
    # Copy constructor.
    cls.add_constructor([param('ns3::ByteTagList::Iterator const &', 'arg0')])
    # uint32_t GetOffsetStart() const
    cls.add_method('GetOffsetStart', 'uint32_t', [], is_const=True)
    # bool HasNext() const
    cls.add_method('HasNext', 'bool', [], is_const=True)
    # ns3::ByteTagList::Iterator::Item Next()
    cls.add_method('Next', 'ns3::ByteTagList::Iterator::Item', [])
    return
def register_Ns3ByteTagListIteratorItem_methods(root_module, cls):
    """Register pybindgen bindings for ns3::ByteTagList::Iterator::Item
    (byte-tag-list.h, 'network' module).
    """
    # Copy constructor and construction from a TagBuffer.
    cls.add_constructor(
        [param('ns3::ByteTagList::Iterator::Item const &', 'arg0')])
    cls.add_constructor([param('ns3::TagBuffer', 'buf')])
    # Public data members, exposed as writable instance attributes.
    for member, cpp_type in (('buf', 'ns3::TagBuffer'),
                             ('end', 'int32_t'),
                             ('size', 'uint32_t'),
                             ('start', 'int32_t'),
                             ('tid', 'ns3::TypeId')):
        cls.add_instance_attribute(member, cpp_type, is_const=False)
    return
def register_Ns3CallbackBase_methods(root_module, cls):
    """Register pybindgen bindings for ns3::CallbackBase (callback.h,
    'core' module).
    """
    # Copy constructor and default constructor.
    cls.add_constructor([param('ns3::CallbackBase const &', 'arg0')])
    cls.add_constructor([])
    # ns3::Ptr<ns3::CallbackImplBase> GetImpl() const
    cls.add_method('GetImpl', 'ns3::Ptr< ns3::CallbackImplBase >', [],
                   is_const=True)
    # Protected constructor wrapping an existing callback implementation.
    cls.add_constructor([param('ns3::Ptr< ns3::CallbackImplBase >', 'impl')],
                        visibility='protected')
    return
def register_Ns3DefaultDeleter__Ns3AttributeAccessor_methods(root_module, cls):
    """Register pybindgen bindings for ns3::DefaultDeleter<ns3::AttributeAccessor>
    (default-deleter.h, 'core' module).
    """
    # Default and copy constructors.
    cls.add_constructor([])
    cls.add_constructor(
        [param('ns3::DefaultDeleter< ns3::AttributeAccessor > const &', 'arg0')])
    # static void Delete(ns3::AttributeAccessor * object)
    cls.add_method('Delete', 'void',
                   [param('ns3::AttributeAccessor *', 'object')],
                   is_static=True)
    return
def register_Ns3DefaultDeleter__Ns3AttributeChecker_methods(root_module, cls):
    """Register pybindgen bindings for ns3::DefaultDeleter<ns3::AttributeChecker>
    (default-deleter.h, 'core' module).
    """
    # Default and copy constructors.
    cls.add_constructor([])
    cls.add_constructor(
        [param('ns3::DefaultDeleter< ns3::AttributeChecker > const &', 'arg0')])
    # static void Delete(ns3::AttributeChecker * object)
    cls.add_method('Delete', 'void',
                   [param('ns3::AttributeChecker *', 'object')],
                   is_static=True)
    return
def register_Ns3DefaultDeleter__Ns3AttributeValue_methods(root_module, cls):
    """Register pybindgen bindings for ns3::DefaultDeleter<ns3::AttributeValue>
    (default-deleter.h, 'core' module).
    """
    # Default and copy constructors.
    cls.add_constructor([])
    cls.add_constructor(
        [param('ns3::DefaultDeleter< ns3::AttributeValue > const &', 'arg0')])
    # static void Delete(ns3::AttributeValue * object)
    cls.add_method('Delete', 'void',
                   [param('ns3::AttributeValue *', 'object')],
                   is_static=True)
    return
def register_Ns3DefaultDeleter__Ns3CallbackImplBase_methods(root_module, cls):
    """Register pybindgen bindings for ns3::DefaultDeleter<ns3::CallbackImplBase>
    (default-deleter.h, 'core' module).
    """
    # Default and copy constructors.
    cls.add_constructor([])
    cls.add_constructor(
        [param('ns3::DefaultDeleter< ns3::CallbackImplBase > const &', 'arg0')])
    # static void Delete(ns3::CallbackImplBase * object)
    cls.add_method('Delete', 'void',
                   [param('ns3::CallbackImplBase *', 'object')],
                   is_static=True)
    return
def register_Ns3DefaultDeleter__Ns3EventImpl_methods(root_module, cls):
    """Register pybindgen bindings for ns3::DefaultDeleter<ns3::EventImpl>
    (default-deleter.h, 'core' module).
    """
    # Default and copy constructors.
    cls.add_constructor([])
    cls.add_constructor(
        [param('ns3::DefaultDeleter< ns3::EventImpl > const &', 'arg0')])
    # static void Delete(ns3::EventImpl * object)
    cls.add_method('Delete', 'void',
                   [param('ns3::EventImpl *', 'object')],
                   is_static=True)
    return
def register_Ns3DefaultDeleter__Ns3FlowClassifier_methods(root_module, cls):
    """Register pybindgen bindings for ns3::DefaultDeleter<ns3::FlowClassifier>
    (default-deleter.h, 'core' module).
    """
    # Default and copy constructors.
    cls.add_constructor([])
    cls.add_constructor(
        [param('ns3::DefaultDeleter< ns3::FlowClassifier > const &', 'arg0')])
    # static void Delete(ns3::FlowClassifier * object)
    cls.add_method('Delete', 'void',
                   [param('ns3::FlowClassifier *', 'object')],
                   is_static=True)
    return
def register_Ns3DefaultDeleter__Ns3HashImplementation_methods(root_module, cls):
    """Register pybindgen bindings for ns3::DefaultDeleter<ns3::Hash::Implementation>
    (default-deleter.h, 'core' module).
    """
    # Default and copy constructors.
    cls.add_constructor([])
    cls.add_constructor(
        [param('ns3::DefaultDeleter< ns3::Hash::Implementation > const &', 'arg0')])
    # static void Delete(ns3::Hash::Implementation * object)
    cls.add_method('Delete', 'void',
                   [param('ns3::Hash::Implementation *', 'object')],
                   is_static=True)
    return
def register_Ns3DefaultDeleter__Ns3NixVector_methods(root_module, cls):
    """Register pybindgen bindings for ns3::DefaultDeleter<ns3::NixVector>
    (default-deleter.h, 'core' module).
    """
    # Default and copy constructors.
    cls.add_constructor([])
    cls.add_constructor(
        [param('ns3::DefaultDeleter< ns3::NixVector > const &', 'arg0')])
    # static void Delete(ns3::NixVector * object)
    cls.add_method('Delete', 'void',
                   [param('ns3::NixVector *', 'object')],
                   is_static=True)
    return
def register_Ns3DefaultDeleter__Ns3Packet_methods(root_module, cls):
    """Register pybindgen bindings for ns3::DefaultDeleter<ns3::Packet>
    (default-deleter.h, 'core' module).
    """
    # Default and copy constructors.
    cls.add_constructor([])
    cls.add_constructor(
        [param('ns3::DefaultDeleter< ns3::Packet > const &', 'arg0')])
    # static void Delete(ns3::Packet * object)
    cls.add_method('Delete', 'void',
                   [param('ns3::Packet *', 'object')],
                   is_static=True)
    return
def register_Ns3DefaultDeleter__Ns3TraceSourceAccessor_methods(root_module, cls):
    """Register pybindgen bindings for ns3::DefaultDeleter<ns3::TraceSourceAccessor>
    (default-deleter.h, 'core' module).
    """
    # Default and copy constructors.
    cls.add_constructor([])
    cls.add_constructor(
        [param('ns3::DefaultDeleter< ns3::TraceSourceAccessor > const &', 'arg0')])
    # static void Delete(ns3::TraceSourceAccessor * object)
    cls.add_method('Delete', 'void',
                   [param('ns3::TraceSourceAccessor *', 'object')],
                   is_static=True)
    return
def register_Ns3EventId_methods(root_module, cls):
    """Register pybindgen bindings for ns3::EventId (event-id.h, 'core'
    module): comparison operators, constructors, and accessor/cancel
    member functions.
    """
    cls.add_binary_comparison_operator('==')
    cls.add_binary_comparison_operator('!=')
    cls.add_binary_comparison_operator('<')
    ## event-id.h (module 'core'): ns3::EventId::EventId(ns3::EventId const & arg0) [constructor]
    cls.add_constructor([param('ns3::EventId const &', 'arg0')])
    ## event-id.h (module 'core'): ns3::EventId::EventId() [constructor]
    cls.add_constructor([])
    ## event-id.h (module 'core'): ns3::EventId::EventId(ns3::Ptr<ns3::EventImpl> const & impl, uint64_t ts, uint32_t context, uint32_t uid) [constructor]
    cls.add_constructor([param('ns3::Ptr< ns3::EventImpl > const &', 'impl'), param('uint64_t', 'ts'), param('uint32_t', 'context'), param('uint32_t', 'uid')])
    ## event-id.h (module 'core'): void ns3::EventId::Cancel() [member function]
    cls.add_method('Cancel',
                   'void',
                   [])
    ## event-id.h (module 'core'): uint32_t ns3::EventId::GetContext() const [member function]
    cls.add_method('GetContext',
                   'uint32_t',
                   [],
                   is_const=True)
    ## event-id.h (module 'core'): uint64_t ns3::EventId::GetTs() const [member function]
    cls.add_method('GetTs',
                   'uint64_t',
                   [],
                   is_const=True)
    ## event-id.h (module 'core'): uint32_t ns3::EventId::GetUid() const [member function]
    cls.add_method('GetUid',
                   'uint32_t',
                   [],
                   is_const=True)
    ## event-id.h (module 'core'): bool ns3::EventId::IsExpired() const [member function]
    cls.add_method('IsExpired',
                   'bool',
                   [],
                   is_const=True)
    ## event-id.h (module 'core'): bool ns3::EventId::IsRunning() const [member function]
    cls.add_method('IsRunning',
                   'bool',
                   [],
                   is_const=True)
    ## event-id.h (module 'core'): ns3::EventImpl * ns3::EventId::PeekEventImpl() const [member function]
    cls.add_method('PeekEventImpl',
                   'ns3::EventImpl *',
                   [],
                   is_const=True)
    return
def register_Ns3FlowMonitorHelper_methods(root_module, cls):
    """Register Python bindings for ns3::FlowMonitorHelper (flow-monitor-helper.h).

    The helper installs FlowMonitor probes on nodes and serializes results to XML.
    """
    monitor_ptr = 'ns3::Ptr< ns3::FlowMonitor >'
    classifier_ptr = 'ns3::Ptr< ns3::FlowClassifier >'
    cls.add_constructor([])
    cls.add_method('SetMonitorAttribute', 'void',
                   [param('std::string', 'n1'), param('ns3::AttributeValue const &', 'v1')])
    # Install() overloads: a node container, a single node, or every node.
    cls.add_method('Install', monitor_ptr, [param('ns3::NodeContainer', 'nodes')])
    cls.add_method('Install', monitor_ptr, [param('ns3::Ptr< ns3::Node >', 'node')])
    cls.add_method('InstallAll', monitor_ptr, [])
    cls.add_method('GetMonitor', monitor_ptr, [])
    # IPv4 and IPv6 flow classifiers.
    cls.add_method('GetClassifier', classifier_ptr, [])
    cls.add_method('GetClassifier6', classifier_ptr, [])
    # XML serialization: to an ostream, to a string, or to a file.
    cls.add_method('SerializeToXmlStream', 'void',
                   [param('std::ostream &', 'os'), param('uint16_t', 'indent'),
                    param('bool', 'enableHistograms'), param('bool', 'enableProbes')])
    cls.add_method('SerializeToXmlString', 'std::string',
                   [param('uint16_t', 'indent'),
                    param('bool', 'enableHistograms'), param('bool', 'enableProbes')])
    cls.add_method('SerializeToXmlFile', 'void',
                   [param('std::string', 'fileName'),
                    param('bool', 'enableHistograms'), param('bool', 'enableProbes')])
    return
def register_Ns3Hasher_methods(root_module, cls):
    """Register Python bindings for ns3::Hasher (core hash.h)."""
    # Constructors: copy, default, and from a hash implementation pointer.
    cls.add_constructor([param('ns3::Hasher const &', 'arg0')])
    cls.add_constructor([])
    cls.add_constructor([param('ns3::Ptr< ns3::Hash::Implementation >', 'hp')])
    # GetHash32 / GetHash64, each overloaded on (buffer, size) and std::string.
    for method_name, result_type in (('GetHash32', 'uint32_t'),
                                     ('GetHash64', 'uint64_t')):
        cls.add_method(method_name, result_type,
                       [param('char const *', 'buffer'),
                        param('std::size_t const', 'size')])
        cls.add_method(method_name, result_type, [param('std::string const', 's')])
    # clear() resets the hasher state and returns *this for chaining.
    cls.add_method('clear', 'ns3::Hasher &', [])
    return
def register_Ns3Histogram_methods(root_module, cls):
    """Register Python bindings for ns3::Histogram (flow-monitor histogram.h)."""
    # Constructors: copy, explicit bin width, default.
    cls.add_constructor([param('ns3::Histogram const &', 'arg0')])
    cls.add_constructor([param('double', 'binWidth')])
    cls.add_constructor([])
    cls.add_method('AddValue', 'void', [param('double', 'value')])
    # Per-bin accessors, all taking the bin index.
    for accessor, result_type in (('GetBinCount', 'uint32_t'),
                                  ('GetBinEnd', 'double'),
                                  ('GetBinStart', 'double')):
        cls.add_method(accessor, result_type, [param('uint32_t', 'index')])
    cls.add_method('GetBinWidth', 'double', [param('uint32_t', 'index')],
                   is_const=True)
    cls.add_method('GetNBins', 'uint32_t', [], is_const=True)
    cls.add_method('SerializeToXmlStream', 'void',
                   [param('std::ostream &', 'os'), param('uint16_t', 'indent'),
                    param('std::string', 'elementName')],
                   is_const=True)
    cls.add_method('SetDefaultBinWidth', 'void', [param('double', 'binWidth')])
    return
def register_Ns3Inet6SocketAddress_methods(root_module, cls):
    """Register Python bindings for ns3::Inet6SocketAddress (network).

    An Inet6SocketAddress bundles an Ipv6Address with a port number.
    """
    # Constructors: copy, then every combination of (address, port) where the
    # address may be an Ipv6Address or a C string, and either part may be omitted.
    cls.add_constructor([param('ns3::Inet6SocketAddress const &', 'arg0')])
    cls.add_constructor([param('ns3::Ipv6Address', 'ipv6'), param('uint16_t', 'port')])
    cls.add_constructor([param('ns3::Ipv6Address', 'ipv6')])
    cls.add_constructor([param('uint16_t', 'port')])
    cls.add_constructor([param('char const *', 'ipv6'), param('uint16_t', 'port')])
    cls.add_constructor([param('char const *', 'ipv6')])
    # Conversion to/from the generic ns3::Address type.
    cls.add_method('ConvertFrom', 'ns3::Inet6SocketAddress',
                   [param('ns3::Address const &', 'addr')],
                   is_static=True)
    cls.add_method('GetIpv6', 'ns3::Ipv6Address', [], is_const=True)
    cls.add_method('GetPort', 'uint16_t', [], is_const=True)
    cls.add_method('IsMatchingType', 'bool',
                   [param('ns3::Address const &', 'addr')],
                   is_static=True)
    cls.add_method('SetIpv6', 'void', [param('ns3::Ipv6Address', 'ipv6')])
    cls.add_method('SetPort', 'void', [param('uint16_t', 'port')])
    return
def register_Ns3InetSocketAddress_methods(root_module, cls):
    """Register Python bindings for ns3::InetSocketAddress (network).

    An InetSocketAddress bundles an Ipv4Address, a port, and a TOS byte.
    """
    # Constructors: copy, then every combination of (address, port) where the
    # address may be an Ipv4Address or a C string, and either part may be omitted.
    cls.add_constructor([param('ns3::InetSocketAddress const &', 'arg0')])
    cls.add_constructor([param('ns3::Ipv4Address', 'ipv4'), param('uint16_t', 'port')])
    cls.add_constructor([param('ns3::Ipv4Address', 'ipv4')])
    cls.add_constructor([param('uint16_t', 'port')])
    cls.add_constructor([param('char const *', 'ipv4'), param('uint16_t', 'port')])
    cls.add_constructor([param('char const *', 'ipv4')])
    # Conversion to/from the generic ns3::Address type.
    cls.add_method('ConvertFrom', 'ns3::InetSocketAddress',
                   [param('ns3::Address const &', 'address')],
                   is_static=True)
    cls.add_method('GetIpv4', 'ns3::Ipv4Address', [], is_const=True)
    cls.add_method('GetPort', 'uint16_t', [], is_const=True)
    cls.add_method('GetTos', 'uint8_t', [], is_const=True)
    cls.add_method('IsMatchingType', 'bool',
                   [param('ns3::Address const &', 'address')],
                   is_static=True)
    cls.add_method('SetIpv4', 'void', [param('ns3::Ipv4Address', 'address')])
    cls.add_method('SetPort', 'void', [param('uint16_t', 'port')])
    cls.add_method('SetTos', 'void', [param('uint8_t', 'tos')])
    return
def register_Ns3Ipv4Address_methods(root_module, cls):
    """Register Python bindings for ns3::Ipv4Address (network ipv4-address.h)."""
    cls.add_output_stream_operator()
    # Addresses are printable, comparable, and totally ordered.
    for operator in ('==', '!=', '<'):
        cls.add_binary_comparison_operator(operator)
    # Constructors: copy, default, raw 32-bit value, dotted-quad string.
    cls.add_constructor([param('ns3::Ipv4Address const &', 'arg0')])
    cls.add_constructor([])
    cls.add_constructor([param('uint32_t', 'address')])
    cls.add_constructor([param('char const *', 'address')])
    cls.add_method('CombineMask', 'ns3::Ipv4Address',
                   [param('ns3::Ipv4Mask const &', 'mask')],
                   is_const=True)
    # Conversion from the generic ns3::Address type and wire format.
    cls.add_method('ConvertFrom', 'ns3::Ipv4Address',
                   [param('ns3::Address const &', 'address')],
                   is_static=True)
    cls.add_method('Deserialize', 'ns3::Ipv4Address',
                   [param('uint8_t const *', 'buf')],
                   is_static=True)
    cls.add_method('Get', 'uint32_t', [], is_const=True)
    # Static factories for well-known addresses.
    for factory in ('GetAny', 'GetBroadcast', 'GetLoopback'):
        cls.add_method(factory, 'ns3::Ipv4Address', [], is_static=True)
    cls.add_method('GetSubnetDirectedBroadcast', 'ns3::Ipv4Address',
                   [param('ns3::Ipv4Mask const &', 'mask')],
                   is_const=True)
    cls.add_method('GetZero', 'ns3::Ipv4Address', [], is_static=True)
    # Classification predicates.
    for predicate in ('IsAny', 'IsBroadcast'):
        cls.add_method(predicate, 'bool', [], is_const=True)
    cls.add_method('IsEqual', 'bool',
                   [param('ns3::Ipv4Address const &', 'other')],
                   is_const=True)
    for predicate in ('IsLocalMulticast', 'IsLocalhost'):
        cls.add_method(predicate, 'bool', [], is_const=True)
    cls.add_method('IsMatchingType', 'bool',
                   [param('ns3::Address const &', 'address')],
                   is_static=True)
    cls.add_method('IsMulticast', 'bool', [], is_const=True)
    cls.add_method('IsSubnetDirectedBroadcast', 'bool',
                   [param('ns3::Ipv4Mask const &', 'mask')],
                   is_const=True)
    cls.add_method('Print', 'void', [param('std::ostream &', 'os')], is_const=True)
    cls.add_method('Serialize', 'void', [param('uint8_t *', 'buf')], is_const=True)
    # Set() overloads mirror the raw-value and string constructors.
    cls.add_method('Set', 'void', [param('uint32_t', 'address')])
    cls.add_method('Set', 'void', [param('char const *', 'address')])
    return
def register_Ns3Ipv4InterfaceAddress_methods(root_module, cls):
    """Register Python bindings for ns3::Ipv4InterfaceAddress (internet).

    Holds a local address, network mask, broadcast address, scope, and the
    primary/secondary flag of an address assigned to an Ipv4 interface.
    """
    scope_enum = 'ns3::Ipv4InterfaceAddress::InterfaceAddressScope_e'
    cls.add_output_stream_operator()
    for operator in ('==', '!='):
        cls.add_binary_comparison_operator(operator)
    # Constructors: default, (local, mask), copy.
    cls.add_constructor([])
    cls.add_constructor([param('ns3::Ipv4Address', 'local'),
                         param('ns3::Ipv4Mask', 'mask')])
    cls.add_constructor([param('ns3::Ipv4InterfaceAddress const &', 'o')])
    # Const getters.
    for getter, result_type in (('GetBroadcast', 'ns3::Ipv4Address'),
                                ('GetLocal', 'ns3::Ipv4Address'),
                                ('GetMask', 'ns3::Ipv4Mask'),
                                ('GetScope', scope_enum)):
        cls.add_method(getter, result_type, [], is_const=True)
    cls.add_method('IsSecondary', 'bool', [], is_const=True)
    # Setters.
    cls.add_method('SetBroadcast', 'void', [param('ns3::Ipv4Address', 'broadcast')])
    cls.add_method('SetLocal', 'void', [param('ns3::Ipv4Address', 'local')])
    cls.add_method('SetMask', 'void', [param('ns3::Ipv4Mask', 'mask')])
    cls.add_method('SetPrimary', 'void', [])
    cls.add_method('SetScope', 'void', [param(scope_enum, 'scope')])
    cls.add_method('SetSecondary', 'void', [])
    return
def register_Ns3Ipv4Mask_methods(root_module, cls):
    """Register Python bindings for ns3::Ipv4Mask (network ipv4-address.h)."""
    cls.add_output_stream_operator()
    for operator in ('==', '!='):
        cls.add_binary_comparison_operator(operator)
    # Constructors: copy, default, raw 32-bit value, dotted-quad string.
    cls.add_constructor([param('ns3::Ipv4Mask const &', 'arg0')])
    cls.add_constructor([])
    cls.add_constructor([param('uint32_t', 'mask')])
    cls.add_constructor([param('char const *', 'mask')])
    cls.add_method('Get', 'uint32_t', [], is_const=True)
    cls.add_method('GetInverse', 'uint32_t', [], is_const=True)
    cls.add_method('GetLoopback', 'ns3::Ipv4Mask', [], is_static=True)
    cls.add_method('GetOnes', 'ns3::Ipv4Mask', [], is_static=True)
    cls.add_method('GetPrefixLength', 'uint16_t', [], is_const=True)
    cls.add_method('GetZero', 'ns3::Ipv4Mask', [], is_static=True)
    # NOTE: the C++ API takes the IsEqual argument by value, not by reference.
    cls.add_method('IsEqual', 'bool', [param('ns3::Ipv4Mask', 'other')],
                   is_const=True)
    cls.add_method('IsMatch', 'bool',
                   [param('ns3::Ipv4Address', 'a'), param('ns3::Ipv4Address', 'b')],
                   is_const=True)
    cls.add_method('Print', 'void', [param('std::ostream &', 'os')], is_const=True)
    cls.add_method('Set', 'void', [param('uint32_t', 'mask')])
    return
def register_Ns3Ipv6Address_methods(root_module, cls):
    """Register Python bindings for ns3::Ipv6Address (network ipv6-address.h).

    Registration order matches the generated original exactly; the loops below
    only group consecutive methods that share a signature shape.
    """
    cls.add_output_stream_operator()
    # Addresses are printable, comparable, and totally ordered.
    for operator in ('==', '!=', '<'):
        cls.add_binary_comparison_operator(operator)
    # Constructors: default, C string, raw bytes, copy by reference, copy by pointer.
    cls.add_constructor([])
    cls.add_constructor([param('char const *', 'address')])
    cls.add_constructor([param('uint8_t *', 'address')])
    cls.add_constructor([param('ns3::Ipv6Address const &', 'addr')])
    cls.add_constructor([param('ns3::Ipv6Address const *', 'addr')])
    cls.add_method('CombinePrefix', 'ns3::Ipv6Address',
                   [param('ns3::Ipv6Prefix const &', 'prefix')])
    # Conversion from the generic ns3::Address type and wire format.
    cls.add_method('ConvertFrom', 'ns3::Ipv6Address',
                   [param('ns3::Address const &', 'address')],
                   is_static=True)
    cls.add_method('Deserialize', 'ns3::Ipv6Address',
                   [param('uint8_t const *', 'buf')],
                   is_static=True)
    # Static factories for well-known multicast / unspecified addresses.
    for factory in ('GetAllHostsMulticast', 'GetAllNodesMulticast',
                    'GetAllRoutersMulticast', 'GetAny'):
        cls.add_method(factory, 'ns3::Ipv6Address', [], is_static=True)
    cls.add_method('GetBytes', 'void', [param('uint8_t *', 'buf')], is_const=True)
    cls.add_method('GetIpv4MappedAddress', 'ns3::Ipv4Address', [], is_const=True)
    for factory in ('GetLoopback', 'GetOnes', 'GetZero'):
        cls.add_method(factory, 'ns3::Ipv6Address', [], is_static=True)
    # IsAllHostsMulticast is the only deprecated predicate.
    cls.add_method('IsAllHostsMulticast', 'bool', [],
                   deprecated=True, is_const=True)
    for predicate in ('IsAllNodesMulticast', 'IsAllRoutersMulticast',
                      'IsAny', 'IsDocumentation'):
        cls.add_method(predicate, 'bool', [], is_const=True)
    cls.add_method('IsEqual', 'bool',
                   [param('ns3::Ipv6Address const &', 'other')],
                   is_const=True)
    for predicate in ('IsIpv4MappedAddress', 'IsLinkLocal',
                      'IsLinkLocalMulticast', 'IsLocalhost'):
        cls.add_method(predicate, 'bool', [], is_const=True)
    cls.add_method('IsMatchingType', 'bool',
                   [param('ns3::Address const &', 'address')],
                   is_static=True)
    for predicate in ('IsMulticast', 'IsSolicitedMulticast'):
        cls.add_method(predicate, 'bool', [], is_const=True)
    # SLAAC-style address construction from every supported MAC address type.
    mac_types = ('ns3::Mac16Address', 'ns3::Mac48Address',
                 'ns3::Mac64Address', 'ns3::Mac8Address')
    for mac_type in mac_types:
        cls.add_method('MakeAutoconfiguredAddress', 'ns3::Ipv6Address',
                       [param(mac_type, 'addr'), param('ns3::Ipv6Address', 'prefix')],
                       is_static=True)
    for mac_type in mac_types:
        cls.add_method('MakeAutoconfiguredLinkLocalAddress', 'ns3::Ipv6Address',
                       [param(mac_type, 'mac')],
                       is_static=True)
    cls.add_method('MakeIpv4MappedAddress', 'ns3::Ipv6Address',
                   [param('ns3::Ipv4Address', 'addr')],
                   is_static=True)
    cls.add_method('MakeSolicitedAddress', 'ns3::Ipv6Address',
                   [param('ns3::Ipv6Address', 'addr')],
                   is_static=True)
    cls.add_method('Print', 'void', [param('std::ostream &', 'os')], is_const=True)
    cls.add_method('Serialize', 'void', [param('uint8_t *', 'buf')], is_const=True)
    # Set() overloads mirror the string and raw-byte constructors.
    cls.add_method('Set', 'void', [param('char const *', 'address')])
    cls.add_method('Set', 'void', [param('uint8_t *', 'address')])
    return
def register_Ns3Ipv6InterfaceAddress_methods(root_module, cls):
    """Register the Python wrapper for ns3::Ipv6InterfaceAddress (ipv6-interface-address.h, module 'internet')."""
    # operator<< plus (in)equality comparisons on the wrapper.
    cls.add_output_stream_operator()
    for op in ('==', '!='):
        cls.add_binary_comparison_operator(op)
    # Constructors: default, address-only, address+prefix, copy.
    cls.add_constructor([])
    cls.add_constructor([param('ns3::Ipv6Address', 'address')])
    cls.add_constructor([param('ns3::Ipv6Address', 'address'), param('ns3::Ipv6Prefix', 'prefix')])
    cls.add_constructor([param('ns3::Ipv6InterfaceAddress const &', 'o')])
    # Const getters with no arguments, registered in the original order.
    for getter, return_type in (
            ('GetAddress', 'ns3::Ipv6Address'),
            ('GetNsDadUid', 'uint32_t'),
            ('GetPrefix', 'ns3::Ipv6Prefix'),
            ('GetScope', 'ns3::Ipv6InterfaceAddress::Scope_e'),
            ('GetState', 'ns3::Ipv6InterfaceAddress::State_e')):
        cls.add_method(getter, return_type, [], is_const=True)
    cls.add_method('IsInSameSubnet', 'bool', [param('ns3::Ipv6Address', 'b')], is_const=True)
    # Mutators.
    cls.add_method('SetAddress', 'void', [param('ns3::Ipv6Address', 'address')])
    cls.add_method('SetNsDadUid', 'void', [param('uint32_t', 'uid')])
    cls.add_method('SetScope', 'void', [param('ns3::Ipv6InterfaceAddress::Scope_e', 'scope')])
    cls.add_method('SetState', 'void', [param('ns3::Ipv6InterfaceAddress::State_e', 'state')])
    return
def register_Ns3Ipv6Prefix_methods(root_module, cls):
    """Register the Python wrapper for ns3::Ipv6Prefix (ipv6-address.h, module 'network')."""
    cls.add_output_stream_operator()
    for op in ('==', '!='):
        cls.add_binary_comparison_operator(op)
    # Constructors: default, raw bytes, C string, prefix length, copy by ref, copy by ptr.
    for ctor_args in ([],
                      [param('uint8_t *', 'prefix')],
                      [param('char const *', 'prefix')],
                      [param('uint8_t', 'prefix')],
                      [param('ns3::Ipv6Prefix const &', 'prefix')],
                      [param('ns3::Ipv6Prefix const *', 'prefix')]):
        cls.add_constructor(ctor_args)
    cls.add_method('GetBytes', 'void', [param('uint8_t *', 'buf')], is_const=True)
    cls.add_method('GetLoopback', 'ns3::Ipv6Prefix', [], is_static=True)
    cls.add_method('GetOnes', 'ns3::Ipv6Prefix', [], is_static=True)
    cls.add_method('GetPrefixLength', 'uint8_t', [], is_const=True)
    cls.add_method('GetZero', 'ns3::Ipv6Prefix', [], is_static=True)
    cls.add_method('IsEqual', 'bool', [param('ns3::Ipv6Prefix const &', 'other')], is_const=True)
    cls.add_method('IsMatch', 'bool', [param('ns3::Ipv6Address', 'a'), param('ns3::Ipv6Address', 'b')], is_const=True)
    cls.add_method('Print', 'void', [param('std::ostream &', 'os')], is_const=True)
    return
def register_Ns3Mac48Address_methods(root_module, cls):
    """Register the Python wrapper for ns3::Mac48Address (mac48-address.h, module 'network')."""
    for op in ('==', '!=', '<'):
        cls.add_binary_comparison_operator(op)
    cls.add_output_stream_operator()
    # Constructors: copy, default, parse from string.
    cls.add_constructor([param('ns3::Mac48Address const &', 'arg0')])
    cls.add_constructor([])
    cls.add_constructor([param('char const *', 'str')])
    cls.add_method('Allocate', 'ns3::Mac48Address', [], is_static=True)
    cls.add_method('ConvertFrom', 'ns3::Mac48Address', [param('ns3::Address const &', 'address')], is_static=True)
    # Raw byte-buffer copy in/out.
    cls.add_method('CopyFrom', 'void', [param('uint8_t const *', 'buffer')])
    cls.add_method('CopyTo', 'void', [param('uint8_t *', 'buffer')], is_const=True)
    # Static well-known-address factories; GetMulticast is overloaded for IPv4/IPv6.
    cls.add_method('GetBroadcast', 'ns3::Mac48Address', [], is_static=True)
    cls.add_method('GetMulticast', 'ns3::Mac48Address', [param('ns3::Ipv4Address', 'address')], is_static=True)
    cls.add_method('GetMulticast', 'ns3::Mac48Address', [param('ns3::Ipv6Address', 'address')], is_static=True)
    cls.add_method('GetMulticast6Prefix', 'ns3::Mac48Address', [], is_static=True)
    cls.add_method('GetMulticastPrefix', 'ns3::Mac48Address', [], is_static=True)
    # Predicates.
    cls.add_method('IsBroadcast', 'bool', [], is_const=True)
    cls.add_method('IsGroup', 'bool', [], is_const=True)
    cls.add_method('IsMatchingType', 'bool', [param('ns3::Address const &', 'address')], is_static=True)
    return
def register_Ns3Mac8Address_methods(root_module, cls):
    """Register the Python wrapper for ns3::Mac8Address (mac8-address.h, module 'network')."""
    for op in ('<', '==', '!='):
        cls.add_binary_comparison_operator(op)
    cls.add_output_stream_operator()
    # Constructors: copy, default, single-byte address value.
    cls.add_constructor([param('ns3::Mac8Address const &', 'arg0')])
    cls.add_constructor([])
    cls.add_constructor([param('uint8_t', 'addr')])
    cls.add_method('Allocate', 'ns3::Mac8Address', [], is_static=True)
    cls.add_method('ConvertFrom', 'ns3::Mac8Address', [param('ns3::Address const &', 'address')], is_static=True)
    # Raw byte-buffer copy in/out.
    cls.add_method('CopyFrom', 'void', [param('uint8_t const *', 'pBuffer')])
    cls.add_method('CopyTo', 'void', [param('uint8_t *', 'pBuffer')], is_const=True)
    cls.add_method('GetBroadcast', 'ns3::Mac8Address', [], is_static=True)
    cls.add_method('IsMatchingType', 'bool', [param('ns3::Address const &', 'address')], is_static=True)
    return
def register_Ns3NodeContainer_methods(root_module, cls):
    """Register the Python wrapper for ns3::NodeContainer (node-container.h, module 'network')."""
    # Constructors: copy, default, single node (Ptr or Names-registered string),
    # then concatenation of two to five containers (same registration order as before).
    cls.add_constructor([param('ns3::NodeContainer const &', 'arg0')])
    cls.add_constructor([])
    cls.add_constructor([param('ns3::Ptr< ns3::Node >', 'node')])
    cls.add_constructor([param('std::string', 'nodeName')])
    container_ref = 'ns3::NodeContainer const &'
    for arg_names in (('a', 'b'), ('a', 'b', 'c'), ('a', 'b', 'c', 'd'), ('a', 'b', 'c', 'd', 'e')):
        cls.add_constructor([param(container_ref, arg_name) for arg_name in arg_names])
    # Add overloads: whole container, node pointer, node name.
    cls.add_method('Add', 'void', [param('ns3::NodeContainer', 'other')])
    cls.add_method('Add', 'void', [param('ns3::Ptr< ns3::Node >', 'node')])
    cls.add_method('Add', 'void', [param('std::string', 'nodeName')])
    cls.add_method('Begin', 'ns3::NodeContainer::Iterator', [], is_const=True)
    cls.add_method('Contains', 'bool', [param('uint32_t', 'id')], is_const=True)
    # Create overloads: node count, optionally with a parallel-simulation system id.
    cls.add_method('Create', 'void', [param('uint32_t', 'n')])
    cls.add_method('Create', 'void', [param('uint32_t', 'n'), param('uint32_t', 'systemId')])
    cls.add_method('End', 'ns3::NodeContainer::Iterator', [], is_const=True)
    cls.add_method('Get', 'ns3::Ptr< ns3::Node >', [param('uint32_t', 'i')], is_const=True)
    cls.add_method('GetGlobal', 'ns3::NodeContainer', [], is_static=True)
    cls.add_method('GetN', 'uint32_t', [], is_const=True)
    return
def register_Ns3ObjectBase_methods(root_module, cls):
    """Register the Python wrapper for ns3::ObjectBase (object-base.h, module 'core')."""
    cls.add_constructor([])
    cls.add_constructor([param('ns3::ObjectBase const &', 'arg0')])
    # Attribute access; the FailSafe variants report success via a bool return.
    cls.add_method('GetAttribute', 'void', [param('std::string', 'name'), param('ns3::AttributeValue &', 'value')], is_const=True)
    cls.add_method('GetAttributeFailSafe', 'bool', [param('std::string', 'name'), param('ns3::AttributeValue &', 'value')], is_const=True)
    cls.add_method('GetInstanceTypeId', 'ns3::TypeId', [], is_pure_virtual=True, is_const=True, is_virtual=True)
    cls.add_method('GetTypeId', 'ns3::TypeId', [], is_static=True)
    cls.add_method('SetAttribute', 'void', [param('std::string', 'name'), param('ns3::AttributeValue const &', 'value')])
    cls.add_method('SetAttributeFailSafe', 'bool', [param('std::string', 'name'), param('ns3::AttributeValue const &', 'value')])
    # Trace-source connect/disconnect, with and without a context string.
    cls.add_method('TraceConnect', 'bool', [param('std::string', 'name'), param('std::string', 'context'), param('ns3::CallbackBase const &', 'cb')])
    cls.add_method('TraceConnectWithoutContext', 'bool', [param('std::string', 'name'), param('ns3::CallbackBase const &', 'cb')])
    cls.add_method('TraceDisconnect', 'bool', [param('std::string', 'name'), param('std::string', 'context'), param('ns3::CallbackBase const &', 'cb')])
    cls.add_method('TraceDisconnectWithoutContext', 'bool', [param('std::string', 'name'), param('ns3::CallbackBase const &', 'cb')])
    # Protected construction hooks.
    cls.add_method('ConstructSelf', 'void', [param('ns3::AttributeConstructionList const &', 'attributes')], visibility='protected')
    cls.add_method('NotifyConstructionCompleted', 'void', [], visibility='protected', is_virtual=True)
    return
def register_Ns3ObjectDeleter_methods(root_module, cls):
    """Register the Python wrapper for ns3::ObjectDeleter (object.h, module 'core')."""
    # Default and copy constructors, plus the static Delete(Object*) hook.
    cls.add_constructor([])
    cls.add_constructor([param('ns3::ObjectDeleter const &', 'arg0')])
    cls.add_method('Delete', 'void', [param('ns3::Object *', 'object')], is_static=True)
    return
def register_Ns3ObjectFactory_methods(root_module, cls):
    """Register the Python wrapper for ns3::ObjectFactory (object-factory.h, module 'core')."""
    cls.add_output_stream_operator()
    # Constructors: copy, default, from a TypeId name string.
    cls.add_constructor([param('ns3::ObjectFactory const &', 'arg0')])
    cls.add_constructor([])
    cls.add_constructor([param('std::string', 'typeId')])
    cls.add_method('Create', 'ns3::Ptr< ns3::Object >', [], is_const=True)
    cls.add_method('GetTypeId', 'ns3::TypeId', [], is_const=True)
    cls.add_method('Set', 'void', [param('std::string', 'name'), param('ns3::AttributeValue const &', 'value')])
    # SetTypeId overloads: TypeId object, C string, std::string.
    cls.add_method('SetTypeId', 'void', [param('ns3::TypeId', 'tid')])
    cls.add_method('SetTypeId', 'void', [param('char const *', 'tid')])
    cls.add_method('SetTypeId', 'void', [param('std::string', 'tid')])
    return
def register_Ns3PacketMetadata_methods(root_module, cls):
    """Register the Python wrapper for ns3::PacketMetadata (packet-metadata.h, module 'network')."""
    # Constructors: (uid, size) and copy.
    cls.add_constructor([param('uint64_t', 'uid'), param('uint32_t', 'size')])
    cls.add_constructor([param('ns3::PacketMetadata const &', 'o')])
    # Recording additions (metadata blocks, headers, padding, trailers).
    cls.add_method('AddAtEnd', 'void', [param('ns3::PacketMetadata const &', 'o')])
    cls.add_method('AddHeader', 'void', [param('ns3::Header const &', 'header'), param('uint32_t', 'size')])
    cls.add_method('AddPaddingAtEnd', 'void', [param('uint32_t', 'end')])
    cls.add_method('AddTrailer', 'void', [param('ns3::Trailer const &', 'trailer'), param('uint32_t', 'size')])
    cls.add_method('BeginItem', 'ns3::PacketMetadata::ItemIterator', [param('ns3::Buffer', 'buffer')], is_const=True)
    cls.add_method('CreateFragment', 'ns3::PacketMetadata', [param('uint32_t', 'start'), param('uint32_t', 'end')], is_const=True)
    cls.add_method('Deserialize', 'uint32_t', [param('uint8_t const *', 'buffer'), param('uint32_t', 'size')])
    # Static global switches.
    cls.add_method('Enable', 'void', [], is_static=True)
    cls.add_method('EnableChecking', 'void', [], is_static=True)
    cls.add_method('GetSerializedSize', 'uint32_t', [], is_const=True)
    cls.add_method('GetUid', 'uint64_t', [], is_const=True)
    # Recording removals from either end.
    cls.add_method('RemoveAtEnd', 'void', [param('uint32_t', 'end')])
    cls.add_method('RemoveAtStart', 'void', [param('uint32_t', 'start')])
    cls.add_method('RemoveHeader', 'void', [param('ns3::Header const &', 'header'), param('uint32_t', 'size')])
    cls.add_method('RemoveTrailer', 'void', [param('ns3::Trailer const &', 'trailer'), param('uint32_t', 'size')])
    cls.add_method('Serialize', 'uint32_t', [param('uint8_t *', 'buffer'), param('uint32_t', 'maxSize')], is_const=True)
    return
def register_Ns3PacketMetadataItem_methods(root_module, cls):
    """Register the Python wrapper for ns3::PacketMetadata::Item (packet-metadata.h, module 'network')."""
    cls.add_constructor([])
    cls.add_constructor([param('ns3::PacketMetadata::Item const &', 'arg0')])
    # Public data members, all registered as non-const (writable) attributes.
    for attr_name, cpp_type in (
            ('current', 'ns3::Buffer::Iterator'),
            ('currentSize', 'uint32_t'),
            ('currentTrimedFromEnd', 'uint32_t'),
            ('currentTrimedFromStart', 'uint32_t'),
            ('isFragment', 'bool'),
            ('tid', 'ns3::TypeId'),
            ('type', 'ns3::PacketMetadata::Item::ItemType')):
        cls.add_instance_attribute(attr_name, cpp_type, is_const=False)
    return
def register_Ns3PacketMetadataItemIterator_methods(root_module, cls):
    """Register the Python wrapper for ns3::PacketMetadata::ItemIterator (packet-metadata.h, module 'network')."""
    # Copy constructor and (metadata, buffer) constructor.
    cls.add_constructor([param('ns3::PacketMetadata::ItemIterator const &', 'arg0')])
    cls.add_constructor([param('ns3::PacketMetadata const *', 'metadata'), param('ns3::Buffer', 'buffer')])
    # Java-style iteration pair.
    cls.add_method('HasNext', 'bool', [], is_const=True)
    cls.add_method('Next', 'ns3::PacketMetadata::Item', [])
    return
def register_Ns3PacketTagIterator_methods(root_module, cls):
    """Register the Python wrapper for ns3::PacketTagIterator (packet.h, module 'network')."""
    cls.add_constructor([param('ns3::PacketTagIterator const &', 'arg0')])
    # Java-style iteration pair.
    cls.add_method('HasNext', 'bool', [], is_const=True)
    cls.add_method('Next', 'ns3::PacketTagIterator::Item', [])
    return
def register_Ns3PacketTagIteratorItem_methods(root_module, cls):
    """Register the Python wrapper for ns3::PacketTagIterator::Item (packet.h, module 'network')."""
    cls.add_constructor([param('ns3::PacketTagIterator::Item const &', 'arg0')])
    # GetTag fills the caller-supplied Tag; GetTypeId identifies the stored tag type.
    cls.add_method('GetTag', 'void', [param('ns3::Tag &', 'tag')], is_const=True)
    cls.add_method('GetTypeId', 'ns3::TypeId', [], is_const=True)
    return
def register_Ns3PacketTagList_methods(root_module, cls):
    """Register the Python wrapper for ns3::PacketTagList (packet-tag-list.h, module 'network')."""
    cls.add_constructor([])
    cls.add_constructor([param('ns3::PacketTagList const &', 'o')])
    cls.add_method('Add', 'void', [param('ns3::Tag const &', 'tag')], is_const=True)
    cls.add_method('Head', 'ns3::PacketTagList::TagData const *', [], is_const=True)
    # Peek/Remove/Replace fill or consume the caller-supplied Tag and report success.
    cls.add_method('Peek', 'bool', [param('ns3::Tag &', 'tag')], is_const=True)
    cls.add_method('Remove', 'bool', [param('ns3::Tag &', 'tag')])
    cls.add_method('RemoveAll', 'void', [])
    cls.add_method('Replace', 'bool', [param('ns3::Tag &', 'tag')])
    return
def register_Ns3PacketTagListTagData_methods(root_module, cls):
    """Register the Python wrapper for ns3::PacketTagList::TagData (packet-tag-list.h, module 'network')."""
    cls.add_constructor([])
    cls.add_constructor([param('ns3::PacketTagList::TagData const &', 'arg0')])
    # Public data members (note 'next' links TagData nodes into a list),
    # all registered as non-const (writable) attributes.
    for attr_name, cpp_type in (
            ('count', 'uint32_t'),
            ('data', 'uint8_t [ 1 ]'),
            ('next', 'ns3::PacketTagList::TagData *'),
            ('size', 'uint32_t'),
            ('tid', 'ns3::TypeId')):
        cls.add_instance_attribute(attr_name, cpp_type, is_const=False)
    return
def register_Ns3SimpleRefCount__Ns3Object_Ns3ObjectBase_Ns3ObjectDeleter_methods(root_module, cls):
    """Register the ns3::SimpleRefCount<ns3::Object, ns3::ObjectBase, ns3::ObjectDeleter> instantiation (simple-ref-count.h, module 'core')."""
    # Only the default and copy constructors are exposed.
    cls.add_constructor([])
    cls.add_constructor([param('ns3::SimpleRefCount< ns3::Object, ns3::ObjectBase, ns3::ObjectDeleter > const &', 'o')])
    return
def register_Ns3Simulator_methods(root_module, cls):
    """Register bindings for the ns3::Simulator static facade.

    All member functions here are registered with is_static=True because
    Simulator exposes a purely static API (event scheduling, time queries,
    implementation selection).  Generated code: ## comments carry the
    original C++ signatures.
    """
    ## simulator.h (module 'core'): ns3::Simulator::Simulator(ns3::Simulator const & arg0) [constructor]
    cls.add_constructor([param('ns3::Simulator const &', 'arg0')])
    ## simulator.h (module 'core'): static void ns3::Simulator::Cancel(ns3::EventId const & id) [member function]
    cls.add_method('Cancel',
                   'void',
                   [param('ns3::EventId const &', 'id')],
                   is_static=True)
    ## simulator.h (module 'core'): static void ns3::Simulator::Destroy() [member function]
    cls.add_method('Destroy',
                   'void',
                   [],
                   is_static=True)
    ## simulator.h (module 'core'): static uint32_t ns3::Simulator::GetContext() [member function]
    cls.add_method('GetContext',
                   'uint32_t',
                   [],
                   is_static=True)
    ## simulator.h (module 'core'): static ns3::Time ns3::Simulator::GetDelayLeft(ns3::EventId const & id) [member function]
    cls.add_method('GetDelayLeft',
                   'ns3::Time',
                   [param('ns3::EventId const &', 'id')],
                   is_static=True)
    ## simulator.h (module 'core'): static uint64_t ns3::Simulator::GetEventCount() [member function]
    cls.add_method('GetEventCount',
                   'uint64_t',
                   [],
                   is_static=True)
    ## simulator.h (module 'core'): static ns3::Ptr<ns3::SimulatorImpl> ns3::Simulator::GetImplementation() [member function]
    cls.add_method('GetImplementation',
                   'ns3::Ptr< ns3::SimulatorImpl >',
                   [],
                   is_static=True)
    ## simulator.h (module 'core'): static ns3::Time ns3::Simulator::GetMaximumSimulationTime() [member function]
    cls.add_method('GetMaximumSimulationTime',
                   'ns3::Time',
                   [],
                   is_static=True)
    ## simulator.h (module 'core'): static uint32_t ns3::Simulator::GetSystemId() [member function]
    cls.add_method('GetSystemId',
                   'uint32_t',
                   [],
                   is_static=True)
    ## simulator.h (module 'core'): static bool ns3::Simulator::IsExpired(ns3::EventId const & id) [member function]
    cls.add_method('IsExpired',
                   'bool',
                   [param('ns3::EventId const &', 'id')],
                   is_static=True)
    ## simulator.h (module 'core'): static bool ns3::Simulator::IsFinished() [member function]
    cls.add_method('IsFinished',
                   'bool',
                   [],
                   is_static=True)
    ## simulator.h (module 'core'): static ns3::Time ns3::Simulator::Now() [member function]
    cls.add_method('Now',
                   'ns3::Time',
                   [],
                   is_static=True)
    ## simulator.h (module 'core'): static void ns3::Simulator::Remove(ns3::EventId const & id) [member function]
    cls.add_method('Remove',
                   'void',
                   [param('ns3::EventId const &', 'id')],
                   is_static=True)
    ## simulator.h (module 'core'): static void ns3::Simulator::SetImplementation(ns3::Ptr<ns3::SimulatorImpl> impl) [member function]
    cls.add_method('SetImplementation',
                   'void',
                   [param('ns3::Ptr< ns3::SimulatorImpl >', 'impl')],
                   is_static=True)
    ## simulator.h (module 'core'): static void ns3::Simulator::SetScheduler(ns3::ObjectFactory schedulerFactory) [member function]
    cls.add_method('SetScheduler',
                   'void',
                   [param('ns3::ObjectFactory', 'schedulerFactory')],
                   is_static=True)
    ## simulator.h (module 'core'): static void ns3::Simulator::Stop() [member function]
    cls.add_method('Stop',
                   'void',
                   [],
                   is_static=True)
    ## simulator.h (module 'core'): static void ns3::Simulator::Stop(ns3::Time const & delay) [member function]
    cls.add_method('Stop',
                   'void',
                   [param('ns3::Time const &', 'delay')],
                   is_static=True)
    return
def register_Ns3Tag_methods(root_module, cls):
    """Register bindings for the abstract ns3::Tag base class.

    Deserialize/GetSerializedSize/Print/Serialize are pure virtual, so they
    are registered with is_pure_virtual=True to let Python subclasses
    override them.  Generated code: ## comments carry the C++ signatures.
    """
    ## tag.h (module 'network'): ns3::Tag::Tag() [constructor]
    cls.add_constructor([])
    ## tag.h (module 'network'): ns3::Tag::Tag(ns3::Tag const & arg0) [constructor]
    cls.add_constructor([param('ns3::Tag const &', 'arg0')])
    ## tag.h (module 'network'): void ns3::Tag::Deserialize(ns3::TagBuffer i) [member function]
    cls.add_method('Deserialize',
                   'void',
                   [param('ns3::TagBuffer', 'i')],
                   is_pure_virtual=True, is_virtual=True)
    ## tag.h (module 'network'): uint32_t ns3::Tag::GetSerializedSize() const [member function]
    cls.add_method('GetSerializedSize',
                   'uint32_t',
                   [],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    ## tag.h (module 'network'): static ns3::TypeId ns3::Tag::GetTypeId() [member function]
    cls.add_method('GetTypeId',
                   'ns3::TypeId',
                   [],
                   is_static=True)
    ## tag.h (module 'network'): void ns3::Tag::Print(std::ostream & os) const [member function]
    cls.add_method('Print',
                   'void',
                   [param('std::ostream &', 'os')],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    ## tag.h (module 'network'): void ns3::Tag::Serialize(ns3::TagBuffer i) const [member function]
    cls.add_method('Serialize',
                   'void',
                   [param('ns3::TagBuffer', 'i')],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    return
def register_Ns3TagBuffer_methods(root_module, cls):
    """Register bindings for ns3::TagBuffer, the fixed-size read/write cursor
    used by Tag::Serialize/Deserialize.  Exposes the typed Read*/Write*
    accessors plus bulk Read/Write and TrimAtEnd/CopyFrom helpers.
    Generated code: ## comments carry the C++ signatures.
    """
    ## tag-buffer.h (module 'network'): ns3::TagBuffer::TagBuffer(ns3::TagBuffer const & arg0) [constructor]
    cls.add_constructor([param('ns3::TagBuffer const &', 'arg0')])
    ## tag-buffer.h (module 'network'): ns3::TagBuffer::TagBuffer(uint8_t * start, uint8_t * end) [constructor]
    cls.add_constructor([param('uint8_t *', 'start'), param('uint8_t *', 'end')])
    ## tag-buffer.h (module 'network'): void ns3::TagBuffer::CopyFrom(ns3::TagBuffer o) [member function]
    cls.add_method('CopyFrom',
                   'void',
                   [param('ns3::TagBuffer', 'o')])
    ## tag-buffer.h (module 'network'): void ns3::TagBuffer::Read(uint8_t * buffer, uint32_t size) [member function]
    cls.add_method('Read',
                   'void',
                   [param('uint8_t *', 'buffer'), param('uint32_t', 'size')])
    ## tag-buffer.h (module 'network'): double ns3::TagBuffer::ReadDouble() [member function]
    cls.add_method('ReadDouble',
                   'double',
                   [])
    ## tag-buffer.h (module 'network'): uint16_t ns3::TagBuffer::ReadU16() [member function]
    cls.add_method('ReadU16',
                   'uint16_t',
                   [])
    ## tag-buffer.h (module 'network'): uint32_t ns3::TagBuffer::ReadU32() [member function]
    cls.add_method('ReadU32',
                   'uint32_t',
                   [])
    ## tag-buffer.h (module 'network'): uint64_t ns3::TagBuffer::ReadU64() [member function]
    cls.add_method('ReadU64',
                   'uint64_t',
                   [])
    ## tag-buffer.h (module 'network'): uint8_t ns3::TagBuffer::ReadU8() [member function]
    cls.add_method('ReadU8',
                   'uint8_t',
                   [])
    ## tag-buffer.h (module 'network'): void ns3::TagBuffer::TrimAtEnd(uint32_t trim) [member function]
    cls.add_method('TrimAtEnd',
                   'void',
                   [param('uint32_t', 'trim')])
    ## tag-buffer.h (module 'network'): void ns3::TagBuffer::Write(uint8_t const * buffer, uint32_t size) [member function]
    cls.add_method('Write',
                   'void',
                   [param('uint8_t const *', 'buffer'), param('uint32_t', 'size')])
    ## tag-buffer.h (module 'network'): void ns3::TagBuffer::WriteDouble(double v) [member function]
    cls.add_method('WriteDouble',
                   'void',
                   [param('double', 'v')])
    ## tag-buffer.h (module 'network'): void ns3::TagBuffer::WriteU16(uint16_t v) [member function]
    cls.add_method('WriteU16',
                   'void',
                   [param('uint16_t', 'v')])
    ## tag-buffer.h (module 'network'): void ns3::TagBuffer::WriteU32(uint32_t v) [member function]
    cls.add_method('WriteU32',
                   'void',
                   [param('uint32_t', 'v')])
    ## tag-buffer.h (module 'network'): void ns3::TagBuffer::WriteU64(uint64_t v) [member function]
    cls.add_method('WriteU64',
                   'void',
                   [param('uint64_t', 'v')])
    ## tag-buffer.h (module 'network'): void ns3::TagBuffer::WriteU8(uint8_t v) [member function]
    cls.add_method('WriteU8',
                   'void',
                   [param('uint8_t', 'v')])
    return
def register_Ns3TimeWithUnit_methods(root_module, cls):
    """Register bindings for ns3::TimeWithUnit, the helper that pairs a Time
    with a display unit; only stream output and constructors are exposed."""
    cls.add_output_stream_operator()
    ## nstime.h (module 'core'): ns3::TimeWithUnit::TimeWithUnit(ns3::TimeWithUnit const & arg0) [constructor]
    cls.add_constructor([param('ns3::TimeWithUnit const &', 'arg0')])
    ## nstime.h (module 'core'): ns3::TimeWithUnit::TimeWithUnit(ns3::Time const time, ns3::Time::Unit const unit) [constructor]
    cls.add_constructor([param('ns3::Time const', 'time'), param('ns3::Time::Unit const', 'unit')])
    return
def register_Ns3TypeId_methods(root_module, cls):
    """Register bindings for ns3::TypeId, the run-time type metadata handle.

    Exposes comparison/stream operators, attribute and trace-source
    registration (AddAttribute/AddTraceSource), metadata getters, and the
    static lookup functions (LookupByName/LookupByHash*, GetRegistered*).
    Generated code: ## comments carry the original C++ signatures.
    """
    cls.add_binary_comparison_operator('==')
    cls.add_binary_comparison_operator('!=')
    cls.add_output_stream_operator()
    cls.add_binary_comparison_operator('<')
    ## type-id.h (module 'core'): ns3::TypeId::TypeId(char const * name) [constructor]
    cls.add_constructor([param('char const *', 'name')])
    ## type-id.h (module 'core'): ns3::TypeId::TypeId() [constructor]
    cls.add_constructor([])
    ## type-id.h (module 'core'): ns3::TypeId::TypeId(ns3::TypeId const & o) [constructor]
    cls.add_constructor([param('ns3::TypeId const &', 'o')])
    ## type-id.h (module 'core'): ns3::TypeId ns3::TypeId::AddAttribute(std::string name, std::string help, ns3::AttributeValue const & initialValue, ns3::Ptr<const ns3::AttributeAccessor> accessor, ns3::Ptr<const ns3::AttributeChecker> checker, ns3::TypeId::SupportLevel supportLevel=::ns3::TypeId::SupportLevel::SUPPORTED, std::string const & supportMsg="") [member function]
    cls.add_method('AddAttribute',
                   'ns3::TypeId',
                   [param('std::string', 'name'), param('std::string', 'help'), param('ns3::AttributeValue const &', 'initialValue'), param('ns3::Ptr< ns3::AttributeAccessor const >', 'accessor'), param('ns3::Ptr< ns3::AttributeChecker const >', 'checker'), param('ns3::TypeId::SupportLevel', 'supportLevel', default_value='::ns3::TypeId::SupportLevel::SUPPORTED'), param('std::string const &', 'supportMsg', default_value='""')])
    ## type-id.h (module 'core'): ns3::TypeId ns3::TypeId::AddAttribute(std::string name, std::string help, uint32_t flags, ns3::AttributeValue const & initialValue, ns3::Ptr<const ns3::AttributeAccessor> accessor, ns3::Ptr<const ns3::AttributeChecker> checker, ns3::TypeId::SupportLevel supportLevel=::ns3::TypeId::SupportLevel::SUPPORTED, std::string const & supportMsg="") [member function]
    cls.add_method('AddAttribute',
                   'ns3::TypeId',
                   [param('std::string', 'name'), param('std::string', 'help'), param('uint32_t', 'flags'), param('ns3::AttributeValue const &', 'initialValue'), param('ns3::Ptr< ns3::AttributeAccessor const >', 'accessor'), param('ns3::Ptr< ns3::AttributeChecker const >', 'checker'), param('ns3::TypeId::SupportLevel', 'supportLevel', default_value='::ns3::TypeId::SupportLevel::SUPPORTED'), param('std::string const &', 'supportMsg', default_value='""')])
    ## type-id.h (module 'core'): ns3::TypeId ns3::TypeId::AddTraceSource(std::string name, std::string help, ns3::Ptr<const ns3::TraceSourceAccessor> accessor) [member function]
    cls.add_method('AddTraceSource',
                   'ns3::TypeId',
                   [param('std::string', 'name'), param('std::string', 'help'), param('ns3::Ptr< ns3::TraceSourceAccessor const >', 'accessor')],
                   deprecated=True)
    ## type-id.h (module 'core'): ns3::TypeId ns3::TypeId::AddTraceSource(std::string name, std::string help, ns3::Ptr<const ns3::TraceSourceAccessor> accessor, std::string callback, ns3::TypeId::SupportLevel supportLevel=::ns3::TypeId::SupportLevel::SUPPORTED, std::string const & supportMsg="") [member function]
    cls.add_method('AddTraceSource',
                   'ns3::TypeId',
                   [param('std::string', 'name'), param('std::string', 'help'), param('ns3::Ptr< ns3::TraceSourceAccessor const >', 'accessor'), param('std::string', 'callback'), param('ns3::TypeId::SupportLevel', 'supportLevel', default_value='::ns3::TypeId::SupportLevel::SUPPORTED'), param('std::string const &', 'supportMsg', default_value='""')])
    ## type-id.h (module 'core'): ns3::TypeId::AttributeInformation ns3::TypeId::GetAttribute(std::size_t i) const [member function]
    cls.add_method('GetAttribute',
                   'ns3::TypeId::AttributeInformation',
                   [param('std::size_t', 'i')],
                   is_const=True)
    ## type-id.h (module 'core'): std::string ns3::TypeId::GetAttributeFullName(std::size_t i) const [member function]
    cls.add_method('GetAttributeFullName',
                   'std::string',
                   [param('std::size_t', 'i')],
                   is_const=True)
    ## type-id.h (module 'core'): std::size_t ns3::TypeId::GetAttributeN() const [member function]
    cls.add_method('GetAttributeN',
                   'std::size_t',
                   [],
                   is_const=True)
    ## type-id.h (module 'core'): ns3::Callback<ns3::ObjectBase *, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty> ns3::TypeId::GetConstructor() const [member function]
    cls.add_method('GetConstructor',
                   'ns3::Callback< ns3::ObjectBase *, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >',
                   [],
                   is_const=True)
    ## type-id.h (module 'core'): std::string ns3::TypeId::GetGroupName() const [member function]
    cls.add_method('GetGroupName',
                   'std::string',
                   [],
                   is_const=True)
    ## type-id.h (module 'core'): ns3::TypeId::hash_t ns3::TypeId::GetHash() const [member function]
    cls.add_method('GetHash',
                   'ns3::TypeId::hash_t',
                   [],
                   is_const=True)
    ## type-id.h (module 'core'): std::string ns3::TypeId::GetName() const [member function]
    cls.add_method('GetName',
                   'std::string',
                   [],
                   is_const=True)
    ## type-id.h (module 'core'): ns3::TypeId ns3::TypeId::GetParent() const [member function]
    cls.add_method('GetParent',
                   'ns3::TypeId',
                   [],
                   is_const=True)
    ## type-id.h (module 'core'): static ns3::TypeId ns3::TypeId::GetRegistered(uint16_t i) [member function]
    cls.add_method('GetRegistered',
                   'ns3::TypeId',
                   [param('uint16_t', 'i')],
                   is_static=True)
    ## type-id.h (module 'core'): static uint16_t ns3::TypeId::GetRegisteredN() [member function]
    cls.add_method('GetRegisteredN',
                   'uint16_t',
                   [],
                   is_static=True)
    ## type-id.h (module 'core'): std::size_t ns3::TypeId::GetSize() const [member function]
    cls.add_method('GetSize',
                   'std::size_t',
                   [],
                   is_const=True)
    ## type-id.h (module 'core'): ns3::TypeId::TraceSourceInformation ns3::TypeId::GetTraceSource(std::size_t i) const [member function]
    cls.add_method('GetTraceSource',
                   'ns3::TypeId::TraceSourceInformation',
                   [param('std::size_t', 'i')],
                   is_const=True)
    ## type-id.h (module 'core'): std::size_t ns3::TypeId::GetTraceSourceN() const [member function]
    cls.add_method('GetTraceSourceN',
                   'std::size_t',
                   [],
                   is_const=True)
    ## type-id.h (module 'core'): uint16_t ns3::TypeId::GetUid() const [member function]
    cls.add_method('GetUid',
                   'uint16_t',
                   [],
                   is_const=True)
    ## type-id.h (module 'core'): bool ns3::TypeId::HasConstructor() const [member function]
    cls.add_method('HasConstructor',
                   'bool',
                   [],
                   is_const=True)
    ## type-id.h (module 'core'): bool ns3::TypeId::HasParent() const [member function]
    cls.add_method('HasParent',
                   'bool',
                   [],
                   is_const=True)
    ## type-id.h (module 'core'): ns3::TypeId ns3::TypeId::HideFromDocumentation() [member function]
    cls.add_method('HideFromDocumentation',
                   'ns3::TypeId',
                   [])
    ## type-id.h (module 'core'): bool ns3::TypeId::IsChildOf(ns3::TypeId other) const [member function]
    cls.add_method('IsChildOf',
                   'bool',
                   [param('ns3::TypeId', 'other')],
                   is_const=True)
    ## type-id.h (module 'core'): bool ns3::TypeId::LookupAttributeByName(std::string name, ns3::TypeId::AttributeInformation * info) const [member function]
    cls.add_method('LookupAttributeByName',
                   'bool',
                   [param('std::string', 'name'), param('ns3::TypeId::AttributeInformation *', 'info', transfer_ownership=False)],
                   is_const=True)
    ## type-id.h (module 'core'): static ns3::TypeId ns3::TypeId::LookupByHash(ns3::TypeId::hash_t hash) [member function]
    cls.add_method('LookupByHash',
                   'ns3::TypeId',
                   [param('uint32_t', 'hash')],
                   is_static=True)
    ## type-id.h (module 'core'): static bool ns3::TypeId::LookupByHashFailSafe(ns3::TypeId::hash_t hash, ns3::TypeId * tid) [member function]
    cls.add_method('LookupByHashFailSafe',
                   'bool',
                   [param('uint32_t', 'hash'), param('ns3::TypeId *', 'tid')],
                   is_static=True)
    ## type-id.h (module 'core'): static ns3::TypeId ns3::TypeId::LookupByName(std::string name) [member function]
    cls.add_method('LookupByName',
                   'ns3::TypeId',
                   [param('std::string', 'name')],
                   is_static=True)
    ## type-id.h (module 'core'): ns3::Ptr<const ns3::TraceSourceAccessor> ns3::TypeId::LookupTraceSourceByName(std::string name) const [member function]
    cls.add_method('LookupTraceSourceByName',
                   'ns3::Ptr< ns3::TraceSourceAccessor const >',
                   [param('std::string', 'name')],
                   is_const=True)
    ## type-id.h (module 'core'): ns3::Ptr<const ns3::TraceSourceAccessor> ns3::TypeId::LookupTraceSourceByName(std::string name, ns3::TypeId::TraceSourceInformation * info) const [member function]
    cls.add_method('LookupTraceSourceByName',
                   'ns3::Ptr< ns3::TraceSourceAccessor const >',
                   [param('std::string', 'name'), param('ns3::TypeId::TraceSourceInformation *', 'info')],
                   is_const=True)
    ## type-id.h (module 'core'): bool ns3::TypeId::MustHideFromDocumentation() const [member function]
    cls.add_method('MustHideFromDocumentation',
                   'bool',
                   [],
                   is_const=True)
    ## type-id.h (module 'core'): bool ns3::TypeId::SetAttributeInitialValue(std::size_t i, ns3::Ptr<const ns3::AttributeValue> initialValue) [member function]
    cls.add_method('SetAttributeInitialValue',
                   'bool',
                   [param('std::size_t', 'i'), param('ns3::Ptr< ns3::AttributeValue const >', 'initialValue')])
    ## type-id.h (module 'core'): ns3::TypeId ns3::TypeId::SetGroupName(std::string groupName) [member function]
    cls.add_method('SetGroupName',
                   'ns3::TypeId',
                   [param('std::string', 'groupName')])
    ## type-id.h (module 'core'): ns3::TypeId ns3::TypeId::SetParent(ns3::TypeId tid) [member function]
    cls.add_method('SetParent',
                   'ns3::TypeId',
                   [param('ns3::TypeId', 'tid')])
    ## type-id.h (module 'core'): ns3::TypeId ns3::TypeId::SetSize(std::size_t size) [member function]
    cls.add_method('SetSize',
                   'ns3::TypeId',
                   [param('std::size_t', 'size')])
    ## type-id.h (module 'core'): void ns3::TypeId::SetUid(uint16_t uid) [member function]
    cls.add_method('SetUid',
                   'void',
                   [param('uint16_t', 'uid')])
    return
def register_Ns3TypeIdAttributeInformation_methods(root_module, cls):
    """Register bindings for the ns3::TypeId::AttributeInformation struct
    (the metadata record returned by TypeId::GetAttribute)."""
    ## type-id.h (module 'core'): ns3::TypeId::AttributeInformation::AttributeInformation() [constructor]
    cls.add_constructor([])
    ## type-id.h (module 'core'): ns3::TypeId::AttributeInformation::AttributeInformation(ns3::TypeId::AttributeInformation const & arg0) [constructor]
    cls.add_constructor([param('ns3::TypeId::AttributeInformation const &', 'arg0')])
    ## type-id.h (module 'core'): ns3::TypeId::AttributeInformation::accessor [variable]
    cls.add_instance_attribute('accessor', 'ns3::Ptr< ns3::AttributeAccessor const >', is_const=False)
    ## type-id.h (module 'core'): ns3::TypeId::AttributeInformation::checker [variable]
    cls.add_instance_attribute('checker', 'ns3::Ptr< ns3::AttributeChecker const >', is_const=False)
    ## type-id.h (module 'core'): ns3::TypeId::AttributeInformation::flags [variable]
    cls.add_instance_attribute('flags', 'uint32_t', is_const=False)
    ## type-id.h (module 'core'): ns3::TypeId::AttributeInformation::help [variable]
    cls.add_instance_attribute('help', 'std::string', is_const=False)
    ## type-id.h (module 'core'): ns3::TypeId::AttributeInformation::initialValue [variable]
    cls.add_instance_attribute('initialValue', 'ns3::Ptr< ns3::AttributeValue const >', is_const=False)
    ## type-id.h (module 'core'): ns3::TypeId::AttributeInformation::name [variable]
    cls.add_instance_attribute('name', 'std::string', is_const=False)
    ## type-id.h (module 'core'): ns3::TypeId::AttributeInformation::originalInitialValue [variable]
    cls.add_instance_attribute('originalInitialValue', 'ns3::Ptr< ns3::AttributeValue const >', is_const=False)
    ## type-id.h (module 'core'): ns3::TypeId::AttributeInformation::supportLevel [variable]
    cls.add_instance_attribute('supportLevel', 'ns3::TypeId::SupportLevel', is_const=False)
    ## type-id.h (module 'core'): ns3::TypeId::AttributeInformation::supportMsg [variable]
    cls.add_instance_attribute('supportMsg', 'std::string', is_const=False)
    return
def register_Ns3TypeIdTraceSourceInformation_methods(root_module, cls):
    """Register bindings for the ns3::TypeId::TraceSourceInformation struct
    (the metadata record returned by TypeId::GetTraceSource)."""
    ## type-id.h (module 'core'): ns3::TypeId::TraceSourceInformation::TraceSourceInformation() [constructor]
    cls.add_constructor([])
    ## type-id.h (module 'core'): ns3::TypeId::TraceSourceInformation::TraceSourceInformation(ns3::TypeId::TraceSourceInformation const & arg0) [constructor]
    cls.add_constructor([param('ns3::TypeId::TraceSourceInformation const &', 'arg0')])
    ## type-id.h (module 'core'): ns3::TypeId::TraceSourceInformation::accessor [variable]
    cls.add_instance_attribute('accessor', 'ns3::Ptr< ns3::TraceSourceAccessor const >', is_const=False)
    ## type-id.h (module 'core'): ns3::TypeId::TraceSourceInformation::callback [variable]
    cls.add_instance_attribute('callback', 'std::string', is_const=False)
    ## type-id.h (module 'core'): ns3::TypeId::TraceSourceInformation::help [variable]
    cls.add_instance_attribute('help', 'std::string', is_const=False)
    ## type-id.h (module 'core'): ns3::TypeId::TraceSourceInformation::name [variable]
    cls.add_instance_attribute('name', 'std::string', is_const=False)
    ## type-id.h (module 'core'): ns3::TypeId::TraceSourceInformation::supportLevel [variable]
    cls.add_instance_attribute('supportLevel', 'ns3::TypeId::SupportLevel', is_const=False)
    ## type-id.h (module 'core'): ns3::TypeId::TraceSourceInformation::supportMsg [variable]
    cls.add_instance_attribute('supportMsg', 'std::string', is_const=False)
    return
def register_Ns3Empty_methods(root_module, cls):
    """Register bindings for the ns3::empty placeholder type (constructors only)."""
    ## empty.h (module 'core'): ns3::empty::empty() [constructor]
    cls.add_constructor([])
    ## empty.h (module 'core'): ns3::empty::empty(ns3::empty const & arg0) [constructor]
    cls.add_constructor([param('ns3::empty const &', 'arg0')])
    return
def register_Ns3Int64x64_t_methods(root_module, cls):
    """Register bindings for ns3::int64x64_t, the 64.64 fixed-point type.

    Exposes the full arithmetic/comparison operator set (including the
    mixed int64x64_t * Time overload), in-place operators, unary negation,
    conversion constructors from all integer/float widths, and accessors.
    Generated code: ## comments carry the original C++ signatures.
    """
    cls.add_binary_numeric_operator('*', root_module['ns3::Time'], root_module['ns3::int64x64_t'], param('ns3::Time const &', u'right'))
    cls.add_binary_numeric_operator('+', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('ns3::int64x64_t const &', u'right'))
    cls.add_binary_numeric_operator('-', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('ns3::int64x64_t const &', u'right'))
    cls.add_binary_numeric_operator('*', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('ns3::int64x64_t const &', u'right'))
    cls.add_binary_numeric_operator('/', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('ns3::int64x64_t const &', u'right'))
    cls.add_binary_comparison_operator('!=')
    cls.add_binary_comparison_operator('<=')
    cls.add_binary_comparison_operator('>=')
    cls.add_output_stream_operator()
    cls.add_binary_comparison_operator('==')
    cls.add_binary_comparison_operator('<')
    cls.add_binary_comparison_operator('>')
    cls.add_inplace_numeric_operator('+=', param('ns3::int64x64_t const &', u'right'))
    cls.add_inplace_numeric_operator('-=', param('ns3::int64x64_t const &', u'right'))
    cls.add_inplace_numeric_operator('*=', param('ns3::int64x64_t const &', u'right'))
    cls.add_inplace_numeric_operator('/=', param('ns3::int64x64_t const &', u'right'))
    cls.add_unary_numeric_operator('-')
    ## int64x64-128.h (module 'core'): ns3::int64x64_t::int64x64_t() [constructor]
    cls.add_constructor([])
    ## int64x64-128.h (module 'core'): ns3::int64x64_t::int64x64_t(double const value) [constructor]
    cls.add_constructor([param('double const', 'value')])
    ## int64x64-128.h (module 'core'): ns3::int64x64_t::int64x64_t(long double const value) [constructor]
    cls.add_constructor([param('long double const', 'value')])
    ## int64x64-128.h (module 'core'): ns3::int64x64_t::int64x64_t(int const v) [constructor]
    cls.add_constructor([param('int const', 'v')])
    ## int64x64-128.h (module 'core'): ns3::int64x64_t::int64x64_t(long int const v) [constructor]
    cls.add_constructor([param('long int const', 'v')])
    ## int64x64-128.h (module 'core'): ns3::int64x64_t::int64x64_t(long long int const v) [constructor]
    cls.add_constructor([param('long long int const', 'v')])
    ## int64x64-128.h (module 'core'): ns3::int64x64_t::int64x64_t(unsigned int const v) [constructor]
    cls.add_constructor([param('unsigned int const', 'v')])
    ## int64x64-128.h (module 'core'): ns3::int64x64_t::int64x64_t(long unsigned int const v) [constructor]
    cls.add_constructor([param('long unsigned int const', 'v')])
    ## int64x64-128.h (module 'core'): ns3::int64x64_t::int64x64_t(long long unsigned int const v) [constructor]
    cls.add_constructor([param('long long unsigned int const', 'v')])
    ## int64x64-128.h (module 'core'): ns3::int64x64_t::int64x64_t(int64_t const hi, uint64_t const lo) [constructor]
    cls.add_constructor([param('int64_t const', 'hi'), param('uint64_t const', 'lo')])
    ## int64x64-128.h (module 'core'): ns3::int64x64_t::int64x64_t(ns3::int64x64_t const & o) [constructor]
    cls.add_constructor([param('ns3::int64x64_t const &', 'o')])
    ## int64x64-128.h (module 'core'): double ns3::int64x64_t::GetDouble() const [member function]
    cls.add_method('GetDouble',
                   'double',
                   [],
                   is_const=True)
    ## int64x64-128.h (module 'core'): int64_t ns3::int64x64_t::GetHigh() const [member function]
    cls.add_method('GetHigh',
                   'int64_t',
                   [],
                   is_const=True)
    ## int64x64-128.h (module 'core'): uint64_t ns3::int64x64_t::GetLow() const [member function]
    cls.add_method('GetLow',
                   'uint64_t',
                   [],
                   is_const=True)
    ## int64x64-128.h (module 'core'): static ns3::int64x64_t ns3::int64x64_t::Invert(uint64_t const v) [member function]
    cls.add_method('Invert',
                   'ns3::int64x64_t',
                   [param('uint64_t const', 'v')],
                   is_static=True)
    ## int64x64-128.h (module 'core'): void ns3::int64x64_t::MulByInvert(ns3::int64x64_t const & o) [member function]
    cls.add_method('MulByInvert',
                   'void',
                   [param('ns3::int64x64_t const &', 'o')])
    ## int64x64-128.h (module 'core'): ns3::int64x64_t::implementation [variable]
    cls.add_static_attribute('implementation', 'ns3::int64x64_t::impl_type const', is_const=True)
    return
def register_Ns3Chunk_methods(root_module, cls):
    """Register bindings for the abstract ns3::Chunk base of Header/Trailer.

    Deserialize (single-iterator form) and Print are pure virtual; the
    two-iterator Deserialize overload is a plain virtual with a default
    implementation.  Generated code: ## comments carry the C++ signatures.
    """
    ## chunk.h (module 'network'): ns3::Chunk::Chunk() [constructor]
    cls.add_constructor([])
    ## chunk.h (module 'network'): ns3::Chunk::Chunk(ns3::Chunk const & arg0) [constructor]
    cls.add_constructor([param('ns3::Chunk const &', 'arg0')])
    ## chunk.h (module 'network'): uint32_t ns3::Chunk::Deserialize(ns3::Buffer::Iterator start) [member function]
    cls.add_method('Deserialize',
                   'uint32_t',
                   [param('ns3::Buffer::Iterator', 'start')],
                   is_pure_virtual=True, is_virtual=True)
    ## chunk.h (module 'network'): uint32_t ns3::Chunk::Deserialize(ns3::Buffer::Iterator start, ns3::Buffer::Iterator end) [member function]
    cls.add_method('Deserialize',
                   'uint32_t',
                   [param('ns3::Buffer::Iterator', 'start'), param('ns3::Buffer::Iterator', 'end')],
                   is_virtual=True)
    ## chunk.h (module 'network'): static ns3::TypeId ns3::Chunk::GetTypeId() [member function]
    cls.add_method('GetTypeId',
                   'ns3::TypeId',
                   [],
                   is_static=True)
    ## chunk.h (module 'network'): void ns3::Chunk::Print(std::ostream & os) const [member function]
    cls.add_method('Print',
                   'void',
                   [param('std::ostream &', 'os')],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    return
def register_Ns3Header_methods(root_module, cls):
    """Register bindings for the abstract ns3::Header base class.

    All of Deserialize/GetSerializedSize/Print/Serialize are pure virtual,
    allowing Python-defined headers to override them; the stream operator
    is exposed for printing.  Generated code: ## comments carry the C++
    signatures.
    """
    cls.add_output_stream_operator()
    ## header.h (module 'network'): ns3::Header::Header() [constructor]
    cls.add_constructor([])
    ## header.h (module 'network'): ns3::Header::Header(ns3::Header const & arg0) [constructor]
    cls.add_constructor([param('ns3::Header const &', 'arg0')])
    ## header.h (module 'network'): uint32_t ns3::Header::Deserialize(ns3::Buffer::Iterator start) [member function]
    cls.add_method('Deserialize',
                   'uint32_t',
                   [param('ns3::Buffer::Iterator', 'start')],
                   is_pure_virtual=True, is_virtual=True)
    ## header.h (module 'network'): uint32_t ns3::Header::GetSerializedSize() const [member function]
    cls.add_method('GetSerializedSize',
                   'uint32_t',
                   [],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    ## header.h (module 'network'): static ns3::TypeId ns3::Header::GetTypeId() [member function]
    cls.add_method('GetTypeId',
                   'ns3::TypeId',
                   [],
                   is_static=True)
    ## header.h (module 'network'): void ns3::Header::Print(std::ostream & os) const [member function]
    cls.add_method('Print',
                   'void',
                   [param('std::ostream &', 'os')],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    ## header.h (module 'network'): void ns3::Header::Serialize(ns3::Buffer::Iterator start) const [member function]
    cls.add_method('Serialize',
                   'void',
                   [param('ns3::Buffer::Iterator', 'start')],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    return
def register_Ns3Ipv4Header_methods(root_module, cls):
    """Register bindings for ns3::Ipv4Header (concrete Header subclass).

    Exposes the constructors, the Header virtual overrides
    (Deserialize/Serialize/Print/GetSerializedSize/GetInstanceTypeId), and
    the full getter/setter surface for the IPv4 header fields (addresses,
    DSCP/ECN, fragmentation flags, TTL, protocol, checksum control).
    Generated code: ## comments carry the original C++ signatures.
    """
    ## ipv4-header.h (module 'internet'): ns3::Ipv4Header::Ipv4Header(ns3::Ipv4Header const & arg0) [constructor]
    cls.add_constructor([param('ns3::Ipv4Header const &', 'arg0')])
    ## ipv4-header.h (module 'internet'): ns3::Ipv4Header::Ipv4Header() [constructor]
    cls.add_constructor([])
    ## ipv4-header.h (module 'internet'): uint32_t ns3::Ipv4Header::Deserialize(ns3::Buffer::Iterator start) [member function]
    cls.add_method('Deserialize',
                   'uint32_t',
                   [param('ns3::Buffer::Iterator', 'start')],
                   is_virtual=True)
    ## ipv4-header.h (module 'internet'): std::string ns3::Ipv4Header::DscpTypeToString(ns3::Ipv4Header::DscpType dscp) const [member function]
    cls.add_method('DscpTypeToString',
                   'std::string',
                   [param('ns3::Ipv4Header::DscpType', 'dscp')],
                   is_const=True)
    ## ipv4-header.h (module 'internet'): std::string ns3::Ipv4Header::EcnTypeToString(ns3::Ipv4Header::EcnType ecn) const [member function]
    cls.add_method('EcnTypeToString',
                   'std::string',
                   [param('ns3::Ipv4Header::EcnType', 'ecn')],
                   is_const=True)
    ## ipv4-header.h (module 'internet'): void ns3::Ipv4Header::EnableChecksum() [member function]
    cls.add_method('EnableChecksum',
                   'void',
                   [])
    ## ipv4-header.h (module 'internet'): ns3::Ipv4Address ns3::Ipv4Header::GetDestination() const [member function]
    cls.add_method('GetDestination',
                   'ns3::Ipv4Address',
                   [],
                   is_const=True)
    ## ipv4-header.h (module 'internet'): ns3::Ipv4Header::DscpType ns3::Ipv4Header::GetDscp() const [member function]
    cls.add_method('GetDscp',
                   'ns3::Ipv4Header::DscpType',
                   [],
                   is_const=True)
    ## ipv4-header.h (module 'internet'): ns3::Ipv4Header::EcnType ns3::Ipv4Header::GetEcn() const [member function]
    cls.add_method('GetEcn',
                   'ns3::Ipv4Header::EcnType',
                   [],
                   is_const=True)
    ## ipv4-header.h (module 'internet'): uint16_t ns3::Ipv4Header::GetFragmentOffset() const [member function]
    cls.add_method('GetFragmentOffset',
                   'uint16_t',
                   [],
                   is_const=True)
    ## ipv4-header.h (module 'internet'): uint16_t ns3::Ipv4Header::GetIdentification() const [member function]
    cls.add_method('GetIdentification',
                   'uint16_t',
                   [],
                   is_const=True)
    ## ipv4-header.h (module 'internet'): ns3::TypeId ns3::Ipv4Header::GetInstanceTypeId() const [member function]
    cls.add_method('GetInstanceTypeId',
                   'ns3::TypeId',
                   [],
                   is_const=True, is_virtual=True)
    ## ipv4-header.h (module 'internet'): uint16_t ns3::Ipv4Header::GetPayloadSize() const [member function]
    cls.add_method('GetPayloadSize',
                   'uint16_t',
                   [],
                   is_const=True)
    ## ipv4-header.h (module 'internet'): uint8_t ns3::Ipv4Header::GetProtocol() const [member function]
    cls.add_method('GetProtocol',
                   'uint8_t',
                   [],
                   is_const=True)
    ## ipv4-header.h (module 'internet'): uint32_t ns3::Ipv4Header::GetSerializedSize() const [member function]
    cls.add_method('GetSerializedSize',
                   'uint32_t',
                   [],
                   is_const=True, is_virtual=True)
    ## ipv4-header.h (module 'internet'): ns3::Ipv4Address ns3::Ipv4Header::GetSource() const [member function]
    cls.add_method('GetSource',
                   'ns3::Ipv4Address',
                   [],
                   is_const=True)
    ## ipv4-header.h (module 'internet'): uint8_t ns3::Ipv4Header::GetTos() const [member function]
    cls.add_method('GetTos',
                   'uint8_t',
                   [],
                   is_const=True)
    ## ipv4-header.h (module 'internet'): uint8_t ns3::Ipv4Header::GetTtl() const [member function]
    cls.add_method('GetTtl',
                   'uint8_t',
                   [],
                   is_const=True)
    ## ipv4-header.h (module 'internet'): static ns3::TypeId ns3::Ipv4Header::GetTypeId() [member function]
    cls.add_method('GetTypeId',
                   'ns3::TypeId',
                   [],
                   is_static=True)
    ## ipv4-header.h (module 'internet'): bool ns3::Ipv4Header::IsChecksumOk() const [member function]
    cls.add_method('IsChecksumOk',
                   'bool',
                   [],
                   is_const=True)
    ## ipv4-header.h (module 'internet'): bool ns3::Ipv4Header::IsDontFragment() const [member function]
    cls.add_method('IsDontFragment',
                   'bool',
                   [],
                   is_const=True)
    ## ipv4-header.h (module 'internet'): bool ns3::Ipv4Header::IsLastFragment() const [member function]
    cls.add_method('IsLastFragment',
                   'bool',
                   [],
                   is_const=True)
    ## ipv4-header.h (module 'internet'): void ns3::Ipv4Header::Print(std::ostream & os) const [member function]
    cls.add_method('Print',
                   'void',
                   [param('std::ostream &', 'os')],
                   is_const=True, is_virtual=True)
    ## ipv4-header.h (module 'internet'): void ns3::Ipv4Header::Serialize(ns3::Buffer::Iterator start) const [member function]
    cls.add_method('Serialize',
                   'void',
                   [param('ns3::Buffer::Iterator', 'start')],
                   is_const=True, is_virtual=True)
    ## ipv4-header.h (module 'internet'): void ns3::Ipv4Header::SetDestination(ns3::Ipv4Address destination) [member function]
    cls.add_method('SetDestination',
                   'void',
                   [param('ns3::Ipv4Address', 'destination')])
    ## ipv4-header.h (module 'internet'): void ns3::Ipv4Header::SetDontFragment() [member function]
    cls.add_method('SetDontFragment',
                   'void',
                   [])
    ## ipv4-header.h (module 'internet'): void ns3::Ipv4Header::SetDscp(ns3::Ipv4Header::DscpType dscp) [member function]
    cls.add_method('SetDscp',
                   'void',
                   [param('ns3::Ipv4Header::DscpType', 'dscp')])
    ## ipv4-header.h (module 'internet'): void ns3::Ipv4Header::SetEcn(ns3::Ipv4Header::EcnType ecn) [member function]
    cls.add_method('SetEcn',
                   'void',
                   [param('ns3::Ipv4Header::EcnType', 'ecn')])
    ## ipv4-header.h (module 'internet'): void ns3::Ipv4Header::SetFragmentOffset(uint16_t offsetBytes) [member function]
    cls.add_method('SetFragmentOffset',
                   'void',
                   [param('uint16_t', 'offsetBytes')])
    ## ipv4-header.h (module 'internet'): void ns3::Ipv4Header::SetIdentification(uint16_t identification) [member function]
    cls.add_method('SetIdentification',
                   'void',
                   [param('uint16_t', 'identification')])
    ## ipv4-header.h (module 'internet'): void ns3::Ipv4Header::SetLastFragment() [member function]
    cls.add_method('SetLastFragment',
                   'void',
                   [])
    ## ipv4-header.h (module 'internet'): void ns3::Ipv4Header::SetMayFragment() [member function]
    cls.add_method('SetMayFragment',
                   'void',
                   [])
    ## ipv4-header.h (module 'internet'): void ns3::Ipv4Header::SetMoreFragments() [member function]
    cls.add_method('SetMoreFragments',
                   'void',
                   [])
    ## ipv4-header.h (module 'internet'): void ns3::Ipv4Header::SetPayloadSize(uint16_t size) [member function]
    cls.add_method('SetPayloadSize',
                   'void',
                   [param('uint16_t', 'size')])
    ## ipv4-header.h (module 'internet'): void ns3::Ipv4Header::SetProtocol(uint8_t num) [member function]
    cls.add_method('SetProtocol',
                   'void',
                   [param('uint8_t', 'num')])
    ## ipv4-header.h (module 'internet'): void ns3::Ipv4Header::SetSource(ns3::Ipv4Address source) [member function]
    cls.add_method('SetSource',
                   'void',
                   [param('ns3::Ipv4Address', 'source')])
    ## ipv4-header.h (module 'internet'): void ns3::Ipv4Header::SetTos(uint8_t tos) [member function]
    cls.add_method('SetTos',
                   'void',
                   [param('uint8_t', 'tos')])
    ## ipv4-header.h (module 'internet'): void ns3::Ipv4Header::SetTtl(uint8_t ttl) [member function]
    cls.add_method('SetTtl',
                   'void',
                   [param('uint8_t', 'ttl')])
    return
def register_Ns3Ipv6Header_methods(root_module, cls):
    """Register constructors and methods of ns3::Ipv6Header (ipv6-header.h, module 'internet')."""
    # Constructors: copy constructor, then default constructor.
    cls.add_constructor([param('ns3::Ipv6Header const &', 'arg0')])
    cls.add_constructor([])
    # ns3::Header serialization interface (virtual overrides).
    cls.add_method('Deserialize', 'uint32_t', [param('ns3::Buffer::Iterator', 'start')], is_virtual=True)
    # DSCP / ECN pretty-printing helpers.
    cls.add_method('DscpTypeToString', 'std::string', [param('ns3::Ipv6Header::DscpType', 'dscp')], is_const=True)
    cls.add_method('EcnTypeToString', 'std::string', [param('ns3::Ipv6Header::EcnType', 'ecn')], is_const=True)
    # Header field getters.
    cls.add_method('GetDestinationAddress', 'ns3::Ipv6Address', [], is_const=True)
    cls.add_method('GetDscp', 'ns3::Ipv6Header::DscpType', [], is_const=True)
    cls.add_method('GetEcn', 'ns3::Ipv6Header::EcnType', [], is_const=True)
    cls.add_method('GetFlowLabel', 'uint32_t', [], is_const=True)
    cls.add_method('GetHopLimit', 'uint8_t', [], is_const=True)
    cls.add_method('GetInstanceTypeId', 'ns3::TypeId', [], is_const=True, is_virtual=True)
    cls.add_method('GetNextHeader', 'uint8_t', [], is_const=True)
    cls.add_method('GetPayloadLength', 'uint16_t', [], is_const=True)
    cls.add_method('GetSerializedSize', 'uint32_t', [], is_const=True, is_virtual=True)
    cls.add_method('GetSourceAddress', 'ns3::Ipv6Address', [], is_const=True)
    cls.add_method('GetTrafficClass', 'uint8_t', [], is_const=True)
    cls.add_method('GetTypeId', 'ns3::TypeId', [], is_static=True)
    cls.add_method('Print', 'void', [param('std::ostream &', 'os')], is_const=True, is_virtual=True)
    cls.add_method('Serialize', 'void', [param('ns3::Buffer::Iterator', 'start')], is_const=True, is_virtual=True)
    # Header field setters.
    cls.add_method('SetDestinationAddress', 'void', [param('ns3::Ipv6Address', 'dst')])
    cls.add_method('SetDscp', 'void', [param('ns3::Ipv6Header::DscpType', 'dscp')])
    cls.add_method('SetEcn', 'void', [param('ns3::Ipv6Header::EcnType', 'ecn')])
    cls.add_method('SetFlowLabel', 'void', [param('uint32_t', 'flow')])
    cls.add_method('SetHopLimit', 'void', [param('uint8_t', 'limit')])
    cls.add_method('SetNextHeader', 'void', [param('uint8_t', 'next')])
    cls.add_method('SetPayloadLength', 'void', [param('uint16_t', 'len')])
    cls.add_method('SetSourceAddress', 'void', [param('ns3::Ipv6Address', 'src')])
    cls.add_method('SetTrafficClass', 'void', [param('uint8_t', 'traffic')])
    return
def register_Ns3Object_methods(root_module, cls):
    """Register constructors and methods of ns3::Object (object.h, module 'core')."""
    # Public default constructor.
    cls.add_constructor([])
    # Aggregation and lifecycle API.
    cls.add_method('AggregateObject', 'void', [param('ns3::Ptr< ns3::Object >', 'other')])
    cls.add_method('Dispose', 'void', [])
    cls.add_method('GetAggregateIterator', 'ns3::Object::AggregateIterator', [], is_const=True)
    cls.add_method('GetInstanceTypeId', 'ns3::TypeId', [], is_const=True, is_virtual=True)
    cls.add_method('GetTypeId', 'ns3::TypeId', [], is_static=True)
    cls.add_method('Initialize', 'void', [])
    cls.add_method('IsInitialized', 'bool', [], is_const=True)
    # Copy constructor is protected in the C++ class.
    cls.add_constructor([param('ns3::Object const &', 'o')], visibility='protected')
    # Protected virtual lifecycle hooks intended for subclasses.
    cls.add_method('DoDispose', 'void', [], visibility='protected', is_virtual=True)
    cls.add_method('DoInitialize', 'void', [], visibility='protected', is_virtual=True)
    cls.add_method('NotifyNewAggregate', 'void', [], visibility='protected', is_virtual=True)
    return
def register_Ns3ObjectAggregateIterator_methods(root_module, cls):
    """Register constructors and methods of ns3::Object::AggregateIterator (object.h, module 'core')."""
    # Copy constructor, then default constructor.
    cls.add_constructor([param('ns3::Object::AggregateIterator const &', 'arg0')])
    cls.add_constructor([])
    # Iteration API over aggregated objects.
    cls.add_method('HasNext', 'bool', [], is_const=True)
    cls.add_method('Next', 'ns3::Ptr< ns3::Object const >', [])
    return
def register_Ns3SimpleRefCount__Ns3AttributeAccessor_Ns3Empty_Ns3DefaultDeleter__lt__ns3AttributeAccessor__gt___methods(root_module, cls):
    """Register constructors of the ns3::SimpleRefCount<ns3::AttributeAccessor> instantiation (simple-ref-count.h, module 'core')."""
    copy_arg_type = 'ns3::SimpleRefCount< ns3::AttributeAccessor, ns3::empty, ns3::DefaultDeleter< ns3::AttributeAccessor > > const &'
    cls.add_constructor([])  # default constructor
    cls.add_constructor([param(copy_arg_type, 'o')])  # copy constructor
def register_Ns3SimpleRefCount__Ns3AttributeChecker_Ns3Empty_Ns3DefaultDeleter__lt__ns3AttributeChecker__gt___methods(root_module, cls):
    """Register constructors of the ns3::SimpleRefCount<ns3::AttributeChecker> instantiation (simple-ref-count.h, module 'core')."""
    copy_arg_type = 'ns3::SimpleRefCount< ns3::AttributeChecker, ns3::empty, ns3::DefaultDeleter< ns3::AttributeChecker > > const &'
    cls.add_constructor([])  # default constructor
    cls.add_constructor([param(copy_arg_type, 'o')])  # copy constructor
def register_Ns3SimpleRefCount__Ns3AttributeValue_Ns3Empty_Ns3DefaultDeleter__lt__ns3AttributeValue__gt___methods(root_module, cls):
    """Register constructors of the ns3::SimpleRefCount<ns3::AttributeValue> instantiation (simple-ref-count.h, module 'core')."""
    copy_arg_type = 'ns3::SimpleRefCount< ns3::AttributeValue, ns3::empty, ns3::DefaultDeleter< ns3::AttributeValue > > const &'
    cls.add_constructor([])  # default constructor
    cls.add_constructor([param(copy_arg_type, 'o')])  # copy constructor
def register_Ns3SimpleRefCount__Ns3CallbackImplBase_Ns3Empty_Ns3DefaultDeleter__lt__ns3CallbackImplBase__gt___methods(root_module, cls):
    """Register constructors of the ns3::SimpleRefCount<ns3::CallbackImplBase> instantiation (simple-ref-count.h, module 'core')."""
    copy_arg_type = 'ns3::SimpleRefCount< ns3::CallbackImplBase, ns3::empty, ns3::DefaultDeleter< ns3::CallbackImplBase > > const &'
    cls.add_constructor([])  # default constructor
    cls.add_constructor([param(copy_arg_type, 'o')])  # copy constructor
def register_Ns3SimpleRefCount__Ns3EventImpl_Ns3Empty_Ns3DefaultDeleter__lt__ns3EventImpl__gt___methods(root_module, cls):
    """Register constructors of the ns3::SimpleRefCount<ns3::EventImpl> instantiation (simple-ref-count.h, module 'core')."""
    copy_arg_type = 'ns3::SimpleRefCount< ns3::EventImpl, ns3::empty, ns3::DefaultDeleter< ns3::EventImpl > > const &'
    cls.add_constructor([])  # default constructor
    cls.add_constructor([param(copy_arg_type, 'o')])  # copy constructor
def register_Ns3SimpleRefCount__Ns3FlowClassifier_Ns3Empty_Ns3DefaultDeleter__lt__ns3FlowClassifier__gt___methods(root_module, cls):
    """Register constructors of the ns3::SimpleRefCount<ns3::FlowClassifier> instantiation (simple-ref-count.h, module 'core')."""
    copy_arg_type = 'ns3::SimpleRefCount< ns3::FlowClassifier, ns3::empty, ns3::DefaultDeleter< ns3::FlowClassifier > > const &'
    cls.add_constructor([])  # default constructor
    cls.add_constructor([param(copy_arg_type, 'o')])  # copy constructor
def register_Ns3SimpleRefCount__Ns3HashImplementation_Ns3Empty_Ns3DefaultDeleter__lt__ns3HashImplementation__gt___methods(root_module, cls):
    """Register constructors of the ns3::SimpleRefCount<ns3::Hash::Implementation> instantiation (simple-ref-count.h, module 'core')."""
    copy_arg_type = 'ns3::SimpleRefCount< ns3::Hash::Implementation, ns3::empty, ns3::DefaultDeleter< ns3::Hash::Implementation > > const &'
    cls.add_constructor([])  # default constructor
    cls.add_constructor([param(copy_arg_type, 'o')])  # copy constructor
def register_Ns3SimpleRefCount__Ns3Ipv4MulticastRoute_Ns3Empty_Ns3DefaultDeleter__lt__ns3Ipv4MulticastRoute__gt___methods(root_module, cls):
    """Register constructors of the ns3::SimpleRefCount<ns3::Ipv4MulticastRoute> instantiation (simple-ref-count.h, module 'core')."""
    copy_arg_type = 'ns3::SimpleRefCount< ns3::Ipv4MulticastRoute, ns3::empty, ns3::DefaultDeleter< ns3::Ipv4MulticastRoute > > const &'
    cls.add_constructor([])  # default constructor
    cls.add_constructor([param(copy_arg_type, 'o')])  # copy constructor
def register_Ns3SimpleRefCount__Ns3Ipv4Route_Ns3Empty_Ns3DefaultDeleter__lt__ns3Ipv4Route__gt___methods(root_module, cls):
    """Register constructors of the ns3::SimpleRefCount<ns3::Ipv4Route> instantiation (simple-ref-count.h, module 'core')."""
    copy_arg_type = 'ns3::SimpleRefCount< ns3::Ipv4Route, ns3::empty, ns3::DefaultDeleter< ns3::Ipv4Route > > const &'
    cls.add_constructor([])  # default constructor
    cls.add_constructor([param(copy_arg_type, 'o')])  # copy constructor
def register_Ns3SimpleRefCount__Ns3NixVector_Ns3Empty_Ns3DefaultDeleter__lt__ns3NixVector__gt___methods(root_module, cls):
    """Register constructors of the ns3::SimpleRefCount<ns3::NixVector> instantiation (simple-ref-count.h, module 'core')."""
    copy_arg_type = 'ns3::SimpleRefCount< ns3::NixVector, ns3::empty, ns3::DefaultDeleter< ns3::NixVector > > const &'
    cls.add_constructor([])  # default constructor
    cls.add_constructor([param(copy_arg_type, 'o')])  # copy constructor
def register_Ns3SimpleRefCount__Ns3OutputStreamWrapper_Ns3Empty_Ns3DefaultDeleter__lt__ns3OutputStreamWrapper__gt___methods(root_module, cls):
    """Register constructors of the ns3::SimpleRefCount<ns3::OutputStreamWrapper> instantiation (simple-ref-count.h, module 'core')."""
    copy_arg_type = 'ns3::SimpleRefCount< ns3::OutputStreamWrapper, ns3::empty, ns3::DefaultDeleter< ns3::OutputStreamWrapper > > const &'
    cls.add_constructor([])  # default constructor
    cls.add_constructor([param(copy_arg_type, 'o')])  # copy constructor
def register_Ns3SimpleRefCount__Ns3Packet_Ns3Empty_Ns3DefaultDeleter__lt__ns3Packet__gt___methods(root_module, cls):
    """Register constructors of the ns3::SimpleRefCount<ns3::Packet> instantiation (simple-ref-count.h, module 'core')."""
    copy_arg_type = 'ns3::SimpleRefCount< ns3::Packet, ns3::empty, ns3::DefaultDeleter< ns3::Packet > > const &'
    cls.add_constructor([])  # default constructor
    cls.add_constructor([param(copy_arg_type, 'o')])  # copy constructor
def register_Ns3SimpleRefCount__Ns3QueueItem_Ns3Empty_Ns3DefaultDeleter__lt__ns3QueueItem__gt___methods(root_module, cls):
    """Register constructors of the ns3::SimpleRefCount<ns3::QueueItem> instantiation (simple-ref-count.h, module 'core')."""
    copy_arg_type = 'ns3::SimpleRefCount< ns3::QueueItem, ns3::empty, ns3::DefaultDeleter< ns3::QueueItem > > const &'
    cls.add_constructor([])  # default constructor
    cls.add_constructor([param(copy_arg_type, 'o')])  # copy constructor
def register_Ns3SimpleRefCount__Ns3TraceSourceAccessor_Ns3Empty_Ns3DefaultDeleter__lt__ns3TraceSourceAccessor__gt___methods(root_module, cls):
    """Register constructors of the ns3::SimpleRefCount<ns3::TraceSourceAccessor> instantiation (simple-ref-count.h, module 'core')."""
    copy_arg_type = 'ns3::SimpleRefCount< ns3::TraceSourceAccessor, ns3::empty, ns3::DefaultDeleter< ns3::TraceSourceAccessor > > const &'
    cls.add_constructor([])  # default constructor
    cls.add_constructor([param(copy_arg_type, 'o')])  # copy constructor
def register_Ns3Socket_methods(root_module, cls):
## socket.h (module 'network'): ns3::Socket::Socket(ns3::Socket const & arg0) [constructor]
cls.add_constructor([param('ns3::Socket const &', 'arg0')])
## socket.h (module 'network'): ns3::Socket::Socket() [constructor]
cls.add_constructor([])
## socket.h (module 'network'): int ns3::Socket::Bind(ns3::Address const & address) [member function]
cls.add_method('Bind',
'int',
[param('ns3::Address const &', 'address')],
is_pure_virtual=True, is_virtual=True)
## socket.h (module 'network'): int ns3::Socket::Bind() [member function]
cls.add_method('Bind',
'int',
[],
is_pure_virtual=True, is_virtual=True)
## socket.h (module 'network'): int ns3::Socket::Bind6() [member function]
cls.add_method('Bind6',
'int',
[],
is_pure_virtual=True, is_virtual=True)
## socket.h (module 'network'): void ns3::Socket::BindToNetDevice(ns3::Ptr<ns3::NetDevice> netdevice) [member function]
cls.add_method('BindToNetDevice',
'void',
[param('ns3::Ptr< ns3::NetDevice >', 'netdevice')],
is_virtual=True)
## socket.h (module 'network'): int ns3::Socket::Close() [member function]
cls.add_method('Close',
'int',
[],
is_pure_virtual=True, is_virtual=True)
## socket.h (module 'network'): int ns3::Socket::Connect(ns3::Address const & address) [member function]
cls.add_method('Connect',
'int',
[param('ns3::Address const &', 'address')],
is_pure_virtual=True, is_virtual=True)
## socket.h (module 'network'): static ns3::Ptr<ns3::Socket> ns3::Socket::CreateSocket(ns3::Ptr<ns3::Node> node, ns3::TypeId tid) [member function]
cls.add_method('CreateSocket',
'ns3::Ptr< ns3::Socket >',
[param('ns3::Ptr< ns3::Node >', 'node'), param('ns3::TypeId', 'tid')],
is_static=True)
## socket.h (module 'network'): bool ns3::Socket::GetAllowBroadcast() const [member function]
cls.add_method('GetAllowBroadcast',
'bool',
[],
is_pure_virtual=True, is_const=True, is_virtual=True)
## socket.h (module 'network'): ns3::Ptr<ns3::NetDevice> ns3::Socket::GetBoundNetDevice() [member function]
cls.add_method('GetBoundNetDevice',
'ns3::Ptr< ns3::NetDevice >',
[])
## socket.h (module 'network'): ns3::Socket::SocketErrno ns3::Socket::GetErrno() const [member function]
cls.add_method('GetErrno',
'ns3::Socket::SocketErrno',
[],
is_pure_virtual=True, is_const=True, is_virtual=True)
## socket.h (module 'network'): uint8_t ns3::Socket::GetIpTos() const [member function]
cls.add_method('GetIpTos',
'uint8_t',
[],
is_const=True)
## socket.h (module 'network'): uint8_t ns3::Socket::GetIpTtl() const [member function]
cls.add_method('GetIpTtl',
'uint8_t',
[],
is_const=True, is_virtual=True)
## socket.h (module 'network'): uint8_t ns3::Socket::GetIpv6HopLimit() const [member function]
cls.add_method('GetIpv6HopLimit',
'uint8_t',
[],
is_const=True, is_virtual=True)
## socket.h (module 'network'): uint8_t ns3::Socket::GetIpv6Tclass() const [member function]
cls.add_method('GetIpv6Tclass',
'uint8_t',
[],
is_const=True)
## socket.h (module 'network'): ns3::Ptr<ns3::Node> ns3::Socket::GetNode() const [member function]
cls.add_method('GetNode',
'ns3::Ptr< ns3::Node >',
[],
is_pure_virtual=True, is_const=True, is_virtual=True)
## socket.h (module 'network'): int ns3::Socket::GetPeerName(ns3::Address & address) const [member function]
cls.add_method('GetPeerName',
'int',
[param('ns3::Address &', 'address')],
is_pure_virtual=True, is_const=True, is_virtual=True)
## socket.h (module 'network'): uint8_t ns3::Socket::GetPriority() const [member function]
cls.add_method('GetPriority',
'uint8_t',
[],
is_const=True)
## socket.h (module 'network'): uint32_t ns3::Socket::GetRxAvailable() const [member function]
cls.add_method('GetRxAvailable',
'uint32_t',
[],
is_pure_virtual=True, is_const=True, is_virtual=True)
## socket.h (module 'network'): int ns3::Socket::GetSockName(ns3::Address & address) const [member function]
cls.add_method('GetSockName',
'int',
[param('ns3::Address &', 'address')],
is_pure_virtual=True, is_const=True, is_virtual=True)
## socket.h (module 'network'): ns3::Socket::SocketType ns3::Socket::GetSocketType() const [member function]
cls.add_method('GetSocketType',
'ns3::Socket::SocketType',
[],
is_pure_virtual=True, is_const=True, is_virtual=True)
## socket.h (module 'network'): uint32_t ns3::Socket::GetTxAvailable() const [member function]
cls.add_method('GetTxAvailable',
'uint32_t',
[],
is_pure_virtual=True, is_const=True, is_virtual=True)
## socket.h (module 'network'): static ns3::TypeId ns3::Socket::GetTypeId() [member function]
cls.add_method('GetTypeId',
'ns3::TypeId',
[],
is_static=True)
## socket.h (module 'network'): static uint8_t ns3::Socket::IpTos2Priority(uint8_t ipTos) [member function]
cls.add_method('IpTos2Priority',
'uint8_t',
[param('uint8_t', 'ipTos')],
is_static=True)
## socket.h (module 'network'): void ns3::Socket::Ipv6JoinGroup(ns3::Ipv6Address address, ns3::Socket::Ipv6MulticastFilterMode filterMode, std::vector<ns3::Ipv6Address, std::allocator<ns3::Ipv6Address> > sourceAddresses) [member function]
cls.add_method('Ipv6JoinGroup',
'void',
[param('ns3::Ipv6Address', 'address'), param('ns3::Socket::Ipv6MulticastFilterMode', 'filterMode'), param('std::vector< ns3::Ipv6Address >', 'sourceAddresses')],
is_virtual=True)
## socket.h (module 'network'): void ns3::Socket::Ipv6JoinGroup(ns3::Ipv6Address address) [member function]
cls.add_method('Ipv6JoinGroup',
'void',
[param('ns3::Ipv6Address', 'address')],
is_virtual=True)
## socket.h (module 'network'): void ns3::Socket::Ipv6LeaveGroup() [member function]
cls.add_method('Ipv6LeaveGroup',
'void',
[],
is_virtual=True)
## socket.h (module 'network'): bool ns3::Socket::IsIpRecvTos() const [member function]
cls.add_method('IsIpRecvTos',
'bool',
[],
is_const=True)
## socket.h (module 'network'): bool ns3::Socket::IsIpRecvTtl() const [member function]
cls.add_method('IsIpRecvTtl',
'bool',
[],
is_const=True)
## socket.h (module 'network'): bool ns3::Socket::IsIpv6RecvHopLimit() const [member function]
cls.add_method('IsIpv6RecvHopLimit',
'bool',
[],
is_const=True)
## socket.h (module 'network'): bool ns3::Socket::IsIpv6RecvTclass() const [member function]
cls.add_method('IsIpv6RecvTclass',
'bool',
[],
is_const=True)
## socket.h (module 'network'): bool ns3::Socket::IsRecvPktInfo() const [member function]
cls.add_method('IsRecvPktInfo',
'bool',
[],
is_const=True)
## socket.h (module 'network'): int ns3::Socket::Listen() [member function]
cls.add_method('Listen',
'int',
[],
is_pure_virtual=True, is_virtual=True)
## socket.h (module 'network'): ns3::Ptr<ns3::Packet> ns3::Socket::Recv(uint32_t maxSize, uint32_t flags) [member function]
cls.add_method('Recv',
'ns3::Ptr< ns3::Packet >',
[param('uint32_t', 'maxSize'), param('uint32_t', 'flags')],
is_pure_virtual=True, is_virtual=True)
## socket.h (module 'network'): ns3::Ptr<ns3::Packet> ns3::Socket::Recv() [member function]
cls.add_method('Recv',
'ns3::Ptr< ns3::Packet >',
[])
## socket.h (module 'network'): int ns3::Socket::Recv(uint8_t * buf, uint32_t size, uint32_t flags) [member function]
cls.add_method('Recv',
'int',
[param('uint8_t *', 'buf'), param('uint32_t', 'size'), param('uint32_t', 'flags')])
## socket.h (module 'network'): ns3::Ptr<ns3::Packet> ns3::Socket::RecvFrom(uint32_t maxSize, uint32_t flags, ns3::Address & fromAddress) [member function]
cls.add_method('RecvFrom',
'ns3::Ptr< ns3::Packet >',
[param('uint32_t', 'maxSize'), param('uint32_t', 'flags'), param('ns3::Address &', 'fromAddress')],
is_pure_virtual=True, is_virtual=True)
## socket.h (module 'network'): ns3::Ptr<ns3::Packet> ns3::Socket::RecvFrom(ns3::Address & fromAddress) [member function]
cls.add_method('RecvFrom',
'ns3::Ptr< ns3::Packet >',
[param('ns3::Address &', 'fromAddress')])
## socket.h (module 'network'): int ns3::Socket::RecvFrom(uint8_t * buf, uint32_t size, uint32_t flags, ns3::Address & fromAddress) [member function]
cls.add_method('RecvFrom',
'int',
[param('uint8_t *', 'buf'), param('uint32_t', 'size'), param('uint32_t', 'flags'), param('ns3::Address &', 'fromAddress')])
## socket.h (module 'network'): int ns3::Socket::Send(ns3::Ptr<ns3::Packet> p, uint32_t flags) [member function]
cls.add_method('Send',
'int',
[param('ns3::Ptr< ns3::Packet >', 'p'), param('uint32_t', 'flags')],
is_pure_virtual=True, is_virtual=True)
## socket.h (module 'network'): int ns3::Socket::Send(ns3::Ptr<ns3::Packet> p) [member function]
cls.add_method('Send',
'int',
[param('ns3::Ptr< ns3::Packet >', 'p')])
## socket.h (module 'network'): int ns3::Socket::Send(uint8_t const * buf, uint32_t size, uint32_t flags) [member function]
cls.add_method('Send',
'int',
[param('uint8_t const *', 'buf'), param('uint32_t', 'size'), param('uint32_t', 'flags')])
## socket.h (module 'network'): int ns3::Socket::SendTo(ns3::Ptr<ns3::Packet> p, uint32_t flags, ns3::Address const & toAddress) [member function]
cls.add_method('SendTo',
'int',
[param('ns3::Ptr< ns3::Packet >', 'p'), param('uint32_t', 'flags'), param('ns3::Address const &', 'toAddress')],
is_pure_virtual=True, is_virtual=True)
## socket.h (module 'network'): int ns3::Socket::SendTo(uint8_t const * buf, uint32_t size, uint32_t flags, ns3::Address const & address) [member function]
cls.add_method('SendTo',
'int',
[param('uint8_t const *', 'buf'), param('uint32_t', 'size'), param('uint32_t', 'flags'), param('ns3::Address const &', 'address')])
## socket.h (module 'network'): void ns3::Socket::SetAcceptCallback(ns3::Callback<bool, ns3::Ptr<ns3::Socket>, const ns3::Address &, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty> connectionRequest, ns3::Callback<void, ns3::Ptr<ns3::Socket>, const ns3::Address &, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty> newConnectionCreated) [member function]
cls.add_method('SetAcceptCallback',
'void',
[param('ns3::Callback< bool, ns3::Ptr< ns3::Socket >, ns3::Address const &, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >', 'connectionRequest'), param('ns3::Callback< void, ns3::Ptr< ns3::Socket >, ns3::Address const &, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >', 'newConnectionCreated')])
## socket.h (module 'network'): bool ns3::Socket::SetAllowBroadcast(bool allowBroadcast) [member function]
cls.add_method('SetAllowBroadcast',
'bool',
[param('bool', 'allowBroadcast')],
is_pure_virtual=True, is_virtual=True)
## socket.h (module 'network'): void ns3::Socket::SetCloseCallbacks(ns3::Callback<void, ns3::Ptr<ns3::Socket>, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty> normalClose, ns3::Callback<void, ns3::Ptr<ns3::Socket>, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty> errorClose) [member function]
cls.add_method('SetCloseCallbacks',
'void',
[param('ns3::Callback< void, ns3::Ptr< ns3::Socket >, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >', 'normalClose'), param('ns3::Callback< void, ns3::Ptr< ns3::Socket >, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >', 'errorClose')])
## socket.h (module 'network'): void ns3::Socket::SetConnectCallback(ns3::Callback<void, ns3::Ptr<ns3::Socket>, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty> connectionSucceeded, ns3::Callback<void, ns3::Ptr<ns3::Socket>, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty> connectionFailed) [member function]
cls.add_method('SetConnectCallback',
'void',
[param('ns3::Callback< void, ns3::Ptr< ns3::Socket >, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >', 'connectionSucceeded'), param('ns3::Callback< void, ns3::Ptr< ns3::Socket >, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >', 'connectionFailed')])
## socket.h (module 'network'): void ns3::Socket::SetDataSentCallback(ns3::Callback<void, ns3::Ptr<ns3::Socket>, unsigned int, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty> dataSent) [member function]
cls.add_method('SetDataSentCallback',
'void',
[param('ns3::Callback< void, ns3::Ptr< ns3::Socket >, unsigned int, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >', 'dataSent')])
## socket.h (module 'network'): void ns3::Socket::SetIpRecvTos(bool ipv4RecvTos) [member function]
cls.add_method('SetIpRecvTos',
'void',
[param('bool', 'ipv4RecvTos')])
## socket.h (module 'network'): void ns3::Socket::SetIpRecvTtl(bool ipv4RecvTtl) [member function]
cls.add_method('SetIpRecvTtl',
'void',
[param('bool', 'ipv4RecvTtl')])
## socket.h (module 'network'): void ns3::Socket::SetIpTos(uint8_t ipTos) [member function]
cls.add_method('SetIpTos',
'void',
[param('uint8_t', 'ipTos')])
## socket.h (module 'network'): void ns3::Socket::SetIpTtl(uint8_t ipTtl) [member function]
cls.add_method('SetIpTtl',
'void',
[param('uint8_t', 'ipTtl')],
is_virtual=True)
## socket.h (module 'network'): void ns3::Socket::SetIpv6HopLimit(uint8_t ipHopLimit) [member function]
cls.add_method('SetIpv6HopLimit',
'void',
[param('uint8_t', 'ipHopLimit')],
is_virtual=True)
## socket.h (module 'network'): void ns3::Socket::SetIpv6RecvHopLimit(bool ipv6RecvHopLimit) [member function]
cls.add_method('SetIpv6RecvHopLimit',
'void',
[param('bool', 'ipv6RecvHopLimit')])
## socket.h (module 'network'): void ns3::Socket::SetIpv6RecvTclass(bool ipv6RecvTclass) [member function]
cls.add_method('SetIpv6RecvTclass',
'void',
[param('bool', 'ipv6RecvTclass')])
## socket.h (module 'network'): void ns3::Socket::SetIpv6Tclass(int ipTclass) [member function]
cls.add_method('SetIpv6Tclass',
'void',
[param('int', 'ipTclass')])
## socket.h (module 'network'): void ns3::Socket::SetPriority(uint8_t priority) [member function]
cls.add_method('SetPriority',
'void',
[param('uint8_t', 'priority')])
## socket.h (module 'network'): void ns3::Socket::SetRecvCallback(ns3::Callback<void, ns3::Ptr<ns3::Socket>, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty> arg0) [member function]
cls.add_method('SetRecvCallback',
'void',
[param('ns3::Callback< void, ns3::Ptr< ns3::Socket >, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >', 'arg0')])
## socket.h (module 'network'): void ns3::Socket::SetRecvPktInfo(bool flag) [member function]
cls.add_method('SetRecvPktInfo',
'void',
[param('bool', 'flag')])
## socket.h (module 'network'): void ns3::Socket::SetSendCallback(ns3::Callback<void, ns3::Ptr<ns3::Socket>, unsigned int, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty> sendCb) [member function]
cls.add_method('SetSendCallback',
'void',
[param('ns3::Callback< void, ns3::Ptr< ns3::Socket >, unsigned int, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >', 'sendCb')])
## socket.h (module 'network'): int ns3::Socket::ShutdownRecv() [member function]
cls.add_method('ShutdownRecv',
'int',
[],
is_pure_virtual=True, is_virtual=True)
## socket.h (module 'network'): int ns3::Socket::ShutdownSend() [member function]
cls.add_method('ShutdownSend',
'int',
[],
is_pure_virtual=True, is_virtual=True)
## socket.h (module 'network'): void ns3::Socket::DoDispose() [member function]
cls.add_method('DoDispose',
'void',
[],
visibility='protected', is_virtual=True)
## socket.h (module 'network'): bool ns3::Socket::IsManualIpTtl() const [member function]
cls.add_method('IsManualIpTtl',
'bool',
[],
is_const=True, visibility='protected')
## socket.h (module 'network'): bool ns3::Socket::IsManualIpv6HopLimit() const [member function]
cls.add_method('IsManualIpv6HopLimit',
'bool',
[],
is_const=True, visibility='protected')
## socket.h (module 'network'): bool ns3::Socket::IsManualIpv6Tclass() const [member function]
cls.add_method('IsManualIpv6Tclass',
'bool',
[],
is_const=True, visibility='protected')
## socket.h (module 'network'): void ns3::Socket::NotifyConnectionFailed() [member function]
cls.add_method('NotifyConnectionFailed',
'void',
[],
visibility='protected')
## socket.h (module 'network'): bool ns3::Socket::NotifyConnectionRequest(ns3::Address const & from) [member function]
cls.add_method('NotifyConnectionRequest',
'bool',
[param('ns3::Address const &', 'from')],
visibility='protected')
## socket.h (module 'network'): void ns3::Socket::NotifyConnectionSucceeded() [member function]
cls.add_method('NotifyConnectionSucceeded',
'void',
[],
visibility='protected')
## socket.h (module 'network'): void ns3::Socket::NotifyDataRecv() [member function]
cls.add_method('NotifyDataRecv',
'void',
[],
visibility='protected')
## socket.h (module 'network'): void ns3::Socket::NotifyDataSent(uint32_t size) [member function]
cls.add_method('NotifyDataSent',
'void',
[param('uint32_t', 'size')],
visibility='protected')
## socket.h (module 'network'): void ns3::Socket::NotifyErrorClose() [member function]
cls.add_method('NotifyErrorClose',
'void',
[],
visibility='protected')
## socket.h (module 'network'): void ns3::Socket::NotifyNewConnectionCreated(ns3::Ptr<ns3::Socket> socket, ns3::Address const & from) [member function]
cls.add_method('NotifyNewConnectionCreated',
'void',
[param('ns3::Ptr< ns3::Socket >', 'socket'), param('ns3::Address const &', 'from')],
visibility='protected')
## socket.h (module 'network'): void ns3::Socket::NotifyNormalClose() [member function]
cls.add_method('NotifyNormalClose',
'void',
[],
visibility='protected')
## socket.h (module 'network'): void ns3::Socket::NotifySend(uint32_t spaceAvailable) [member function]
cls.add_method('NotifySend',
'void',
[param('uint32_t', 'spaceAvailable')],
visibility='protected')
return
def register_Ns3SocketIpTosTag_methods(root_module, cls):
    """Register constructors and member functions of ns3::SocketIpTosTag (socket.h, module 'network')."""
    # Copy constructor, then default constructor.
    cls.add_constructor([param('ns3::SocketIpTosTag const &', 'arg0')])
    cls.add_constructor([])
    # Tag interface overrides and the TOS accessors, in the original registration order.
    cls.add_method('Deserialize', 'void', [param('ns3::TagBuffer', 'i')],
                   is_virtual=True)
    cls.add_method('GetInstanceTypeId', 'ns3::TypeId', [],
                   is_const=True, is_virtual=True)
    cls.add_method('GetSerializedSize', 'uint32_t', [],
                   is_const=True, is_virtual=True)
    cls.add_method('GetTos', 'uint8_t', [], is_const=True)
    cls.add_method('GetTypeId', 'ns3::TypeId', [], is_static=True)
    cls.add_method('Print', 'void', [param('std::ostream &', 'os')],
                   is_const=True, is_virtual=True)
    cls.add_method('Serialize', 'void', [param('ns3::TagBuffer', 'i')],
                   is_const=True, is_virtual=True)
    cls.add_method('SetTos', 'void', [param('uint8_t', 'tos')])
    return
def register_Ns3SocketIpTtlTag_methods(root_module, cls):
    """Register constructors and member functions of ns3::SocketIpTtlTag (socket.h, module 'network')."""
    # Copy constructor, then default constructor.
    cls.add_constructor([param('ns3::SocketIpTtlTag const &', 'arg0')])
    cls.add_constructor([])
    # Tag interface overrides and the TTL accessors, in the original registration order.
    cls.add_method('Deserialize', 'void', [param('ns3::TagBuffer', 'i')],
                   is_virtual=True)
    cls.add_method('GetInstanceTypeId', 'ns3::TypeId', [],
                   is_const=True, is_virtual=True)
    cls.add_method('GetSerializedSize', 'uint32_t', [],
                   is_const=True, is_virtual=True)
    cls.add_method('GetTtl', 'uint8_t', [], is_const=True)
    cls.add_method('GetTypeId', 'ns3::TypeId', [], is_static=True)
    cls.add_method('Print', 'void', [param('std::ostream &', 'os')],
                   is_const=True, is_virtual=True)
    cls.add_method('Serialize', 'void', [param('ns3::TagBuffer', 'i')],
                   is_const=True, is_virtual=True)
    cls.add_method('SetTtl', 'void', [param('uint8_t', 'ttl')])
    return
def register_Ns3SocketIpv6HopLimitTag_methods(root_module, cls):
    """Register constructors and member functions of ns3::SocketIpv6HopLimitTag (socket.h, module 'network')."""
    # Copy constructor, then default constructor.
    cls.add_constructor([param('ns3::SocketIpv6HopLimitTag const &', 'arg0')])
    cls.add_constructor([])
    # Tag interface overrides and the hop-limit accessors, in the original registration order.
    cls.add_method('Deserialize', 'void', [param('ns3::TagBuffer', 'i')],
                   is_virtual=True)
    cls.add_method('GetHopLimit', 'uint8_t', [], is_const=True)
    cls.add_method('GetInstanceTypeId', 'ns3::TypeId', [],
                   is_const=True, is_virtual=True)
    cls.add_method('GetSerializedSize', 'uint32_t', [],
                   is_const=True, is_virtual=True)
    cls.add_method('GetTypeId', 'ns3::TypeId', [], is_static=True)
    cls.add_method('Print', 'void', [param('std::ostream &', 'os')],
                   is_const=True, is_virtual=True)
    cls.add_method('Serialize', 'void', [param('ns3::TagBuffer', 'i')],
                   is_const=True, is_virtual=True)
    cls.add_method('SetHopLimit', 'void', [param('uint8_t', 'hopLimit')])
    return
def register_Ns3SocketIpv6TclassTag_methods(root_module, cls):
    """Register constructors and member functions of ns3::SocketIpv6TclassTag (socket.h, module 'network')."""
    # Copy constructor, then default constructor.
    cls.add_constructor([param('ns3::SocketIpv6TclassTag const &', 'arg0')])
    cls.add_constructor([])
    # Tag interface overrides and the traffic-class accessors, in the original registration order.
    cls.add_method('Deserialize', 'void', [param('ns3::TagBuffer', 'i')],
                   is_virtual=True)
    cls.add_method('GetInstanceTypeId', 'ns3::TypeId', [],
                   is_const=True, is_virtual=True)
    cls.add_method('GetSerializedSize', 'uint32_t', [],
                   is_const=True, is_virtual=True)
    cls.add_method('GetTclass', 'uint8_t', [], is_const=True)
    cls.add_method('GetTypeId', 'ns3::TypeId', [], is_static=True)
    cls.add_method('Print', 'void', [param('std::ostream &', 'os')],
                   is_const=True, is_virtual=True)
    cls.add_method('Serialize', 'void', [param('ns3::TagBuffer', 'i')],
                   is_const=True, is_virtual=True)
    cls.add_method('SetTclass', 'void', [param('uint8_t', 'tclass')])
    return
def register_Ns3SocketPriorityTag_methods(root_module, cls):
    """Register constructors and member functions of ns3::SocketPriorityTag (socket.h, module 'network')."""
    # Copy constructor, then default constructor.
    cls.add_constructor([param('ns3::SocketPriorityTag const &', 'arg0')])
    cls.add_constructor([])
    # Tag interface overrides and the priority accessors, in the original registration order.
    cls.add_method('Deserialize', 'void', [param('ns3::TagBuffer', 'i')],
                   is_virtual=True)
    cls.add_method('GetInstanceTypeId', 'ns3::TypeId', [],
                   is_const=True, is_virtual=True)
    cls.add_method('GetPriority', 'uint8_t', [], is_const=True)
    cls.add_method('GetSerializedSize', 'uint32_t', [],
                   is_const=True, is_virtual=True)
    cls.add_method('GetTypeId', 'ns3::TypeId', [], is_static=True)
    cls.add_method('Print', 'void', [param('std::ostream &', 'os')],
                   is_const=True, is_virtual=True)
    cls.add_method('Serialize', 'void', [param('ns3::TagBuffer', 'i')],
                   is_const=True, is_virtual=True)
    cls.add_method('SetPriority', 'void', [param('uint8_t', 'priority')])
    return
def register_Ns3SocketSetDontFragmentTag_methods(root_module, cls):
    """Register constructors and member functions of ns3::SocketSetDontFragmentTag (socket.h, module 'network')."""
    # Copy constructor, then default constructor.
    cls.add_constructor([param('ns3::SocketSetDontFragmentTag const &', 'arg0')])
    cls.add_constructor([])
    # Tag interface overrides plus the enable/disable toggles, in the original registration order.
    cls.add_method('Deserialize', 'void', [param('ns3::TagBuffer', 'i')],
                   is_virtual=True)
    cls.add_method('Disable', 'void', [])
    cls.add_method('Enable', 'void', [])
    cls.add_method('GetInstanceTypeId', 'ns3::TypeId', [],
                   is_const=True, is_virtual=True)
    cls.add_method('GetSerializedSize', 'uint32_t', [],
                   is_const=True, is_virtual=True)
    cls.add_method('GetTypeId', 'ns3::TypeId', [], is_static=True)
    cls.add_method('IsEnabled', 'bool', [], is_const=True)
    cls.add_method('Print', 'void', [param('std::ostream &', 'os')],
                   is_const=True, is_virtual=True)
    cls.add_method('Serialize', 'void', [param('ns3::TagBuffer', 'i')],
                   is_const=True, is_virtual=True)
    return
def register_Ns3Time_methods(root_module, cls):
    """Register operators, constructors and member functions of ns3::Time (nstime.h, module 'core').

    Registration order mirrors the generated original exactly so that the
    emitted wrapper code is unchanged.
    """
    # Rich comparison operators.
    for op in ('==', '!=', '<=', '>=', '<', '>'):
        cls.add_binary_comparison_operator(op)
    # Binary arithmetic: Time +/- Time, Time * scalar, and the three division forms.
    cls.add_binary_numeric_operator('+', root_module['ns3::Time'], root_module['ns3::Time'], param('ns3::Time const &', u'right'))
    cls.add_binary_numeric_operator('-', root_module['ns3::Time'], root_module['ns3::Time'], param('ns3::Time const &', u'right'))
    cls.add_binary_numeric_operator('*', root_module['ns3::Time'], root_module['ns3::Time'], param('int64_t const &', u'right'))
    cls.add_binary_numeric_operator('*', root_module['ns3::Time'], root_module['ns3::Time'], param('ns3::int64x64_t const &', u'right'))
    cls.add_binary_numeric_operator('/', root_module['ns3::int64x64_t'], root_module['ns3::Time'], param('ns3::Time const &', u'right'))
    cls.add_binary_numeric_operator('/', root_module['ns3::Time'], root_module['ns3::Time'], param('int64_t const &', u'right'))
    cls.add_binary_numeric_operator('/', root_module['ns3::Time'], root_module['ns3::Time'], param('ns3::int64x64_t const &', u'right'))
    # In-place arithmetic and stream output.
    cls.add_inplace_numeric_operator('+=', param('ns3::Time const &', u'right'))
    cls.add_inplace_numeric_operator('-=', param('ns3::Time const &', u'right'))
    cls.add_output_stream_operator()
    # Constructors: default, copy, one per native numeric type, int64x64_t, string.
    cls.add_constructor([])
    cls.add_constructor([param('ns3::Time const &', 'o')])
    for numeric_type in ('double', 'int', 'long int', 'long long int',
                         'unsigned int', 'long unsigned int',
                         'long long unsigned int'):
        cls.add_constructor([param(numeric_type, 'v')])
    cls.add_constructor([param('ns3::int64x64_t const &', 'v')])
    cls.add_constructor([param('std::string const &', 's')])
    # Unit-tagged view and three-way comparison.
    cls.add_method('As', 'ns3::TimeWithUnit',
                   [param('ns3::Time::Unit const', 'unit')], is_const=True)
    cls.add_method('Compare', 'int',
                   [param('ns3::Time const &', 'o')], is_const=True)
    # Static factory functions.
    cls.add_method('From', 'ns3::Time',
                   [param('ns3::int64x64_t const &', 'value')], is_static=True)
    cls.add_method('From', 'ns3::Time',
                   [param('ns3::int64x64_t const &', 'value'), param('ns3::Time::Unit', 'unit')],
                   is_static=True)
    cls.add_method('FromDouble', 'ns3::Time',
                   [param('double', 'value'), param('ns3::Time::Unit', 'unit')],
                   is_static=True)
    cls.add_method('FromInteger', 'ns3::Time',
                   [param('uint64_t', 'value'), param('ns3::Time::Unit', 'unit')],
                   is_static=True)
    # Const getters returning the stored duration in a fixed unit.
    for getter, return_type in (('GetDays', 'double'), ('GetDouble', 'double'),
                                ('GetFemtoSeconds', 'int64_t'), ('GetHours', 'double'),
                                ('GetInteger', 'int64_t'), ('GetMicroSeconds', 'int64_t'),
                                ('GetMilliSeconds', 'int64_t'), ('GetMinutes', 'double'),
                                ('GetNanoSeconds', 'int64_t'), ('GetPicoSeconds', 'int64_t')):
        cls.add_method(getter, return_type, [], is_const=True)
    cls.add_method('GetResolution', 'ns3::Time::Unit', [], is_static=True)
    for getter, return_type in (('GetSeconds', 'double'), ('GetTimeStep', 'int64_t'),
                                ('GetYears', 'double')):
        cls.add_method(getter, return_type, [], is_const=True)
    # Sign / zero predicates.
    for predicate in ('IsNegative', 'IsPositive', 'IsStrictlyNegative',
                      'IsStrictlyPositive', 'IsZero'):
        cls.add_method(predicate, 'bool', [], is_const=True)
    # Static extrema, resolution control and one-time initialization.
    cls.add_method('Max', 'ns3::Time', [], is_static=True)
    cls.add_method('Min', 'ns3::Time', [], is_static=True)
    cls.add_method('SetResolution', 'void',
                   [param('ns3::Time::Unit', 'resolution')], is_static=True)
    cls.add_method('StaticInit', 'bool', [], is_static=True)
    # Explicit conversions to a caller-specified unit.
    cls.add_method('To', 'ns3::int64x64_t',
                   [param('ns3::Time::Unit', 'unit')], is_const=True)
    cls.add_method('ToDouble', 'double',
                   [param('ns3::Time::Unit', 'unit')], is_const=True)
    cls.add_method('ToInteger', 'int64_t',
                   [param('ns3::Time::Unit', 'unit')], is_const=True)
    return
def register_Ns3TraceSourceAccessor_methods(root_module, cls):
    """Register constructors and member functions of ns3::TraceSourceAccessor (trace-source-accessor.h, module 'core')."""
    # Copy constructor, then default constructor.
    cls.add_constructor([param('ns3::TraceSourceAccessor const &', 'arg0')])
    cls.add_constructor([])
    # The four pure-virtual (dis)connect variants differ only in name and in
    # whether a context string parameter is present; build each arg list fresh
    # so every call gets its own param objects.
    for method_name, takes_context in (('Connect', True),
                                       ('ConnectWithoutContext', False),
                                       ('Disconnect', True),
                                       ('DisconnectWithoutContext', False)):
        args = [param('ns3::ObjectBase *', 'obj', transfer_ownership=False)]
        if takes_context:
            args.append(param('std::string', 'context'))
        args.append(param('ns3::CallbackBase const &', 'cb'))
        cls.add_method(method_name, 'bool', args,
                       is_pure_virtual=True, is_const=True, is_virtual=True)
    return
def register_Ns3Trailer_methods(root_module, cls):
    """Register constructors and member functions of ns3::Trailer (trailer.h, module 'network')."""
    cls.add_output_stream_operator()
    # Default constructor, then copy constructor.
    cls.add_constructor([])
    cls.add_constructor([param('ns3::Trailer const &', 'arg0')])
    # Two Deserialize overloads: end-only (pure virtual) and start/end (virtual).
    cls.add_method('Deserialize', 'uint32_t',
                   [param('ns3::Buffer::Iterator', 'end')],
                   is_pure_virtual=True, is_virtual=True)
    cls.add_method('Deserialize', 'uint32_t',
                   [param('ns3::Buffer::Iterator', 'start'), param('ns3::Buffer::Iterator', 'end')],
                   is_virtual=True)
    cls.add_method('GetSerializedSize', 'uint32_t', [],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    cls.add_method('GetTypeId', 'ns3::TypeId', [], is_static=True)
    cls.add_method('Print', 'void', [param('std::ostream &', 'os')],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    cls.add_method('Serialize', 'void', [param('ns3::Buffer::Iterator', 'start')],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    return
def register_Ns3AttributeAccessor_methods(root_module, cls):
    """Register constructors and member functions of ns3::AttributeAccessor (attribute.h, module 'core')."""
    # Copy constructor, then default constructor.
    cls.add_constructor([param('ns3::AttributeAccessor const &', 'arg0')])
    cls.add_constructor([])
    # Pure-virtual attribute read/write interface.
    cls.add_method('Get', 'bool',
                   [param('ns3::ObjectBase const *', 'object'), param('ns3::AttributeValue &', 'attribute')],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    cls.add_method('HasGetter', 'bool', [],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    cls.add_method('HasSetter', 'bool', [],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    cls.add_method('Set', 'bool',
                   [param('ns3::ObjectBase *', 'object', transfer_ownership=False), param('ns3::AttributeValue const &', 'value')],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    return
def register_Ns3AttributeChecker_methods(root_module, cls):
    """Register constructors and member functions of ns3::AttributeChecker (attribute.h, module 'core')."""
    # Copy constructor, then default constructor.
    cls.add_constructor([param('ns3::AttributeChecker const &', 'arg0')])
    cls.add_constructor([])
    # Pure-virtual checker interface.
    cls.add_method('Check', 'bool',
                   [param('ns3::AttributeValue const &', 'value')],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    cls.add_method('Copy', 'bool',
                   [param('ns3::AttributeValue const &', 'source'), param('ns3::AttributeValue &', 'destination')],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    cls.add_method('Create', 'ns3::Ptr< ns3::AttributeValue >', [],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    # Non-virtual convenience helper.
    cls.add_method('CreateValidValue', 'ns3::Ptr< ns3::AttributeValue >',
                   [param('ns3::AttributeValue const &', 'value')],
                   is_const=True)
    cls.add_method('GetUnderlyingTypeInformation', 'std::string', [],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    cls.add_method('GetValueTypeName', 'std::string', [],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    cls.add_method('HasUnderlyingTypeInformation', 'bool', [],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    return
def register_Ns3AttributeValue_methods(root_module, cls):
    """Register constructors and member functions of ns3::AttributeValue (attribute.h, module 'core')."""
    # Copy constructor, then default constructor.
    cls.add_constructor([param('ns3::AttributeValue const &', 'arg0')])
    cls.add_constructor([])
    # Pure-virtual value interface: clone plus string (de)serialization.
    cls.add_method('Copy', 'ns3::Ptr< ns3::AttributeValue >', [],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    cls.add_method('DeserializeFromString', 'bool',
                   [param('std::string', 'value'), param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
                   is_pure_virtual=True, is_virtual=True)
    cls.add_method('SerializeToString', 'std::string',
                   [param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    return
def register_Ns3CallbackChecker_methods(root_module, cls):
    """Register Python wrapper bindings for ns3::CallbackChecker (callback.h, module 'core')."""
    cls.add_constructor([])  # default constructor
    cls.add_constructor([param('ns3::CallbackChecker const &', 'arg0')])  # copy constructor
    return
def register_Ns3CallbackImplBase_methods(root_module, cls):
    """Register Python wrapper bindings for ns3::CallbackImplBase (callback.h, module 'core')."""
    # Default constructor, then copy constructor.
    cls.add_constructor([])
    cls.add_constructor([param('ns3::CallbackImplBase const &', 'arg0')])
    # Pure-virtual public interface.
    cls.add_method('GetTypeid', 'std::string', [], is_pure_virtual=True, is_const=True, is_virtual=True)
    cls.add_method('IsEqual', 'bool', [param('ns3::Ptr< ns3::CallbackImplBase const >', 'other')], is_pure_virtual=True, is_const=True, is_virtual=True)
    # Protected static helpers.
    cls.add_method('Demangle', 'std::string', [param('std::string const &', 'mangled')], is_static=True, visibility='protected')
    # One GetCppTypeid<T>() registration per template instantiation used by the
    # generated bindings; registration order matches the generator's output.
    for tmpl in [u'ns3::ObjectBase*',
                 u'void',
                 u'ns3::Ptr<ns3::NetDevice> ',
                 u'ns3::Ptr<ns3::Packet const> ',
                 u'unsigned short',
                 u'ns3::Address const&',
                 u'ns3::NetDevice::PacketType',
                 u'ns3::Ptr<ns3::Socket> ',
                 u'bool',
                 u'unsigned int',
                 u'ns3::Ipv4Header const&',
                 u'ns3::Ptr<ns3::Ipv4> ',
                 u'ns3::Ipv4L3Protocol::DropReason',
                 u'ns3::Ptr<ns3::Ipv6> ',
                 u'ns3::Ipv6Header const&',
                 u'ns3::Ipv6L3Protocol::DropReason']:
        cls.add_method('GetCppTypeid', 'std::string', [], is_static=True, visibility='protected', template_parameters=[tmpl])
    return
def register_Ns3CallbackValue_methods(root_module, cls):
    """Register Python wrapper bindings for ns3::CallbackValue (callback.h, module 'core')."""
    # Constructors: copy, default, and from a CallbackBase.
    cls.add_constructor([param('ns3::CallbackValue const &', 'arg0')])
    cls.add_constructor([])
    cls.add_constructor([param('ns3::CallbackBase const &', 'base')])
    # Virtual AttributeValue interface overrides.
    cls.add_method('Copy', 'ns3::Ptr< ns3::AttributeValue >', [], is_const=True, is_virtual=True)
    cls.add_method('DeserializeFromString', 'bool', [param('std::string', 'value'), param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')], is_virtual=True)
    cls.add_method('SerializeToString', 'std::string', [param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')], is_const=True, is_virtual=True)
    # Mutator: replace the stored callback.
    cls.add_method('Set', 'void', [param('ns3::CallbackBase', 'base')])
    return
def register_Ns3EmptyAttributeAccessor_methods(root_module, cls):
    """Register Python wrapper bindings for ns3::EmptyAttributeAccessor (attribute.h, module 'core')."""
    # Copy constructor, then default constructor.
    cls.add_constructor([param('ns3::EmptyAttributeAccessor const &', 'arg0')])
    cls.add_constructor([])
    # Virtual AttributeAccessor interface overrides.
    cls.add_method('Get', 'bool', [param('ns3::ObjectBase const *', 'object'), param('ns3::AttributeValue &', 'attribute')], is_const=True, is_virtual=True)
    cls.add_method('HasGetter', 'bool', [], is_const=True, is_virtual=True)
    cls.add_method('HasSetter', 'bool', [], is_const=True, is_virtual=True)
    cls.add_method('Set', 'bool', [param('ns3::ObjectBase *', 'object'), param('ns3::AttributeValue const &', 'value')], is_const=True, is_virtual=True)
    return
def register_Ns3EmptyAttributeChecker_methods(root_module, cls):
    """Register Python wrapper bindings for ns3::EmptyAttributeChecker (attribute.h, module 'core')."""
    # Copy constructor, then default constructor.
    cls.add_constructor([param('ns3::EmptyAttributeChecker const &', 'arg0')])
    cls.add_constructor([])
    # Virtual AttributeChecker interface overrides.
    cls.add_method('Check', 'bool', [param('ns3::AttributeValue const &', 'value')], is_const=True, is_virtual=True)
    cls.add_method('Copy', 'bool', [param('ns3::AttributeValue const &', 'source'), param('ns3::AttributeValue &', 'destination')], is_const=True, is_virtual=True)
    cls.add_method('Create', 'ns3::Ptr< ns3::AttributeValue >', [], is_const=True, is_virtual=True)
    cls.add_method('GetUnderlyingTypeInformation', 'std::string', [], is_const=True, is_virtual=True)
    cls.add_method('GetValueTypeName', 'std::string', [], is_const=True, is_virtual=True)
    cls.add_method('HasUnderlyingTypeInformation', 'bool', [], is_const=True, is_virtual=True)
    return
def register_Ns3EmptyAttributeValue_methods(root_module, cls):
    """Register Python wrapper bindings for ns3::EmptyAttributeValue (attribute.h, module 'core')."""
    # Copy constructor, then default constructor.
    cls.add_constructor([param('ns3::EmptyAttributeValue const &', 'arg0')])
    cls.add_constructor([])
    # Private virtual AttributeValue overrides.
    cls.add_method('Copy', 'ns3::Ptr< ns3::AttributeValue >', [], is_const=True, visibility='private', is_virtual=True)
    cls.add_method('DeserializeFromString', 'bool', [param('std::string', 'value'), param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')], visibility='private', is_virtual=True)
    cls.add_method('SerializeToString', 'std::string', [param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')], is_const=True, visibility='private', is_virtual=True)
    return
def register_Ns3EventImpl_methods(root_module, cls):
    """Register Python wrapper bindings for ns3::EventImpl (event-impl.h, module 'core')."""
    # Copy constructor, then default constructor.
    cls.add_constructor([param('ns3::EventImpl const &', 'arg0')])
    cls.add_constructor([])
    # Public interface.
    cls.add_method('Cancel', 'void', [])
    cls.add_method('Invoke', 'void', [])
    cls.add_method('IsCancelled', 'bool', [])
    # Protected pure-virtual hook invoked when the event fires.
    cls.add_method('Notify', 'void', [], is_pure_virtual=True, visibility='protected', is_virtual=True)
    return
def register_Ns3FlowClassifier_methods(root_module, cls):
    """Register Python wrapper bindings for ns3::FlowClassifier (flow-classifier.h, module 'flow-monitor')."""
    cls.add_constructor([])
    # Pure-virtual XML serialization entry point.
    cls.add_method('SerializeToXmlStream', 'void', [param('std::ostream &', 'os'), param('uint16_t', 'indent')], is_pure_virtual=True, is_const=True, is_virtual=True)
    # Protected helpers used by subclasses.
    cls.add_method('GetNewFlowId', 'ns3::FlowId', [], visibility='protected')
    cls.add_method('Indent', 'void', [param('std::ostream &', 'os'), param('uint16_t', 'level')], is_const=True, visibility='protected')
    return
def register_Ns3FlowMonitor_methods(root_module, cls):
    """Register Python wrapper bindings for ns3::FlowMonitor (flow-monitor.h, module 'flow-monitor')."""
    # Copy constructor, then default constructor.
    cls.add_constructor([param('ns3::FlowMonitor const &', 'arg0')])
    cls.add_constructor([])
    # Attach classifiers and probes.
    cls.add_method('AddFlowClassifier', 'void', [param('ns3::Ptr< ns3::FlowClassifier >', 'classifier')])
    cls.add_method('AddProbe', 'void', [param('ns3::Ptr< ns3::FlowProbe >', 'probe')])
    # Lost-packet accounting (no-argument and max-delay overloads).
    cls.add_method('CheckForLostPackets', 'void', [])
    cls.add_method('CheckForLostPackets', 'void', [param('ns3::Time', 'maxDelay')])
    # Read-only accessors.
    cls.add_method('GetAllProbes', 'ns3::FlowMonitor::FlowProbeContainer const &', [], is_const=True)
    cls.add_method('GetFlowStats', 'ns3::FlowMonitor::FlowStatsContainer const &', [], is_const=True)
    cls.add_method('GetInstanceTypeId', 'ns3::TypeId', [], is_const=True, is_virtual=True)
    cls.add_method('GetTypeId', 'ns3::TypeId', [], is_static=True)
    # Probe reporting callbacks.  ReportDrop carries an extra reason code; the
    # other three share an identical signature, so register them in a loop
    # (order matches the generator's output).
    cls.add_method('ReportDrop', 'void', [param('ns3::Ptr< ns3::FlowProbe >', 'probe'), param('uint32_t', 'flowId'), param('uint32_t', 'packetId'), param('uint32_t', 'packetSize'), param('uint32_t', 'reasonCode')])
    for report_name in ['ReportFirstTx', 'ReportForwarding', 'ReportLastRx']:
        cls.add_method(report_name, 'void', [param('ns3::Ptr< ns3::FlowProbe >', 'probe'), param('uint32_t', 'flowId'), param('uint32_t', 'packetId'), param('uint32_t', 'packetSize')])
    # XML serialization to file, stream, or string.
    cls.add_method('SerializeToXmlFile', 'void', [param('std::string', 'fileName'), param('bool', 'enableHistograms'), param('bool', 'enableProbes')])
    cls.add_method('SerializeToXmlStream', 'void', [param('std::ostream &', 'os'), param('uint16_t', 'indent'), param('bool', 'enableHistograms'), param('bool', 'enableProbes')])
    cls.add_method('SerializeToXmlString', 'std::string', [param('uint16_t', 'indent'), param('bool', 'enableHistograms'), param('bool', 'enableProbes')])
    # Monitoring start/stop (scheduled and immediate variants).
    cls.add_method('Start', 'void', [param('ns3::Time const &', 'time')])
    cls.add_method('StartRightNow', 'void', [])
    cls.add_method('Stop', 'void', [param('ns3::Time const &', 'time')])
    cls.add_method('StopRightNow', 'void', [])
    # Protected Object lifecycle overrides.
    cls.add_method('DoDispose', 'void', [], visibility='protected', is_virtual=True)
    cls.add_method('NotifyConstructionCompleted', 'void', [], visibility='protected', is_virtual=True)
    return
def register_Ns3FlowMonitorFlowStats_methods(root_module, cls):
    """Register Python wrapper bindings for ns3::FlowMonitor::FlowStats (flow-monitor.h, module 'flow-monitor')."""
    # Default constructor, then copy constructor.
    cls.add_constructor([])
    cls.add_constructor([param('ns3::FlowMonitor::FlowStats const &', 'arg0')])
    # All public data members, registered as mutable instance attributes
    # (order matches the generator's output).
    for attr_name, cpp_type in [
            ('bytesDropped', 'std::vector< unsigned long long >'),
            ('delayHistogram', 'ns3::Histogram'),
            ('delaySum', 'ns3::Time'),
            ('flowInterruptionsHistogram', 'ns3::Histogram'),
            ('jitterHistogram', 'ns3::Histogram'),
            ('jitterSum', 'ns3::Time'),
            ('lastDelay', 'ns3::Time'),
            ('lostPackets', 'uint32_t'),
            ('packetSizeHistogram', 'ns3::Histogram'),
            ('packetsDropped', 'std::vector< unsigned int >'),
            ('rxBytes', 'uint64_t'),
            ('rxPackets', 'uint32_t'),
            ('timeFirstRxPacket', 'ns3::Time'),
            ('timeFirstTxPacket', 'ns3::Time'),
            ('timeLastRxPacket', 'ns3::Time'),
            ('timeLastTxPacket', 'ns3::Time'),
            ('timesForwarded', 'uint32_t'),
            ('txBytes', 'uint64_t'),
            ('txPackets', 'uint32_t')]:
        cls.add_instance_attribute(attr_name, cpp_type, is_const=False)
    return
def register_Ns3FlowProbe_methods(root_module, cls):
    """Register Python wrapper bindings for ns3::FlowProbe (flow-probe.h, module 'flow-monitor')."""
    # Per-flow statistics accumulation.
    cls.add_method('AddPacketDropStats', 'void', [param('uint32_t', 'flowId'), param('uint32_t', 'packetSize'), param('uint32_t', 'reasonCode')])
    cls.add_method('AddPacketStats', 'void', [param('uint32_t', 'flowId'), param('uint32_t', 'packetSize'), param('ns3::Time', 'delayFromFirstProbe')])
    # Accessors and serialization.
    cls.add_method('GetStats', 'ns3::FlowProbe::Stats', [], is_const=True)
    cls.add_method('GetTypeId', 'ns3::TypeId', [], is_static=True)
    cls.add_method('SerializeToXmlStream', 'void', [param('std::ostream &', 'os'), param('uint16_t', 'indent'), param('uint32_t', 'index')], is_const=True)
    # Protected constructor (subclasses only) and Object lifecycle override.
    cls.add_constructor([param('ns3::Ptr< ns3::FlowMonitor >', 'flowMonitor')], visibility='protected')
    cls.add_method('DoDispose', 'void', [], visibility='protected', is_virtual=True)
    return
def register_Ns3FlowProbeFlowStats_methods(root_module, cls):
    """Register Python wrapper bindings for ns3::FlowProbe::FlowStats (flow-probe.h, module 'flow-monitor')."""
    # Copy constructor, then default constructor.
    cls.add_constructor([param('ns3::FlowProbe::FlowStats const &', 'arg0')])
    cls.add_constructor([])
    # All public data members, registered as mutable instance attributes
    # (order matches the generator's output).
    for attr_name, cpp_type in [
            ('bytes', 'uint64_t'),
            ('bytesDropped', 'std::vector< unsigned long long >'),
            ('delayFromFirstProbeSum', 'ns3::Time'),
            ('packets', 'uint32_t'),
            ('packetsDropped', 'std::vector< unsigned int >')]:
        cls.add_instance_attribute(attr_name, cpp_type, is_const=False)
    return
def register_Ns3Ipv4_methods(root_module, cls):
## ipv4.h (module 'internet'): ns3::Ipv4::Ipv4(ns3::Ipv4 const & arg0) [constructor]
cls.add_constructor([param('ns3::Ipv4 const &', 'arg0')])
## ipv4.h (module 'internet'): ns3::Ipv4::Ipv4() [constructor]
cls.add_constructor([])
## ipv4.h (module 'internet'): bool ns3::Ipv4::AddAddress(uint32_t interface, ns3::Ipv4InterfaceAddress address) [member function]
cls.add_method('AddAddress',
'bool',
[param('uint32_t', 'interface'), param('ns3::Ipv4InterfaceAddress', 'address')],
is_pure_virtual=True, is_virtual=True)
## ipv4.h (module 'internet'): uint32_t ns3::Ipv4::AddInterface(ns3::Ptr<ns3::NetDevice> device) [member function]
cls.add_method('AddInterface',
'uint32_t',
[param('ns3::Ptr< ns3::NetDevice >', 'device')],
is_pure_virtual=True, is_virtual=True)
## ipv4.h (module 'internet'): ns3::Ptr<ns3::Socket> ns3::Ipv4::CreateRawSocket() [member function]
cls.add_method('CreateRawSocket',
'ns3::Ptr< ns3::Socket >',
[],
is_pure_virtual=True, is_virtual=True)
## ipv4.h (module 'internet'): void ns3::Ipv4::DeleteRawSocket(ns3::Ptr<ns3::Socket> socket) [member function]
cls.add_method('DeleteRawSocket',
'void',
[param('ns3::Ptr< ns3::Socket >', 'socket')],
is_pure_virtual=True, is_virtual=True)
## ipv4.h (module 'internet'): ns3::Ipv4InterfaceAddress ns3::Ipv4::GetAddress(uint32_t interface, uint32_t addressIndex) const [member function]
cls.add_method('GetAddress',
'ns3::Ipv4InterfaceAddress',
[param('uint32_t', 'interface'), param('uint32_t', 'addressIndex')],
is_pure_virtual=True, is_const=True, is_virtual=True)
## ipv4.h (module 'internet'): int32_t ns3::Ipv4::GetInterfaceForAddress(ns3::Ipv4Address address) const [member function]
cls.add_method('GetInterfaceForAddress',
'int32_t',
[param('ns3::Ipv4Address', 'address')],
is_pure_virtual=True, is_const=True, is_virtual=True)
## ipv4.h (module 'internet'): int32_t ns3::Ipv4::GetInterfaceForDevice(ns3::Ptr<const ns3::NetDevice> device) const [member function]
cls.add_method('GetInterfaceForDevice',
'int32_t',
[param('ns3::Ptr< ns3::NetDevice const >', 'device')],
is_pure_virtual=True, is_const=True, is_virtual=True)
## ipv4.h (module 'internet'): int32_t ns3::Ipv4::GetInterfaceForPrefix(ns3::Ipv4Address address, ns3::Ipv4Mask mask) const [member function]
cls.add_method('GetInterfaceForPrefix',
'int32_t',
[param('ns3::Ipv4Address', 'address'), param('ns3::Ipv4Mask', 'mask')],
is_pure_virtual=True, is_const=True, is_virtual=True)
## ipv4.h (module 'internet'): uint16_t ns3::Ipv4::GetMetric(uint32_t interface) const [member function]
cls.add_method('GetMetric',
'uint16_t',
[param('uint32_t', 'interface')],
is_pure_virtual=True, is_const=True, is_virtual=True)
## ipv4.h (module 'internet'): uint16_t ns3::Ipv4::GetMtu(uint32_t interface) const [member function]
cls.add_method('GetMtu',
'uint16_t',
[param('uint32_t', 'interface')],
is_pure_virtual=True, is_const=True, is_virtual=True)
## ipv4.h (module 'internet'): uint32_t ns3::Ipv4::GetNAddresses(uint32_t interface) const [member function]
cls.add_method('GetNAddresses',
'uint32_t',
[param('uint32_t', 'interface')],
is_pure_virtual=True, is_const=True, is_virtual=True)
## ipv4.h (module 'internet'): uint32_t ns3::Ipv4::GetNInterfaces() const [member function]
cls.add_method('GetNInterfaces',
'uint32_t',
[],
is_pure_virtual=True, is_const=True, is_virtual=True)
## ipv4.h (module 'internet'): ns3::Ptr<ns3::NetDevice> ns3::Ipv4::GetNetDevice(uint32_t interface) [member function]
cls.add_method('GetNetDevice',
'ns3::Ptr< ns3::NetDevice >',
[param('uint32_t', 'interface')],
is_pure_virtual=True, is_virtual=True)
## ipv4.h (module 'internet'): ns3::Ptr<ns3::IpL4Protocol> ns3::Ipv4::GetProtocol(int protocolNumber) const [member function]
cls.add_method('GetProtocol',
'ns3::Ptr< ns3::IpL4Protocol >',
[param('int', 'protocolNumber')],
is_pure_virtual=True, is_const=True, is_virtual=True)
## ipv4.h (module 'internet'): ns3::Ptr<ns3::IpL4Protocol> ns3::Ipv4::GetProtocol(int protocolNumber, int32_t interfaceIndex) const [member function]
cls.add_method('GetProtocol',
'ns3::Ptr< ns3::IpL4Protocol >',
[param('int', 'protocolNumber'), param('int32_t', 'interfaceIndex')],
is_pure_virtual=True, is_const=True, is_virtual=True)
## ipv4.h (module 'internet'): ns3::Ptr<ns3::Ipv4RoutingProtocol> ns3::Ipv4::GetRoutingProtocol() const [member function]
cls.add_method('GetRoutingProtocol',
'ns3::Ptr< ns3::Ipv4RoutingProtocol >',
[],
is_pure_virtual=True, is_const=True, is_virtual=True)
## ipv4.h (module 'internet'): static ns3::TypeId ns3::Ipv4::GetTypeId() [member function]
cls.add_method('GetTypeId',
'ns3::TypeId',
[],
is_static=True)
## ipv4.h (module 'internet'): void ns3::Ipv4::Insert(ns3::Ptr<ns3::IpL4Protocol> protocol) [member function]
cls.add_method('Insert',
'void',
[param('ns3::Ptr< ns3::IpL4Protocol >', 'protocol')],
is_pure_virtual=True, is_virtual=True)
## ipv4.h (module 'internet'): void ns3::Ipv4::Insert(ns3::Ptr<ns3::IpL4Protocol> protocol, uint32_t interfaceIndex) [member function]
cls.add_method('Insert',
'void',
[param('ns3::Ptr< ns3::IpL4Protocol >', 'protocol'), param('uint32_t', 'interfaceIndex')],
is_pure_virtual=True, is_virtual=True)
## ipv4.h (module 'internet'): bool ns3::Ipv4::IsDestinationAddress(ns3::Ipv4Address address, uint32_t iif) const [member function]
cls.add_method('IsDestinationAddress',
'bool',
[param('ns3::Ipv4Address', 'address'), param('uint32_t', 'iif')],
is_pure_virtual=True, is_const=True, is_virtual=True)
## ipv4.h (module 'internet'): bool ns3::Ipv4::IsForwarding(uint32_t interface) const [member function]
cls.add_method('IsForwarding',
'bool',
[param('uint32_t', 'interface')],
is_pure_virtual=True, is_const=True, is_virtual=True)
## ipv4.h (module 'internet'): bool ns3::Ipv4::IsUp(uint32_t interface) const [member function]
cls.add_method('IsUp',
'bool',
[param('uint32_t', 'interface')],
is_pure_virtual=True, is_const=True, is_virtual=True)
## ipv4.h (module 'internet'): void ns3::Ipv4::Remove(ns3::Ptr<ns3::IpL4Protocol> protocol) [member function]
cls.add_method('Remove',
'void',
[param('ns3::Ptr< ns3::IpL4Protocol >', 'protocol')],
is_pure_virtual=True, is_virtual=True)
## ipv4.h (module 'internet'): void ns3::Ipv4::Remove(ns3::Ptr<ns3::IpL4Protocol> protocol, uint32_t interfaceIndex) [member function]
cls.add_method('Remove',
'void',
[param('ns3::Ptr< ns3::IpL4Protocol >', 'protocol'), param('uint32_t', 'interfaceIndex')],
is_pure_virtual=True, is_virtual=True)
## ipv4.h (module 'internet'): bool ns3::Ipv4::RemoveAddress(uint32_t interface, uint32_t addressIndex) [member function]
cls.add_method('RemoveAddress',
'bool',
[param('uint32_t', 'interface'), param('uint32_t', 'addressIndex')],
is_pure_virtual=True, is_virtual=True)
## ipv4.h (module 'internet'): bool ns3::Ipv4::RemoveAddress(uint32_t interface, ns3::Ipv4Address address) [member function]
cls.add_method('RemoveAddress',
'bool',
[param('uint32_t', 'interface'), param('ns3::Ipv4Address', 'address')],
is_pure_virtual=True, is_virtual=True)
## ipv4.h (module 'internet'): ns3::Ipv4Address ns3::Ipv4::SelectSourceAddress(ns3::Ptr<const ns3::NetDevice> device, ns3::Ipv4Address dst, ns3::Ipv4InterfaceAddress::InterfaceAddressScope_e scope) [member function]
cls.add_method('SelectSourceAddress',
'ns3::Ipv4Address',
[param('ns3::Ptr< ns3::NetDevice const >', 'device'), param('ns3::Ipv4Address', 'dst'), param('ns3::Ipv4InterfaceAddress::InterfaceAddressScope_e', 'scope')],
is_pure_virtual=True, is_virtual=True)
## ipv4.h (module 'internet'): void ns3::Ipv4::Send(ns3::Ptr<ns3::Packet> packet, ns3::Ipv4Address source, ns3::Ipv4Address destination, uint8_t protocol, ns3::Ptr<ns3::Ipv4Route> route) [member function]
cls.add_method('Send',
'void',
[param('ns3::Ptr< ns3::Packet >', 'packet'), param('ns3::Ipv4Address', 'source'), param('ns3::Ipv4Address', 'destination'), param('uint8_t', 'protocol'), param('ns3::Ptr< ns3::Ipv4Route >', 'route')],
is_pure_virtual=True, is_virtual=True)
## ipv4.h (module 'internet'): void ns3::Ipv4::SendWithHeader(ns3::Ptr<ns3::Packet> packet, ns3::Ipv4Header ipHeader, ns3::Ptr<ns3::Ipv4Route> route) [member function]
cls.add_method('SendWithHeader',
'void',
[param('ns3::Ptr< ns3::Packet >', 'packet'), param('ns3::Ipv4Header', 'ipHeader'), param('ns3::Ptr< ns3::Ipv4Route >', 'route')],
is_pure_virtual=True, is_virtual=True)
## ipv4.h (module 'internet'): void ns3::Ipv4::SetDown(uint32_t interface) [member function]
cls.add_method('SetDown',
'void',
[param('uint32_t', 'interface')],
is_pure_virtual=True, is_virtual=True)
## ipv4.h (module 'internet'): void ns3::Ipv4::SetForwarding(uint32_t interface, bool val) [member function]
cls.add_method('SetForwarding',
'void',
[param('uint32_t', 'interface'), param('bool', 'val')],
is_pure_virtual=True, is_virtual=True)
## ipv4.h (module 'internet'): void ns3::Ipv4::SetMetric(uint32_t interface, uint16_t metric) [member function]
cls.add_method('SetMetric',
'void',
[param('uint32_t', 'interface'), param('uint16_t', 'metric')],
is_pure_virtual=True, is_virtual=True)
## ipv4.h (module 'internet'): void ns3::Ipv4::SetRoutingProtocol(ns3::Ptr<ns3::Ipv4RoutingProtocol> routingProtocol) [member function]
cls.add_method('SetRoutingProtocol',
'void',
[param('ns3::Ptr< ns3::Ipv4RoutingProtocol >', 'routingProtocol')],
is_pure_virtual=True, is_virtual=True)
## ipv4.h (module 'internet'): void ns3::Ipv4::SetUp(uint32_t interface) [member function]
cls.add_method('SetUp',
'void',
[param('uint32_t', 'interface')],
is_pure_virtual=True, is_virtual=True)
## ipv4.h (module 'internet'): ns3::Ipv4Address ns3::Ipv4::SourceAddressSelection(uint32_t interface, ns3::Ipv4Address dest) [member function]
cls.add_method('SourceAddressSelection',
'ns3::Ipv4Address',
[param('uint32_t', 'interface'), param('ns3::Ipv4Address', 'dest')],
is_pure_virtual=True, is_virtual=True)
## ipv4.h (module 'internet'): ns3::Ipv4::IF_ANY [variable]
cls.add_static_attribute('IF_ANY', 'uint32_t const', is_const=True)
## ipv4.h (module 'internet'): bool ns3::Ipv4::GetIpForward() const [member function]
cls.add_method('GetIpForward',
'bool',
[],
is_pure_virtual=True, is_const=True, visibility='private', is_virtual=True)
## ipv4.h (module 'internet'): bool ns3::Ipv4::GetWeakEsModel() const [member function]
cls.add_method('GetWeakEsModel',
'bool',
[],
is_pure_virtual=True, is_const=True, visibility='private', is_virtual=True)
## ipv4.h (module 'internet'): void ns3::Ipv4::SetIpForward(bool forward) [member function]
cls.add_method('SetIpForward',
'void',
[param('bool', 'forward')],
is_pure_virtual=True, visibility='private', is_virtual=True)
## ipv4.h (module 'internet'): void ns3::Ipv4::SetWeakEsModel(bool model) [member function]
cls.add_method('SetWeakEsModel',
'void',
[param('bool', 'model')],
is_pure_virtual=True, visibility='private', is_virtual=True)
return
def register_Ns3Ipv4AddressChecker_methods(root_module, cls):
    """Register ns3::Ipv4AddressChecker (ipv4-address.h, module 'network').

    Only the default and copy constructors are exposed for this
    attribute-checker class.
    """
    for ctor_params in ([], [param('ns3::Ipv4AddressChecker const &', 'arg0')]):
        cls.add_constructor(ctor_params)
    return
def register_Ns3Ipv4AddressValue_methods(root_module, cls):
    """Register ns3::Ipv4AddressValue (ipv4-address.h, module 'network').

    AttributeValue wrapper around an ns3::Ipv4Address.  Constructors are
    registered in the scanned order (default, from-value, copy) so overload
    resolution matches the generated original.
    """
    cls.add_constructor([])
    cls.add_constructor([param('ns3::Ipv4Address const &', 'value')])
    cls.add_constructor([param('ns3::Ipv4AddressValue const &', 'arg0')])
    # Virtual AttributeValue interface: deep copy plus string (de)serialization.
    cls.add_method('Copy', 'ns3::Ptr< ns3::AttributeValue >', [],
                   is_const=True, is_virtual=True)
    cls.add_method('DeserializeFromString', 'bool',
                   [param('std::string', 'value'),
                    param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
                   is_virtual=True)
    # Accessor for the wrapped address value.
    cls.add_method('Get', 'ns3::Ipv4Address', [], is_const=True)
    cls.add_method('SerializeToString', 'std::string',
                   [param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
                   is_const=True, is_virtual=True)
    # Mutator for the wrapped address value.
    cls.add_method('Set', 'void', [param('ns3::Ipv4Address const &', 'value')])
    return
def register_Ns3Ipv4FlowClassifier_methods(root_module, cls):
    """Register ns3::Ipv4FlowClassifier (ipv4-flow-classifier.h, module
    'flow-monitor'): classifies IPv4 packets into flow/packet identifiers.
    """
    cls.add_constructor([])
    # Classify() writes the flow and packet ids through the two out-pointers.
    cls.add_method('Classify', 'bool',
                   [param('ns3::Ipv4Header const &', 'ipHeader'),
                    param('ns3::Ptr< ns3::Packet const >', 'ipPayload'),
                    param('uint32_t *', 'out_flowId'),
                    param('uint32_t *', 'out_packetId')])
    # Reverse lookup: a flow id back to its five-tuple.
    cls.add_method('FindFlow', 'ns3::Ipv4FlowClassifier::FiveTuple',
                   [param('uint32_t', 'flowId')],
                   is_const=True)
    # Per-flow (DSCP, packet-count) statistics.
    cls.add_method('GetDscpCounts',
                   'std::vector< std::pair< ns3::Ipv4Header::DscpType, unsigned int > >',
                   [param('uint32_t', 'flowId')],
                   is_const=True)
    cls.add_method('SerializeToXmlStream', 'void',
                   [param('std::ostream &', 'os'), param('uint16_t', 'indent')],
                   is_const=True, is_virtual=True)
    return
def register_Ns3Ipv4FlowClassifierFiveTuple_methods(root_module, cls):
    """Register ns3::Ipv4FlowClassifier::FiveTuple (ipv4-flow-classifier.h,
    module 'flow-monitor'): the (src, dst, ports, protocol) key of a flow.
    """
    # FiveTuple values are ordered and compared by value in C++.
    cls.add_binary_comparison_operator('<')
    cls.add_binary_comparison_operator('==')
    cls.add_constructor([])
    cls.add_constructor([param('ns3::Ipv4FlowClassifier::FiveTuple const &', 'arg0')])
    # Public data members, exposed as mutable instance attributes.
    for attr_name, attr_type in (('destinationAddress', 'ns3::Ipv4Address'),
                                 ('destinationPort', 'uint16_t'),
                                 ('protocol', 'uint8_t'),
                                 ('sourceAddress', 'ns3::Ipv4Address'),
                                 ('sourcePort', 'uint16_t')):
        cls.add_instance_attribute(attr_name, attr_type, is_const=False)
    return
def register_Ns3Ipv4FlowClassifierSortByCount_methods(root_module, cls):
    """Register ns3::Ipv4FlowClassifier::SortByCount (ipv4-flow-classifier.h,
    module 'flow-monitor'): comparator functor over (DSCP, count) pairs.
    """
    cls.add_constructor([])
    cls.add_constructor([param('ns3::Ipv4FlowClassifier::SortByCount const &', 'arg0')])
    # operator() is exported as Python __call__ so the functor stays callable.
    pair_type = 'std::pair< ns3::Ipv4Header::DscpType, unsigned int >'
    cls.add_method('operator()', 'bool',
                   [param(pair_type, 'left'), param(pair_type, 'right')],
                   custom_name=u'__call__')
    return
def register_Ns3Ipv4FlowProbe_methods(root_module, cls):
    """Register ns3::Ipv4FlowProbe (ipv4-flow-probe.h, module 'flow-monitor'):
    the per-node IPv4 probe that reports packet events to a FlowMonitor.
    """
    # Sole constructor: binds the probe to a monitor, a classifier and a node.
    cls.add_constructor([param('ns3::Ptr< ns3::FlowMonitor >', 'monitor'),
                         param('ns3::Ptr< ns3::Ipv4FlowClassifier >', 'classifier'),
                         param('ns3::Ptr< ns3::Node >', 'node')])
    cls.add_method('GetTypeId', 'ns3::TypeId', [], is_static=True)
    # Teardown hook; protected and virtual on the C++ side.
    cls.add_method('DoDispose', 'void', [],
                   visibility='protected', is_virtual=True)
    return
def register_Ns3Ipv4L3Protocol_methods(root_module, cls):
    """Register ns3::Ipv4L3Protocol (ipv4-l3-protocol.h, module 'internet').

    This is the concrete implementation of the ns3::Ipv4 interface:
    interface/address management, L4 protocol demultiplexing, raw sockets,
    and the IPv4 send/receive paths.  The registrations below mirror the
    scanned header exactly (auto-generated by PyBindGen); do not reorder or
    edit the C++ signature strings by hand.
    """
    ## ipv4-l3-protocol.h (module 'internet'): ns3::Ipv4L3Protocol::Ipv4L3Protocol() [constructor]
    cls.add_constructor([])
    ## ipv4-l3-protocol.h (module 'internet'): bool ns3::Ipv4L3Protocol::AddAddress(uint32_t i, ns3::Ipv4InterfaceAddress address) [member function]
    cls.add_method('AddAddress',
                   'bool',
                   [param('uint32_t', 'i'), param('ns3::Ipv4InterfaceAddress', 'address')],
                   is_virtual=True)
    ## ipv4-l3-protocol.h (module 'internet'): uint32_t ns3::Ipv4L3Protocol::AddInterface(ns3::Ptr<ns3::NetDevice> device) [member function]
    cls.add_method('AddInterface',
                   'uint32_t',
                   [param('ns3::Ptr< ns3::NetDevice >', 'device')],
                   is_virtual=True)
    ## ipv4-l3-protocol.h (module 'internet'): ns3::Ptr<ns3::Socket> ns3::Ipv4L3Protocol::CreateRawSocket() [member function]
    cls.add_method('CreateRawSocket',
                   'ns3::Ptr< ns3::Socket >',
                   [],
                   is_virtual=True)
    ## ipv4-l3-protocol.h (module 'internet'): void ns3::Ipv4L3Protocol::DeleteRawSocket(ns3::Ptr<ns3::Socket> socket) [member function]
    cls.add_method('DeleteRawSocket',
                   'void',
                   [param('ns3::Ptr< ns3::Socket >', 'socket')],
                   is_virtual=True)
    ## ipv4-l3-protocol.h (module 'internet'): ns3::Ipv4InterfaceAddress ns3::Ipv4L3Protocol::GetAddress(uint32_t interfaceIndex, uint32_t addressIndex) const [member function]
    cls.add_method('GetAddress',
                   'ns3::Ipv4InterfaceAddress',
                   [param('uint32_t', 'interfaceIndex'), param('uint32_t', 'addressIndex')],
                   is_const=True, is_virtual=True)
    ## ipv4-l3-protocol.h (module 'internet'): ns3::Ptr<ns3::Ipv4Interface> ns3::Ipv4L3Protocol::GetInterface(uint32_t i) const [member function]
    cls.add_method('GetInterface',
                   'ns3::Ptr< ns3::Ipv4Interface >',
                   [param('uint32_t', 'i')],
                   is_const=True)
    ## ipv4-l3-protocol.h (module 'internet'): int32_t ns3::Ipv4L3Protocol::GetInterfaceForAddress(ns3::Ipv4Address addr) const [member function]
    cls.add_method('GetInterfaceForAddress',
                   'int32_t',
                   [param('ns3::Ipv4Address', 'addr')],
                   is_const=True, is_virtual=True)
    ## ipv4-l3-protocol.h (module 'internet'): int32_t ns3::Ipv4L3Protocol::GetInterfaceForDevice(ns3::Ptr<const ns3::NetDevice> device) const [member function]
    cls.add_method('GetInterfaceForDevice',
                   'int32_t',
                   [param('ns3::Ptr< ns3::NetDevice const >', 'device')],
                   is_const=True, is_virtual=True)
    ## ipv4-l3-protocol.h (module 'internet'): int32_t ns3::Ipv4L3Protocol::GetInterfaceForPrefix(ns3::Ipv4Address addr, ns3::Ipv4Mask mask) const [member function]
    cls.add_method('GetInterfaceForPrefix',
                   'int32_t',
                   [param('ns3::Ipv4Address', 'addr'), param('ns3::Ipv4Mask', 'mask')],
                   is_const=True, is_virtual=True)
    ## ipv4-l3-protocol.h (module 'internet'): uint16_t ns3::Ipv4L3Protocol::GetMetric(uint32_t i) const [member function]
    cls.add_method('GetMetric',
                   'uint16_t',
                   [param('uint32_t', 'i')],
                   is_const=True, is_virtual=True)
    ## ipv4-l3-protocol.h (module 'internet'): uint16_t ns3::Ipv4L3Protocol::GetMtu(uint32_t i) const [member function]
    cls.add_method('GetMtu',
                   'uint16_t',
                   [param('uint32_t', 'i')],
                   is_const=True, is_virtual=True)
    ## ipv4-l3-protocol.h (module 'internet'): uint32_t ns3::Ipv4L3Protocol::GetNAddresses(uint32_t interface) const [member function]
    cls.add_method('GetNAddresses',
                   'uint32_t',
                   [param('uint32_t', 'interface')],
                   is_const=True, is_virtual=True)
    ## ipv4-l3-protocol.h (module 'internet'): uint32_t ns3::Ipv4L3Protocol::GetNInterfaces() const [member function]
    cls.add_method('GetNInterfaces',
                   'uint32_t',
                   [],
                   is_const=True, is_virtual=True)
    ## ipv4-l3-protocol.h (module 'internet'): ns3::Ptr<ns3::NetDevice> ns3::Ipv4L3Protocol::GetNetDevice(uint32_t i) [member function]
    cls.add_method('GetNetDevice',
                   'ns3::Ptr< ns3::NetDevice >',
                   [param('uint32_t', 'i')],
                   is_virtual=True)
    ## ipv4-l3-protocol.h (module 'internet'): ns3::Ptr<ns3::IpL4Protocol> ns3::Ipv4L3Protocol::GetProtocol(int protocolNumber) const [member function]
    cls.add_method('GetProtocol',
                   'ns3::Ptr< ns3::IpL4Protocol >',
                   [param('int', 'protocolNumber')],
                   is_const=True, is_virtual=True)
    ## ipv4-l3-protocol.h (module 'internet'): ns3::Ptr<ns3::IpL4Protocol> ns3::Ipv4L3Protocol::GetProtocol(int protocolNumber, int32_t interfaceIndex) const [member function]
    cls.add_method('GetProtocol',
                   'ns3::Ptr< ns3::IpL4Protocol >',
                   [param('int', 'protocolNumber'), param('int32_t', 'interfaceIndex')],
                   is_const=True, is_virtual=True)
    ## ipv4-l3-protocol.h (module 'internet'): ns3::Ptr<ns3::Ipv4RoutingProtocol> ns3::Ipv4L3Protocol::GetRoutingProtocol() const [member function]
    cls.add_method('GetRoutingProtocol',
                   'ns3::Ptr< ns3::Ipv4RoutingProtocol >',
                   [],
                   is_const=True, is_virtual=True)
    ## ipv4-l3-protocol.h (module 'internet'): static ns3::TypeId ns3::Ipv4L3Protocol::GetTypeId() [member function]
    cls.add_method('GetTypeId',
                   'ns3::TypeId',
                   [],
                   is_static=True)
    ## ipv4-l3-protocol.h (module 'internet'): void ns3::Ipv4L3Protocol::Insert(ns3::Ptr<ns3::IpL4Protocol> protocol) [member function]
    cls.add_method('Insert',
                   'void',
                   [param('ns3::Ptr< ns3::IpL4Protocol >', 'protocol')],
                   is_virtual=True)
    ## ipv4-l3-protocol.h (module 'internet'): void ns3::Ipv4L3Protocol::Insert(ns3::Ptr<ns3::IpL4Protocol> protocol, uint32_t interfaceIndex) [member function]
    cls.add_method('Insert',
                   'void',
                   [param('ns3::Ptr< ns3::IpL4Protocol >', 'protocol'), param('uint32_t', 'interfaceIndex')],
                   is_virtual=True)
    ## ipv4-l3-protocol.h (module 'internet'): bool ns3::Ipv4L3Protocol::IsDestinationAddress(ns3::Ipv4Address address, uint32_t iif) const [member function]
    cls.add_method('IsDestinationAddress',
                   'bool',
                   [param('ns3::Ipv4Address', 'address'), param('uint32_t', 'iif')],
                   is_const=True, is_virtual=True)
    ## ipv4-l3-protocol.h (module 'internet'): bool ns3::Ipv4L3Protocol::IsForwarding(uint32_t i) const [member function]
    cls.add_method('IsForwarding',
                   'bool',
                   [param('uint32_t', 'i')],
                   is_const=True, is_virtual=True)
    ## ipv4-l3-protocol.h (module 'internet'): bool ns3::Ipv4L3Protocol::IsUnicast(ns3::Ipv4Address ad) const [member function]
    cls.add_method('IsUnicast',
                   'bool',
                   [param('ns3::Ipv4Address', 'ad')],
                   is_const=True)
    ## ipv4-l3-protocol.h (module 'internet'): bool ns3::Ipv4L3Protocol::IsUp(uint32_t i) const [member function]
    cls.add_method('IsUp',
                   'bool',
                   [param('uint32_t', 'i')],
                   is_const=True, is_virtual=True)
    ## ipv4-l3-protocol.h (module 'internet'): void ns3::Ipv4L3Protocol::Receive(ns3::Ptr<ns3::NetDevice> device, ns3::Ptr<const ns3::Packet> p, uint16_t protocol, ns3::Address const & from, ns3::Address const & to, ns3::NetDevice::PacketType packetType) [member function]
    cls.add_method('Receive',
                   'void',
                   [param('ns3::Ptr< ns3::NetDevice >', 'device'), param('ns3::Ptr< ns3::Packet const >', 'p'), param('uint16_t', 'protocol'), param('ns3::Address const &', 'from'), param('ns3::Address const &', 'to'), param('ns3::NetDevice::PacketType', 'packetType')])
    ## ipv4-l3-protocol.h (module 'internet'): void ns3::Ipv4L3Protocol::Remove(ns3::Ptr<ns3::IpL4Protocol> protocol) [member function]
    cls.add_method('Remove',
                   'void',
                   [param('ns3::Ptr< ns3::IpL4Protocol >', 'protocol')],
                   is_virtual=True)
    ## ipv4-l3-protocol.h (module 'internet'): void ns3::Ipv4L3Protocol::Remove(ns3::Ptr<ns3::IpL4Protocol> protocol, uint32_t interfaceIndex) [member function]
    cls.add_method('Remove',
                   'void',
                   [param('ns3::Ptr< ns3::IpL4Protocol >', 'protocol'), param('uint32_t', 'interfaceIndex')],
                   is_virtual=True)
    ## ipv4-l3-protocol.h (module 'internet'): bool ns3::Ipv4L3Protocol::RemoveAddress(uint32_t interfaceIndex, uint32_t addressIndex) [member function]
    cls.add_method('RemoveAddress',
                   'bool',
                   [param('uint32_t', 'interfaceIndex'), param('uint32_t', 'addressIndex')],
                   is_virtual=True)
    ## ipv4-l3-protocol.h (module 'internet'): bool ns3::Ipv4L3Protocol::RemoveAddress(uint32_t interface, ns3::Ipv4Address address) [member function]
    cls.add_method('RemoveAddress',
                   'bool',
                   [param('uint32_t', 'interface'), param('ns3::Ipv4Address', 'address')],
                   is_virtual=True)
    ## ipv4-l3-protocol.h (module 'internet'): ns3::Ipv4Address ns3::Ipv4L3Protocol::SelectSourceAddress(ns3::Ptr<const ns3::NetDevice> device, ns3::Ipv4Address dst, ns3::Ipv4InterfaceAddress::InterfaceAddressScope_e scope) [member function]
    cls.add_method('SelectSourceAddress',
                   'ns3::Ipv4Address',
                   [param('ns3::Ptr< ns3::NetDevice const >', 'device'), param('ns3::Ipv4Address', 'dst'), param('ns3::Ipv4InterfaceAddress::InterfaceAddressScope_e', 'scope')],
                   is_virtual=True)
    ## ipv4-l3-protocol.h (module 'internet'): void ns3::Ipv4L3Protocol::Send(ns3::Ptr<ns3::Packet> packet, ns3::Ipv4Address source, ns3::Ipv4Address destination, uint8_t protocol, ns3::Ptr<ns3::Ipv4Route> route) [member function]
    cls.add_method('Send',
                   'void',
                   [param('ns3::Ptr< ns3::Packet >', 'packet'), param('ns3::Ipv4Address', 'source'), param('ns3::Ipv4Address', 'destination'), param('uint8_t', 'protocol'), param('ns3::Ptr< ns3::Ipv4Route >', 'route')],
                   is_virtual=True)
    ## ipv4-l3-protocol.h (module 'internet'): void ns3::Ipv4L3Protocol::SendWithHeader(ns3::Ptr<ns3::Packet> packet, ns3::Ipv4Header ipHeader, ns3::Ptr<ns3::Ipv4Route> route) [member function]
    cls.add_method('SendWithHeader',
                   'void',
                   [param('ns3::Ptr< ns3::Packet >', 'packet'), param('ns3::Ipv4Header', 'ipHeader'), param('ns3::Ptr< ns3::Ipv4Route >', 'route')],
                   is_virtual=True)
    ## ipv4-l3-protocol.h (module 'internet'): void ns3::Ipv4L3Protocol::SetDefaultTtl(uint8_t ttl) [member function]
    cls.add_method('SetDefaultTtl',
                   'void',
                   [param('uint8_t', 'ttl')])
    ## ipv4-l3-protocol.h (module 'internet'): void ns3::Ipv4L3Protocol::SetDown(uint32_t i) [member function]
    cls.add_method('SetDown',
                   'void',
                   [param('uint32_t', 'i')],
                   is_virtual=True)
    ## ipv4-l3-protocol.h (module 'internet'): void ns3::Ipv4L3Protocol::SetForwarding(uint32_t i, bool val) [member function]
    cls.add_method('SetForwarding',
                   'void',
                   [param('uint32_t', 'i'), param('bool', 'val')],
                   is_virtual=True)
    ## ipv4-l3-protocol.h (module 'internet'): void ns3::Ipv4L3Protocol::SetMetric(uint32_t i, uint16_t metric) [member function]
    cls.add_method('SetMetric',
                   'void',
                   [param('uint32_t', 'i'), param('uint16_t', 'metric')],
                   is_virtual=True)
    ## ipv4-l3-protocol.h (module 'internet'): void ns3::Ipv4L3Protocol::SetNode(ns3::Ptr<ns3::Node> node) [member function]
    cls.add_method('SetNode',
                   'void',
                   [param('ns3::Ptr< ns3::Node >', 'node')])
    ## ipv4-l3-protocol.h (module 'internet'): void ns3::Ipv4L3Protocol::SetRoutingProtocol(ns3::Ptr<ns3::Ipv4RoutingProtocol> routingProtocol) [member function]
    cls.add_method('SetRoutingProtocol',
                   'void',
                   [param('ns3::Ptr< ns3::Ipv4RoutingProtocol >', 'routingProtocol')],
                   is_virtual=True)
    ## ipv4-l3-protocol.h (module 'internet'): void ns3::Ipv4L3Protocol::SetUp(uint32_t i) [member function]
    cls.add_method('SetUp',
                   'void',
                   [param('uint32_t', 'i')],
                   is_virtual=True)
    ## ipv4-l3-protocol.h (module 'internet'): ns3::Ipv4Address ns3::Ipv4L3Protocol::SourceAddressSelection(uint32_t interface, ns3::Ipv4Address dest) [member function]
    cls.add_method('SourceAddressSelection',
                   'ns3::Ipv4Address',
                   [param('uint32_t', 'interface'), param('ns3::Ipv4Address', 'dest')],
                   is_virtual=True)
    ## ipv4-l3-protocol.h (module 'internet'): ns3::Ipv4L3Protocol::PROT_NUMBER [variable]
    cls.add_static_attribute('PROT_NUMBER', 'uint16_t const', is_const=True)
    ## ipv4-l3-protocol.h (module 'internet'): void ns3::Ipv4L3Protocol::DoDispose() [member function]
    cls.add_method('DoDispose',
                   'void',
                   [],
                   visibility='protected', is_virtual=True)
    ## ipv4-l3-protocol.h (module 'internet'): void ns3::Ipv4L3Protocol::NotifyNewAggregate() [member function]
    cls.add_method('NotifyNewAggregate',
                   'void',
                   [],
                   visibility='protected', is_virtual=True)
    ## ipv4-l3-protocol.h (module 'internet'): bool ns3::Ipv4L3Protocol::GetIpForward() const [member function]
    cls.add_method('GetIpForward',
                   'bool',
                   [],
                   is_const=True, visibility='private', is_virtual=True)
    ## ipv4-l3-protocol.h (module 'internet'): bool ns3::Ipv4L3Protocol::GetWeakEsModel() const [member function]
    cls.add_method('GetWeakEsModel',
                   'bool',
                   [],
                   is_const=True, visibility='private', is_virtual=True)
    ## ipv4-l3-protocol.h (module 'internet'): void ns3::Ipv4L3Protocol::SetIpForward(bool forward) [member function]
    cls.add_method('SetIpForward',
                   'void',
                   [param('bool', 'forward')],
                   visibility='private', is_virtual=True)
    ## ipv4-l3-protocol.h (module 'internet'): void ns3::Ipv4L3Protocol::SetWeakEsModel(bool model) [member function]
    cls.add_method('SetWeakEsModel',
                   'void',
                   [param('bool', 'model')],
                   visibility='private', is_virtual=True)
    return
def register_Ns3Ipv4MaskChecker_methods(root_module, cls):
    """Register ns3::Ipv4MaskChecker (ipv4-address.h, module 'network').

    Only the default and copy constructors are exposed for this
    attribute-checker class.
    """
    for ctor_params in ([], [param('ns3::Ipv4MaskChecker const &', 'arg0')]):
        cls.add_constructor(ctor_params)
    return
def register_Ns3Ipv4MaskValue_methods(root_module, cls):
    """Register ns3::Ipv4MaskValue (ipv4-address.h, module 'network').

    AttributeValue wrapper around an ns3::Ipv4Mask.  Constructors are
    registered in the scanned order (default, from-value, copy) so overload
    resolution matches the generated original.
    """
    cls.add_constructor([])
    cls.add_constructor([param('ns3::Ipv4Mask const &', 'value')])
    cls.add_constructor([param('ns3::Ipv4MaskValue const &', 'arg0')])
    # Virtual AttributeValue interface: deep copy plus string (de)serialization.
    cls.add_method('Copy', 'ns3::Ptr< ns3::AttributeValue >', [],
                   is_const=True, is_virtual=True)
    cls.add_method('DeserializeFromString', 'bool',
                   [param('std::string', 'value'),
                    param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
                   is_virtual=True)
    # Accessor for the wrapped mask value.
    cls.add_method('Get', 'ns3::Ipv4Mask', [], is_const=True)
    cls.add_method('SerializeToString', 'std::string',
                   [param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
                   is_const=True, is_virtual=True)
    # Mutator for the wrapped mask value.
    cls.add_method('Set', 'void', [param('ns3::Ipv4Mask const &', 'value')])
    return
def register_Ns3Ipv4MulticastRoute_methods(root_module, cls):
    """Register ns3::Ipv4MulticastRoute (ipv4-route.h, module 'internet').

    A multicast route entry: group/origin addresses, the parent (input)
    interface index, and per-output-interface TTLs.
    """
    # Constructors, kept in scanned order: copy first, then default.
    cls.add_constructor([param('ns3::Ipv4MulticastRoute const &', 'arg0')])
    cls.add_constructor([])
    # Read accessors for the route fields.
    cls.add_method('GetGroup', 'ns3::Ipv4Address', [], is_const=True)
    cls.add_method('GetOrigin', 'ns3::Ipv4Address', [], is_const=True)
    cls.add_method('GetOutputTtlMap', 'std::map< unsigned int, unsigned int >', [],
                   is_const=True)
    cls.add_method('GetParent', 'uint32_t', [], is_const=True)
    # Write accessors for the route fields.
    cls.add_method('SetGroup', 'void', [param('ns3::Ipv4Address const', 'group')])
    cls.add_method('SetOrigin', 'void', [param('ns3::Ipv4Address const', 'origin')])
    cls.add_method('SetOutputTtl', 'void',
                   [param('uint32_t', 'oif'), param('uint32_t', 'ttl')])
    cls.add_method('SetParent', 'void', [param('uint32_t', 'iif')])
    # Class-level limits, exposed as read-only static attributes.
    for limit_name in ('MAX_INTERFACES', 'MAX_TTL'):
        cls.add_static_attribute(limit_name, 'uint32_t const', is_const=True)
    return
def register_Ns3Ipv4Route_methods(root_module, cls):
    """Register ns3::Ipv4Route (ipv4-route.h, module 'internet').

    A unicast route entry: destination, source, gateway and output device,
    each with a getter/setter pair.  The C++ class is printable, so the
    output-stream operator is wrapped as well.
    """
    cls.add_output_stream_operator()
    # Constructors, kept in scanned order: copy first, then default.
    cls.add_constructor([param('ns3::Ipv4Route const &', 'arg0')])
    cls.add_constructor([])
    # Read accessors.
    cls.add_method('GetDestination', 'ns3::Ipv4Address', [], is_const=True)
    cls.add_method('GetGateway', 'ns3::Ipv4Address', [], is_const=True)
    cls.add_method('GetOutputDevice', 'ns3::Ptr< ns3::NetDevice >', [], is_const=True)
    cls.add_method('GetSource', 'ns3::Ipv4Address', [], is_const=True)
    # Write accessors.
    cls.add_method('SetDestination', 'void', [param('ns3::Ipv4Address', 'dest')])
    cls.add_method('SetGateway', 'void', [param('ns3::Ipv4Address', 'gw')])
    cls.add_method('SetOutputDevice', 'void',
                   [param('ns3::Ptr< ns3::NetDevice >', 'outputDevice')])
    cls.add_method('SetSource', 'void', [param('ns3::Ipv4Address', 'src')])
    return
def register_Ns3Ipv4RoutingProtocol_methods(root_module, cls):
    """Register ns3::Ipv4RoutingProtocol (ipv4-routing-protocol.h, module
    'internet'), the abstract base class for IPv4 routing protocols.

    Most methods are pure virtual hooks (interface/address notifications,
    RouteInput/RouteOutput).  The long Callback<...> template strings in
    RouteInput are emitted verbatim by PyBindGen and must not be edited by
    hand.
    """
    ## ipv4-routing-protocol.h (module 'internet'): ns3::Ipv4RoutingProtocol::Ipv4RoutingProtocol() [constructor]
    cls.add_constructor([])
    ## ipv4-routing-protocol.h (module 'internet'): ns3::Ipv4RoutingProtocol::Ipv4RoutingProtocol(ns3::Ipv4RoutingProtocol const & arg0) [constructor]
    cls.add_constructor([param('ns3::Ipv4RoutingProtocol const &', 'arg0')])
    ## ipv4-routing-protocol.h (module 'internet'): static ns3::TypeId ns3::Ipv4RoutingProtocol::GetTypeId() [member function]
    cls.add_method('GetTypeId',
                   'ns3::TypeId',
                   [],
                   is_static=True)
    ## ipv4-routing-protocol.h (module 'internet'): void ns3::Ipv4RoutingProtocol::NotifyAddAddress(uint32_t interface, ns3::Ipv4InterfaceAddress address) [member function]
    cls.add_method('NotifyAddAddress',
                   'void',
                   [param('uint32_t', 'interface'), param('ns3::Ipv4InterfaceAddress', 'address')],
                   is_pure_virtual=True, is_virtual=True)
    ## ipv4-routing-protocol.h (module 'internet'): void ns3::Ipv4RoutingProtocol::NotifyInterfaceDown(uint32_t interface) [member function]
    cls.add_method('NotifyInterfaceDown',
                   'void',
                   [param('uint32_t', 'interface')],
                   is_pure_virtual=True, is_virtual=True)
    ## ipv4-routing-protocol.h (module 'internet'): void ns3::Ipv4RoutingProtocol::NotifyInterfaceUp(uint32_t interface) [member function]
    cls.add_method('NotifyInterfaceUp',
                   'void',
                   [param('uint32_t', 'interface')],
                   is_pure_virtual=True, is_virtual=True)
    ## ipv4-routing-protocol.h (module 'internet'): void ns3::Ipv4RoutingProtocol::NotifyRemoveAddress(uint32_t interface, ns3::Ipv4InterfaceAddress address) [member function]
    cls.add_method('NotifyRemoveAddress',
                   'void',
                   [param('uint32_t', 'interface'), param('ns3::Ipv4InterfaceAddress', 'address')],
                   is_pure_virtual=True, is_virtual=True)
    ## ipv4-routing-protocol.h (module 'internet'): void ns3::Ipv4RoutingProtocol::PrintRoutingTable(ns3::Ptr<ns3::OutputStreamWrapper> stream, ns3::Time::Unit unit=::ns3::Time::Unit::S) const [member function]
    cls.add_method('PrintRoutingTable',
                   'void',
                   [param('ns3::Ptr< ns3::OutputStreamWrapper >', 'stream'), param('ns3::Time::Unit', 'unit', default_value='::ns3::Time::Unit::S')],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    ## ipv4-routing-protocol.h (module 'internet'): bool ns3::Ipv4RoutingProtocol::RouteInput(ns3::Ptr<const ns3::Packet> p, ns3::Ipv4Header const & header, ns3::Ptr<const ns3::NetDevice> idev, ns3::Ipv4RoutingProtocol::UnicastForwardCallback ucb, ns3::Ipv4RoutingProtocol::MulticastForwardCallback mcb, ns3::Ipv4RoutingProtocol::LocalDeliverCallback lcb, ns3::Ipv4RoutingProtocol::ErrorCallback ecb) [member function]
    cls.add_method('RouteInput',
                   'bool',
                   [param('ns3::Ptr< ns3::Packet const >', 'p'), param('ns3::Ipv4Header const &', 'header'), param('ns3::Ptr< ns3::NetDevice const >', 'idev'), param('ns3::Callback< void, ns3::Ptr< ns3::Ipv4Route >, ns3::Ptr< ns3::Packet const >, ns3::Ipv4Header const &, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >', 'ucb'), param('ns3::Callback< void, ns3::Ptr< ns3::Ipv4MulticastRoute >, ns3::Ptr< ns3::Packet const >, ns3::Ipv4Header const &, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >', 'mcb'), param('ns3::Callback< void, ns3::Ptr< ns3::Packet const >, ns3::Ipv4Header const &, unsigned int, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >', 'lcb'), param('ns3::Callback< void, ns3::Ptr< ns3::Packet const >, ns3::Ipv4Header const &, ns3::Socket::SocketErrno, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >', 'ecb')],
                   is_pure_virtual=True, is_virtual=True)
    ## ipv4-routing-protocol.h (module 'internet'): ns3::Ptr<ns3::Ipv4Route> ns3::Ipv4RoutingProtocol::RouteOutput(ns3::Ptr<ns3::Packet> p, ns3::Ipv4Header const & header, ns3::Ptr<ns3::NetDevice> oif, ns3::Socket::SocketErrno & sockerr) [member function]
    cls.add_method('RouteOutput',
                   'ns3::Ptr< ns3::Ipv4Route >',
                   [param('ns3::Ptr< ns3::Packet >', 'p'), param('ns3::Ipv4Header const &', 'header'), param('ns3::Ptr< ns3::NetDevice >', 'oif'), param('ns3::Socket::SocketErrno &', 'sockerr')],
                   is_pure_virtual=True, is_virtual=True)
    ## ipv4-routing-protocol.h (module 'internet'): void ns3::Ipv4RoutingProtocol::SetIpv4(ns3::Ptr<ns3::Ipv4> ipv4) [member function]
    cls.add_method('SetIpv4',
                   'void',
                   [param('ns3::Ptr< ns3::Ipv4 >', 'ipv4')],
                   is_pure_virtual=True, is_virtual=True)
    return
def register_Ns3Ipv6_methods(root_module, cls):
    """Register ns3::Ipv6 (ipv6.h, module 'internet') on the class wrapper ``cls``.

    Auto-generated-style binding registration: each ``##`` comment cites the
    C++ declaration being wrapped.  Almost every member is registered as pure
    virtual (ns3::Ipv6 is an abstract interface); the IpForward and
    MtuDiscover accessors are registered with visibility='private'.
    ``root_module`` is accepted for signature uniformity with the other
    ``register_*_methods`` helpers and is not used here.
    """
    ## ipv6.h (module 'internet'): ns3::Ipv6::Ipv6(ns3::Ipv6 const & arg0) [constructor]
    cls.add_constructor([param('ns3::Ipv6 const &', 'arg0')])
    ## ipv6.h (module 'internet'): ns3::Ipv6::Ipv6() [constructor]
    cls.add_constructor([])
    ## ipv6.h (module 'internet'): bool ns3::Ipv6::AddAddress(uint32_t interface, ns3::Ipv6InterfaceAddress address) [member function]
    cls.add_method('AddAddress',
                   'bool',
                   [param('uint32_t', 'interface'), param('ns3::Ipv6InterfaceAddress', 'address')],
                   is_pure_virtual=True, is_virtual=True)
    ## ipv6.h (module 'internet'): uint32_t ns3::Ipv6::AddInterface(ns3::Ptr<ns3::NetDevice> device) [member function]
    cls.add_method('AddInterface',
                   'uint32_t',
                   [param('ns3::Ptr< ns3::NetDevice >', 'device')],
                   is_pure_virtual=True, is_virtual=True)
    ## ipv6.h (module 'internet'): ns3::Ipv6InterfaceAddress ns3::Ipv6::GetAddress(uint32_t interface, uint32_t addressIndex) const [member function]
    cls.add_method('GetAddress',
                   'ns3::Ipv6InterfaceAddress',
                   [param('uint32_t', 'interface'), param('uint32_t', 'addressIndex')],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    ## ipv6.h (module 'internet'): int32_t ns3::Ipv6::GetInterfaceForAddress(ns3::Ipv6Address address) const [member function]
    cls.add_method('GetInterfaceForAddress',
                   'int32_t',
                   [param('ns3::Ipv6Address', 'address')],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    ## ipv6.h (module 'internet'): int32_t ns3::Ipv6::GetInterfaceForDevice(ns3::Ptr<const ns3::NetDevice> device) const [member function]
    cls.add_method('GetInterfaceForDevice',
                   'int32_t',
                   [param('ns3::Ptr< ns3::NetDevice const >', 'device')],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    ## ipv6.h (module 'internet'): int32_t ns3::Ipv6::GetInterfaceForPrefix(ns3::Ipv6Address address, ns3::Ipv6Prefix mask) const [member function]
    cls.add_method('GetInterfaceForPrefix',
                   'int32_t',
                   [param('ns3::Ipv6Address', 'address'), param('ns3::Ipv6Prefix', 'mask')],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    ## ipv6.h (module 'internet'): uint16_t ns3::Ipv6::GetMetric(uint32_t interface) const [member function]
    cls.add_method('GetMetric',
                   'uint16_t',
                   [param('uint32_t', 'interface')],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    ## ipv6.h (module 'internet'): uint16_t ns3::Ipv6::GetMtu(uint32_t interface) const [member function]
    cls.add_method('GetMtu',
                   'uint16_t',
                   [param('uint32_t', 'interface')],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    ## ipv6.h (module 'internet'): uint32_t ns3::Ipv6::GetNAddresses(uint32_t interface) const [member function]
    cls.add_method('GetNAddresses',
                   'uint32_t',
                   [param('uint32_t', 'interface')],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    ## ipv6.h (module 'internet'): uint32_t ns3::Ipv6::GetNInterfaces() const [member function]
    cls.add_method('GetNInterfaces',
                   'uint32_t',
                   [],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    ## ipv6.h (module 'internet'): ns3::Ptr<ns3::NetDevice> ns3::Ipv6::GetNetDevice(uint32_t interface) [member function]
    cls.add_method('GetNetDevice',
                   'ns3::Ptr< ns3::NetDevice >',
                   [param('uint32_t', 'interface')],
                   is_pure_virtual=True, is_virtual=True)
    ## ipv6.h (module 'internet'): ns3::Ptr<ns3::IpL4Protocol> ns3::Ipv6::GetProtocol(int protocolNumber) const [member function]
    cls.add_method('GetProtocol',
                   'ns3::Ptr< ns3::IpL4Protocol >',
                   [param('int', 'protocolNumber')],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    ## ipv6.h (module 'internet'): ns3::Ptr<ns3::IpL4Protocol> ns3::Ipv6::GetProtocol(int protocolNumber, int32_t interfaceIndex) const [member function]
    cls.add_method('GetProtocol',
                   'ns3::Ptr< ns3::IpL4Protocol >',
                   [param('int', 'protocolNumber'), param('int32_t', 'interfaceIndex')],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    ## ipv6.h (module 'internet'): ns3::Ptr<ns3::Ipv6RoutingProtocol> ns3::Ipv6::GetRoutingProtocol() const [member function]
    cls.add_method('GetRoutingProtocol',
                   'ns3::Ptr< ns3::Ipv6RoutingProtocol >',
                   [],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    ## ipv6.h (module 'internet'): static ns3::TypeId ns3::Ipv6::GetTypeId() [member function]
    cls.add_method('GetTypeId',
                   'ns3::TypeId',
                   [],
                   is_static=True)
    ## ipv6.h (module 'internet'): void ns3::Ipv6::Insert(ns3::Ptr<ns3::IpL4Protocol> protocol) [member function]
    cls.add_method('Insert',
                   'void',
                   [param('ns3::Ptr< ns3::IpL4Protocol >', 'protocol')],
                   is_pure_virtual=True, is_virtual=True)
    ## ipv6.h (module 'internet'): void ns3::Ipv6::Insert(ns3::Ptr<ns3::IpL4Protocol> protocol, uint32_t interfaceIndex) [member function]
    cls.add_method('Insert',
                   'void',
                   [param('ns3::Ptr< ns3::IpL4Protocol >', 'protocol'), param('uint32_t', 'interfaceIndex')],
                   is_pure_virtual=True, is_virtual=True)
    ## ipv6.h (module 'internet'): bool ns3::Ipv6::IsForwarding(uint32_t interface) const [member function]
    cls.add_method('IsForwarding',
                   'bool',
                   [param('uint32_t', 'interface')],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    ## ipv6.h (module 'internet'): bool ns3::Ipv6::IsUp(uint32_t interface) const [member function]
    cls.add_method('IsUp',
                   'bool',
                   [param('uint32_t', 'interface')],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    ## ipv6.h (module 'internet'): void ns3::Ipv6::RegisterExtensions() [member function]
    cls.add_method('RegisterExtensions',
                   'void',
                   [],
                   is_pure_virtual=True, is_virtual=True)
    ## ipv6.h (module 'internet'): void ns3::Ipv6::RegisterOptions() [member function]
    cls.add_method('RegisterOptions',
                   'void',
                   [],
                   is_pure_virtual=True, is_virtual=True)
    ## ipv6.h (module 'internet'): void ns3::Ipv6::Remove(ns3::Ptr<ns3::IpL4Protocol> protocol) [member function]
    cls.add_method('Remove',
                   'void',
                   [param('ns3::Ptr< ns3::IpL4Protocol >', 'protocol')],
                   is_pure_virtual=True, is_virtual=True)
    ## ipv6.h (module 'internet'): void ns3::Ipv6::Remove(ns3::Ptr<ns3::IpL4Protocol> protocol, uint32_t interfaceIndex) [member function]
    cls.add_method('Remove',
                   'void',
                   [param('ns3::Ptr< ns3::IpL4Protocol >', 'protocol'), param('uint32_t', 'interfaceIndex')],
                   is_pure_virtual=True, is_virtual=True)
    ## ipv6.h (module 'internet'): bool ns3::Ipv6::RemoveAddress(uint32_t interface, uint32_t addressIndex) [member function]
    cls.add_method('RemoveAddress',
                   'bool',
                   [param('uint32_t', 'interface'), param('uint32_t', 'addressIndex')],
                   is_pure_virtual=True, is_virtual=True)
    ## ipv6.h (module 'internet'): bool ns3::Ipv6::RemoveAddress(uint32_t interface, ns3::Ipv6Address address) [member function]
    cls.add_method('RemoveAddress',
                   'bool',
                   [param('uint32_t', 'interface'), param('ns3::Ipv6Address', 'address')],
                   is_pure_virtual=True, is_virtual=True)
    ## ipv6.h (module 'internet'): void ns3::Ipv6::Send(ns3::Ptr<ns3::Packet> packet, ns3::Ipv6Address source, ns3::Ipv6Address destination, uint8_t protocol, ns3::Ptr<ns3::Ipv6Route> route) [member function]
    cls.add_method('Send',
                   'void',
                   [param('ns3::Ptr< ns3::Packet >', 'packet'), param('ns3::Ipv6Address', 'source'), param('ns3::Ipv6Address', 'destination'), param('uint8_t', 'protocol'), param('ns3::Ptr< ns3::Ipv6Route >', 'route')],
                   is_pure_virtual=True, is_virtual=True)
    ## ipv6.h (module 'internet'): void ns3::Ipv6::SetDown(uint32_t interface) [member function]
    cls.add_method('SetDown',
                   'void',
                   [param('uint32_t', 'interface')],
                   is_pure_virtual=True, is_virtual=True)
    ## ipv6.h (module 'internet'): void ns3::Ipv6::SetForwarding(uint32_t interface, bool val) [member function]
    cls.add_method('SetForwarding',
                   'void',
                   [param('uint32_t', 'interface'), param('bool', 'val')],
                   is_pure_virtual=True, is_virtual=True)
    ## ipv6.h (module 'internet'): void ns3::Ipv6::SetMetric(uint32_t interface, uint16_t metric) [member function]
    cls.add_method('SetMetric',
                   'void',
                   [param('uint32_t', 'interface'), param('uint16_t', 'metric')],
                   is_pure_virtual=True, is_virtual=True)
    ## ipv6.h (module 'internet'): void ns3::Ipv6::SetPmtu(ns3::Ipv6Address dst, uint32_t pmtu) [member function]
    cls.add_method('SetPmtu',
                   'void',
                   [param('ns3::Ipv6Address', 'dst'), param('uint32_t', 'pmtu')],
                   is_pure_virtual=True, is_virtual=True)
    ## ipv6.h (module 'internet'): void ns3::Ipv6::SetRoutingProtocol(ns3::Ptr<ns3::Ipv6RoutingProtocol> routingProtocol) [member function]
    cls.add_method('SetRoutingProtocol',
                   'void',
                   [param('ns3::Ptr< ns3::Ipv6RoutingProtocol >', 'routingProtocol')],
                   is_pure_virtual=True, is_virtual=True)
    ## ipv6.h (module 'internet'): void ns3::Ipv6::SetUp(uint32_t interface) [member function]
    cls.add_method('SetUp',
                   'void',
                   [param('uint32_t', 'interface')],
                   is_pure_virtual=True, is_virtual=True)
    ## ipv6.h (module 'internet'): ns3::Ipv6Address ns3::Ipv6::SourceAddressSelection(uint32_t interface, ns3::Ipv6Address dest) [member function]
    cls.add_method('SourceAddressSelection',
                   'ns3::Ipv6Address',
                   [param('uint32_t', 'interface'), param('ns3::Ipv6Address', 'dest')],
                   is_pure_virtual=True, is_virtual=True)
    ## ipv6.h (module 'internet'): ns3::Ipv6::IF_ANY [variable]
    cls.add_static_attribute('IF_ANY', 'uint32_t const', is_const=True)
    ## ipv6.h (module 'internet'): bool ns3::Ipv6::GetIpForward() const [member function]
    cls.add_method('GetIpForward',
                   'bool',
                   [],
                   is_pure_virtual=True, is_const=True, visibility='private', is_virtual=True)
    ## ipv6.h (module 'internet'): bool ns3::Ipv6::GetMtuDiscover() const [member function]
    cls.add_method('GetMtuDiscover',
                   'bool',
                   [],
                   is_pure_virtual=True, is_const=True, visibility='private', is_virtual=True)
    ## ipv6.h (module 'internet'): void ns3::Ipv6::SetIpForward(bool forward) [member function]
    cls.add_method('SetIpForward',
                   'void',
                   [param('bool', 'forward')],
                   is_pure_virtual=True, visibility='private', is_virtual=True)
    ## ipv6.h (module 'internet'): void ns3::Ipv6::SetMtuDiscover(bool mtuDiscover) [member function]
    cls.add_method('SetMtuDiscover',
                   'void',
                   [param('bool', 'mtuDiscover')],
                   is_pure_virtual=True, visibility='private', is_virtual=True)
    return
def register_Ns3Ipv6AddressChecker_methods(root_module, cls):
    """Register the constructors of ns3::Ipv6AddressChecker (ipv6-address.h, module 'network')."""
    # Default constructor first, then the copy constructor — order matters
    # for the generated overload dispatch.
    for ctor_params in ([], [param('ns3::Ipv6AddressChecker const &', 'arg0')]):
        cls.add_constructor(ctor_params)
    return
def register_Ns3Ipv6AddressValue_methods(root_module, cls):
    """Register constructors and methods of ns3::Ipv6AddressValue (ipv6-address.h, module 'network')."""
    # Default, value-wrapping, and copy constructors, in declaration order.
    for ctor_params in ([],
                        [param('ns3::Ipv6Address const &', 'value')],
                        [param('ns3::Ipv6AddressValue const &', 'arg0')]):
        cls.add_constructor(ctor_params)
    # ns3::Ptr<ns3::AttributeValue> Copy() const [virtual]
    cls.add_method('Copy', 'ns3::Ptr< ns3::AttributeValue >', [],
                   is_const=True, is_virtual=True)
    # bool DeserializeFromString(std::string value, Ptr<const AttributeChecker> checker) [virtual]
    cls.add_method('DeserializeFromString', 'bool',
                   [param('std::string', 'value'), param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
                   is_virtual=True)
    # ns3::Ipv6Address Get() const
    cls.add_method('Get', 'ns3::Ipv6Address', [], is_const=True)
    # std::string SerializeToString(Ptr<const AttributeChecker> checker) const [virtual]
    cls.add_method('SerializeToString', 'std::string',
                   [param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
                   is_const=True, is_virtual=True)
    # void Set(ns3::Ipv6Address const & value)
    cls.add_method('Set', 'void', [param('ns3::Ipv6Address const &', 'value')])
    return
def register_Ns3Ipv6FlowClassifier_methods(root_module, cls):
    """Register constructor and methods of ns3::Ipv6FlowClassifier (ipv6-flow-classifier.h, module 'flow-monitor')."""
    # Default constructor only.
    cls.add_constructor([])
    # bool Classify(Ipv6Header const &, Ptr<const Packet>, uint32_t* out_flowId, uint32_t* out_packetId)
    cls.add_method('Classify', 'bool',
                   [param('ns3::Ipv6Header const &', 'ipHeader'), param('ns3::Ptr< ns3::Packet const >', 'ipPayload'), param('uint32_t *', 'out_flowId'), param('uint32_t *', 'out_packetId')])
    # FiveTuple FindFlow(FlowId flowId) const
    cls.add_method('FindFlow', 'ns3::Ipv6FlowClassifier::FiveTuple',
                   [param('uint32_t', 'flowId')],
                   is_const=True)
    # vector<pair<DscpType, unsigned>> GetDscpCounts(FlowId flowId) const
    cls.add_method('GetDscpCounts',
                   'std::vector< std::pair< ns3::Ipv6Header::DscpType, unsigned int > >',
                   [param('uint32_t', 'flowId')],
                   is_const=True)
    # void SerializeToXmlStream(std::ostream &, uint16_t indent) const [virtual]
    cls.add_method('SerializeToXmlStream', 'void',
                   [param('std::ostream &', 'os'), param('uint16_t', 'indent')],
                   is_const=True, is_virtual=True)
    return
def register_Ns3Ipv6FlowClassifierFiveTuple_methods(root_module, cls):
    """Register operators, constructors and public fields of ns3::Ipv6FlowClassifier::FiveTuple."""
    # Ordering and equality operators (the struct is usable as a map key).
    for op in ('<', '=='):
        cls.add_binary_comparison_operator(op)
    # Default constructor, then copy constructor.
    cls.add_constructor([])
    cls.add_constructor([param('ns3::Ipv6FlowClassifier::FiveTuple const &', 'arg0')])
    # Plain public data members of the five-tuple, in declaration order.
    for field_name, field_type in (('destinationAddress', 'ns3::Ipv6Address'),
                                   ('destinationPort', 'uint16_t'),
                                   ('protocol', 'uint8_t'),
                                   ('sourceAddress', 'ns3::Ipv6Address'),
                                   ('sourcePort', 'uint16_t')):
        cls.add_instance_attribute(field_name, field_type, is_const=False)
    return
def register_Ns3Ipv6FlowClassifierSortByCount_methods(root_module, cls):
    """Register ns3::Ipv6FlowClassifier::SortByCount, a comparator exposed to Python as __call__."""
    # Default constructor, then copy constructor.
    cls.add_constructor([])
    cls.add_constructor([param('ns3::Ipv6FlowClassifier::SortByCount const &', 'arg0')])
    # bool operator()(pair<DscpType, unsigned>, pair<DscpType, unsigned>),
    # mapped onto the Python call protocol.
    pair_type = 'std::pair< ns3::Ipv6Header::DscpType, unsigned int >'
    cls.add_method('operator()', 'bool',
                   [param(pair_type, 'left'), param(pair_type, 'right')],
                   custom_name=u'__call__')
    return
def register_Ns3Ipv6FlowProbe_methods(root_module, cls):
    """Register constructor and methods of ns3::Ipv6FlowProbe (ipv6-flow-probe.h, module 'flow-monitor')."""
    # Ipv6FlowProbe(Ptr<FlowMonitor> monitor, Ptr<Ipv6FlowClassifier> classifier, Ptr<Node> node)
    cls.add_constructor([param('ns3::Ptr< ns3::FlowMonitor >', 'monitor'), param('ns3::Ptr< ns3::Ipv6FlowClassifier >', 'classifier'), param('ns3::Ptr< ns3::Node >', 'node')])
    # static ns3::TypeId GetTypeId()
    cls.add_method('GetTypeId', 'ns3::TypeId', [], is_static=True)
    # void DoDispose() [protected virtual]
    cls.add_method('DoDispose', 'void', [],
                   visibility='protected', is_virtual=True)
    return
def register_Ns3Ipv6L3Protocol_methods(root_module, cls):
    """Register ns3::Ipv6L3Protocol (ipv6-l3-protocol.h, module 'internet') on ``cls``.

    Auto-generated-style binding registration: each ``##`` comment cites the
    C++ declaration being wrapped.  DoDispose and NotifyNewAggregate are
    registered as protected; the IpForward, MtuDiscover and SendIcmpv6Redirect
    accessors are registered with visibility='private'.  ``root_module`` is
    accepted for signature uniformity with the other ``register_*_methods``
    helpers and is not used here.
    """
    ## ipv6-l3-protocol.h (module 'internet'): static ns3::TypeId ns3::Ipv6L3Protocol::GetTypeId() [member function]
    cls.add_method('GetTypeId',
                   'ns3::TypeId',
                   [],
                   is_static=True)
    ## ipv6-l3-protocol.h (module 'internet'): ns3::Ipv6L3Protocol::PROT_NUMBER [variable]
    cls.add_static_attribute('PROT_NUMBER', 'uint16_t const', is_const=True)
    ## ipv6-l3-protocol.h (module 'internet'): ns3::Ipv6L3Protocol::Ipv6L3Protocol() [constructor]
    cls.add_constructor([])
    ## ipv6-l3-protocol.h (module 'internet'): void ns3::Ipv6L3Protocol::SetNode(ns3::Ptr<ns3::Node> node) [member function]
    cls.add_method('SetNode',
                   'void',
                   [param('ns3::Ptr< ns3::Node >', 'node')])
    ## ipv6-l3-protocol.h (module 'internet'): void ns3::Ipv6L3Protocol::Insert(ns3::Ptr<ns3::IpL4Protocol> protocol) [member function]
    cls.add_method('Insert',
                   'void',
                   [param('ns3::Ptr< ns3::IpL4Protocol >', 'protocol')],
                   is_virtual=True)
    ## ipv6-l3-protocol.h (module 'internet'): void ns3::Ipv6L3Protocol::Insert(ns3::Ptr<ns3::IpL4Protocol> protocol, uint32_t interfaceIndex) [member function]
    cls.add_method('Insert',
                   'void',
                   [param('ns3::Ptr< ns3::IpL4Protocol >', 'protocol'), param('uint32_t', 'interfaceIndex')],
                   is_virtual=True)
    ## ipv6-l3-protocol.h (module 'internet'): void ns3::Ipv6L3Protocol::Remove(ns3::Ptr<ns3::IpL4Protocol> protocol) [member function]
    cls.add_method('Remove',
                   'void',
                   [param('ns3::Ptr< ns3::IpL4Protocol >', 'protocol')],
                   is_virtual=True)
    ## ipv6-l3-protocol.h (module 'internet'): void ns3::Ipv6L3Protocol::Remove(ns3::Ptr<ns3::IpL4Protocol> protocol, uint32_t interfaceIndex) [member function]
    cls.add_method('Remove',
                   'void',
                   [param('ns3::Ptr< ns3::IpL4Protocol >', 'protocol'), param('uint32_t', 'interfaceIndex')],
                   is_virtual=True)
    ## ipv6-l3-protocol.h (module 'internet'): ns3::Ptr<ns3::IpL4Protocol> ns3::Ipv6L3Protocol::GetProtocol(int protocolNumber) const [member function]
    cls.add_method('GetProtocol',
                   'ns3::Ptr< ns3::IpL4Protocol >',
                   [param('int', 'protocolNumber')],
                   is_const=True, is_virtual=True)
    ## ipv6-l3-protocol.h (module 'internet'): ns3::Ptr<ns3::IpL4Protocol> ns3::Ipv6L3Protocol::GetProtocol(int protocolNumber, int32_t interfaceIndex) const [member function]
    cls.add_method('GetProtocol',
                   'ns3::Ptr< ns3::IpL4Protocol >',
                   [param('int', 'protocolNumber'), param('int32_t', 'interfaceIndex')],
                   is_const=True, is_virtual=True)
    ## ipv6-l3-protocol.h (module 'internet'): ns3::Ptr<ns3::Socket> ns3::Ipv6L3Protocol::CreateRawSocket() [member function]
    cls.add_method('CreateRawSocket',
                   'ns3::Ptr< ns3::Socket >',
                   [])
    ## ipv6-l3-protocol.h (module 'internet'): void ns3::Ipv6L3Protocol::DeleteRawSocket(ns3::Ptr<ns3::Socket> socket) [member function]
    cls.add_method('DeleteRawSocket',
                   'void',
                   [param('ns3::Ptr< ns3::Socket >', 'socket')])
    ## ipv6-l3-protocol.h (module 'internet'): void ns3::Ipv6L3Protocol::SetDefaultTtl(uint8_t ttl) [member function]
    cls.add_method('SetDefaultTtl',
                   'void',
                   [param('uint8_t', 'ttl')])
    ## ipv6-l3-protocol.h (module 'internet'): void ns3::Ipv6L3Protocol::SetDefaultTclass(uint8_t tclass) [member function]
    cls.add_method('SetDefaultTclass',
                   'void',
                   [param('uint8_t', 'tclass')])
    ## ipv6-l3-protocol.h (module 'internet'): void ns3::Ipv6L3Protocol::Receive(ns3::Ptr<ns3::NetDevice> device, ns3::Ptr<const ns3::Packet> p, uint16_t protocol, ns3::Address const & from, ns3::Address const & to, ns3::NetDevice::PacketType packetType) [member function]
    cls.add_method('Receive',
                   'void',
                   [param('ns3::Ptr< ns3::NetDevice >', 'device'), param('ns3::Ptr< ns3::Packet const >', 'p'), param('uint16_t', 'protocol'), param('ns3::Address const &', 'from'), param('ns3::Address const &', 'to'), param('ns3::NetDevice::PacketType', 'packetType')])
    ## ipv6-l3-protocol.h (module 'internet'): void ns3::Ipv6L3Protocol::Send(ns3::Ptr<ns3::Packet> packet, ns3::Ipv6Address source, ns3::Ipv6Address destination, uint8_t protocol, ns3::Ptr<ns3::Ipv6Route> route) [member function]
    cls.add_method('Send',
                   'void',
                   [param('ns3::Ptr< ns3::Packet >', 'packet'), param('ns3::Ipv6Address', 'source'), param('ns3::Ipv6Address', 'destination'), param('uint8_t', 'protocol'), param('ns3::Ptr< ns3::Ipv6Route >', 'route')],
                   is_virtual=True)
    ## ipv6-l3-protocol.h (module 'internet'): void ns3::Ipv6L3Protocol::SetRoutingProtocol(ns3::Ptr<ns3::Ipv6RoutingProtocol> routingProtocol) [member function]
    cls.add_method('SetRoutingProtocol',
                   'void',
                   [param('ns3::Ptr< ns3::Ipv6RoutingProtocol >', 'routingProtocol')],
                   is_virtual=True)
    ## ipv6-l3-protocol.h (module 'internet'): ns3::Ptr<ns3::Ipv6RoutingProtocol> ns3::Ipv6L3Protocol::GetRoutingProtocol() const [member function]
    cls.add_method('GetRoutingProtocol',
                   'ns3::Ptr< ns3::Ipv6RoutingProtocol >',
                   [],
                   is_const=True, is_virtual=True)
    ## ipv6-l3-protocol.h (module 'internet'): uint32_t ns3::Ipv6L3Protocol::AddInterface(ns3::Ptr<ns3::NetDevice> device) [member function]
    cls.add_method('AddInterface',
                   'uint32_t',
                   [param('ns3::Ptr< ns3::NetDevice >', 'device')],
                   is_virtual=True)
    ## ipv6-l3-protocol.h (module 'internet'): ns3::Ptr<ns3::Ipv6Interface> ns3::Ipv6L3Protocol::GetInterface(uint32_t i) const [member function]
    cls.add_method('GetInterface',
                   'ns3::Ptr< ns3::Ipv6Interface >',
                   [param('uint32_t', 'i')],
                   is_const=True)
    ## ipv6-l3-protocol.h (module 'internet'): uint32_t ns3::Ipv6L3Protocol::GetNInterfaces() const [member function]
    cls.add_method('GetNInterfaces',
                   'uint32_t',
                   [],
                   is_const=True, is_virtual=True)
    ## ipv6-l3-protocol.h (module 'internet'): int32_t ns3::Ipv6L3Protocol::GetInterfaceForAddress(ns3::Ipv6Address addr) const [member function]
    cls.add_method('GetInterfaceForAddress',
                   'int32_t',
                   [param('ns3::Ipv6Address', 'addr')],
                   is_const=True, is_virtual=True)
    ## ipv6-l3-protocol.h (module 'internet'): int32_t ns3::Ipv6L3Protocol::GetInterfaceForPrefix(ns3::Ipv6Address addr, ns3::Ipv6Prefix mask) const [member function]
    cls.add_method('GetInterfaceForPrefix',
                   'int32_t',
                   [param('ns3::Ipv6Address', 'addr'), param('ns3::Ipv6Prefix', 'mask')],
                   is_const=True, is_virtual=True)
    ## ipv6-l3-protocol.h (module 'internet'): int32_t ns3::Ipv6L3Protocol::GetInterfaceForDevice(ns3::Ptr<const ns3::NetDevice> device) const [member function]
    cls.add_method('GetInterfaceForDevice',
                   'int32_t',
                   [param('ns3::Ptr< ns3::NetDevice const >', 'device')],
                   is_const=True, is_virtual=True)
    ## ipv6-l3-protocol.h (module 'internet'): bool ns3::Ipv6L3Protocol::AddAddress(uint32_t i, ns3::Ipv6InterfaceAddress address) [member function]
    cls.add_method('AddAddress',
                   'bool',
                   [param('uint32_t', 'i'), param('ns3::Ipv6InterfaceAddress', 'address')],
                   is_virtual=True)
    ## ipv6-l3-protocol.h (module 'internet'): ns3::Ipv6InterfaceAddress ns3::Ipv6L3Protocol::GetAddress(uint32_t interfaceIndex, uint32_t addressIndex) const [member function]
    cls.add_method('GetAddress',
                   'ns3::Ipv6InterfaceAddress',
                   [param('uint32_t', 'interfaceIndex'), param('uint32_t', 'addressIndex')],
                   is_const=True, is_virtual=True)
    ## ipv6-l3-protocol.h (module 'internet'): uint32_t ns3::Ipv6L3Protocol::GetNAddresses(uint32_t interface) const [member function]
    cls.add_method('GetNAddresses',
                   'uint32_t',
                   [param('uint32_t', 'interface')],
                   is_const=True, is_virtual=True)
    ## ipv6-l3-protocol.h (module 'internet'): bool ns3::Ipv6L3Protocol::RemoveAddress(uint32_t interfaceIndex, uint32_t addressIndex) [member function]
    cls.add_method('RemoveAddress',
                   'bool',
                   [param('uint32_t', 'interfaceIndex'), param('uint32_t', 'addressIndex')],
                   is_virtual=True)
    ## ipv6-l3-protocol.h (module 'internet'): bool ns3::Ipv6L3Protocol::RemoveAddress(uint32_t interfaceIndex, ns3::Ipv6Address address) [member function]
    cls.add_method('RemoveAddress',
                   'bool',
                   [param('uint32_t', 'interfaceIndex'), param('ns3::Ipv6Address', 'address')],
                   is_virtual=True)
    ## ipv6-l3-protocol.h (module 'internet'): void ns3::Ipv6L3Protocol::SetMetric(uint32_t i, uint16_t metric) [member function]
    cls.add_method('SetMetric',
                   'void',
                   [param('uint32_t', 'i'), param('uint16_t', 'metric')],
                   is_virtual=True)
    ## ipv6-l3-protocol.h (module 'internet'): uint16_t ns3::Ipv6L3Protocol::GetMetric(uint32_t i) const [member function]
    cls.add_method('GetMetric',
                   'uint16_t',
                   [param('uint32_t', 'i')],
                   is_const=True, is_virtual=True)
    ## ipv6-l3-protocol.h (module 'internet'): uint16_t ns3::Ipv6L3Protocol::GetMtu(uint32_t i) const [member function]
    cls.add_method('GetMtu',
                   'uint16_t',
                   [param('uint32_t', 'i')],
                   is_const=True, is_virtual=True)
    ## ipv6-l3-protocol.h (module 'internet'): void ns3::Ipv6L3Protocol::SetPmtu(ns3::Ipv6Address dst, uint32_t pmtu) [member function]
    cls.add_method('SetPmtu',
                   'void',
                   [param('ns3::Ipv6Address', 'dst'), param('uint32_t', 'pmtu')],
                   is_virtual=True)
    ## ipv6-l3-protocol.h (module 'internet'): bool ns3::Ipv6L3Protocol::IsUp(uint32_t i) const [member function]
    cls.add_method('IsUp',
                   'bool',
                   [param('uint32_t', 'i')],
                   is_const=True, is_virtual=True)
    ## ipv6-l3-protocol.h (module 'internet'): void ns3::Ipv6L3Protocol::SetUp(uint32_t i) [member function]
    cls.add_method('SetUp',
                   'void',
                   [param('uint32_t', 'i')],
                   is_virtual=True)
    ## ipv6-l3-protocol.h (module 'internet'): void ns3::Ipv6L3Protocol::SetDown(uint32_t i) [member function]
    cls.add_method('SetDown',
                   'void',
                   [param('uint32_t', 'i')],
                   is_virtual=True)
    ## ipv6-l3-protocol.h (module 'internet'): bool ns3::Ipv6L3Protocol::IsForwarding(uint32_t i) const [member function]
    cls.add_method('IsForwarding',
                   'bool',
                   [param('uint32_t', 'i')],
                   is_const=True, is_virtual=True)
    ## ipv6-l3-protocol.h (module 'internet'): void ns3::Ipv6L3Protocol::SetForwarding(uint32_t i, bool val) [member function]
    cls.add_method('SetForwarding',
                   'void',
                   [param('uint32_t', 'i'), param('bool', 'val')],
                   is_virtual=True)
    ## ipv6-l3-protocol.h (module 'internet'): ns3::Ipv6Address ns3::Ipv6L3Protocol::SourceAddressSelection(uint32_t interface, ns3::Ipv6Address dest) [member function]
    cls.add_method('SourceAddressSelection',
                   'ns3::Ipv6Address',
                   [param('uint32_t', 'interface'), param('ns3::Ipv6Address', 'dest')],
                   is_virtual=True)
    ## ipv6-l3-protocol.h (module 'internet'): ns3::Ptr<ns3::NetDevice> ns3::Ipv6L3Protocol::GetNetDevice(uint32_t i) [member function]
    cls.add_method('GetNetDevice',
                   'ns3::Ptr< ns3::NetDevice >',
                   [param('uint32_t', 'i')],
                   is_virtual=True)
    ## ipv6-l3-protocol.h (module 'internet'): ns3::Ptr<ns3::Icmpv6L4Protocol> ns3::Ipv6L3Protocol::GetIcmpv6() const [member function]
    cls.add_method('GetIcmpv6',
                   'ns3::Ptr< ns3::Icmpv6L4Protocol >',
                   [],
                   is_const=True)
    ## ipv6-l3-protocol.h (module 'internet'): void ns3::Ipv6L3Protocol::AddAutoconfiguredAddress(uint32_t interface, ns3::Ipv6Address network, ns3::Ipv6Prefix mask, uint8_t flags, uint32_t validTime, uint32_t preferredTime, ns3::Ipv6Address defaultRouter=ns3::Ipv6Address::GetZero()) [member function]
    cls.add_method('AddAutoconfiguredAddress',
                   'void',
                   [param('uint32_t', 'interface'), param('ns3::Ipv6Address', 'network'), param('ns3::Ipv6Prefix', 'mask'), param('uint8_t', 'flags'), param('uint32_t', 'validTime'), param('uint32_t', 'preferredTime'), param('ns3::Ipv6Address', 'defaultRouter', default_value='ns3::Ipv6Address::GetZero()')])
    ## ipv6-l3-protocol.h (module 'internet'): void ns3::Ipv6L3Protocol::RemoveAutoconfiguredAddress(uint32_t interface, ns3::Ipv6Address network, ns3::Ipv6Prefix mask, ns3::Ipv6Address defaultRouter) [member function]
    cls.add_method('RemoveAutoconfiguredAddress',
                   'void',
                   [param('uint32_t', 'interface'), param('ns3::Ipv6Address', 'network'), param('ns3::Ipv6Prefix', 'mask'), param('ns3::Ipv6Address', 'defaultRouter')])
    ## ipv6-l3-protocol.h (module 'internet'): void ns3::Ipv6L3Protocol::RegisterExtensions() [member function]
    cls.add_method('RegisterExtensions',
                   'void',
                   [],
                   is_virtual=True)
    ## ipv6-l3-protocol.h (module 'internet'): void ns3::Ipv6L3Protocol::RegisterOptions() [member function]
    cls.add_method('RegisterOptions',
                   'void',
                   [],
                   is_virtual=True)
    ## ipv6-l3-protocol.h (module 'internet'): void ns3::Ipv6L3Protocol::ReportDrop(ns3::Ipv6Header ipHeader, ns3::Ptr<ns3::Packet> p, ns3::Ipv6L3Protocol::DropReason dropReason) [member function]
    cls.add_method('ReportDrop',
                   'void',
                   [param('ns3::Ipv6Header', 'ipHeader'), param('ns3::Ptr< ns3::Packet >', 'p'), param('ns3::Ipv6L3Protocol::DropReason', 'dropReason')],
                   is_virtual=True)
    ## ipv6-l3-protocol.h (module 'internet'): void ns3::Ipv6L3Protocol::AddMulticastAddress(ns3::Ipv6Address address) [member function]
    cls.add_method('AddMulticastAddress',
                   'void',
                   [param('ns3::Ipv6Address', 'address')])
    ## ipv6-l3-protocol.h (module 'internet'): void ns3::Ipv6L3Protocol::AddMulticastAddress(ns3::Ipv6Address address, uint32_t interface) [member function]
    cls.add_method('AddMulticastAddress',
                   'void',
                   [param('ns3::Ipv6Address', 'address'), param('uint32_t', 'interface')])
    ## ipv6-l3-protocol.h (module 'internet'): void ns3::Ipv6L3Protocol::RemoveMulticastAddress(ns3::Ipv6Address address) [member function]
    cls.add_method('RemoveMulticastAddress',
                   'void',
                   [param('ns3::Ipv6Address', 'address')])
    ## ipv6-l3-protocol.h (module 'internet'): void ns3::Ipv6L3Protocol::RemoveMulticastAddress(ns3::Ipv6Address address, uint32_t interface) [member function]
    cls.add_method('RemoveMulticastAddress',
                   'void',
                   [param('ns3::Ipv6Address', 'address'), param('uint32_t', 'interface')])
    ## ipv6-l3-protocol.h (module 'internet'): bool ns3::Ipv6L3Protocol::IsRegisteredMulticastAddress(ns3::Ipv6Address address) const [member function]
    cls.add_method('IsRegisteredMulticastAddress',
                   'bool',
                   [param('ns3::Ipv6Address', 'address')],
                   is_const=True)
    ## ipv6-l3-protocol.h (module 'internet'): bool ns3::Ipv6L3Protocol::IsRegisteredMulticastAddress(ns3::Ipv6Address address, uint32_t interface) const [member function]
    cls.add_method('IsRegisteredMulticastAddress',
                   'bool',
                   [param('ns3::Ipv6Address', 'address'), param('uint32_t', 'interface')],
                   is_const=True)
    ## ipv6-l3-protocol.h (module 'internet'): void ns3::Ipv6L3Protocol::DoDispose() [member function]
    cls.add_method('DoDispose',
                   'void',
                   [],
                   visibility='protected', is_virtual=True)
    ## ipv6-l3-protocol.h (module 'internet'): void ns3::Ipv6L3Protocol::NotifyNewAggregate() [member function]
    cls.add_method('NotifyNewAggregate',
                   'void',
                   [],
                   visibility='protected', is_virtual=True)
    ## ipv6-l3-protocol.h (module 'internet'): void ns3::Ipv6L3Protocol::SetIpForward(bool forward) [member function]
    cls.add_method('SetIpForward',
                   'void',
                   [param('bool', 'forward')],
                   visibility='private', is_virtual=True)
    ## ipv6-l3-protocol.h (module 'internet'): bool ns3::Ipv6L3Protocol::GetIpForward() const [member function]
    cls.add_method('GetIpForward',
                   'bool',
                   [],
                   is_const=True, visibility='private', is_virtual=True)
    ## ipv6-l3-protocol.h (module 'internet'): void ns3::Ipv6L3Protocol::SetMtuDiscover(bool mtuDiscover) [member function]
    cls.add_method('SetMtuDiscover',
                   'void',
                   [param('bool', 'mtuDiscover')],
                   visibility='private', is_virtual=True)
    ## ipv6-l3-protocol.h (module 'internet'): bool ns3::Ipv6L3Protocol::GetMtuDiscover() const [member function]
    cls.add_method('GetMtuDiscover',
                   'bool',
                   [],
                   is_const=True, visibility='private', is_virtual=True)
    ## ipv6-l3-protocol.h (module 'internet'): void ns3::Ipv6L3Protocol::SetSendIcmpv6Redirect(bool sendIcmpv6Redirect) [member function]
    cls.add_method('SetSendIcmpv6Redirect',
                   'void',
                   [param('bool', 'sendIcmpv6Redirect')],
                   visibility='private', is_virtual=True)
    ## ipv6-l3-protocol.h (module 'internet'): bool ns3::Ipv6L3Protocol::GetSendIcmpv6Redirect() const [member function]
    cls.add_method('GetSendIcmpv6Redirect',
                   'bool',
                   [],
                   is_const=True, visibility='private', is_virtual=True)
    return
def register_Ns3Ipv6PmtuCache_methods(root_module, cls):
    """Register pybindgen bindings for ns3::Ipv6PmtuCache (ipv6-pmtu-cache.h, module 'internet')."""
    # Constructors: copy constructor, then default constructor.
    cls.add_constructor([param('ns3::Ipv6PmtuCache const &', 'arg0')])
    cls.add_constructor([])
    # void DoDispose() [virtual]
    cls.add_method('DoDispose', 'void', [], is_virtual=True)
    # uint32_t GetPmtu(ns3::Ipv6Address dst)
    cls.add_method('GetPmtu', 'uint32_t', [param('ns3::Ipv6Address', 'dst')])
    # ns3::Time GetPmtuValidityTime() const
    cls.add_method('GetPmtuValidityTime', 'ns3::Time', [], is_const=True)
    # static ns3::TypeId GetTypeId()
    cls.add_method('GetTypeId', 'ns3::TypeId', [], is_static=True)
    # void SetPmtu(ns3::Ipv6Address dst, uint32_t pmtu)
    cls.add_method('SetPmtu', 'void',
                   [param('ns3::Ipv6Address', 'dst'), param('uint32_t', 'pmtu')])
    # bool SetPmtuValidityTime(ns3::Time validity)
    cls.add_method('SetPmtuValidityTime', 'bool', [param('ns3::Time', 'validity')])
    return
def register_Ns3Ipv6PrefixChecker_methods(root_module, cls):
    """Register pybindgen bindings for ns3::Ipv6PrefixChecker (ipv6-address.h, module 'network')."""
    # Only the default and copy constructors are exposed.
    cls.add_constructor([])
    cls.add_constructor([param('ns3::Ipv6PrefixChecker const &', 'arg0')])
    return
def register_Ns3Ipv6PrefixValue_methods(root_module, cls):
    """Register pybindgen bindings for ns3::Ipv6PrefixValue, the AttributeValue
    wrapper around ns3::Ipv6Prefix (ipv6-address.h, module 'network')."""
    # Constructors: default, from the wrapped value, copy.
    cls.add_constructor([])
    cls.add_constructor([param('ns3::Ipv6Prefix const &', 'value')])
    cls.add_constructor([param('ns3::Ipv6PrefixValue const &', 'arg0')])
    # Standard AttributeValue interface.
    cls.add_method('Copy', 'ns3::Ptr< ns3::AttributeValue >', [],
                   is_const=True, is_virtual=True)
    cls.add_method('DeserializeFromString', 'bool',
                   [param('std::string', 'value'), param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
                   is_virtual=True)
    cls.add_method('Get', 'ns3::Ipv6Prefix', [], is_const=True)
    cls.add_method('SerializeToString', 'std::string',
                   [param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
                   is_const=True, is_virtual=True)
    cls.add_method('Set', 'void', [param('ns3::Ipv6Prefix const &', 'value')])
    return
def register_Ns3Mac48AddressChecker_methods(root_module, cls):
    """Register pybindgen bindings for ns3::Mac48AddressChecker (mac48-address.h, module 'network')."""
    # Only the default and copy constructors are exposed.
    cls.add_constructor([])
    cls.add_constructor([param('ns3::Mac48AddressChecker const &', 'arg0')])
    return
def register_Ns3Mac48AddressValue_methods(root_module, cls):
    """Register pybindgen bindings for ns3::Mac48AddressValue, the AttributeValue
    wrapper around ns3::Mac48Address (mac48-address.h, module 'network')."""
    # Constructors: default, from the wrapped value, copy.
    cls.add_constructor([])
    cls.add_constructor([param('ns3::Mac48Address const &', 'value')])
    cls.add_constructor([param('ns3::Mac48AddressValue const &', 'arg0')])
    # Standard AttributeValue interface.
    cls.add_method('Copy', 'ns3::Ptr< ns3::AttributeValue >', [],
                   is_const=True, is_virtual=True)
    cls.add_method('DeserializeFromString', 'bool',
                   [param('std::string', 'value'), param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
                   is_virtual=True)
    cls.add_method('Get', 'ns3::Mac48Address', [], is_const=True)
    cls.add_method('SerializeToString', 'std::string',
                   [param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
                   is_const=True, is_virtual=True)
    cls.add_method('Set', 'void', [param('ns3::Mac48Address const &', 'value')])
    return
def register_Ns3NetDevice_methods(root_module, cls):
    """Register pybindgen bindings for the abstract base class ns3::NetDevice
    (net-device.h, module 'network').  Most methods are pure virtual."""
    # Callback signatures reused below (kept as the exact strings pybindgen expects).
    link_change_cb = ('ns3::Callback< void, ns3::empty, ns3::empty, ns3::empty, '
                      'ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >')
    promisc_rx_cb = ('ns3::Callback< bool, ns3::Ptr< ns3::NetDevice >, '
                     'ns3::Ptr< ns3::Packet const >, unsigned short, ns3::Address const &, '
                     'ns3::Address const &, ns3::NetDevice::PacketType, ns3::empty, ns3::empty, ns3::empty >')
    rx_cb = ('ns3::Callback< bool, ns3::Ptr< ns3::NetDevice >, '
             'ns3::Ptr< ns3::Packet const >, unsigned short, ns3::Address const &, '
             'ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >')
    # Constructors: default, then copy.
    cls.add_constructor([])
    cls.add_constructor([param('ns3::NetDevice const &', 'arg0')])
    # void AddLinkChangeCallback(Callback<void> callback) [pure virtual]
    cls.add_method('AddLinkChangeCallback', 'void', [param(link_change_cb, 'callback')],
                   is_pure_virtual=True, is_virtual=True)
    # Pure-virtual const accessors.
    cls.add_method('GetAddress', 'ns3::Address', [],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    cls.add_method('GetBroadcast', 'ns3::Address', [],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    cls.add_method('GetChannel', 'ns3::Ptr< ns3::Channel >', [],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    cls.add_method('GetIfIndex', 'uint32_t', [],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    cls.add_method('GetMtu', 'uint16_t', [],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    # GetMulticast is overloaded on the address family.
    cls.add_method('GetMulticast', 'ns3::Address', [param('ns3::Ipv4Address', 'multicastGroup')],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    cls.add_method('GetMulticast', 'ns3::Address', [param('ns3::Ipv6Address', 'addr')],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    cls.add_method('GetNode', 'ns3::Ptr< ns3::Node >', [],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    # static ns3::TypeId GetTypeId()
    cls.add_method('GetTypeId', 'ns3::TypeId', [], is_static=True)
    # Pure-virtual capability predicates.
    for predicate in ('IsBridge', 'IsBroadcast', 'IsLinkUp', 'IsMulticast',
                      'IsPointToPoint', 'NeedsArp'):
        cls.add_method(predicate, 'bool', [],
                       is_pure_virtual=True, is_const=True, is_virtual=True)
    # bool Send(Ptr<Packet>, Address const &, uint16_t) [pure virtual]
    cls.add_method('Send', 'bool',
                   [param('ns3::Ptr< ns3::Packet >', 'packet'), param('ns3::Address const &', 'dest'), param('uint16_t', 'protocolNumber')],
                   is_pure_virtual=True, is_virtual=True)
    # bool SendFrom(Ptr<Packet>, Address const &, Address const &, uint16_t) [pure virtual]
    cls.add_method('SendFrom', 'bool',
                   [param('ns3::Ptr< ns3::Packet >', 'packet'), param('ns3::Address const &', 'source'), param('ns3::Address const &', 'dest'), param('uint16_t', 'protocolNumber')],
                   is_pure_virtual=True, is_virtual=True)
    # Pure-virtual setters.
    cls.add_method('SetAddress', 'void', [param('ns3::Address', 'address')],
                   is_pure_virtual=True, is_virtual=True)
    cls.add_method('SetIfIndex', 'void', [param('uint32_t const', 'index')],
                   is_pure_virtual=True, is_virtual=True)
    cls.add_method('SetMtu', 'bool', [param('uint16_t const', 'mtu')],
                   is_pure_virtual=True, is_virtual=True)
    cls.add_method('SetNode', 'void', [param('ns3::Ptr< ns3::Node >', 'node')],
                   is_pure_virtual=True, is_virtual=True)
    cls.add_method('SetPromiscReceiveCallback', 'void', [param(promisc_rx_cb, 'cb')],
                   is_pure_virtual=True, is_virtual=True)
    cls.add_method('SetReceiveCallback', 'void', [param(rx_cb, 'cb')],
                   is_pure_virtual=True, is_virtual=True)
    cls.add_method('SupportsSendFrom', 'bool', [],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    return
def register_Ns3NixVector_methods(root_module, cls):
    """Register pybindgen bindings for ns3::NixVector (nix-vector.h, module 'network')."""
    # operator<< support for printing.
    cls.add_output_stream_operator()
    # Constructors: default, then copy.
    cls.add_constructor([])
    cls.add_constructor([param('ns3::NixVector const &', 'o')])
    # void AddNeighborIndex(uint32_t newBits, uint32_t numberOfBits)
    cls.add_method('AddNeighborIndex', 'void',
                   [param('uint32_t', 'newBits'), param('uint32_t', 'numberOfBits')])
    # uint32_t BitCount(uint32_t numberOfNeighbors) const
    cls.add_method('BitCount', 'uint32_t', [param('uint32_t', 'numberOfNeighbors')],
                   is_const=True)
    # Ptr<NixVector> Copy() const
    cls.add_method('Copy', 'ns3::Ptr< ns3::NixVector >', [], is_const=True)
    # uint32_t Deserialize(uint32_t const * buffer, uint32_t size)
    cls.add_method('Deserialize', 'uint32_t',
                   [param('uint32_t const *', 'buffer'), param('uint32_t', 'size')])
    # uint32_t ExtractNeighborIndex(uint32_t numberOfBits)
    cls.add_method('ExtractNeighborIndex', 'uint32_t', [param('uint32_t', 'numberOfBits')])
    # uint32_t GetRemainingBits()
    cls.add_method('GetRemainingBits', 'uint32_t', [])
    # uint32_t GetSerializedSize() const
    cls.add_method('GetSerializedSize', 'uint32_t', [], is_const=True)
    # uint32_t Serialize(uint32_t * buffer, uint32_t maxSize) const
    cls.add_method('Serialize', 'uint32_t',
                   [param('uint32_t *', 'buffer'), param('uint32_t', 'maxSize')],
                   is_const=True)
    return
def register_Ns3Node_methods(root_module, cls):
    """Register pybindgen bindings for ns3::Node (node.h, module 'network')."""
    # Callback signatures reused below (kept as the exact strings pybindgen expects).
    device_listener_cb = ('ns3::Callback< void, ns3::Ptr< ns3::NetDevice >, ns3::empty, '
                          'ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, '
                          'ns3::empty, ns3::empty >')
    protocol_handler_cb = ('ns3::Callback< void, ns3::Ptr< ns3::NetDevice >, '
                           'ns3::Ptr< ns3::Packet const >, unsigned short, ns3::Address const &, '
                           'ns3::Address const &, ns3::NetDevice::PacketType, ns3::empty, '
                           'ns3::empty, ns3::empty >')
    # Constructors: copy, default, and explicit system (MPI rank) id.
    cls.add_constructor([param('ns3::Node const &', 'arg0')])
    cls.add_constructor([])
    cls.add_constructor([param('uint32_t', 'systemId')])
    # uint32_t AddApplication(Ptr<Application>) / AddDevice(Ptr<NetDevice>)
    cls.add_method('AddApplication', 'uint32_t', [param('ns3::Ptr< ns3::Application >', 'application')])
    cls.add_method('AddDevice', 'uint32_t', [param('ns3::Ptr< ns3::NetDevice >', 'device')])
    # static bool ChecksumEnabled()
    cls.add_method('ChecksumEnabled', 'bool', [], is_static=True)
    # Indexed const accessors.
    cls.add_method('GetApplication', 'ns3::Ptr< ns3::Application >',
                   [param('uint32_t', 'index')], is_const=True)
    cls.add_method('GetDevice', 'ns3::Ptr< ns3::NetDevice >',
                   [param('uint32_t', 'index')], is_const=True)
    # Simple const getters.
    cls.add_method('GetId', 'uint32_t', [], is_const=True)
    cls.add_method('GetLocalTime', 'ns3::Time', [], is_const=True)
    cls.add_method('GetNApplications', 'uint32_t', [], is_const=True)
    cls.add_method('GetNDevices', 'uint32_t', [], is_const=True)
    cls.add_method('GetSystemId', 'uint32_t', [], is_const=True)
    # static ns3::TypeId GetTypeId()
    cls.add_method('GetTypeId', 'ns3::TypeId', [], is_static=True)
    # Listener / protocol-handler registration.
    cls.add_method('RegisterDeviceAdditionListener', 'void',
                   [param(device_listener_cb, 'listener')])
    cls.add_method('RegisterProtocolHandler', 'void',
                   [param(protocol_handler_cb, 'handler'), param('uint16_t', 'protocolType'), param('ns3::Ptr< ns3::NetDevice >', 'device'), param('bool', 'promiscuous', default_value='false')])
    cls.add_method('UnregisterDeviceAdditionListener', 'void',
                   [param(device_listener_cb, 'listener')])
    cls.add_method('UnregisterProtocolHandler', 'void',
                   [param(protocol_handler_cb, 'handler')])
    # Protected Object lifecycle hooks.
    cls.add_method('DoDispose', 'void', [], visibility='protected', is_virtual=True)
    cls.add_method('DoInitialize', 'void', [], visibility='protected', is_virtual=True)
    return
def register_Ns3ObjectFactoryChecker_methods(root_module, cls):
    """Register pybindgen bindings for ns3::ObjectFactoryChecker (object-factory.h, module 'core')."""
    # Only the default and copy constructors are exposed.
    cls.add_constructor([])
    cls.add_constructor([param('ns3::ObjectFactoryChecker const &', 'arg0')])
    return
def register_Ns3ObjectFactoryValue_methods(root_module, cls):
    """Register pybindgen bindings for ns3::ObjectFactoryValue, the AttributeValue
    wrapper around ns3::ObjectFactory (object-factory.h, module 'core')."""
    # Constructors: default, from the wrapped value, copy.
    cls.add_constructor([])
    cls.add_constructor([param('ns3::ObjectFactory const &', 'value')])
    cls.add_constructor([param('ns3::ObjectFactoryValue const &', 'arg0')])
    # Standard AttributeValue interface.
    cls.add_method('Copy', 'ns3::Ptr< ns3::AttributeValue >', [],
                   is_const=True, is_virtual=True)
    cls.add_method('DeserializeFromString', 'bool',
                   [param('std::string', 'value'), param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
                   is_virtual=True)
    cls.add_method('Get', 'ns3::ObjectFactory', [], is_const=True)
    cls.add_method('SerializeToString', 'std::string',
                   [param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
                   is_const=True, is_virtual=True)
    cls.add_method('Set', 'void', [param('ns3::ObjectFactory const &', 'value')])
    return
def register_Ns3OutputStreamWrapper_methods(root_module, cls):
    """Register pybindgen bindings for ns3::OutputStreamWrapper
    (output-stream-wrapper.h, module 'network')."""
    # Constructors: copy, from a file name + open mode, and from an existing stream.
    cls.add_constructor([param('ns3::OutputStreamWrapper const &', 'arg0')])
    cls.add_constructor([param('std::string', 'filename'), param('std::ios_base::openmode', 'filemode')])
    cls.add_constructor([param('std::ostream *', 'os')])
    # std::ostream * GetStream()
    cls.add_method('GetStream', 'std::ostream *', [])
    return
def register_Ns3Packet_methods(root_module, cls):
    """Register pybindgen bindings for ns3::Packet (packet.h, module 'network')."""
    # operator<< support for printing.
    cls.add_output_stream_operator()
    # Constructors: default, copy, sized, from a raw buffer (with and without
    # the 'magic' deserialization flag).
    cls.add_constructor([])
    cls.add_constructor([param('ns3::Packet const &', 'o')])
    cls.add_constructor([param('uint32_t', 'size')])
    cls.add_constructor([param('uint8_t const *', 'buffer'), param('uint32_t', 'size'), param('bool', 'magic')])
    cls.add_constructor([param('uint8_t const *', 'buffer'), param('uint32_t', 'size')])
    # Buffer / header / trailer / tag mutation.
    cls.add_method('AddAtEnd', 'void', [param('ns3::Ptr< ns3::Packet const >', 'packet')])
    cls.add_method('AddByteTag', 'void', [param('ns3::Tag const &', 'tag')], is_const=True)
    cls.add_method('AddHeader', 'void', [param('ns3::Header const &', 'header')])
    cls.add_method('AddPacketTag', 'void', [param('ns3::Tag const &', 'tag')], is_const=True)
    cls.add_method('AddPaddingAtEnd', 'void', [param('uint32_t', 'size')])
    cls.add_method('AddTrailer', 'void', [param('ns3::Trailer const &', 'trailer')])
    # Metadata iteration and copying.
    cls.add_method('BeginItem', 'ns3::PacketMetadata::ItemIterator', [], is_const=True)
    cls.add_method('Copy', 'ns3::Ptr< ns3::Packet >', [], is_const=True)
    # CopyData is overloaded: into a byte buffer, or onto an output stream.
    cls.add_method('CopyData', 'uint32_t',
                   [param('uint8_t *', 'buffer'), param('uint32_t', 'size')], is_const=True)
    cls.add_method('CopyData', 'void',
                   [param('std::ostream *', 'os'), param('uint32_t', 'size')], is_const=True)
    cls.add_method('CreateFragment', 'ns3::Ptr< ns3::Packet >',
                   [param('uint32_t', 'start'), param('uint32_t', 'length')], is_const=True)
    # Static global toggles.
    cls.add_method('EnableChecking', 'void', [], is_static=True)
    cls.add_method('EnablePrinting', 'void', [], is_static=True)
    # Tag lookup / iteration.
    cls.add_method('FindFirstMatchingByteTag', 'bool', [param('ns3::Tag &', 'tag')], is_const=True)
    cls.add_method('GetByteTagIterator', 'ns3::ByteTagIterator', [], is_const=True)
    cls.add_method('GetNixVector', 'ns3::Ptr< ns3::NixVector >', [], is_const=True)
    cls.add_method('GetPacketTagIterator', 'ns3::PacketTagIterator', [], is_const=True)
    # Size / identity accessors.
    cls.add_method('GetSerializedSize', 'uint32_t', [], is_const=True)
    cls.add_method('GetSize', 'uint32_t', [], is_const=True)
    cls.add_method('GetUid', 'uint64_t', [], is_const=True)
    # Non-destructive inspection (peek).
    cls.add_method('PeekHeader', 'uint32_t', [param('ns3::Header &', 'header')], is_const=True)
    cls.add_method('PeekHeader', 'uint32_t',
                   [param('ns3::Header &', 'header'), param('uint32_t', 'size')], is_const=True)
    cls.add_method('PeekPacketTag', 'bool', [param('ns3::Tag &', 'tag')], is_const=True)
    cls.add_method('PeekTrailer', 'uint32_t', [param('ns3::Trailer &', 'trailer')])
    # Printing helpers.
    cls.add_method('Print', 'void', [param('std::ostream &', 'os')], is_const=True)
    cls.add_method('PrintByteTags', 'void', [param('std::ostream &', 'os')], is_const=True)
    cls.add_method('PrintPacketTags', 'void', [param('std::ostream &', 'os')], is_const=True)
    # Destructive removal.
    cls.add_method('RemoveAllByteTags', 'void', [])
    cls.add_method('RemoveAllPacketTags', 'void', [])
    cls.add_method('RemoveAtEnd', 'void', [param('uint32_t', 'size')])
    cls.add_method('RemoveAtStart', 'void', [param('uint32_t', 'size')])
    cls.add_method('RemoveHeader', 'uint32_t', [param('ns3::Header &', 'header')])
    cls.add_method('RemoveHeader', 'uint32_t',
                   [param('ns3::Header &', 'header'), param('uint32_t', 'size')])
    cls.add_method('RemovePacketTag', 'bool', [param('ns3::Tag &', 'tag')])
    cls.add_method('RemoveTrailer', 'uint32_t', [param('ns3::Trailer &', 'trailer')])
    cls.add_method('ReplacePacketTag', 'bool', [param('ns3::Tag &', 'tag')])
    # Serialization and nix-vector routing support.
    cls.add_method('Serialize', 'uint32_t',
                   [param('uint8_t *', 'buffer'), param('uint32_t', 'maxSize')], is_const=True)
    cls.add_method('SetNixVector', 'void', [param('ns3::Ptr< ns3::NixVector >', 'nixVector')])
    cls.add_method('ToString', 'std::string', [], is_const=True)
    return
def register_Ns3QueueItem_methods(root_module, cls):
    """Register pybindgen bindings for ns3::QueueItem (queue-item.h, module 'network')."""
    # operator<< support for printing.
    cls.add_output_stream_operator()
    # Constructor wrapping a packet.
    cls.add_constructor([param('ns3::Ptr< ns3::Packet >', 'p')])
    # Ptr<Packet> GetPacket() const
    cls.add_method('GetPacket', 'ns3::Ptr< ns3::Packet >', [], is_const=True)
    # uint32_t GetSize() const [virtual]
    cls.add_method('GetSize', 'uint32_t', [], is_const=True, is_virtual=True)
    # bool GetUint8Value(Uint8Values field, uint8_t & value) const [virtual]
    cls.add_method('GetUint8Value', 'bool',
                   [param('ns3::QueueItem::Uint8Values', 'field'), param('uint8_t &', 'value')],
                   is_const=True, is_virtual=True)
    # void Print(std::ostream & os) const [virtual]
    cls.add_method('Print', 'void', [param('std::ostream &', 'os')],
                   is_const=True, is_virtual=True)
    return
def register_Ns3TimeValue_methods(root_module, cls):
    """Register Python bindings for ns3::TimeValue (nstime.h, module 'core')."""
    checker_t = 'ns3::Ptr< ns3::AttributeChecker const >'
    # Constructors: default, from the wrapped ns3::Time, and copy.
    cls.add_constructor([])
    cls.add_constructor([param('ns3::Time const &', 'value')])
    cls.add_constructor([param('ns3::TimeValue const &', 'arg0')])
    # ns3::AttributeValue interface plus the typed Get/Set pair.
    cls.add_method('Copy', 'ns3::Ptr< ns3::AttributeValue >', [],
                   is_const=True, is_virtual=True)
    cls.add_method('DeserializeFromString', 'bool',
                   [param('std::string', 'value'), param(checker_t, 'checker')],
                   is_virtual=True)
    cls.add_method('Get', 'ns3::Time', [], is_const=True)
    cls.add_method('SerializeToString', 'std::string',
                   [param(checker_t, 'checker')],
                   is_const=True, is_virtual=True)
    cls.add_method('Set', 'void', [param('ns3::Time const &', 'value')])
    return
def register_Ns3TypeIdChecker_methods(root_module, cls):
    """Register Python bindings for ns3::TypeIdChecker (type-id.h, module 'core')."""
    # Default constructor followed by the copy constructor.
    for ctor_params in ([], [param('ns3::TypeIdChecker const &', 'arg0')]):
        cls.add_constructor(ctor_params)
    return
def register_Ns3TypeIdValue_methods(root_module, cls):
    """Register Python bindings for ns3::TypeIdValue (type-id.h, module 'core')."""
    checker_t = 'ns3::Ptr< ns3::AttributeChecker const >'
    # Constructors: default, from the wrapped ns3::TypeId, and copy.
    cls.add_constructor([])
    cls.add_constructor([param('ns3::TypeId const &', 'value')])
    cls.add_constructor([param('ns3::TypeIdValue const &', 'arg0')])
    # ns3::AttributeValue interface plus the typed Get/Set pair.
    cls.add_method('Copy', 'ns3::Ptr< ns3::AttributeValue >', [],
                   is_const=True, is_virtual=True)
    cls.add_method('DeserializeFromString', 'bool',
                   [param('std::string', 'value'), param(checker_t, 'checker')],
                   is_virtual=True)
    cls.add_method('Get', 'ns3::TypeId', [], is_const=True)
    cls.add_method('SerializeToString', 'std::string',
                   [param(checker_t, 'checker')],
                   is_const=True, is_virtual=True)
    cls.add_method('Set', 'void', [param('ns3::TypeId const &', 'value')])
    return
def register_Ns3AddressChecker_methods(root_module, cls):
    """Register Python bindings for ns3::AddressChecker (address.h, module 'network')."""
    # Default constructor followed by the copy constructor.
    for ctor_params in ([], [param('ns3::AddressChecker const &', 'arg0')]):
        cls.add_constructor(ctor_params)
    return
def register_Ns3AddressValue_methods(root_module, cls):
    """Register Python bindings for ns3::AddressValue (address.h, module 'network')."""
    checker_t = 'ns3::Ptr< ns3::AttributeChecker const >'
    # Constructors: default, from the wrapped ns3::Address, and copy.
    cls.add_constructor([])
    cls.add_constructor([param('ns3::Address const &', 'value')])
    cls.add_constructor([param('ns3::AddressValue const &', 'arg0')])
    # ns3::AttributeValue interface plus the typed Get/Set pair.
    cls.add_method('Copy', 'ns3::Ptr< ns3::AttributeValue >', [],
                   is_const=True, is_virtual=True)
    cls.add_method('DeserializeFromString', 'bool',
                   [param('std::string', 'value'), param(checker_t, 'checker')],
                   is_virtual=True)
    cls.add_method('Get', 'ns3::Address', [], is_const=True)
    cls.add_method('SerializeToString', 'std::string',
                   [param(checker_t, 'checker')],
                   is_const=True, is_virtual=True)
    cls.add_method('Set', 'void', [param('ns3::Address const &', 'value')])
    return
def register_Ns3CallbackImpl__Bool_Ns3Ptr__lt__ns3Socket__gt___Const_ns3Address___amp___Ns3Empty_Ns3Empty_Ns3Empty_Ns3Empty_Ns3Empty_Ns3Empty_Ns3Empty_methods(root_module, cls):
    """Register bindings for ns3::CallbackImpl<bool, Ptr<Socket>, const Address&, empty...> (callback.h, module 'core')."""
    copy_arg_t = 'ns3::CallbackImpl< bool, ns3::Ptr< ns3::Socket >, ns3::Address const &, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty > const &'
    # Default and copy constructors.
    cls.add_constructor([])
    cls.add_constructor([param(copy_arg_t, 'arg0')])
    # Static type-id helper and its virtual accessor.
    cls.add_method('DoGetTypeid', 'std::string', [], is_static=True)
    cls.add_method('GetTypeid', 'std::string', [], is_const=True, is_virtual=True)
    # operator() is exposed to Python as __call__.
    cls.add_method('operator()', 'bool',
                   [param('ns3::Ptr< ns3::Socket >', 'arg0'), param('ns3::Address const &', 'arg1')],
                   is_pure_virtual=True, is_virtual=True, custom_name=u'__call__')
    return
def register_Ns3CallbackImpl__Ns3ObjectBase___star___Ns3Empty_Ns3Empty_Ns3Empty_Ns3Empty_Ns3Empty_Ns3Empty_Ns3Empty_Ns3Empty_Ns3Empty_methods(root_module, cls):
    """Register bindings for ns3::CallbackImpl<ObjectBase*, empty...> (callback.h, module 'core')."""
    copy_arg_t = 'ns3::CallbackImpl< ns3::ObjectBase *, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty > const &'
    # Default and copy constructors.
    cls.add_constructor([])
    cls.add_constructor([param(copy_arg_t, 'arg0')])
    # Static type-id helper and its virtual accessor.
    cls.add_method('DoGetTypeid', 'std::string', [], is_static=True)
    cls.add_method('GetTypeid', 'std::string', [], is_const=True, is_virtual=True)
    # Zero-argument operator(), exposed to Python as __call__.
    cls.add_method('operator()', 'ns3::ObjectBase *', [],
                   is_pure_virtual=True, is_virtual=True, custom_name=u'__call__')
    return
def register_Ns3CallbackImpl__Void_Const_ns3Ipv4Header___amp___Ns3Ptr__lt__const_ns3Packet__gt___Ns3Ipv4L3ProtocolDropReason_Ns3Ptr__lt__ns3Ipv4__gt___Unsigned_int_Ns3Empty_Ns3Empty_Ns3Empty_Ns3Empty_methods(root_module, cls):
    """Register bindings for ns3::CallbackImpl<void, const Ipv4Header&, Ptr<const Packet>, Ipv4L3Protocol::DropReason, Ptr<Ipv4>, unsigned int, empty...> (callback.h, module 'core')."""
    copy_arg_t = 'ns3::CallbackImpl< void, ns3::Ipv4Header const &, ns3::Ptr< ns3::Packet const >, ns3::Ipv4L3Protocol::DropReason, ns3::Ptr< ns3::Ipv4 >, unsigned int, ns3::empty, ns3::empty, ns3::empty, ns3::empty > const &'
    # Default and copy constructors.
    cls.add_constructor([])
    cls.add_constructor([param(copy_arg_t, 'arg0')])
    # Static type-id helper and its virtual accessor.
    cls.add_method('DoGetTypeid', 'std::string', [], is_static=True)
    cls.add_method('GetTypeid', 'std::string', [], is_const=True, is_virtual=True)
    # Five-argument operator(), exposed to Python as __call__.
    cls.add_method('operator()', 'void',
                   [param('ns3::Ipv4Header const &', 'arg0'),
                    param('ns3::Ptr< ns3::Packet const >', 'arg1'),
                    param('ns3::Ipv4L3Protocol::DropReason', 'arg2'),
                    param('ns3::Ptr< ns3::Ipv4 >', 'arg3'),
                    param('unsigned int', 'arg4')],
                   is_pure_virtual=True, is_virtual=True, custom_name=u'__call__')
    return
def register_Ns3CallbackImpl__Void_Const_ns3Ipv4Header___amp___Ns3Ptr__lt__const_ns3Packet__gt___Unsigned_int_Ns3Empty_Ns3Empty_Ns3Empty_Ns3Empty_Ns3Empty_Ns3Empty_methods(root_module, cls):
    """Register bindings for ns3::CallbackImpl<void, const Ipv4Header&, Ptr<const Packet>, unsigned int, empty...> (callback.h, module 'core')."""
    copy_arg_t = 'ns3::CallbackImpl< void, ns3::Ipv4Header const &, ns3::Ptr< ns3::Packet const >, unsigned int, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty > const &'
    # Default and copy constructors.
    cls.add_constructor([])
    cls.add_constructor([param(copy_arg_t, 'arg0')])
    # Static type-id helper and its virtual accessor.
    cls.add_method('DoGetTypeid', 'std::string', [], is_static=True)
    cls.add_method('GetTypeid', 'std::string', [], is_const=True, is_virtual=True)
    # Three-argument operator(), exposed to Python as __call__.
    cls.add_method('operator()', 'void',
                   [param('ns3::Ipv4Header const &', 'arg0'),
                    param('ns3::Ptr< ns3::Packet const >', 'arg1'),
                    param('unsigned int', 'arg2')],
                   is_pure_virtual=True, is_virtual=True, custom_name=u'__call__')
    return
def register_Ns3CallbackImpl__Void_Const_ns3Ipv6Header___amp___Ns3Ptr__lt__const_ns3Packet__gt___Ns3Ipv6L3ProtocolDropReason_Ns3Ptr__lt__ns3Ipv6__gt___Unsigned_int_Ns3Empty_Ns3Empty_Ns3Empty_Ns3Empty_methods(root_module, cls):
    """Register bindings for ns3::CallbackImpl<void, const Ipv6Header&, Ptr<const Packet>, Ipv6L3Protocol::DropReason, Ptr<Ipv6>, unsigned int, empty...> (callback.h, module 'core')."""
    copy_arg_t = 'ns3::CallbackImpl< void, ns3::Ipv6Header const &, ns3::Ptr< ns3::Packet const >, ns3::Ipv6L3Protocol::DropReason, ns3::Ptr< ns3::Ipv6 >, unsigned int, ns3::empty, ns3::empty, ns3::empty, ns3::empty > const &'
    # Default and copy constructors.
    cls.add_constructor([])
    cls.add_constructor([param(copy_arg_t, 'arg0')])
    # Static type-id helper and its virtual accessor.
    cls.add_method('DoGetTypeid', 'std::string', [], is_static=True)
    cls.add_method('GetTypeid', 'std::string', [], is_const=True, is_virtual=True)
    # Five-argument operator(), exposed to Python as __call__.
    cls.add_method('operator()', 'void',
                   [param('ns3::Ipv6Header const &', 'arg0'),
                    param('ns3::Ptr< ns3::Packet const >', 'arg1'),
                    param('ns3::Ipv6L3Protocol::DropReason', 'arg2'),
                    param('ns3::Ptr< ns3::Ipv6 >', 'arg3'),
                    param('unsigned int', 'arg4')],
                   is_pure_virtual=True, is_virtual=True, custom_name=u'__call__')
    return
def register_Ns3CallbackImpl__Void_Const_ns3Ipv6Header___amp___Ns3Ptr__lt__const_ns3Packet__gt___Unsigned_int_Ns3Empty_Ns3Empty_Ns3Empty_Ns3Empty_Ns3Empty_Ns3Empty_methods(root_module, cls):
    """Register bindings for ns3::CallbackImpl<void, const Ipv6Header&, Ptr<const Packet>, unsigned int, empty...> (callback.h, module 'core')."""
    copy_arg_t = 'ns3::CallbackImpl< void, ns3::Ipv6Header const &, ns3::Ptr< ns3::Packet const >, unsigned int, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty > const &'
    # Default and copy constructors.
    cls.add_constructor([])
    cls.add_constructor([param(copy_arg_t, 'arg0')])
    # Static type-id helper and its virtual accessor.
    cls.add_method('DoGetTypeid', 'std::string', [], is_static=True)
    cls.add_method('GetTypeid', 'std::string', [], is_const=True, is_virtual=True)
    # Three-argument operator(), exposed to Python as __call__.
    cls.add_method('operator()', 'void',
                   [param('ns3::Ipv6Header const &', 'arg0'),
                    param('ns3::Ptr< ns3::Packet const >', 'arg1'),
                    param('unsigned int', 'arg2')],
                   is_pure_virtual=True, is_virtual=True, custom_name=u'__call__')
    return
def register_Ns3CallbackImpl__Void_Ns3Ptr__lt__const_ns3Packet__gt___Ns3Ptr__lt__ns3Ipv4__gt___Unsigned_int_Ns3Empty_Ns3Empty_Ns3Empty_Ns3Empty_Ns3Empty_Ns3Empty_methods(root_module, cls):
    """Register bindings for ns3::CallbackImpl<void, Ptr<const Packet>, Ptr<Ipv4>, unsigned int, empty...> (callback.h, module 'core')."""
    copy_arg_t = 'ns3::CallbackImpl< void, ns3::Ptr< ns3::Packet const >, ns3::Ptr< ns3::Ipv4 >, unsigned int, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty > const &'
    # Default and copy constructors.
    cls.add_constructor([])
    cls.add_constructor([param(copy_arg_t, 'arg0')])
    # Static type-id helper and its virtual accessor.
    cls.add_method('DoGetTypeid', 'std::string', [], is_static=True)
    cls.add_method('GetTypeid', 'std::string', [], is_const=True, is_virtual=True)
    # Three-argument operator(), exposed to Python as __call__.
    cls.add_method('operator()', 'void',
                   [param('ns3::Ptr< ns3::Packet const >', 'arg0'),
                    param('ns3::Ptr< ns3::Ipv4 >', 'arg1'),
                    param('unsigned int', 'arg2')],
                   is_pure_virtual=True, is_virtual=True, custom_name=u'__call__')
    return
def register_Ns3CallbackImpl__Void_Ns3Ptr__lt__const_ns3Packet__gt___Ns3Ptr__lt__ns3Ipv6__gt___Unsigned_int_Ns3Empty_Ns3Empty_Ns3Empty_Ns3Empty_Ns3Empty_Ns3Empty_methods(root_module, cls):
    """Register bindings for ns3::CallbackImpl<void, Ptr<const Packet>, Ptr<Ipv6>, unsigned int, empty...> (callback.h, module 'core')."""
    copy_arg_t = 'ns3::CallbackImpl< void, ns3::Ptr< ns3::Packet const >, ns3::Ptr< ns3::Ipv6 >, unsigned int, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty > const &'
    # Default and copy constructors.
    cls.add_constructor([])
    cls.add_constructor([param(copy_arg_t, 'arg0')])
    # Static type-id helper and its virtual accessor.
    cls.add_method('DoGetTypeid', 'std::string', [], is_static=True)
    cls.add_method('GetTypeid', 'std::string', [], is_const=True, is_virtual=True)
    # Three-argument operator(), exposed to Python as __call__.
    cls.add_method('operator()', 'void',
                   [param('ns3::Ptr< ns3::Packet const >', 'arg0'),
                    param('ns3::Ptr< ns3::Ipv6 >', 'arg1'),
                    param('unsigned int', 'arg2')],
                   is_pure_virtual=True, is_virtual=True, custom_name=u'__call__')
    return
def register_Ns3CallbackImpl__Void_Ns3Ptr__lt__ns3NetDevice__gt___Ns3Ptr__lt__const_ns3Packet__gt___Unsigned_short_Const_ns3Address___amp___Const_ns3Address___amp___Ns3NetDevicePacketType_Ns3Empty_Ns3Empty_Ns3Empty_methods(root_module, cls):
    """Register bindings for ns3::CallbackImpl<void, Ptr<NetDevice>, Ptr<const Packet>, unsigned short, const Address&, const Address&, NetDevice::PacketType, empty...> (callback.h, module 'core')."""
    copy_arg_t = 'ns3::CallbackImpl< void, ns3::Ptr< ns3::NetDevice >, ns3::Ptr< ns3::Packet const >, unsigned short, ns3::Address const &, ns3::Address const &, ns3::NetDevice::PacketType, ns3::empty, ns3::empty, ns3::empty > const &'
    # Default and copy constructors.
    cls.add_constructor([])
    cls.add_constructor([param(copy_arg_t, 'arg0')])
    # Static type-id helper and its virtual accessor.
    cls.add_method('DoGetTypeid', 'std::string', [], is_static=True)
    cls.add_method('GetTypeid', 'std::string', [], is_const=True, is_virtual=True)
    # Six-argument operator(), exposed to Python as __call__.
    cls.add_method('operator()', 'void',
                   [param('ns3::Ptr< ns3::NetDevice >', 'arg0'),
                    param('ns3::Ptr< ns3::Packet const >', 'arg1'),
                    param('short unsigned int', 'arg2'),
                    param('ns3::Address const &', 'arg3'),
                    param('ns3::Address const &', 'arg4'),
                    param('ns3::NetDevice::PacketType', 'arg5')],
                   is_pure_virtual=True, is_virtual=True, custom_name=u'__call__')
    return
def register_Ns3CallbackImpl__Void_Ns3Ptr__lt__ns3NetDevice__gt___Ns3Empty_Ns3Empty_Ns3Empty_Ns3Empty_Ns3Empty_Ns3Empty_Ns3Empty_Ns3Empty_methods(root_module, cls):
    """Register bindings for ns3::CallbackImpl<void, Ptr<NetDevice>, empty...> (callback.h, module 'core')."""
    copy_arg_t = 'ns3::CallbackImpl< void, ns3::Ptr< ns3::NetDevice >, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty > const &'
    # Default and copy constructors.
    cls.add_constructor([])
    cls.add_constructor([param(copy_arg_t, 'arg0')])
    # Static type-id helper and its virtual accessor.
    cls.add_method('DoGetTypeid', 'std::string', [], is_static=True)
    cls.add_method('GetTypeid', 'std::string', [], is_const=True, is_virtual=True)
    # One-argument operator(), exposed to Python as __call__.
    cls.add_method('operator()', 'void',
                   [param('ns3::Ptr< ns3::NetDevice >', 'arg0')],
                   is_pure_virtual=True, is_virtual=True, custom_name=u'__call__')
    return
def register_Ns3CallbackImpl__Void_Ns3Ptr__lt__ns3Socket__gt___Const_ns3Address___amp___Ns3Empty_Ns3Empty_Ns3Empty_Ns3Empty_Ns3Empty_Ns3Empty_Ns3Empty_methods(root_module, cls):
    """Register bindings for ns3::CallbackImpl<void, Ptr<Socket>, const Address&, empty...> (callback.h, module 'core')."""
    copy_arg_t = 'ns3::CallbackImpl< void, ns3::Ptr< ns3::Socket >, ns3::Address const &, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty > const &'
    # Default and copy constructors.
    cls.add_constructor([])
    cls.add_constructor([param(copy_arg_t, 'arg0')])
    # Static type-id helper and its virtual accessor.
    cls.add_method('DoGetTypeid', 'std::string', [], is_static=True)
    cls.add_method('GetTypeid', 'std::string', [], is_const=True, is_virtual=True)
    # Two-argument operator(), exposed to Python as __call__.
    cls.add_method('operator()', 'void',
                   [param('ns3::Ptr< ns3::Socket >', 'arg0'),
                    param('ns3::Address const &', 'arg1')],
                   is_pure_virtual=True, is_virtual=True, custom_name=u'__call__')
    return
def register_Ns3CallbackImpl__Void_Ns3Ptr__lt__ns3Socket__gt___Ns3Empty_Ns3Empty_Ns3Empty_Ns3Empty_Ns3Empty_Ns3Empty_Ns3Empty_Ns3Empty_methods(root_module, cls):
    """Register bindings for ns3::CallbackImpl<void, Ptr<Socket>, empty...> (callback.h, module 'core')."""
    copy_arg_t = 'ns3::CallbackImpl< void, ns3::Ptr< ns3::Socket >, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty > const &'
    # Default and copy constructors.
    cls.add_constructor([])
    cls.add_constructor([param(copy_arg_t, 'arg0')])
    # Static type-id helper and its virtual accessor.
    cls.add_method('DoGetTypeid', 'std::string', [], is_static=True)
    cls.add_method('GetTypeid', 'std::string', [], is_const=True, is_virtual=True)
    # One-argument operator(), exposed to Python as __call__.
    cls.add_method('operator()', 'void',
                   [param('ns3::Ptr< ns3::Socket >', 'arg0')],
                   is_pure_virtual=True, is_virtual=True, custom_name=u'__call__')
    return
def register_Ns3CallbackImpl__Void_Ns3Ptr__lt__ns3Socket__gt___Unsigned_int_Ns3Empty_Ns3Empty_Ns3Empty_Ns3Empty_Ns3Empty_Ns3Empty_Ns3Empty_methods(root_module, cls):
    """Register Python wrappers for ns3::CallbackImpl<void, Ptr<Socket>, unsigned int, ...>.

    Auto-generated pybindgen registration for the two-argument
    (socket, unsigned int) callback implementation: binds the default and
    copy constructors, the static DoGetTypeid / virtual GetTypeid pair, and
    operator(), which is exposed to Python as __call__.
    """
    ## callback.h (module 'core'): ns3::CallbackImpl<void, ns3::Ptr<ns3::Socket>, unsigned int, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty>::CallbackImpl() [constructor]
    cls.add_constructor([])
    ## callback.h (module 'core'): ns3::CallbackImpl<void, ns3::Ptr<ns3::Socket>, unsigned int, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty>::CallbackImpl(ns3::CallbackImpl<void, ns3::Ptr<ns3::Socket>, unsigned int, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty> const & arg0) [constructor]
    cls.add_constructor([param('ns3::CallbackImpl< void, ns3::Ptr< ns3::Socket >, unsigned int, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty > const &', 'arg0')])
    ## callback.h (module 'core'): static std::string ns3::CallbackImpl<void, ns3::Ptr<ns3::Socket>, unsigned int, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty>::DoGetTypeid() [member function]
    cls.add_method('DoGetTypeid', 
                   'std::string', 
                   [], 
                   is_static=True)
    ## callback.h (module 'core'): std::string ns3::CallbackImpl<void, ns3::Ptr<ns3::Socket>, unsigned int, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty>::GetTypeid() const [member function]
    cls.add_method('GetTypeid', 
                   'std::string', 
                   [], 
                   is_const=True, is_virtual=True)
    ## callback.h (module 'core'): void ns3::CallbackImpl<void, ns3::Ptr<ns3::Socket>, unsigned int, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty>::operator()(ns3::Ptr<ns3::Socket> arg0, unsigned int arg1) [member operator]
    cls.add_method('operator()', 
                   'void', 
                   [param('ns3::Ptr< ns3::Socket >', 'arg0'), param('unsigned int', 'arg1')], 
                   is_pure_virtual=True, is_virtual=True, custom_name=u'__call__')
    return
def register_Ns3QueueDiscItem_methods(root_module, cls):
    """Register Python wrappers for ns3::QueueDiscItem (queue-item.h, module 'network').

    Auto-generated pybindgen registration: binds the (packet, address,
    protocol) constructor, the accessor/mutator pairs for TX queue index and
    timestamp, and the virtual AddHeader/Print/Mark/Hash interface.
    """
    ## queue-item.h (module 'network'): ns3::QueueDiscItem::QueueDiscItem(ns3::Ptr<ns3::Packet> p, ns3::Address const & addr, uint16_t protocol) [constructor]
    cls.add_constructor([param('ns3::Ptr< ns3::Packet >', 'p'), param('ns3::Address const &', 'addr'), param('uint16_t', 'protocol')])
    ## queue-item.h (module 'network'): ns3::Address ns3::QueueDiscItem::GetAddress() const [member function]
    cls.add_method('GetAddress', 
                   'ns3::Address', 
                   [], 
                   is_const=True)
    ## queue-item.h (module 'network'): uint16_t ns3::QueueDiscItem::GetProtocol() const [member function]
    cls.add_method('GetProtocol', 
                   'uint16_t', 
                   [], 
                   is_const=True)
    ## queue-item.h (module 'network'): uint8_t ns3::QueueDiscItem::GetTxQueueIndex() const [member function]
    cls.add_method('GetTxQueueIndex', 
                   'uint8_t', 
                   [], 
                   is_const=True)
    ## queue-item.h (module 'network'): void ns3::QueueDiscItem::SetTxQueueIndex(uint8_t txq) [member function]
    cls.add_method('SetTxQueueIndex', 
                   'void', 
                   [param('uint8_t', 'txq')])
    ## queue-item.h (module 'network'): ns3::Time ns3::QueueDiscItem::GetTimeStamp() const [member function]
    cls.add_method('GetTimeStamp', 
                   'ns3::Time', 
                   [], 
                   is_const=True)
    ## queue-item.h (module 'network'): void ns3::QueueDiscItem::SetTimeStamp(ns3::Time t) [member function]
    cls.add_method('SetTimeStamp', 
                   'void', 
                   [param('ns3::Time', 't')])
    ## queue-item.h (module 'network'): void ns3::QueueDiscItem::AddHeader() [member function]
    cls.add_method('AddHeader', 
                   'void', 
                   [], 
                   is_pure_virtual=True, is_virtual=True)
    ## queue-item.h (module 'network'): void ns3::QueueDiscItem::Print(std::ostream & os) const [member function]
    cls.add_method('Print', 
                   'void', 
                   [param('std::ostream &', 'os')], 
                   is_const=True, is_virtual=True)
    ## queue-item.h (module 'network'): bool ns3::QueueDiscItem::Mark() [member function]
    cls.add_method('Mark', 
                   'bool', 
                   [], 
                   is_pure_virtual=True, is_virtual=True)
    ## queue-item.h (module 'network'): uint32_t ns3::QueueDiscItem::Hash(uint32_t perturbation=0) const [member function]
    cls.add_method('Hash', 
                   'uint32_t', 
                   [param('uint32_t', 'perturbation', default_value='0')], 
                   is_const=True, is_virtual=True)
    return
def register_Ns3HashImplementation_methods(root_module, cls):
    """Register Python wrappers for ns3::Hash::Implementation (hash-function.h, module 'core').

    Auto-generated pybindgen registration for the abstract hash-function
    base class: copy/default constructors, the pure-virtual GetHash32, the
    virtual GetHash64 (with a default implementation in C++), and clear().
    """
    ## hash-function.h (module 'core'): ns3::Hash::Implementation::Implementation(ns3::Hash::Implementation const & arg0) [constructor]
    cls.add_constructor([param('ns3::Hash::Implementation const &', 'arg0')])
    ## hash-function.h (module 'core'): ns3::Hash::Implementation::Implementation() [constructor]
    cls.add_constructor([])
    ## hash-function.h (module 'core'): uint32_t ns3::Hash::Implementation::GetHash32(char const * buffer, std::size_t const size) [member function]
    cls.add_method('GetHash32', 
                   'uint32_t', 
                   [param('char const *', 'buffer'), param('std::size_t const', 'size')], 
                   is_pure_virtual=True, is_virtual=True)
    ## hash-function.h (module 'core'): uint64_t ns3::Hash::Implementation::GetHash64(char const * buffer, std::size_t const size) [member function]
    cls.add_method('GetHash64', 
                   'uint64_t', 
                   [param('char const *', 'buffer'), param('std::size_t const', 'size')], 
                   is_virtual=True)
    ## hash-function.h (module 'core'): void ns3::Hash::Implementation::clear() [member function]
    cls.add_method('clear', 
                   'void', 
                   [], 
                   is_pure_virtual=True, is_virtual=True)
    return
def register_Ns3HashFunctionFnv1a_methods(root_module, cls):
    """Register Python wrappers for ns3::Hash::Function::Fnv1a (hash-fnv.h, module 'core')."""
    # (char const * buffer, size_t const size) — the argument list shared by
    # both hashing entry points.  Built fresh per method so each add_method
    # call receives its own parameter objects.
    def _buf_params():
        return [param('char const *', 'buffer'), param('size_t const', 'size')]
    # Copy constructor, then the default constructor.
    cls.add_constructor([param('ns3::Hash::Function::Fnv1a const &', 'arg0')])
    cls.add_constructor([])
    # Virtual hashing interface inherited from Hash::Implementation.
    cls.add_method('GetHash32', 'uint32_t', _buf_params(), is_virtual=True)
    cls.add_method('GetHash64', 'uint64_t', _buf_params(), is_virtual=True)
    cls.add_method('clear', 'void', [], is_virtual=True)
    return
def register_Ns3HashFunctionHash32_methods(root_module, cls):
    """Register Python wrappers for ns3::Hash::Function::Hash32 (hash-function.h, module 'core')."""
    # Copy constructor and the constructor taking a raw 32-bit hash
    # function pointer.
    cls.add_constructor([param('ns3::Hash::Function::Hash32 const &', 'arg0')])
    cls.add_constructor([param('ns3::Hash::Hash32Function_ptr', 'hp')])
    # Virtual hashing interface: only the 32-bit entry point is overridden.
    cls.add_method('GetHash32', 'uint32_t',
                   [param('char const *', 'buffer'), param('std::size_t const', 'size')],
                   is_virtual=True)
    cls.add_method('clear', 'void', [], is_virtual=True)
    return
def register_Ns3HashFunctionHash64_methods(root_module, cls):
    """Register Python wrappers for ns3::Hash::Function::Hash64 (hash-function.h, module 'core')."""
    # (char const * buffer, std::size_t const size) — shared argument list;
    # built fresh per method so each add_method owns its parameter objects.
    def _buf_params():
        return [param('char const *', 'buffer'), param('std::size_t const', 'size')]
    # Copy constructor and the constructor taking a raw 64-bit hash
    # function pointer.
    cls.add_constructor([param('ns3::Hash::Function::Hash64 const &', 'arg0')])
    cls.add_constructor([param('ns3::Hash::Hash64Function_ptr', 'hp')])
    # Virtual hashing interface inherited from Hash::Implementation.
    cls.add_method('GetHash32', 'uint32_t', _buf_params(), is_virtual=True)
    cls.add_method('GetHash64', 'uint64_t', _buf_params(), is_virtual=True)
    cls.add_method('clear', 'void', [], is_virtual=True)
    return
def register_Ns3HashFunctionMurmur3_methods(root_module, cls):
    """Register Python wrappers for ns3::Hash::Function::Murmur3 (hash-murmur3.h, module 'core')."""
    # (char const * buffer, std::size_t const size) — shared argument list;
    # built fresh per method so each add_method owns its parameter objects.
    def _buf_params():
        return [param('char const *', 'buffer'), param('std::size_t const', 'size')]
    # Copy constructor, then the default constructor.
    cls.add_constructor([param('ns3::Hash::Function::Murmur3 const &', 'arg0')])
    cls.add_constructor([])
    # Virtual hashing interface inherited from Hash::Implementation.
    cls.add_method('GetHash32', 'uint32_t', _buf_params(), is_virtual=True)
    cls.add_method('GetHash64', 'uint64_t', _buf_params(), is_virtual=True)
    cls.add_method('clear', 'void', [], is_virtual=True)
    return
def register_functions(root_module):
    """Register free functions, descending into each nested C++ namespace."""
    # Each entry pairs a C++ sub-namespace name with its registration helper.
    namespace_registrars = (
        ('FatalImpl', register_functions_ns3_FatalImpl),
        ('Hash', register_functions_ns3_Hash),
        ('TracedValueCallback', register_functions_ns3_TracedValueCallback),
    )
    for ns_name, registrar in namespace_registrars:
        registrar(root_module.add_cpp_namespace(ns_name), root_module)
    return
def register_functions_ns3_FatalImpl(module, root_module):
    """No free functions to register in the ns3::FatalImpl namespace."""
    return None
def register_functions_ns3_Hash(module, root_module):
    """Register free functions in ns3::Hash, then descend into Hash::Function."""
    function_ns = module.add_cpp_namespace('Function')
    register_functions_ns3_Hash_Function(function_ns, root_module)
    return
def register_functions_ns3_Hash_Function(module, root_module):
    """No free functions to register in the ns3::Hash::Function namespace."""
    return None
def register_functions_ns3_TracedValueCallback(module, root_module):
    """No free functions to register in the ns3::TracedValueCallback namespace."""
    return None
def main():
    """Generate the C++ binding code for this module, writing it to stdout."""
    sink = FileCodeSink(sys.stdout)
    # Build the module description: types first, then methods, then free
    # functions — registrations reference types declared earlier.
    root = module_init()
    register_types(root)
    register_methods(root)
    register_functions(root)
    root.generate(sink)
# Script entry point: emit the generated C++ binding code to stdout.
if __name__ == '__main__':
    main()
| 69.094568
| 934
| 0.631931
|
from pybindgen import Module, FileCodeSink, param, retval, cppclass, typehandlers
import pybindgen.settings
import warnings
class ErrorHandler(pybindgen.settings.ErrorHandler):
    """Error handler that downgrades wrapper-generation failures to warnings."""
    def handle_error(self, wrapper, exception, traceback_):
        # Report the failing wrapper but keep generating the remaining
        # bindings; returning True tells pybindgen the error was handled.
        message = "exception %r in wrapper %s" % (exception, wrapper)
        warnings.warn(message)
        return True
pybindgen.settings.error_handler = ErrorHandler()
import sys
def module_init():
    """Create and return the root pybindgen Module for ns.flow_monitor."""
    return Module('ns.flow_monitor', cpp_namespace='::ns3')
def register_types(module):
root_module = module.get_root()
'ns.network')
le['ns3::Address'], import_from_module='ns.network')
uteConstructionList'])
typehandlers.add_type_alias(u'std::list< ns3::AttributeConstructionList::Item > const_iterator', u'ns3::AttributeConstructionList::CIterator')
typehandlers.add_type_alias(u'std::list< ns3::AttributeConstructionList::Item > const_iterator*', u'ns3::AttributeConstructionList::CIterator*')
typehandlers.add_type_alias(u'std::list< ns3::AttributeConstructionList::Item > const_iterator&', u'ns3::AttributeConstructionList::CIterator&')
='ns.network')
work', outer_class=root_module['ns3::Buffer'])
='ns.network')
ter_class=root_module['ns3::ByteTagIterator'])
twork')
_class=root_module['ns3::ByteTagList'])
_module['ns3::ByteTagList::Iterator'])
e='ns.core')
=['ns3::AttributeAccessor'])
s=['ns3::AttributeChecker'])
ers=['ns3::AttributeValue'])
s=['ns3::CallbackImplBase'])
rameters=['ns3::EventImpl'])
ns3::Hash::Implementation'])
rameters=['ns3::NixVector'])
_parameters=['ns3::Packet'])
'ns3::TraceSourceAccessor'])
e='ns.core')
ule['ns3::Address'])
etwork')
oot_module['ns3::Address'])
terfaceAddress'], import_from_module='ns.internet')
etwork')
etwork')
oot_module['ns3::Address'])
SS', 'TENTATIVE_OPTIMISTIC', 'INVALID'], outer_class=root_module['ns3::Ipv6InterfaceAddress'], import_from_module='ns.internet')
pv6InterfaceAddress'], import_from_module='ns.internet')
etwork')
twork')
typehandlers.add_type_alias(u'void ( * ) ( ns3::Mac48Address )', u'ns3::Mac48Address::TracedCallback')
typehandlers.add_type_alias(u'void ( * ) ( ns3::Mac48Address )*', u'ns3::Mac48Address::TracedCallback*')
typehandlers.add_type_alias(u'void ( * ) ( ns3::Mac48Address )&', u'ns3::Mac48Address::TracedCallback&')
ot_module['ns3::Address'])
etwork')
oot_module['ns3::Address'])
work')
typehandlers.add_type_alias(u'std::vector< ns3::Ptr< ns3::Node > > const_iterator', u'ns3::NodeContainer::Iterator')
typehandlers.add_type_alias(u'std::vector< ns3::Ptr< ns3::Node > > const_iterator*', u'ns3::NodeContainer::Iterator*')
typehandlers.add_type_alias(u'std::vector< ns3::Ptr< ns3::Node > > const_iterator&', u'ns3::NodeContainer::Iterator&')
ue, import_from_module='ns.core')
le='ns.core')
core')
ork')
=root_module['ns3::PacketMetadata'])
e['ns3::PacketMetadata::Item'], import_from_module='ns.network')
s=root_module['ns3::PacketMetadata'])
='ns.network')
r_class=root_module['ns3::PacketTagIterator'])
ork')
s=root_module['ns3::PacketTagList'])
eters=['ns3::Object', 'ns3::ObjectBase', 'ns3::ObjectDeleter'], parent=root_module['ns3::ObjectBase'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount'))
ity='private', import_from_module='ns.core')
odule['ns3::Simulator'], import_from_module='ns.core')
ule='ns.network', parent=root_module['ns3::ObjectBase'])
.network')
ule='ns.core')
le='ns.core')
CONSTRUCT', 'ATTR_SGC'], outer_class=root_module['ns3::TypeId'], import_from_module='ns.core')
SOLETE'], outer_class=root_module['ns3::TypeId'], import_from_module='ns.core')
re', outer_class=root_module['ns3::TypeId'])
re', outer_class=root_module['ns3::TypeId'])
typehandlers.add_type_alias(u'uint32_t', u'ns3::TypeId::hash_t')
typehandlers.add_type_alias(u'uint32_t*', u'ns3::TypeId::hash_t*')
typehandlers.add_type_alias(u'uint32_t&', u'ns3::TypeId::hash_t&')
dule='ns.core')
s.core')
, outer_class=root_module['ns3::int64x64_t'], import_from_module='ns.core')
e='ns.network', parent=root_module['ns3::ObjectBase'])
='ns.network', parent=root_module['ns3::Chunk'])
nternet', parent=root_module['ns3::Header'])
SCP_AF12', 'DSCP_AF13', 'DSCP_CS2', 'DSCP_AF21', 'DSCP_AF22', 'DSCP_AF23', 'DSCP_CS3', 'DSCP_AF31', 'DSCP_AF32', 'DSCP_AF33', 'DSCP_CS4', 'DSCP_AF41', 'DSCP_AF42', 'DSCP_AF43', 'DSCP_CS5', 'DSCP_EF', 'DSCP_CS6', 'DSCP_CS7'], outer_class=root_module['ns3::Ipv4Header'], import_from_module='ns.internet')
_CE'], outer_class=root_module['ns3::Ipv4Header'], import_from_module='ns.internet')
nternet', parent=root_module['ns3::Header'])
SCP_AF12', 'DSCP_AF13', 'DSCP_CS2', 'DSCP_AF21', 'DSCP_AF22', 'DSCP_AF23', 'DSCP_CS3', 'DSCP_AF31', 'DSCP_AF32', 'DSCP_AF33', 'DSCP_CS4', 'DSCP_AF41', 'DSCP_AF42', 'DSCP_AF43', 'DSCP_CS5', 'DSCP_EF', 'DSCP_CS6', 'DSCP_CS7'], outer_class=root_module['ns3::Ipv6Header'], import_from_module='ns.internet')
TCP', 'IPV6_UDP', 'IPV6_IPV6', 'IPV6_EXT_ROUTING', 'IPV6_EXT_FRAGMENTATION', 'IPV6_EXT_CONFIDENTIALITY', 'IPV6_EXT_AUTHENTIFICATION', 'IPV6_ICMPV6', 'IPV6_EXT_END', 'IPV6_EXT_DESTINATION', 'IPV6_SCTP', 'IPV6_EXT_MOBILITY', 'IPV6_UDP_LITE'], outer_class=root_module['ns3::Ipv6Header'], import_from_module='ns.internet')
_CE'], outer_class=root_module['ns3::Ipv6Header'], import_from_module='ns.internet')
ule='ns.core', parent=root_module['ns3::SimpleRefCount< ns3::Object, ns3::ObjectBase, ns3::ObjectDeleter >'])
core', outer_class=root_module['ns3::Object'])
'ns3::empty', 'ns3::DefaultDeleter<ns3::AttributeAccessor>'], parent=root_module['ns3::empty'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount'))
, 'ns3::empty', 'ns3::DefaultDeleter<ns3::AttributeChecker>'], parent=root_module['ns3::empty'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount'))
e', 'ns3::empty', 'ns3::DefaultDeleter<ns3::AttributeValue>'], parent=root_module['ns3::empty'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount'))
, 'ns3::empty', 'ns3::DefaultDeleter<ns3::CallbackImplBase>'], parent=root_module['ns3::empty'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount'))
ntImpl', 'ns3::empty', 'ns3::DefaultDeleter<ns3::EventImpl>'], parent=root_module['ns3::empty'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount'))
tDeleter<ns3::FlowClassifier>'], parent=root_module['ns3::empty'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount'))
s3::empty', 'ns3::DefaultDeleter<ns3::Hash::Implementation>'], parent=root_module['ns3::empty'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount'))
'ns3::empty', 'ns3::DefaultDeleter<ns3::Ipv4MulticastRoute>'], parent=root_module['ns3::empty'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount'))
4Route', 'ns3::empty', 'ns3::DefaultDeleter<ns3::Ipv4Route>'], parent=root_module['ns3::empty'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount'))
Vector', 'ns3::empty', 'ns3::DefaultDeleter<ns3::NixVector>'], parent=root_module['ns3::empty'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount'))
ns3::empty', 'ns3::DefaultDeleter<ns3::OutputStreamWrapper>'], parent=root_module['ns3::empty'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount'))
3::Packet', 'ns3::empty', 'ns3::DefaultDeleter<ns3::Packet>'], parent=root_module['ns3::empty'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount'))
ueItem', 'ns3::empty', 'ns3::DefaultDeleter<ns3::QueueItem>'], parent=root_module['ns3::empty'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount'))
ns3::empty', 'ns3::DefaultDeleter<ns3::TraceSourceAccessor>'], parent=root_module['ns3::empty'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount'))
='ns.network', parent=root_module['ns3::Object'])
, 'ERROR_NOTCONN', 'ERROR_MSGSIZE', 'ERROR_AGAIN', 'ERROR_SHUTDOWN', 'ERROR_OPNOTSUPP', 'ERROR_AFNOSUPPORT', 'ERROR_INVAL', 'ERROR_BADF', 'ERROR_NOROUTETOHOST', 'ERROR_NODEV', 'ERROR_ADDRNOTAVAIL', 'ERROR_ADDRINUSE', 'SOCKET_ERRNO_LAST'], outer_class=root_module['ns3::Socket'], import_from_module='ns.network')
PACKET', 'NS3_SOCK_DGRAM', 'NS3_SOCK_RAW'], outer_class=root_module['ns3::Socket'], import_from_module='ns.network')
_FILLER', 'NS3_PRIO_BULK', 'NS3_PRIO_INTERACTIVE_BULK', 'NS3_PRIO_INTERACTIVE', 'NS3_PRIO_CONTROL'], outer_class=root_module['ns3::Socket'], import_from_module='ns.network')
ss=root_module['ns3::Socket'], import_from_module='ns.network')
='ns.network', parent=root_module['ns3::Tag'])
='ns.network', parent=root_module['ns3::Tag'])
='ns.network', parent=root_module['ns3::Tag'])
='ns.network', parent=root_module['ns3::Tag'])
='ns.network', parent=root_module['ns3::Tag'])
='ns.network', parent=root_module['ns3::Tag'])
ule='ns.core')
'MS', 'US', 'NS', 'PS', 'FS', 'LAST'], outer_class=root_module['ns3::Time'], import_from_module='ns.core')
typehandlers.add_type_alias(u'void ( * ) ( ns3::Time )', u'ns3::Time::TracedCallback')
typehandlers.add_type_alias(u'void ( * ) ( ns3::Time )*', u'ns3::Time::TracedCallback*')
typehandlers.add_type_alias(u'void ( * ) ( ns3::Time )&', u'ns3::Time::TracedCallback&')
erts_to(root_module['ns3::int64x64_t'])
parent=root_module['ns3::SimpleRefCount< ns3::TraceSourceAccessor, ns3::empty, ns3::DefaultDeleter<ns3::TraceSourceAccessor> >'])
'ns.network', parent=root_module['ns3::Chunk'])
='ns.core', parent=root_module['ns3::SimpleRefCount< ns3::AttributeAccessor, ns3::empty, ns3::DefaultDeleter<ns3::AttributeAccessor> >'])
False, automatic_type_narrowing=True, import_from_module='ns.core', parent=root_module['ns3::SimpleRefCount< ns3::AttributeChecker, ns3::empty, ns3::DefaultDeleter<ns3::AttributeChecker> >'])
False, automatic_type_narrowing=True, import_from_module='ns.core', parent=root_module['ns3::SimpleRefCount< ns3::AttributeValue, ns3::empty, ns3::DefaultDeleter<ns3::AttributeValue> >'])
e='ns.core', parent=root_module['ns3::AttributeChecker'])
e='ns.core', parent=root_module['ns3::SimpleRefCount< ns3::CallbackImplBase, ns3::empty, ns3::DefaultDeleter<ns3::CallbackImplBase> >'])
e='ns.core', parent=root_module['ns3::AttributeValue'])
='ns.core', parent=root_module['ns3::AttributeAccessor'])
='ns.core', parent=root_module['ns3::AttributeChecker'])
='ns.core', parent=root_module['ns3::AttributeValue'])
'ns.core', parent=root_module['ns3::SimpleRefCount< ns3::EventImpl, ns3::empty, ns3::DefaultDeleter<ns3::EventImpl> >'])
efCount< ns3::FlowClassifier, ns3::empty, ns3::DefaultDeleter<ns3::FlowClassifier> >'])
ct'])
)
typehandlers.add_type_alias(u'std::map< unsigned int, ns3::FlowMonitor::FlowStats >', u'ns3::FlowMonitor::FlowStatsContainer')
typehandlers.add_type_alias(u'std::map< unsigned int, ns3::FlowMonitor::FlowStats >*', u'ns3::FlowMonitor::FlowStatsContainer*')
typehandlers.add_type_alias(u'std::map< unsigned int, ns3::FlowMonitor::FlowStats >&', u'ns3::FlowMonitor::FlowStatsContainer&')
typehandlers.add_type_alias(u'std::map< unsigned int, ns3::FlowMonitor::FlowStats > iterator', u'ns3::FlowMonitor::FlowStatsContainerI')
typehandlers.add_type_alias(u'std::map< unsigned int, ns3::FlowMonitor::FlowStats > iterator*', u'ns3::FlowMonitor::FlowStatsContainerI*')
typehandlers.add_type_alias(u'std::map< unsigned int, ns3::FlowMonitor::FlowStats > iterator&', u'ns3::FlowMonitor::FlowStatsContainerI&')
typehandlers.add_type_alias(u'std::map< unsigned int, ns3::FlowMonitor::FlowStats > const_iterator', u'ns3::FlowMonitor::FlowStatsContainerCI')
typehandlers.add_type_alias(u'std::map< unsigned int, ns3::FlowMonitor::FlowStats > const_iterator*', u'ns3::FlowMonitor::FlowStatsContainerCI*')
typehandlers.add_type_alias(u'std::map< unsigned int, ns3::FlowMonitor::FlowStats > const_iterator&', u'ns3::FlowMonitor::FlowStatsContainerCI&')
typehandlers.add_type_alias(u'std::vector< ns3::Ptr< ns3::FlowProbe > >', u'ns3::FlowMonitor::FlowProbeContainer')
typehandlers.add_type_alias(u'std::vector< ns3::Ptr< ns3::FlowProbe > >*', u'ns3::FlowMonitor::FlowProbeContainer*')
typehandlers.add_type_alias(u'std::vector< ns3::Ptr< ns3::FlowProbe > >&', u'ns3::FlowMonitor::FlowProbeContainer&')
typehandlers.add_type_alias(u'std::vector< ns3::Ptr< ns3::FlowProbe > > iterator', u'ns3::FlowMonitor::FlowProbeContainerI')
typehandlers.add_type_alias(u'std::vector< ns3::Ptr< ns3::FlowProbe > > iterator*', u'ns3::FlowMonitor::FlowProbeContainerI*')
typehandlers.add_type_alias(u'std::vector< ns3::Ptr< ns3::FlowProbe > > iterator&', u'ns3::FlowMonitor::FlowProbeContainerI&')
typehandlers.add_type_alias(u'std::vector< ns3::Ptr< ns3::FlowProbe > > const_iterator', u'ns3::FlowMonitor::FlowProbeContainerCI')
typehandlers.add_type_alias(u'std::vector< ns3::Ptr< ns3::FlowProbe > > const_iterator*', u'ns3::FlowMonitor::FlowProbeContainerCI*')
typehandlers.add_type_alias(u'std::vector< ns3::Ptr< ns3::FlowProbe > > const_iterator&', u'ns3::FlowMonitor::FlowProbeContainerCI&')
ject'])
'])
typehandlers.add_type_alias(u'std::map< unsigned int, ns3::FlowProbe::FlowStats >', u'ns3::FlowProbe::Stats')
typehandlers.add_type_alias(u'std::map< unsigned int, ns3::FlowProbe::FlowStats >*', u'ns3::FlowProbe::Stats*')
typehandlers.add_type_alias(u'std::map< unsigned int, ns3::FlowProbe::FlowStats >&', u'ns3::FlowProbe::Stats&')
e='ns.internet', parent=root_module['ns3::Object'])
etwork', parent=root_module['ns3::AttributeChecker'])
etwork', parent=root_module['ns3::AttributeValue'])
er'])
])
M', 'DROP_QUEUE', 'DROP_QUEUE_DISC', 'DROP_INTERFACE_DOWN', 'DROP_ROUTE_ERROR', 'DROP_FRAGMENT_TIMEOUT', 'DROP_INVALID_REASON'], outer_class=root_module['ns3::Ipv4FlowProbe'])
et', parent=root_module['ns3::Ipv4'])
KSUM', 'DROP_INTERFACE_DOWN', 'DROP_ROUTE_ERROR', 'DROP_FRAGMENT_TIMEOUT'], outer_class=root_module['ns3::Ipv4L3Protocol'], import_from_module='ns.internet')
typehandlers.add_type_alias(u'void ( * ) ( ns3::Ipv4Header const &, ns3::Ptr< ns3::Packet const >, uint32_t )', u'ns3::Ipv4L3Protocol::SentTracedCallback')
typehandlers.add_type_alias(u'void ( * ) ( ns3::Ipv4Header const &, ns3::Ptr< ns3::Packet const >, uint32_t )*', u'ns3::Ipv4L3Protocol::SentTracedCallback*')
typehandlers.add_type_alias(u'void ( * ) ( ns3::Ipv4Header const &, ns3::Ptr< ns3::Packet const >, uint32_t )&', u'ns3::Ipv4L3Protocol::SentTracedCallback&')
typehandlers.add_type_alias(u'void ( * ) ( ns3::Ptr< ns3::Packet const >, ns3::Ptr< ns3::Ipv4 >, uint32_t )', u'ns3::Ipv4L3Protocol::TxRxTracedCallback')
typehandlers.add_type_alias(u'void ( * ) ( ns3::Ptr< ns3::Packet const >, ns3::Ptr< ns3::Ipv4 >, uint32_t )*', u'ns3::Ipv4L3Protocol::TxRxTracedCallback*')
typehandlers.add_type_alias(u'void ( * ) ( ns3::Ptr< ns3::Packet const >, ns3::Ptr< ns3::Ipv4 >, uint32_t )&', u'ns3::Ipv4L3Protocol::TxRxTracedCallback&')
typehandlers.add_type_alias(u'void ( * ) ( ns3::Ipv4Header const &, ns3::Ptr< ns3::Packet const >, ns3::Ipv4L3Protocol::DropReason, ns3::Ptr< ns3::Ipv4 >, uint32_t )', u'ns3::Ipv4L3Protocol::DropTracedCallback')
typehandlers.add_type_alias(u'void ( * ) ( ns3::Ipv4Header const &, ns3::Ptr< ns3::Packet const >, ns3::Ipv4L3Protocol::DropReason, ns3::Ptr< ns3::Ipv4 >, uint32_t )*', u'ns3::Ipv4L3Protocol::DropTracedCallback*')
typehandlers.add_type_alias(u'void ( * ) ( ns3::Ipv4Header const &, ns3::Ptr< ns3::Packet const >, ns3::Ipv4L3Protocol::DropReason, ns3::Ptr< ns3::Ipv4 >, uint32_t )&', u'ns3::Ipv4L3Protocol::DropTracedCallback&')
etwork', parent=root_module['ns3::AttributeChecker'])
etwork', parent=root_module['ns3::AttributeValue'])
internet', parent=root_module['ns3::SimpleRefCount< ns3::Ipv4MulticastRoute, ns3::empty, ns3::DefaultDeleter<ns3::Ipv4MulticastRoute> >'])
internet', parent=root_module['ns3::SimpleRefCount< ns3::Ipv4Route, ns3::empty, ns3::DefaultDeleter<ns3::Ipv4Route> >'])
parent=root_module['ns3::Object'])
typehandlers.add_type_alias(u'ns3::Callback< void, ns3::Ptr< ns3::Ipv4Route >, ns3::Ptr< ns3::Packet const >, ns3::Ipv4Header const &, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >', u'ns3::Ipv4RoutingProtocol::UnicastForwardCallback')
typehandlers.add_type_alias(u'ns3::Callback< void, ns3::Ptr< ns3::Ipv4Route >, ns3::Ptr< ns3::Packet const >, ns3::Ipv4Header const &, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >*', u'ns3::Ipv4RoutingProtocol::UnicastForwardCallback*')
typehandlers.add_type_alias(u'ns3::Callback< void, ns3::Ptr< ns3::Ipv4Route >, ns3::Ptr< ns3::Packet const >, ns3::Ipv4Header const &, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >&', u'ns3::Ipv4RoutingProtocol::UnicastForwardCallback&')
typehandlers.add_type_alias(u'ns3::Callback< void, ns3::Ptr< ns3::Ipv4MulticastRoute >, ns3::Ptr< ns3::Packet const >, ns3::Ipv4Header const &, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >', u'ns3::Ipv4RoutingProtocol::MulticastForwardCallback')
typehandlers.add_type_alias(u'ns3::Callback< void, ns3::Ptr< ns3::Ipv4MulticastRoute >, ns3::Ptr< ns3::Packet const >, ns3::Ipv4Header const &, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >*', u'ns3::Ipv4RoutingProtocol::MulticastForwardCallback*')
typehandlers.add_type_alias(u'ns3::Callback< void, ns3::Ptr< ns3::Ipv4MulticastRoute >, ns3::Ptr< ns3::Packet const >, ns3::Ipv4Header const &, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >&', u'ns3::Ipv4RoutingProtocol::MulticastForwardCallback&')
typehandlers.add_type_alias(u'ns3::Callback< void, ns3::Ptr< ns3::Packet const >, ns3::Ipv4Header const &, unsigned int, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >', u'ns3::Ipv4RoutingProtocol::LocalDeliverCallback')
typehandlers.add_type_alias(u'ns3::Callback< void, ns3::Ptr< ns3::Packet const >, ns3::Ipv4Header const &, unsigned int, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >*', u'ns3::Ipv4RoutingProtocol::LocalDeliverCallback*')
typehandlers.add_type_alias(u'ns3::Callback< void, ns3::Ptr< ns3::Packet const >, ns3::Ipv4Header const &, unsigned int, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >&', u'ns3::Ipv4RoutingProtocol::LocalDeliverCallback&')
typehandlers.add_type_alias(u'ns3::Callback< void, ns3::Ptr< ns3::Packet const >, ns3::Ipv4Header const &, ns3::Socket::SocketErrno, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >', u'ns3::Ipv4RoutingProtocol::ErrorCallback')
typehandlers.add_type_alias(u'ns3::Callback< void, ns3::Ptr< ns3::Packet const >, ns3::Ipv4Header const &, ns3::Socket::SocketErrno, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >*', u'ns3::Ipv4RoutingProtocol::ErrorCallback*')
typehandlers.add_type_alias(u'ns3::Callback< void, ns3::Ptr< ns3::Packet const >, ns3::Ipv4Header const &, ns3::Socket::SocketErrno, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >&', u'ns3::Ipv4RoutingProtocol::ErrorCallback&')
e='ns.internet', parent=root_module['ns3::Object'])
etwork', parent=root_module['ns3::AttributeChecker'])
etwork', parent=root_module['ns3::AttributeValue'])
er'])
])
M', 'DROP_QUEUE', 'DROP_QUEUE_DISC', 'DROP_INTERFACE_DOWN', 'DROP_ROUTE_ERROR', 'DROP_UNKNOWN_PROTOCOL', 'DROP_UNKNOWN_OPTION', 'DROP_MALFORMED_HEADER', 'DROP_FRAGMENT_TIMEOUT', 'DROP_INVALID_REASON'], outer_class=root_module['ns3::Ipv6FlowProbe'])
et', parent=root_module['ns3::Ipv6'])
E_DOWN', 'DROP_ROUTE_ERROR', 'DROP_UNKNOWN_PROTOCOL', 'DROP_UNKNOWN_OPTION', 'DROP_MALFORMED_HEADER', 'DROP_FRAGMENT_TIMEOUT'], outer_class=root_module['ns3::Ipv6L3Protocol'], import_from_module='ns.internet')
typehandlers.add_type_alias(u'void ( * ) ( ns3::Ipv6Header const &, ns3::Ptr< ns3::Packet const >, uint32_t )', u'ns3::Ipv6L3Protocol::SentTracedCallback')
typehandlers.add_type_alias(u'void ( * ) ( ns3::Ipv6Header const &, ns3::Ptr< ns3::Packet const >, uint32_t )*', u'ns3::Ipv6L3Protocol::SentTracedCallback*')
typehandlers.add_type_alias(u'void ( * ) ( ns3::Ipv6Header const &, ns3::Ptr< ns3::Packet const >, uint32_t )&', u'ns3::Ipv6L3Protocol::SentTracedCallback&')
typehandlers.add_type_alias(u'void ( * ) ( ns3::Ptr< ns3::Packet const >, ns3::Ptr< ns3::Ipv6 >, uint32_t )', u'ns3::Ipv6L3Protocol::TxRxTracedCallback')
typehandlers.add_type_alias(u'void ( * ) ( ns3::Ptr< ns3::Packet const >, ns3::Ptr< ns3::Ipv6 >, uint32_t )*', u'ns3::Ipv6L3Protocol::TxRxTracedCallback*')
typehandlers.add_type_alias(u'void ( * ) ( ns3::Ptr< ns3::Packet const >, ns3::Ptr< ns3::Ipv6 >, uint32_t )&', u'ns3::Ipv6L3Protocol::TxRxTracedCallback&')
typehandlers.add_type_alias(u'void ( * ) ( ns3::Ipv6Header const &, ns3::Ptr< ns3::Packet const >, ns3::Ipv6L3Protocol::DropReason, ns3::Ptr< ns3::Ipv6 >, uint32_t )', u'ns3::Ipv6L3Protocol::DropTracedCallback')
typehandlers.add_type_alias(u'void ( * ) ( ns3::Ipv6Header const &, ns3::Ptr< ns3::Packet const >, ns3::Ipv6L3Protocol::DropReason, ns3::Ptr< ns3::Ipv6 >, uint32_t )*', u'ns3::Ipv6L3Protocol::DropTracedCallback*')
typehandlers.add_type_alias(u'void ( * ) ( ns3::Ipv6Header const &, ns3::Ptr< ns3::Packet const >, ns3::Ipv6L3Protocol::DropReason, ns3::Ptr< ns3::Ipv6 >, uint32_t )&', u'ns3::Ipv6L3Protocol::DropTracedCallback&')
net', parent=root_module['ns3::Object'])
etwork', parent=root_module['ns3::AttributeChecker'])
etwork', parent=root_module['ns3::AttributeValue'])
twork', parent=root_module['ns3::AttributeChecker'])
twork', parent=root_module['ns3::AttributeValue'])
.network', parent=root_module['ns3::Object'])
ET_BROADCAST', 'NS3_PACKET_BROADCAST', 'PACKET_MULTICAST', 'NS3_PACKET_MULTICAST', 'PACKET_OTHERHOST', 'NS3_PACKET_OTHERHOST'], outer_class=root_module['ns3::NetDevice'], import_from_module='ns.network')
typehandlers.add_type_alias(u'void ( * ) ( )', u'ns3::NetDevice::LinkChangeTracedCallback')
typehandlers.add_type_alias(u'void ( * ) ( )*', u'ns3::NetDevice::LinkChangeTracedCallback*')
typehandlers.add_type_alias(u'void ( * ) ( )&', u'ns3::NetDevice::LinkChangeTracedCallback&')
typehandlers.add_type_alias(u'ns3::Callback< bool, ns3::Ptr< ns3::NetDevice >, ns3::Ptr< ns3::Packet const >, unsigned short, ns3::Address const &, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >', u'ns3::NetDevice::ReceiveCallback')
typehandlers.add_type_alias(u'ns3::Callback< bool, ns3::Ptr< ns3::NetDevice >, ns3::Ptr< ns3::Packet const >, unsigned short, ns3::Address const &, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >*', u'ns3::NetDevice::ReceiveCallback*')
typehandlers.add_type_alias(u'ns3::Callback< bool, ns3::Ptr< ns3::NetDevice >, ns3::Ptr< ns3::Packet const >, unsigned short, ns3::Address const &, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >&', u'ns3::NetDevice::ReceiveCallback&')
typehandlers.add_type_alias(u'ns3::Callback< bool, ns3::Ptr< ns3::NetDevice >, ns3::Ptr< ns3::Packet const >, unsigned short, ns3::Address const &, ns3::Address const &, ns3::NetDevice::PacketType, ns3::empty, ns3::empty, ns3::empty >', u'ns3::NetDevice::PromiscReceiveCallback')
typehandlers.add_type_alias(u'ns3::Callback< bool, ns3::Ptr< ns3::NetDevice >, ns3::Ptr< ns3::Packet const >, unsigned short, ns3::Address const &, ns3::Address const &, ns3::NetDevice::PacketType, ns3::empty, ns3::empty, ns3::empty >*', u'ns3::NetDevice::PromiscReceiveCallback*')
typehandlers.add_type_alias(u'ns3::Callback< bool, ns3::Ptr< ns3::NetDevice >, ns3::Ptr< ns3::Packet const >, unsigned short, ns3::Address const &, ns3::Address const &, ns3::NetDevice::PacketType, ns3::empty, ns3::empty, ns3::empty >&', u'ns3::NetDevice::PromiscReceiveCallback&')
.network', parent=root_module['ns3::SimpleRefCount< ns3::NixVector, ns3::empty, ns3::DefaultDeleter<ns3::NixVector> >'])
le='ns.network', parent=root_module['ns3::Object'])
typehandlers.add_type_alias(u'ns3::Callback< void, ns3::Ptr< ns3::NetDevice >, ns3::Ptr< ns3::Packet const >, unsigned short, ns3::Address const &, ns3::Address const &, ns3::NetDevice::PacketType, ns3::empty, ns3::empty, ns3::empty >', u'ns3::Node::ProtocolHandler')
typehandlers.add_type_alias(u'ns3::Callback< void, ns3::Ptr< ns3::NetDevice >, ns3::Ptr< ns3::Packet const >, unsigned short, ns3::Address const &, ns3::Address const &, ns3::NetDevice::PacketType, ns3::empty, ns3::empty, ns3::empty >*', u'ns3::Node::ProtocolHandler*')
typehandlers.add_type_alias(u'ns3::Callback< void, ns3::Ptr< ns3::NetDevice >, ns3::Ptr< ns3::Packet const >, unsigned short, ns3::Address const &, ns3::Address const &, ns3::NetDevice::PacketType, ns3::empty, ns3::empty, ns3::empty >&', u'ns3::Node::ProtocolHandler&')
typehandlers.add_type_alias(u'ns3::Callback< void, ns3::Ptr< ns3::NetDevice >, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >', u'ns3::Node::DeviceAdditionListener')
typehandlers.add_type_alias(u'ns3::Callback< void, ns3::Ptr< ns3::NetDevice >, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >*', u'ns3::Node::DeviceAdditionListener*')
typehandlers.add_type_alias(u'ns3::Callback< void, ns3::Ptr< ns3::NetDevice >, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >&', u'ns3::Node::DeviceAdditionListener&')
core', parent=root_module['ns3::AttributeChecker'])
core', parent=root_module['ns3::AttributeValue'])
parent=root_module['ns3::SimpleRefCount< ns3::OutputStreamWrapper, ns3::empty, ns3::DefaultDeleter<ns3::OutputStreamWrapper> >'])
='ns.network', parent=root_module['ns3::SimpleRefCount< ns3::Packet, ns3::empty, ns3::DefaultDeleter<ns3::Packet> >'])
typehandlers.add_type_alias(u'void ( * ) ( ns3::Ptr< ns3::Packet const > )', u'ns3::Packet::TracedCallback')
typehandlers.add_type_alias(u'void ( * ) ( ns3::Ptr< ns3::Packet const > )*', u'ns3::Packet::TracedCallback*')
typehandlers.add_type_alias(u'void ( * ) ( ns3::Ptr< ns3::Packet const > )&', u'ns3::Packet::TracedCallback&')
typehandlers.add_type_alias(u'void ( * ) ( ns3::Ptr< ns3::Packet const >, ns3::Address const & )', u'ns3::Packet::AddressTracedCallback')
typehandlers.add_type_alias(u'void ( * ) ( ns3::Ptr< ns3::Packet const >, ns3::Address const & )*', u'ns3::Packet::AddressTracedCallback*')
typehandlers.add_type_alias(u'void ( * ) ( ns3::Ptr< ns3::Packet const >, ns3::Address const & )&', u'ns3::Packet::AddressTracedCallback&')
typehandlers.add_type_alias(u'void ( * ) ( ns3::Ptr< ns3::Packet const > const, ns3::Address const &, ns3::Address const & )', u'ns3::Packet::TwoAddressTracedCallback')
typehandlers.add_type_alias(u'void ( * ) ( ns3::Ptr< ns3::Packet const > const, ns3::Address const &, ns3::Address const & )*', u'ns3::Packet::TwoAddressTracedCallback*')
typehandlers.add_type_alias(u'void ( * ) ( ns3::Ptr< ns3::Packet const > const, ns3::Address const &, ns3::Address const & )&', u'ns3::Packet::TwoAddressTracedCallback&')
typehandlers.add_type_alias(u'void ( * ) ( ns3::Ptr< ns3::Packet const >, ns3::Mac48Address )', u'ns3::Packet::Mac48AddressTracedCallback')
typehandlers.add_type_alias(u'void ( * ) ( ns3::Ptr< ns3::Packet const >, ns3::Mac48Address )*', u'ns3::Packet::Mac48AddressTracedCallback*')
typehandlers.add_type_alias(u'void ( * ) ( ns3::Ptr< ns3::Packet const >, ns3::Mac48Address )&', u'ns3::Packet::Mac48AddressTracedCallback&')
typehandlers.add_type_alias(u'void ( * ) ( uint32_t, uint32_t )', u'ns3::Packet::SizeTracedCallback')
typehandlers.add_type_alias(u'void ( * ) ( uint32_t, uint32_t )*', u'ns3::Packet::SizeTracedCallback*')
typehandlers.add_type_alias(u'void ( * ) ( uint32_t, uint32_t )&', u'ns3::Packet::SizeTracedCallback&')
typehandlers.add_type_alias(u'void ( * ) ( ns3::Ptr< ns3::Packet const >, double )', u'ns3::Packet::SinrTracedCallback')
typehandlers.add_type_alias(u'void ( * ) ( ns3::Ptr< ns3::Packet const >, double )*', u'ns3::Packet::SinrTracedCallback*')
typehandlers.add_type_alias(u'void ( * ) ( ns3::Ptr< ns3::Packet const >, double )&', u'ns3::Packet::SinrTracedCallback&')
.network', parent=root_module['ns3::SimpleRefCount< ns3::QueueItem, ns3::empty, ns3::DefaultDeleter<ns3::QueueItem> >'])
'ns3::QueueItem'], import_from_module='ns.network')
typehandlers.add_type_alias(u'void ( * ) ( ns3::Ptr< ns3::QueueItem const > )', u'ns3::QueueItem::TracedCallback')
typehandlers.add_type_alias(u'void ( * ) ( ns3::Ptr< ns3::QueueItem const > )*', u'ns3::QueueItem::TracedCallback*')
typehandlers.add_type_alias(u'void ( * ) ( ns3::Ptr< ns3::QueueItem const > )&', u'ns3::QueueItem::TracedCallback&')
ule='ns.core', parent=root_module['ns3::AttributeValue'])
le='ns.core', parent=root_module['ns3::AttributeChecker'])
le='ns.core', parent=root_module['ns3::AttributeValue'])
'ns.network', parent=root_module['ns3::AttributeChecker'])
'ns.network', parent=root_module['ns3::AttributeValue'])
s3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty'], parent=root_module['ns3::CallbackImplBase'])
s3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty'], parent=root_module['ns3::CallbackImplBase'])
s3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty'], parent=root_module['ns3::CallbackImplBase'])
s3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty'], parent=root_module['ns3::CallbackImplBase'])
s3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty'], parent=root_module['ns3::CallbackImplBase'])
s3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty'], parent=root_module['ns3::CallbackImplBase'])
s3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty'], parent=root_module['ns3::CallbackImplBase'])
s3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty'], parent=root_module['ns3::CallbackImplBase'])
acketType', 'ns3::empty', 'ns3::empty', 'ns3::empty'], parent=root_module['ns3::CallbackImplBase'])
s3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty'], parent=root_module['ns3::CallbackImplBase'])
s3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty'], parent=root_module['ns3::CallbackImplBase'])
s3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty'], parent=root_module['ns3::CallbackImplBase'])
s3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty'], parent=root_module['ns3::CallbackImplBase'])
.network', parent=root_module['ns3::QueueItem'])
module.add_container('std::vector< ns3::Ipv6Address >', 'ns3::Ipv6Address', container_type=u'vector')
module.add_container('std::vector< unsigned int >', 'unsigned int', container_type=u'vector')
module.add_container('std::vector< unsigned long long >', 'long unsigned int', container_type=u'vector')
module.add_container('std::map< unsigned int, ns3::FlowMonitor::FlowStats >', ('unsigned int', 'ns3::FlowMonitor::FlowStats'), container_type=u'map')
module.add_container('std::vector< ns3::Ptr< ns3::FlowProbe > >', 'ns3::Ptr< ns3::FlowProbe >', container_type=u'vector')
module.add_container('std::map< unsigned int, ns3::FlowProbe::FlowStats >', ('unsigned int', 'ns3::FlowProbe::FlowStats'), container_type=u'map')
module.add_container('ns3::FlowProbe::Stats', ('unsigned int', 'ns3::FlowProbe::FlowStats'), container_type=u'map')
module.add_container('std::vector< std::pair< ns3::Ipv4Header::DscpType, unsigned int > >', 'std::pair< ns3::Ipv4Header::DscpType, unsigned int >', container_type=u'vector')
module.add_container('std::map< unsigned int, unsigned int >', ('unsigned int', 'unsigned int'), container_type=u'map')
module.add_container('std::vector< std::pair< ns3::Ipv6Header::DscpType, unsigned int > >', 'std::pair< ns3::Ipv6Header::DscpType, unsigned int >', container_type=u'vector')
typehandlers.add_type_alias(u'uint32_t', u'ns3::FlowId')
typehandlers.add_type_alias(u'uint32_t*', u'ns3::FlowId*')
typehandlers.add_type_alias(u'uint32_t&', u'ns3::FlowId&')
typehandlers.add_type_alias(u'uint32_t', u'ns3::FlowPacketId')
typehandlers.add_type_alias(u'uint32_t*', u'ns3::FlowPacketId*')
typehandlers.add_type_alias(u'uint32_t&', u'ns3::FlowPacketId&')
talImpl')
register_types_ns3_FatalImpl(nested_module)
e('Hash')
register_types_ns3_Hash(nested_module)
allback')
register_types_ns3_TracedValueCallback(nested_module)
def register_types_ns3_FatalImpl(module):
    """Type registrations for the ``ns3::FatalImpl`` namespace (none yet).

    The generated registrar only resolves the root module, for parity with
    the other per-namespace registrars; FatalImpl exposes no wrapped types.
    """
    module.get_root()
def register_types_ns3_Hash(module):
    # Registers binding metadata for the ns3::Hash namespace: the
    # Hash::Implementation class plus the Hash32/Hash64 function-pointer
    # typedef aliases, then recurses into the Hash::Function sub-namespace.
    # NOTE(review): two generated lines below are left-truncated in this copy
    # (the add_class(...) call and the add_cpp_namespace('Function') call are
    # cut off at the left margin) -- regenerate this file from the pybindgen
    # modulegen script instead of hand-repairing; do not run as-is.
    root_module = module.get_root()
    , parent=root_module['ns3::SimpleRefCount< ns3::Hash::Implementation, ns3::empty, ns3::DefaultDeleter<ns3::Hash::Implementation> >'])
    # Each alias is registered in value, pointer and reference form.
    typehandlers.add_type_alias(u'uint32_t ( * ) ( char const *, std::size_t const )', u'ns3::Hash::Hash32Function_ptr')
    typehandlers.add_type_alias(u'uint32_t ( * ) ( char const *, std::size_t const )*', u'ns3::Hash::Hash32Function_ptr*')
    typehandlers.add_type_alias(u'uint32_t ( * ) ( char const *, std::size_t const )&', u'ns3::Hash::Hash32Function_ptr&')
    typehandlers.add_type_alias(u'uint64_t ( * ) ( char const *, std::size_t const )', u'ns3::Hash::Hash64Function_ptr')
    typehandlers.add_type_alias(u'uint64_t ( * ) ( char const *, std::size_t const )*', u'ns3::Hash::Hash64Function_ptr*')
    typehandlers.add_type_alias(u'uint64_t ( * ) ( char const *, std::size_t const )&', u'ns3::Hash::Hash64Function_ptr&')
    unction')
    register_types_ns3_Hash_Function(nested_module)
def register_types_ns3_Hash_Function(module):
    # Registers the concrete hash-function classes (Fnv1a, Hash32, Hash64,
    # Murmur3), each derived from ns3::Hash::Implementation.
    # NOTE(review): the four add_class(...) lines below are left-truncated in
    # this copy -- only their trailing parent= arguments survive.  Regenerate
    # from the pybindgen modulegen script; do not run as-is.
    root_module = module.get_root()
    ent=root_module['ns3::Hash::Implementation'])
    oot_module['ns3::Hash::Implementation'])
    oot_module['ns3::Hash::Implementation'])
    root_module['ns3::Hash::Implementation'])
def register_types_ns3_TracedValueCallback(module):
    """Register typedef aliases for the ``ns3::TracedValueCallback`` namespace.

    Aliases the ``TracedValueCallback::Time`` callback signature in value,
    pointer and reference form, in that order, exactly as the generated
    registrar does.
    """
    root_module = module.get_root()
    signature = u'void ( * ) ( ns3::Time, ns3::Time )'
    target = u'ns3::TracedValueCallback::Time'
    for decorator in (u'', u'*', u'&'):
        typehandlers.add_type_alias(signature + decorator, target + decorator)
def register_methods(root_module):
    """Register the method bindings for every class wrapped by this module.

    Dispatches to one generated per-class registrar per wrapped type, in the
    fixed order produced by the pybindgen modulegen script.  This file is
    machine-generated; do not reorder or hand-edit the calls -- regenerate
    instead.
    """
    register_Ns3Address_methods(root_module, root_module['ns3::Address'])
    register_Ns3AttributeConstructionList_methods(root_module, root_module['ns3::AttributeConstructionList'])
    register_Ns3AttributeConstructionListItem_methods(root_module, root_module['ns3::AttributeConstructionList::Item'])
    register_Ns3Buffer_methods(root_module, root_module['ns3::Buffer'])
    register_Ns3BufferIterator_methods(root_module, root_module['ns3::Buffer::Iterator'])
    register_Ns3ByteTagIterator_methods(root_module, root_module['ns3::ByteTagIterator'])
    register_Ns3ByteTagIteratorItem_methods(root_module, root_module['ns3::ByteTagIterator::Item'])
    register_Ns3ByteTagList_methods(root_module, root_module['ns3::ByteTagList'])
    register_Ns3ByteTagListIterator_methods(root_module, root_module['ns3::ByteTagList::Iterator'])
    register_Ns3ByteTagListIteratorItem_methods(root_module, root_module['ns3::ByteTagList::Iterator::Item'])
    register_Ns3CallbackBase_methods(root_module, root_module['ns3::CallbackBase'])
    register_Ns3DefaultDeleter__Ns3AttributeAccessor_methods(root_module, root_module['ns3::DefaultDeleter< ns3::AttributeAccessor >'])
    register_Ns3DefaultDeleter__Ns3AttributeChecker_methods(root_module, root_module['ns3::DefaultDeleter< ns3::AttributeChecker >'])
    register_Ns3DefaultDeleter__Ns3AttributeValue_methods(root_module, root_module['ns3::DefaultDeleter< ns3::AttributeValue >'])
    register_Ns3DefaultDeleter__Ns3CallbackImplBase_methods(root_module, root_module['ns3::DefaultDeleter< ns3::CallbackImplBase >'])
    register_Ns3DefaultDeleter__Ns3EventImpl_methods(root_module, root_module['ns3::DefaultDeleter< ns3::EventImpl >'])
    register_Ns3DefaultDeleter__Ns3FlowClassifier_methods(root_module, root_module['ns3::DefaultDeleter< ns3::FlowClassifier >'])
    register_Ns3DefaultDeleter__Ns3HashImplementation_methods(root_module, root_module['ns3::DefaultDeleter< ns3::Hash::Implementation >'])
    register_Ns3DefaultDeleter__Ns3NixVector_methods(root_module, root_module['ns3::DefaultDeleter< ns3::NixVector >'])
    register_Ns3DefaultDeleter__Ns3Packet_methods(root_module, root_module['ns3::DefaultDeleter< ns3::Packet >'])
    register_Ns3DefaultDeleter__Ns3TraceSourceAccessor_methods(root_module, root_module['ns3::DefaultDeleter< ns3::TraceSourceAccessor >'])
    register_Ns3EventId_methods(root_module, root_module['ns3::EventId'])
    register_Ns3FlowMonitorHelper_methods(root_module, root_module['ns3::FlowMonitorHelper'])
    register_Ns3Hasher_methods(root_module, root_module['ns3::Hasher'])
    register_Ns3Histogram_methods(root_module, root_module['ns3::Histogram'])
    register_Ns3Inet6SocketAddress_methods(root_module, root_module['ns3::Inet6SocketAddress'])
    register_Ns3InetSocketAddress_methods(root_module, root_module['ns3::InetSocketAddress'])
    register_Ns3Ipv4Address_methods(root_module, root_module['ns3::Ipv4Address'])
    register_Ns3Ipv4InterfaceAddress_methods(root_module, root_module['ns3::Ipv4InterfaceAddress'])
    register_Ns3Ipv4Mask_methods(root_module, root_module['ns3::Ipv4Mask'])
    register_Ns3Ipv6Address_methods(root_module, root_module['ns3::Ipv6Address'])
    register_Ns3Ipv6InterfaceAddress_methods(root_module, root_module['ns3::Ipv6InterfaceAddress'])
    register_Ns3Ipv6Prefix_methods(root_module, root_module['ns3::Ipv6Prefix'])
    register_Ns3Mac48Address_methods(root_module, root_module['ns3::Mac48Address'])
    register_Ns3Mac8Address_methods(root_module, root_module['ns3::Mac8Address'])
    register_Ns3NodeContainer_methods(root_module, root_module['ns3::NodeContainer'])
    register_Ns3ObjectBase_methods(root_module, root_module['ns3::ObjectBase'])
    register_Ns3ObjectDeleter_methods(root_module, root_module['ns3::ObjectDeleter'])
    register_Ns3ObjectFactory_methods(root_module, root_module['ns3::ObjectFactory'])
    register_Ns3PacketMetadata_methods(root_module, root_module['ns3::PacketMetadata'])
    register_Ns3PacketMetadataItem_methods(root_module, root_module['ns3::PacketMetadata::Item'])
    register_Ns3PacketMetadataItemIterator_methods(root_module, root_module['ns3::PacketMetadata::ItemIterator'])
    register_Ns3PacketTagIterator_methods(root_module, root_module['ns3::PacketTagIterator'])
    register_Ns3PacketTagIteratorItem_methods(root_module, root_module['ns3::PacketTagIterator::Item'])
    register_Ns3PacketTagList_methods(root_module, root_module['ns3::PacketTagList'])
    register_Ns3PacketTagListTagData_methods(root_module, root_module['ns3::PacketTagList::TagData'])
    register_Ns3SimpleRefCount__Ns3Object_Ns3ObjectBase_Ns3ObjectDeleter_methods(root_module, root_module['ns3::SimpleRefCount< ns3::Object, ns3::ObjectBase, ns3::ObjectDeleter >'])
    register_Ns3Simulator_methods(root_module, root_module['ns3::Simulator'])
    register_Ns3Tag_methods(root_module, root_module['ns3::Tag'])
    register_Ns3TagBuffer_methods(root_module, root_module['ns3::TagBuffer'])
    register_Ns3TimeWithUnit_methods(root_module, root_module['ns3::TimeWithUnit'])
    register_Ns3TypeId_methods(root_module, root_module['ns3::TypeId'])
    register_Ns3TypeIdAttributeInformation_methods(root_module, root_module['ns3::TypeId::AttributeInformation'])
    register_Ns3TypeIdTraceSourceInformation_methods(root_module, root_module['ns3::TypeId::TraceSourceInformation'])
    register_Ns3Empty_methods(root_module, root_module['ns3::empty'])
    register_Ns3Int64x64_t_methods(root_module, root_module['ns3::int64x64_t'])
    register_Ns3Chunk_methods(root_module, root_module['ns3::Chunk'])
    register_Ns3Header_methods(root_module, root_module['ns3::Header'])
    register_Ns3Ipv4Header_methods(root_module, root_module['ns3::Ipv4Header'])
    register_Ns3Ipv6Header_methods(root_module, root_module['ns3::Ipv6Header'])
    register_Ns3Object_methods(root_module, root_module['ns3::Object'])
    register_Ns3ObjectAggregateIterator_methods(root_module, root_module['ns3::Object::AggregateIterator'])
    register_Ns3SimpleRefCount__Ns3AttributeAccessor_Ns3Empty_Ns3DefaultDeleter__lt__ns3AttributeAccessor__gt___methods(root_module, root_module['ns3::SimpleRefCount< ns3::AttributeAccessor, ns3::empty, ns3::DefaultDeleter<ns3::AttributeAccessor> >'])
    register_Ns3SimpleRefCount__Ns3AttributeChecker_Ns3Empty_Ns3DefaultDeleter__lt__ns3AttributeChecker__gt___methods(root_module, root_module['ns3::SimpleRefCount< ns3::AttributeChecker, ns3::empty, ns3::DefaultDeleter<ns3::AttributeChecker> >'])
    register_Ns3SimpleRefCount__Ns3AttributeValue_Ns3Empty_Ns3DefaultDeleter__lt__ns3AttributeValue__gt___methods(root_module, root_module['ns3::SimpleRefCount< ns3::AttributeValue, ns3::empty, ns3::DefaultDeleter<ns3::AttributeValue> >'])
    register_Ns3SimpleRefCount__Ns3CallbackImplBase_Ns3Empty_Ns3DefaultDeleter__lt__ns3CallbackImplBase__gt___methods(root_module, root_module['ns3::SimpleRefCount< ns3::CallbackImplBase, ns3::empty, ns3::DefaultDeleter<ns3::CallbackImplBase> >'])
    register_Ns3SimpleRefCount__Ns3EventImpl_Ns3Empty_Ns3DefaultDeleter__lt__ns3EventImpl__gt___methods(root_module, root_module['ns3::SimpleRefCount< ns3::EventImpl, ns3::empty, ns3::DefaultDeleter<ns3::EventImpl> >'])
    register_Ns3SimpleRefCount__Ns3FlowClassifier_Ns3Empty_Ns3DefaultDeleter__lt__ns3FlowClassifier__gt___methods(root_module, root_module['ns3::SimpleRefCount< ns3::FlowClassifier, ns3::empty, ns3::DefaultDeleter<ns3::FlowClassifier> >'])
    register_Ns3SimpleRefCount__Ns3HashImplementation_Ns3Empty_Ns3DefaultDeleter__lt__ns3HashImplementation__gt___methods(root_module, root_module['ns3::SimpleRefCount< ns3::Hash::Implementation, ns3::empty, ns3::DefaultDeleter<ns3::Hash::Implementation> >'])
    register_Ns3SimpleRefCount__Ns3Ipv4MulticastRoute_Ns3Empty_Ns3DefaultDeleter__lt__ns3Ipv4MulticastRoute__gt___methods(root_module, root_module['ns3::SimpleRefCount< ns3::Ipv4MulticastRoute, ns3::empty, ns3::DefaultDeleter<ns3::Ipv4MulticastRoute> >'])
    register_Ns3SimpleRefCount__Ns3Ipv4Route_Ns3Empty_Ns3DefaultDeleter__lt__ns3Ipv4Route__gt___methods(root_module, root_module['ns3::SimpleRefCount< ns3::Ipv4Route, ns3::empty, ns3::DefaultDeleter<ns3::Ipv4Route> >'])
    register_Ns3SimpleRefCount__Ns3NixVector_Ns3Empty_Ns3DefaultDeleter__lt__ns3NixVector__gt___methods(root_module, root_module['ns3::SimpleRefCount< ns3::NixVector, ns3::empty, ns3::DefaultDeleter<ns3::NixVector> >'])
    register_Ns3SimpleRefCount__Ns3OutputStreamWrapper_Ns3Empty_Ns3DefaultDeleter__lt__ns3OutputStreamWrapper__gt___methods(root_module, root_module['ns3::SimpleRefCount< ns3::OutputStreamWrapper, ns3::empty, ns3::DefaultDeleter<ns3::OutputStreamWrapper> >'])
    register_Ns3SimpleRefCount__Ns3Packet_Ns3Empty_Ns3DefaultDeleter__lt__ns3Packet__gt___methods(root_module, root_module['ns3::SimpleRefCount< ns3::Packet, ns3::empty, ns3::DefaultDeleter<ns3::Packet> >'])
    register_Ns3SimpleRefCount__Ns3QueueItem_Ns3Empty_Ns3DefaultDeleter__lt__ns3QueueItem__gt___methods(root_module, root_module['ns3::SimpleRefCount< ns3::QueueItem, ns3::empty, ns3::DefaultDeleter<ns3::QueueItem> >'])
    register_Ns3SimpleRefCount__Ns3TraceSourceAccessor_Ns3Empty_Ns3DefaultDeleter__lt__ns3TraceSourceAccessor__gt___methods(root_module, root_module['ns3::SimpleRefCount< ns3::TraceSourceAccessor, ns3::empty, ns3::DefaultDeleter<ns3::TraceSourceAccessor> >'])
    register_Ns3Socket_methods(root_module, root_module['ns3::Socket'])
    register_Ns3SocketIpTosTag_methods(root_module, root_module['ns3::SocketIpTosTag'])
    register_Ns3SocketIpTtlTag_methods(root_module, root_module['ns3::SocketIpTtlTag'])
    register_Ns3SocketIpv6HopLimitTag_methods(root_module, root_module['ns3::SocketIpv6HopLimitTag'])
    register_Ns3SocketIpv6TclassTag_methods(root_module, root_module['ns3::SocketIpv6TclassTag'])
    register_Ns3SocketPriorityTag_methods(root_module, root_module['ns3::SocketPriorityTag'])
    register_Ns3SocketSetDontFragmentTag_methods(root_module, root_module['ns3::SocketSetDontFragmentTag'])
    register_Ns3Time_methods(root_module, root_module['ns3::Time'])
    register_Ns3TraceSourceAccessor_methods(root_module, root_module['ns3::TraceSourceAccessor'])
    register_Ns3Trailer_methods(root_module, root_module['ns3::Trailer'])
    register_Ns3AttributeAccessor_methods(root_module, root_module['ns3::AttributeAccessor'])
    register_Ns3AttributeChecker_methods(root_module, root_module['ns3::AttributeChecker'])
    register_Ns3AttributeValue_methods(root_module, root_module['ns3::AttributeValue'])
    register_Ns3CallbackChecker_methods(root_module, root_module['ns3::CallbackChecker'])
    register_Ns3CallbackImplBase_methods(root_module, root_module['ns3::CallbackImplBase'])
    register_Ns3CallbackValue_methods(root_module, root_module['ns3::CallbackValue'])
    register_Ns3EmptyAttributeAccessor_methods(root_module, root_module['ns3::EmptyAttributeAccessor'])
    register_Ns3EmptyAttributeChecker_methods(root_module, root_module['ns3::EmptyAttributeChecker'])
    register_Ns3EmptyAttributeValue_methods(root_module, root_module['ns3::EmptyAttributeValue'])
    register_Ns3EventImpl_methods(root_module, root_module['ns3::EventImpl'])
    register_Ns3FlowClassifier_methods(root_module, root_module['ns3::FlowClassifier'])
    register_Ns3FlowMonitor_methods(root_module, root_module['ns3::FlowMonitor'])
    register_Ns3FlowMonitorFlowStats_methods(root_module, root_module['ns3::FlowMonitor::FlowStats'])
    register_Ns3FlowProbe_methods(root_module, root_module['ns3::FlowProbe'])
    register_Ns3FlowProbeFlowStats_methods(root_module, root_module['ns3::FlowProbe::FlowStats'])
    register_Ns3Ipv4_methods(root_module, root_module['ns3::Ipv4'])
    register_Ns3Ipv4AddressChecker_methods(root_module, root_module['ns3::Ipv4AddressChecker'])
    register_Ns3Ipv4AddressValue_methods(root_module, root_module['ns3::Ipv4AddressValue'])
    register_Ns3Ipv4FlowClassifier_methods(root_module, root_module['ns3::Ipv4FlowClassifier'])
    register_Ns3Ipv4FlowClassifierFiveTuple_methods(root_module, root_module['ns3::Ipv4FlowClassifier::FiveTuple'])
    register_Ns3Ipv4FlowClassifierSortByCount_methods(root_module, root_module['ns3::Ipv4FlowClassifier::SortByCount'])
    register_Ns3Ipv4FlowProbe_methods(root_module, root_module['ns3::Ipv4FlowProbe'])
    register_Ns3Ipv4L3Protocol_methods(root_module, root_module['ns3::Ipv4L3Protocol'])
    register_Ns3Ipv4MaskChecker_methods(root_module, root_module['ns3::Ipv4MaskChecker'])
    register_Ns3Ipv4MaskValue_methods(root_module, root_module['ns3::Ipv4MaskValue'])
    register_Ns3Ipv4MulticastRoute_methods(root_module, root_module['ns3::Ipv4MulticastRoute'])
    register_Ns3Ipv4Route_methods(root_module, root_module['ns3::Ipv4Route'])
    register_Ns3Ipv4RoutingProtocol_methods(root_module, root_module['ns3::Ipv4RoutingProtocol'])
    register_Ns3Ipv6_methods(root_module, root_module['ns3::Ipv6'])
    register_Ns3Ipv6AddressChecker_methods(root_module, root_module['ns3::Ipv6AddressChecker'])
    register_Ns3Ipv6AddressValue_methods(root_module, root_module['ns3::Ipv6AddressValue'])
    register_Ns3Ipv6FlowClassifier_methods(root_module, root_module['ns3::Ipv6FlowClassifier'])
    register_Ns3Ipv6FlowClassifierFiveTuple_methods(root_module, root_module['ns3::Ipv6FlowClassifier::FiveTuple'])
    register_Ns3Ipv6FlowClassifierSortByCount_methods(root_module, root_module['ns3::Ipv6FlowClassifier::SortByCount'])
    register_Ns3Ipv6FlowProbe_methods(root_module, root_module['ns3::Ipv6FlowProbe'])
    register_Ns3Ipv6L3Protocol_methods(root_module, root_module['ns3::Ipv6L3Protocol'])
    register_Ns3Ipv6PmtuCache_methods(root_module, root_module['ns3::Ipv6PmtuCache'])
    register_Ns3Ipv6PrefixChecker_methods(root_module, root_module['ns3::Ipv6PrefixChecker'])
    register_Ns3Ipv6PrefixValue_methods(root_module, root_module['ns3::Ipv6PrefixValue'])
    register_Ns3Mac48AddressChecker_methods(root_module, root_module['ns3::Mac48AddressChecker'])
    register_Ns3Mac48AddressValue_methods(root_module, root_module['ns3::Mac48AddressValue'])
    register_Ns3NetDevice_methods(root_module, root_module['ns3::NetDevice'])
    register_Ns3NixVector_methods(root_module, root_module['ns3::NixVector'])
    register_Ns3Node_methods(root_module, root_module['ns3::Node'])
    register_Ns3ObjectFactoryChecker_methods(root_module, root_module['ns3::ObjectFactoryChecker'])
    register_Ns3ObjectFactoryValue_methods(root_module, root_module['ns3::ObjectFactoryValue'])
    register_Ns3OutputStreamWrapper_methods(root_module, root_module['ns3::OutputStreamWrapper'])
    register_Ns3Packet_methods(root_module, root_module['ns3::Packet'])
    register_Ns3QueueItem_methods(root_module, root_module['ns3::QueueItem'])
    register_Ns3TimeValue_methods(root_module, root_module['ns3::TimeValue'])
    register_Ns3TypeIdChecker_methods(root_module, root_module['ns3::TypeIdChecker'])
    register_Ns3TypeIdValue_methods(root_module, root_module['ns3::TypeIdValue'])
    register_Ns3AddressChecker_methods(root_module, root_module['ns3::AddressChecker'])
    register_Ns3AddressValue_methods(root_module, root_module['ns3::AddressValue'])
    register_Ns3CallbackImpl__Bool_Ns3Ptr__lt__ns3Socket__gt___Const_ns3Address___amp___Ns3Empty_Ns3Empty_Ns3Empty_Ns3Empty_Ns3Empty_Ns3Empty_Ns3Empty_methods(root_module, root_module['ns3::CallbackImpl< bool, ns3::Ptr<ns3::Socket>, const ns3::Address &, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >'])
    register_Ns3CallbackImpl__Ns3ObjectBase___star___Ns3Empty_Ns3Empty_Ns3Empty_Ns3Empty_Ns3Empty_Ns3Empty_Ns3Empty_Ns3Empty_Ns3Empty_methods(root_module, root_module['ns3::CallbackImpl< ns3::ObjectBase *, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >'])
    register_Ns3CallbackImpl__Void_Const_ns3Ipv4Header___amp___Ns3Ptr__lt__const_ns3Packet__gt___Ns3Ipv4L3ProtocolDropReason_Ns3Ptr__lt__ns3Ipv4__gt___Unsigned_int_Ns3Empty_Ns3Empty_Ns3Empty_Ns3Empty_methods(root_module, root_module['ns3::CallbackImpl< void, const ns3::Ipv4Header &, ns3::Ptr<const ns3::Packet>, ns3::Ipv4L3Protocol::DropReason, ns3::Ptr<ns3::Ipv4>, unsigned int, ns3::empty, ns3::empty, ns3::empty, ns3::empty >'])
    register_Ns3CallbackImpl__Void_Const_ns3Ipv4Header___amp___Ns3Ptr__lt__const_ns3Packet__gt___Unsigned_int_Ns3Empty_Ns3Empty_Ns3Empty_Ns3Empty_Ns3Empty_Ns3Empty_methods(root_module, root_module['ns3::CallbackImpl< void, const ns3::Ipv4Header &, ns3::Ptr<const ns3::Packet>, unsigned int, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >'])
    register_Ns3CallbackImpl__Void_Const_ns3Ipv6Header___amp___Ns3Ptr__lt__const_ns3Packet__gt___Ns3Ipv6L3ProtocolDropReason_Ns3Ptr__lt__ns3Ipv6__gt___Unsigned_int_Ns3Empty_Ns3Empty_Ns3Empty_Ns3Empty_methods(root_module, root_module['ns3::CallbackImpl< void, const ns3::Ipv6Header &, ns3::Ptr<const ns3::Packet>, ns3::Ipv6L3Protocol::DropReason, ns3::Ptr<ns3::Ipv6>, unsigned int, ns3::empty, ns3::empty, ns3::empty, ns3::empty >'])
    register_Ns3CallbackImpl__Void_Const_ns3Ipv6Header___amp___Ns3Ptr__lt__const_ns3Packet__gt___Unsigned_int_Ns3Empty_Ns3Empty_Ns3Empty_Ns3Empty_Ns3Empty_Ns3Empty_methods(root_module, root_module['ns3::CallbackImpl< void, const ns3::Ipv6Header &, ns3::Ptr<const ns3::Packet>, unsigned int, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >'])
    register_Ns3CallbackImpl__Void_Ns3Ptr__lt__const_ns3Packet__gt___Ns3Ptr__lt__ns3Ipv4__gt___Unsigned_int_Ns3Empty_Ns3Empty_Ns3Empty_Ns3Empty_Ns3Empty_Ns3Empty_methods(root_module, root_module['ns3::CallbackImpl< void, ns3::Ptr<const ns3::Packet>, ns3::Ptr<ns3::Ipv4>, unsigned int, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >'])
    register_Ns3CallbackImpl__Void_Ns3Ptr__lt__const_ns3Packet__gt___Ns3Ptr__lt__ns3Ipv6__gt___Unsigned_int_Ns3Empty_Ns3Empty_Ns3Empty_Ns3Empty_Ns3Empty_Ns3Empty_methods(root_module, root_module['ns3::CallbackImpl< void, ns3::Ptr<const ns3::Packet>, ns3::Ptr<ns3::Ipv6>, unsigned int, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >'])
    register_Ns3CallbackImpl__Void_Ns3Ptr__lt__ns3NetDevice__gt___Ns3Ptr__lt__const_ns3Packet__gt___Unsigned_short_Const_ns3Address___amp___Const_ns3Address___amp___Ns3NetDevicePacketType_Ns3Empty_Ns3Empty_Ns3Empty_methods(root_module, root_module['ns3::CallbackImpl< void, ns3::Ptr<ns3::NetDevice>, ns3::Ptr<const ns3::Packet>, unsigned short, const ns3::Address &, const ns3::Address &, ns3::NetDevice::PacketType, ns3::empty, ns3::empty, ns3::empty >'])
    register_Ns3CallbackImpl__Void_Ns3Ptr__lt__ns3NetDevice__gt___Ns3Empty_Ns3Empty_Ns3Empty_Ns3Empty_Ns3Empty_Ns3Empty_Ns3Empty_Ns3Empty_methods(root_module, root_module['ns3::CallbackImpl< void, ns3::Ptr<ns3::NetDevice>, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >'])
    register_Ns3CallbackImpl__Void_Ns3Ptr__lt__ns3Socket__gt___Const_ns3Address___amp___Ns3Empty_Ns3Empty_Ns3Empty_Ns3Empty_Ns3Empty_Ns3Empty_Ns3Empty_methods(root_module, root_module['ns3::CallbackImpl< void, ns3::Ptr<ns3::Socket>, const ns3::Address &, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >'])
    register_Ns3CallbackImpl__Void_Ns3Ptr__lt__ns3Socket__gt___Ns3Empty_Ns3Empty_Ns3Empty_Ns3Empty_Ns3Empty_Ns3Empty_Ns3Empty_Ns3Empty_methods(root_module, root_module['ns3::CallbackImpl< void, ns3::Ptr<ns3::Socket>, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >'])
    register_Ns3CallbackImpl__Void_Ns3Ptr__lt__ns3Socket__gt___Unsigned_int_Ns3Empty_Ns3Empty_Ns3Empty_Ns3Empty_Ns3Empty_Ns3Empty_Ns3Empty_methods(root_module, root_module['ns3::CallbackImpl< void, ns3::Ptr<ns3::Socket>, unsigned int, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >'])
    register_Ns3QueueDiscItem_methods(root_module, root_module['ns3::QueueDiscItem'])
    register_Ns3HashImplementation_methods(root_module, root_module['ns3::Hash::Implementation'])
    register_Ns3HashFunctionFnv1a_methods(root_module, root_module['ns3::Hash::Function::Fnv1a'])
    register_Ns3HashFunctionHash32_methods(root_module, root_module['ns3::Hash::Function::Hash32'])
    register_Ns3HashFunctionHash64_methods(root_module, root_module['ns3::Hash::Function::Hash64'])
    register_Ns3HashFunctionMurmur3_methods(root_module, root_module['ns3::Hash::Function::Murmur3'])
    return
def register_Ns3Address_methods(root_module, cls):
cls.add_binary_comparison_operator('==')
cls.add_binary_comparison_operator('!=')
cls.add_binary_comparison_operator('<')
cls.add_output_stream_operator()
), param('uint8_t', 'len')])
am('uint8_t', 'len')],
is_const=True)
), param('uint8_t', 'len')])
buffer')],
is_const=True)
er', 'buffer')])
is_const=True)
is_const=True)
is_const=True)
'type')],
is_const=True)
is_static=True)
buffer')],
is_const=True)
return
def register_Ns3AttributeConstructionList_methods(root_module, cls):
('uint32_t', 'length')],
is_const=True)
), param('uint32_t', 'size')])
is_const=True)
is_const=True)
is_const=True)
is_const=True)
int32_t', 'end')])
t32_t', 'start')])
'uint32_t', 'maxSize')],
is_const=True)
return
def register_Ns3BufferIterator_methods(root_module, cls):
is_const=True)
is_const=True)
is_const=True)
is_const=True)
elta')])
elta')])
uint32_t', 'size')])
uint32_t', 'size')])
uint32_t', 'size')])
:Iterator', 'end')])
data')])
data')])
data')])
data')])
data')])
data')])
data')])
data')])
data')])
data')])
'uint32_t', 'len')])
return
def register_Ns3ByteTagIterator_methods(root_module, cls):
def register_Ns3ByteTagIteratorItem_methods(root_module, cls):
const=True)
is_const=True)
is_const=True)
return
def register_Ns3ByteTagList_methods(root_module, cls):
ffsetEnd')],
is_const=True)
eturn
def register_Ns3ByteTagListIterator_methods(root_module, cls):
_Ns3ByteTagListIteratorItem_methods(root_module, cls):
MonitorHelper_methods(root_module, cls):
aram('std::size_t const', 'size')])
::string const', 's')])
[])
return
def register_Ns3Histogram_methods(root_module, cls):
string', 'elementName')],
is_const=True)
dth')])
return
def register_Ns3Inet6SocketAddress_methods(root_module, cls):
is_const=True)
is_const=True)
,
is_const=True)
is_const=True)
is_const=True)
is_static=True)
is_const=True)
,
is_const=True)
,
is_const=True)
,
is_const=True)
ess')])
ess')])
return
def register_Ns3Ipv4InterfaceAddress_methods(root_module, cls):
cls.add_output_stream_operator()
cls.add_binary_comparison_operator('==')
cls.add_binary_comparison_operator('!=')
is_static=True)
')],
is_const=True)
Address', 'b')],
is_const=True)
')],
is_const=True)
'mask')])
return
def register_Ns3Ipv6Address_methods(root_module, cls):
cls.add_output_stream_operator()
cls.add_binary_comparison_operator('==')
cls.add_binary_comparison_operator('!=')
cls.add_binary_comparison_operator('<')
ic=True)
,
is_const=True)
is_const=True)
is_static=True)
is_static=True)
is_static=True)
deprecated=True, is_const=True)
is_const=True)
is_const=True)
is_const=True)
is_const=True)
,
is_const=True)
is_const=True)
is_const=True)
is_const=True)
is_const=True)
is_static=True)
is_const=True)
is_const=True)
'prefix')],
is_static=True)
'prefix')],
is_static=True)
'prefix')],
is_static=True)
'prefix')],
is_static=True)
is_static=True)
is_static=True)
is_static=True)
is_static=True)
is_static=True)
is_static=True)
,
is_const=True)
,
is_const=True)
ess')])
ess')])
return
def register_Ns3Ipv6InterfaceAddress_methods(root_module, cls):
cls.add_output_stream_operator()
cls.add_binary_comparison_operator('==')
cls.add_binary_comparison_operator('!=')
,
is_const=True)
return
def register_Ns3Mac48Address_methods(root_module, cls):
cls.add_binary_comparison_operator('==')
cls.add_binary_comparison_operator('!=')
cls.add_binary_comparison_operator('<')
cls.add_output_stream_operator()
True)
is_static=True)
is_static=True)
is_static=True)
is_static=True)
is_const=True)
is_const=True)
is_static=True)
return
def register_Ns3Mac8Address_methods(root_module, cls):
cls.add_binary_comparison_operator('<')
cls.add_binary_comparison_operator('==')
cls.add_binary_comparison_operator('!=')
cls.add_output_stream_operator()
is_static=True)
is_static=True)
return
def register_Ns3NodeContainer_methods(root_module, cls):
rn
def register_Ns3ObjectBase_methods(root_module, cls):
],
is_const=True)
is_pure_virtual=True, is_const=True, is_virtual=True)
is_static=True)
lue const &', 'value')])
lue const &', 'value')])
'ns3::CallbackBase const &', 'cb')])
ckBase const &', 'cb')])
'ns3::CallbackBase const &', 'cb')])
ckBase const &', 'cb')])
tributes')],
visibility='protected')
visibility='protected', is_virtual=True)
return
def register_Ns3ObjectDeleter_methods(root_module, cls):
ctFactory_methods(root_module, cls):
cls.add_output_stream_operator()
er_Ns3PacketMetadata_methods(root_module, cls):
end')],
is_const=True)
t', 'size')])
is_static=True)
is_static=True)
is_const=True)
is_const=True)
)
)
t', 'size')])
t', 'size')])
ize')],
is_const=True)
return
def register_Ns3PacketMetadataItem_methods(root_module, cls):
turn
def register_Ns3PacketTagList_methods(root_module, cls):
return
def register_Ns3PacketTagListTagData_methods(root_module, cls):
is_static=True)
is_static=True)
is_static=True)
is_static=True)
'id')],
is_static=True)
is_static=True)
is_static=True)
'id')],
is_static=True)
'impl')],
is_static=True)
tory')],
is_static=True)
is_static=True)
elay')],
is_static=True)
return
def register_Ns3Tag_methods(root_module, cls):
is_pure_virtual=True, is_virtual=True)
[],
is_pure_virtual=True, is_const=True, is_virtual=True)
[],
is_static=True)
stream &', 'os')],
is_pure_virtual=True, is_const=True, is_virtual=True)
TagBuffer', 'i')],
is_pure_virtual=True, is_const=True, is_virtual=True)
return
def register_Ns3TagBuffer_methods(root_module, cls):
, 'trim')])
m('uint32_t', 'size')])
le', 'v')])
_t', 'v')])
_t', 'v')])
_t', 'v')])
_t', 'v')])
return
def register_Ns3TimeWithUnit_methods(root_module, cls):
cls.add_output_stream_operator()
le, cls):
cls.add_binary_comparison_operator('==')
cls.add_binary_comparison_operator('!=')
cls.add_output_stream_operator()
cls.add_binary_comparison_operator('<')
am('std::string const &', 'supportMsg', default_value='""')])
el', 'supportLevel', default_value='::ns3::TypeId::SupportLevel::SUPPORTED'), param('std::string const &', 'supportMsg', default_value='""')])
3::TraceSourceAccessor const >', 'accessor')],
deprecated=True)
lt_value='::ns3::TypeId::SupportLevel::SUPPORTED'), param('std::string const &', 'supportMsg', default_value='""')])
ize_t', 'i')],
is_const=True)
ize_t', 'i')],
is_const=True)
],
is_const=True)
[],
is_const=True)
],
is_const=True)
],
is_const=True)
],
is_const=True)
],
is_const=True)
16_t', 'i')],
is_static=True)
,
is_static=True)
],
is_const=True)
ize_t', 'i')],
is_const=True)
],
is_const=True)
],
is_const=True)
],
is_const=True)
],
is_const=True)
[])
d', 'other')],
is_const=True)
eInformation *', 'info', transfer_ownership=False)],
is_const=True)
],
is_static=True)
d *', 'tid')],
is_static=True)
g', 'name')],
is_static=True)
ring', 'name')],
is_const=True)
rceInformation *', 'info')],
is_const=True)
],
is_const=True)
eValue const >', 'initialValue')])
ing', 'groupName')])
3::TypeId', 'tid')])
::size_t', 'size')])
'uint16_t', 'uid')])
return
def register_Ns3TypeIdAttributeInformation_methods(root_module, cls):
t=False)
_const=False)
alse)
rn
def register_Ns3TypeIdTraceSourceInformation_methods(root_module, cls):
root_module['ns3::Time'], root_module['ns3::int64x64_t'], param('ns3::Time const &', u'right'))
cls.add_binary_numeric_operator('+', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('ns3::int64x64_t const &', u'right'))
cls.add_binary_numeric_operator('-', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('ns3::int64x64_t const &', u'right'))
cls.add_binary_numeric_operator('*', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('ns3::int64x64_t const &', u'right'))
cls.add_binary_numeric_operator('/', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('ns3::int64x64_t const &', u'right'))
cls.add_binary_comparison_operator('!=')
cls.add_binary_comparison_operator('<=')
cls.add_binary_comparison_operator('>=')
cls.add_output_stream_operator()
cls.add_binary_comparison_operator('==')
cls.add_binary_comparison_operator('<')
cls.add_binary_comparison_operator('>')
cls.add_inplace_numeric_operator('+=', param('ns3::int64x64_t const &', u'right'))
cls.add_inplace_numeric_operator('-=', param('ns3::int64x64_t const &', u'right'))
cls.add_inplace_numeric_operator('*=', param('ns3::int64x64_t const &', u'right'))
cls.add_inplace_numeric_operator('/=', param('ns3::int64x64_t const &', u'right'))
cls.add_unary_numeric_operator('-')
)
,
is_static=True)
am &', 'os')],
is_pure_virtual=True, is_const=True, is_virtual=True)
return
def register_Ns3Header_methods(root_module, cls):
cls.add_output_stream_operator()
=True, is_virtual=True)
is_pure_virtual=True, is_const=True, is_virtual=True)
is_static=True)
&', 'os')],
is_pure_virtual=True, is_const=True, is_virtual=True)
, 'start')],
is_pure_virtual=True, is_const=True, is_virtual=True)
return
def register_Ns3Ipv4Header_methods(root_module, cls):
True)
],
is_const=True)
is_const=True)
is_const=True)
is_const=True)
is_const=True)
is_const=True)
is_const=True, is_virtual=True)
is_const=True)
is_const=True)
is_const=True, is_virtual=True)
is_const=True)
is_const=True)
is_const=True)
is_static=True)
is_const=True)
is_const=True)
is_const=True)
],
is_const=True, is_virtual=True)
],
is_const=True, is_virtual=True)
tion')])
dscp')])
'ecn')])
ytes')])
tion')])
size')])
'num')])
urce')])
'tos')])
'ttl')])
return
def register_Ns3Ipv6Header_methods(root_module, cls):
True)
],
is_const=True)
is_const=True)
is_const=True)
is_const=True)
is_const=True)
is_const=True)
is_const=True, is_virtual=True)
is_const=True)
is_const=True)
is_const=True, is_virtual=True)
is_const=True)
is_const=True)
is_static=True)
],
is_const=True, is_virtual=True)
],
is_const=True, is_virtual=True)
'dst')])
dscp')])
'ecn')])
flow')])
imit')])
next')])
'len')])
'src')])
ffic')])
return
def register_Ns3Object_methods(root_module, cls):
[])
[],
is_const=True)
[],
is_const=True, is_virtual=True)
],
is_static=True)
[])
[],
is_const=True)
visibility='protected')
[],
visibility='protected', is_virtual=True)
[],
visibility='protected', is_virtual=True)
[],
visibility='protected', is_virtual=True)
return
def register_Ns3ObjectAggregateIterator_methods(root_module, cls):
__lt__ns3AttributeAccessor__gt___methods(root_module, cls):
=True)
[],
is_virtual=True)
is_const=True)
is_const=True)
is_const=True)
is_const=True)
is_const=True)
[],
is_pure_virtual=True, is_virtual=True)
'), param('uint32_t', 'flags')],
is_pure_virtual=True, is_virtual=True)
[])
_t', 'size'), param('uint32_t', 'flags')])
'), param('ns3::Address &', 'fromAddress')],
is_pure_virtual=True, is_virtual=True)
&', 'fromAddress')])
t', 'flags'), param('ns3::Address &', 'fromAddress')])
'), param('uint32_t', 'flags')],
is_pure_virtual=True, is_virtual=True)
3::Packet >', 'p')])
_t', 'size'), param('uint32_t', 'flags')])
param('ns3::Address const &', 'toAddress')],
is_pure_virtual=True, is_virtual=True)
, 'flags'), param('ns3::Address const &', 'address')])
3::empty >', 'newConnectionCreated')])
allowBroadcast')],
is_pure_virtual=True, is_virtual=True)
:empty, ns3::empty >', 'errorClose')])
, ns3::empty >', 'connectionFailed')])
mpty >', 'dataSent')])
, 'ipv4RecvTos')])
, 'ipv4RecvTtl')])
nt8_t', 'ipTos')])
nt8_t', 'ipTtl')],
is_virtual=True)
', 'ipHopLimit')],
is_virtual=True)
v6RecvHopLimit')])
ipv6RecvTclass')])
nt', 'ipTclass')])
_t', 'priority')])
3::empty >', 'arg0')])
('bool', 'flag')])
:empty >', 'sendCb')])
[],
is_pure_virtual=True, is_virtual=True)
[],
is_pure_virtual=True, is_virtual=True)
[],
visibility='protected', is_virtual=True)
is_const=True, visibility='protected')
is_const=True, visibility='protected')
is_const=True, visibility='protected')
[],
visibility='protected')
onst &', 'from')],
visibility='protected')
[],
visibility='protected')
[],
visibility='protected')
nt32_t', 'size')],
visibility='protected')
[],
visibility='protected')
ns3::Address const &', 'from')],
visibility='protected')
[],
visibility='protected')
spaceAvailable')],
visibility='protected')
return
def register_Ns3SocketIpTosTag_methods(root_module, cls):
tual=True)
is_const=True, is_virtual=True)
is_const=True)
is_static=True)
')],
is_const=True, is_virtual=True)
')],
is_const=True, is_virtual=True)
, 'tos')])
return
def register_Ns3SocketIpTtlTag_methods(root_module, cls):
tual=True)
is_const=True, is_virtual=True)
is_const=True)
is_static=True)
')],
is_const=True, is_virtual=True)
')],
is_const=True, is_virtual=True)
, 'ttl')])
return
def register_Ns3SocketIpv6HopLimitTag_methods(root_module, cls):
is_const=True, is_virtual=True)
is_static=True)
is_const=True, is_virtual=True)
is_const=True, is_virtual=True)
)])
return
def register_Ns3SocketIpv6TclassTag_methods(root_module, cls):
t=True, is_virtual=True)
is_const=True)
is_static=True)
is_const=True, is_virtual=True)
is_const=True, is_virtual=True)
s')])
return
def register_Ns3SocketPriorityTag_methods(root_module, cls):
is_const=True)
is_const=True, is_virtual=True)
is_static=True)
,
is_const=True, is_virtual=True)
,
is_const=True, is_virtual=True)
ity')])
return
def register_Ns3SocketSetDontFragmentTag_methods(root_module, cls):
is_const=True)
is_const=True, is_virtual=True)
is_const=True, is_virtual=True)
return
def register_Ns3Time_methods(root_module, cls):
cls.add_binary_comparison_operator('==')
cls.add_binary_comparison_operator('!=')
cls.add_binary_comparison_operator('<=')
cls.add_binary_comparison_operator('>=')
cls.add_binary_comparison_operator('<')
cls.add_binary_comparison_operator('>')
cls.add_binary_numeric_operator('+', root_module['ns3::Time'], root_module['ns3::Time'], param('ns3::Time const &', u'right'))
cls.add_binary_numeric_operator('-', root_module['ns3::Time'], root_module['ns3::Time'], param('ns3::Time const &', u'right'))
cls.add_binary_numeric_operator('*', root_module['ns3::Time'], root_module['ns3::Time'], param('int64_t const &', u'right'))
cls.add_binary_numeric_operator('*', root_module['ns3::Time'], root_module['ns3::Time'], param('ns3::int64x64_t const &', u'right'))
cls.add_binary_numeric_operator('/', root_module['ns3::int64x64_t'], root_module['ns3::Time'], param('ns3::Time const &', u'right'))
cls.add_binary_numeric_operator('/', root_module['ns3::Time'], root_module['ns3::Time'], param('int64_t const &', u'right'))
cls.add_binary_numeric_operator('/', root_module['ns3::Time'], root_module['ns3::Time'], param('ns3::int64x64_t const &', u'right'))
cls.add_inplace_numeric_operator('+=', param('ns3::Time const &', u'right'))
cls.add_inplace_numeric_operator('-=', param('ns3::Time const &', u'right'))
cls.add_output_stream_operator()
'ns3::Time::Unit', 'unit')],
is_static=True)
'ns3::Time::Unit', 'unit')],
is_static=True)
'ns3::Time::Unit', 'unit')],
is_static=True)
[],
is_const=True)
[],
is_const=True)
[],
is_const=True)
[],
is_const=True)
[],
is_const=True)
[],
is_const=True)
[],
is_const=True)
[],
is_const=True)
[],
is_const=True)
[],
is_const=True)
[],
is_static=True)
[],
is_const=True)
[],
is_const=True)
[],
is_const=True)
[],
is_const=True)
[],
is_const=True)
[],
is_const=True)
[],
is_const=True)
[],
is_const=True)
[],
is_static=True)
[],
is_static=True)
'resolution')],
is_static=True)
[],
is_static=True)
:Unit', 'unit')],
is_const=True)
:Unit', 'unit')],
is_const=True)
:Unit', 'unit')],
is_const=True)
return
def register_Ns3TraceSourceAccessor_methods(root_module, cls):
&', 'cb')],
is_pure_virtual=True, is_const=True, is_virtual=True)
('ns3::CallbackBase const &', 'cb')],
is_pure_virtual=True, is_const=True, is_virtual=True)
ackBase const &', 'cb')],
is_pure_virtual=True, is_const=True, is_virtual=True)
return
def register_Ns3Trailer_methods(root_module, cls):
cls.add_output_stream_operator()
s_virtual=True)
:Buffer::Iterator', 'end')],
is_virtual=True)
is_pure_virtual=True, is_const=True, is_virtual=True)
is_static=True)
', 'os')],
is_pure_virtual=True, is_const=True, is_virtual=True)
'start')],
is_pure_virtual=True, is_const=True, is_virtual=True)
return
def register_Ns3AttributeAccessor_methods(root_module, cls):
is_pure_virtual=True, is_const=True, is_virtual=True)
is_pure_virtual=True, is_const=True, is_virtual=True)
s3::AttributeValue const &', 'value')],
is_pure_virtual=True, is_const=True, is_virtual=True)
return
def register_Ns3AttributeChecker_methods(root_module, cls):
,
is_pure_virtual=True, is_const=True, is_virtual=True)
is_pure_virtual=True, is_const=True, is_virtual=True)
')],
is_const=True)
is_pure_virtual=True, is_const=True, is_virtual=True)
is_pure_virtual=True, is_const=True, is_virtual=True)
is_pure_virtual=True, is_const=True, is_virtual=True)
return
def register_Ns3AttributeValue_methods(root_module, cls):
checker')],
is_pure_virtual=True, is_virtual=True)
er')],
is_pure_virtual=True, is_const=True, is_virtual=True)
return
def register_Ns3CallbackChecker_methods(root_module, cls):
ual=True, is_const=True, is_virtual=True)
],
is_static=True, visibility='protected')
is_static=True, visibility='protected', template_parameters=[u'ns3::ObjectBase*'])
is_static=True, visibility='protected', template_parameters=[u'void'])
is_static=True, visibility='protected', template_parameters=[u'ns3::Ptr<ns3::NetDevice> '])
is_static=True, visibility='protected', template_parameters=[u'ns3::Ptr<ns3::Packet const> '])
is_static=True, visibility='protected', template_parameters=[u'unsigned short'])
is_static=True, visibility='protected', template_parameters=[u'ns3::Address const&'])
is_static=True, visibility='protected', template_parameters=[u'ns3::NetDevice::PacketType'])
is_static=True, visibility='protected', template_parameters=[u'ns3::Ptr<ns3::Socket> '])
is_static=True, visibility='protected', template_parameters=[u'bool'])
is_static=True, visibility='protected', template_parameters=[u'unsigned int'])
is_static=True, visibility='protected', template_parameters=[u'ns3::Ipv4Header const&'])
is_static=True, visibility='protected', template_parameters=[u'ns3::Ptr<ns3::Ipv4> '])
is_static=True, visibility='protected', template_parameters=[u'ns3::Ipv4L3Protocol::DropReason'])
is_static=True, visibility='protected', template_parameters=[u'ns3::Ptr<ns3::Ipv6> '])
is_static=True, visibility='protected', template_parameters=[u'ns3::Ipv6Header const&'])
is_static=True, visibility='protected', template_parameters=[u'ns3::Ipv6L3Protocol::DropReason'])
return
def register_Ns3CallbackValue_methods(root_module, cls):
e)
cker')],
is_const=True, is_virtual=True)
', 'base')])
return
def register_Ns3EmptyAttributeAccessor_methods(root_module, cls):
t=True, is_virtual=True)
alue')],
is_const=True, is_virtual=True)
return
def register_Ns3EmptyAttributeChecker_methods(root_module, cls):
)
is_const=True, is_virtual=True)
is_const=True, is_virtual=True)
is_const=True, is_virtual=True)
is_const=True, is_virtual=True)
return
def register_Ns3EmptyAttributeValue_methods(root_module, cls):
visibility='private', is_virtual=True)
,
is_const=True, visibility='private', is_virtual=True)
return
def register_Ns3EventImpl_methods(root_module, cls):
sibility='protected', is_virtual=True)
return
def register_Ns3FlowClassifier_methods(root_module, cls):
virtual=True)
visibility='protected')
],
is_const=True, visibility='protected')
return
def register_Ns3FlowMonitor_methods(root_module, cls):
al=True)
is_static=True)
ize'), param('uint32_t', 'reasonCode')])
('uint32_t', 'packetSize')])
('uint32_t', 'packetSize')])
('uint32_t', 'packetSize')])
('bool', 'enableProbes')])
ams'), param('bool', 'enableProbes')])
('bool', 'enableProbes')])
])
visibility='protected', is_virtual=True)
visibility='protected', is_virtual=True)
return
def register_Ns3FlowMonitorFlowStats_methods(root_module, cls):
_virtual=True)
return
def register_Ns3FlowProbeFlowStats_methods(root_module, cls):
tDevice >', 'device')],
is_pure_virtual=True, is_virtual=True)
[],
is_pure_virtual=True, is_virtual=True)
:Socket >', 'socket')],
is_pure_virtual=True, is_virtual=True)
int32_t', 'addressIndex')],
is_pure_virtual=True, is_const=True, is_virtual=True)
', 'address')],
is_pure_virtual=True, is_const=True, is_virtual=True)
t >', 'device')],
is_pure_virtual=True, is_const=True, is_virtual=True)
('ns3::Ipv4Mask', 'mask')],
is_pure_virtual=True, is_const=True, is_virtual=True)
'interface')],
is_pure_virtual=True, is_const=True, is_virtual=True)
'interface')],
is_pure_virtual=True, is_const=True, is_virtual=True)
'interface')],
is_pure_virtual=True, is_const=True, is_virtual=True)
[],
is_pure_virtual=True, is_const=True, is_virtual=True)
nt32_t', 'interface')],
is_pure_virtual=True, is_virtual=True)
rotocolNumber')],
is_pure_virtual=True, is_const=True, is_virtual=True)
int32_t', 'interfaceIndex')],
is_pure_virtual=True, is_const=True, is_virtual=True)
[],
is_pure_virtual=True, is_const=True, is_virtual=True)
],
is_static=True)
tocol >', 'protocol')],
is_pure_virtual=True, is_virtual=True)
ram('uint32_t', 'interfaceIndex')],
is_pure_virtual=True, is_virtual=True)
param('uint32_t', 'iif')],
is_pure_virtual=True, is_const=True, is_virtual=True)
'interface')],
is_pure_virtual=True, is_const=True, is_virtual=True)
'interface')],
is_pure_virtual=True, is_const=True, is_virtual=True)
tocol >', 'protocol')],
is_pure_virtual=True, is_virtual=True)
ram('uint32_t', 'interfaceIndex')],
is_pure_virtual=True, is_virtual=True)
ram('uint32_t', 'addressIndex')],
is_pure_virtual=True, is_virtual=True)
('ns3::Ipv4Address', 'address')],
is_pure_virtual=True, is_virtual=True)
ceAddress::InterfaceAddressScope_e', 'scope')],
is_pure_virtual=True, is_virtual=True)
am('uint8_t', 'protocol'), param('ns3::Ptr< ns3::Ipv4Route >', 'route')],
is_pure_virtual=True, is_virtual=True)
), param('ns3::Ptr< ns3::Ipv4Route >', 'route')],
is_pure_virtual=True, is_virtual=True)
32_t', 'interface')],
is_pure_virtual=True, is_virtual=True)
terface'), param('bool', 'val')],
is_pure_virtual=True, is_virtual=True)
'), param('uint16_t', 'metric')],
is_pure_virtual=True, is_virtual=True)
', 'routingProtocol')],
is_pure_virtual=True, is_virtual=True)
32_t', 'interface')],
is_pure_virtual=True, is_virtual=True)
ram('ns3::Ipv4Address', 'dest')],
is_pure_virtual=True, is_virtual=True)
is_const=True)
[],
is_pure_virtual=True, is_const=True, visibility='private', is_virtual=True)
[],
is_pure_virtual=True, is_const=True, visibility='private', is_virtual=True)
('bool', 'forward')],
is_pure_virtual=True, visibility='private', is_virtual=True)
am('bool', 'model')],
is_pure_virtual=True, visibility='private', is_virtual=True)
return
def register_Ns3Ipv4AddressChecker_methods(root_module, cls):
def register_Ns3Ipv4FlowClassifier_methods(root_module, cls):
v4FlowClassifierFiveTuple_methods(root_module, cls):
cls.add_binary_comparison_operator('<')
cls.add_binary_comparison_operator('==')
')],
is_const=True, is_virtual=True)
is_const=True)
is_const=True, is_virtual=True)
is_const=True, is_virtual=True)
k')],
is_const=True, is_virtual=True)
is_const=True, is_virtual=True)
is_const=True, is_virtual=True)
is_const=True, is_virtual=True)
is_const=True, is_virtual=True)
,
is_virtual=True)
is_const=True, is_virtual=True)
dex')],
is_const=True, is_virtual=True)
is_const=True, is_virtual=True)
is_static=True)
,
is_virtual=True)
faceIndex')],
is_virtual=True)
f')],
is_const=True, is_virtual=True)
is_const=True, is_virtual=True)
is_const=True)
is_const=True, is_virtual=True)
&', 'to'), param('ns3::NetDevice::PacketType', 'packetType')])
,
is_virtual=True)
faceIndex')],
is_virtual=True)
ssIndex')],
is_virtual=True)
address')],
is_virtual=True)
dressScope_e', 'scope')],
is_virtual=True)
l'), param('ns3::Ptr< ns3::Ipv4Route >', 'route')],
is_virtual=True)
3::Ipv4Route >', 'route')],
is_virtual=True)
is_virtual=True)
', 'val')],
is_virtual=True)
'metric')],
is_virtual=True)
)
,
is_virtual=True)
is_virtual=True)
, 'dest')],
is_virtual=True)
visibility='protected', is_virtual=True)
visibility='protected', is_virtual=True)
is_const=True, visibility='private', is_virtual=True)
is_const=True, visibility='private', is_virtual=True)
visibility='private', is_virtual=True)
visibility='private', is_virtual=True)
return
def register_Ns3Ipv4MaskChecker_methods(root_module, cls):
e, is_virtual=True)
e')])
return
def register_Ns3Ipv4MulticastRoute_methods(root_module, cls):
_output_stream_operator()
is_const=True)
is_const=True)
'dest')])
', 'gw')])
utDevice')])
, 'src')])
return
def register_Ns3Ipv4RoutingProtocol_methods(root_module, cls):
is_pure_virtual=True, is_virtual=True)
,
is_pure_virtual=True, is_virtual=True)
e::Unit::S')],
is_pure_virtual=True, is_const=True, is_virtual=True)
b'), param('ns3::Callback< void, ns3::Ptr< ns3::Ipv4MulticastRoute >, ns3::Ptr< ns3::Packet const >, ns3::Ipv4Header const &, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >', 'mcb'), param('ns3::Callback< void, ns3::Ptr< ns3::Packet const >, ns3::Ipv4Header const &, unsigned int, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >', 'lcb'), param('ns3::Callback< void, ns3::Ptr< ns3::Packet const >, ns3::Ipv4Header const &, ns3::Socket::SocketErrno, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >', 'ecb')],
is_pure_virtual=True, is_virtual=True)
t::SocketErrno &', 'sockerr')],
is_pure_virtual=True, is_virtual=True)
is_pure_virtual=True, is_virtual=True)
return
def register_Ns3Ipv6_methods(root_module, cls):
is_pure_virtual=True, is_virtual=True)
tDevice >', 'device')],
is_pure_virtual=True, is_virtual=True)
int32_t', 'addressIndex')],
is_pure_virtual=True, is_const=True, is_virtual=True)
', 'address')],
is_pure_virtual=True, is_const=True, is_virtual=True)
t >', 'device')],
is_pure_virtual=True, is_const=True, is_virtual=True)
ns3::Ipv6Prefix', 'mask')],
is_pure_virtual=True, is_const=True, is_virtual=True)
'interface')],
is_pure_virtual=True, is_const=True, is_virtual=True)
'interface')],
is_pure_virtual=True, is_const=True, is_virtual=True)
'interface')],
is_pure_virtual=True, is_const=True, is_virtual=True)
[],
is_pure_virtual=True, is_const=True, is_virtual=True)
nt32_t', 'interface')],
is_pure_virtual=True, is_virtual=True)
rotocolNumber')],
is_pure_virtual=True, is_const=True, is_virtual=True)
int32_t', 'interfaceIndex')],
is_pure_virtual=True, is_const=True, is_virtual=True)
[],
is_pure_virtual=True, is_const=True, is_virtual=True)
],
is_static=True)
tocol >', 'protocol')],
is_pure_virtual=True, is_virtual=True)
ram('uint32_t', 'interfaceIndex')],
is_pure_virtual=True, is_virtual=True)
'interface')],
is_pure_virtual=True, is_const=True, is_virtual=True)
'interface')],
is_pure_virtual=True, is_const=True, is_virtual=True)
[],
is_pure_virtual=True, is_virtual=True)
[],
is_pure_virtual=True, is_virtual=True)
tocol >', 'protocol')],
is_pure_virtual=True, is_virtual=True)
ram('uint32_t', 'interfaceIndex')],
is_pure_virtual=True, is_virtual=True)
ram('uint32_t', 'addressIndex')],
is_pure_virtual=True, is_virtual=True)
('ns3::Ipv6Address', 'address')],
is_pure_virtual=True, is_virtual=True)
am('uint8_t', 'protocol'), param('ns3::Ptr< ns3::Ipv6Route >', 'route')],
is_pure_virtual=True, is_virtual=True)
32_t', 'interface')],
is_pure_virtual=True, is_virtual=True)
terface'), param('bool', 'val')],
is_pure_virtual=True, is_virtual=True)
'), param('uint16_t', 'metric')],
is_pure_virtual=True, is_virtual=True)
st'), param('uint32_t', 'pmtu')],
is_pure_virtual=True, is_virtual=True)
', 'routingProtocol')],
is_pure_virtual=True, is_virtual=True)
32_t', 'interface')],
is_pure_virtual=True, is_virtual=True)
ram('ns3::Ipv6Address', 'dest')],
is_pure_virtual=True, is_virtual=True)
is_const=True)
[],
is_pure_virtual=True, is_const=True, visibility='private', is_virtual=True)
[],
is_pure_virtual=True, is_const=True, visibility='private', is_virtual=True)
('bool', 'forward')],
is_pure_virtual=True, visibility='private', is_virtual=True)
ol', 'mtuDiscover')],
is_pure_virtual=True, visibility='private', is_virtual=True)
return
def register_Ns3Ipv6AddressChecker_methods(root_module, cls):
def register_Ns3Ipv6FlowClassifier_methods(root_module, cls):
v6FlowClassifierFiveTuple_methods(root_module, cls):
cls.add_binary_comparison_operator('<')
cls.add_binary_comparison_operator('==')
dex')],
is_virtual=True)
is_const=True, is_virtual=True)
dex')],
is_const=True, is_virtual=True)
&', 'to'), param('ns3::NetDevice::PacketType', 'packetType')])
l'), param('ns3::Ptr< ns3::Ipv6Route >', 'route')],
is_virtual=True)
,
is_virtual=True)
is_const=True, is_virtual=True)
,
is_virtual=True)
is_const=True)
is_const=True, is_virtual=True)
is_const=True, is_virtual=True)
k')],
is_const=True, is_virtual=True)
is_const=True, is_virtual=True)
address')],
is_virtual=True)
x')],
is_const=True, is_virtual=True)
is_const=True, is_virtual=True)
ssIndex')],
is_virtual=True)
address')],
is_virtual=True)
'metric')],
is_virtual=True)
is_const=True, is_virtual=True)
is_const=True, is_virtual=True)
, 'pmtu')],
is_virtual=True)
is_const=True, is_virtual=True)
is_virtual=True)
is_virtual=True)
is_const=True, is_virtual=True)
', 'val')],
is_virtual=True)
, 'dest')],
is_virtual=True)
,
is_virtual=True)
is_const=True)
aram('ns3::Ipv6Address', 'defaultRouter', default_value='ns3::Ipv6Address::GetZero()')])
3::Ipv6Address', 'defaultRouter')])
is_virtual=True)
is_virtual=True)
pReason', 'dropReason')],
is_virtual=True)
terface')])
terface')])
is_const=True)
e')],
is_const=True)
visibility='protected', is_virtual=True)
visibility='protected', is_virtual=True)
visibility='private', is_virtual=True)
is_const=True, visibility='private', is_virtual=True)
visibility='private', is_virtual=True)
is_const=True, visibility='private', is_virtual=True)
visibility='private', is_virtual=True)
is_const=True, visibility='private', is_virtual=True)
return
def register_Ns3Ipv6PmtuCache_methods(root_module, cls):
return
def register_Ns3Ipv6PrefixChecker_methods(root_module, cls):
)
return
def register_Ns3Mac48AddressChecker_methods(root_module, cls):
e_methods(root_module, cls):
is_pure_virtual=True, is_const=True, is_virtual=True)
is_pure_virtual=True, is_const=True, is_virtual=True)
is_pure_virtual=True, is_const=True, is_virtual=True)
is_pure_virtual=True, is_const=True, is_virtual=True)
is_pure_virtual=True, is_const=True, is_virtual=True)
p')],
is_pure_virtual=True, is_const=True, is_virtual=True)
r')],
is_pure_virtual=True, is_const=True, is_virtual=True)
is_pure_virtual=True, is_const=True, is_virtual=True)
is_static=True)
is_pure_virtual=True, is_const=True, is_virtual=True)
is_pure_virtual=True, is_const=True, is_virtual=True)
is_pure_virtual=True, is_const=True, is_virtual=True)
is_pure_virtual=True, is_const=True, is_virtual=True)
is_pure_virtual=True, is_const=True, is_virtual=True)
is_pure_virtual=True, is_const=True, is_virtual=True)
param('uint16_t', 'protocolNumber')],
is_pure_virtual=True, is_virtual=True)
', 'dest'), param('uint16_t', 'protocolNumber')],
is_pure_virtual=True, is_virtual=True)
address')],
is_pure_virtual=True, is_virtual=True)
'index')],
is_pure_virtual=True, is_virtual=True)
', 'mtu')],
is_pure_virtual=True, is_virtual=True)
>', 'node')],
is_pure_virtual=True, is_virtual=True)
etDevice >, ns3::Ptr< ns3::Packet const >, unsigned short, ns3::Address const &, ns3::Address const &, ns3::NetDevice::PacketType, ns3::empty, ns3::empty, ns3::empty >', 'cb')],
is_pure_virtual=True, is_virtual=True)
ns3::NetDevice >, ns3::Ptr< ns3::Packet const >, unsigned short, ns3::Address const &, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >', 'cb')],
is_pure_virtual=True, is_virtual=True)
is_pure_virtual=True, is_const=True, is_virtual=True)
return
def register_Ns3NixVector_methods(root_module, cls):
cls.add_output_stream_operator()
is_const=True)
m('uint32_t', 'size')])
rOfBits')])
is_const=True)
_t', 'maxSize')],
is_const=True)
return
def register_Ns3Node_methods(root_module, cls):
[],
is_static=True)
t32_t', 'index')],
is_const=True)
t32_t', 'index')],
is_const=True)
[],
is_const=True)
[],
is_const=True)
[],
is_const=True)
[],
is_const=True)
[],
is_const=True)
[],
is_static=True)
tr< ns3::NetDevice >, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >', 'listener')])
::Address const &, ns3::Address const &, ns3::NetDevice::PacketType, ns3::empty, ns3::empty, ns3::empty >', 'handler'), param('uint16_t', 'protocolType'), param('ns3::Ptr< ns3::NetDevice >', 'device'), param('bool', 'promiscuous', default_value='false')])
tr< ns3::NetDevice >, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >', 'listener')])
, ns3::Ptr< ns3::NetDevice >, ns3::Ptr< ns3::Packet const >, unsigned short, ns3::Address const &, ns3::Address const &, ns3::NetDevice::PacketType, ns3::empty, ns3::empty, ns3::empty >', 'handler')])
[],
visibility='protected', is_virtual=True)
[],
visibility='protected', is_virtual=True)
return
def register_Ns3ObjectFactoryChecker_methods(root_module, cls):
tputStreamWrapper_methods(root_module, cls):
t &', 'trailer')])
is_const=True)
],
is_const=True)
am('uint32_t', 'size')],
is_const=True)
am('uint32_t', 'size')],
is_const=True)
am('uint32_t', 'length')],
is_const=True)
is_static=True)
is_static=True)
&', 'tag')],
is_const=True)
is_const=True)
],
is_const=True)
is_const=True)
is_const=True)
is_const=True)
is_const=True)
'header')],
is_const=True)
am('uint32_t', 'size')],
is_const=True)
&', 'tag')],
is_const=True)
r &', 'trailer')])
&', 'os')],
is_const=True)
&', 'os')],
is_const=True)
&', 'os')],
is_const=True)
[])
[])
nt32_t', 'size')])
nt32_t', 'size')])
er &', 'header')])
), param('uint32_t', 'size')])
::Tag &', 'tag')])
r &', 'trailer')])
::Tag &', 'tag')])
'uint32_t', 'maxSize')],
is_const=True)
r >', 'nixVector')])
is_const=True)
return
def register_Ns3QueueItem_methods(root_module, cls):
cls.add_output_stream_operator()
)
is_const=True, is_virtual=True)
_t &', 'value')],
is_const=True, is_virtual=True)
s')],
is_const=True, is_virtual=True)
return
def register_Ns3TimeValue_methods(root_module, cls):
ker')],
is_virtual=True)
is_const=True)
, 'checker')],
is_const=True, is_virtual=True)
nst &', 'value')])
return
def register_Ns3TypeIdChecker_methods(root_module, cls):
irtual=True)
is_const=True)
checker')],
is_const=True, is_virtual=True)
&', 'value')])
return
def register_Ns3AddressChecker_methods(root_module, cls):
s_const=True)
ker')],
is_const=True, is_virtual=True)
'value')])
return
def register_Ns3CallbackImpl__Bool_Ns3Ptr__lt__ns3Socket__gt___Const_ns3Address___amp___Ns3Empty_Ns3Empty_Ns3Empty_Ns3Empty_Ns3Empty_Ns3Empty_Ns3Empty_methods(root_module, cls):
| true
| true
|
79065e5eadad7bc0e642e51294c0672d7f8e4553
| 13,148
|
py
|
Python
|
gensim/gensim/corpora/hashdictionary.py
|
Abas-Khan/thesis
|
b733bd4382371203cc4992571890619a2e314047
|
[
"MIT"
] | null | null | null |
gensim/gensim/corpora/hashdictionary.py
|
Abas-Khan/thesis
|
b733bd4382371203cc4992571890619a2e314047
|
[
"MIT"
] | null | null | null |
gensim/gensim/corpora/hashdictionary.py
|
Abas-Khan/thesis
|
b733bd4382371203cc4992571890619a2e314047
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright (C) 2012 Homer Strong, Radim Rehurek
# Licensed under the GNU LGPL v2.1 - http://www.gnu.org/licenses/lgpl.html
"""This module implements the "hashing trick" [1]_ -- a mapping between words and their integer ids
using a fixed and static mapping.
Notes
-----
The static mapping has a constant memory footprint, regardless of the number of word-types (features) in your corpus,
so it's suitable for processing extremely large corpora. The ids are computed as `hash(word) % id_range`,
where `hash` is a user-configurable function (`zlib.adler32` by default).
Advantages:
* New words can be represented immediately, without an extra pass through the corpus
to collect all the ids first.
* Can be used with non-repeatable (once-only) streams of documents.
* All tokens will be used (not only that you see in documents), typical problem
for :class:`~gensim.corpora.dictionary.Dictionary`.
Disadvantages:
* Words may map to the same id, causing hash collisions. The word <-> id mapping is no longer a bijection.
References
----------
.. [1] http://en.wikipedia.org/wiki/Hashing-Trick
"""
from __future__ import with_statement
import logging
import itertools
import zlib
from gensim import utils
from six import iteritems, iterkeys
logger = logging.getLogger(__name__)
class HashDictionary(utils.SaveLoad, dict):
    """Encapsulates the mapping between normalized words and their integer ids.

    Notes
    -----
    Unlike :class:`~gensim.corpora.dictionary.Dictionary`,
    building a :class:`~gensim.corpora.hashdictionary.HashDictionary` before using it **isn't a necessary step**.
    Ids come from `myhash(token) % id_range`, so documents can be converted immediately,
    from an uninitialized :class:`~gensim.corpora.hashdictionary.HashDictionary`,
    without seeing the rest of the corpus first. The price is that distinct words
    may collide on the same id.

    Examples
    --------
    >>> from gensim.corpora import HashDictionary
    >>>
    >>> texts = [['human', 'interface', 'computer']]
    >>> dct = HashDictionary(texts)
    >>> dct.doc2bow(texts[0])
    [(10608, 1), (12466, 1), (31002, 1)]

    """
    def __init__(self, documents=None, id_range=32000, myhash=zlib.adler32, debug=True):
        """
        Parameters
        ----------
        documents : iterable of iterable of str, optional
            Iterable of documents. If given, used for initialization (statistics collected immediately).
        id_range : int, optional
            Number of hash-values in table, used as `id = myhash(key) % id_range`.
        myhash : function, optional
            Hash function, should support interface myhash(str) -> int, uses `zlib.adler32` by default.
        debug : bool, optional
            If True - store raw tokens mapping (as str <-> id).
            If you find yourself running out of memory (or not sure that you really need raw tokens), set `debug=False`.

        """
        self.myhash = myhash  # hash fnc: string->integer
        self.id_range = id_range  # hash range: id = myhash(key) % id_range
        self.debug = debug

        # the following (potentially massive!) dictionaries are only formed if `debug` is True
        self.token2id = {}
        self.id2token = {}  # reverse mapping int->set(words)
        self.dfs = {}  # token_id -> how many documents this token_id appeared in
        self.dfs_debug = {}  # token_string->how many documents this word appeared in

        self.num_docs = 0  # number of documents processed
        self.num_pos = 0  # total number of corpus positions
        self.num_nnz = 0  # total number of non-zeroes in the BOW matrix
        self.allow_update = True

        if documents is not None:
            self.add_documents(documents)

    def __getitem__(self, tokenid):
        """Get all words that have mapped to the given id so far, as a set.

        Warnings
        --------
        Works only if `debug=True`; otherwise the returned set is always empty.

        Parameters
        ----------
        tokenid : int
            Token identifier (result of hashing).

        Return
        ------
        set of str
            Set of all corresponding words.

        """
        return self.id2token.get(tokenid, set())

    def restricted_hash(self, token):
        """Calculate the id of `token` as `myhash(token) % id_range`.

        Also keep track of what words were mapped to what ids, for debugging reasons.

        Parameters
        ----------
        token : str
            Input token.

        Return
        ------
        int
            Hash value of `token`.

        """
        h = self.myhash(utils.to_utf8(token)) % self.id_range
        if self.debug:
            self.token2id[token] = h
            self.id2token.setdefault(h, set()).add(token)
        return h

    def __len__(self):
        """Get the number of distinct ids = the entire dictionary size."""
        return self.id_range

    def keys(self):
        """Get a list of all token ids (the entire hash range)."""
        return range(len(self))

    def __str__(self):
        return "HashDictionary(%i id range)" % len(self)

    @staticmethod
    def from_documents(*args, **kwargs):
        """Alternate constructor, forwards all arguments to `__init__`."""
        return HashDictionary(*args, **kwargs)

    def add_documents(self, documents):
        """Build dictionary statistics from a collection of documents.

        Notes
        -----
        This is only a convenience wrapper for calling `doc2bow` on each document
        with `allow_update=True`.

        Parameters
        ----------
        documents : iterable of list of str
            Collection of documents.

        """
        for docno, document in enumerate(documents):
            if docno % 10000 == 0:
                logger.info("adding document #%i to %s", docno, self)
            self.doc2bow(document, allow_update=True)  # ignore the result, here we only care about updating token ids
        logger.info(
            "built %s from %i documents (total %i corpus positions)",
            self, self.num_docs, self.num_pos
        )

    def doc2bow(self, document, allow_update=False, return_missing=False):
        """Convert `document` into the bag-of-words format, like [(1, 4), (150, 1), (2005, 2)].

        Notes
        -----
        Each word is assumed to be a **tokenized and normalized** utf-8 encoded string. No further
        preprocessing is done on the words in `document` before calling this method.

        If `allow_update` or `self.allow_update` is set, also update the overall corpus
        statistics and document frequencies: for each id appearing in this document,
        increase its document frequency (`self.dfs`) by one.

        Parameters
        ----------
        document : list of str
            List of **tokenized and normalized** strings.
        allow_update : bool, optional
            If True - update dictionary statistics in the process.
        return_missing : bool, optional
            Also return an (always empty) "missing tokens" dict. Kept only for API
            compatibility with `Dictionary.doc2bow` -- hashing never misses a token.

        Return
        ------
        list of (int, int)
            Document in Bag-of-words (BoW) format, ids in ascending order.
        list of (int, int), dict
            If `return_missing=True`, the BoW document plus an empty dictionary.

        """
        result = {}
        missing = {}
        document = sorted(document)  # sorted so that groupby sees equal tokens next to each other
        for word_norm, group in itertools.groupby(document):
            frequency = len(list(group))  # how many times does this word appear in the input document
            tokenid = self.restricted_hash(word_norm)
            result[tokenid] = result.get(tokenid, 0) + frequency
            if self.debug:
                # increment document count for each unique token that appeared in the document
                self.dfs_debug[word_norm] = self.dfs_debug.get(word_norm, 0) + 1

        if allow_update or self.allow_update:
            self.num_docs += 1
            self.num_pos += len(document)
            self.num_nnz += len(result)
            if self.debug:
                # increment document count for each unique tokenid that appeared in the document
                # done here, because several words may map to the same tokenid
                for tokenid in iterkeys(result):
                    self.dfs[tokenid] = self.dfs.get(tokenid, 0) + 1

        # return tokenids, in ascending id order
        result = sorted(iteritems(result))
        if return_missing:
            return result, missing
        else:
            return result

    def filter_extremes(self, no_below=5, no_above=0.5, keep_n=100000):
        """Filter tokens in dictionary by frequency.

        Parameters
        ----------
        no_below : int, optional
            Keep tokens which are contained in at least `no_below` documents.
        no_above : float, optional
            Keep tokens which are contained in no more than `no_above` documents
            (fraction of total corpus size, not an absolute number).
        keep_n : int, optional
            Keep only the first `keep_n` most frequent tokens.

        Notes
        -----
        Since the id range of a :class:`~gensim.corpora.hashdictionary.HashDictionary`
        is fixed and doesn't depend on the number of tokens seen, this doesn't really
        "remove" anything. It only clears some supplementary statistics, for easier
        debugging and a smaller RAM footprint.

        """
        no_above_abs = int(no_above * self.num_docs)  # convert fractional threshold to absolute threshold
        ok = [item for item in iteritems(self.dfs_debug) if no_below <= item[1] <= no_above_abs]
        ok = frozenset(word for word, freq in sorted(ok, key=lambda x: -x[1])[:keep_n])

        self.dfs_debug = {word: freq for word, freq in iteritems(self.dfs_debug) if word in ok}
        self.token2id = {token: tokenid for token, tokenid in iteritems(self.token2id) if token in self.dfs_debug}
        self.id2token = {
            tokenid: {token for token in tokens if token in self.dfs_debug}
            for tokenid, tokens in iteritems(self.id2token)
        }
        # keep document frequencies only for ids that still map to at least one surviving token
        self.dfs = {tokenid: freq for tokenid, freq in iteritems(self.dfs) if self.id2token.get(tokenid, set())}

        logger.info(
            "kept statistics for which were in no less than %i and no more than %i (=%.1f%%) documents",
            no_below, no_above_abs, 100.0 * no_above
        )

    def save_as_text(self, fname):
        """Save this HashDictionary to a text file.

        Parameters
        ----------
        fname : str
            Path to output file.

        Notes
        -----
        The format is:
        `id[TAB]document frequency of this id[TAB]tab-separated set of words in UTF8 that map to this id[NEWLINE]`.

        """
        # lazy %-style args: formatting is deferred to the logging framework,
        # consistent with the other logger calls in this class
        logger.info("saving HashDictionary mapping to %s", fname)
        with utils.smart_open(fname, 'wb') as fout:
            for tokenid in self.keys():
                words = sorted(self[tokenid])
                if words:
                    words_df = [(word, self.dfs_debug.get(word, 0)) for word in words]
                    # most frequent words first, rendered as word(df)
                    words_df = ["%s(%i)" % item for item in sorted(words_df, key=lambda x: -x[1])]
                    words_df = '\t'.join(words_df)
                    fout.write(utils.to_utf8("%i\t%i\t%s\n" % (tokenid, self.dfs.get(tokenid, 0), words_df)))
| 37.673352
| 120
| 0.604198
|
from __future__ import with_statement
import logging
import itertools
import zlib
from gensim import utils
from six import iteritems, iterkeys
logger = logging.getLogger(__name__)
class HashDictionary(utils.SaveLoad, dict):
    """Mapping between words and integer ids via the hashing trick.
    Ids are static: ``id = myhash(token) % id_range``, so no vocabulary pass over
    the corpus is needed before use, but distinct words may collide on one id.
    """
    def __init__(self, documents=None, id_range=32000, myhash=zlib.adler32, debug=True):
        """Initialize; if `documents` is given, collect statistics from them immediately."""
        self.myhash = myhash  # hash function: str -> int
        self.id_range = id_range  # ids are taken modulo this range
        self.debug = debug  # when True, keep the (potentially large) raw token <-> id maps
        self.token2id = {}  # token -> id (populated only in debug mode)
        self.id2token = {}  # id -> set of tokens that hashed to it (debug mode only)
        self.dfs = {}  # token id -> number of documents it appeared in
        self.dfs_debug = {}  # raw token -> number of documents it appeared in (debug mode only)
        self.num_docs = 0  # number of documents processed
        self.num_pos = 0  # total number of corpus positions
        self.num_nnz = 0  # total number of non-zeroes in the BOW matrix
        self.allow_update = True
        if documents is not None:
            self.add_documents(documents)
    def __getitem__(self, tokenid):
        """Return the set of words seen so far that hash to `tokenid` (debug mode only)."""
        return self.id2token.get(tokenid, set())
    def restricted_hash(self, token):
        """Hash `token` into [0, id_range); record the token <-> id pair when debugging."""
        h = self.myhash(utils.to_utf8(token)) % self.id_range
        if self.debug:
            self.token2id[token] = h
            self.id2token.setdefault(h, set()).add(token)
        return h
    def __len__(self):
        """Number of distinct ids = the full hash range."""
        return self.id_range
    def keys(self):
        """Return all possible token ids (the whole hash range)."""
        return range(len(self))
    def __str__(self):
        return "HashDictionary(%i id range)" % len(self)
    @staticmethod
    def from_documents(*args, **kwargs):
        """Alternate constructor; simply forwards all arguments to `__init__`."""
        return HashDictionary(*args, **kwargs)
    def add_documents(self, documents):
        """Collect corpus statistics by running `doc2bow(..., allow_update=True)` on each document."""
        for docno, document in enumerate(documents):
            if docno % 10000 == 0:
                logger.info("adding document #%i to %s", docno, self)
            # the BoW result is discarded: only the statistics-update side effect matters here
            self.doc2bow(document, allow_update=True)
        logger.info(
            "built %s from %i documents (total %i corpus positions)",
            self, self.num_docs, self.num_pos
        )
    def doc2bow(self, document, allow_update=False, return_missing=False):
        """Convert tokenized `document` to sparse bag-of-words [(token_id, count), ...].
        When updating is allowed, corpus counters and document frequencies are
        refreshed as a side effect. `return_missing` only adds an (always empty)
        dict to the return value -- hashing never misses a token.
        """
        result = {}
        missing = {}
        document = sorted(document)  # sorting brings equal tokens together for groupby
        for word_norm, group in itertools.groupby(document):
            frequency = len(list(group))  # occurrences of this token in the document
            tokenid = self.restricted_hash(word_norm)
            result[tokenid] = result.get(tokenid, 0) + frequency
            if self.debug:
                self.dfs_debug[word_norm] = self.dfs_debug.get(word_norm, 0) + 1
        if allow_update or self.allow_update:
            self.num_docs += 1
            self.num_pos += len(document)
            self.num_nnz += len(result)
            if self.debug:
                # counted per id (not per word) since several words may share one id
                for tokenid in iterkeys(result):
                    self.dfs[tokenid] = self.dfs.get(tokenid, 0) + 1
        # return ids in ascending order
        result = sorted(iteritems(result))
        if return_missing:
            return result, missing
        else:
            return result
    def filter_extremes(self, no_below=5, no_above=0.5, keep_n=100000):
        """Trim debug statistics to tokens with document frequency in
        [no_below, no_above * num_docs], keeping at most the `keep_n` most frequent.
        Ids themselves are fixed by the hash, so only bookkeeping shrinks.
        """
        no_above_abs = int(no_above * self.num_docs)  # fractional threshold -> absolute count
        ok = [item for item in iteritems(self.dfs_debug) if no_below <= item[1] <= no_above_abs]
        ok = frozenset(word for word, freq in sorted(ok, key=lambda x: -x[1])[:keep_n])
        self.dfs_debug = {word: freq for word, freq in iteritems(self.dfs_debug) if word in ok}
        self.token2id = {token: tokenid for token, tokenid in iteritems(self.token2id) if token in self.dfs_debug}
        self.id2token = {
            tokenid: {token for token in tokens if token in self.dfs_debug}
            for tokenid, tokens in iteritems(self.id2token)
        }
        # keep document frequencies only for ids that still map to some surviving token
        self.dfs = {tokenid: freq for tokenid, freq in iteritems(self.dfs) if self.id2token.get(tokenid, set())}
        logger.info(
            "kept statistics for which were in no less than %i and no more than %i (=%.1f%%) documents",
            no_below, no_above_abs, 100.0 * no_above
        )
    def save_as_text(self, fname):
        """Write one UTF-8 line per id: `id<TAB>docfreq<TAB>word1(df1)<TAB>word2(df2)...`."""
        logger.info("saving HashDictionary mapping to %s" % fname)
        with utils.smart_open(fname, 'wb') as fout:
            for tokenid in self.keys():
                words = sorted(self[tokenid])
                if words:
                    words_df = [(word, self.dfs_debug.get(word, 0)) for word in words]
                    # most frequent words first, rendered as word(df)
                    words_df = ["%s(%i)" % item for item in sorted(words_df, key=lambda x: -x[1])]
                    words_df = '\t'.join(words_df)
                    fout.write(utils.to_utf8("%i\t%i\t%s\n" % (tokenid, self.dfs.get(tokenid, 0), words_df)))
| true
| true
|
79065f7ba534a9c1105db44f908ee2d05be5fa9f
| 2,957
|
py
|
Python
|
homeassistant/components/statsd/__init__.py
|
itewk/home-assistant
|
769cf19052f8c9ef374d8ba8ae7705ccc7bf4cf4
|
[
"Apache-2.0"
] | 23
|
2017-11-15T21:03:53.000Z
|
2021-03-29T21:33:48.000Z
|
homeassistant/components/statsd/__init__.py
|
itewk/home-assistant
|
769cf19052f8c9ef374d8ba8ae7705ccc7bf4cf4
|
[
"Apache-2.0"
] | 9
|
2022-01-27T06:32:10.000Z
|
2022-03-31T07:07:51.000Z
|
homeassistant/components/statsd/__init__.py
|
itewk/home-assistant
|
769cf19052f8c9ef374d8ba8ae7705ccc7bf4cf4
|
[
"Apache-2.0"
] | 10
|
2018-01-01T00:12:51.000Z
|
2021-12-21T23:08:05.000Z
|
"""Support for sending data to StatsD."""
import logging
import statsd
import voluptuous as vol
from homeassistant.const import CONF_HOST, CONF_PORT, CONF_PREFIX, EVENT_STATE_CHANGED
from homeassistant.helpers import state as state_helper
import homeassistant.helpers.config_validation as cv
_LOGGER = logging.getLogger(__name__)
# Configuration keys understood under the `statsd:` section.
CONF_ATTR = "log_attributes"  # also export each numeric state attribute as a gauge
CONF_RATE = "rate"  # sample rate passed to the StatsD client
CONF_VALUE_MAP = "value_mapping"  # optional mapping of non-numeric states to numbers

DEFAULT_HOST = "localhost"
DEFAULT_PORT = 8125
DEFAULT_PREFIX = "hass"
DEFAULT_RATE = 1

DOMAIN = "statsd"

# Validation schema for this integration's configuration.
CONFIG_SCHEMA = vol.Schema(
    {
        DOMAIN: vol.Schema(
            {
                vol.Required(CONF_HOST, default=DEFAULT_HOST): cv.string,
                vol.Optional(CONF_ATTR, default=False): cv.boolean,
                vol.Optional(CONF_PORT, default=DEFAULT_PORT): cv.port,
                vol.Optional(CONF_PREFIX, default=DEFAULT_PREFIX): cv.string,
                # rate must coerce to a positive integer
                vol.Optional(CONF_RATE, default=DEFAULT_RATE): vol.All(
                    vol.Coerce(int), vol.Range(min=1)
                ),
                vol.Optional(CONF_VALUE_MAP): dict,
            }
        )
    },
    extra=vol.ALLOW_EXTRA,
)
def setup(hass, config):
    """Set up the StatsD component."""
    conf = config[DOMAIN]

    client = statsd.StatsClient(
        host=conf.get(CONF_HOST), port=conf.get(CONF_PORT), prefix=conf.get(CONF_PREFIX)
    )
    rate = conf.get(CONF_RATE)
    mapping = conf.get(CONF_VALUE_MAP)
    log_attributes = conf.get(CONF_ATTR)

    def statsd_event_listener(event):
        """Forward a state-changed event from the bus to StatsD."""
        state = event.data.get("new_state")

        if state is None:
            return

        try:
            # A user-supplied value mapping takes precedence over the
            # generic numeric conversion.
            if mapping and state.state in mapping:
                numeric_state = float(mapping[state.state])
            else:
                numeric_state = state_helper.state_as_number(state)
        except ValueError:
            # Non-numeric state: skip the gauge but still count the event below.
            numeric_state = None

        attributes = dict(state.attributes)

        _LOGGER.debug("Sending %s", state.entity_id)

        if log_attributes is True:
            if isinstance(numeric_state, (float, int)):
                client.gauge("%s.state" % state.entity_id, numeric_state, rate)

            # Export every numeric attribute as its own gauge.
            for key, value in attributes.items():
                if isinstance(value, (float, int)):
                    client.gauge(
                        "%s.%s" % (state.entity_id, key.replace(" ", "_")), value, rate
                    )
        else:
            if isinstance(numeric_state, (float, int)):
                client.gauge(state.entity_id, numeric_state, rate)

        # Count every state change, numeric or not.
        client.incr(state.entity_id, rate=rate)

    hass.bus.listen(EVENT_STATE_CHANGED, statsd_event_listener)

    return True
| 31.126316
| 86
| 0.630369
|
import logging
import statsd
import voluptuous as vol
from homeassistant.const import CONF_HOST, CONF_PORT, CONF_PREFIX, EVENT_STATE_CHANGED
from homeassistant.helpers import state as state_helper
import homeassistant.helpers.config_validation as cv
_LOGGER = logging.getLogger(__name__)
# Configuration keys understood under the `statsd:` section.
CONF_ATTR = "log_attributes"
CONF_RATE = "rate"
CONF_VALUE_MAP = "value_mapping"
DEFAULT_HOST = "localhost"
DEFAULT_PORT = 8125
DEFAULT_PREFIX = "hass"
DEFAULT_RATE = 1
DOMAIN = "statsd"
# Validation schema for this integration's configuration.
CONFIG_SCHEMA = vol.Schema(
    {
        DOMAIN: vol.Schema(
            {
                vol.Required(CONF_HOST, default=DEFAULT_HOST): cv.string,
                vol.Optional(CONF_ATTR, default=False): cv.boolean,
                vol.Optional(CONF_PORT, default=DEFAULT_PORT): cv.port,
                vol.Optional(CONF_PREFIX, default=DEFAULT_PREFIX): cv.string,
                # rate must coerce to a positive integer
                vol.Optional(CONF_RATE, default=DEFAULT_RATE): vol.All(
                    vol.Coerce(int), vol.Range(min=1)
                ),
                vol.Optional(CONF_VALUE_MAP): dict,
            }
        )
    },
    extra=vol.ALLOW_EXTRA,
)
def setup(hass, config):
    """Set up StatsD reporting: forward every state change on the bus to a StatsD server."""
    conf = config[DOMAIN]
    host = conf.get(CONF_HOST)
    port = conf.get(CONF_PORT)
    sample_rate = conf.get(CONF_RATE)
    prefix = conf.get(CONF_PREFIX)
    value_mapping = conf.get(CONF_VALUE_MAP)
    show_attribute_flag = conf.get(CONF_ATTR)
    statsd_client = statsd.StatsClient(host=host, port=port, prefix=prefix)
    def statsd_event_listener(event):
        """Send the new state (and optionally its numeric attributes) to StatsD."""
        state = event.data.get("new_state")
        if state is None:
            return
        try:
            # a user-supplied value mapping takes precedence over generic conversion
            if value_mapping and state.state in value_mapping:
                _state = float(value_mapping[state.state])
            else:
                _state = state_helper.state_as_number(state)
        except ValueError:
            # non-numeric state: no gauge is sent, but the counter below still fires
            _state = None
        states = dict(state.attributes)
        _LOGGER.debug("Sending %s", state.entity_id)
        if show_attribute_flag is True:
            if isinstance(_state, (float, int)):
                statsd_client.gauge("%s.state" % state.entity_id, _state, sample_rate)
            # also export each numeric attribute as its own gauge
            for key, value in states.items():
                if isinstance(value, (float, int)):
                    stat = "%s.%s" % (state.entity_id, key.replace(" ", "_"))
                    statsd_client.gauge(stat, value, sample_rate)
        else:
            if isinstance(_state, (float, int)):
                statsd_client.gauge(state.entity_id, _state, sample_rate)
        # count every state change regardless of whether a gauge was sent
        statsd_client.incr(state.entity_id, rate=sample_rate)
    hass.bus.listen(EVENT_STATE_CHANGED, statsd_event_listener)
    return True
| true
| true
|
7906604bab7a983563a8a3143f954fce57d6797d
| 557
|
py
|
Python
|
examples_old/run_executable_redis/basic_wait.py
|
shepilov-vladislav/aiotasks
|
be888895b6509d062ee4e1564b7eec05477f5a7d
|
[
"BSD-3-Clause"
] | 462
|
2017-03-28T08:01:28.000Z
|
2022-03-17T19:13:55.000Z
|
examples_old/run_executable_redis/basic_wait.py
|
shepilov-vladislav/aiotasks
|
be888895b6509d062ee4e1564b7eec05477f5a7d
|
[
"BSD-3-Clause"
] | 12
|
2017-04-02T10:20:57.000Z
|
2020-10-06T12:18:55.000Z
|
examples_old/run_executable_redis/basic_wait.py
|
shepilov-vladislav/aiotasks
|
be888895b6509d062ee4e1564b7eec05477f5a7d
|
[
"BSD-3-Clause"
] | 51
|
2017-03-31T17:43:34.000Z
|
2022-03-31T13:42:35.000Z
|
import asyncio

from aiotasks import build_manager

loop = asyncio.get_event_loop()
loop.set_debug(True)

manager = build_manager(loop=loop)


@manager.task()
async def task_01(num):
    """Example task: print, sleep 2 seconds, then return a constant."""
    print("Task 01 starting: {}".format(num))
    # The `loop` keyword argument was deprecated in Python 3.8 and removed in
    # 3.10 (it raised TypeError there); sleep() always uses the running loop,
    # which is `loop` here, so dropping it is behavior-preserving.
    await asyncio.sleep(2)
    print("Task 01 stopping")
    return "a"


async def main_async():
    """Start the manager, wait for task_01(1)'s result, then shut down."""
    manager.run()

    async with task_01.wait(1) as f:
        print(f)

    await manager.wait(5)
    manager.stop()


if __name__ == '__main__':
    loop.run_until_complete(main_async())
| 15.472222
| 45
| 0.644524
|
import asyncio

from aiotasks import build_manager

loop = asyncio.get_event_loop()
loop.set_debug(True)

manager = build_manager(loop=loop)


@manager.task()
async def task_01(num):
    """Example task: print, sleep 2 seconds, then return a constant."""
    print("Task 01 starting: {}".format(num))
    # `loop=` was deprecated in Python 3.8 and removed in 3.10; asyncio.sleep()
    # always runs on the current loop anyway, so this is behavior-preserving.
    await asyncio.sleep(2)
    print("Task 01 stopping")
    return "a"


async def main_async():
    """Start the manager, wait for task_01(1)'s result, then shut down."""
    manager.run()

    async with task_01.wait(1) as f:
        print(f)

    await manager.wait(5)
    manager.stop()


if __name__ == '__main__':
    loop.run_until_complete(main_async())
| true
| true
|
79066068c67dd1791d3f4736391e42f5f8a64968
| 29,153
|
py
|
Python
|
kornia/geometry/conversions.py
|
lferraz/kornia
|
c30ef6149bd92054d482339a2b0cd18f8272f5f5
|
[
"ECL-2.0",
"Apache-2.0"
] | null | null | null |
kornia/geometry/conversions.py
|
lferraz/kornia
|
c30ef6149bd92054d482339a2b0cd18f8272f5f5
|
[
"ECL-2.0",
"Apache-2.0"
] | null | null | null |
kornia/geometry/conversions.py
|
lferraz/kornia
|
c30ef6149bd92054d482339a2b0cd18f8272f5f5
|
[
"ECL-2.0",
"Apache-2.0"
] | null | null | null |
from typing import Tuple
import torch
import torch.nn as nn
import torch.nn.functional as F
from kornia.constants import pi
__all__ = [
# functional api
"rad2deg",
"deg2rad",
"pol2cart",
"cart2pol",
"convert_points_from_homogeneous",
"convert_points_to_homogeneous",
"convert_affinematrix_to_homography",
"convert_affinematrix_to_homography3d",
"angle_axis_to_rotation_matrix",
"angle_axis_to_quaternion",
"rotation_matrix_to_angle_axis",
"rotation_matrix_to_quaternion",
"quaternion_to_angle_axis",
"quaternion_to_rotation_matrix",
"quaternion_log_to_exp",
"quaternion_exp_to_log",
"denormalize_pixel_coordinates",
"normalize_pixel_coordinates",
"normalize_quaternion",
"denormalize_pixel_coordinates3d",
"normalize_pixel_coordinates3d",
]
def rad2deg(tensor: torch.Tensor) -> torch.Tensor:
    r"""Function that converts angles from radians to degrees.

    Args:
        tensor (torch.Tensor): Tensor of arbitrary shape.

    Returns:
        torch.Tensor: Tensor with same shape as input.

    Example:
        >>> input = torch.tensor(3.1415926535) * torch.rand(1, 3, 3)
        >>> output = rad2deg(input)
    """
    if not isinstance(tensor, torch.Tensor):
        raise TypeError("Input type is not a torch.Tensor. Got {}".format(type(tensor)))

    # Move the pi constant onto the input's device/dtype before dividing.
    pi_val = pi.to(tensor.device).type(tensor.dtype)
    return 180. * tensor / pi_val
def deg2rad(tensor: torch.Tensor) -> torch.Tensor:
    r"""Function that converts angles from degrees to radians.

    Args:
        tensor (torch.Tensor): Tensor of arbitrary shape.

    Returns:
        torch.Tensor: tensor with same shape as input.

    Examples::

        >>> input = 360. * torch.rand(1, 3, 3)
        >>> output = deg2rad(input)
    """
    if not isinstance(tensor, torch.Tensor):
        raise TypeError("Input type is not a torch.Tensor. Got {}".format(type(tensor)))

    # Move the pi constant onto the input's device/dtype before scaling.
    pi_val = pi.to(tensor.device).type(tensor.dtype)
    return tensor * pi_val / 180.
def pol2cart(rho: torch.Tensor, phi: torch.Tensor) -> Tuple[torch.Tensor, torch.Tensor]:
    r"""Function that converts polar coordinates to cartesian coordinates.

    Args:
        rho (torch.Tensor): Tensor of arbitrary shape (the radius).
        phi (torch.Tensor): Tensor of same arbitrary shape (the angle, in radians).

    Returns:
        torch.Tensor, torch.Tensor: Tensor with same shape as input (x, y).

    Example:
        >>> rho = torch.rand(1, 3, 3)
        >>> phi = torch.rand(1, 3, 3)
        >>> x, y = pol2cart(rho, phi)
    """
    # logical `and` (not bitwise `&`) so the check short-circuits and stays a plain bool
    if not (isinstance(rho, torch.Tensor) and isinstance(phi, torch.Tensor)):
        raise TypeError("Input type is not a torch.Tensor. Got {}, {}".format(
            type(rho), type(phi)))

    x = rho * torch.cos(phi)
    y = rho * torch.sin(phi)
    return x, y
def cart2pol(x: torch.Tensor, y: torch.Tensor, eps: float = 1e-8) -> Tuple[torch.Tensor, torch.Tensor]:
    r"""Function that converts cartesian coordinates to polar coordinates.

    Args:
        x (torch.Tensor): Tensor of arbitrary shape.
        y (torch.Tensor): Tensor of same arbitrary shape.
        eps (float): To avoid division by zero. Default is 1e-8

    Returns:
        torch.Tensor, torch.Tensor: Tensor with same shape as input (rho, phi).

    Example:
        >>> x = torch.rand(1, 3, 3)
        >>> y = torch.rand(1, 3, 3)
        >>> rho, phi = cart2pol(x, y)
    """
    # logical `and` (not bitwise `&`) so the check short-circuits and stays a plain bool
    if not (isinstance(x, torch.Tensor) and isinstance(y, torch.Tensor)):
        raise TypeError("Input type is not a torch.Tensor. Got {}, {}".format(
            type(x), type(y)))

    # eps keeps sqrt away from exactly zero at the origin
    rho = torch.sqrt(x**2 + y**2 + eps)
    phi = torch.atan2(y, x)
    return rho, phi
def convert_points_from_homogeneous(
        points: torch.Tensor, eps: float = 1e-8) -> torch.Tensor:
    r"""Function that converts points from homogeneous to Euclidean space.

    Examples::

        >>> input = torch.rand(2, 4, 3)  # BxNx3
        >>> output = convert_points_from_homogeneous(input)  # BxNx2
    """
    if not isinstance(points, torch.Tensor):
        raise TypeError("Input type is not a torch.Tensor. Got {}".format(
            type(points)))

    if len(points.shape) < 2:
        raise ValueError("Input must be at least a 2D tensor. Got {}".format(
            points.shape))

    # the last coordinate is the homogeneous scale; it may be (near) zero
    # for points at infinity
    w: torch.Tensor = points[..., -1:]

    # division by (near-)zero is mapped to scale 1.0, matching OpenCV's
    # convention: https://github.com/opencv/opencv/pull/14411/files
    finite: torch.Tensor = torch.abs(w) > eps
    one = torch.tensor(1.0).to(points.device)
    scale: torch.Tensor = torch.ones_like(w).masked_scatter_(finite, one / w[finite])

    return scale * points[..., :-1]
def convert_points_to_homogeneous(points: torch.Tensor) -> torch.Tensor:
    r"""Function that converts points from Euclidean to homogeneous space.

    Examples::

        >>> input = torch.rand(2, 4, 3)  # BxNx3
        >>> output = convert_points_to_homogeneous(input)  # BxNx4
    """
    if not isinstance(points, torch.Tensor):
        raise TypeError("Input type is not a torch.Tensor. Got {}".format(
            type(points)))
    if len(points.shape) < 2:
        raise ValueError("Input must be at least a 2D tensor. Got {}".format(
            points.shape))

    # append a constant 1 as the new last coordinate of every point
    return F.pad(points, [0, 1], "constant", 1.0)
def _convert_affinematrix_to_homography_impl(A: torch.Tensor) -> torch.Tensor:
H: torch.Tensor = torch.nn.functional.pad(A, [0, 0, 0, 1], "constant", value=0.)
H[..., -1, -1] += 1.0
return H
def convert_affinematrix_to_homography(A: torch.Tensor) -> torch.Tensor:
    r"""Function that converts batch of affine matrices from [Bx2x3] to [Bx3x3].

    Examples::

        >>> input = torch.rand(2, 2, 3)  # Bx2x3
        >>> output = convert_affinematrix_to_homography(input)  # Bx3x3
    """
    if not isinstance(A, torch.Tensor):
        raise TypeError("Input type is not a torch.Tensor. Got {}".format(
            type(A)))
    if not (len(A.shape) == 3 and A.shape[-2:] == (2, 3)):
        raise ValueError("Input matrix must be a Bx2x3 tensor. Got {}"
                         .format(A.shape))
    # append the homography bottom row [0, 0, 1] (inlined padding helper)
    H: torch.Tensor = F.pad(A, [0, 0, 0, 1], "constant", value=0.)
    H[..., -1, -1] += 1.0
    return H
def convert_affinematrix_to_homography3d(A: torch.Tensor) -> torch.Tensor:
    r"""Function that converts batch of affine matrices from [Bx3x4] to [Bx4x4].

    Examples::

        >>> input = torch.rand(2, 3, 4)  # Bx3x4
        >>> output = convert_affinematrix_to_homography3d(input)  # Bx4x4
    """
    if not isinstance(A, torch.Tensor):
        raise TypeError("Input type is not a torch.Tensor. Got {}".format(
            type(A)))
    if not (len(A.shape) == 3 and A.shape[-2:] == (3, 4)):
        raise ValueError("Input matrix must be a Bx3x4 tensor. Got {}"
                         .format(A.shape))
    # append the homography bottom row [0, 0, 0, 1] (inlined padding helper)
    H: torch.Tensor = F.pad(A, [0, 0, 0, 1], "constant", value=0.)
    H[..., -1, -1] += 1.0
    return H
def angle_axis_to_rotation_matrix(angle_axis: torch.Tensor) -> torch.Tensor:
    r"""Convert 3d vector of axis-angle rotation to 3x3 rotation matrix

    Args:
        angle_axis (torch.Tensor): tensor of 3d vector of axis-angle rotations.

    Returns:
        torch.Tensor: tensor of 3x3 rotation matrices.

    Shape:
        - Input: :math:`(N, 3)`
        - Output: :math:`(N, 3, 3)`

    Example:
        >>> input = torch.rand(1, 3)  # Nx3
        >>> output = angle_axis_to_rotation_matrix(input)  # Nx3x3
    """
    if not isinstance(angle_axis, torch.Tensor):
        raise TypeError("Input type is not a torch.Tensor. Got {}".format(
            type(angle_axis)))

    if not angle_axis.shape[-1] == 3:
        raise ValueError(
            "Input size must be a (*, 3) tensor. Got {}".format(
                angle_axis.shape))

    def _compute_rotation_matrix(angle_axis, theta2, eps=1e-6):
        # Rodrigues' rotation formula, used when the angle is bounded away
        # from zero (see the mask below).
        # We want to be careful to only evaluate the square root if the
        # norm of the angle_axis vector is greater than zero. Otherwise
        # we get a division by zero.
        k_one = 1.0
        theta = torch.sqrt(theta2)
        wxyz = angle_axis / (theta + eps)  # normalized rotation axis (wx, wy, wz)
        wx, wy, wz = torch.chunk(wxyz, 3, dim=1)
        cos_theta = torch.cos(theta)
        sin_theta = torch.sin(theta)

        # the nine entries of R, laid out row-major and concatenated below
        r00 = cos_theta + wx * wx * (k_one - cos_theta)
        r10 = wz * sin_theta + wx * wy * (k_one - cos_theta)
        r20 = -wy * sin_theta + wx * wz * (k_one - cos_theta)
        r01 = wx * wy * (k_one - cos_theta) - wz * sin_theta
        r11 = cos_theta + wy * wy * (k_one - cos_theta)
        r21 = wx * sin_theta + wy * wz * (k_one - cos_theta)
        r02 = wy * sin_theta + wx * wz * (k_one - cos_theta)
        r12 = -wx * sin_theta + wy * wz * (k_one - cos_theta)
        r22 = cos_theta + wz * wz * (k_one - cos_theta)
        rotation_matrix = torch.cat(
            [r00, r01, r02, r10, r11, r12, r20, r21, r22], dim=1)
        return rotation_matrix.view(-1, 3, 3)

    def _compute_rotation_matrix_taylor(angle_axis):
        # Small-angle fallback: first-order approximation R ~ I + [r]_x,
        # which avoids dividing by a (near-)zero rotation angle.
        rx, ry, rz = torch.chunk(angle_axis, 3, dim=1)
        k_one = torch.ones_like(rx)
        rotation_matrix = torch.cat(
            [k_one, -rz, ry, rz, k_one, -rx, -ry, rx, k_one], dim=1)
        return rotation_matrix.view(-1, 3, 3)

    # stolen from ceres/rotation.h
    _angle_axis = torch.unsqueeze(angle_axis, dim=1)
    # theta2 holds the squared rotation angle ||angle_axis||^2 per batch element
    theta2 = torch.matmul(_angle_axis, _angle_axis.transpose(1, 2))
    theta2 = torch.squeeze(theta2, dim=1)

    # compute rotation matrices along both paths; the mask below selects per element
    rotation_matrix_normal = _compute_rotation_matrix(angle_axis, theta2)
    rotation_matrix_taylor = _compute_rotation_matrix_taylor(angle_axis)

    # create mask to handle both cases: True -> exact formula, False -> Taylor fallback
    eps = 1e-6
    mask = (theta2 > eps).view(-1, 1, 1).to(theta2.device)
    mask_pos = (mask).type_as(theta2)
    mask_neg = (mask == False).type_as(theta2)  # noqa

    # create output pose matrix (batch of identities to be overwritten)
    batch_size = angle_axis.shape[0]
    rotation_matrix = torch.eye(3).to(angle_axis.device).type_as(angle_axis)
    rotation_matrix = rotation_matrix.view(1, 3, 3).repeat(batch_size, 1, 1)
    # fill output matrix with masked values: exactly one of the two terms is
    # non-zero for each batch element
    rotation_matrix[..., :3, :3] = \
        mask_pos * rotation_matrix_normal + mask_neg * rotation_matrix_taylor
    return rotation_matrix  # Nx3x3
def rotation_matrix_to_angle_axis(
        rotation_matrix: torch.Tensor) -> torch.Tensor:
    r"""Convert 3x3 rotation matrix to Rodrigues vector.

    Args:
        rotation_matrix (torch.Tensor): rotation matrix.

    Returns:
        torch.Tensor: Rodrigues vector transformation.

    Shape:
        - Input: :math:`(N, 3, 3)`
        - Output: :math:`(N, 3)`

    Example:
        >>> input = torch.rand(2, 3, 3)  # Nx3x3
        >>> output = rotation_matrix_to_angle_axis(input)  # Nx3
    """
    if not isinstance(rotation_matrix, torch.Tensor):
        raise TypeError("Input type is not a torch.Tensor. Got {}".format(
            type(rotation_matrix)))
    if rotation_matrix.shape[-2:] != (3, 3):
        raise ValueError(
            "Input size must be a (*, 3, 3) tensor. Got {}".format(
                rotation_matrix.shape))
    # Route through the quaternion representation.
    intermediate = rotation_matrix_to_quaternion(rotation_matrix)
    return quaternion_to_angle_axis(intermediate)
def rotation_matrix_to_quaternion(
        rotation_matrix: torch.Tensor,
        eps: float = 1e-8) -> torch.Tensor:
    r"""Convert 3x3 rotation matrix to 4d quaternion vector.

    The quaternion vector has components in (x, y, z, w) format.

    Args:
        rotation_matrix (torch.Tensor): the rotation matrix to convert.
        eps (float): small value to avoid zero division. Default: 1e-8.

    Return:
        torch.Tensor: the rotation in quaternion.

    Shape:
        - Input: :math:`(*, 3, 3)`
        - Output: :math:`(*, 4)`

    Example:
        >>> input = torch.rand(4, 3, 3)  # Nx3x3
        >>> output = rotation_matrix_to_quaternion(input)  # Nx4
    """
    if not isinstance(rotation_matrix, torch.Tensor):
        raise TypeError("Input type is not a torch.Tensor. Got {}".format(
            type(rotation_matrix)))
    if not rotation_matrix.shape[-2:] == (3, 3):
        raise ValueError(
            "Input size must be a (*, 3, 3) tensor. Got {}".format(
                rotation_matrix.shape))

    def safe_zero_division(numerator: torch.Tensor,
                           denominator: torch.Tensor) -> torch.Tensor:
        # Clamp the denominator to the smallest positive normal number of
        # the dtype so the division can never produce inf/nan.
        eps: float = torch.finfo(numerator.dtype).tiny  # type: ignore
        return numerator / torch.clamp(denominator, min=eps)

    # Flatten the trailing 3x3 into 9 components for elementwise math.
    rotation_matrix_vec: torch.Tensor = rotation_matrix.view(
        *rotation_matrix.shape[:-2], 9)
    m00, m01, m02, m10, m11, m12, m20, m21, m22 = torch.chunk(
        rotation_matrix_vec, chunks=9, dim=-1)
    trace: torch.Tensor = m00 + m11 + m22

    def trace_positive_cond():
        sq = torch.sqrt(trace + 1.0) * 2.  # sq = 4 * qw.
        qw = 0.25 * sq
        qx = safe_zero_division(m21 - m12, sq)
        qy = safe_zero_division(m02 - m20, sq)
        qz = safe_zero_division(m10 - m01, sq)
        return torch.cat([qx, qy, qz, qw], dim=-1)

    def cond_1():
        sq = torch.sqrt(1.0 + m00 - m11 - m22 + eps) * 2.  # sq = 4 * qx.
        qw = safe_zero_division(m21 - m12, sq)
        qx = 0.25 * sq
        qy = safe_zero_division(m01 + m10, sq)
        qz = safe_zero_division(m02 + m20, sq)
        return torch.cat([qx, qy, qz, qw], dim=-1)

    def cond_2():
        sq = torch.sqrt(1.0 + m11 - m00 - m22 + eps) * 2.  # sq = 4 * qy.
        qw = safe_zero_division(m02 - m20, sq)
        qx = safe_zero_division(m01 + m10, sq)
        qy = 0.25 * sq
        qz = safe_zero_division(m12 + m21, sq)
        return torch.cat([qx, qy, qz, qw], dim=-1)

    def cond_3():
        sq = torch.sqrt(1.0 + m22 - m00 - m11 + eps) * 2.  # sq = 4 * qz.
        qw = safe_zero_division(m10 - m01, sq)
        qx = safe_zero_division(m02 + m20, sq)
        qy = safe_zero_division(m12 + m21, sq)
        qz = 0.25 * sq
        return torch.cat([qx, qy, qz, qw], dim=-1)

    # NOTE: torch.where evaluates *all* branches; the eps terms above keep
    # the untaken sqrt arguments from going negative.
    where_2 = torch.where(m11 > m22, cond_2(), cond_3())
    where_1 = torch.where(
        (m00 > m11) & (m00 > m22), cond_1(), where_2)
    quaternion: torch.Tensor = torch.where(
        trace > 0., trace_positive_cond(), where_1)
    return quaternion
def normalize_quaternion(quaternion: torch.Tensor,
                         eps: float = 1e-12) -> torch.Tensor:
    r"""Normalizes a quaternion to unit length.

    The quaternion should be in (x, y, z, w) format.

    Args:
        quaternion (torch.Tensor): a tensor containing a quaternion to be
            normalized. The tensor can be of shape :math:`(*, 4)`.
        eps (Optional[bool]): small value to avoid division by zero.
            Default: 1e-12.

    Return:
        torch.Tensor: the normalized quaternion of shape :math:`(*, 4)`.

    Example:
        >>> quaternion = torch.tensor([1., 0., 1., 0.])
        >>> normalize_quaternion(quaternion)
        tensor([0.7071, 0.0000, 0.7071, 0.0000])
    """
    if not isinstance(quaternion, torch.Tensor):
        raise TypeError("Input type is not a torch.Tensor. Got {}".format(
            type(quaternion)))
    if quaternion.shape[-1] != 4:
        raise ValueError(
            "Input must be a tensor of shape (*, 4). Got {}".format(
                quaternion.shape))
    # L2-normalize along the last (component) dimension.
    return F.normalize(quaternion, p=2, dim=-1, eps=eps)
# based on:
# https://github.com/matthew-brett/transforms3d/blob/8965c48401d9e8e66b6a8c37c65f2fc200a076fa/transforms3d/quaternions.py#L101
# https://github.com/tensorflow/graphics/blob/master/tensorflow_graphics/geometry/transformation/rotation_matrix_3d.py#L247
def quaternion_to_rotation_matrix(quaternion: torch.Tensor) -> torch.Tensor:
    r"""Converts a quaternion to a rotation matrix.

    The quaternion should be in (x, y, z, w) format.

    Args:
        quaternion (torch.Tensor): a tensor containing a quaternion to be
            converted. The tensor can be of shape :math:`(*, 4)`.

    Return:
        torch.Tensor: the rotation matrix of shape :math:`(*, 3, 3)`.

    Example:
        >>> quaternion = torch.tensor([0., 0., 1., 0.])
        >>> quaternion_to_rotation_matrix(quaternion)
        tensor([[-1.,  0.,  0.],
                [ 0., -1.,  0.],
                [ 0.,  0.,  1.]])
    """
    if not isinstance(quaternion, torch.Tensor):
        raise TypeError("Input type is not a torch.Tensor. Got {}".format(
            type(quaternion)))
    if quaternion.shape[-1] != 4:
        raise ValueError(
            "Input must be a tensor of shape (*, 4). Got {}".format(
                quaternion.shape))
    # Work with a unit quaternion (inlined normalize_quaternion).
    x, y, z, w = torch.chunk(
        F.normalize(quaternion, p=2, dim=-1, eps=1e-12), chunks=4, dim=-1)
    # Doubled products used by the standard closed-form conversion.
    xx2, yy2, zz2 = 2.0 * x * x, 2.0 * y * y, 2.0 * z * z
    xy2, xz2, yz2 = 2.0 * y * x, 2.0 * z * x, 2.0 * z * y
    wx2, wy2, wz2 = 2.0 * x * w, 2.0 * y * w, 2.0 * z * w
    one = torch.tensor(1.)
    rot = torch.stack([
        one - (yy2 + zz2), xy2 - wz2, xz2 + wy2,
        xy2 + wz2, one - (xx2 + zz2), yz2 - wx2,
        xz2 - wy2, yz2 + wx2, one - (xx2 + yy2)
    ], dim=-1).view(-1, 3, 3)
    # A single unbatched quaternion yields an unbatched 3x3 matrix.
    if len(quaternion.shape) == 1:
        rot = torch.squeeze(rot, dim=0)
    return rot
def quaternion_to_angle_axis(quaternion: torch.Tensor) -> torch.Tensor:
    """Convert quaternion vector to angle axis of rotation.

    NOTE(review): the previous docstring claimed (x, y, z, w) order, but the
    indexing below reads component 0 as the scalar part, i.e. the input is
    interpreted as (w, x, y, z).

    Adapted from ceres C++ library: ceres-solver/include/ceres/rotation.h

    Args:
        quaternion (torch.Tensor): tensor with quaternions.

    Return:
        torch.Tensor: tensor with angle axis of rotation.

    Shape:
        - Input: :math:`(*, 4)` where `*` means, any number of dimensions
        - Output: :math:`(*, 3)`

    Example:
        >>> quaternion = torch.rand(2, 4)  # Nx4
        >>> angle_axis = quaternion_to_angle_axis(quaternion)  # Nx3
    """
    if not torch.is_tensor(quaternion):
        raise TypeError("Input type is not a torch.Tensor. Got {}".format(
            type(quaternion)))
    if not quaternion.shape[-1] == 4:
        raise ValueError(
            "Input must be a tensor of shape Nx4 or 4. Got {}".format(
                quaternion.shape))
    # unpack input and compute conversion
    q1: torch.Tensor = quaternion[..., 1]
    q2: torch.Tensor = quaternion[..., 2]
    q3: torch.Tensor = quaternion[..., 3]
    sin_squared_theta: torch.Tensor = q1 * q1 + q2 * q2 + q3 * q3
    sin_theta: torch.Tensor = torch.sqrt(sin_squared_theta)
    cos_theta: torch.Tensor = quaternion[..., 0]  # scalar (w) component
    # atan2-based angle recovery keeps 2*theta in a consistent range
    two_theta: torch.Tensor = 2.0 * torch.where(
        cos_theta < 0.0, torch.atan2(-sin_theta, -cos_theta),
        torch.atan2(sin_theta, cos_theta))
    k_pos: torch.Tensor = two_theta / sin_theta
    k_neg: torch.Tensor = 2.0 * torch.ones_like(sin_theta)
    # near theta = 0 fall back to the first-order factor k = 2
    k: torch.Tensor = torch.where(sin_squared_theta > 0.0, k_pos, k_neg)
    angle_axis: torch.Tensor = torch.zeros_like(quaternion)[..., :3]
    angle_axis[..., 0] += q1 * k
    angle_axis[..., 1] += q2 * k
    angle_axis[..., 2] += q3 * k
    return angle_axis
def quaternion_log_to_exp(quaternion: torch.Tensor,
                          eps: float = 1e-8) -> torch.Tensor:
    r"""Applies exponential map to log quaternion.

    The quaternion should be in (x, y, z, w) format.

    Args:
        quaternion (torch.Tensor): a tensor containing a quaternion to be
            converted. The tensor can be of shape :math:`(*, 3)`.
        eps (float): lower clamp for the vector norm. Default: 1e-8.

    Return:
        torch.Tensor: the quaternion exponential map of shape :math:`(*, 4)`.

    Example:
        >>> quaternion = torch.tensor([0., 0., 0.])
        >>> quaternion_log_to_exp(quaternion)
        tensor([0., 0., 0., 1.])
    """
    if not isinstance(quaternion, torch.Tensor):
        raise TypeError("Input type is not a torch.Tensor. Got {}".format(
            type(quaternion)))
    if quaternion.shape[-1] != 3:
        raise ValueError(
            "Input must be a tensor of shape (*, 3). Got {}".format(
                quaternion.shape))
    # |v|, clamped away from zero so the sinc-like division is safe.
    theta = torch.norm(quaternion, p=2, dim=-1, keepdim=True).clamp(min=eps)
    # exp(v) = (v/|v| * sin|v|, cos|v|), vector part first.
    vector_part = quaternion * torch.sin(theta) / theta
    scalar_part = torch.cos(theta)
    return torch.cat([vector_part, scalar_part], dim=-1)
def quaternion_exp_to_log(quaternion: torch.Tensor,
                          eps: float = 1e-8) -> torch.Tensor:
    r"""Applies the log map to a quaternion.

    The quaternion should be in (x, y, z, w) format.

    Args:
        quaternion (torch.Tensor): a tensor containing a quaternion to be
            converted. The tensor can be of shape :math:`(*, 4)`.
        eps (float): lower clamp for the vector norm. Default: 1e-8.

    Return:
        torch.Tensor: the quaternion log map of shape :math:`(*, 3)`.

    Example:
        >>> quaternion = torch.tensor([0., 0., 0., 1.])
        >>> quaternion_exp_to_log(quaternion)
        tensor([0., 0., 0.])
    """
    if not isinstance(quaternion, torch.Tensor):
        raise TypeError("Input type is not a torch.Tensor. Got {}".format(
            type(quaternion)))
    if quaternion.shape[-1] != 4:
        raise ValueError(
            "Input must be a tensor of shape (*, 4). Got {}".format(
                quaternion.shape))
    # Split into vector (x, y, z) and scalar (w) parts.
    vector_part = quaternion[..., 0:3]
    scalar_part = quaternion[..., 3:4]
    # |v| clamped away from zero; acos argument clamped into its domain.
    norm_v = torch.norm(vector_part, p=2, dim=-1, keepdim=True).clamp(min=eps)
    angle = torch.acos(torch.clamp(scalar_part, min=-1.0, max=1.0))
    return vector_part * angle / norm_v
# based on:
# https://github.com/facebookresearch/QuaterNet/blob/master/common/quaternion.py#L138
def angle_axis_to_quaternion(angle_axis: torch.Tensor) -> torch.Tensor:
    r"""Convert an angle axis to a quaternion.

    NOTE(review): the previous docstring claimed (x, y, z, w) order, but the
    final ``torch.cat([w, quaternion], ...)`` puts the scalar part first, so
    the returned quaternion is in (w, x, y, z) format.

    Adapted from ceres C++ library: ceres-solver/include/ceres/rotation.h

    Args:
        angle_axis (torch.Tensor): tensor with angle axis.

    Return:
        torch.Tensor: tensor with quaternion.

    Shape:
        - Input: :math:`(*, 3)` where `*` means, any number of dimensions
        - Output: :math:`(*, 4)`

    Example:
        >>> angle_axis = torch.rand(2, 3)  # Nx3
        >>> quaternion = angle_axis_to_quaternion(angle_axis)  # Nx4
    """
    if not torch.is_tensor(angle_axis):
        raise TypeError("Input type is not a torch.Tensor. Got {}".format(
            type(angle_axis)))
    if not angle_axis.shape[-1] == 3:
        raise ValueError(
            "Input must be a tensor of shape Nx3 or 3. Got {}".format(
                angle_axis.shape))
    # unpack input and compute conversion
    a0: torch.Tensor = angle_axis[..., 0:1]
    a1: torch.Tensor = angle_axis[..., 1:2]
    a2: torch.Tensor = angle_axis[..., 2:3]
    theta_squared: torch.Tensor = a0 * a0 + a1 * a1 + a2 * a2
    theta: torch.Tensor = torch.sqrt(theta_squared)
    half_theta: torch.Tensor = theta * 0.5
    mask: torch.Tensor = theta_squared > 0.0
    ones: torch.Tensor = torch.ones_like(half_theta)
    # near theta = 0 use the first-order approximation sin(t/2)/t ~ 1/2
    k_neg: torch.Tensor = 0.5 * ones
    k_pos: torch.Tensor = torch.sin(half_theta) / theta
    k: torch.Tensor = torch.where(mask, k_pos, k_neg)
    w: torch.Tensor = torch.where(mask, torch.cos(half_theta), ones)
    quaternion: torch.Tensor = torch.zeros_like(angle_axis)
    quaternion[..., 0:1] += a0 * k
    quaternion[..., 1:2] += a1 * k
    quaternion[..., 2:3] += a2 * k
    return torch.cat([w, quaternion], dim=-1)
# based on:
# https://github.com/ClementPinard/SfmLearner-Pytorch/blob/master/inverse_warp.py#L65-L71
def normalize_pixel_coordinates(
        pixel_coordinates: torch.Tensor,
        height: int,
        width: int,
        eps: float = 1e-8) -> torch.Tensor:
    r"""Normalize pixel coordinates between -1 and 1.

    Normalized, -1 if on extreme left, 1 if on extreme right (x = w-1).

    Args:
        pixel_coordinates (torch.Tensor): the grid with (x, y) pixel
            coordinates. Shape can be :math:`(*, 2)`.
        height (int): the maximum height in the y-axis.
        width (int): the maximum width in the x-axis.
        eps (float): safe division by zero. (default 1e-8).

    Return:
        torch.Tensor: the normalized pixel coordinates.
    """
    if pixel_coordinates.shape[-1] != 2:
        raise ValueError("Input pixel_coordinates must be of shape (*, 2). "
                         "Got {}".format(pixel_coordinates.shape))
    dev, dtype = pixel_coordinates.device, pixel_coordinates.dtype
    # Per-axis extent (w-1, h-1), clamped to avoid dividing by zero.
    extent = torch.stack([
        torch.tensor(width, device=dev, dtype=dtype),
        torch.tensor(height, device=dev, dtype=dtype),
    ]) - 1
    scale = torch.tensor(2., device=dev, dtype=dtype) / extent.clamp(eps)
    return scale * pixel_coordinates - 1
def denormalize_pixel_coordinates(
        pixel_coordinates: torch.Tensor,
        height: int,
        width: int,
        eps: float = 1e-8) -> torch.Tensor:
    r"""Denormalize pixel coordinates.

    The input is assumed to be -1 if on extreme left, 1 if on
    extreme right (x = w-1).

    Args:
        pixel_coordinates (torch.Tensor): the normalized grid coordinates.
            Shape can be :math:`(*, 2)`.
        height (int): the maximum height in the y-axis.
        width (int): the maximum width in the x-axis.
        eps (float): safe division by zero. (default 1e-8).

    Return:
        torch.Tensor: the denormalized pixel coordinates.
    """
    if pixel_coordinates.shape[-1] != 2:
        raise ValueError("Input pixel_coordinates must be of shape (*, 2). "
                         "Got {}".format(pixel_coordinates.shape))
    # Half of the per-axis extent; maps [-1, 1] back onto [0, size-1].
    half_extent = (torch.stack([torch.tensor(width), torch.tensor(height)])
                   .to(pixel_coordinates.device)
                   .to(pixel_coordinates.dtype) - 1).clamp(eps) / 2
    return half_extent * (pixel_coordinates + 1)
def normalize_pixel_coordinates3d(
        pixel_coordinates: torch.Tensor,
        depth: int,
        height: int,
        width: int,
        eps: float = 1e-8) -> torch.Tensor:
    r"""Normalize pixel coordinates between -1 and 1.

    Normalized, -1 if on extreme left, 1 if on extreme right (x = w-1).

    Args:
        pixel_coordinates (torch.Tensor): the grid with pixel coordinates.
            Shape can be :math:`(*, 3)`.
        depth (int): the maximum depth in the z-axis.
        height (int): the maximum height in the y-axis.
        width (int): the maximum width in the x-axis.
        eps (float): safe division by zero. (default 1e-8).

    Return:
        torch.Tensor: the normalized pixel coordinates.
    """
    if pixel_coordinates.shape[-1] != 3:
        raise ValueError("Input pixel_coordinates must be of shape (*, 3). "
                         "Got {}".format(pixel_coordinates.shape))
    # NOTE(review): factors are stacked as (depth, width, height) — confirm
    # this matches the coordinate order of the incoming grid.
    extent = (torch.stack([
        torch.tensor(depth), torch.tensor(width), torch.tensor(height)
    ]).to(pixel_coordinates.device).to(pixel_coordinates.dtype) - 1).clamp(eps)
    return torch.tensor(2.) / extent * pixel_coordinates - 1
def denormalize_pixel_coordinates3d(
        pixel_coordinates: torch.Tensor,
        depth: int,
        height: int,
        width: int,
        eps: float = 1e-8) -> torch.Tensor:
    r"""Denormalize pixel coordinates.

    The input is assumed to be -1 if on extreme left, 1 if on
    extreme right (x = w-1).

    Args:
        pixel_coordinates (torch.Tensor): the normalized grid coordinates.
            Shape can be :math:`(*, 3)`.
        depth (int): the maximum depth in the x-axis.
        height (int): the maximum height in the y-axis.
        width (int): the maximum width in the x-axis.
        eps (float): safe division by zero. (default 1e-8).

    Return:
        torch.Tensor: the denormalized pixel coordinates.
    """
    if pixel_coordinates.shape[-1] != 3:
        raise ValueError("Input pixel_coordinates must be of shape (*, 3). "
                         "Got {}".format(pixel_coordinates.shape))
    # NOTE(review): factors are stacked as (depth, width, height) — confirm
    # this matches the coordinate order of the incoming grid.
    half_extent = (torch.stack([
        torch.tensor(depth), torch.tensor(width), torch.tensor(height)
    ]).to(pixel_coordinates.device).to(pixel_coordinates.dtype) - 1).clamp(eps) / 2
    return half_extent * (pixel_coordinates + 1)
| 35.379854
| 126
| 0.618941
|
from typing import Tuple
import torch
import torch.nn as nn
import torch.nn.functional as F
from kornia.constants import pi
__all__ = [
"rad2deg",
"deg2rad",
"pol2cart",
"cart2pol",
"convert_points_from_homogeneous",
"convert_points_to_homogeneous",
"convert_affinematrix_to_homography",
"convert_affinematrix_to_homography3d",
"angle_axis_to_rotation_matrix",
"angle_axis_to_quaternion",
"rotation_matrix_to_angle_axis",
"rotation_matrix_to_quaternion",
"quaternion_to_angle_axis",
"quaternion_to_rotation_matrix",
"quaternion_log_to_exp",
"quaternion_exp_to_log",
"denormalize_pixel_coordinates",
"normalize_pixel_coordinates",
"normalize_quaternion",
"denormalize_pixel_coordinates3d",
"normalize_pixel_coordinates3d",
]
def rad2deg(tensor: torch.Tensor) -> torch.Tensor:
    """Convert angles from radians to degrees.

    Args:
        tensor (torch.Tensor): angles in radians, any shape.

    Returns:
        torch.Tensor: same-shape tensor with angles in degrees.
    """
    if not isinstance(tensor, torch.Tensor):
        raise TypeError("Input type is not a torch.Tensor. Got {}".format(
            type(tensor)))
    # `pi` comes from kornia.constants; move it to the input's device/dtype.
    return 180. * tensor / pi.to(tensor.device).type(tensor.dtype)
def deg2rad(tensor: torch.Tensor) -> torch.Tensor:
    """Convert angles from degrees to radians.

    Args:
        tensor (torch.Tensor): angles in degrees, any shape.

    Returns:
        torch.Tensor: same-shape tensor with angles in radians.
    """
    if not isinstance(tensor, torch.Tensor):
        raise TypeError("Input type is not a torch.Tensor. Got {}".format(
            type(tensor)))
    # `pi` comes from kornia.constants; move it to the input's device/dtype.
    return tensor * pi.to(tensor.device).type(tensor.dtype) / 180.
def pol2cart(rho: torch.Tensor, phi: torch.Tensor) -> Tuple[torch.Tensor, torch.Tensor]:
    """Convert polar coordinates (rho, phi) to Cartesian (x, y).

    Args:
        rho (torch.Tensor): radial distance.
        phi (torch.Tensor): angle in radians, broadcastable with ``rho``.

    Returns:
        Tuple[torch.Tensor, torch.Tensor]: the (x, y) coordinates.
    """
    if not (isinstance(rho, torch.Tensor) & isinstance(phi, torch.Tensor)):
        raise TypeError("Input type is not a torch.Tensor. Got {}, {}".format(
            type(rho), type(phi)))
    return rho * torch.cos(phi), rho * torch.sin(phi)
def cart2pol(x: torch.Tensor, y: torch.Tensor, eps: float = 1e-8) -> Tuple[torch.Tensor, torch.Tensor]:
    """Convert Cartesian coordinates (x, y) to polar (rho, phi).

    Args:
        x (torch.Tensor): x-coordinates.
        y (torch.Tensor): y-coordinates, broadcastable with ``x``.
        eps (float): small value added under the square root for numerical
            safety at the origin.

    Returns:
        Tuple[torch.Tensor, torch.Tensor]: the radius ``rho`` and the angle
        ``phi`` in radians (via ``atan2``).
    """
    if not (isinstance(x, torch.Tensor) & isinstance(y, torch.Tensor)):
        raise TypeError("Input type is not a torch.Tensor. Got {}, {}".format(
            type(x), type(y)))
    radius = torch.sqrt(x ** 2 + y ** 2 + eps)
    angle = torch.atan2(y, x)
    return radius, angle
def convert_points_from_homogeneous(
        points: torch.Tensor, eps: float = 1e-8) -> torch.Tensor:
    r"""Convert points from homogeneous to Euclidean coordinates.

    Divides each point by its last (w) coordinate, leaving points with
    ``|w| <= eps`` unscaled.

    Args:
        points (torch.Tensor): points of shape :math:`(*, N, D+1)`.
        eps (float): threshold below which w is treated as zero.

    Returns:
        torch.Tensor: points of shape :math:`(*, N, D)`.
    """
    if not isinstance(points, torch.Tensor):
        raise TypeError("Input type is not a torch.Tensor. Got {}".format(
            type(points)))
    if len(points.shape) < 2:
        raise ValueError("Input must be at least a 2D tensor. Got {}".format(
            points.shape))
    w = points[..., -1:]
    nonzero = torch.abs(w) > eps
    # Scale is 1/w where |w| is safely non-zero, 1 elsewhere.
    scale = torch.ones_like(w).masked_scatter_(
        nonzero, torch.tensor(1.0).to(points.device) / w[nonzero])
    return scale * points[..., :-1]
def convert_points_to_homogeneous(points: torch.Tensor) -> torch.Tensor:
    r"""Convert Euclidean points to homogeneous coordinates.

    Args:
        points (torch.Tensor): points of shape :math:`(*, N, D)`.

    Returns:
        torch.Tensor: points of shape :math:`(*, N, D+1)` with a trailing 1.
    """
    if not isinstance(points, torch.Tensor):
        raise TypeError("Input type is not a torch.Tensor. Got {}".format(
            type(points)))
    if len(points.shape) < 2:
        raise ValueError("Input must be at least a 2D tensor. Got {}".format(
            points.shape))
    # Append a unit w-coordinate along the last dimension.
    return torch.cat([points, torch.ones_like(points[..., :1])], dim=-1)
def _convert_affinematrix_to_homography_impl(A: torch.Tensor) -> torch.Tensor:
H: torch.Tensor = torch.nn.functional.pad(A, [0, 0, 0, 1], "constant", value=0.)
H[..., -1, -1] += 1.0
return H
def convert_affinematrix_to_homography(A: torch.Tensor) -> torch.Tensor:
    r"""Convert a batch of 2x3 affine matrices to 3x3 homographies.

    Args:
        A (torch.Tensor): affine matrices of shape :math:`(B, 2, 3)`.

    Returns:
        torch.Tensor: homographies of shape :math:`(B, 3, 3)`.
    """
    if not isinstance(A, torch.Tensor):
        raise TypeError("Input type is not a torch.Tensor. Got {}".format(
            type(A)))
    if not (len(A.shape) == 3 and A.shape[-2:] == (2, 3)):
        raise ValueError("Input matrix must be a Bx2x3 tensor. Got {}"
                         .format(A.shape))
    # Inlined _convert_affinematrix_to_homography_impl: pad a zero row
    # and set the homogeneous corner to one.
    H = torch.nn.functional.pad(A, [0, 0, 0, 1], "constant", value=0.)
    H[..., -1, -1] += 1.0
    return H
def convert_affinematrix_to_homography3d(A: torch.Tensor) -> torch.Tensor:
    r"""Convert a batch of 3x4 affine matrices to 4x4 homographies.

    Args:
        A (torch.Tensor): affine matrices of shape :math:`(B, 3, 4)`.

    Returns:
        torch.Tensor: homographies of shape :math:`(B, 4, 4)`.
    """
    if not isinstance(A, torch.Tensor):
        raise TypeError("Input type is not a torch.Tensor. Got {}".format(
            type(A)))
    if not (len(A.shape) == 3 and A.shape[-2:] == (3, 4)):
        raise ValueError("Input matrix must be a Bx3x4 tensor. Got {}"
                         .format(A.shape))
    # Inlined _convert_affinematrix_to_homography_impl: pad a zero row
    # and set the homogeneous corner to one.
    H = torch.nn.functional.pad(A, [0, 0, 0, 1], "constant", value=0.)
    H[..., -1, -1] += 1.0
    return H
def angle_axis_to_rotation_matrix(angle_axis: torch.Tensor) -> torch.Tensor:
    r"""Convert a (N, 3) axis-angle rotation vector to (N, 3, 3) rotation matrices.

    Uses Rodrigues' formula for non-degenerate angles and a first-order
    Taylor approximation near zero, selected per sample by a mask.
    """
    if not isinstance(angle_axis, torch.Tensor):
        raise TypeError("Input type is not a torch.Tensor. Got {}".format(
            type(angle_axis)))
    if not angle_axis.shape[-1] == 3:
        raise ValueError(
            "Input size must be a (*, 3) tensor. Got {}".format(
                angle_axis.shape))

    def _compute_rotation_matrix(angle_axis, theta2, eps=1e-6):
        # Rodrigues' formula; eps keeps the axis normalization finite at 0.
        k_one = 1.0
        theta = torch.sqrt(theta2)
        wxyz = angle_axis / (theta + eps)
        wx, wy, wz = torch.chunk(wxyz, 3, dim=1)
        cos_theta = torch.cos(theta)
        sin_theta = torch.sin(theta)
        r00 = cos_theta + wx * wx * (k_one - cos_theta)
        r10 = wz * sin_theta + wx * wy * (k_one - cos_theta)
        r20 = -wy * sin_theta + wx * wz * (k_one - cos_theta)
        r01 = wx * wy * (k_one - cos_theta) - wz * sin_theta
        r11 = cos_theta + wy * wy * (k_one - cos_theta)
        r21 = wx * sin_theta + wy * wz * (k_one - cos_theta)
        r02 = wy * sin_theta + wx * wz * (k_one - cos_theta)
        r12 = -wx * sin_theta + wy * wz * (k_one - cos_theta)
        r22 = cos_theta + wz * wz * (k_one - cos_theta)
        rotation_matrix = torch.cat(
            [r00, r01, r02, r10, r11, r12, r20, r21, r22], dim=1)
        return rotation_matrix.view(-1, 3, 3)

    def _compute_rotation_matrix_taylor(angle_axis):
        # Small-angle approximation: R ~ I + [r]_x.
        rx, ry, rz = torch.chunk(angle_axis, 3, dim=1)
        k_one = torch.ones_like(rx)
        rotation_matrix = torch.cat(
            [k_one, -rz, ry, rz, k_one, -rx, -ry, rx, k_one], dim=1)
        return rotation_matrix.view(-1, 3, 3)

    # theta2 = squared norm of each axis-angle vector (batched dot product).
    _angle_axis = torch.unsqueeze(angle_axis, dim=1)
    theta2 = torch.matmul(_angle_axis, _angle_axis.transpose(1, 2))
    theta2 = torch.squeeze(theta2, dim=1)
    # Evaluate both formulas; pick per sample below.
    rotation_matrix_normal = _compute_rotation_matrix(angle_axis, theta2)
    rotation_matrix_taylor = _compute_rotation_matrix_taylor(angle_axis)
    eps = 1e-6
    mask = (theta2 > eps).view(-1, 1, 1).to(theta2.device)
    mask_pos = (mask).type_as(theta2)
    mask_neg = (mask == False).type_as(theta2)  # noqa: E712
    # Blend the two candidate matrices with the complementary masks.
    batch_size = angle_axis.shape[0]
    rotation_matrix = torch.eye(3).to(angle_axis.device).type_as(angle_axis)
    rotation_matrix = rotation_matrix.view(1, 3, 3).repeat(batch_size, 1, 1)
    rotation_matrix[..., :3, :3] = \
        mask_pos * rotation_matrix_normal + mask_neg * rotation_matrix_taylor
    return rotation_matrix
def rotation_matrix_to_angle_axis(
        rotation_matrix: torch.Tensor) -> torch.Tensor:
    """Convert a (*, 3, 3) rotation matrix to a (*, 3) Rodrigues vector."""
    if not isinstance(rotation_matrix, torch.Tensor):
        raise TypeError("Input type is not a torch.Tensor. Got {}".format(
            type(rotation_matrix)))
    if not rotation_matrix.shape[-2:] == (3, 3):
        raise ValueError(
            "Input size must be a (*, 3, 3) tensor. Got {}".format(
                rotation_matrix.shape))
    # Convert via the quaternion representation.
    quaternion: torch.Tensor = rotation_matrix_to_quaternion(rotation_matrix)
    return quaternion_to_angle_axis(quaternion)
def rotation_matrix_to_quaternion(
        rotation_matrix: torch.Tensor,
        eps: float = 1e-8) -> torch.Tensor:
    """Convert a (*, 3, 3) rotation matrix to a (*, 4) quaternion (x, y, z, w).

    The four classic extraction branches are evaluated unconditionally and
    selected with nested ``torch.where``; ``eps`` keeps the untaken sqrt
    arguments non-negative.
    """
    if not isinstance(rotation_matrix, torch.Tensor):
        raise TypeError("Input type is not a torch.Tensor. Got {}".format(
            type(rotation_matrix)))
    if not rotation_matrix.shape[-2:] == (3, 3):
        raise ValueError(
            "Input size must be a (*, 3, 3) tensor. Got {}".format(
                rotation_matrix.shape))

    def safe_zero_division(numerator: torch.Tensor,
                           denominator: torch.Tensor) -> torch.Tensor:
        # Clamp to the smallest positive normal of the dtype: never inf/nan.
        eps: float = torch.finfo(numerator.dtype).tiny
        return numerator / torch.clamp(denominator, min=eps)

    # Flatten the trailing 3x3 into 9 scalar channels.
    rotation_matrix_vec: torch.Tensor = rotation_matrix.view(
        *rotation_matrix.shape[:-2], 9)
    m00, m01, m02, m10, m11, m12, m20, m21, m22 = torch.chunk(
        rotation_matrix_vec, chunks=9, dim=-1)
    trace: torch.Tensor = m00 + m11 + m22

    def trace_positive_cond():
        sq = torch.sqrt(trace + 1.0) * 2.  # sq = 4 * qw
        qw = 0.25 * sq
        qx = safe_zero_division(m21 - m12, sq)
        qy = safe_zero_division(m02 - m20, sq)
        qz = safe_zero_division(m10 - m01, sq)
        return torch.cat([qx, qy, qz, qw], dim=-1)

    def cond_1():
        sq = torch.sqrt(1.0 + m00 - m11 - m22 + eps) * 2.  # sq = 4 * qx
        qw = safe_zero_division(m21 - m12, sq)
        qx = 0.25 * sq
        qy = safe_zero_division(m01 + m10, sq)
        qz = safe_zero_division(m02 + m20, sq)
        return torch.cat([qx, qy, qz, qw], dim=-1)

    def cond_2():
        sq = torch.sqrt(1.0 + m11 - m00 - m22 + eps) * 2.  # sq = 4 * qy
        qw = safe_zero_division(m02 - m20, sq)
        qx = safe_zero_division(m01 + m10, sq)
        qy = 0.25 * sq
        qz = safe_zero_division(m12 + m21, sq)
        return torch.cat([qx, qy, qz, qw], dim=-1)

    def cond_3():
        sq = torch.sqrt(1.0 + m22 - m00 - m11 + eps) * 2.  # sq = 4 * qz
        qw = safe_zero_division(m10 - m01, sq)
        qx = safe_zero_division(m02 + m20, sq)
        qy = safe_zero_division(m12 + m21, sq)
        qz = 0.25 * sq
        return torch.cat([qx, qy, qz, qw], dim=-1)

    # Select the branch matching the dominant diagonal element / trace.
    where_2 = torch.where(m11 > m22, cond_2(), cond_3())
    where_1 = torch.where(
        (m00 > m11) & (m00 > m22), cond_1(), where_2)
    quaternion: torch.Tensor = torch.where(
        trace > 0., trace_positive_cond(), where_1)
    return quaternion
def normalize_quaternion(quaternion: torch.Tensor,
                         eps: float = 1e-12) -> torch.Tensor:
    """L2-normalize a quaternion of shape (*, 4); eps avoids division by zero."""
    if not isinstance(quaternion, torch.Tensor):
        raise TypeError("Input type is not a torch.Tensor. Got {}".format(
            type(quaternion)))
    if not quaternion.shape[-1] == 4:
        raise ValueError(
            "Input must be a tensor of shape (*, 4). Got {}".format(
                quaternion.shape))
    return F.normalize(quaternion, p=2, dim=-1, eps=eps)
def quaternion_to_rotation_matrix(quaternion: torch.Tensor) -> torch.Tensor:
    r"""Converts a quaternion to a rotation matrix.

    The quaternion should be in (x, y, z, w) format.

    Args:
        quaternion (torch.Tensor): quaternion of shape :math:`(*, 4)`.

    Return:
        torch.Tensor: the rotation matrix of shape :math:`(*, 3, 3)`.
    """
    # NOTE(review): the original `def` line was truncated
    # ("ternion_to_rotation_matrix..."); restored so the module parses again.
    if not isinstance(quaternion, torch.Tensor):
        raise TypeError("Input type is not a torch.Tensor. Got {}".format(
            type(quaternion)))
    if not quaternion.shape[-1] == 4:
        raise ValueError(
            "Input must be a tensor of shape (*, 4). Got {}".format(
                quaternion.shape))
    # Normalize first so the closed-form conversion below is valid
    # (equivalent to normalize_quaternion with its default eps).
    quaternion_norm: torch.Tensor = F.normalize(
        quaternion, p=2, dim=-1, eps=1e-12)
    x, y, z, w = torch.chunk(quaternion_norm, chunks=4, dim=-1)
    # Doubled products used by the standard conversion.
    tx: torch.Tensor = 2.0 * x
    ty: torch.Tensor = 2.0 * y
    tz: torch.Tensor = 2.0 * z
    twx: torch.Tensor = tx * w
    twy: torch.Tensor = ty * w
    twz: torch.Tensor = tz * w
    txx: torch.Tensor = tx * x
    txy: torch.Tensor = ty * x
    txz: torch.Tensor = tz * x
    tyy: torch.Tensor = ty * y
    tyz: torch.Tensor = tz * y
    tzz: torch.Tensor = tz * z
    one: torch.Tensor = torch.tensor(1.)
    matrix: torch.Tensor = torch.stack([
        one - (tyy + tzz), txy - twz, txz + twy,
        txy + twz, one - (txx + tzz), tyz - twx,
        txz - twy, tyz + twx, one - (txx + tyy)
    ], dim=-1).view(-1, 3, 3)
    # A single unbatched quaternion yields an unbatched 3x3 matrix.
    if len(quaternion.shape) == 1:
        matrix = torch.squeeze(matrix, dim=0)
    return matrix
def quaternion_to_angle_axis(quaternion: torch.Tensor) -> torch.Tensor:
    """Convert a (*, 4) quaternion to a (*, 3) angle-axis vector.

    NOTE(review): the indexing below reads component 0 as the scalar part,
    i.e. the input is interpreted as (w, x, y, z).
    """
    if not torch.is_tensor(quaternion):
        raise TypeError("Input type is not a torch.Tensor. Got {}".format(
            type(quaternion)))
    if not quaternion.shape[-1] == 4:
        raise ValueError(
            "Input must be a tensor of shape Nx4 or 4. Got {}".format(
                quaternion.shape))
    q1: torch.Tensor = quaternion[..., 1]
    q2: torch.Tensor = quaternion[..., 2]
    q3: torch.Tensor = quaternion[..., 3]
    sin_squared_theta: torch.Tensor = q1 * q1 + q2 * q2 + q3 * q3
    sin_theta: torch.Tensor = torch.sqrt(sin_squared_theta)
    cos_theta: torch.Tensor = quaternion[..., 0]  # scalar (w) component
    # atan2-based angle recovery keeps 2*theta in a consistent range.
    two_theta: torch.Tensor = 2.0 * torch.where(
        cos_theta < 0.0, torch.atan2(-sin_theta, -cos_theta),
        torch.atan2(sin_theta, cos_theta))
    k_pos: torch.Tensor = two_theta / sin_theta
    k_neg: torch.Tensor = 2.0 * torch.ones_like(sin_theta)
    # Near theta = 0 fall back to the first-order factor k = 2.
    k: torch.Tensor = torch.where(sin_squared_theta > 0.0, k_pos, k_neg)
    angle_axis: torch.Tensor = torch.zeros_like(quaternion)[..., :3]
    angle_axis[..., 0] += q1 * k
    angle_axis[..., 1] += q2 * k
    angle_axis[..., 2] += q3 * k
    return angle_axis
def quaternion_log_to_exp(quaternion: torch.Tensor,
                          eps: float = 1e-8) -> torch.Tensor:
    """Exponential map: (*, 3) log quaternion -> (*, 4) quaternion (x, y, z, w)."""
    if not isinstance(quaternion, torch.Tensor):
        raise TypeError("Input type is not a torch.Tensor. Got {}".format(
            type(quaternion)))
    if not quaternion.shape[-1] == 3:
        raise ValueError(
            "Input must be a tensor of shape (*, 3). Got {}".format(
                quaternion.shape))
    # |v|, clamped away from zero so the sinc-like division is safe.
    norm_q: torch.Tensor = torch.norm(
        quaternion, p=2, dim=-1, keepdim=True).clamp(min=eps)
    # exp(v) = (v/|v| * sin|v|, cos|v|), vector part first.
    quaternion_vector: torch.Tensor = quaternion * torch.sin(norm_q) / norm_q
    quaternion_scalar: torch.Tensor = torch.cos(norm_q)
    quaternion_exp: torch.Tensor = torch.cat(
        [quaternion_vector, quaternion_scalar], dim=-1)
    return quaternion_exp
def quaternion_exp_to_log(quaternion: torch.Tensor,
                          eps: float = 1e-8) -> torch.Tensor:
    """Log map: (*, 4) quaternion (x, y, z, w) -> (*, 3) log quaternion."""
    if not isinstance(quaternion, torch.Tensor):
        raise TypeError("Input type is not a torch.Tensor. Got {}".format(
            type(quaternion)))
    if not quaternion.shape[-1] == 4:
        raise ValueError(
            "Input must be a tensor of shape (*, 4). Got {}".format(
                quaternion.shape))
    # Split into vector (x, y, z) and scalar (w) parts.
    quaternion_vector: torch.Tensor = quaternion[..., 0:3]
    quaternion_scalar: torch.Tensor = quaternion[..., 3:4]
    # |v| clamped away from zero; acos argument clamped into its domain.
    norm_q: torch.Tensor = torch.norm(
        quaternion_vector, p=2, dim=-1, keepdim=True).clamp(min=eps)
    quaternion_log: torch.Tensor = quaternion_vector * torch.acos(
        torch.clamp(quaternion_scalar, min=-1.0, max=1.0)) / norm_q
    return quaternion_log
def angle_axis_to_quaternion(angle_axis: torch.Tensor) -> torch.Tensor:
    r"""Convert an angle-axis (Rodrigues) vector to a quaternion.

    The returned quaternion is in (w, x, y, z) order: the scalar part is
    concatenated first (see the final ``torch.cat`` below).

    Args:
        angle_axis (torch.Tensor): angle-axis vectors of shape :math:`(*, 3)`.

    Return:
        torch.Tensor: quaternions of shape :math:`(*, 4)`.
    """
    # NOTE(review): the original `def` line was truncated
    # ("f angle_axis_to_quaternion..."); restored so the module parses again.
    if not torch.is_tensor(angle_axis):
        raise TypeError("Input type is not a torch.Tensor. Got {}".format(
            type(angle_axis)))
    if not angle_axis.shape[-1] == 3:
        raise ValueError(
            "Input must be a tensor of shape Nx3 or 3. Got {}".format(
                angle_axis.shape))
    a0: torch.Tensor = angle_axis[..., 0:1]
    a1: torch.Tensor = angle_axis[..., 1:2]
    a2: torch.Tensor = angle_axis[..., 2:3]
    theta_squared: torch.Tensor = a0 * a0 + a1 * a1 + a2 * a2
    theta: torch.Tensor = torch.sqrt(theta_squared)
    half_theta: torch.Tensor = theta * 0.5
    mask: torch.Tensor = theta_squared > 0.0
    ones: torch.Tensor = torch.ones_like(half_theta)
    # Near theta = 0 use the first-order approximation sin(t/2)/t ~ 1/2.
    k_neg: torch.Tensor = 0.5 * ones
    k_pos: torch.Tensor = torch.sin(half_theta) / theta
    k: torch.Tensor = torch.where(mask, k_pos, k_neg)
    w: torch.Tensor = torch.where(mask, torch.cos(half_theta), ones)
    quaternion: torch.Tensor = torch.zeros_like(angle_axis)
    quaternion[..., 0:1] += a0 * k
    quaternion[..., 1:2] += a1 * k
    quaternion[..., 2:3] += a2 * k
    return torch.cat([w, quaternion], dim=-1)
def normalize_pixel_coordinates(
        pixel_coordinates: torch.Tensor,
        height: int,
        width: int,
        eps: float = 1e-8) -> torch.Tensor:
    r"""Normalize pixel coordinates between -1 and 1.

    Normalized, -1 if on extreme left, 1 if on extreme right (x = w-1).

    Args:
        pixel_coordinates (torch.Tensor): the grid with (x, y) pixel
            coordinates. Shape can be :math:`(*, 2)`.
        height (int): the maximum height in the y-axis.
        width (int): the maximum width in the x-axis.
        eps (float): safe division by zero. (default 1e-8).

    Return:
        torch.Tensor: the normalized pixel coordinates.
    """
    # NOTE(review): the original `def` line was truncated
    # ("rmalize_pixel_coordinates..."); restored so the module parses again.
    if pixel_coordinates.shape[-1] != 2:
        raise ValueError("Input pixel_coordinates must be of shape (*, 2). "
                         "Got {}".format(pixel_coordinates.shape))
    # Per-axis normalization factor 2 / (size - 1), clamped for safety.
    hw: torch.Tensor = torch.stack([
        torch.tensor(width, device=pixel_coordinates.device, dtype=pixel_coordinates.dtype),
        torch.tensor(height, device=pixel_coordinates.device, dtype=pixel_coordinates.dtype)
    ])
    factor: torch.Tensor = torch.tensor(
        2., device=pixel_coordinates.device, dtype=pixel_coordinates.dtype) / (hw - 1).clamp(eps)
    return factor * pixel_coordinates - 1
def denormalize_pixel_coordinates(
        pixel_coordinates: torch.Tensor,
        height: int,
        width: int,
        eps: float = 1e-8) -> torch.Tensor:
    """Map normalized (*, 2) coordinates in [-1, 1] back to pixel units."""
    if pixel_coordinates.shape[-1] != 2:
        raise ValueError("Input pixel_coordinates must be of shape (*, 2). "
                         "Got {}".format(pixel_coordinates.shape))
    # Inverse of the 2 / (size - 1) normalization factor, clamped for safety.
    hw: torch.Tensor = torch.stack([
        torch.tensor(width), torch.tensor(height)
    ]).to(pixel_coordinates.device).to(pixel_coordinates.dtype)
    factor: torch.Tensor = torch.tensor(2.) / (hw - 1).clamp(eps)
    return torch.tensor(1.) / factor * (pixel_coordinates + 1)
def normalize_pixel_coordinates3d(
        pixel_coordinates: torch.Tensor,
        depth: int,
        height: int,
        width: int,
        eps: float = 1e-8) -> torch.Tensor:
    """Normalize 3d pixel coordinates to the range [-1, 1] per axis.

    Pixel 0 maps to -1 and the last pixel of each extent maps to +1.
    The scale vector is stacked as (depth, width, height), i.e. the last
    coordinate axis is ordered (d, x, y) — matching the companion
    denormalize function.

    Args:
        pixel_coordinates: tensor of shape (*, 3).
        depth: volume depth in pixels.
        height: volume height in pixels.
        width: volume width in pixels.
        eps: clamp value preventing division by zero for 1-pixel extents.

    Returns:
        Tensor of shape (*, 3) with coordinates in [-1, 1].

    Raises:
        ValueError: if the last dimension of ``pixel_coordinates`` is not 3.
    """
    if pixel_coordinates.shape[-1] != 3:
        raise ValueError("Input pixel_coordinates must be of shape (*, 3). "
                         "Got {}".format(pixel_coordinates.shape))
    # Create the scale tensor directly on the input's device/dtype instead
    # of building CPU tensors and moving them afterwards.
    dhw: torch.Tensor = torch.stack([
        torch.tensor(depth, device=pixel_coordinates.device, dtype=pixel_coordinates.dtype),
        torch.tensor(width, device=pixel_coordinates.device, dtype=pixel_coordinates.dtype),
        torch.tensor(height, device=pixel_coordinates.device, dtype=pixel_coordinates.dtype)
    ])
    factor: torch.Tensor = 2. / (dhw - 1).clamp(eps)
    return factor * pixel_coordinates - 1
def denormalize_pixel_coordinates3d(
        pixel_coordinates: torch.Tensor,
        depth: int,
        height: int,
        width: int,
        eps: float = 1e-8) -> torch.Tensor:
    """Denormalize 3d coordinates from [-1, 1] back to absolute pixel positions.

    Inverse of ``normalize_pixel_coordinates3d``: -1 maps to pixel 0 and +1
    maps to the last pixel of each extent. The scale vector is stacked as
    (depth, width, height), i.e. the last coordinate axis is ordered
    (d, x, y).

    Args:
        pixel_coordinates: tensor of shape (*, 3) with values in [-1, 1].
        depth: volume depth in pixels.
        height: volume height in pixels.
        width: volume width in pixels.
        eps: clamp value preventing division by zero for 1-pixel extents.

    Returns:
        Tensor of shape (*, 3) with absolute pixel coordinates.

    Raises:
        ValueError: if the last dimension of ``pixel_coordinates`` is not 3.
    """
    if pixel_coordinates.shape[-1] != 3:
        raise ValueError("Input pixel_coordinates must be of shape (*, 3). "
                         "Got {}".format(pixel_coordinates.shape))
    # Create the scale tensor directly on the input's device/dtype instead
    # of building CPU tensors and moving them afterwards.
    dhw: torch.Tensor = torch.stack([
        torch.tensor(depth, device=pixel_coordinates.device, dtype=pixel_coordinates.dtype),
        torch.tensor(width, device=pixel_coordinates.device, dtype=pixel_coordinates.dtype),
        torch.tensor(height, device=pixel_coordinates.device, dtype=pixel_coordinates.dtype)
    ])
    factor: torch.Tensor = 2. / (dhw - 1).clamp(eps)
    return 1. / factor * (pixel_coordinates + 1)
| true
| true
|
790660d2c32c54d06613ec3e49f63111f96101b8
| 2,090
|
py
|
Python
|
advent/year2015/day20.py
|
davweb/advent-of-code
|
6d9ac52092f4aad26a84d7cfd2fcd8420f1ea612
|
[
"Unlicense"
] | null | null | null |
advent/year2015/day20.py
|
davweb/advent-of-code
|
6d9ac52092f4aad26a84d7cfd2fcd8420f1ea612
|
[
"Unlicense"
] | null | null | null |
advent/year2015/day20.py
|
davweb/advent-of-code
|
6d9ac52092f4aad26a84d7cfd2fcd8420f1ea612
|
[
"Unlicense"
] | null | null | null |
# -*- coding: utf-8 -*-
from functools import cache
INPUT = 33100000
def sigma_pentagonal_numbers(limit):
    """Yield the generalized pentagonal numbers that do not exceed *limit*.

    These are the values n(3n - 1)/2 for n = 1, -1, 2, -2, 3, -3, ...,
    used by the pentagonal-number recurrence for the sigma function.

    >>> list(sigma_pentagonal_numbers(16))
    [1, 2, 5, 7, 12, 15]
    """
    k = 1
    while True:
        # Alternate the positive and negative index for each magnitude k.
        for n in (k, -k):
            pentagonal = n * (3 * n - 1) // 2
            if pentagonal > limit:
                return
            yield pentagonal
        k += 1
def sigma_sign_generator():
    """Yield the repeating sign pattern 1, 1, -1, -1 forever.

    This is the sign sequence of the pentagonal-number recurrence.
    """
    while True:
        for sign in (1, 1, -1, -1):
            yield sign
@cache
def presents_for_house(house):
    """Return the number of presents delivered to *house*.

    Each elf e delivers 10 * e presents to every multiple of e, so this is
    10 * sigma(house), computed via the pentagonal-number recurrence for
    the sum-of-divisors function: https://math.stackexchange.com/a/22744

    >>> presents_for_house(1)
    10
    >>> presents_for_house(2)
    30
    >>> presents_for_house(3)
    40
    >>> presents_for_house(8)
    150
    >>> presents_for_house(9)
    130
    """
    if house == 1:
        return 10
    total = 0
    for index, pentagonal in enumerate(sigma_pentagonal_numbers(house)):
        # The recurrence's signs repeat with period four: +, +, -, -.
        sign = 1 if index % 4 < 2 else -1
        remainder = house - pentagonal
        if remainder == 0:
            total += house * sign * 10
        else:
            total += presents_for_house(remainder) * sign
    return total
def part1(data):
    """Return the first house number receiving at least *data* presents.

    Houses are scanned in order; house h receives 10 * sigma(h) presents
    (see presents_for_house). Each new running maximum is printed as
    progress output.

    # Takes too long so commented out
    # >>> part1(INPUT)
    # 776160
    """
    house = 0
    presents = 0
    # Renamed from 'max', which shadowed the builtin of the same name.
    best = 0
    while presents < data:
        house += 1
        presents = presents_for_house(house)
        if presents > best:
            best = presents
            print(best)
    return house
def part2(data):
    """Return the first house receiving at least *data* presents when each
    elf delivers 11 * e presents to only its first 50 multiples.

    >>> part2(INPUT)
    786240
    """
    # Bug fix: the search bound previously used the module constant INPUT
    # instead of the *data* argument, so any call with a smaller target
    # allocated a needlessly huge table and a larger target could fail.
    # A valid bound: elf h always delivers 11 * h to house h, so the answer
    # is at most data.
    upper_limit = data
    house = [0] * (upper_limit + 1)
    elf = 1
    while elf <= upper_limit:
        # Each elf stops after visiting its first 50 multiples.
        elf_end = min(elf * 50, upper_limit)
        for number in range(elf, elf_end + 1, elf):
            index = number - 1
            house[index] += 11 * elf
            # Shrink the search space as soon as a qualifying house appears.
            if house[index] >= data:
                upper_limit = min(number, upper_limit)
        elf += 1
    for i, value in enumerate(house):
        if value >= data:
            return i + 1
    raise ValueError()
def main():
    """Print the answers for both puzzle parts."""
    for solve in (part1, part2):
        print(solve(INPUT))
if __name__ == "__main__":
main()
| 16.076923
| 58
| 0.507177
|
from functools import cache
INPUT = 33100000
def sigma_pentagonal_numbers(limit):
n = 1
p = 1
while p <= limit:
yield p
if n > 0:
n = -n
else:
n = -n + 1
p = (3 * n * n - n) // 2
def sigma_sign_generator():
while True:
yield 1
yield 1
yield -1
yield -1
@cache
def presents_for_house(house):
if house == 1:
return 10
presents = 0
sign = sigma_sign_generator()
for p in sigma_pentagonal_numbers(house):
n = house - p
if n == 0:
presents += house * next(sign) * 10
else:
presents += presents_for_house(n) * next(sign)
return presents
def part1(data):
house = 0
presents = 0
max = 0
while presents < data:
house += 1
presents = presents_for_house(house)
if presents > max:
max = presents
print(max)
return house
def part2(data):
upper_limit = INPUT
house = [0] * (upper_limit + 1)
elf = 1
while elf <= upper_limit:
elf_end = min(elf * 50, upper_limit)
for number in range(elf, elf_end + 1, elf):
index = number - 1
house[index] += 11 * elf
if house[index] >= data:
upper_limit = min(number, upper_limit)
elf += 1
for i, value in enumerate(house):
if value >= data:
return i + 1
raise ValueError()
def main():
print(part1(INPUT))
print(part2(INPUT))
if __name__ == "__main__":
main()
| true
| true
|
790660f1ba7f0dd430d2a6d62416017cafec0be7
| 4,197
|
py
|
Python
|
TIDALDL-PY/tidal_dl/lang/spanish.py
|
joyel24/Tidal-Media-Downloader
|
a73a7a0923dbbfad50c8faa64f75cc64ec568c02
|
[
"Apache-2.0"
] | 2
|
2020-11-01T09:41:14.000Z
|
2021-12-15T22:51:37.000Z
|
TIDALDL-PY/tidal_dl/lang/spanish.py
|
joyel24/Tidal-Media-Downloader
|
a73a7a0923dbbfad50c8faa64f75cc64ec568c02
|
[
"Apache-2.0"
] | null | null | null |
TIDALDL-PY/tidal_dl/lang/spanish.py
|
joyel24/Tidal-Media-Downloader
|
a73a7a0923dbbfad50c8faa64f75cc64ec568c02
|
[
"Apache-2.0"
] | 1
|
2020-11-03T10:14:02.000Z
|
2020-11-03T10:14:02.000Z
|
#!/usr/bin/env python
# -*- encoding: utf-8 -*-
'''
@File : spanish.py
@Time : 2020/08/21
@Author : JavierSC
@Version : 1.0
@Contact :
@Desc :
'''
class LangSpanish(object):
    """Spanish translation table for the tidal-dl console UI.

    Every attribute is a user-facing string keyed by the same constant name
    as in the other language classes, so the classes are interchangeable.
    """
    # --- Settings screen: column headers and option labels ---
    SETTING = "AJUSTES"
    VALUE = "VALORES"
    SETTING_DOWNLOAD_PATH = "Ruta de descarga"
    SETTING_ONLY_M4A = "Convertir mp4 a m4a"
    SETTING_ADD_EXPLICIT_TAG = "Añadir tag de 'Contenido explícito'"
    SETTING_ADD_HYPHEN = "Agregar guión"
    SETTING_ADD_YEAR = "Agregar año en la carpeta del álbum"
    SETTING_USE_TRACK_NUM = "Agregar número de la pista"
    SETTING_AUDIO_QUALITY = "Calidad de audio"
    SETTING_VIDEO_QUALITY = "Calidad de video"
    SETTING_CHECK_EXIST = "Verificar si existe"
    SETTING_ARTIST_BEFORE_TITLE = "Nombre del artista en el título de la pista"
    SETTING_ALBUMID_BEFORE_FOLDER = "Añadir ID de la carpeta del álbum"
    SETTING_INCLUDE_EP = "Incluir Sencillos y EPs"
    SETTING_SAVE_COVERS = "Guardar carátulas"
    SETTING_LANGUAGE = "Idioma"
    SETTING_USE_PLAYLIST_FOLDER = "Usar directorio de la lista de reproducción"
    SETTING_MULITHREAD_DOWNLOAD = "Descarga Multi-hilo"
    SETTING_ALBUM_FOLDER_FORMAT = "Formato del nombre de carpeta del álbum"
    SETTING_TRACK_FILE_FORMAT = "Formato del nombre de archivo de la pista"
    SETTING_SHOW_PROGRESS = "Mostrar progreso"
    # --- Main menu choices ---
    CHOICE = "SELECCIÓN"
    FUNCTION = "FUNCIÓN"
    CHOICE_ENTER = "Ingresar"
    CHOICE_ENTER_URLID = "Ingresar 'Url/ID':"
    CHOICE_EXIT = "Salir"
    CHOICE_LOGIN = "Login"
    CHOICE_SETTINGS = "Ajustes"
    CHOICE_SET_ACCESS_TOKEN = "Establecer AccessToken"
    CHOICE_DOWNLOAD_BY_URL = "Descargar por Url o ID"
    # --- Console output prefixes and prompts ---
    PRINT_ERR = "[ERR]"
    PRINT_INFO = "[INFO]"
    PRINT_SUCCESS = "[EXITO]"
    PRINT_ENTER_CHOICE = "Ingresar Selección:"
    PRINT_LATEST_VERSION = "Ultima versión:"
    PRINT_USERNAME = "nombre de usuario:"
    PRINT_PASSWORD = "contraseña:"
    # --- Prompts shown when changing individual settings ---
    CHANGE_START_SETTINGS = "Iniciar ajustes('0'-Volver,'1'-Si):"
    CHANGE_DOWNLOAD_PATH = "Ruta de descarga('0' No modificar):"
    CHANGE_AUDIO_QUALITY = "Calidad de audio('0'-Normal,'1'-High,'2'-HiFi,'3'-Master):"
    CHANGE_VIDEO_QUALITY = "Calidad de video('0'-1080,'1'-720,'2'-480,'3'-360):"
    CHANGE_ONLYM4A = "Convertir mp4 a m4a('0'-No,'1'-Si):"
    CHANGE_ADD_EXPLICIT_TAG = "Agregar tag de contenido explícito a los nombres de archivo('0'-No,'1'-Si):"
    CHANGE_ADD_HYPHEN = "Usar guiones en lugar de espacios en el nombre de los archivos('0'-No,'1'-Si):"
    CHANGE_ADD_YEAR = "Agregar año a el nombre de las carpetas del álbum('0'-No,'1'-Si):"
    CHANGE_USE_TRACK_NUM = "Agregar número de la pista('0'-No,'1'-Si):"
    CHANGE_CHECK_EXIST = "Verificar si el archivo existe antes de descargar la pista('0'-No,'1'-Si):"
    CHANGE_ARTIST_BEFORE_TITLE = "Añadir el nombre del artista en el título de la pista('0'-No,'1'-Si):"
    CHANGE_INCLUDE_EP = "Incluir Sencillos y EPs al descargar el álbum del artista('0'-No,'1'-Si):"
    CHANGE_ALBUMID_BEFORE_FOLDER = "Añadir ID de la carpeta del álbum('0'-No,'1'-Si):"
    CHANGE_SAVE_COVERS = "Guardar carátulas('0'-No,'1'-Si):"
    CHANGE_LANGUAGE = "Seleccione el idioma"
    CHANGE_ALBUM_FOLDER_FORMAT = "Formato del nombre de carpeta del álbum('0' No modificar):"
    CHANGE_TRACK_FILE_FORMAT = "Formato del nombre de archivo de la pista('0' No modificar):"
    CHANGE_SHOW_PROGRESS = "Mostrar progreso('0'-No,'1'-Yes):"
    # --- Error / status messages ---
    MSG_INVAILD_ACCESSTOKEN = "AccessToken invalido! Por favor reinicie"
    MSG_PATH_ERR = "La ruta no es correcta!"
    MSG_INPUT_ERR = "Error de entrada!"
    # --- Property-table headers for the different model objects ---
    MODEL_ALBUM_PROPERTY = "PROPIEDAD-DE-ÁLBUM"
    MODEL_TRACK_PROPERTY = "PROPIEDAD-DE-PISTA"
    MODEL_VIDEO_PROPERTY = "PROPIEDAD-DE-VIDEO"
    MODEL_ARTIST_PROPERTY = "PROPIEDAD-DE-ARTISTA"
    MODEL_PLAYLIST_PROPERTY = "PROPIEDAD-DE-PLAYLIST"
    # --- Field labels inside the property tables ---
    MODEL_TITLE = 'Título'
    MODEL_TRACK_NUMBER = 'Numero de pista'
    MODEL_VIDEO_NUMBER = 'Numero de video'
    MODEL_RELEASE_DATE = 'Fecha de lanzamiento'
    MODEL_VERSION = 'Versión'
    MODEL_EXPLICIT = 'Explícito'
    MODEL_ALBUM = 'Álbum'
    MODEL_ID = 'ID'
    MODEL_NAME = 'Nombre'
    MODEL_TYPE = 'Tipo'
| 45.129032
| 108
| 0.688825
|
class LangSpanish(object):
SETTING = "AJUSTES"
VALUE = "VALORES"
SETTING_DOWNLOAD_PATH = "Ruta de descarga"
SETTING_ONLY_M4A = "Convertir mp4 a m4a"
SETTING_ADD_EXPLICIT_TAG = "Añadir tag de 'Contenido explícito'"
SETTING_ADD_HYPHEN = "Agregar guión"
SETTING_ADD_YEAR = "Agregar año en la carpeta del álbum"
SETTING_USE_TRACK_NUM = "Agregar número de la pista"
SETTING_AUDIO_QUALITY = "Calidad de audio"
SETTING_VIDEO_QUALITY = "Calidad de video"
SETTING_CHECK_EXIST = "Verificar si existe"
SETTING_ARTIST_BEFORE_TITLE = "Nombre del artista en el título de la pista"
SETTING_ALBUMID_BEFORE_FOLDER = "Añadir ID de la carpeta del álbum"
SETTING_INCLUDE_EP = "Incluir Sencillos y EPs"
SETTING_SAVE_COVERS = "Guardar carátulas"
SETTING_LANGUAGE = "Idioma"
SETTING_USE_PLAYLIST_FOLDER = "Usar directorio de la lista de reproducción"
SETTING_MULITHREAD_DOWNLOAD = "Descarga Multi-hilo"
SETTING_ALBUM_FOLDER_FORMAT = "Formato del nombre de carpeta del álbum"
SETTING_TRACK_FILE_FORMAT = "Formato del nombre de archivo de la pista"
SETTING_SHOW_PROGRESS = "Mostrar progreso"
CHOICE = "SELECCIÓN"
FUNCTION = "FUNCIÓN"
CHOICE_ENTER = "Ingresar"
CHOICE_ENTER_URLID = "Ingresar 'Url/ID':"
CHOICE_EXIT = "Salir"
CHOICE_LOGIN = "Login"
CHOICE_SETTINGS = "Ajustes"
CHOICE_SET_ACCESS_TOKEN = "Establecer AccessToken"
CHOICE_DOWNLOAD_BY_URL = "Descargar por Url o ID"
PRINT_ERR = "[ERR]"
PRINT_INFO = "[INFO]"
PRINT_SUCCESS = "[EXITO]"
PRINT_ENTER_CHOICE = "Ingresar Selección:"
PRINT_LATEST_VERSION = "Ultima versión:"
PRINT_USERNAME = "nombre de usuario:"
PRINT_PASSWORD = "contraseña:"
CHANGE_START_SETTINGS = "Iniciar ajustes('0'-Volver,'1'-Si):"
CHANGE_DOWNLOAD_PATH = "Ruta de descarga('0' No modificar):"
CHANGE_AUDIO_QUALITY = "Calidad de audio('0'-Normal,'1'-High,'2'-HiFi,'3'-Master):"
CHANGE_VIDEO_QUALITY = "Calidad de video('0'-1080,'1'-720,'2'-480,'3'-360):"
CHANGE_ONLYM4A = "Convertir mp4 a m4a('0'-No,'1'-Si):"
CHANGE_ADD_EXPLICIT_TAG = "Agregar tag de contenido explícito a los nombres de archivo('0'-No,'1'-Si):"
CHANGE_ADD_HYPHEN = "Usar guiones en lugar de espacios en el nombre de los archivos('0'-No,'1'-Si):"
CHANGE_ADD_YEAR = "Agregar año a el nombre de las carpetas del álbum('0'-No,'1'-Si):"
CHANGE_USE_TRACK_NUM = "Agregar número de la pista('0'-No,'1'-Si):"
CHANGE_CHECK_EXIST = "Verificar si el archivo existe antes de descargar la pista('0'-No,'1'-Si):"
CHANGE_ARTIST_BEFORE_TITLE = "Añadir el nombre del artista en el título de la pista('0'-No,'1'-Si):"
CHANGE_INCLUDE_EP = "Incluir Sencillos y EPs al descargar el álbum del artista('0'-No,'1'-Si):"
CHANGE_ALBUMID_BEFORE_FOLDER = "Añadir ID de la carpeta del álbum('0'-No,'1'-Si):"
CHANGE_SAVE_COVERS = "Guardar carátulas('0'-No,'1'-Si):"
CHANGE_LANGUAGE = "Seleccione el idioma"
CHANGE_ALBUM_FOLDER_FORMAT = "Formato del nombre de carpeta del álbum('0' No modificar):"
CHANGE_TRACK_FILE_FORMAT = "Formato del nombre de archivo de la pista('0' No modificar):"
CHANGE_SHOW_PROGRESS = "Mostrar progreso('0'-No,'1'-Yes):"
MSG_INVAILD_ACCESSTOKEN = "AccessToken invalido! Por favor reinicie"
MSG_PATH_ERR = "La ruta no es correcta!"
MSG_INPUT_ERR = "Error de entrada!"
MODEL_ALBUM_PROPERTY = "PROPIEDAD-DE-ÁLBUM"
MODEL_TRACK_PROPERTY = "PROPIEDAD-DE-PISTA"
MODEL_VIDEO_PROPERTY = "PROPIEDAD-DE-VIDEO"
MODEL_ARTIST_PROPERTY = "PROPIEDAD-DE-ARTISTA"
MODEL_PLAYLIST_PROPERTY = "PROPIEDAD-DE-PLAYLIST"
MODEL_TITLE = 'Título'
MODEL_TRACK_NUMBER = 'Numero de pista'
MODEL_VIDEO_NUMBER = 'Numero de video'
MODEL_RELEASE_DATE = 'Fecha de lanzamiento'
MODEL_VERSION = 'Versión'
MODEL_EXPLICIT = 'Explícito'
MODEL_ALBUM = 'Álbum'
MODEL_ID = 'ID'
MODEL_NAME = 'Nombre'
MODEL_TYPE = 'Tipo'
| true
| true
|
79066141ab07cc37c365024fff632e3454036b72
| 462
|
py
|
Python
|
onegreek/rush_forms/urls.py
|
goldhand/onegreek
|
1ad105f15608284a9e80802734f0c6222413a4a0
|
[
"BSD-3-Clause"
] | 1
|
2019-06-13T11:46:08.000Z
|
2019-06-13T11:46:08.000Z
|
onegreek/rush_forms/urls.py
|
goldhand/onegreek
|
1ad105f15608284a9e80802734f0c6222413a4a0
|
[
"BSD-3-Clause"
] | null | null | null |
onegreek/rush_forms/urls.py
|
goldhand/onegreek
|
1ad105f15608284a9e80802734f0c6222413a4a0
|
[
"BSD-3-Clause"
] | null | null | null |
# Import the url helpers; fall back to the pre-Django-1.4 module path.
try:
    from django.conf.urls import *
except ImportError: # django < 1.4
    from django.conf.urls.defaults import *

# Legacy string-view patterns() syntax, kept for the old Django versions
# this project supports (patterns() was removed in Django 1.10).
urlpatterns = patterns("rush_forms.views",
                       #url(r"^(?P<pk>\d+)/$", 'form_view', name='form-detail'),
                       url(r"^(?P<pk>\d+)/$", 'rush_form_view', name='detail'),
                       url(r"^(?P<pk>\d+)/(?P<user_id>\d+)/$", 'rush_form_user_entry_view', name='user_entry'),
                       )
| 42
| 111
| 0.512987
|
try:
from django.conf.urls import *
except ImportError:
from django.conf.urls.defaults import *
urlpatterns = patterns("rush_forms.views",
url(r"^(?P<pk>\d+)/$", 'rush_form_view', name='detail'),
url(r"^(?P<pk>\d+)/(?P<user_id>\d+)/$", 'rush_form_user_entry_view', name='user_entry'),
)
| true
| true
|
790661648fbf214e95c84c634f4fa5493716091a
| 10,890
|
py
|
Python
|
classification/src/train.py
|
WayneGame/InformationExtraction
|
d593adc5ad02fa7046873c95a1b4af0befe08c5f
|
[
"MIT"
] | null | null | null |
classification/src/train.py
|
WayneGame/InformationExtraction
|
d593adc5ad02fa7046873c95a1b4af0befe08c5f
|
[
"MIT"
] | null | null | null |
classification/src/train.py
|
WayneGame/InformationExtraction
|
d593adc5ad02fa7046873c95a1b4af0befe08c5f
|
[
"MIT"
] | null | null | null |
import config
import pandas as pd
import pickle
import numpy as np
from keras.preprocessing.text import Tokenizer
from keras.preprocessing.sequence import pad_sequences
from sklearn.model_selection import StratifiedKFold
from sklearn.metrics import classification_report
import tensorflow as tf
from keras import Sequential
from tensorflow.keras.layers import Embedding, SpatialDropout1D, LSTM, Dense
from tensorflow.keras.callbacks import EarlyStopping
from tensorflow.keras import regularizers
from keras.models import load_model
import matplotlib.pyplot as plt
import seaborn as sns
from sklearn.metrics import confusion_matrix
"""
Versuch #1
"""
# Gibt den classification-report aus
def evaluate(model, X_test, Y_test):
    """Print a classification report for *model* on the test data.

    Args:
        model: trained keras model whose ``predict`` returns per-class
            probabilities (or one-hot rows).
        X_test: model input features.
        Y_test: one-hot encoded ground-truth labels.
    """
    Y_pred = model.predict(X_test)
    # Collapse probability vectors / one-hot rows to class indices.
    Y_pred = Y_pred.argmax(axis=-1)
    Y_test = Y_test.argmax(axis=-1)
    # Bug fix: the arrays were previously wrapped in single-element lists,
    # which makes scikit-learn interpret the input as 2-D
    # multilabel-indicator data instead of plain 1-D class labels.
    print(classification_report(Y_test, Y_pred))
# Nimmt ein history-Objekt und zeichnet den loss für
# sowohl testing als auch training Daten.
def plot_model(history, fold):
    """Save loss and accuracy curves for one training run.

    Writes two PNGs (loss and accuracy, each with train and validation
    curves) to ../plots/, embedding the epoch count and fold number in the
    file names.

    Args:
        history: keras History object returned by model.fit.
        fold: fold index, used in the output file names.
    """
    # Loss curves (train vs. validation).
    plt.title('Loss')
    plt.plot(history.history['loss'], label='train_loss')
    plt.plot(history.history['val_loss'], label='test_loss')
    plt.legend()
    plt.savefig(f"../plots/covid_model_without_vaccine_loss_{config.EPOCHS}epochs_{fold}v{config.K_FOLD_SPLITS}fold.png")
    clear_plot()
    # Accuracy curves (train vs. validation).
    plt.title('Accuracy')
    plt.plot(history.history['accuracy'], label='train_acc', c="r")
    plt.plot(history.history['val_accuracy'], label='test_acc', c="b")
    plt.legend()
    plt.savefig(f"../plots/covid_model_without_vaccine_accuracy_{config.EPOCHS}epochs_{fold}v{config.K_FOLD_SPLITS}fold.png")
    clear_plot()
def clear_plot():
    """Reset matplotlib state so the next plot starts from a blank canvas."""
    # Close the figure window, then clear the current axes and figure —
    # same order as calling plt.close(), plt.cla(), plt.clf() in sequence.
    for reset in (plt.close, plt.cla, plt.clf):
        reset()
def plot_confusion_matrix(model, X_test, y_test, fold):
    """Save a confusion-matrix heatmap for *model* on the validation data.

    The figure is written to ../plots/ with the epoch count and fold number
    in the file name.

    Args:
        model: trained keras model.
        X_test: validation features.
        y_test: one-hot encoded validation labels.
        fold: fold index, used in the output file name.
    """
    y_pred = model.predict(X_test)
    # Collapse probability vectors / one-hot rows to class indices.
    y_pred = y_pred.argmax(axis=-1)
    y_test = y_test.argmax(axis=-1)
    cm = confusion_matrix(y_test, y_pred)
    ax=plt.subplot()
    sns.heatmap(cm, annot=True, fmt='g', ax=ax)
    # labels, title and ticks
    ax.set_xlabel('Predicted labels')
    ax.set_ylabel('True labels')
    ax.set_title(f'Confusion Matrix – {config.EPOCHS}|{fold}')
    ax.xaxis.set_ticklabels(['Negative', 'Positive'])
    ax.yaxis.set_ticklabels(['Negative', 'Positive'])
    plt.savefig(f"../plots/covid_confusion_{config.EPOCHS}epochs_{fold}v{config.K_FOLD_SPLITS}fold.png")
    clear_plot()
# Erstellen eines Tokenizers für das LSTM Modell
def create_tokenizer(df, save_path):
    """Fit a keras Tokenizer on all text columns of *df* and persist it.

    The tokenizer is fitted on the combined link, meta_data, title and body
    columns, pickled to *save_path*, and returned.
    """
    tokenizer = Tokenizer(num_words=config.MAX_NUM_WORDS, filters='!"#$%&()*+,-./:;<=>?@[\]^_`{|}~', lower=True)
    # Fit on the concatenation of every text column.
    corpus = (df.link.values.tolist()
              + df.meta_data.values.tolist()
              + df.title.values.tolist()
              + df.body.values.tolist())
    tokenizer.fit_on_texts(corpus)
    save_tokenizer(tokenizer, save_path)
    return tokenizer
# Laden und speichern des Tokenizers
def save_tokenizer(tokenizer, filename):
    """Pickle *tokenizer* to *filename* using the highest pickle protocol."""
    with open(filename, 'wb') as handle:
        pickle.dump(tokenizer, handle, protocol=pickle.HIGHEST_PROTOCOL)
def load_tokenizer(filename):
    """Load and return a pickled tokenizer from *filename*."""
    with open(filename, 'rb') as handle:
        return pickle.load(handle)
"""
Die in Tokens verwandelte Texte sehen so aus:
[[1, 2, 3, 4], [5, 6, 7], [8, 9, 10, 11, 12]]
gepaddet sehen sie so aus:
[[ 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 2 3 4]
[ 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 5 6 7]
[ 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 8 9 10 11 12]]
werden danach die Covid Count Zahlen angefügt, sieht die Repräsentation beispielsweise so aus
[[ 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 2 3 4 10 20 30]
[ 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 5 6 7 40 50 60]
[ 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 8 9 10 11 12 70 80 90]]
Das np.expand ist notwendig, um das array in beispielsweise folgende Form zu bringen: [ 2 1 20] => [ [2] [1] [20]]
"""
def transform_text(tokenizer, df):
    """Build the model input matrix for every row of *df*.

    Tokenizes the link, meta_data, title and body columns, pads each block
    to its configured maximum length, concatenates the blocks and appends
    the four hand-crafted word-count features as trailing columns.

    Args:
        tokenizer: a fitted keras Tokenizer, or a string path to a pickled
            one (loaded via load_tokenizer).
        df: DataFrame with the text columns and the *_word_count columns.

    Returns:
        2-D numpy array with one feature row per DataFrame row.
    """
    if (isinstance(tokenizer, str)):
        # A string is treated as the path of a pickled tokenizer.
        tokenizer = load_tokenizer(tokenizer)
    # Tokenize the link information.
    X_input = tokenizer.texts_to_sequences(df['link'].values)
    X_input = pad_sequences(X_input, maxlen=config.MAX_LINK_SEQUENCE_LENGTH)
    # Tokenize the meta information.
    X_meta = tokenizer.texts_to_sequences(df['meta_data'].values)
    X_meta = pad_sequences(X_meta, maxlen=config.MAX_META_SEQUENCE_LENGTH)
    # Tokenize the title information.
    X_title = tokenizer.texts_to_sequences(df['title'].values)
    X_title = pad_sequences(X_title, maxlen=config.MAX_TITLE_SEQUENCE_LENGTH)
    # Tokenize the page body.
    X_body = tokenizer.texts_to_sequences(df['body'].values)
    X_body = pad_sequences(X_body, maxlen=config.MAX_BODY_SEQUENCE_LENGTH)
    covid_word_count = df['covid_word_count'].values
    covid_word_count_url = df['covid_word_count_url'].values
    restriction_word_count = df['restriction_word_count'].values
    restriction_word_count_url = df['restriction_word_count_url'].values
    # Concatenate the padded token blocks along the feature axis.
    X_input = np.concatenate([X_input, X_meta], axis=-1)
    X_input = np.concatenate([X_input, X_title], axis=-1)
    X_input = np.concatenate([X_input, X_body], axis=-1)
    # Each count is 1-D; expand to a column before appending it.
    covid_word_count = np.expand_dims(covid_word_count, axis=(-1))
    X_input = np.concatenate([X_input, covid_word_count], axis=-1)
    covid_word_count_url = np.expand_dims(covid_word_count_url, axis=(-1))
    X_input = np.concatenate([X_input, covid_word_count_url], axis=-1)
    restriction_word_count = np.expand_dims(restriction_word_count, axis=(-1))
    X_input = np.concatenate([X_input, restriction_word_count], axis=-1)
    restriction_word_count_url = np.expand_dims(restriction_word_count_url, axis=(-1))
    X_input = np.concatenate([X_input, restriction_word_count_url], axis=-1) # finally join everything together
    return X_input
def remove_stopwords(df):
    """Remove German stopwords from the text columns of *df* in place.

    Reads the stopword list from config.STOPWORDS_PATH once, then strips
    every stopword token from the link, meta_data, title and body columns.

    Args:
        df: DataFrame with the four text columns.

    Returns:
        The same DataFrame, with the columns rewritten.
    """
    # Use a set: membership tests against the raw ndarray were O(n) per word.
    stopwords = set(pd.read_csv(config.STOPWORDS_PATH)['stopwords'].values)

    def _strip(text):
        # str(text) guards against NaN / non-string cells, as before.
        return ' '.join(word for word in str(text).split() if word not in stopwords)

    # Identical transformation for every text column.
    for column in ('link', 'meta_data', 'title', 'body'):
        df[column] = df[column].apply(_strip)
    return df
# Nimmt den input DataFrame und einen LabelEncoder Objekt,
# trainiert ein LSTM Modell, speichert es, evaluiert es
# und gibt den Loss aus.
def train_model(train_df, valid_df, tokenizer, fold):
    """Train the embedding+LSTM classifier for one cross-validation fold.

    Builds the feature matrices, trains the network, saves it to disk,
    prints validation loss/accuracy and a classification report, and writes
    the loss/accuracy and confusion-matrix plots.

    Args:
        train_df: training rows; must contain a 'label' column.
        valid_df: validation rows for this fold.
        tokenizer: fitted keras Tokenizer used by transform_text.
        fold: fold index, used in the saved file names.
    """
    X_train = transform_text(tokenizer, train_df)
    X_valid = transform_text(tokenizer, valid_df)
    # One-hot encode the labels.
    Y_train = pd.get_dummies(train_df['label'])
    Y_valid = pd.get_dummies(valid_df['label']).to_numpy()
    model = Sequential()
    optimizer = tf.keras.optimizers.Adam(1e-3) # 0.001
    model.add(Embedding(config.MAX_NUM_WORDS, config.EMBEDDING_DIM, input_length=X_train.shape[1]))
    model.add(SpatialDropout1D(0.2))
    model.add(LSTM(100, dropout=0.2, recurrent_dropout=0.2, bias_regularizer=regularizers.l2(1e-4),)) # TODO: experiment with these hyper-parameters
    model.add(Dense(2, activation='softmax'))
    loss='categorical_crossentropy'
    model.compile(loss=loss, optimizer=optimizer, metrics=['accuracy'])
    epochs = config.EPOCHS
    batch_size = config.BATCH_SIZE # 64
    #es = EarlyStopping(monitor='val_loss', patience=3, min_delta=0.0001)
    history = model.fit(X_train, Y_train, epochs=epochs, batch_size=batch_size, validation_split=0.2) # , callbacks=[es]
    accr = model.evaluate(X_valid,Y_valid)
    print('Test set\n  Loss: {:0.3f}\n  Accuracy: {:0.3f}'.format(accr[0],accr[1]))
    model.save(f"{config.MODEL_PATH}_without_vaccine_{fold}.h5")
    evaluate(model, X_valid, Y_valid)
    plot_model(history, fold)
    plot_confusion_matrix(model, X_valid, Y_valid, fold)
# Laden und evaluieren eines existierenden Modells
def load_and_evaluate_existing_model(model_path, tokenizer_path, df, le):
    """Load a saved model and tokenizer and print its classification report.

    Args:
        model_path: path of the saved keras model (.h5).
        tokenizer_path: path of the pickled tokenizer.
        df: DataFrame with the text columns and a 'label' column.
        le: unused; kept so existing call sites remain valid.
    """
    model = load_model(model_path)
    tokenizer = load_tokenizer(tokenizer_path)
    # Bug fix: transform_text indexes DataFrame columns ('link', 'meta_data',
    # ...), so it must receive the DataFrame itself, not df['text'].values.
    X = transform_text(tokenizer, df)
    Y = pd.get_dummies(df['label']).values
    # Bug fix: evaluate(model, X_test, Y_test) takes three arguments; the
    # extra 'le' argument previously raised a TypeError.
    evaluate(model, X, Y)
# Testen eines neuen Beispiels. Hauptsächlich zu Testzwecken während der Entwicklung
# Die Funktion nimmt einen String, den Classifier,
# den Vectorizer und einen LabelEncoder und
# gibt eine Vorhersage zurück.
def test_new_example(model, tokenizer, le, text_input):
    """Predict and print the label for a single raw text input.

    Mainly used for ad-hoc testing during development.

    Args:
        model: trained keras model.
        tokenizer: fitted keras Tokenizer (or path to a pickled one).
        le: LabelEncoder used to map the class index back to its name.
        text_input: raw text string to classify.
    """
    # NOTE(review): transform_text indexes DataFrame columns ('link',
    # 'meta_data', ...), so passing a plain one-element list here likely
    # fails — presumably written against an older transform_text signature;
    # confirm before relying on this helper.
    X_example = transform_text(tokenizer, [text_input])
    label_array = model.predict(X_example)
    # Collapse the probability vector to a class index.
    new_label = np.argmax(label_array, axis=-1)
    print(new_label)
    print(le.inverse_transform(new_label))
def run(df, fold, use_vaccine):
    """Train and evaluate the classifier for one cross-validation fold.

    Args:
        df: full dataset with a precomputed 'kfold' column.
        fold: which fold to hold out for validation.
        use_vaccine: when False, drop the vaccine class (label 2) from the
            training data; the validation set keeps all examples.
    """
    # Training dataframe: every row not assigned to this fold.
    train_df = df[df.kfold != fold].reset_index(drop=True)
    print(f"Länge Traing_DF {len(train_df)}")
    # Validation dataframe: exactly the rows of this fold.
    valid_df = df[df.kfold == fold].reset_index(drop=True)
    print(f"Länge Valid_DF {len(valid_df)}")
    # The validation set still contains the vaccine examples.
    # With 10 folds the sets are split roughly as:
    # 0 – 126
    # 1 – 78
    # 2 – 10
    if not use_vaccine:
        train_df = train_df[train_df['label'] != 2]
    # Collapse the remaining labels to binary: any label > 0 becomes 1.
    train_df['label'] = train_df['label'].apply(lambda x : 1 if x > 0 else 0)
    valid_df['label'] = valid_df['label'].apply(lambda x : 1 if x > 0 else 0)
    print("Fitting tokenizer")
    # tf.keras Tokenizer, saved per fold.
    tokenizer = create_tokenizer(train_df, f"{config.TOKENIZER_SAVE_PATH}_{fold}.pickle")
    train_model(train_df, valid_df, tokenizer, fold)
# load_and_evaluate_existing_model(f"{config.MODEL_PATH}_{fold}", config.TOKENIZER_PATH, df, le)
#model = load_model(config.MODEL_PATH)
#tokenizer = config.TOKENIZER_PATH
if (__name__ == "__main__"):
    tf.get_logger().setLevel('ERROR')
    # Load the data and shuffle the rows.
    df = pd.read_csv(config.DATASET_PATH).sample(frac=1)
    df = remove_stopwords(df)
    """
    # TODO: ein Test, Gleichverteilung
    """
    # Duplicate the non-zero-label rows — presumably to balance the label
    # distribution (see the TODO above) — TODO confirm intent.
    df2 = df[df['label'] != 0]
    # Keep only the tail of body and meta_data in the duplicates so they are
    # not byte-identical to the originals.
    df2['body'] = df2['body'].apply(lambda x : str(x)[config.MAX_BODY_SEQUENCE_LENGTH:])
    df2['meta_data'] = df2['meta_data'].apply(lambda x : str(x)[config.MAX_META_SEQUENCE_LENGTH:])
    df = df.append(df2, ignore_index=True).reset_index()
    # initiate the kfold class from the model_selection module
    kf = StratifiedKFold(n_splits=config.K_FOLD_SPLITS)
    # Fill the kfold column with each row's fold index.
    for f, (t_, v_) in enumerate(kf.split(X=df, y=df.label.values)):
        df.loc[v_, 'kfold'] = f
    # Run training for every fold.
    for i in range(config.K_FOLD_SPLITS):
        print(f"\n–––––––––––– FOLD {i} ––––––––––––\n")
        run(df, fold=i, use_vaccine=config.USE_VACCINE)
| 39.314079
| 126
| 0.697062
|
import config
import pandas as pd
import pickle
import numpy as np
from keras.preprocessing.text import Tokenizer
from keras.preprocessing.sequence import pad_sequences
from sklearn.model_selection import StratifiedKFold
from sklearn.metrics import classification_report
import tensorflow as tf
from keras import Sequential
from tensorflow.keras.layers import Embedding, SpatialDropout1D, LSTM, Dense
from tensorflow.keras.callbacks import EarlyStopping
from tensorflow.keras import regularizers
from keras.models import load_model
import matplotlib.pyplot as plt
import seaborn as sns
from sklearn.metrics import confusion_matrix
def evaluate(model, X_test, Y_test):
Y_pred = model.predict(X_test)
Y_pred = Y_pred.argmax(axis=-1)
Y_test = Y_test.argmax(axis=-1)
print(classification_report([Y_test], [Y_pred]))
def plot_model(history, fold):
plt.title('Loss')
plt.plot(history.history['loss'], label='train_loss')
plt.plot(history.history['val_loss'], label='test_loss')
plt.legend()
plt.savefig(f"../plots/covid_model_without_vaccine_loss_{config.EPOCHS}epochs_{fold}v{config.K_FOLD_SPLITS}fold.png")
clear_plot()
plt.title('Accuracy')
plt.plot(history.history['accuracy'], label='train_acc', c="r")
plt.plot(history.history['val_accuracy'], label='test_acc', c="b")
plt.legend()
plt.savefig(f"../plots/covid_model_without_vaccine_accuracy_{config.EPOCHS}epochs_{fold}v{config.K_FOLD_SPLITS}fold.png")
clear_plot()
def clear_plot():
plt.close()
plt.cla()
plt.clf()
def plot_confusion_matrix(model, X_test, y_test, fold):
y_pred = model.predict(X_test)
y_pred = y_pred.argmax(axis=-1)
y_test = y_test.argmax(axis=-1)
cm = confusion_matrix(y_test, y_pred)
ax=plt.subplot()
sns.heatmap(cm, annot=True, fmt='g', ax=ax)
ax.set_xlabel('Predicted labels')
ax.set_ylabel('True labels')
ax.set_title(f'Confusion Matrix – {config.EPOCHS}|{fold}')
ax.xaxis.set_ticklabels(['Negative', 'Positive'])
ax.yaxis.set_ticklabels(['Negative', 'Positive'])
plt.savefig(f"../plots/covid_confusion_{config.EPOCHS}epochs_{fold}v{config.K_FOLD_SPLITS}fold.png")
clear_plot()
def create_tokenizer(df, save_path):
tokenizer = Tokenizer(num_words=config.MAX_NUM_WORDS, filters='!"#$%&()*+,-./:;<=>?@[\]^_`{|}~', lower=True)
words = df.link.values.tolist()
words.extend(df.meta_data.values.tolist())
words.extend(df.title.values.tolist())
words.extend(df.body.values.tolist())
tokenizer.fit_on_texts(words)
save_tokenizer(tokenizer, save_path)
return tokenizer
# Laden und speichern des Tokenizers
def save_tokenizer(tokenizer, filename):
with open(filename, 'wb') as f:
pickle.dump(tokenizer, f, protocol=pickle.HIGHEST_PROTOCOL)
def load_tokenizer(filename):
with open(filename, 'rb') as f:
tokenizer = pickle.load(f)
return tokenizer
def transform_text(tokenizer, df):
if (isinstance(tokenizer, str)):
tokenizer = load_tokenizer(tokenizer)
# Tokenizing der Link Informationen
X_input = tokenizer.texts_to_sequences(df['link'].values)
X_input = pad_sequences(X_input, maxlen=config.MAX_LINK_SEQUENCE_LENGTH)
# Tokenizing der Meta Informationen
X_meta = tokenizer.texts_to_sequences(df['meta_data'].values)
X_meta = pad_sequences(X_meta, maxlen=config.MAX_META_SEQUENCE_LENGTH)
# Tokenizing der Titel Informationen
X_title = tokenizer.texts_to_sequences(df['title'].values)
X_title = pad_sequences(X_title, maxlen=config.MAX_TITLE_SEQUENCE_LENGTH)
# Tokenizing des Seiteninhalts
X_body = tokenizer.texts_to_sequences(df['body'].values)
X_body = pad_sequences(X_body, maxlen=config.MAX_BODY_SEQUENCE_LENGTH)
covid_word_count = df['covid_word_count'].values
covid_word_count_url = df['covid_word_count_url'].values
restriction_word_count = df['restriction_word_count'].values
restriction_word_count_url = df['restriction_word_count_url'].values
X_input = np.concatenate([X_input, X_meta], axis=-1)
X_input = np.concatenate([X_input, X_title], axis=-1)
X_input = np.concatenate([X_input, X_body], axis=-1)
covid_word_count = np.expand_dims(covid_word_count, axis=(-1))
X_input = np.concatenate([X_input, covid_word_count], axis=-1)
covid_word_count_url = np.expand_dims(covid_word_count_url, axis=(-1))
X_input = np.concatenate([X_input, covid_word_count_url], axis=-1)
restriction_word_count = np.expand_dims(restriction_word_count, axis=(-1))
X_input = np.concatenate([X_input, restriction_word_count], axis=-1)
restriction_word_count_url = np.expand_dims(restriction_word_count_url, axis=(-1))
X_input = np.concatenate([X_input, restriction_word_count_url], axis=-1) # Schlussendlich alles zusammefügen
return X_input
def remove_stopwords(df):
ger = pd.read_csv(config.STOPWORDS_PATH)['stopwords'].values
df['link'] = df['link'].apply(lambda x: ' '.join([word for word in str(x).split() if word not in (ger)]))
df['meta_data'] = df['meta_data'].apply(lambda x: ' '.join([word for word in str(x).split() if word not in (ger)]))
df['title'] = df['title'].apply(lambda x: ' '.join([word for word in str(x).split() if word not in (ger)]))
df['body'] = df['body'].apply(lambda x: ' '.join([word for word in str(x).split() if word not in (ger)]))
return df
# Nimmt den input DataFrame und einen LabelEncoder Objekt,
# trainiert ein LSTM Modell, speichert es, evaluiert es
# und gibt den Loss aus.
def train_model(train_df, valid_df, tokenizer, fold):
X_train = transform_text(tokenizer, train_df)
X_valid = transform_text(tokenizer, valid_df)
Y_train = pd.get_dummies(train_df['label'])
Y_valid = pd.get_dummies(valid_df['label']).to_numpy()
model = Sequential()
optimizer = tf.keras.optimizers.Adam(1e-3) # 0.001
model.add(Embedding(config.MAX_NUM_WORDS, config.EMBEDDING_DIM, input_length=X_train.shape[1]))
model.add(SpatialDropout1D(0.2))
model.add(LSTM(100, dropout=0.2, recurrent_dropout=0.2, bias_regularizer=regularizers.l2(1e-4),)) # TODO: damit rumspielen
model.add(Dense(2, activation='softmax'))
loss='categorical_crossentropy'
model.compile(loss=loss, optimizer=optimizer, metrics=['accuracy'])
epochs = config.EPOCHS
batch_size = config.BATCH_SIZE # 64
#es = EarlyStopping(monitor='val_loss', patience=3, min_delta=0.0001)
history = model.fit(X_train, Y_train, epochs=epochs, batch_size=batch_size, validation_split=0.2) # , callbacks=[es]
accr = model.evaluate(X_valid,Y_valid)
print('Test set\n Loss: {:0.3f}\n Accuracy: {:0.3f}'.format(accr[0],accr[1]))
model.save(f"{config.MODEL_PATH}_without_vaccine_{fold}.h5")
evaluate(model, X_valid, Y_valid)
plot_model(history, fold)
plot_confusion_matrix(model, X_valid, Y_valid, fold)
# Laden und evaluieren eines existierenden Modells
def load_and_evaluate_existing_model(model_path, tokenizer_path, df, le):
model = load_model(model_path)
tokenizer = load_tokenizer(tokenizer_path)
X = transform_text(tokenizer, df['text'].values)
Y = pd.get_dummies(df['label']).values
evaluate(model, X, Y, le)
# Testen eines neuen Beispiels. Hauptsächlich zu Testzwecken während der Entwicklung
# Die Funktion nimmt einen String, den Classifier,
# den Vectorizer und einen LabelEncoder und
# gibt eine Vorhersage zurück.
def test_new_example(model, tokenizer, le, text_input):
X_example = transform_text(tokenizer, [text_input])
label_array = model.predict(X_example)
new_label = np.argmax(label_array, axis=-1)
print(new_label)
print(le.inverse_transform(new_label))
def run(df, fold, use_vaccine):
# der Trainingdataframe
train_df = df[df.kfold != fold].reset_index(drop=True)
print(f"Länge Traing_DF {len(train_df)}")
# Validation Dataframe
valid_df = df[df.kfold == fold].reset_index(drop=True)
print(f"Länge Valid_DF {len(valid_df)}")
# Das Validationset enthält weiterhin die Impf-Beispiele
# Bei 10 Folds sind die Sets folgendermaßen aufgeteil:
# 0 – 126
# 1 – 78
# 2 – 10
if not use_vaccine:
train_df = train_df[train_df['label'] != 2]
# Jetzt müssen alle 2 er noch in einsergewandelt werden
train_df['label'] = train_df['label'].apply(lambda x : 1 if x > 0 else 0)
valid_df['label'] = valid_df['label'].apply(lambda x : 1 if x > 0 else 0)
print("Fitting tokenizer")
# tf.keras Tokenizer
tokenizer = create_tokenizer(train_df, f"{config.TOKENIZER_SAVE_PATH}_{fold}.pickle")
train_model(train_df, valid_df, tokenizer, fold)
# load_and_evaluate_existing_model(f"{config.MODEL_PATH}_{fold}", config.TOKENIZER_PATH, df, le)
#model = load_model(config.MODEL_PATH)
#tokenizer = config.TOKENIZER_PATH
if (__name__ == "__main__"):
    tf.get_logger().setLevel('ERROR')
    # Load the data and shuffle all rows (frac=1 keeps every row).
    df = pd.read_csv(config.DATASET_PATH).sample(frac=1)
    df = remove_stopwords(df)
    df2 = df[df['label'] != 0]
    # Oversample the non-zero classes: duplicate them using the *tail* of
    # the body and metadata (everything beyond the max sequence length),
    # so the copies carry a different token window than the originals.
    # NOTE(review): df2 is a slice of df — pandas may raise a
    # SettingWithCopyWarning here; consider df[...].copy().
    df2['body'] = df2['body'].apply(lambda x : str(x)[config.MAX_BODY_SEQUENCE_LENGTH:])
    df2['meta_data'] = df2['meta_data'].apply(lambda x : str(x)[config.MAX_META_SEQUENCE_LENGTH:])
    df = df.append(df2, ignore_index=True).reset_index()
    # initiate the kfold class from the model_selection module
    kf = StratifiedKFold(n_splits=config.K_FOLD_SPLITS)
    # Fill the 'kfold' column with each row's validation-fold index.
    for f, (t_, v_) in enumerate(kf.split(X=df, y=df.label.values)):
        df.loc[v_, 'kfold'] = f
    # Train once per fold.
    for i in range(config.K_FOLD_SPLITS):
        print(f"\n–––––––––––– FOLD {i} ––––––––––––\n")
        run(df, fold=i, use_vaccine=config.USE_VACCINE)
| true
| true
|
790662547c95ddbae1a51a96718eeecea12c6c67
| 334
|
py
|
Python
|
students/K33401/Kunal_Shubham/lab3/hotel_project/hotel_app/router.py
|
ShubhamKunal/ITMO_ICT_WebDevelopment_2020-2021
|
bb91c91a56d21cec2b12ae4cc722eaa652a88420
|
[
"MIT"
] | 4
|
2020-09-03T15:41:42.000Z
|
2021-12-24T15:28:20.000Z
|
students/K33401/Kunal_Shubham/lab3/hotel_project/hotel_app/router.py
|
ShubhamKunal/ITMO_ICT_WebDevelopment_2020-2021
|
bb91c91a56d21cec2b12ae4cc722eaa652a88420
|
[
"MIT"
] | 48
|
2020-09-13T20:22:42.000Z
|
2021-04-30T11:13:30.000Z
|
students/K33401/Kunal_Shubham/lab3/hotel_project/hotel_app/router.py
|
ShubhamKunal/ITMO_ICT_WebDevelopment_2020-2021
|
bb91c91a56d21cec2b12ae4cc722eaa652a88420
|
[
"MIT"
] | 69
|
2020-09-06T10:32:37.000Z
|
2021-11-28T18:13:17.000Z
|
from hotel_app.views import *
from rest_framework import routers

# DRF router exposing each hotel-app viewset under its URL prefix.
router = routers.DefaultRouter()

_viewsets = (
    (r'rooms', RoomAPIView),
    (r'employee', EmployeeAPIView),
    (r'resident', ResidentAPIView),
    (r'booking', BookingRecordAPIView),
    (r'cleaning', CleaningScheduleAPIView),
)
for prefix, viewset in _viewsets:
    router.register(prefix, viewset)
| 33.4
| 53
| 0.826347
|
from hotel_app.views import *
from rest_framework import routers
router = routers.DefaultRouter()
router.register(r'rooms', RoomAPIView)
router.register(r'employee', EmployeeAPIView)
router.register(r'resident', ResidentAPIView)
router.register(r'booking', BookingRecordAPIView)
router.register(r'cleaning', CleaningScheduleAPIView)
| true
| true
|
790662d12902dbc2c3c011234e157734ef8e4ddf
| 3,307
|
py
|
Python
|
Spy-Game/code.py
|
umeshpal93/ga-learner-dsb-repo
|
c8da26325e65f62955b72b6b4449a5cac7ac00d3
|
[
"MIT"
] | null | null | null |
Spy-Game/code.py
|
umeshpal93/ga-learner-dsb-repo
|
c8da26325e65f62955b72b6b4449a5cac7ac00d3
|
[
"MIT"
] | null | null | null |
Spy-Game/code.py
|
umeshpal93/ga-learner-dsb-repo
|
c8da26325e65f62955b72b6b4449a5cac7ac00d3
|
[
"MIT"
] | null | null | null |
# --------------
##File path for the file
file_path
def read_file(path):
    """Return the first line of the file at *path*.

    Fixes: the original ignored its ``path`` argument and read the
    global ``file_path`` instead, and leaked the handle on error.
    """
    with open(path, 'r') as file:
        sentence = file.readline()
    return sentence
sample_message = read_file(file_path)
print(sample_message)
#Code starts here
# --------------
#Code starts here
file_path_1
file_path_2
def read_file(path):
    """Return the first line of the file at *path* as a string.

    Fixes: the original ignored its ``path`` argument and read the
    global ``file_path_1`` instead, and leaked the handle on error.
    """
    with open(path, 'r') as file:
        sentence = file.readline()
    return str(sentence)
message_1 = read_file(file_path_1)
print(message_1)
def read_file(path):
    """Return the first line of the file at *path* as a string.

    Fixes: the original ignored its ``path`` argument and read the
    global ``file_path_2`` instead, and leaked the handle on error.
    """
    with open(path, 'r') as file:
        sentence = file.readline()
    return str(sentence)
message_2 = read_file(file_path_2)
print(message_2)
def fuse_msg(message_a , message_b):
    """Decode both messages as integers and fuse them via floor division."""
    a, b = int(message_a), int(message_b)
    return str(b // a)
secret_msg_1 = fuse_msg(message_1 , message_2)
print(secret_msg_1)
# --------------
#Code starts here
file_path_3
def read_file(path):
    """Return the first line of the file at *path* as a string.

    Fixes: the original ignored its ``path`` argument and read the
    global ``file_path_3`` instead, and leaked the handle on error.
    """
    with open(path, 'r') as file:
        sentence = file.readline()
    return str(sentence)
message_3 = read_file(file_path_3)
print(message_3)
def substitute_msg(message_c):
    """Map a color code word to its agent role.

    Fixes: the original's chain of independent ``if`` statements raised
    UnboundLocalError for any unknown color; a dict lookup now raises a
    clear KeyError instead.
    """
    roles = {
        'Red': 'Army General',
        'Green': 'Data Scientist',
        'Blue': 'Marine Biologist',
    }
    return roles[message_c]
secret_msg_2 = substitute_msg(message_3)
print(secret_msg_2)
# --------------
# File path for message 4 and message 5
file_path_4
file_path_5
#Code starts here
def read_file(path):
    """Return the first line of the file at *path*.

    Fixes: the original ignored its ``path`` argument and read the
    global ``file_path_4`` instead, and leaked the handle on error.
    """
    with open(path, 'r') as file:
        sentence = file.readline()
    return sentence
message_4 = read_file(file_path_4)
print(message_4)
def read_file(path):
    """Return the first line of the file at *path*.

    Fixes: the original ignored its ``path`` argument and read the
    global ``file_path_5`` instead, and leaked the handle on error.
    """
    with open(path, 'r') as file:
        sentence = file.readline()
    return sentence
message_5 = read_file(file_path_5)
print(message_5)
def compare_msg(message_d , message_e):
    """Keep the words of message_d that do not appear in message_e."""
    exclude = message_e.split()
    kept = []
    for word in message_d.split():
        if word not in exclude:
            kept.append(word)
    return " ".join(kept)
secret_msg_3 = compare_msg(message_4 , message_5)
print(secret_msg_3)
# --------------
#Code starts here
file_path_6
def read_file(path):
    """Return the first line of the file at *path*.

    Fixes: the original ignored its ``path`` argument (it always opened
    the global ``file_path_6``) and leaked the handle on error.
    """
    with open(path, 'r') as file:
        sentence = file.readline()
    return sentence

# BUG FIX: the original passed file_path here; only the def's hard-coded
# global made it read the right file. Pass file_path_6 explicitly.
message_6 = read_file(file_path_6)
print(message_6)
def extract_msg(message_f):
    """Keep only the words with an even number of characters."""
    even_words = [word for word in message_f.split() if len(word) % 2 == 0]
    return " ".join(even_words)
secret_msg_4 = extract_msg(message_6)
print(secret_msg_4)
# --------------
#Secret message parts in the correct order
message_parts=[secret_msg_3, secret_msg_1, secret_msg_4, secret_msg_2]
final_path= user_data_dir + '/secret_message.txt'
#Code starts here
secret_msg = secret_msg_3 + ' '+ secret_msg_1 + ' ' + secret_msg_4 + ' '+ secret_msg_2
def write_file(secret_msg , path):
    """Append *secret_msg* to the file at *path*; return the count of
    characters written.

    Fixes: the original ignored its ``path`` argument (it always opened
    the global ``final_path``) and leaked the handle on error.
    """
    with open(path, 'a+') as file:
        sentence = file.write(secret_msg)
    return sentence
sample_message = write_file(secret_msg , final_path)
print(sample_message)
| 19.226744
| 87
| 0.638645
|
_file(path):
file = open(file_path , 'r')
sentence = file.readline()
file.close()
return sentence
sample_message = read_file(file_path)
print(sample_message)
file_path_1
file_path_2
def read_file(path):
file = open(file_path_1 , 'r')
sentence = file.readline()
file.close()
return str(sentence)
message_1 = read_file(file_path_1)
print(message_1)
def read_file(path):
file = open(file_path_2 , 'r')
sentence = file.readline()
file.close()
return str(sentence)
message_2 = read_file(file_path_2)
print(message_2)
def fuse_msg(message_a , message_b):
quotient = int(message_b)//int(message_a)
return str(quotient)
secret_msg_1 = fuse_msg(message_1 , message_2)
print(secret_msg_1)
file_path_3
def read_file(path):
file = open(file_path_3 , 'r')
sentence = file.readline()
file.close()
return str(sentence)
message_3 = read_file(file_path_3)
print(message_3)
def substitute_msg(message_c):
if message_c == 'Red':
sub = 'Army General'
if message_c == 'Green':
sub = 'Data Scientist'
if message_c == 'Blue' :
sub = 'Marine Biologist'
return sub
secret_msg_2 = substitute_msg(message_3)
print(secret_msg_2)
file_path_4
file_path_5
def read_file(path):
file = open(file_path_4 , 'r')
sentence = file.readline()
file.close()
return sentence
message_4 = read_file(file_path_4)
print(message_4)
def read_file(path):
file = open(file_path_5 , 'r')
sentence = file.readline()
file.close()
return sentence
message_5 = read_file(file_path_5)
print(message_5)
def compare_msg(message_d , message_e):
a_list = message_d.split()
b_list = message_e.split()
c_list = [x for x in a_list if x not in b_list]
final_msg = " ".join(c_list)
return final_msg
secret_msg_3 = compare_msg(message_4 , message_5)
print(secret_msg_3)
file_path_6
def read_file(path):
file = open(file_path_6 , 'r')
sentence = file.readline()
file.close()
return sentence
message_6 = read_file(file_path)
print(message_6)
def extract_msg(message_f):
a_list = message_f.split()
even_word = lambda x : (len(x) % 2 == 0)
b_list = filter(even_word , a_list)
final_msg = " ".join(b_list)
return final_msg
secret_msg_4 = extract_msg(message_6)
print(secret_msg_4)
message_parts=[secret_msg_3, secret_msg_1, secret_msg_4, secret_msg_2]
final_path= user_data_dir + '/secret_message.txt'
secret_msg = secret_msg_3 + ' '+ secret_msg_1 + ' ' + secret_msg_4 + ' '+ secret_msg_2
def write_file(secret_msg , path):
file = open(final_path , 'a+')
sentence = file.write(secret_msg)
file.close()
return sentence
sample_message = write_file(secret_msg , final_path)
print(sample_message)
| true
| true
|
790663da37099808d35615fd047642ed91a5fd01
| 6,800
|
py
|
Python
|
benchmark/IPLoM_agreement.py
|
dhetong/LogSampleTest
|
7ae4cffd43ba6c90f4a5cf164eb44072ece5f08d
|
[
"MIT"
] | 1
|
2022-02-12T15:09:15.000Z
|
2022-02-12T15:09:15.000Z
|
benchmark/IPLoM_agreement.py
|
dhetong/LogSampleTest
|
7ae4cffd43ba6c90f4a5cf164eb44072ece5f08d
|
[
"MIT"
] | null | null | null |
benchmark/IPLoM_agreement.py
|
dhetong/LogSampleTest
|
7ae4cffd43ba6c90f4a5cf164eb44072ece5f08d
|
[
"MIT"
] | null | null | null |
import sys
sys.path.append('../')
from logparser import IPLoM, evaluator
import os
import pandas as pd
# Parameter grid swept by the experiment below: CT (cluster-goodness
# threshold) and lb (lower bound) are paired by index; n_para is the
# grid size.
CT = [0.25, 0.3, 0.4, 0.4, 0.35, 0.58, 0.3, 0.3, 0.9, 0.78, 0.35, 0.3, 0.4]
lb = [0.3, 0.4, 0.01, 0.2, 0.25, 0.25, 0.3, 0.25, 0.25, 0.25, 0.3, 0.2, 0.7]
n_para = 13
benchmark_settings = {
'HDFS': {
'log_file': 'HDFS/HDFS_2k.log',
'log_format': '<Date> <Time> <Pid> <Level> <Component>: <Content>',
'regex': [r'blk_-?\d+', r'(\d+\.){3}\d+(:\d+)?'],
'st': 0.5,
'depth': 4
},
'Hadoop': {
'log_file': 'Hadoop/Hadoop_2k.log',
'log_format': '<Date> <Time> <Level> \[<Process>\] <Component>: <Content>',
'regex': [r'(\d+\.){3}\d+'],
'st': 0.5,
'depth': 4
},
'Spark': {
'log_file': 'Spark/Spark_2k.log',
'log_format': '<Date> <Time> <Level> <Component>: <Content>',
'regex': [r'(\d+\.){3}\d+', r'\b[KGTM]?B\b', r'([\w-]+\.){2,}[\w-]+'],
'st': 0.5,
'depth': 4
},
'Zookeeper': {
'log_file': 'Zookeeper/Zookeeper_2k.log',
'log_format': '<Date> <Time> - <Level> \[<Node>:<Component>@<Id>\] - <Content>',
'regex': [r'(/|)(\d+\.){3}\d+(:\d+)?'],
'st': 0.5,
'depth': 4
},
'BGL': {
'log_file': 'BGL/BGL_2k.log',
'log_format': '<Label> <Timestamp> <Date> <Node> <Time> <NodeRepeat> <Type> <Component> <Level> <Content>',
'regex': [r'core\.\d+'],
'st': 0.5,
'depth': 4
},
'HPC': {
'log_file': 'HPC/HPC_2k.log',
'log_format': '<LogId> <Node> <Component> <State> <Time> <Flag> <Content>',
'regex': [r'=\d+'],
'st': 0.5,
'depth': 4
},
'Thunderbird': {
'log_file': 'Thunderbird/Thunderbird_2k.log',
'log_format': '<Label> <Timestamp> <Date> <User> <Month> <Day> <Time> <Location> <Component>(\[<PID>\])?: <Content>',
'regex': [r'(\d+\.){3}\d+'],
'st': 0.5,
'depth': 4
},
'Windows': {
'log_file': 'Windows/Windows_2k.log',
'log_format': '<Date> <Time>, <Level> <Component> <Content>',
'regex': [r'0x.*?\s'],
'st': 0.7,
'depth': 5
},
'Linux': {
'log_file': 'Linux/Linux_2k.log',
'log_format': '<Month> <Date> <Time> <Level> <Component>(\[<PID>\])?: <Content>',
'regex': [r'(\d+\.){3}\d+', r'\d{2}:\d{2}:\d{2}'],
'st': 0.39,
'depth': 6
},
'Andriod': {
'log_file': 'Andriod/Andriod_2k.log',
'log_format': '<Date> <Time> <Pid> <Tid> <Level> <Component>: <Content>',
'regex': [r'(/[\w-]+)+', r'([\w-]+\.){2,}[\w-]+', r'\b(\-?\+?\d+)\b|\b0[Xx][a-fA-F\d]+\b|\b[a-fA-F\d]{4,}\b'],
'st': 0.2,
'depth': 6
},
'HealthApp': {
'log_file': 'HealthApp/HealthApp_2k.log',
'log_format': '<Time>\|<Component>\|<Pid>\|<Content>',
'regex': [],
'st': 0.2,
'depth': 4
},
'Apache': {
'log_file': 'Apache/Apache_2k.log',
'log_format': '\[<Time>\] \[<Level>\] <Content>',
'regex': [r'(\d+\.){3}\d+'],
'st': 0.5,
'depth': 4
},
'Proxifier': {
'log_file': 'Proxifier/Proxifier_2k.log',
'log_format': '\[<Time>\] <Program> - <Content>',
'regex': [r'<\d+\ssec', r'([\w-]+\.)+[\w-]+(:\d+)?', r'\d{2}:\d{2}(:\d{2})*', r'[KGTM]B'],
'st': 0.6,
'depth': 3
},
'OpenSSH': {
'log_file': 'OpenSSH/OpenSSH_2k.log',
'log_format': '<Date> <Day> <Time> <Component> sshd\[<Pid>\]: <Content>',
'regex': [r'(\d+\.){3}\d+', r'([\w-]+\.){2,}[\w-]+'],
'st': 0.6,
'depth': 5
},
'OpenStack': {
'log_file': 'OpenStack/OpenStack_2k.log',
'log_format': '<Logrecord> <Date> <Time> <Pid> <Level> <Component> \[<ADDR>\] <Content>',
'regex': [r'((\d+\.){3}\d+,?)+', r'/.+?\s', r'\d+'],
'st': 0.5,
'depth': 5
},
'Mac': {
'log_file': 'Mac/Mac_2k.log',
'log_format': '<Month> <Date> <Time> <User> <Component>\[<PID>\]( \(<Address>\))?: <Content>',
'regex': [r'([\w-]+\.){2,}[\w-]+'],
'st': 0.7,
'depth': 6
},
}
input_dir = '../../AgreementData/'
output_dir_1 = 'result/file1'
output_dir_2 = 'result/file2'
HDFS_dir = 'HDFS/'
Hadoop_dir = 'Hadoop/'
Spark_dir = 'Spark/'
Zookeeper_dir = 'Zookeeper/'
BGL_dir = 'BGL/'
HPC_dir = 'HPC/'
Thunderbird_dir = 'Thunderbird/'
Windows_dir = 'Windows/'
Linux_dir = 'Linux/'
Android_dir = 'Android/'
Apache_dir = 'Apache/'
OpenSSH_dir = 'OpenSSH/'
OpenStack_dir = 'OpenStack/'
Mac_dir = 'Mac/'
HealthApp_dir = 'HealthApp/'
Proxifier_dir = 'Proxifier/'
HDFS_file = 'HDFS.log'
Hadoop_file = 'Hadoop.log'
Spark_file = 'Spark.log'
Zookeeper_file = 'Zookeeper.log'
BGL_file = 'BGL.log'
HPC_file = 'HPC.log'
Thunderbird_file = 'Thunderbird.log'
Windows_file = 'Windows.log'
Linux_file = 'Linux.log'
Android_file = 'Android.log'
Apache_file = 'Apache.log'
OpenSSH_file = 'SSH.log'
OpenStack_file = 'OpenStack.log'
Mac_file = 'Mac.log'
HealthApp_file = 'HealthApp.log'
Proxifier_file = 'Proxifier.log'
Android_num = 10
Apache_num = 10
BGL_num = 10
Hadoop_num = 10
HDFS_num = 10
HealthApp_num = 10
HPC_num = 10
Linux_num = 3
Mac_num = 10
OpenSSH_num = 10
OpenStack_num = 1
Proxifier_num = 2
Spark_num = 10
Thunderbird_num = 10
Windows_num = 10
Zookeeper_num = 10
# Pairwise-agreement experiment on the BGL partitions.
setting = benchmark_settings['BGL']
agreement_result = []
for index in range(0,BGL_num,1):
    # Compare consecutive partitions: part i against part i+1.
    # NOTE(review): with BGL_num == 10 the last iteration asks for
    # 'BGL.log.part10' — confirm the data directory has 11 partitions.
    logfile_1 = BGL_file + '.part' + str(index)
    logfile_2 = BGL_file + '.part' + str(index+1)
    indir = input_dir + BGL_dir
    print(logfile_1)
    print(logfile_2)
    # Sweep the CT/lowerBound parameter grid.
    # NOTE(review): range(0, n_para-1) stops at index 11 although 13
    # pairs are defined — confirm the last pair is meant to be skipped.
    for para_index in range(0,n_para-1,1):
        para_info = str(CT[para_index]) + ',' + str(lb[para_index])
        print(para_info)
        # Parse each partition independently with identical settings,
        # writing structured output into separate directories.
        parser_1 = IPLoM.LogParser(log_format=setting['log_format'], indir=indir, outdir=output_dir_1,
            CT=CT[para_index], lowerBound=lb[para_index], rex=setting['regex'])
        parser_2 = IPLoM.LogParser(log_format=setting['log_format'], indir=indir, outdir=output_dir_2,
            CT=CT[para_index], lowerBound=lb[para_index], rex=setting['regex'])
        parser_1.parse(logfile_1)
        parser_2.parse(logfile_2)
        # Number of lines on which the two parses agree; normalized by
        # the 5000-line partition size below.
        agreement = evaluator.evaluate_agreement(
            os.path.join(output_dir_1, logfile_1 + '_structured.csv'),
            os.path.join(output_dir_2, logfile_2 + '_structured.csv'))
        ratio = float(float(agreement)/5000.0)
        agreement_result.append([logfile_1,logfile_2,para_info,ratio])
df_result = pd.DataFrame(agreement_result, columns=['File1', 'File2', 'Para', 'Agreement'])
print(df_result)
df_result.to_csv('IPLoM_agreement_BGL.csv')
| 30.222222
| 125
| 0.524118
|
import sys
sys.path.append('../')
from logparser import IPLoM, evaluator
import os
import pandas as pd
CT = [0.25, 0.3, 0.4, 0.4, 0.35, 0.58, 0.3, 0.3, 0.9, 0.78, 0.35, 0.3, 0.4]
lb = [0.3, 0.4, 0.01, 0.2, 0.25, 0.25, 0.3, 0.25, 0.25, 0.25, 0.3, 0.2, 0.7]
n_para = 13
benchmark_settings = {
'HDFS': {
'log_file': 'HDFS/HDFS_2k.log',
'log_format': '<Date> <Time> <Pid> <Level> <Component>: <Content>',
'regex': [r'blk_-?\d+', r'(\d+\.){3}\d+(:\d+)?'],
'st': 0.5,
'depth': 4
},
'Hadoop': {
'log_file': 'Hadoop/Hadoop_2k.log',
'log_format': '<Date> <Time> <Level> \[<Process>\] <Component>: <Content>',
'regex': [r'(\d+\.){3}\d+'],
'st': 0.5,
'depth': 4
},
'Spark': {
'log_file': 'Spark/Spark_2k.log',
'log_format': '<Date> <Time> <Level> <Component>: <Content>',
'regex': [r'(\d+\.){3}\d+', r'\b[KGTM]?B\b', r'([\w-]+\.){2,}[\w-]+'],
'st': 0.5,
'depth': 4
},
'Zookeeper': {
'log_file': 'Zookeeper/Zookeeper_2k.log',
'log_format': '<Date> <Time> - <Level> \[<Node>:<Component>@<Id>\] - <Content>',
'regex': [r'(/|)(\d+\.){3}\d+(:\d+)?'],
'st': 0.5,
'depth': 4
},
'BGL': {
'log_file': 'BGL/BGL_2k.log',
'log_format': '<Label> <Timestamp> <Date> <Node> <Time> <NodeRepeat> <Type> <Component> <Level> <Content>',
'regex': [r'core\.\d+'],
'st': 0.5,
'depth': 4
},
'HPC': {
'log_file': 'HPC/HPC_2k.log',
'log_format': '<LogId> <Node> <Component> <State> <Time> <Flag> <Content>',
'regex': [r'=\d+'],
'st': 0.5,
'depth': 4
},
'Thunderbird': {
'log_file': 'Thunderbird/Thunderbird_2k.log',
'log_format': '<Label> <Timestamp> <Date> <User> <Month> <Day> <Time> <Location> <Component>(\[<PID>\])?: <Content>',
'regex': [r'(\d+\.){3}\d+'],
'st': 0.5,
'depth': 4
},
'Windows': {
'log_file': 'Windows/Windows_2k.log',
'log_format': '<Date> <Time>, <Level> <Component> <Content>',
'regex': [r'0x.*?\s'],
'st': 0.7,
'depth': 5
},
'Linux': {
'log_file': 'Linux/Linux_2k.log',
'log_format': '<Month> <Date> <Time> <Level> <Component>(\[<PID>\])?: <Content>',
'regex': [r'(\d+\.){3}\d+', r'\d{2}:\d{2}:\d{2}'],
'st': 0.39,
'depth': 6
},
'Andriod': {
'log_file': 'Andriod/Andriod_2k.log',
'log_format': '<Date> <Time> <Pid> <Tid> <Level> <Component>: <Content>',
'regex': [r'(/[\w-]+)+', r'([\w-]+\.){2,}[\w-]+', r'\b(\-?\+?\d+)\b|\b0[Xx][a-fA-F\d]+\b|\b[a-fA-F\d]{4,}\b'],
'st': 0.2,
'depth': 6
},
'HealthApp': {
'log_file': 'HealthApp/HealthApp_2k.log',
'log_format': '<Time>\|<Component>\|<Pid>\|<Content>',
'regex': [],
'st': 0.2,
'depth': 4
},
'Apache': {
'log_file': 'Apache/Apache_2k.log',
'log_format': '\[<Time>\] \[<Level>\] <Content>',
'regex': [r'(\d+\.){3}\d+'],
'st': 0.5,
'depth': 4
},
'Proxifier': {
'log_file': 'Proxifier/Proxifier_2k.log',
'log_format': '\[<Time>\] <Program> - <Content>',
'regex': [r'<\d+\ssec', r'([\w-]+\.)+[\w-]+(:\d+)?', r'\d{2}:\d{2}(:\d{2})*', r'[KGTM]B'],
'st': 0.6,
'depth': 3
},
'OpenSSH': {
'log_file': 'OpenSSH/OpenSSH_2k.log',
'log_format': '<Date> <Day> <Time> <Component> sshd\[<Pid>\]: <Content>',
'regex': [r'(\d+\.){3}\d+', r'([\w-]+\.){2,}[\w-]+'],
'st': 0.6,
'depth': 5
},
'OpenStack': {
'log_file': 'OpenStack/OpenStack_2k.log',
'log_format': '<Logrecord> <Date> <Time> <Pid> <Level> <Component> \[<ADDR>\] <Content>',
'regex': [r'((\d+\.){3}\d+,?)+', r'/.+?\s', r'\d+'],
'st': 0.5,
'depth': 5
},
'Mac': {
'log_file': 'Mac/Mac_2k.log',
'log_format': '<Month> <Date> <Time> <User> <Component>\[<PID>\]( \(<Address>\))?: <Content>',
'regex': [r'([\w-]+\.){2,}[\w-]+'],
'st': 0.7,
'depth': 6
},
}
input_dir = '../../AgreementData/'
output_dir_1 = 'result/file1'
output_dir_2 = 'result/file2'
HDFS_dir = 'HDFS/'
Hadoop_dir = 'Hadoop/'
Spark_dir = 'Spark/'
Zookeeper_dir = 'Zookeeper/'
BGL_dir = 'BGL/'
HPC_dir = 'HPC/'
Thunderbird_dir = 'Thunderbird/'
Windows_dir = 'Windows/'
Linux_dir = 'Linux/'
Android_dir = 'Android/'
Apache_dir = 'Apache/'
OpenSSH_dir = 'OpenSSH/'
OpenStack_dir = 'OpenStack/'
Mac_dir = 'Mac/'
HealthApp_dir = 'HealthApp/'
Proxifier_dir = 'Proxifier/'
HDFS_file = 'HDFS.log'
Hadoop_file = 'Hadoop.log'
Spark_file = 'Spark.log'
Zookeeper_file = 'Zookeeper.log'
BGL_file = 'BGL.log'
HPC_file = 'HPC.log'
Thunderbird_file = 'Thunderbird.log'
Windows_file = 'Windows.log'
Linux_file = 'Linux.log'
Android_file = 'Android.log'
Apache_file = 'Apache.log'
OpenSSH_file = 'SSH.log'
OpenStack_file = 'OpenStack.log'
Mac_file = 'Mac.log'
HealthApp_file = 'HealthApp.log'
Proxifier_file = 'Proxifier.log'
Android_num = 10
Apache_num = 10
BGL_num = 10
Hadoop_num = 10
HDFS_num = 10
HealthApp_num = 10
HPC_num = 10
Linux_num = 3
Mac_num = 10
OpenSSH_num = 10
OpenStack_num = 1
Proxifier_num = 2
Spark_num = 10
Thunderbird_num = 10
Windows_num = 10
Zookeeper_num = 10
setting = benchmark_settings['BGL']
agreement_result = []
for index in range(0,BGL_num,1):
logfile_1 = BGL_file + '.part' + str(index)
logfile_2 = BGL_file + '.part' + str(index+1)
indir = input_dir + BGL_dir
print(logfile_1)
print(logfile_2)
for para_index in range(0,n_para-1,1):
para_info = str(CT[para_index]) + ',' + str(lb[para_index])
print(para_info)
parser_1 = IPLoM.LogParser(log_format=setting['log_format'], indir=indir, outdir=output_dir_1,
CT=CT[para_index], lowerBound=lb[para_index], rex=setting['regex'])
parser_2 = IPLoM.LogParser(log_format=setting['log_format'], indir=indir, outdir=output_dir_2,
CT=CT[para_index], lowerBound=lb[para_index], rex=setting['regex'])
parser_1.parse(logfile_1)
parser_2.parse(logfile_2)
agreement = evaluator.evaluate_agreement(
os.path.join(output_dir_1, logfile_1 + '_structured.csv'),
os.path.join(output_dir_2, logfile_2 + '_structured.csv'))
ratio = float(float(agreement)/5000.0)
agreement_result.append([logfile_1,logfile_2,para_info,ratio])
df_result = pd.DataFrame(agreement_result, columns=['File1', 'File2', 'Para', 'Agreement'])
print(df_result)
df_result.to_csv('IPLoM_agreement_BGL.csv')
| true
| true
|
790663f05c783a503c06df853519c767591861fd
| 336
|
py
|
Python
|
blueapps/account/urls.py
|
wangzishuo111/bk_prometheus
|
c6aa16d8a547a3d00fbca317f6846ad35b1297ea
|
[
"MIT"
] | null | null | null |
blueapps/account/urls.py
|
wangzishuo111/bk_prometheus
|
c6aa16d8a547a3d00fbca317f6846ad35b1297ea
|
[
"MIT"
] | 2
|
2021-02-08T20:48:38.000Z
|
2021-06-10T23:03:39.000Z
|
blueapps/account/urls.py
|
wangzishuo111/bk_prometheus
|
c6aa16d8a547a3d00fbca317f6846ad35b1297ea
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
from django.conf.urls import url

from blueapps.account import views

app_name = 'account'

# Route table for the account app: (pattern, view, route name).
_routes = (
    (r'^login_success/$', views.login_success, 'login_success'),
    (r'^login_page/$', views.login_page, 'login_page'),
    (r'^send_code/$', views.send_code_view, 'send_code'),
)
urlpatterns = [url(pattern, view, name=name) for pattern, view, name in _routes]
| 25.846154
| 72
| 0.693452
|
from django.conf.urls import url
from blueapps.account import views
app_name = 'account'
urlpatterns = [
url(r'^login_success/$', views.login_success, name="login_success"),
url(r'^login_page/$', views.login_page, name="login_page"),
url(r'^send_code/$', views.send_code_view, name="send_code")
]
| true
| true
|
7906640c2dbfc6ab93f04849e869d3e7a0e97285
| 22,035
|
py
|
Python
|
pyscf/tools/Molpro2Pyscf/wmme.py
|
mfkasim1/pyscf
|
7be5e015b2b40181755c71d888449db936604660
|
[
"Apache-2.0"
] | 1
|
2021-01-24T13:35:42.000Z
|
2021-01-24T13:35:42.000Z
|
pyscf/tools/Molpro2Pyscf/wmme.py
|
mfkasim1/pyscf
|
7be5e015b2b40181755c71d888449db936604660
|
[
"Apache-2.0"
] | 36
|
2018-08-22T19:44:03.000Z
|
2020-05-09T10:02:36.000Z
|
pyscf/tools/Molpro2Pyscf/wmme.py
|
mfkasim1/pyscf
|
7be5e015b2b40181755c71d888449db936604660
|
[
"Apache-2.0"
] | 4
|
2018-02-14T16:28:28.000Z
|
2019-08-12T16:40:30.000Z
|
# TODO: By PySCF-1.5 release
# Copyright 2014-2020 The PySCF Developers. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# 1. code style
# * Indent: 3 -> 4
# * Constant should be all uppercase
# * Function/method should be all lowercase
# * Line wrap around 80 columns
# * Use either double quote or single quote, not mix
#
# 2. Conventions required by PySCF
# * Use PYSCF_TMPDIR to replace _TmpDir
#
# 3. Use proper functions provided by PySCF
#
# This file is adapted with permission from the wmme program of Gerald Knizia.
# See http://sites.psu.edu/knizia/software/
#====================================================
from __future__ import print_function
import numpy as np
from numpy import dot, array
from os import path
from sys import version_info
def GetModulePath():
    """Return the directory containing this .py file.

    The intent is that wmme.py lives next to the wmme executable, so
    scripts importing it (e.g. via PYTHONPATH) can locate the binary.
    """
    import inspect
    ThisFile = inspect.getfile(inspect.currentframe())
    return path.dirname(path.abspath(ThisFile))
if 0:
# set executable/basis library directory explicitly.
_WmmeDir = "/home/cgk/dev/wmme"
else:
# set executable/basis library from path of wmme.py
_WmmeDir = None
_TmpDir = None # if None: use operating system default
_BasisLibDir = None # if None: same as _WmmeDir/bases
#ToAng = 0.5291772108
ToAng = 0.529177209 # molpro default.
def ElementNameDummy():
    """Return (element symbols indexed by nuclear charge, symbol->charge map).

    Index 0 is 'X', a dummy center without electrons.
    """
    Symbols = ("X H He Li Be B C N O F Ne Na Mg Al Si P S Cl Ar K Ca Sc Ti V Cr"
               " Mn Fe Co Ni Cu Zn Ga Ge As Se Br Kr Rb Sr Y Zr Nb Mo Tc Ru Rh Pd"
               " Ag Cd In Sn Sb Te I Xe Cs Ba La Ce Pr Nd Pm Sm Eu Gd Tb Dy Ho Er"
               " Tm Yb Lu Hf Ta W Re Os Ir Pt Au Hg Tl Pb Bi Po At Rn").split()
    Charges = dict((Symbol, iChg) for (iChg, Symbol) in enumerate(Symbols))
    return Symbols, Charges
ElementNames, ElementNumbers = ElementNameDummy()
def mdot(*args):
    """Chained matrix product: mdot(A, B, C, ...) = A*B*C*...

    No attempt is made to optimize the contraction order.
    """
    Result, Rest = args[0], args[1:]
    for Factor in Rest:
        Result = dot(Result, Factor)
    return Result
def dot2(A, B):
    """Full contraction sum_ij A_ij * B_ij of two equally-shaped arrays."""
    return dot(A.ravel(), B.ravel())
def nCartY(l):
    """Return the number of Cartesian components (l+1)(l+2)/2 for angular momentum l.

    Uses floor division so the count stays an int under Python 3; the
    original true division returned a float there.
    """
    return (l + 1) * (l + 2) // 2
class FAtom(object):
    """One atomic center: element symbol, position vector, and center index."""

    def __init__(self, Element, Position, Index):
        self.Element = Element
        self.Pos = Position
        self.Index = Index

    @property
    def Label(self):
        """Element symbol combined with the 1-based center index, e.g. ' C  1'."""
        return "{0:>2}{1:>3}".format(self.Element, 1 + self.Index)

    @property
    def iElement(self):
        """Nuclear charge of this atom's element."""
        return ElementNumbers[self.Element]

    def __str__(self):
        Coords = tuple(self.Pos[o] for o in (0, 1, 2))
        return "{0} ({1:6.3f},{2:6.3f},{3:6.3f})".format(self.Label, *Coords)
class FAtomSet(object):
   """An ordered set of atoms: element symbols plus a 3 x nAtom position matrix."""
   def __init__(self, Positions, Elements, Orientations=None, Name=None):
      """Positions: 3 x nAtom matrix. Given in atomic units (ABohr).
      Elements: element name (e.g., H) for each of the positions.
      Orientations: If given, a [3,3,N] array encoding the standard
      orientation of the given atoms (for replicating potentials!). For
      each atom there is a orthogonal 3x3 matrix denoting the ex,ey,ez
      directions."""
      self.Pos = Positions
      assert(self.Pos.shape[0] == 3 and self.Pos.shape[1] == len(Elements))
      self.Elements = Elements
      self.Orientations = Orientations
      self.Name = Name
   def MakeXyz(self,NumFmt = "%15.8f",Scale=1.):
      # Render one "<element> <x> <y> <z>" row per atom; str.format first
      # substitutes NumFmt for each {0}, then the %-operator fills values.
      Lines = []
      for i in range(len(self.Elements)):
         Lines.append(" %5s {0} {0} {0}".format(NumFmt) % (\
            self.Elements[i], Scale*self.Pos[0,i], Scale*self.Pos[1,i], Scale*self.Pos[2,i]))
      return "\n".join(Lines)
   def nElecNeutral(self):
      """return number of electrons present in the total system if neutral."""
      return sum([ElementNumbers[o] for o in self.Elements])
   def fCoreRepulsion1(self, iAt, jAt):
      # Pairwise nuclear repulsion Z_i * Z_j / |r_i - r_j| in atomic units
      # (** binds tighter than /, so the denominator is the distance).
      if iAt == jAt: return 0.  # <- a core doesn't repulse itself.
      ChA, ChB = [ElementNumbers[self.Elements[o]] for o in [iAt, jAt]]
      return ChA * ChB / np.sum((self.Pos[:,iAt] - self.Pos[:,jAt])**2)**.5
   def fCoreRepulsion(self):
      # Total nuclear-nuclear repulsion energy: sum over unique atom pairs.
      N = len(self.Elements)
      Charges = array([ElementNumbers[o] for o in self.Elements])
      fCoreEnergy = 0
      for i in range(N):
         for j in range(i):
            fCoreEnergy += self.fCoreRepulsion1(i,j)
            #fCoreEnergy += Charges[i] * Charges[j] / np.sum((self.Pos[:,i] - self.Pos[:,j])**2)**.5
      return fCoreEnergy
   def __str__(self):
      Caption = " %5s%15s %15s %15s" % ("ATOM", "POS/X", "POS/Y", "POS/Z")
      return Caption + "\n" + self.MakeXyz()
   def __len__(self): return len(self.Elements)
   def __getitem__(self,key): return FAtom(self.Elements[key], self.Pos[:,key], key)
   def __iter__(self):
      # Yield an FAtom view for each (element, position) pair with its index.
      for (iAt,(Type,Xyz)) in enumerate(zip(self.Elements, self.Pos.T)):
         #yield (Type,Xyz)
         yield FAtom(Type, Xyz, iAt)
class FBasisShell(object):
   """A generally contracted shell of spherical harmonic basis functions."""
   def __init__(self, l, Exp, Co):
      # l: angular momentum; Exp: primitive exponents (1d array);
      # Co: contraction coefficients, shape (nExp, nCo).
      self.l = l
      assert(isinstance(l,int) and l >= 0 and l <= 8)
      self.Exp = np.array(Exp)
      assert(self.Exp.ndim == 1)
      self.Co = np.array(Co)
      assert(self.Co.ndim == 2 and self.Co.shape[0] == len(self.Exp))
      self.Element = None  # designated element for the basis function
      self.Comment = None  # comment on the basis function (e.g., literature reference)
   @property
   def nExp(self):
      # Number of primitive exponents.
      return len(self.Exp)
   @property
   def nCo(self):
      # Number of contracted functions.
      return self.Co.shape[1]
   @property
   def nFn(self):
      # Number of spherical-harmonic functions: nCo * (2l+1).
      return self.nCo * (2*self.l + 1)
   @property
   def nFnCa(self):
      # Number of Cartesian functions: nCo * (l+1)(l+2)/2.
      return self.nCo * nCartY(self.l)
   @property
   def AngMom(self): return self.l
   def __str__(self):
      # Multi-line human-readable dump of exponents and contractions.
      Lines = []
      Lines.append("BasisShell [l = %i, nExp = %i, nCo = %i]" % (self.l, self.nExp, self.nCo))
      def FmtA(L):
         return ", ".join("%12.5f" % o for o in L)
      Lines.append("  Exps = [%s]" % FmtA(self.Exp))
      for iCo in range(self.nCo):
         Lines.append("  Co[%2i] = [%s]" % (iCo, FmtA(self.Co[:,iCo])))
      return "\n".join(Lines)
class FBasisShell1(object):
   """A FBasisShell which is placed on a concrete atom."""
   def __init__(self, Atom, ShellFn):
      # Atom: the FAtom carrying this shell; ShellFn: shared FBasisShell data.
      self.Atom = Atom
      self.Fn = ShellFn
      assert(isinstance(self.Fn, FBasisShell))
   # Convenience accessors forwarding to the atom ...
   @property
   def Pos(self):
      return self.Atom.Pos
   @property
   def iAtom(self):
      return self.Atom.Index
   # ... and to the underlying shell data.
   @property
   def l(self): return self.Fn.l
   @property
   def nExp(self): return self.Fn.nExp
   @property
   def Exp(self): return self.Fn.Exp
   @property
   def nCo(self): return self.Fn.nCo
   @property
   def Co(self): return self.Fn.Co
   @property
   def nFn(self): return self.Fn.nFn
   @property
   def nFnCa(self): return self.Fn.nFnCa
class FBasisSet(object):
   """A complete basis: FBasisShell1 objects distributed over a set of atoms."""
   def __init__(self, Shells, Atoms):
      # list of FBasisShell1 objects.
      self.Shells = Shells
      self.Atoms = Atoms
   @property
   def nFn(self):
      # Total number of spherical-harmonic basis functions.
      n = 0
      for Sh in self.Shells:
         n += Sh.nFn
      return n
   @property
   def nFnCa(self):
      # Total number of Cartesian basis functions.
      n = 0
      for Sh in self.Shells:
         n += Sh.nFnCa
      return n
   def __str__(self):
      Lines = []
      for o in self.Shells:
         Lines.append("Atom %s %s" % (o.Atom, o.Fn))
      return "\n".join(Lines)
   def FmtCr(self):
      # Serialize the basis in the text format read by the bfint executable.
      # Positions are emitted in bohr (f == 1.).
      #f = 1./ToAng
      f = 1.
      Lines = []
      def Emit(s):
         Lines.append(s)
      def EmitArray(Name, A):
         #Emit("  " + Name + "<" + " ".join("%.16e"%o for o in A) + ">")
         Emit("  " + Name + "<" + " ".join("%r"%o for o in A) + ">")
      # collect all unique FBasisShell objects.
      BasisFns = []
      BasisFnIds = {}  # map id(BasisFn)->(index)
      for Shell in self.Shells:
         if id(Shell.Fn) not in BasisFnIds:
            BasisFnIds[id(Shell.Fn)] = len(BasisFns)
            BasisFns.append(Shell.Fn)
         pass
      Emit("Basis<Version<0.1> nFns<%i> nShells<%i>" % (len(BasisFns), len(self.Shells)))
      # store the function declarations...
      def EmitBasisFn(Fn):
         Emit(" Fn<Id<%i> l<%i> nExp<%i> nCo<%i>" % (
            BasisFnIds[id(Fn)], Fn.l, Fn.nExp, Fn.nCo))
         EmitArray("Exp", Fn.Exp)
         for Co in Fn.Co.T:
            EmitArray("Co", Co)
         Emit(" >")
         pass
      for Fn in BasisFns:
         EmitBasisFn(Fn)
      # ...and their distribution amongst atoms.
      def EmitShell(Sh):
         #Emit(" Shell<iAt<%i> x<%.16e> y<%.16e> z<%.16e> FnId<%i>>" % (
         Emit(" Shell<iAt<%i> x<%r> y<%r> z<%r> FnId<%i>>" % (
            Sh.Atom.Index, f*Sh.Atom.Pos[0], f*Sh.Atom.Pos[1], f*Sh.Atom.Pos[2], BasisFnIds[id(Sh.Fn)]))
         pass
      for Shell in self.Shells:
         EmitShell(Shell)
      Emit(">")  # end of Basis
      return "\n".join(Lines)
   def GetAngmomList(self):
      # list of all basis function angular momenta in the basis, for converting basis function orders and types.
      ls = []
      for Shell in self.Shells:
         for iCo in range(Shell.nCo):
            ls.append(Shell.l)
      return ls
class FIntegralContext(object):
    """contains data describing how to evaluate quantum chemistry matrix
    elements on electronic system as defined by the given atoms and basis
    sets.
    Note: Basis sets must either be basis set names (i.e., library names)
    or FBasisSet objects.
    """
    def __init__(self, Atoms, OrbBasis, FitBasis=None, BasisLibs=None):
        # Atoms: FAtomSet; OrbBasis: main orbital basis; FitBasis: optional
        # density-fitting basis; BasisLibs: names of basis library files.
        self.Atoms = Atoms
        self.OrbBasis = OrbBasis
        self.FitBasis = FitBasis
        self.BasisLibs = BasisLibs
    def _InvokeBfint(self, Args, Outputs=None, Inputs=None, MoreBases=None):
        """Run the external integral program with this context's atoms and
        bases plus the given extra command line arguments; see module-level
        _InvokeBfint for the Outputs/Inputs conventions."""
        Bases = {}
        if self.OrbBasis: Bases['--basis-orb'] = self.OrbBasis
        if self.FitBasis: Bases['--basis-fit'] = self.FitBasis
        if MoreBases:
            Bases = dict(list(Bases.items()) + list(MoreBases.items()))
        return _InvokeBfint(self.Atoms, Bases, self.BasisLibs, Args, Outputs, Inputs)
    def MakeBaseIntegrals(self, Smh=True, MakeS=False):
        """Invoke bfint to calculate CoreEnergy (scalar), CoreH (nOrb x nOrb),
        Int2e_Frs (nFit x nOrb x nOrb), and overlap matrix (nOrb x nOrb)"""
        # assemble arguments to integral generation program
        Args = []
        if Smh:
            Args.append("--orb-trafo=Smh")
        # ^- calculate integrals in symmetrically orthogonalized AO basis
        Outputs = []
        Outputs.append(("--save-coreh", "INT1E"))
        Outputs.append(("--save-fint2e", "INT2E"))
        Outputs.append(("--save-overlap", "OVERLAP"))
        CoreH, Int2e, Overlap = self._InvokeBfint(Args, Outputs)
        nOrb = CoreH.shape[0]
        Int2e = Int2e.reshape((Int2e.shape[0], nOrb, nOrb))
        CoreEnergy = self.Atoms.fCoreRepulsion()
        if MakeS:
            return CoreEnergy, CoreH, Int2e, Overlap
        else:
            return CoreEnergy, CoreH, Int2e
    def MakeOverlaps2(self, OrbBasis2):
        """calculate overlap between current basis and a second basis, as
        described in OrbBasis2. Returns <1|2> and <2|2> matrices."""
        Args = []
        MoreBases = {'--basis-orb-2': OrbBasis2}
        Outputs = []
        Outputs.append(("--save-overlap-2", "OVERLAP_2"))
        Outputs.append(("--save-overlap-12", "OVERLAP_12"))
        #Outputs.append(("--save-overlap", "OVERLAP"))
        Overlap2, Overlap12 = self._InvokeBfint(Args, Outputs, MoreBases=MoreBases)
        return Overlap2, Overlap12
    def MakeOverlap(self, OrbBasis2=None):
        """calculate overlap within main orbital basis, and, optionally, between main
        orbital basis and a second basis, as described in OrbBasis2.
        Returns <1|1>, <1|2>, and <2|2> matrices."""
        Args = []
        Outputs = []
        Outputs.append(("--save-overlap", "OVERLAP_1"))
        if OrbBasis2 is not None:
            MoreBases = {'--basis-orb-2': OrbBasis2}
            Outputs.append(("--save-overlap-12", "OVERLAP_12"))
            Outputs.append(("--save-overlap-2", "OVERLAP_2"))
            return self._InvokeBfint(Args, Outputs, MoreBases=MoreBases)
        else:
            MoreBases = None
            Overlap, = self._InvokeBfint(Args, Outputs, MoreBases=MoreBases)
            return Overlap
    def MakeNuclearAttractionIntegrals(self, Smh=True):
        """calculate nuclear attraction integrals in main basis, for each individual atomic core.
        Returns nAo x nAo x nAtoms array."""
        Args = []
        if Smh:
            Args.append("--orb-trafo=Smh")
        Outputs = []
        Outputs.append(("--save-vnucN", "VNUC_N"))
        VNucN = self._InvokeBfint(Args, Outputs)[0]
        # program returns the matrices flattened; recover nAo from the size.
        nOrb = int(VNucN.shape[0]**.5 + .5)
        assert(nOrb**2 == VNucN.shape[0])
        assert(VNucN.shape[1] == len(self.Atoms))
        return VNucN.reshape(nOrb, nOrb, VNucN.shape[1])
    def MakeNuclearSqDistanceIntegrals(self, Smh=True):
        """calculate <mu|(r-rA)^2|nu> integrals in main basis, for each individual atomic core.
        Returns nAo x nAo x nAtoms array."""
        Args = []
        if Smh:
            Args.append("--orb-trafo=Smh")
        Outputs = []
        Outputs.append(("--save-rsqN", "RSQ_N"))
        RsqN = self._InvokeBfint(Args, Outputs)[0]
        nOrb = int(RsqN.shape[0]**.5 + .5)
        assert(nOrb**2 == RsqN.shape[0])
        assert(RsqN.shape[1] == len(self.Atoms))
        return RsqN.reshape(nOrb, nOrb, RsqN.shape[1])
    def MakeKineticIntegrals(self, Smh=True):
        """calculate kinetic energy integrals <mu|-1/2 Laplace|nu> in main basis.
        Returns nAo x nAo matrix."""
        # (note: unlike the per-atom integrals above, this is a single matrix;
        # it is returned as saved by the program, without reshaping.)
        Args = []
        if Smh:
            Args.append("--orb-trafo=Smh")
        Outputs = []
        Outputs.append(("--save-kinetic", "EKIN"))
        Op = self._InvokeBfint(Args, Outputs)[0]
        return Op
    def MakeDipoleIntegrals(self, Smh=True):
        r"""calculate dipole operator matrices <\mu|w|\nu> (w=x,y,z) in
        main basis, for each direction. Returns nAo x nAo x 3 array."""
        Args = []
        if Smh:
            Args.append("--orb-trafo=Smh")
        Outputs = []
        Outputs.append(("--save-dipole", "DIPN"))
        DipN = self._InvokeBfint(Args, Outputs)[0]
        nOrb = int(DipN.shape[0]**.5 + .5)
        assert(nOrb**2 == DipN.shape[0])
        assert(DipN.shape[1] == 3)
        return DipN.reshape(nOrb, nOrb, 3)
    def MakeOrbitalsOnGrid(self, Orbitals, Grid, DerivativeOrder=0):
        """calculate values of molecular orbitals on a grid of 3d points in space.
        Input:
        - Orbitals: nAo x nOrb matrix, where nAo must be compatible with
            self.OrbBasis. The AO dimension must be contravariant AO (i.e., not SMH).
        - Grid: 3 x nGrid array giving the coordinates of the grid points.
        - DerivativeOrder: 0: only orbital values,
            1: orbital values and 1st derivatives,
            2: orbital values and up to 2nd derivatives.
        Returns:
        - nGrid x nDerivComp x nOrb array. If DerivativeOrder is 0, the
            DerivComp dimension is omitted.
        """
        Args = [("--eval-orbitals-dx=%s" % DerivativeOrder)]
        Inputs = [("--eval-orbitals", "ORBITALS.npy", Orbitals)]\
            + [("--grid-coords", "GRID.npy", Grid)]
        Outputs = [("--save-grid-values", "ORBS_ON_GRID")]
        (ValuesOnGrid,) = self._InvokeBfint(Args, Outputs, Inputs)
        # number of derivative components: 1 (value), 4 (+x,y,z), 10 (+2nd derivs)
        nComp = [1,4,10][DerivativeOrder]
        if nComp != 1:
            ValuesOnGrid = ValuesOnGrid.reshape((Grid.shape[1], nComp, Orbitals.shape[1]))
        return ValuesOnGrid
    def MakeRaw2eIntegrals(self, Smh=True, Kernel2e="coulomb"):
        """compute Int2e_Frs (nFit x nOrb x nOrb) and fitting metric Int2e_FG (nFit x nFit),
        where the fitting metric is *not* absorbed into the 2e integrals."""
        # assemble arguments to integral generation program
        Args = []
        if Smh:
            Args.append("--orb-trafo=Smh")
        # ^- calculate integrals in symmetrically orthogonalized AO basis
        Args.append("--kernel2e='%s'" % Kernel2e)
        Args.append("--solve-fitting-eq=false")
        Outputs = []
        Outputs.append(("--save-fint2e", "INT2E_3IX"))
        Outputs.append(("--save-fitting-metric", "INT2E_METRIC"))
        Int2e_Frs, Int2e_FG = self._InvokeBfint(Args, Outputs)
        nOrb = int(Int2e_Frs.shape[1]**.5 + .5)
        assert(nOrb**2 == Int2e_Frs.shape[1])
        Int2e_Frs = Int2e_Frs.reshape((Int2e_Frs.shape[0], nOrb, nOrb))
        assert(Int2e_Frs.shape[0] == Int2e_FG.shape[0])
        assert(Int2e_FG.shape[0] == Int2e_FG.shape[1])
        return Int2e_FG, Int2e_Frs
def _InvokeBfint(Atoms, Bases, BasisLibs, BaseArgs, Outputs, Inputs=None):
    """Outputs: an array of tuples (cmdline-arguments,filename-base).
    We will generate arguments for each of them and try to read the
    corresponding files as numpy arrays and return them in order.

    Inputs: optional array of tuples (cmdline-argument, filename-base, array);
    each array is saved as .npy in the scratch directory and its file name is
    passed to the program via the given argument.

    The whole exchange runs through a temporary directory which is removed
    again in all cases (success or error)."""
    from tempfile import mkdtemp
    from shutil import rmtree
    #from commands import getstatusoutput
    from subprocess import check_output, CalledProcessError
    # make a directory to store our input/output in.
    BasePath = mkdtemp(prefix="wmme.", dir=_TmpDir)
    def Cleanup():
        rmtree(BasePath)
        pass
    BfIntDir = _WmmeDir
    if BfIntDir is None: BfIntDir = GetModulePath()
    BasisLibDir = _BasisLibDir
    if BasisLibDir is None:
        BasisLibDir = path.join(BfIntDir,"bases")
    MakeIntegralsExecutable = path.join(BfIntDir,"wmme")
    # assemble arguments to integral generation program
    FileNameXyz = path.join(BasePath, "ATOMS")
    Args = [o for o in BaseArgs]
    Args.append("--matrix-format=npy")
    for BasisLib in BasisLibs:
        Args.append("--basis-lib=%s" % path.join(BasisLibDir, BasisLib))
    Args.append("--atoms-au=%s" % FileNameXyz)
    iWrittenBasis = 0
    for (ParamName, BasisObj) in Bases.items():
        if BasisObj is None:
            continue
        if isinstance(BasisObj, FBasisSet):
            # basis is given as an explicit FBasisSet object.
            # Write the basis set to disk and supply the file name as argument
            BasisFile = path.join(BasePath, "BASIS%i" % iWrittenBasis)
            iWrittenBasis += 1
            with open(BasisFile, "w") as File:
                File.write(BasisObj.FmtCr())
            Args.append("%s='!%s'" % (ParamName, BasisFile))
        else:
            assert(isinstance(BasisObj, str))
            # it's just a basis set name: append the name to the arguments.
            # (set will be read from library by wmme itself)
            Args.append("%s=%s" % (ParamName, BasisObj))
        pass
    # make file names and arguments for output arrays
    FileNameOutputs = []
    for (ArgName,FileNameBase) in Outputs:
        FileName = path.join(BasePath, FileNameBase)
        FileNameOutputs.append(FileName)
        Args.append("%s='%s'" % (ArgName, FileName))
    XyzLines = "%i\n\n%s\n" % (len(Atoms), Atoms.MakeXyz("%24.16f"))
    # ^- note on the .16f: it actually does make a difference. I had .8f
    # there before, and it lead to energy changes on the order of 1e-8
    # when treating only non-redundant subsystem out of a symmetric
    # arrangement.
    try:
        with open(FileNameXyz, "w") as File:
            File.write(XyzLines)
        # save input arrays if provided.
        if Inputs:
            for (ArgName,FileNameBase,Array) in Inputs:
                FileName = path.join(BasePath, FileNameBase)
                np.save(FileName,Array)
                Args.append("%s='%s'" % (ArgName, FileName))
        Cmd = "%s %s" % (MakeIntegralsExecutable, " ".join(Args))
        #print("!Invoking %s\n" % Cmd)
        #iErr, Output = getstatusoutput(Cmd)
        #if ( iErr != 0 ):
        try:
            Output = check_output(Cmd, shell=True)
            if (version_info) >= (3,0):
                # it returns a byte string in Python 3... which wouldn't be a problem
                # if not all OTHER literals were converted to unicode implicitly.
                Output = Output.decode("utf-8")
        except CalledProcessError as e:
            raise Exception("Integral calculation failed. Output was:\n%s\nException was: %s" % (e.output, str(e)))
        OutputArrays = []
        for FileName in FileNameOutputs:
            OutputArrays.append(np.load(FileName))
    except:
        # remove the scratch directory even when anything above failed,
        # then re-raise the original exception.
        Cleanup()
        raise
    # got everything we need. Delete the temporary directory.
    Cleanup()
    return tuple(OutputArrays)
def ReadXyzFile(FileName,Scale=1./ToAng):
    """Read a molecular geometry from an .xyz file.

    Returns (Xyz, Atoms): a (3, nAtoms) coordinate array (input coordinates
    multiplied by Scale; the default converts Angstrom to bohr) and the list
    of element symbols.

    Raises Exception if an atom line carries an unrecognized element symbol.
    """
    # allowed formats: <nAtoms> \n Desc \n <atom-list>
    # or: <atom-list> (without any headers)
    # in the first case, only the first nAtoms+2 lines are read, in the
    # second case everything which does not look like a xyz line is
    # ignored.
    with open(FileName, "r") as File:
        Lines = File.read().splitlines()
    iFirst, iLast = 0, len(Lines)
    if len(Lines[0].split()) == 1:
        nAtoms = int(Lines[0].split()[0])
        iFirst, iLast = 2, nAtoms + 2
        # ^- fix: this range was previously computed but never applied, so
        #    trailing junk lines after the atom list could be parsed as atoms.
    Atoms = []
    Xyz = []
    for Line in Lines[iFirst:iLast]:
        ls = Line.split()
        try:
            Atom = ls[0]
            x,y,z = float(ls[1]), float(ls[2]), float(ls[3])
        except (IndexError, ValueError):
            # not an atom line (e.g., blank or description line) -> skip it.
            continue
        Atom = Atom[0].upper() + Atom[1:].lower()
        # maybe we should allow for (and ignore) group numbers after the
        # elements?
        if Atom not in ElementNames:
            raise Exception("while reading '%s': unrecognized element '%s'." % (FileName,Atom))
        Atoms.append(Atom)
        Xyz.append((x,y,z))
    Xyz = Scale*array(Xyz).T
    return Xyz, Atoms
def ReadAtomsFromXyzFile(FileName, Scale=1./ToAng):
    """Load an .xyz file and wrap its geometry in a FAtomSet object."""
    Positions, Elements = ReadXyzFile(FileName, Scale)
    return FAtomSet(Positions, Elements)
| 37.158516
| 274
| 0.621103
|
from __future__ import print_function
import numpy as np
from numpy import dot, array
from os import path
from sys import version_info
def GetModulePath():
    """Return the absolute directory containing this module's source file."""
    import inspect
    ThisFile = inspect.getfile(inspect.currentframe())
    return path.dirname(path.abspath(ThisFile))
if 0:
_WmmeDir = "/home/cgk/dev/wmme"
else:
_WmmeDir = None
_TmpDir = None
_BasisLibDir = None
ToAng = 0.529177209
def ElementNameDummy():
    """Build the periodic table lookup tables.

    Returns (ElementNames, ElementNumbers): element symbols indexed by nuclear
    charge ('X' is a charge-0 dummy center) and the inverse symbol->charge map.
    """
    ElementNames = "X H He Li Be B C N O F Ne Na Mg Al Si P S Cl Ar K Ca Sc Ti V Cr Mn Fe Co Ni Cu Zn Ga Ge As Se Br Kr Rb Sr Y Zr Nb Mo Tc Ru Rh Pd Ag Cd In Sn Sb Te I Xe Cs Ba La Ce Pr Nd Pm Sm Eu Gd Tb Dy Ho Er Tm Yb Lu Hf Ta W Re Os Ir Pt Au Hg Tl Pb Bi Po At Rn".split()
    ElementNumbers = {}
    for iElement, Name in enumerate(ElementNames):
        ElementNumbers[Name] = iElement
    return ElementNames, ElementNumbers
ElementNames, ElementNumbers = ElementNameDummy()
def mdot(*args):
    """Return the chained matrix product args[0] . args[1] . ... . args[-1].

    With a single argument, that argument is returned unchanged.
    """
    Acc = args[0]
    iArg = 1
    while iArg < len(args):
        Acc = dot(Acc, args[iArg])
        iArg += 1
    return Acc
def dot2(A,B): return dot(A.flatten(),B.flatten())
def nCartY(l):
    """Return the number of Cartesian angular components for angular momentum
    l: (l+1)(l+2)/2 (e.g. 1 for s, 3 for p, 6 for d).

    Uses floor division so an int is returned under Python 3 as well; with the
    former '/' the result was a float there, which breaks callers using it as
    an array dimension or count.
    """
    return ((l+1)*(l+2))//2
class FAtom(object):
    """A single atom: element symbol, position, and index within its atom set."""
    def __init__(self, Element, Position, Index):
        self.Element = Element
        self.Pos = Position
        self.Index = Index
    @property
    def Label(self):
        # element symbol followed by the 1-based atom number, e.g. ' H  1'
        return "%2s%3s" % (self.Element, self.Index + 1)
    @property
    def iElement(self):
        # nuclear charge of the element
        return ElementNumbers[self.Element]
    def __str__(self):
        x, y, z = self.Pos[0], self.Pos[1], self.Pos[2]
        return "%s (%6.3f,%6.3f,%6.3f)" % (self.Label, x, y, z)
class FAtomSet(object):
    """A molecular geometry: a (3, nAtoms) position array plus element symbols.

    Positions are expected in atomic units (bohr); iterating yields FAtom objects.
    """
    def __init__(self, Positions, Elements, Orientations=None, Name=None):
        # Positions: (3, nAtoms) array; Elements: list of element symbols.
        self.Pos = Positions
        assert(self.Pos.shape[0] == 3 and self.Pos.shape[1] == len(Elements))
        self.Elements = Elements
        self.Orientations = Orientations
        self.Name = Name
    def MakeXyz(self,NumFmt = "%15.8f",Scale=1.):
        """Format the geometry as the atom-list body of an .xyz file
        (coordinates multiplied by Scale, formatted with NumFmt)."""
        Lines = []
        for i in range(len(self.Elements)):
            Lines.append(" %5s {0} {0} {0}".format(NumFmt) % (\
                self.Elements[i], Scale*self.Pos[0,i], Scale*self.Pos[1,i], Scale*self.Pos[2,i]))
        return "\n".join(Lines)
    def nElecNeutral(self):
        """Return the electron count of the neutral molecule (sum of nuclear charges)."""
        return sum([ElementNumbers[o] for o in self.Elements])
    def fCoreRepulsion1(self, iAt, jAt):
        """Return the Coulomb repulsion Z_A*Z_B/|r_A - r_B| of one nuclear pair (0. for iAt == jAt)."""
        if iAt == jAt: return 0.
        ChA, ChB = [ElementNumbers[self.Elements[o]] for o in [iAt, jAt]]
        # note: '**' binds tighter than '/', so this divides by the distance.
        return ChA * ChB / np.sum((self.Pos[:,iAt] - self.Pos[:,jAt])**2)**.5
    def fCoreRepulsion(self):
        """Return the total nuclear repulsion energy (sum over unique atom pairs)."""
        N = len(self.Elements)
        Charges = array([ElementNumbers[o] for o in self.Elements])
        fCoreEnergy = 0
        for i in range(N):
            for j in range(i):
                fCoreEnergy += self.fCoreRepulsion1(i,j)
                #fCoreEnergy += Charges[i] * Charges[j] / np.sum((self.Pos[:,i] - self.Pos[:,j])**2)**.5
        return fCoreEnergy
    def __str__(self):
        Caption = " %5s%15s %15s %15s" % ("ATOM", "POS/X", "POS/Y", "POS/Z")
        return Caption + "\n" + self.MakeXyz()
    def __len__(self): return len(self.Elements)
    def __getitem__(self,key): return FAtom(self.Elements[key], self.Pos[:,key], key)
    def __iter__(self):
        for (iAt,(Type,Xyz)) in enumerate(zip(self.Elements, self.Pos.T)):
            #yield (Type,Xyz)
            yield FAtom(Type, Xyz, iAt)
class FBasisShell(object):
    """A group of contracted Gaussian basis functions sharing one set of
    primitive exponents and one angular momentum l."""
    def __init__(self, l, Exp, Co):
        # l: angular momentum (0..8); Exp: primitive exponents (1d);
        # Co: (nExp, nCo) contraction matrix, one column per contracted function.
        self.l = l
        assert(isinstance(l,int) and l >= 0 and l <= 8)
        self.Exp = np.array(Exp)
        assert(self.Exp.ndim == 1)
        self.Co = np.array(Co)
        assert(self.Co.ndim == 2 and self.Co.shape[0] == len(self.Exp))
        self.Element = None # designated element for the basis function
        self.Comment = None # comment on the basis function (e.g., literature reference)
    @property
    def nExp(self):
        # number of primitive exponents
        return len(self.Exp)
    @property
    def nCo(self):
        # number of contracted functions
        return self.Co.shape[1]
    @property
    def nFn(self):
        # number of basis functions in a solid-harmonic representation
        return self.nCo * (2*self.l + 1)
    @property
    def nFnCa(self):
        # number of basis functions in a Cartesian representation
        return self.nCo * nCartY(self.l)
    @property
    def AngMom(self): return self.l
    def __str__(self):
        Lines = []
        Lines.append("BasisShell [l = %i, nExp = %i, nCo = %i]" % (self.l, self.nExp, self.nCo))
        def FmtA(L):
            return ", ".join("%12.5f" % o for o in L)
        Lines.append(" Exps = [%s]" % FmtA(self.Exp))
        for iCo in range(self.nCo):
            Lines.append(" Co[%2i] = [%s]" % (iCo, FmtA(self.Co[:,iCo])))
        return "\n".join(Lines)
class FBasisShell1(object):
    """A FBasisShell placed on a concrete atom (shell + center).

    All shell properties are forwarded from the wrapped FBasisShell object.
    """
    def __init__(self, Atom, ShellFn):
        # Atom: FAtom carrying the shell; ShellFn: the FBasisShell itself.
        self.Atom = Atom
        self.Fn = ShellFn
        assert(isinstance(self.Fn, FBasisShell))
    @property
    def Pos(self):
        # position of the carrying atom
        return self.Atom.Pos
    @property
    def iAtom(self):
        # index of the carrying atom within its atom set
        return self.Atom.Index
    @property
    def l(self): return self.Fn.l
    @property
    def nExp(self): return self.Fn.nExp
    @property
    def Exp(self): return self.Fn.Exp
    @property
    def nCo(self): return self.Fn.nCo
    @property
    def Co(self): return self.Fn.Co
    @property
    def nFn(self): return self.Fn.nFn
    @property
    def nFnCa(self): return self.Fn.nFnCa
class FBasisSet(object):
    """A complete basis set: FBasisShell1 objects (shells placed on atoms)
    together with the atom set they refer to."""
    def __init__(self, Shells, Atoms):
        # list of FBasisShell1 objects.
        self.Shells = Shells
        self.Atoms = Atoms
    @property
    def nFn(self):
        """Total number of (solid-harmonic) basis functions."""
        n = 0
        for Sh in self.Shells:
            n += Sh.nFn
        return n
    @property
    def nFnCa(self):
        """Total number of Cartesian basis functions."""
        n = 0
        for Sh in self.Shells:
            n += Sh.nFnCa
        return n
    def __str__(self):
        Lines = []
        for o in self.Shells:
            Lines.append("Atom %s %s" % (o.Atom, o.Fn))
        return "\n".join(Lines)
    def FmtCr(self):
        """Serialize the basis set into the text format understood by the
        external 'wmme' integral program (see _InvokeBfint)."""
        #f = 1./ToAng
        f = 1.
        Lines = []
        def Emit(s):
            Lines.append(s)
        def EmitArray(Name, A):
            #Emit(" " + Name + "<" + " ".join("%.16e"%o for o in A) + ">")
            Emit(" " + Name + "<" + " ".join("%r"%o for o in A) + ">")
        # collect all unique FBasisShell objects.
        BasisFns = []
        BasisFnIds = {} # map id(BasisFn)->(index)
        for Shell in self.Shells:
            if id(Shell.Fn) not in BasisFnIds:
                BasisFnIds[id(Shell.Fn)] = len(BasisFns)
                BasisFns.append(Shell.Fn)
            pass
        Emit("Basis<Version<0.1> nFns<%i> nShells<%i>" % (len(BasisFns), len(self.Shells)))
        # store the function declarations...
        def EmitBasisFn(Fn):
            Emit(" Fn<Id<%i> l<%i> nExp<%i> nCo<%i>" % (
                BasisFnIds[id(Fn)], Fn.l, Fn.nExp, Fn.nCo))
            EmitArray("Exp", Fn.Exp)
            for Co in Fn.Co.T:
                EmitArray("Co", Co)
            Emit(" >")
            pass
        for Fn in BasisFns:
            EmitBasisFn(Fn)
        # ...and their distribution amongst atoms.
        def EmitShell(Sh):
            #Emit(" Shell<iAt<%i> x<%.16e> y<%.16e> z<%.16e> FnId<%i>>" % (
            Emit(" Shell<iAt<%i> x<%r> y<%r> z<%r> FnId<%i>>" % (
                Sh.Atom.Index, f*Sh.Atom.Pos[0], f*Sh.Atom.Pos[1], f*Sh.Atom.Pos[2], BasisFnIds[id(Sh.Fn)]))
            pass
        for Shell in self.Shells:
            EmitShell(Shell)
        Emit(">") # end of Basis
        return "\n".join(Lines)
    def GetAngmomList(self):
        """Return the angular momentum of each contracted shell, one entry per contraction."""
        # list of all basis function angular momenta in the basis, for converting basis function orders and types.
        ls = []
        for Shell in self.Shells:
            for iCo in range(Shell.nCo):
                ls.append(Shell.l)
        return ls
class FIntegralContext(object):
    """Driver for evaluating quantum chemistry matrix elements over the given
    atoms and basis sets by invoking the external 'wmme' integral program.

    Basis sets may be given either as library names (str) or FBasisSet objects.
    """
    def __init__(self, Atoms, OrbBasis, FitBasis=None, BasisLibs=None):
        # Atoms: FAtomSet; OrbBasis: main orbital basis; FitBasis: optional
        # density-fitting basis; BasisLibs: names of basis library files.
        self.Atoms = Atoms
        self.OrbBasis = OrbBasis
        self.FitBasis = FitBasis
        self.BasisLibs = BasisLibs
    def _InvokeBfint(self, Args, Outputs=None, Inputs=None, MoreBases=None):
        """Run the integral program with this context's atoms/bases plus the
        given extra arguments; see module-level _InvokeBfint for conventions."""
        Bases = {}
        if self.OrbBasis: Bases['--basis-orb'] = self.OrbBasis
        if self.FitBasis: Bases['--basis-fit'] = self.FitBasis
        if MoreBases:
            Bases = dict(list(Bases.items()) + list(MoreBases.items()))
        return _InvokeBfint(self.Atoms, Bases, self.BasisLibs, Args, Outputs, Inputs)
    def MakeBaseIntegrals(self, Smh=True, MakeS=False):
        """Compute CoreEnergy (scalar), CoreH (nOrb x nOrb), Int2e_Frs
        (nFit x nOrb x nOrb) and, if MakeS, also the overlap (nOrb x nOrb)."""
        # assemble arguments to integral generation program
        Args = []
        if Smh:
            Args.append("--orb-trafo=Smh")
        # ^- calculate integrals in symmetrically orthogonalized AO basis
        Outputs = []
        Outputs.append(("--save-coreh", "INT1E"))
        Outputs.append(("--save-fint2e", "INT2E"))
        Outputs.append(("--save-overlap", "OVERLAP"))
        CoreH, Int2e, Overlap = self._InvokeBfint(Args, Outputs)
        nOrb = CoreH.shape[0]
        Int2e = Int2e.reshape((Int2e.shape[0], nOrb, nOrb))
        CoreEnergy = self.Atoms.fCoreRepulsion()
        if MakeS:
            return CoreEnergy, CoreH, Int2e, Overlap
        else:
            return CoreEnergy, CoreH, Int2e
    def MakeOverlaps2(self, OrbBasis2):
        """Return (<2|2>, <1|2>) overlap matrices between the main basis and OrbBasis2."""
        Args = []
        MoreBases = {'--basis-orb-2': OrbBasis2}
        Outputs = []
        Outputs.append(("--save-overlap-2", "OVERLAP_2"))
        Outputs.append(("--save-overlap-12", "OVERLAP_12"))
        #Outputs.append(("--save-overlap", "OVERLAP"))
        Overlap2, Overlap12 = self._InvokeBfint(Args, Outputs, MoreBases=MoreBases)
        return Overlap2, Overlap12
    def MakeOverlap(self, OrbBasis2=None):
        """Return <1|1>; with OrbBasis2 given, return (<1|1>, <1|2>, <2|2>)."""
        Args = []
        Outputs = []
        Outputs.append(("--save-overlap", "OVERLAP_1"))
        if OrbBasis2 is not None:
            MoreBases = {'--basis-orb-2': OrbBasis2}
            Outputs.append(("--save-overlap-12", "OVERLAP_12"))
            Outputs.append(("--save-overlap-2", "OVERLAP_2"))
            return self._InvokeBfint(Args, Outputs, MoreBases=MoreBases)
        else:
            MoreBases = None
            Overlap, = self._InvokeBfint(Args, Outputs, MoreBases=MoreBases)
            return Overlap
    def MakeNuclearAttractionIntegrals(self, Smh=True):
        """Return per-atom nuclear attraction integrals as an (nAo, nAo, nAtoms) array."""
        Args = []
        if Smh:
            Args.append("--orb-trafo=Smh")
        Outputs = []
        Outputs.append(("--save-vnucN", "VNUC_N"))
        VNucN = self._InvokeBfint(Args, Outputs)[0]
        # matrices come back flattened; recover nAo from the size.
        nOrb = int(VNucN.shape[0]**.5 + .5)
        assert(nOrb**2 == VNucN.shape[0])
        assert(VNucN.shape[1] == len(self.Atoms))
        return VNucN.reshape(nOrb, nOrb, VNucN.shape[1])
    def MakeNuclearSqDistanceIntegrals(self, Smh=True):
        """Return <mu|(r-rA)^2|nu> integrals per atom as an (nAo, nAo, nAtoms) array."""
        Args = []
        if Smh:
            Args.append("--orb-trafo=Smh")
        Outputs = []
        Outputs.append(("--save-rsqN", "RSQ_N"))
        RsqN = self._InvokeBfint(Args, Outputs)[0]
        nOrb = int(RsqN.shape[0]**.5 + .5)
        assert(nOrb**2 == RsqN.shape[0])
        assert(RsqN.shape[1] == len(self.Atoms))
        return RsqN.reshape(nOrb, nOrb, RsqN.shape[1])
    def MakeKineticIntegrals(self, Smh=True):
        """Return the kinetic energy matrix <mu|-1/2 Laplace|nu> (nAo x nAo)."""
        Args = []
        if Smh:
            Args.append("--orb-trafo=Smh")
        Outputs = []
        Outputs.append(("--save-kinetic", "EKIN"))
        Op = self._InvokeBfint(Args, Outputs)[0]
        return Op
    def MakeDipoleIntegrals(self, Smh=True):
        """Return dipole operator matrices <mu|w|nu> (w = x,y,z) as an (nAo, nAo, 3) array."""
        Args = []
        if Smh:
            Args.append("--orb-trafo=Smh")
        Outputs = []
        Outputs.append(("--save-dipole", "DIPN"))
        DipN = self._InvokeBfint(Args, Outputs)[0]
        nOrb = int(DipN.shape[0]**.5 + .5)
        assert(nOrb**2 == DipN.shape[0])
        assert(DipN.shape[1] == 3)
        return DipN.reshape(nOrb, nOrb, 3)
    def MakeOrbitalsOnGrid(self, Orbitals, Grid, DerivativeOrder=0):
        """Evaluate molecular orbitals (nAo x nOrb matrix, contravariant AO
        dimension) on a (3, nGrid) grid of points.

        Returns an (nGrid, nDerivComp, nOrb) array; the nDerivComp dimension
        is omitted for DerivativeOrder == 0.
        """
        Args = [("--eval-orbitals-dx=%s" % DerivativeOrder)]
        Inputs = [("--eval-orbitals", "ORBITALS.npy", Orbitals)]\
            + [("--grid-coords", "GRID.npy", Grid)]
        Outputs = [("--save-grid-values", "ORBS_ON_GRID")]
        (ValuesOnGrid,) = self._InvokeBfint(Args, Outputs, Inputs)
        # derivative components: 1 (value), 4 (+x,y,z), 10 (+2nd derivatives)
        nComp = [1,4,10][DerivativeOrder]
        if nComp != 1:
            ValuesOnGrid = ValuesOnGrid.reshape((Grid.shape[1], nComp, Orbitals.shape[1]))
        return ValuesOnGrid
    def MakeRaw2eIntegrals(self, Smh=True, Kernel2e="coulomb"):
        """Return (Int2e_FG, Int2e_Frs): the fitting metric (nFit x nFit) and
        3-index 2e integrals (nFit x nOrb x nOrb), metric NOT absorbed."""
        # assemble arguments to integral generation program
        Args = []
        if Smh:
            Args.append("--orb-trafo=Smh")
        # ^- calculate integrals in symmetrically orthogonalized AO basis
        Args.append("--kernel2e='%s'" % Kernel2e)
        Args.append("--solve-fitting-eq=false")
        Outputs = []
        Outputs.append(("--save-fint2e", "INT2E_3IX"))
        Outputs.append(("--save-fitting-metric", "INT2E_METRIC"))
        Int2e_Frs, Int2e_FG = self._InvokeBfint(Args, Outputs)
        nOrb = int(Int2e_Frs.shape[1]**.5 + .5)
        assert(nOrb**2 == Int2e_Frs.shape[1])
        Int2e_Frs = Int2e_Frs.reshape((Int2e_Frs.shape[0], nOrb, nOrb))
        assert(Int2e_Frs.shape[0] == Int2e_FG.shape[0])
        assert(Int2e_FG.shape[0] == Int2e_FG.shape[1])
        return Int2e_FG, Int2e_Frs
def _InvokeBfint(Atoms, Bases, BasisLibs, BaseArgs, Outputs, Inputs=None):
    """Run the external 'wmme' integral executable in a temporary directory.

    Outputs: list of (cmdline-argument, filename-base) tuples; each resulting
    file is loaded as a numpy array and all are returned, in order, as a tuple.
    Inputs: optional list of (cmdline-argument, filename-base, array) tuples;
    each array is saved as .npy and passed to the program via its argument.
    The scratch directory is removed in all cases (success or error).
    """
    from tempfile import mkdtemp
    from shutil import rmtree
    #from commands import getstatusoutput
    from subprocess import check_output, CalledProcessError
    # make a directory to store our input/output in.
    BasePath = mkdtemp(prefix="wmme.", dir=_TmpDir)
    def Cleanup():
        rmtree(BasePath)
        pass
    BfIntDir = _WmmeDir
    if BfIntDir is None: BfIntDir = GetModulePath()
    BasisLibDir = _BasisLibDir
    if BasisLibDir is None:
        BasisLibDir = path.join(BfIntDir,"bases")
    MakeIntegralsExecutable = path.join(BfIntDir,"wmme")
    # assemble arguments to integral generation program
    FileNameXyz = path.join(BasePath, "ATOMS")
    Args = [o for o in BaseArgs]
    Args.append("--matrix-format=npy")
    for BasisLib in BasisLibs:
        Args.append("--basis-lib=%s" % path.join(BasisLibDir, BasisLib))
    Args.append("--atoms-au=%s" % FileNameXyz)
    iWrittenBasis = 0
    for (ParamName, BasisObj) in Bases.items():
        if BasisObj is None:
            continue
        if isinstance(BasisObj, FBasisSet):
            # basis is given as an explicit FBasisSet object.
            # Write the basis set to disk and supply the file name as argument
            BasisFile = path.join(BasePath, "BASIS%i" % iWrittenBasis)
            iWrittenBasis += 1
            with open(BasisFile, "w") as File:
                File.write(BasisObj.FmtCr())
            Args.append("%s='!%s'" % (ParamName, BasisFile))
        else:
            assert(isinstance(BasisObj, str))
            # it's just a basis set name: append the name to the arguments.
            Args.append("%s=%s" % (ParamName, BasisObj))
        pass
    # make file names and arguments for the output arrays.
    FileNameOutputs = []
    for (ArgName,FileNameBase) in Outputs:
        FileName = path.join(BasePath, FileNameBase)
        FileNameOutputs.append(FileName)
        Args.append("%s='%s'" % (ArgName, FileName))
    XyzLines = "%i\n\n%s\n" % (len(Atoms), Atoms.MakeXyz("%24.16f"))
    try:
        with open(FileNameXyz, "w") as File:
            File.write(XyzLines)
        # save input arrays if provided.
        if Inputs:
            for (ArgName,FileNameBase,Array) in Inputs:
                FileName = path.join(BasePath, FileNameBase)
                np.save(FileName,Array)
                Args.append("%s='%s'" % (ArgName, FileName))
        Cmd = "%s %s" % (MakeIntegralsExecutable, " ".join(Args))
        try:
            Output = check_output(Cmd, shell=True)
            if (version_info) >= (3,0):
                # check_output returns bytes under Python 3; decode for text handling.
                Output = Output.decode("utf-8")
        except CalledProcessError as e:
            raise Exception("Integral calculation failed. Output was:\n%s\nException was: %s" % (e.output, str(e)))
        OutputArrays = []
        for FileName in FileNameOutputs:
            OutputArrays.append(np.load(FileName))
    except:
        # remove the scratch directory even on failure, then re-raise.
        Cleanup()
        raise
    # got everything we need. Delete the temporary directory.
    Cleanup()
    return tuple(OutputArrays)
def ReadXyzFile(FileName,Scale=1./ToAng):
    """Read a molecular geometry from an .xyz file.

    Returns (Xyz, Atoms): a (3, nAtoms) coordinate array (input coordinates
    multiplied by Scale; the default converts Angstrom to bohr) and the list
    of element symbols.

    Raises Exception if an atom line carries an unrecognized element symbol.
    """
    # allowed formats: <nAtoms> \n Desc \n <atom-list>
    # or: <atom-list> (without any headers); in the header case only the
    # first nAtoms+2 lines are read, otherwise every line that does not
    # look like an xyz atom line is ignored.
    with open(FileName, "r") as File:
        Lines = File.read().splitlines()
    iFirst, iLast = 0, len(Lines)
    if len(Lines[0].split()) == 1:
        nAtoms = int(Lines[0].split()[0])
        iFirst, iLast = 2, nAtoms + 2
        # ^- fix: this range was previously computed but never applied, so
        #    trailing junk lines after the atom list could be parsed as atoms.
    Atoms = []
    Xyz = []
    for Line in Lines[iFirst:iLast]:
        ls = Line.split()
        try:
            Atom = ls[0]
            x,y,z = float(ls[1]), float(ls[2]), float(ls[3])
        except (IndexError, ValueError):
            # not an atom line (e.g., blank or description line) -> skip it.
            continue
        Atom = Atom[0].upper() + Atom[1:].lower()
        if Atom not in ElementNames:
            raise Exception("while reading '%s': unrecognized element '%s'." % (FileName,Atom))
        Atoms.append(Atom)
        Xyz.append((x,y,z))
    Xyz = Scale*array(Xyz).T
    return Xyz, Atoms
def ReadAtomsFromXyzFile(FileName, Scale=1./ToAng):
    """Load an .xyz file and wrap its geometry in a FAtomSet object."""
    Positions, Elements = ReadXyzFile(FileName, Scale)
    return FAtomSet(Positions, Elements)
| true
| true
|
79066490d56c44bc5d1a863dc599a0ba58b3e34a
| 11,357
|
py
|
Python
|
main.py
|
kgantsov/instafollower
|
4720fd074907761044b3a8aa060a146eab605b3d
|
[
"MIT"
] | null | null | null |
main.py
|
kgantsov/instafollower
|
4720fd074907761044b3a8aa060a146eab605b3d
|
[
"MIT"
] | null | null | null |
main.py
|
kgantsov/instafollower
|
4720fd074907761044b3a8aa060a146eab605b3d
|
[
"MIT"
] | null | null | null |
import time
import random
import os
import logging
import sys
from datetime import datetime
from datetime import timedelta
from urllib.parse import urlparse
from urllib.parse import urljoin
import click
from selenium import webdriver
from selenium.webdriver.common.keys import Keys
from selenium.common.exceptions import NoSuchElementException
from pyvirtualdisplay import Display
from models import db
from models import Following
from models import Comment
from models import Like
username = os.environ['instagram_username']
password = os.environ['instagram_password']
dir_path = os.path.dirname(os.path.realpath(__file__))
logging.basicConfig(
stream=sys.stdout,
level='INFO',
format='%(asctime)s %(levelname)s:%(name)s:%(message)s'
)
log = logging.getLogger('app')
def sleep(duration):
    """Log how long we are going to pause, then actually pause."""
    message = 'Sleeping for {} seconds'.format(duration)
    log.info(message)
    time.sleep(duration)
def have_like(p):
    """Return True with a probability of roughly p percent (p in 0..100)."""
    roll = random.randint(1, 100)
    return roll < p
def get_url(driver):
    """Return the driver's current URL stripped of query string and fragment."""
    parts = urlparse(driver.current_url)
    base = '{}://{}'.format(parts.scheme, parts.netloc)
    return urljoin(base, parts.path)
def get_driver(gui=True):
    """Create a Chrome webdriver; pass gui=False for a headless browser."""
    opts = webdriver.ChromeOptions()
    if not gui:
        for flag in ('headless', '--no-sandbox', 'window-size=1200x600'):
            opts.add_argument(flag)
    browser = webdriver.Chrome(
        executable_path='/usr/local/bin/chromedriver',
        chrome_options=opts
    )
    browser.implicitly_wait(15)
    return browser
def login(driver, username, password):
    """Log in to Instagram via the login form reachable from the landing page."""
    driver.find_element_by_xpath("//p[@class='izU2O']/a[text()='Log in']").click()
    sleep(5)
    user_field = driver.find_element_by_xpath("//INPUT[@name='username']")
    user_field.send_keys(username)
    pass_field = driver.find_element_by_xpath("//INPUT[@type='password']")
    pass_field.send_keys(password)
    # submitting via RETURN in the password field triggers the login
    pass_field.send_keys(Keys.RETURN)
    sleep(10)
def search(driver, tag):
    """Open the explore page for `tag` and click the first thumbnail to open the post lightbox."""
    driver.get('https://www.instagram.com/explore/tags/{tag}/'.format(tag=tag))
    sleep(4)
    thumbnail = driver.find_element_by_xpath(
        "//article/div[2]/div[1]/div[1]/div[1]"
    )
    thumbnail.click()
    sleep(2)
def go_to_next_photo(driver):
    """Advance to the next photo in the lightbox by clicking the 'Next' arrow.

    On failure a screenshot is saved for debugging instead of raising.
    """
    try:
        nex_btn = driver.find_element_by_xpath(
            # fix: the class name must be a quoted string literal -- unquoted
            # it is parsed as a relative node path (empty string here), which
            # makes contains() match every <a> element.
            "//a[contains(@class, 'coreSpriteRightPaginationArrow')][text()='Next']"
        )
    except Exception:
        driver.save_screenshot('screenshot.png')
    else:
        nex_btn.click()
    time.sleep(1)
def is_already_liked(driver):
    """Return True when the open post shows no 'Like' button (i.e. it was liked already)."""
    try:
        driver.find_element_by_xpath("//span[@aria-label='Like']")
    except NoSuchElementException:
        log.info('Picture has already been liked {}'.format(driver.current_url))
        return True
    log.info('Picture has NOT been liked yet {}'.format(driver.current_url))
    return False
def like_post(driver):
    """Like the currently open post, once.

    Skips posts already recorded in the local Like table and records a new
    Like row on success. Returns True if a like was performed, else False.
    """
    url = get_url(driver)
    try:
        Like.select().where(Like.url == url).get()
    except Like.DoesNotExist:
        pass
    else:
        # a row exists -> we liked this URL in an earlier run
        log.info('Post has already been liked {url}'.format(url=url))
        return False
    try:
        like_btn = driver.find_element_by_xpath("//span[@aria-label='Like']")
    except NoSuchElementException:
        # no Like button: either already liked on the site or page not loaded as expected
        log.info('Could not find like button {}'.format(driver.current_url))
        time.sleep(1)
        return False
    else:
        log.info('Found like button. Trying to like {}'.format(driver.current_url))
        like_btn.click()
        Like.create(url=url)
        log.info('Liked picture {url}'.format(url=url))
        return True
def comment_post(driver, text):
    """Post `text` as a comment on the currently open post.

    Skips posts already recorded in the local Comment table and records a
    Comment row on success. Returns True if a comment was posted, else False.
    """
    url = get_url(driver)
    try:
        Comment.select().where(Comment.url == url).get()
    except Comment.DoesNotExist:
        pass
    else:
        log.info('Post has already been commented {url}'.format(url=url))
        return False
    try:
        comment_input = driver.find_element_by_xpath('//TEXTAREA[@placeholder="Add a comment…"]')
    except NoSuchElementException as e:
        log.info(e)
        return False
    else:
        # comment_input.click()
        # comment_input.clear()
        # time.sleep(1)
        # comment_input = driver.find_element_by_xpath('//TEXTAREA[@placeholder="Add a comment…"]')
        # --------------------
        # set the value via JS, then nudge the field so React notices the change
        driver.execute_script(
            "arguments[0].value = '{} ';".format(text), comment_input
        )
        # An extra space is added here and then deleted.
        # This forces the input box to update the reactJS core
        comment_input.send_keys("\b")
        comment_input = driver.find_element_by_xpath('//TEXTAREA[@placeholder="Add a comment…"]')
        comment_input.submit()
        # --------------------
        # comment_input.send_keys(text)
        # comment_input.send_keys(Keys.RETURN)
        # comment_input.clear()
        Comment.create(url=url, comment=text)
        log.info('Commented picture {url} with "{text}"'.format(url=url, text=text))
        time.sleep(1)
        return True
def subscribe(driver):
    """Follow the author of the currently open post.

    Skips users already recorded in the local Following table or already
    followed on Instagram; records a Following row on success. Returns True
    if a new follow was performed, else False.
    """
    name_label = driver.find_element_by_xpath("//article/header//div[@class='e1e1d']/a[text()]")
    name = name_label.text
    follow_btn = driver.find_element_by_xpath("//article/header/div//button[text()]")
    try:
        following = Following.select().where(Following.name == name).get()
    except Following.DoesNotExist:
        pass
    else:
        # a row exists -> we followed this user in an earlier run
        log.info(
            'Already subscribed on user: @{user} ({following})'.format(
                user=name,
                following=following
            )
        )
        return False
    btn_text = follow_btn.text
    if btn_text == 'Follow':
        log.info('Going to subscribe on user: @{user}'.format(user=name))
        try:
            follow_btn.click()
            time.sleep(1)
        except Exception as e:
            # click failed; log and fall through without recording a follow
            log.info(e)
        else:
            Following.create(name=name)
            return True
    else:
        # button text is not 'Follow' -> already subscribed on the site itself
        log.info('Already subscribed on user: @{user}'.format(user=name))
        return False
def get_random_comment():
    """Pick one canned compliment at random to post as a comment."""
    canned = (
        'Nice',
        'Nice photo',
        'Nice picture',
        'Nice capture',
        'Nice image',
        'Nice shot',
        'Great photo',
        'Great job',
        'Awesome picture',
        'awesome shot',
        'Like it',
        'Like this picture',
        'Like this photo',
        'Like this image',
        'Beautiful',
        'Beautiful photo',
        'Beautiful picture',
        'Lovely picture',
        'Lovely photo',
        'Amazing',
        'Amazing shot',
        'Amazing capture',
        'Amazing photo',
        'Wonderful shot',
        'Wonderful picture',
        'Wonderful photo',
    )
    return random.choice(canned)
@click.group()
def cli():
    # root command group; subcommands are registered below via @cli.command()
    pass
@cli.command()
@click.option('--tag', default='landscape', help='Instagram tag')
@click.option('--count', default=100, help='Number of user to follow')
@click.option('--gui/--no-gui', default=True, help='GUI')
def run_follower(tag, count, gui):
    # Like (and sometimes follow the poster of) posts from the tag's explore
    # feed until `count` posts have been liked, with random pauses in between.
    driver = get_driver(gui)
    driver.get("https://www.instagram.com/")
    login(driver, username=username, password=password)
    search(driver, tag=tag)
    liked = 0
    commented = 0
    subscribed = 0
    while liked < count:
        go_to_next_photo(driver)
        was_liked = like_post(driver)
        if was_liked:
            liked += 1
        # if have_like(15) and comment_post(driver, text=get_random_comment()):
        # if comment_post(driver, text=get_random_comment()):
        #     commented += 1
        # follow the poster for roughly a third of the visited posts
        if have_like(33) and subscribe(driver):
            subscribed += 1
        log.info('Liked: {}, Commented: {} Subscribed {}'.format(liked, commented, subscribed))
        # pause longer after an action to mimic human pacing
        if was_liked:
            duration = random.randint(20, 60)
            sleep(duration)
        else:
            duration = random.randint(1, 8)
            sleep(duration)
    driver.close()
@cli.command()
@click.option('--count', default=100, help='Number of user to follow')
@click.option('--gui/--no-gui', default=True, help='GUI')
def run_unfollower(count, gui):
    # Unfollow up to `count` users that were followed more than 14 days ago,
    # then verify on the profile page that the unfollow took effect.
    initial_count = count
    driver = get_driver(gui)
    driver.implicitly_wait(3)
    driver.get("https://www.instagram.com/")
    login(driver, username=username, password=password)
    following_users = (
        Following.select()
        .where(
            Following.is_following == True,
            Following.date_created < datetime.now() - timedelta(days=14)
        )
        .order_by(Following.date_created)
    )
    for following in following_users:
        if count <= 0:
            return
        log.info(
            'Going to unfollow `@{user}` ({date})'.format(
                user=following.name, date=following.date_created
            )
        )
        driver.get("https://www.instagram.com/{name}".format(name=following.name))
        time.sleep(1)
        try:
            unfollow_btn = driver.find_element_by_xpath("//button[text()='Following']")
        except NoSuchElementException:
            # no 'Following' button -> we are not following this profile anymore
            still_following = False
            log.info('Already not following user `@{user}`'.format(user=following.name))
            following.is_following = False
            following.save()
        else:
            log.info('Still following user `@{user}`'.format(user=following.name))
            still_following = True
            unfollow_btn.click()
            duration = random.randint(5, 10)
            sleep(duration)
            try:
                # confirmation dialog Instagram shows before unfollowing
                unfollow_btn = driver.find_element_by_xpath(
                    "//div[@class='piCib']//button[text()='Unfollow']"
                )
            except NoSuchElementException:
                pass
            else:
                still_following = True
                unfollow_btn.click()
                sleep(2)
        tries = 0
        while still_following:
            # reload the profile and inspect the button state to confirm
            driver.refresh()
            try:
                driver.find_element_by_xpath("//button[text()='Follow']")
            except NoSuchElementException:
                pass
            else:
                still_following = False
                count -= 1
            try:
                driver.find_element_by_xpath("//button[text()='Follow Back']")
            except NoSuchElementException:
                pass
            else:
                still_following = False
                count -= 1
            if still_following:
                try:
                    unfollow_btn = driver.find_element_by_xpath("//button[text()='Following']")
                except NoSuchElementException:
                    pass
                else:
                    log.info(
                        'Still following user `@{user}` (tries {tries})'.format(
                            user=following.name,
                            tries=tries
                        )
                    )
                    still_following = True
                    unfollow_btn.click()
            if tries == 0:
                # NOTE(review): tries starts at 0, so this always exits the loop
                # after the first verification pass and `tries` never increases --
                # looks like a leftover guard; confirm whether retries were intended.
                break
            tries += 1
        log.info('-- {count} of {initial_count} users are unfollowed --'.format(
            count=initial_count - count, initial_count=initial_count
        ))
    driver.close()
@cli.command()
def init_db():
db.connect()
db.create_tables([Following, Comment, Like])
if __name__ == "__main__":
cli()
| 26.59719
| 99
| 0.588095
|
import time
import random
import os
import logging
import sys
from datetime import datetime
from datetime import timedelta
from urllib.parse import urlparse
from urllib.parse import urljoin
import click
from selenium import webdriver
from selenium.webdriver.common.keys import Keys
from selenium.common.exceptions import NoSuchElementException
from pyvirtualdisplay import Display
from models import db
from models import Following
from models import Comment
from models import Like
username = os.environ['instagram_username']
password = os.environ['instagram_password']
dir_path = os.path.dirname(os.path.realpath(__file__))
logging.basicConfig(
stream=sys.stdout,
level='INFO',
format='%(asctime)s %(levelname)s:%(name)s:%(message)s'
)
log = logging.getLogger('app')
def sleep(duration):
    """Pause for `duration` seconds, logging first so the pause shows up in output."""
    log.info('Sleeping for {} seconds'.format(duration))
    time.sleep(duration)
def have_like(p):
    """Bernoulli-style coin flip: True with roughly a p-percent chance."""
    roll = random.randint(1, 100)
    return roll < p
def get_url(driver):
    """Return the driver's current URL with query string and fragment stripped."""
    parts = urlparse(driver.current_url)
    base = '{}://{}'.format(parts.scheme, parts.netloc)
    return urljoin(base, parts.path)
def get_driver(gui=True):
    """Build a Chrome WebDriver; headless with a fixed window size when gui=False.

    Uses the chromedriver binary at /usr/local/bin/chromedriver and a generous
    15 s implicit wait so the fragile XPath lookups elsewhere get time to resolve.
    """
    options = webdriver.ChromeOptions()
    if not gui:
        options.add_argument('headless')
        options.add_argument('--no-sandbox')
        options.add_argument('window-size=1200x600')
    driver = webdriver.Chrome(
        executable_path='/usr/local/bin/chromedriver',
        chrome_options=options
    )
    driver.implicitly_wait(15)
    return driver
def login(driver, username, password):
    """Log in to Instagram through the web UI.

    Assumes `driver` is already on the instagram.com landing page. Clicks the
    'Log in' link, fills the credentials and submits with RETURN, then sleeps
    to let the home feed load. NOTE(review): the XPaths target 2018-era markup
    (class 'izU2O') and will break when the site layout changes.
    """
    login_btn = driver.find_element_by_xpath("//p[@class='izU2O']/a[text()='Log in']")
    login_btn.click()
    sleep(5)
    login_input = driver.find_element_by_xpath("//INPUT[@name='username']")
    login_input.send_keys(username)
    password_input = driver.find_element_by_xpath("//INPUT[@type='password']")
    password_input.send_keys(password)
    password_input.send_keys(Keys.RETURN)
    sleep(10)
def search(driver, tag):
    """Open the explore page for `tag` and click the first photo tile to enter the lightbox."""
    driver.get('https://www.instagram.com/explore/tags/{tag}/'.format(tag=tag))
    sleep(4)
    # First tile in the photo grid (position-based XPath; fragile by design of the page).
    first_image = driver.find_element_by_xpath(
        "//article/div[2]/div[1]/div[1]/div[1]"
    )
    first_image.click()
    sleep(2)
def go_to_next_photo(driver):
    """Advance the photo lightbox via the 'Next' arrow.

    A missing button never aborts the crawl loop: on failure a screenshot is
    saved for debugging instead of raising.
    """
    try:
        nex_btn = driver.find_element_by_xpath(
            "//a[contains(@class, coreSpriteRightPaginationArrow)][text()='Next']"
        )
    except Exception:
        driver.save_screenshot('screenshot.png')
    else:
        nex_btn.click()
    time.sleep(1)
def is_already_liked(driver):
    """Return True when the open photo carries no 'Like' control.

    Instagram renders span[aria-label='Like'] only on posts this account has
    not liked yet, so its absence is read as "already liked".
    """
    try:
        driver.find_element_by_xpath("//span[@aria-label='Like']")
    except NoSuchElementException:
        log.info('Picture has already been liked {}'.format(driver.current_url))
        return True
    else:
        log.info('Picture has NOT been liked yet {}'.format(driver.current_url))
        return False
def like_post(driver):
    """Like the currently open post once, recording the URL in the Like table.

    Returns True when a like was actually performed; False when the URL is
    already in the DB or the Like button is missing from the page.
    """
    url = get_url(driver)
    # EAFP dedup: a hit in the Like table means this URL was liked before.
    try:
        Like.select().where(Like.url == url).get()
    except Like.DoesNotExist:
        pass
    else:
        log.info('Post has already been liked {url}'.format(url=url))
        return False
    try:
        like_btn = driver.find_element_by_xpath("//span[@aria-label='Like']")
    except NoSuchElementException:
        log.info('Could not find like button {}'.format(driver.current_url))
        time.sleep(1)
        return False
    else:
        log.info('Found like button. Trying to like {}'.format(driver.current_url))
        like_btn.click()
        Like.create(url=url)
        log.info('Liked picture {url}'.format(url=url))
        return True
def comment_post(driver, text):
    """Post `text` as a comment on the open post, once per URL.

    Returns True when a comment was submitted; False when this URL was already
    commented (Comment table hit) or the comment box is missing. The text is
    injected via JavaScript, then a backspace keystroke is sent so React's
    change handler notices the value before the form is submitted.
    """
    url = get_url(driver)
    try:
        Comment.select().where(Comment.url == url).get()
    except Comment.DoesNotExist:
        pass
    else:
        log.info('Post has already been commented {url}'.format(url=url))
        return False
    try:
        comment_input = driver.find_element_by_xpath('//TEXTAREA[@placeholder="Add a comment…"]')
    except NoSuchElementException as e:
        log.info(e)
        return False
    else:
        driver.execute_script(
            "arguments[0].value = '{} ';".format(text), comment_input
        )
        comment_input.send_keys("\b")
        # Re-find the textarea: the DOM node may have been replaced after the keystroke.
        comment_input = driver.find_element_by_xpath('//TEXTAREA[@placeholder="Add a comment…"]')
        comment_input.submit()
        Comment.create(url=url, comment=text)
        log.info('Commented picture {url} with "{text}"'.format(url=url, text=text))
        time.sleep(1)
        return True
def subscribe(driver):
    """Follow the author of the open post, at most once per user.

    Reads the author's name from the post header; skips users already present
    in the Following table or whose button no longer reads 'Follow'. Returns
    True when a new follow was attempted and recorded.
    """
    name_label = driver.find_element_by_xpath("//article/header//div[@class='e1e1d']/a[text()]")
    name = name_label.text
    follow_btn = driver.find_element_by_xpath("//article/header/div//button[text()]")
    try:
        following = Following.select().where(Following.name == name).get()
    except Following.DoesNotExist:
        pass
    else:
        log.info(
            'Already subscribed on user: @{user} ({following})'.format(
                user=name,
                following=following
            )
        )
        return False
    btn_text = follow_btn.text
    if btn_text == 'Follow':
        log.info('Going to subscribe on user: @{user}'.format(user=name))
        try:
            follow_btn.click()
            time.sleep(1)
        except Exception as e:
            log.info(e)
        else:
            Following.create(name=name)
        return True
    else:
        log.info('Already subscribed on user: @{user}'.format(user=name))
        return False
def get_random_comment():
    """Pick one canned compliment at random to post under a photo."""
    choices = (
        'Nice',
        'Nice photo',
        'Nice picture',
        'Nice capture',
        'Nice image',
        'Nice shot',
        'Great photo',
        'Great job',
        'Awesome picture',
        'awesome shot',
        'Like it',
        'Like this picture',
        'Like this photo',
        'Like this image',
        'Beautiful',
        'Beautiful photo',
        'Beautiful picture',
        'Lovely picture',
        'Lovely photo',
        'Amazing',
        'Amazing shot',
        'Amazing capture',
        'Amazing photo',
        'Wonderful shot',
        'Wonderful picture',
        'Wonderful photo',
    )
    return random.choice(choices)
@click.group()
def cli():
    """Root Click command group; subcommands are registered below via @cli.command()."""
    pass
@cli.command()
@click.option('--tag', default='landscape', help='Instagram tag')
@click.option('--count', default=100, help='Number of user to follow')
@click.option('--gui/--no-gui', default=True, help='GUI')
def run_follower(tag, count, gui):
    """Like posts under `tag` until `count` likes are done, occasionally following authors.

    Walks the tag's photo lightbox; after each photo there is a ~33% chance of
    also following the poster. Sleep lengths are randomized to look less
    bot-like: longer after a like, shorter after a skip.
    NOTE(review): `commented` is logged but never incremented in this loop.
    """
    driver = get_driver(gui)
    driver.get("https://www.instagram.com/")
    login(driver, username=username, password=password)
    search(driver, tag=tag)
    liked = 0
    commented = 0
    subscribed = 0
    while liked < count:
        go_to_next_photo(driver)
        was_liked = like_post(driver)
        if was_liked:
            liked += 1
        # Roughly one follow per three photos.
        if have_like(33) and subscribe(driver):
            subscribed += 1
        log.info('Liked: {}, Commented: {} Subscribed {}'.format(liked, commented, subscribed))
        if was_liked:
            duration = random.randint(20, 60)
            sleep(duration)
        else:
            duration = random.randint(1, 8)
            sleep(duration)
    driver.close()
@cli.command()
@click.option('--count', default=100, help='Number of user to follow')
@click.option('--gui/--no-gui', default=True, help='GUI')
def run_unfollower(count, gui):
    """Unfollow up to `count` users that were followed more than 14 days ago.

    Candidates come from the Following table, oldest first. For each one the
    profile page is opened, the 'Following' button is clicked, the optional
    confirmation dialog is accepted, and the result is verified by refreshing
    and looking for a 'Follow'/'Follow Back' button.
    """
    initial_count = count
    driver = get_driver(gui)
    # Short implicit wait: many lookups below are expected to fail fast.
    driver.implicitly_wait(3)
    driver.get("https://www.instagram.com/")
    login(driver, username=username, password=password)
    following_users = (
        Following.select()
        .where(
            Following.is_following == True,
            Following.date_created < datetime.now() - timedelta(days=14)
        )
        .order_by(Following.date_created)
    )
    for following in following_users:
        if count <= 0:
            return
        log.info(
            'Going to unfollow `@{user}` ({date})'.format(
                user=following.name, date=following.date_created
            )
        )
        driver.get("https://www.instagram.com/{name}".format(name=following.name))
        time.sleep(1)
        try:
            unfollow_btn = driver.find_element_by_xpath("//button[text()='Following']")
        except NoSuchElementException:
            # No 'Following' button: not following any more; sync the DB row.
            still_following = False
            log.info('Already not following user `@{user}`'.format(user=following.name))
            following.is_following = False
            following.save()
        else:
            log.info('Still following user `@{user}`'.format(user=following.name))
            still_following = True
            unfollow_btn.click()
            duration = random.randint(5, 10)
            sleep(duration)
            # Confirmation dialog (not always shown).
            try:
                unfollow_btn = driver.find_element_by_xpath(
                    "//div[@class='piCib']//button[text()='Unfollow']"
                )
            except NoSuchElementException:
                pass
            else:
                still_following = True
                unfollow_btn.click()
                sleep(2)
        tries = 0
        # Verify the unfollow took effect. The loop is effectively single-pass
        # because of the `tries == 0` break at the bottom.
        while still_following:
            driver.refresh()
            try:
                driver.find_element_by_xpath("//button[text()='Follow']")
            except NoSuchElementException:
                pass
            else:
                still_following = False
                count -= 1
            try:
                driver.find_element_by_xpath("//button[text()='Follow Back']")
            except NoSuchElementException:
                pass
            else:
                still_following = False
                count -= 1
            if still_following:
                try:
                    unfollow_btn = driver.find_element_by_xpath("//button[text()='Following']")
                except NoSuchElementException:
                    pass
                else:
                    log.info(
                        'Still following user `@{user}` (tries {tries})'.format(
                            user=following.name,
                            tries=tries
                        )
                    )
                    still_following = True
                    unfollow_btn.click()
            if tries == 0:
                break
            tries += 1
    log.info('-- {count} of {initial_count} users are unfollowed --'.format(
        count=initial_count - count, initial_count=initial_count
    ))
    driver.close()
@cli.command()
def init_db():
    """Create the database schema (Following, Comment and Like tables)."""
    db.connect()
    db.create_tables([Following, Comment, Like])
if __name__ == "__main__":
cli()
| true
| true
|
790664cbc975f154699499faf04f7a7c0e40ed09
| 1,186
|
py
|
Python
|
SoftLayer/CLI/virt/placementgroup/create_options.py
|
dvzrv/softlayer-python
|
9a5f6c6981bcc370084537b4d1769383499ce90d
|
[
"MIT"
] | 126
|
2015-01-05T05:09:22.000Z
|
2021-07-02T00:16:35.000Z
|
SoftLayer/CLI/virt/placementgroup/create_options.py
|
dvzrv/softlayer-python
|
9a5f6c6981bcc370084537b4d1769383499ce90d
|
[
"MIT"
] | 969
|
2015-01-05T15:55:31.000Z
|
2022-03-31T19:55:20.000Z
|
SoftLayer/CLI/virt/placementgroup/create_options.py
|
dvzrv/softlayer-python
|
9a5f6c6981bcc370084537b4d1769383499ce90d
|
[
"MIT"
] | 176
|
2015-01-22T11:23:40.000Z
|
2022-02-11T13:16:58.000Z
|
"""List options for creating Placement Groups"""
# :license: MIT, see LICENSE for more details.
import click
from SoftLayer.CLI import environment
from SoftLayer.CLI import formatting
from SoftLayer.managers.vs_placement import PlacementManager as PlacementManager
@click.command()
@environment.pass_env
def cli(env):
    """List options for creating a placement group."""
    manager = PlacementManager(env.client)
    # Emit two tables: available backend routers, then placement rules.
    routers = manager.get_routers()
    env.fout(get_router_table(routers))
    rules = manager.get_all_rules()
    env.fout(get_rule_table(rules))
def get_router_table(routers):
    """Build the "Available Routers" table from PlacementManager.get_routers() output."""
    table = formatting.Table(['Datacenter', 'Hostname', 'Backend Router Id'], "Available Routers")
    for entry in routers:
        location = entry['topLevelLocation']['longName']
        table.add_row([location, entry['hostname'], entry['id']])
    return table
def get_rule_table(rules):
    """Build the "Rules" table from PlacementManager.get_all_rules() output."""
    table = formatting.Table(['Id', 'KeyName'], "Rules")
    rows = ([entry['id'], entry['keyName']] for entry in rules)
    for row in rows:
        table.add_row(row)
    return table
| 30.410256
| 98
| 0.711636
|
import click
from SoftLayer.CLI import environment
from SoftLayer.CLI import formatting
from SoftLayer.managers.vs_placement import PlacementManager as PlacementManager
@click.command()
@environment.pass_env
def cli(env):
manager = PlacementManager(env.client)
routers = manager.get_routers()
env.fout(get_router_table(routers))
rules = manager.get_all_rules()
env.fout(get_rule_table(rules))
def get_router_table(routers):
table = formatting.Table(['Datacenter', 'Hostname', 'Backend Router Id'], "Available Routers")
for router in routers:
datacenter = router['topLevelLocation']['longName']
table.add_row([datacenter, router['hostname'], router['id']])
return table
def get_rule_table(rules):
table = formatting.Table(['Id', 'KeyName'], "Rules")
for rule in rules:
table.add_row([rule['id'], rule['keyName']])
return table
| true
| true
|
7906654c4de88ccb0d6873323dde8c9eb701060e
| 20,038
|
py
|
Python
|
whisk/test_merge3.py
|
aiporre/whisk
|
e07c381bc5d0df4e5dcabd7d75c0c97d0de3ad2c
|
[
"BSD-3-Clause"
] | null | null | null |
whisk/test_merge3.py
|
aiporre/whisk
|
e07c381bc5d0df4e5dcabd7d75c0c97d0de3ad2c
|
[
"BSD-3-Clause"
] | null | null | null |
whisk/test_merge3.py
|
aiporre/whisk
|
e07c381bc5d0df4e5dcabd7d75c0c97d0de3ad2c
|
[
"BSD-3-Clause"
] | null | null | null |
"""
Author: Nathan Clack
Date : 2009
Copyright (c) 2009 HHMI. Free downloads and distribution are allowed for any
non-profit research and educational purposes as long as proper credit is given
to the author. All other rights reserved.
"""
from .tests import plot_whiskers
from ui.whiskerdata.trace import Whisker_Seg
from numpy import *
import pdb
from functools import reduce
def load():
    """Load the demo movie and its traced whiskers; returns (whiskers, movie).

    Alternate datasets are kept below, commented out, for quick switching.
    """
    from ui.whiskerdata import load_whiskers, load_trajectories
    from ui.genetiff import Reader
    movie = Reader('data/seq/whisker_data_0140.seq',adjuststipple=1)
    w,wid = load_whiskers('seq.whiskers')
    #movie = Reader('../../data/W0.tif',adjuststipple=1)
    #w,wid = load_whiskers('w0-grid.whiskers')
    #w,wid = load_whiskers('whisk-vc/whisk-vc/seq.whiskers')
    #movie = Reader('data/JF8410_041808_001.tif',adjuststipple=1)
    #w,wid = load_whiskers('test.whiskers')
    #movie = Reader('data/lorenz/090519-19a_0035.seq',adjuststipple=1)
    #w,wid = load_whiskers('lorenz.whiskers')
    #w,wid = load_whiskers('results/seq-hand.whiskers')
    #t,tid = load_trajectories('results/seq-hand.trajectories')
    return w,movie
def check_bounds(wvd, shape):
    """Sanity-check every whisker sample in `wvd` against the image `shape`.

    Prints a message and drops into pdb when a sample lies outside the image
    or a whisker's coordinate arrays are not contiguous. Debug helper only.
    """
    height, width = shape[0], shape[1]
    for frame_id, whiskers in wvd.items():
        for _, seg in whiskers.items():
            for x, y, t, s in seg:
                bad_x = x < 0 or x >= width
                bad_y = y < 0 or y >= height
                if bad_x or bad_y:
                    print("out of bounds")
                    pdb.set_trace()
            if not (seg.x.flags.contiguous and seg.y.flags.contiguous):
                print("not contiguous")
                pdb.set_trace()
def fix(wvd,movie,scale=2, signal_per_pixel = 0, max_dist = 60, max_angle = 20.*pi/180.):
    """Resolve whisker collisions and bridge gaps for every frame, in place.

    For each frame: build a CollisionTable, keep the collision-resolved set,
    then join gap candidates (choose_gaps) into single whiskers. The frame's
    dict is rebuilt with fresh integer ids. Returns the mutated `wvd`.
    """
    shape = movie[0].shape
    for fid,wv in list(wvd.items()):
        print(fid)
        table = CollisionTable( wv, shape, scale )
        r = set( resolution( table, wv ) )
        for j,l in choose_gaps(movie[fid],r,signal_per_pixel,max_dist,max_angle):
            # j is the (left, joiner, right) triple; fuse it into one segment.
            e = reduce( Whisker_Seg.join, j )
            r.discard( j[0] )
            r.discard( j[-1] )
            r.add(e)
        wvd[fid] = dict( [ p for p in enumerate(r) ] )
    return wvd
def compute_join_length( px, py, tlow = 0.0, thigh = 1.0 ):
    """Arc length of the parametric curve (px(t), py(t)) over [tlow, thigh].

    Integrates sqrt(x'(t)^2 + y'(t)^2) numerically with scipy's quad.
    """
    from scipy.integrate import quad
    dpx = polyder( px, 1 )
    dpy = polyder( py, 1 )
    speed_sq = polyadd( polymul( dpx, dpx ), polymul( dpy, dpy ) )
    return quad( lambda t: sqrt( polyval( speed_sq, t ) ), tlow, thigh )[0]
def compute_join_curvature( px, py ):
    """Total absolute curvature of the parametric curve (px(t), py(t)), t in [0,1].

    Integrates |x'y'' - y'x''| / (x'^2 + y'^2)^(3/2).
    Bug fix: the numerator of the signed-curvature formula is the difference
    x'(t)*y''(t) - y'(t)*x''(t); the original used polyadd (a plus sign),
    which over-reports curvature whenever both terms are nonzero.
    """
    from scipy.integrate import quad
    xp  = polyder( px, 1 )
    xpp = polyder( px, 2 )
    yp  = polyder( py, 1 )
    ypp = polyder( py, 2 )
    pn = polysub( polymul( xp, ypp ), polymul( yp, xpp ) ) # numerator: x'y'' - y'x''
    pd = polyadd( polymul( xp, xp ) , polymul( yp, yp ) )  # denominator base: x'^2 + y'^2
    integrand = lambda t: fabs(polyval( pn, t )/( polyval( pd, t )**(1.5)) )
    return quad(integrand, 0, 1) [0]
def compute_join_angle( px, py ):
    """Integral over t in [0, 1] of the tangent angle atan2(y'(t), x'(t))."""
    from scipy.integrate import quad
    dx = polyder( px, 1 )
    dy = polyder( py, 1 )
    angle_at = lambda t: arctan2( polyval( dy, t ), polyval( dx, t ) )
    return quad( angle_at, 0, 1 )[0]
def _compute_intensity( im, x, y ):
if ( x<0 ).any() or \
( x>=im.shape[1] ).any() or \
( y<0 ).any() or \
( y>=im.shape[0] ).any():
return inf
p = set( p for p in zip(x,y) )
score = 0
for j,i in p:
score += im[i,j]
return score/len(p)
def compute_join_intensity( im, px, py ):
    """Mean intensity of `im` sampled at 50 rounded points along (px(t), py(t)).

    Bug fix: the y samples were computed from `px` (copy-paste error), so the
    curve was effectively evaluated along y == x; use `py` for the y
    coordinate, matching compute_join_score's use of both polynomials.
    """
    tt = linspace(0,1,50)
    x = array( [round(polyval(px,t)) for t in tt] )
    y = array( [round(polyval(py,t)) for t in tt] )
    return _compute_intensity(im,x,y)
def compute_join_score( im, px, py, thick = 2 ):
    """Contrast score of the join curve: center intensity vs. two offset rails.

    Samples 50 points along (px(t), py(t)); compares the on-curve intensity
    against two parallel curves offset by `thick` pixels along the normal.
    More negative is a better (darker-than-surround) whisker-like ridge.
    """
    tt = linspace(0, 1, 50)
    cx = polyval(px, tt)
    cy = polyval(py, tt)
    # Finite-difference tangent; first sample duplicated to keep lengths equal.
    tx = diff(cx)
    ty = diff(cy)
    tx = r_[tx[0], tx]
    ty = r_[ty[0], ty]
    norm = sqrt(tx**2 + ty**2)
    center = _compute_intensity(im, cx, cy)
    rail_a = _compute_intensity(im, cx + thick*ty/norm, cy - thick*tx/norm)
    rail_b = _compute_intensity(im, cx - thick*ty/norm, cy + thick*tx/norm)
    return (2*center - rail_a - rail_b) / 4.0
def solve_polynomial_join( left, right, reverse = 0):
    """
    Solves for a parametric cubic polynomial curve joining the right side of left
    to the left side of right. The curve matches slope and position at it's
    boundaries and is parameterized from 0 to 1; 0 being the left boundary and 1
    being the right.

    method: parametric cubic matching position and slope of endpoints.
    This ends up being cheap to compute, since the matrix is
    known (interval of parameter is always 0 to 1) and so the
    inverse can be precomputed.
    minv is inverse of m, where:
    m = array( [ [   a**3,   a**2, a, 1 ],
                 [   b**3,   b**2, b, 1 ],
                 [ 3*a**2, 2*a   , 1, 0 ],
                 [ 3*b**2, 2*b   , 1, 0 ] ] )
    is the matrix for the linear system:
    m * coeff = v,
    with v = [ x(0) x(1) dx/dt(0) dx/dt(1) ].
    Here a = 0 and b = 1 so m and it's inverse is always the same.

    Bug fix: the border-region sizes nl/nr were computed with `/`, which is
    true division under Python 3 and yields floats that raise TypeError when
    used as slice bounds; `//` restores the intended integer sizes.
    """
    minv = matrix( [[ 2., -2., 1., 1.],
                    [-3., 3., -2., -1.],
                    [ 0., 0., 1., 0.],
                    [ 1., 0., 0., 0.]])
    #take care of cases joining very short segements
    lr = len(right)
    ll = len(left)
    nl = ll//4  # number of boundary samples used to estimate the slope
    nr = lr//4
    slope = lambda v: v[ 0] - v[-1] # want the total change over the length
    length = lambda x,y: hypot(diff(x),diff(y)).sum() # euclidian distance in pixels
    #
    # Compute slope at boundary.
    # Uses a number of points near the boundary to compute slope.
    # Need to account for edge cases where one or both sides
    # consist of very few points.
    #
    if nr < 2 and nl < 2:
        lnorm = length( left.x , left.y )
        rnorm = length( right.x , right.y )
        dly = diff( left.y ).mean() / lnorm
        dlx = diff( left.x ).mean() / lnorm
        dry = diff(right.y ).mean() / rnorm
        drx = diff(right.x ).mean() / rnorm
        nl = 0
        nr = lr - 1
    elif nr < 2: # use the derivative on the other side
        lnorm = length( left.x[:nl], left.y[:nl] )
        rnorm = length( right.x , right.y )
        dly = -slope( left.y[(-nl):] ) / lnorm
        dlx = -slope( left.x[(-nl):] ) / lnorm
        dry = diff(right.y ).mean() / rnorm
        drx = diff(right.x ).mean() / rnorm
        nr = lr - 1
    elif nl < 2: # use the derivative on the other side
        rnorm = length( right.x[:nr], right.y[:nr] )
        lnorm = length( left.x , left.y )
        dry = -slope(right.y[:nr] ) / rnorm
        drx = -slope(right.x[:nr] ) / rnorm
        dly = diff( left.y ).mean() / lnorm
        dlx = diff( left.x ).mean() / lnorm
        nl = 0
    else: # the "normal" case
        rnorm = length( right.x[:nr], right.y[:nr] ) # Compute path length of right border region
        lnorm = length( left.x[(-nl):], left.y[(-nl):] ) # Compute path length of left border region
        dry = -slope(right.y[:nr] ) / rnorm # Compute dy/dl for right side
        drx = -slope(right.x[:nr] ) / rnorm # etc...
        dly = -slope( left.y[(-nl):] ) / lnorm
        dlx = -slope( left.x[(-nl):] ) / lnorm
    # Chord lengths used as fallbacks when a slope came out non-finite
    # (e.g. a zero-length border region).
    rnorm = hypot( left.x[0] - right.x[0], left.y[0] - right.y[0] )
    lnorm = hypot( left.x[-1]- right.x[0], left.y[-1]- right.y[0] )
    if not isfinite(dlx): dlx =(left.x[0] - right.x[0])/lnorm
    if not isfinite(dly): dly =(left.y[0] - right.y[0])/lnorm
    if not isfinite(drx): drx =(left.x[-1] - right.x[0])/rnorm
    if not isfinite(dry): dry =(left.y[-1] - right.y[0])/rnorm
    if reverse:
        dlx = -dlx
        dly = -dly
        drx = -drx
        dry = -dry
    ry = right.y[ 0] ## right.y[nr]
    ly = left.y[-1 ] ## left.y[-nl]
    rx = right.x[ 0] ## right.x[nr]
    lx = left.x[-1 ] ## left.x[-nl]
    L = hypot( rx-lx, ry-ly ) # Approximate dl/dt
    print("L:%g"%L)
    yv = matrix( [[ ly ],
                  [ ry ],
                  [ dly * L ], # dy/dt = dy/dl * dl/dt
                  [ dry * L ]])
    xv = matrix( [[ lx ],
                  [ rx ],
                  [ dlx * L ],
                  [ drx * L ]])
    cx = minv*xv
    cy = minv*yv
    if not (isfinite(cx).any() and isfinite(cy).any()):
        pdb.set_trace()
    return [array(t).squeeze() for t in (cx,cy)]
def plot_join(px,py,*args,**kwargs):
    """Plot the join curve (px(t), py(t)) sampled at 50 points; extra args go to pylab.plot."""
    from pylab import plot, polyval
    ts = linspace(0, 1, 50)
    xs = polyval(px, ts)
    ys = polyval(py, ts)
    plot(xs, ys, *args, **kwargs)
def plot_test(px,py,thick=2):
    """Plot the join curve plus the two offset 'rails' used by compute_join_score."""
    from pylab import plot
    tt = linspace(0,1,50)
    dpx = polyder(px)
    dpy = polyder(py)
    dL2 = polymul(dpx,dpx) + polymul(dpy,dpy)  # NOTE(review): unused
    ux = polyval( px,tt )
    uy = polyval( py,tt )
    dx = diff(ux) #polyval( px,tt )
    dy = diff(uy) #polyval( py,tt )
    # Duplicate the first difference so dx/dy keep the same length as tt.
    dx = r_[dx[0],dx]
    dy = r_[dy[0],dy]
    dL = sqrt( dx**2 + dy**2 )
    plot( ux, uy , '.-')
    plot( ux + thick*dy/dL , uy - thick*dx/dL ,'-')
    plot( ux - thick*dy/dL , uy + thick*dx/dL ,'-' )
def filter_ends( wv, min_score, shape, border = 10 ):
    """
    Return candidate ends for joining.
    Yields (Whisker_Seg, side) pairs, side being 0 (left end) or -1 (right end).
    An end qualifies when it sits strictly more than `border` pixels inside the
    image and its tracing score exceeds `min_score`.
    """
    maxy, maxx = [dim - border for dim in shape]
    minx = miny = border
    def endpoint_ok(seg, side):
        x, y = seg.x[side], seg.y[side]
        inside = minx < x < maxx and miny < y < maxy
        return inside and seg.scores[side] > min_score
    for seg in wv:
        for side in (0, -1):
            if endpoint_ok(seg, side):
                yield seg, side
def plot_candidate_ends(im, wv, min_score, border = 10):
    """Overlay candidate join endpoints on the current pylab figure.

    Left ends are red circles, right ends are green squares, each labelled
    with its index within its group.
    """
    from pylab import plot, imshow, cm, ion,ioff, show, text
    left,right = group_ends( list(filter_ends(wv,min_score,im.shape, border)) )
    ioff()
    #imshow(im,cmap=cm.gray,hold=0)
    m = {0:'ro',-1:'gs'}  # marker style per side
    for i,e in enumerate(left):
        s = 0
        text(e.x[s],e.y[s],str(i),color=m[s][0])
        plot([e.x[s]],[e.y[s]],m[s])
    for i,e in enumerate(right):
        s = -1
        text(e.x[s],e.y[s],str(i),color=m[s][0])
        plot([e.x[s]],[e.y[s]],m[s])
    show()
    ion()
def group_ends( ends ):
    """Split (whisker, side) pairs into (left-end whiskers, right-end whiskers)."""
    lefts = []
    rights = []
    for seg, side in ends:
        if side == 0:
            lefts.append(seg)
        elif side == -1:
            rights.append(seg)
    return lefts, rights
def end_direction(w, side, n=16):
    """Mean (dx, dy) step over up to `n` samples nearest the given end of `w`.

    side 0 averages the first samples; any other side averages the last ones
    (excluding the final point, mirroring the original slicing).
    """
    lo = 0
    hi = min(n, len(w))
    if side != 0:
        lo, hi = -hi, -1
    dx = diff(w.x[lo:hi]).mean()
    dy = diff(w.y[lo:hi]).mean()
    return dx, dy
def make_joining_whisker(px,py,dist,lthick,lscore,rthick,rscore):
    """Materialize the polynomial join as a Whisker_Seg of about `dist` samples.

    Coordinates come from the join polynomials; thickness and score are
    linearly interpolated between the two joined ends.
    """
    ts = linspace(0, 1, round(dist))
    seg = Whisker_Seg()
    seg.x = polyval(px, ts).astype(float32)
    seg.y = polyval(py, ts).astype(float32)
    seg.thick = polyval([rthick - lthick, lthick], ts).astype(float32)
    seg.scores = polyval([rscore - lscore, lscore], ts).astype(float32)
    return seg
def choose_gaps(im,wv, signal_per_pixel = 0.0, max_dist=60, max_angle = pi/4.):
    """Propose whisker pairs worth joining across a gap.

    For each (left-end a, right-end b) pair that is close enough (max_dist),
    roughly collinear (max_angle, with forward projection), and whose
    connecting cubic scores better than -signal_per_pixel on the image,
    yields ((b, joining_whisker, a), score).
    """
    left,right = group_ends( list(filter_ends(wv,100,im.shape)) )
    # Tangent angle at a whisker end, via atan2(dy, dx).
    theta = lambda w,side: reduce(arctan2, reversed( end_direction(w,side) ) )
    dtheta = lambda left,right: fabs(theta(left,0) - theta(right,-1))
    for i,a in enumerate(left):
        for j,b in enumerate(right):
            dx = a.x[ 0]-b.x[-1]
            dy = a.y[ 0]-b.y[-1]
            d = hypot(dx,dy)
            dth = dtheta(a,b)
            v = end_direction(a,0)
            norm = hypot(*v)
            # Projection of the gap vector onto a's end direction (>0 means b lies "ahead").
            proj = dot( v/norm, (dx,dy) )
            # jth: angle change from a to direct line joining a,b
            jth = fabs(arctan2( hypot(*( dx-proj*v[0]/norm, dy-proj*v[1]/norm )) , proj ))
            #print i,j,
            #print "\tD: %g Proj: %g Theta: %g"%(d,proj,jth*180/pi)
            l=0;
            if d < max_dist and jth < max_angle and proj > 0:
                px,py = solve_polynomial_join( b, a )
                l = compute_join_score(im,px,py)
                if l < -signal_per_pixel:
                    #plot_test(px,py)
                    print("\tScore: %g Theta: %g"%(l,jth*180/pi))
                    e = make_joining_whisker(px,py,d,b.thick[-1],b.scores[-1],a.thick[ 0],a.scores[ 0])
                    yield (b,e,a),l
def gap_measures(im,wv):
    """Diagnostic: pairwise gap metrics between all candidate ends.

    Returns matrices (d, l, cx, cy) indexed [left_i, right_j]: endpoint
    distance, join score, and coefficient-space distances between the join
    polynomial and each whisker's self-extrapolation.
    """
    # Norm over the non-constant polynomial coefficients.
    pmetric = lambda p: sqrt(dot(p[:-1],p[:-1]))
    left,right = group_ends( list(filter_ends(wv,100,im.shape)) )
    shape = (len(left),len(right) )
    d = zeros( shape )
    l = zeros( shape )
    c = zeros( shape )
    cx = zeros( shape )
    cy = zeros( shape )
    for i,a in enumerate(left):
        for j,b in enumerate(right):
            dx = a.x[0 ]-b.x[-1]
            dy = a.y[0 ]-b.y[-1]
            d[i,j] = hypot(dx,dy)
            px,py = solve_polynomial_join( b, a )
            lpx,lpy = solve_polynomial_join( a, a, reverse = 1 )
            rpx,rpy = solve_polynomial_join( b, b, reverse = 1 )
            cx[i,j] = max( pmetric( px - lpx ) , pmetric( px - rpx ) )
            # NOTE(review): cy reuses pmetric(px - lpx); likely meant py - lpy — confirm.
            cy[i,j] = max( pmetric( px - lpx ) , pmetric( py - rpy ) )
            #l[i,j] = compute_join_length(px,py)
            l[i,j] = compute_join_score(im,px,py)
            plot_test(px,py)
            #c[i,j] = compute_join_curvature(px,py)
            #if sqrt( px[0]**2 + py[0]**2 ) < 50.0:
            #  plot_join(px,py)
    return d,l,cx,cy
def trace_overlap(xxx_todo_changeme, xxx_todo_changeme1, thresh = 2.0 ):
    """Find the overlapping index ranges of two whiskers around a shared point.

    Arguments are (whisker_a, index_a) and (whisker_b, index_b) — a known
    collision point on each trace (the odd parameter names are 2to3
    tuple-unpacking artifacts). Walks outward in both directions, greedily
    choosing the move that keeps the point-to-point distance under `thresh`,
    and returns ([a_start, a_end], [b_start, b_end]) index bounds of the
    overlapped stretch on each whisker.
    """
    # DONE: does not assume that indexes run along same direction
    (wa,i) = xxx_todo_changeme
    (wb,j) = xxx_todo_changeme1
    def dist(ia,ib):
        # Euclidean distance between sample ia of wa and sample ib of wb.
        a,b = wa[ia], wb[ib]
        return hypot( a[0] - b[0], a[1] - b[1] )
    # determine relative direction of indexing
    ia,ib = i,j
    if ia == len(wa)-1 or ib == len(wb)-1:
        # At least one start point sits on a whisker end; pick neighbors that exist.
        if ia != 0 and ib != 0:
            dax = wa.x[ia-1] - wa.x[ia]
            day = wa.y[ia-1] - wa.y[ia]
            dbx = wb.x[ib-1] - wb.x[ib]
            dby = wb.y[ib-1] - wb.y[ib]
        elif ia == 0:
            dax = wa.x[ia+1] - wa.x[ia]
            day = wa.y[ia+1] - wa.y[ia]
            dbx = - wb.x[ib-1] + wb.x[ib]
            dby = - wb.y[ib-1] + wb.y[ib]
        elif ib == 0:
            dax = - wa.x[ia-1] + wa.x[ia]
            day = - wa.y[ia-1] + wa.y[ia]
            dbx = wb.x[ib+1] - wb.x[ib]
            dby = wb.y[ib+1] - wb.y[ib]
    else:
        dax = wa.x[ia+1] - wa.x[ia]
        day = wa.y[ia+1] - wa.y[ia]
        dbx = wb.x[ib+1] - wb.x[ib]
        dby = wb.y[ib+1] - wb.y[ib]
    stepa = -1; #only need to keep track of one direction
    enda = 0;
    notend = lambda i,n: i>n
    if( abs(dax) > abs(day) ): #determine by x change
        if( dax*dbx < 0 ): #have different signs
            stepa = 1
            enda = len(wa)
            notend = lambda i,n: i<n-1
    else: #determine by y change
        if( day*dby < 0 ): #have different signs
            stepa = 1
            enda = len(wa)
            notend = lambda i,n: i<n-1
    bnda = [i,i]
    bndb = [j,j]
    # First pass: walk toward decreasing ib, tracking the best-matching move.
    ms = 0
    while ms < thresh and notend(ia,enda) and ib > 0:
        moves = ( ( ia + stepa, ib - 1 ),
                  ( ia + stepa, ib ),
                  ( ia , ib - 1 ) )
        scores = [dist( iam, ibm ) for iam, ibm in moves]
        ms = min(scores)
        for idx,s in enumerate( scores ): #choose best move
            if s == ms:
                ia,ib = moves[idx]
                break
    #relax at boundary, move downhill
    if not notend(ia,enda) and ib == 0:
        pass
    elif not notend(ia,enda):
        last = ms
        s = dist( ia, ib - 1 )
        while s < last and ib > 1:
            ib -= 1
            last = s
            s = dist( ia, ib - 1 )
    elif ib == 0:
        last = ms
        s = dist( ia + stepa, ib )
        while s < last and notend(ia,enda-stepa):
            ia += stepa
            last = s
            s = dist( ia + stepa, ib )
    bnda[0] = ia
    bndb[0] = ib
    #flip direction
    if stepa == -1:
        stepa = 1
        enda = len(wa)
        notend = lambda i,n:i<n-1
    else:
        stepa = -1
        enda = 0
        notend = lambda i,n: i>n
    ia,ib = i,j
    # Second pass: walk toward increasing ib from the original start point.
    ms = 0
    while ms < thresh and notend(ia,enda) and ib < len(wb)-1:
        moves = ( ( ia + stepa, ib + 1 ),
                  ( ia + stepa, ib ),
                  ( ia , ib + 1 ) )
        scores = [dist( iam, ibm ) for iam, ibm in moves]
        ms = min(scores)
        for idx,s in enumerate(scores):
            if s == ms:
                ia,ib = moves[idx]
                break
    #relax at boundary, move downhill
    if not notend(ia,enda) and ib == len(wb)-1:
        pass
    elif not notend(ia,enda):
        last = ms
        s = dist( ia, ib + 1 )
        while s < last and ib < len(wb)-2:
            ib += 1
            last = s
            s = dist( ia, ib + 1 )
    elif ib == len(wb)-1:
        last = ms
        s = dist( ia + stepa, ib )
        while s < last and notend(ia,enda-stepa):
            ia += stepa
            last = s
            s = dist( ia + stepa, ib )
    bnda[1] = ia
    bndb[1] = ib
    bnda.sort()
    return bnda, bndb
def resolution(table, wvd):
    """Yield the collision-resolved set of whiskers for one frame.

    Pops collision groups from `table` (its __next__ is destructive), keeps
    the winners chosen by merge(), removes losers from the table, and finally
    yields every whisker that was never involved in a collision.
    """
    rest = set(wvd.values())
    match = next(table)
    while match:
        keep,discard = merge(match)
        if discard:
            for a in discard:
                table.remove( a )
        for a in keep:
            yield a
        # Anything seen in a collision group is no longer "untouched".
        for a,i in match:
            rest.discard(a)
        match = next(table)
    for a in rest:
        yield a
def pairwise_merge( match ):
    """Decide between two colliding whiskers when one fully contains the other.

    `match` holds two (whisker, index) pairs. If the traced overlap spans
    (almost) the whole of either whisker — within an 8-sample overhang — the
    whisker with the larger total score wins. Returns (wa, None), (None, wb),
    or (None, None) when neither contains the other.
    """
    overhang = 8
    wa = match[0][0]
    wb = match[1][0]
    bnda, bndb = trace_overlap(*match)
    iscomplete = lambda bnd,w: bnd[0] < overhang and bnd[1] >= len(w)-overhang
    if iscomplete(bnda,wa) or iscomplete(bndb,wb):
        sa = wa.scores.sum()
        sb = wb.scores.sum()
        if sa > sb:
            return wa,None
        else:
            return None,wb
    return None,None
def merge( match ):
    """Partition a collision group into whiskers to keep and to discard.

    Runs pairwise_merge over every pair in `match`; a whisker that loses any
    pairwise contest is marked dependent (dep == 1) and lands in the discard
    list. Returns (keep_list, discard_list).
    """
    dep = dict( [ (e[0],0) for e in match ] )
    #iterate through all pairs and mark those who are contained in another whisker
    # The pairwise merge should impose a strict ordering
    match = list(match)
    for i,ma in enumerate(match):
        for j,mb in enumerate(match[ (i+1): ]):
            ra,rb = pairwise_merge( (ma,mb) )
            if ra or rb:
                if not ra:
                    dep[ma[0]] = 1
                if not rb:
                    dep[mb[0]] = 1
    # partition into two sets. Those to keep and those to discard.
    # Those to keep depend on none of the others.
    return [ k for k,v in dep.items() if v==0 ], \
           [ k for k,v in dep.items() if v!=0 ]
class CollisionTable(object):
    """Spatial hash from coarse pixel bins to the (whisker, index) samples in them.

    Bins are scale-by-scale pixel squares; a bin holding samples from more
    than one whisker marks a collision. Iterating the table pops collision
    groups destructively (visited bins are deleted from the map).
    """
    def __init__(self, wvd, shape, scale):
        """ `wvd` may be either a dict or list of whiskers """
        object.__init__(self)
        self._map = {}          # bin key -> set of (whisker, sample_index)
        self._shape = shape
        self._scale = scale
        # NOTE(review): under Python 3, shape[1]/scale is a float, making bin
        # keys floats; integer division was probably intended — confirm.
        self._stride = stride = shape[1]/scale
        self.topx = lambda p: int(p[0]/scale) + stride * int(p[1]/scale)
        self._build_inverse_table( wvd )
    def _build_inverse_table(self, wvd ):
        # Accept either {id: whisker} or a plain iterable of whiskers.
        g = enumerate(wvd)
        if isinstance(wvd, dict):
            g = iter(wvd.items())
        for i,w in g:
            self.add(w)
    def update( self, changes ):
        """ Changes is a dict mapping old whisker segments to new segments """
        last = None
        for w,p in changes.items():
            self.remove(w)
            if p:
                self.add(p[0]) # add back ends
                self.add(p[-1])
                last = p[1]
        if last:
            self.add(last) # add back last middle
    def add(self, w):
        # Insert every sample of w, keeping at most one hit per bin per whisker.
        if not w: return
        hash = lambda e: enumerate( map(self.topx,list(zip(e.x,e.y))) )
        for i,px in hash(w):
            self._map.setdefault(px,set()).add( (w,i) )
        for i,px in hash(w): # scan back through and remove repeat hits on a pixel
            for x in [e for e in self._map[px] if e[0] == w][1:]:
                self._map[px].remove(x)
    def remove(self, w):
        # Drop every sample of w from the table; missing bins are ignored.
        if not w: return
        hash = lambda e: enumerate( map(self.topx,list(zip(e.x,e.y))) )
        for i,px in hash(w):
            s = self._map.get(px)
            if s:
                s.discard( (w,i) )
    def __iter__(self):
        # Drain collision groups until __next__ returns None.
        m = next(self)
        while m:
            yield m
            m = next(self)
    def __next__(self):
        """ This changes the inverse table by removing hits.
        Returns a (Whisker_Seg, index),(Whisker_Seg, index)... tuple
        or None, if done.
        """
        todelete = []
        retval = None
        for px,s in self._map.items():
            todelete.append(px) # get rid of references to visited pixels
            if len(s) > 1:
                retval = s
                break
        for k in todelete:
            del self._map[k]
        return retval
    def counts( self ):
        # Collision-count image at table resolution (debug/plotting aid).
        tosc = lambda e: e/self._scale
        im = zeros(list(map(tosc, self._shape)))
        imr = im.ravel()
        for px,s in self._map.items():
            imr[px] = len(s) #len(set( [e for e,i in s] ))
        return im
| 32.529221
| 99
| 0.550155
|
from .tests import plot_whiskers
from ui.whiskerdata.trace import Whisker_Seg
from numpy import *
import pdb
from functools import reduce
def load():
from ui.whiskerdata import load_whiskers, load_trajectories
from ui.genetiff import Reader
movie = Reader('data/seq/whisker_data_0140.seq',adjuststipple=1)
w,wid = load_whiskers('seq.whiskers')
return w,movie
def check_bounds(wvd,shape):
for fid, wv in wvd.items():
for i,w in wv.items():
for x,y,t,s in w:
if x<0 or x>=shape[1] or y<0 or y>=shape[0]:
print("out of bounds")
pdb.set_trace()
if not ( w.x.flags.contiguous and w.y.flags.contiguous ):
print("not contiguous")
pdb.set_trace()
def fix(wvd,movie,scale=2, signal_per_pixel = 0, max_dist = 60, max_angle = 20.*pi/180.):
shape = movie[0].shape
for fid,wv in list(wvd.items()):
print(fid)
table = CollisionTable( wv, shape, scale )
r = set( resolution( table, wv ) )
for j,l in choose_gaps(movie[fid],r,signal_per_pixel,max_dist,max_angle):
e = reduce( Whisker_Seg.join, j )
r.discard( j[0] )
r.discard( j[-1] )
r.add(e)
wvd[fid] = dict( [ p for p in enumerate(r) ] )
return wvd
def compute_join_length( px, py, tlow = 0.0, thigh = 1.0 ):
from scipy.integrate import quad
xp = polyder( px, 1 )
yp = polyder( py, 1 )
xp2 = polymul( xp, xp )
yp2 = polymul( yp, yp )
p = polyadd( xp2, yp2 )
integrand = lambda t: sqrt( polyval( p, t ) )
return quad(integrand, tlow, thigh) [0]
def compute_join_curvature( px, py ):
    """Total absolute curvature of the parametric curve (px(t), py(t)), t in [0,1].

    Bug fix: the signed-curvature numerator is x'(t)*y''(t) - y'(t)*x''(t);
    the original used polyadd (a plus sign), over-reporting curvature
    whenever both terms are nonzero.
    """
    from scipy.integrate import quad
    xp  = polyder( px, 1 )
    xpp = polyder( px, 2 )
    yp  = polyder( py, 1 )
    ypp = polyder( py, 2 )
    pn = polysub( polymul( xp, ypp ), polymul( yp, xpp ) )
    pd = polyadd( polymul( xp, xp ) , polymul( yp, yp ) )
    integrand = lambda t: fabs(polyval( pn, t )/( polyval( pd, t )**(1.5)) )
    return quad(integrand, 0, 1) [0]
def compute_join_angle( px, py ):
from scipy.integrate import quad
xp = polyder( px, 1 )
yp = polyder( py, 1 )
integrand = lambda t: arctan2(polyval(yp, t), polyval(xp, t))
return quad(integrand, 0, 1) [0]
def _compute_intensity( im, x, y ):
if ( x<0 ).any() or \
( x>=im.shape[1] ).any() or \
( y<0 ).any() or \
( y>=im.shape[0] ).any():
return inf
p = set( p for p in zip(x,y) )
score = 0
for j,i in p:
score += im[i,j]
return score/len(p)
def compute_join_intensity( im, px, py ):
    """Mean intensity of `im` sampled at 50 rounded points along (px(t), py(t)).

    Bug fix: the y samples were computed from `px` (copy-paste error); use
    `py` for the y coordinate.
    """
    tt = linspace(0,1,50)
    x = array( [round(polyval(px,t)) for t in tt] )
    y = array( [round(polyval(py,t)) for t in tt] )
    return _compute_intensity(im,x,y)
def compute_join_score( im, px, py, thick = 2 ):
tt = linspace(0,1,50)
dpx = polyder(px)
dpy = polyder(py)
dL2 = polymul(dpx,dpx) + polymul(dpy,dpy)
ux = polyval( px,tt )
uy = polyval( py,tt )
dx = diff(ux)
dy = diff(uy)
dx = r_[dx[0],dx]
dy = r_[dy[0],dy]
dL = sqrt( dx**2 + dy**2 )
a = _compute_intensity(im, ux, uy )
b = _compute_intensity(im, ux + thick*dy/dL , uy - thick*dx/dL )
c = _compute_intensity(im, ux - thick*dy/dL , uy + thick*dx/dL )
return (2*a - b - c)/4.0
def solve_polynomial_join( left, right, reverse = 0):
    """Fit a cubic Hermite join between two whisker segments.

    Returns (px, py): cubic coefficient arrays (numpy polyval order) for a
    curve running from the tail of `left` (t=0) to the head of `right`
    (t=1), matching position and estimated tangent direction at both ends.
    `left`/`right` expose .x/.y arrays and len(); `reverse` negates both end
    tangents (used by gap_measures for self-extrapolation).

    NOTE(review): a corrupted line in the original source was reconstructed
    here as the ly/ry/lx/rx/L definitions below -- verify against upstream.
    """
    # Inverse cubic Hermite basis: maps [p(0), p(1), p'(0), p'(1)] to
    # polynomial coefficients, highest power first (polyval convention).
    minv = matrix( [[ 2., -2.,  1.,  1.],
                    [-3.,  3., -2., -1.],
                    [ 0.,  0.,  1.,  0.],
                    [ 1.,  0.,  0.,  0.]])
    lr = len(right)
    ll = len(left)
    # Number of samples used to estimate each end tangent.  BUG FIX: integer
    # division -- the original Python 2 "/" yields floats under Python 3,
    # which breaks the comparisons and slice bounds below.
    nl = ll//4
    nr = lr//4
    slope = lambda v: v[ 0] - v[-1]
    length = lambda x,y: hypot(diff(x),diff(y)).sum()
    if nr < 2 and nl < 2:
        # Both segments are tiny: use mean steps over the whole segments.
        lnorm = length( left.x , left.y )
        rnorm = length( right.x , right.y )
        dly = diff( left.y ).mean() / lnorm
        dlx = diff( left.x ).mean() / lnorm
        dry = diff(right.y ).mean() / rnorm
        drx = diff(right.x ).mean() / rnorm
        nl = 0
        nr = lr - 1
    elif nr < 2:
        lnorm = length( left.x[:nl], left.y[:nl] )
        rnorm = length( right.x , right.y )
        dly = -slope( left.y[(-nl):] ) / lnorm
        dlx = -slope( left.x[(-nl):] ) / lnorm
        dry = diff(right.y ).mean() / rnorm
        drx = diff(right.x ).mean() / rnorm
        nr = lr - 1
    elif nl < 2:
        rnorm = length( right.x[:nr], right.y[:nr] )
        lnorm = length( left.x , left.y )
        dry = -slope(right.y[:nr] ) / rnorm
        drx = -slope(right.x[:nr] ) / rnorm
        dly = diff( left.y ).mean() / lnorm
        dlx = diff( left.x ).mean() / lnorm
        nl = 0
    else:
        # Normal case: estimate tangents from the facing quarter of each
        # segment, normalized by its arc length; the minus sign orients the
        # slope() difference toward the gap.
        rnorm = length( right.x[:nr], right.y[:nr] )
        lnorm = length( left.x[(-nl):], left.y[(-nl):] )
        dry = -slope(right.y[:nr] ) / rnorm
        drx = -slope(right.x[:nr] ) / rnorm
        dly = -slope( left.y[(-nl):] ) / lnorm
        dlx = -slope( left.x[(-nl):] ) / lnorm
    # Fallbacks when a tangent estimate is degenerate (e.g. zero arc length):
    # point each tangent along the chord between the segments.
    rnorm = hypot( left.x[0] - right.x[0], left.y[0] - right.y[0] )
    lnorm = hypot( left.x[-1]- right.x[0], left.y[-1]- right.y[0] )
    if not isfinite(dlx): dlx =(left.x[0] - right.x[0])/lnorm
    if not isfinite(dly): dly =(left.y[0] - right.y[0])/lnorm
    if not isfinite(drx): drx =(left.x[-1] - right.x[0])/rnorm
    if not isfinite(dry): dry =(left.y[-1] - right.y[0])/rnorm
    if reverse:
        dlx = -dlx
        dly = -dly
        drx = -drx
        dry = -dry
    # Endpoints of the join and the gap length L used to scale the unit
    # tangents into Hermite end derivatives.  (Reconstructed from a
    # corrupted source line.)
    ly = left.y[-1]
    ry = right.y[ 0]
    lx = left.x[-1]
    rx = right.x[ 0]
    L = hypot( rx-lx, ry-ly )
    print("L:%g"%L)
    yv = matrix( [[ ly ],
                  [ ry ],
                  [ dly * L ],
                  [ dry * L ]])
    xv = matrix( [[ lx ],
                  [ rx ],
                  [ dlx * L ],
                  [ drx * L ]])
    cx = minv*xv
    cy = minv*yv
    if not (isfinite(cx).any() and isfinite(cy).any()):
        pdb.set_trace()
    return [array(t).squeeze() for t in (cx,cy)]
def plot_join(px,py,*args,**kwargs):
    """Plot the join curve (polyval(px,t), polyval(py,t)), t in [0, 1].

    Extra positional/keyword arguments are forwarded to pylab.plot.
    """
    from pylab import plot, polyval
    ts = linspace(0, 1, 50)
    xs = polyval(px, ts)
    ys = polyval(py, ts)
    plot(xs, ys, *args, **kwargs)
def plot_test(px,py,thick=2):
    """Plot a join curve together with its two offset rails.

    Draws the curve (dotted line) plus two parallel curves displaced
    +/- `thick` pixels along the local normal -- the same three paths
    compute_join_score samples.
    """
    from pylab import plot
    ts = linspace(0, 1, 50)
    cx = polyval(px, ts)
    cy = polyval(py, ts)
    # Finite-difference steps; duplicate the first so lengths match.
    sx = diff(cx)
    sy = diff(cy)
    sx = r_[sx[0], sx]
    sy = r_[sy[0], sy]
    ds = sqrt(sx ** 2 + sy ** 2)
    # (sy, -sx)/ds is the unit normal to the curve.
    # NOTE: removed an unused dpx/dpy/dL2 polynomial computation (dead code
    # that could raise for mismatched polynomial degrees).
    plot(cx, cy, '.-')
    plot(cx + thick * sy / ds, cy - thick * sx / ds, '-')
    plot(cx - thick * sy / ds, cy + thick * sx / ds, '-')
def filter_ends( wv, min_score, shape, border = 10 ):
    """Yield (whisker, side) pairs whose endpoint is eligible for joining.

    side is 0 (head) or -1 (tail).  An endpoint qualifies when it lies
    strictly more than `border` pixels inside the image and its end score
    exceeds `min_score`.  `shape` is the image (height, width).
    """
    hi_y = shape[0] - border
    hi_x = shape[1] - border
    for e in wv:
        for s in (0, -1):
            ex, ey = e.x[s], e.y[s]
            inside = border < ex < hi_x and border < ey < hi_y
            if inside and e.scores[s] > min_score:
                yield e, s
def plot_candidate_ends(im, wv, min_score, border = 10):
    """Overlay candidate join endpoints on the current pylab figure.

    Heads (side 0) are drawn as red circles, tails (side -1) as green
    squares, each labeled with its index within its group.
    """
    from pylab import plot, imshow, cm, ion,ioff, show, text
    left, right = group_ends(list(filter_ends(wv, min_score, im.shape, border)))
    marker = {0: 'ro', -1: 'gs'}
    ioff()
    for side, group in ((0, left), (-1, right)):
        sty = marker[side]
        for k, e in enumerate(group):
            text(e.x[side], e.y[side], str(k), color=sty[0])
            plot([e.x[side]], [e.y[side]], sty)
    show()
    ion()
def group_ends( ends ):
    """Split (whisker, side) pairs into (heads, tails) by side 0 vs -1."""
    heads = [e for e, s in ends if s == 0]
    tails = [e for e, s in ends if s == -1]
    return heads, tails
def end_direction(w, side, n=16):
    """Mean (dx, dy) step over up to `n` samples at one end of whisker `w`.

    side == 0 averages over the head of the trace; any other value uses the
    tail window (which, matching the original slice w.x[-m:-1], excludes
    the very last sample).
    """
    m = min(n, len(w))
    if side == 0:
        xs, ys = w.x[0:m], w.y[0:m]
    else:
        xs, ys = w.x[-m:-1], w.y[-m:-1]
    return diff(xs).mean(), diff(ys).mean()
def make_joining_whisker(px,py,dist,lthick,lscore,rthick,rscore):
    """Build a Whisker_Seg tracing the cubic join (px, py).

    Samples round(dist) points along t in [0, 1]; thickness and score are
    linearly interpolated from the left-end values to the right-end values.
    """
    npts = round(dist)
    ts = linspace(0, 1, npts)
    # Linear blend from a (t=0) to b (t=1), cast to the whisker dtype.
    lerp = lambda a, b: polyval([b - a, a], ts).astype(float32)
    w = Whisker_Seg()
    w.x = polyval(px, ts).astype(float32)
    w.y = polyval(py, ts).astype(float32)
    w.thick = lerp(lthick, rthick)
    w.scores = lerp(lscore, rscore)
    return w
def choose_gaps(im,wv, signal_per_pixel = 0.0, max_dist=60, max_angle = pi/4.):
    # Propose joins across gaps between whisker fragments.
    #
    # For each (head of `a`, tail of `b`) endpoint pair that is close enough
    # (< max_dist), roughly collinear with a's head tangent (< max_angle) and
    # in front of it (proj > 0), fit a cubic join and keep it when its image
    # contrast score beats -signal_per_pixel.  Yields ((b, joiner, a), score).
    #
    # NOTE(review): the endpoint score threshold 100 is hard-coded here and in
    # gap_measures -- presumably tuned for 8-bit images; verify.
    left,right = group_ends( list(filter_ends(wv,100,im.shape)) )
    # reduce(arctan2, reversed((dx, dy))) == arctan2(dy, dx): tangent angle.
    theta = lambda w,side: reduce(arctan2, reversed( end_direction(w,side) ) )
    dtheta = lambda left,right: fabs(theta(left,0) - theta(right,-1))
    for i,a in enumerate(left):
        for j,b in enumerate(right):
            dx = a.x[ 0]-b.x[-1]
            dy = a.y[ 0]-b.y[-1]
            d = hypot(dx,dy)            # gap size between the two endpoints
            dth = dtheta(a,b)           # end-angle difference (currently unused below)
            v = end_direction(a,0)      # tangent estimate at a's head
            norm = hypot(*v)
            proj = dot( v/norm, (dx,dy) )   # gap component along that tangent
            # Angle between the gap vector and a's head tangent.
            jth = fabs(arctan2( hypot(*( dx-proj*v[0]/norm, dy-proj*v[1]/norm )) , proj ))
            l=0;
            if d < max_dist and jth < max_angle and proj > 0:
                px,py = solve_polynomial_join( b, a )
                l = compute_join_score(im,px,py)
                # More negative score == stronger whisker-like contrast.
                if l < -signal_per_pixel:
                    print("\tScore: %g Theta: %g"%(l,jth*180/pi))
                    e = make_joining_whisker(px,py,d,b.thick[-1],b.scores[-1],a.thick[ 0],a.scores[ 0])
                    yield (b,e,a),l
def gap_measures(im,wv):
    """Diagnostic table of candidate joins between all (head, tail) end pairs.

    Returns (d, l, cx, cy) arrays indexed [i, j] over left ends i and right
    ends j: endpoint distance, join contrast score, and the coefficient-space
    deviation of the join polynomial from each side's self-extrapolation in
    x and y.  Also plots each candidate via plot_test.
    """
    # Norm over all but the constant coefficient (position offset ignored).
    pmetric = lambda p: sqrt(dot(p[:-1],p[:-1]))
    left,right = group_ends( list(filter_ends(wv,100,im.shape)) )
    shape = (len(left),len(right) )
    d = zeros( shape )
    l = zeros( shape )
    cx = zeros( shape )
    cy = zeros( shape )
    for i,a in enumerate(left):
        for j,b in enumerate(right):
            dx = a.x[0 ]-b.x[-1]
            dy = a.y[0 ]-b.y[-1]
            d[i,j] = hypot(dx,dy)
            px,py = solve_polynomial_join( b, a )
            # Self-extrapolations of each side (reversed tangents).
            lpx,lpy = solve_polynomial_join( a, a, reverse = 1 )
            rpx,rpy = solve_polynomial_join( b, b, reverse = 1 )
            cx[i,j] = max( pmetric( px - lpx ) , pmetric( px - rpx ) )
            # BUG FIX: the y deviation previously mixed in an x-coefficient
            # difference (px - lpx); compare the y polynomials on both sides.
            cy[i,j] = max( pmetric( py - lpy ) , pmetric( py - rpy ) )
            l[i,j] = compute_join_score(im,px,py)
            plot_test(px,py)
    # (An unused `c = zeros(shape)` scratch array was removed.)
    return d,l,cx,cy
def trace_overlap(xxx_todo_changeme, xxx_todo_changeme1, thresh = 2.0 ):
    # Find the overlapping stretch of two whisker traces.
    #
    # (wa, i) and (wb, j) are (whisker, sample index) pairs seeding the
    # correspondence.  Starting from the seed, samples are greedily matched
    # outward in both directions while matched samples stay within `thresh`
    # pixels of each other.  Returns ([a_lo, a_hi], [b_lo, b_hi]): index
    # bounds of the overlap on wa and wb respectively.
    #
    # (The odd parameter names are an artifact of 2to3's removal of tuple
    # parameters.)  wa[k] is assumed to yield an (x, y)-indexable sample --
    # presumably Whisker_Seg.__getitem__; verify against its definition.
    (wa,i) = xxx_todo_changeme
    (wb,j) = xxx_todo_changeme1
    def dist(ia,ib):
        # Euclidean distance between sample ia of wa and sample ib of wb.
        a,b = wa[ia], wb[ib]
        return hypot( a[0] - b[0], a[1] - b[1] )
    ia,ib = i,j
    # Estimate local step vectors at the seed so we can decide whether the
    # two traces run in the same or opposite parameter directions.  One-sided
    # differences are used at trace ends.
    if ia == len(wa)-1 or ib == len(wb)-1:
        if ia != 0 and ib != 0:
            dax = wa.x[ia-1] - wa.x[ia]
            day = wa.y[ia-1] - wa.y[ia]
            dbx = wb.x[ib-1] - wb.x[ib]
            dby = wb.y[ib-1] - wb.y[ib]
        elif ia == 0:
            dax = wa.x[ia+1] - wa.x[ia]
            day = wa.y[ia+1] - wa.y[ia]
            dbx = - wb.x[ib-1] + wb.x[ib]
            dby = - wb.y[ib-1] + wb.y[ib]
        elif ib == 0:
            dax = - wa.x[ia-1] + wa.x[ia]
            day = - wa.y[ia-1] + wa.y[ia]
            dbx = wb.x[ib+1] - wb.x[ib]
            dby = wb.y[ib+1] - wb.y[ib]
    else:
        dax = wa.x[ia+1] - wa.x[ia]
        day = wa.y[ia+1] - wa.y[ia]
        dbx = wb.x[ib+1] - wb.x[ib]
        dby = wb.y[ib+1] - wb.y[ib]
    # Default: step wa backwards while wb is stepped backwards (ib -> 0).
    stepa = -1;
    enda = 0;
    notend = lambda i,n: i>n
    # Compare the dominant component of the step vectors; opposite signs
    # mean the traces are parameterized in opposite directions, so wa must
    # be stepped forwards instead.
    if( abs(dax) > abs(day) ):
        if( dax*dbx < 0 ):
            stepa = 1
            enda = len(wa)
            notend = lambda i,n: i<n-1
    else:
        if( day*dby < 0 ):
            stepa = 1
            enda = len(wa)
            notend = lambda i,n: i<n-1
    bnda = [i,i]
    bndb = [j,j]
    # Phase 1: walk from the seed toward wb's head (ib decreasing), choosing
    # at each step the move (advance a, advance b, or both) with minimum
    # sample distance, until the match exceeds thresh or a trace end is hit.
    ms = 0
    while ms < thresh and notend(ia,enda) and ib > 0:
        moves = ( ( ia + stepa, ib - 1 ),
                  ( ia + stepa, ib ),
                  ( ia , ib - 1 ) )
        scores = [dist( iam, ibm ) for iam, ibm in moves]
        ms = min(scores)
        for idx,s in enumerate( scores ):
            if s == ms:
                ia,ib = moves[idx]
                break
    # If exactly one trace ran out, slide along the other while the distance
    # keeps shrinking, to tighten the boundary.
    if not notend(ia,enda) and ib == 0:
        pass
    elif not notend(ia,enda):
        last = ms
        s = dist( ia, ib - 1 )
        while s < last and ib > 1:
            ib -= 1
            last = s
            s = dist( ia, ib - 1 )
    elif ib == 0:
        last = ms
        s = dist( ia + stepa, ib )
        while s < last and notend(ia,enda-stepa):
            ia += stepa
            last = s
            s = dist( ia + stepa, ib )
    bnda[0] = ia
    bndb[0] = ib
    # Phase 2: repeat in the opposite direction (toward wb's tail), with
    # wa's step direction flipped.
    if stepa == -1:
        stepa = 1
        enda = len(wa)
        notend = lambda i,n:i<n-1
    else:
        stepa = -1
        enda = 0
        notend = lambda i,n: i>n
    ia,ib = i,j
    ms = 0
    while ms < thresh and notend(ia,enda) and ib < len(wb)-1:
        moves = ( ( ia + stepa, ib + 1 ),
                  ( ia + stepa, ib ),
                  ( ia , ib + 1 ) )
        scores = [dist( iam, ibm ) for iam, ibm in moves]
        ms = min(scores)
        for idx,s in enumerate(scores):
            if s == ms:
                ia,ib = moves[idx]
                break
    if not notend(ia,enda) and ib == len(wb)-1:
        pass
    elif not notend(ia,enda):
        last = ms
        s = dist( ia, ib + 1 )
        while s < last and ib < len(wb)-2:
            ib += 1
            last = s
            s = dist( ia, ib + 1 )
    elif ib == len(wb)-1:
        last = ms
        s = dist( ia + stepa, ib )
        while s < last and notend(ia,enda-stepa):
            ia += stepa
            last = s
            s = dist( ia + stepa, ib )
    bnda[1] = ia
    bndb[1] = ib
    # wa may have been walked in either direction; normalize the bounds.
    bnda.sort()
    return bnda, bndb
def resolution(table, wvd):
    # Resolve collisions recorded in a CollisionTable.
    #
    # Repeatedly pulls a collision group from `table` (next() pops bins until
    # it finds one with >1 whisker sample), merges the group, removes the
    # superseded whiskers from the table, and yields the survivors.  Whiskers
    # that were never part of any collision are yielded unchanged at the end.
    #
    # NOTE(review): a whisker kept across several collision groups appears
    # to be yielded once per group -- confirm callers tolerate duplicates
    # (merge_all at the top of this file rebuilds a dict, so they collapse).
    rest = set(wvd.values())
    match = next(table)
    while match:
        keep,discard = merge(match)
        if discard:
            for a in discard:
                table.remove( a )
        for a in keep:
            yield a
        # Anything that took part in a collision is no longer "untouched".
        for a,i in match:
            rest.discard(a)
        match = next(table)
    for a in rest:
        yield a
def pairwise_merge( match ):
    """Decide which of two overlapping whiskers survives.

    `match` holds two (whisker, index) pairs.  When the traced overlap
    covers (almost) all of either whisker -- within `overhang` samples of
    both ends -- the whisker with the larger total score wins and the other
    is dropped.  Returns (survivor_a_or_None, survivor_b_or_None); both are
    None when neither is fully covered.
    """
    overhang = 8
    wa, wb = match[0][0], match[1][0]
    bnda, bndb = trace_overlap(*match)
    covers = lambda bnd, w: bnd[0] < overhang and bnd[1] >= len(w) - overhang
    if covers(bnda, wa) or covers(bndb, wb):
        if wa.scores.sum() > wb.scores.sum():
            return wa, None
        return None, wb
    return None, None
def merge( match ):
    """Merge a group of mutually colliding whiskers pairwise.

    Runs pairwise_merge over every pair in `match` (a collection of
    (whisker, index) entries) and marks a whisker as superseded whenever
    some pairwise decision went against it.  Returns (keep, discard):
    whiskers that survived every comparison, and those that did not.
    """
    pairs = list(match)
    superseded = dict((m[0], 0) for m in pairs)
    for i, ma in enumerate(pairs):
        for mb in pairs[i + 1:]:
            ra, rb = pairwise_merge((ma, mb))
            if ra or rb:
                # A decision was made: whichever side came back empty loses.
                if not ra:
                    superseded[ma[0]] = 1
                if not rb:
                    superseded[mb[0]] = 1
    keep = [w for w, dead in superseded.items() if dead == 0]
    discard = [w for w, dead in superseded.items() if dead != 0]
    return keep, discard
class CollisionTable(object):
    """Spatial hash of whisker samples for collision detection.

    Maps coarse pixel bins (image downscaled by `scale`) to the set of
    (whisker, sample_index) entries falling in them; a bin holding samples
    from more than one whisker is a collision.  Iterating the table pops
    bins destructively, yielding each colliding set once.
    """

    def __init__(self, wvd, shape, scale):
        """wvd: dict or iterable of whiskers; shape: image (h, w); scale: bin size in px."""
        object.__init__(self)
        self._map = {}
        self._shape = shape
        self._scale = scale
        # BUG FIX: integer division.  Under Python 3 the original "/" made
        # the stride (and hence every bin key) a float, which broke the
        # raveled indexing and zeros() call in counts().
        self._stride = stride = shape[1]//scale
        # Flatten a (x, y) point to a raveled bin index (row-major).
        self.topx = lambda p: int(p[0]/scale) + stride * int(p[1]/scale)
        self._build_inverse_table( wvd )

    def _build_inverse_table(self, wvd ):
        # Accept either a dict (id -> whisker) or a plain iterable of whiskers.
        g = enumerate(wvd)
        if isinstance(wvd, dict):
            g = iter(wvd.items())
        for i,w in g:
            self.add(w)

    def update( self, changes ):
        """Apply {old_whisker: replacement_pieces} edits to the table.

        NOTE(review): only the middle piece of the *last* change is re-added;
        presumably the trailing `if last` belongs inside the loop -- verify.
        """
        last = None
        for w,p in changes.items():
            self.remove(w)
            if p:
                self.add(p[0])
                self.add(p[-1])
                last = p[1]
        if last:
            self.add(last)

    def add(self, w):
        """Insert every sample of whisker `w`, keeping one entry per (bin, whisker)."""
        if not w: return
        binned = lambda e: enumerate( map(self.topx, list(zip(e.x, e.y))) )
        for i,px in binned(w):
            self._map.setdefault(px, set()).add( (w,i) )
        # Deduplicate: drop all but the first entry of `w` in each bin.
        for i,px in binned(w):
            for x in [e for e in self._map[px] if e[0] == w][1:]:
                self._map[px].remove(x)

    def remove(self, w):
        """Remove every sample of whisker `w` from the table."""
        if not w: return
        binned = lambda e: enumerate( map(self.topx, list(zip(e.x, e.y))) )
        for i,px in binned(w):
            s = self._map.get(px)
            if s:
                s.discard( (w,i) )

    def __iter__(self):
        m = next(self)
        while m:
            yield m
            m = next(self)

    def __next__(self):
        """Pop bins until one holding a collision (>1 sample) is found.

        Returns that set of (whisker, index) entries, or None when the table
        is exhausted.  All scanned bins (including the hit) are deleted.
        """
        todelete = []
        retval = None
        for px,s in self._map.items():
            todelete.append(px)
            if len(s) > 1:
                retval = s
                break
        for k in todelete:
            del self._map[k]
        return retval

    def counts( self ):
        """Downscaled image whose pixels hold the number of samples per bin."""
        tosc = lambda e: e//self._scale
        im = zeros(list(map(tosc, self._shape)))
        imr = im.ravel()
        for px,s in self._map.items():
            imr[px] = len(s)
        return im
| true
| true
|
790665b03cae28ec6403cb0697d66a4ee43fe3ae
| 4,283
|
py
|
Python
|
otp/launcher/DownloadWatcher.py
|
itsyaboyrocket/pirates
|
6ca1e7d571c670b0d976f65e608235707b5737e3
|
[
"BSD-3-Clause"
] | 3
|
2021-02-25T06:38:13.000Z
|
2022-03-22T07:00:15.000Z
|
otp/launcher/DownloadWatcher.py
|
itsyaboyrocket/pirates
|
6ca1e7d571c670b0d976f65e608235707b5737e3
|
[
"BSD-3-Clause"
] | null | null | null |
otp/launcher/DownloadWatcher.py
|
itsyaboyrocket/pirates
|
6ca1e7d571c670b0d976f65e608235707b5737e3
|
[
"BSD-3-Clause"
] | 1
|
2021-02-25T06:38:17.000Z
|
2021-02-25T06:38:17.000Z
|
# uncompyle6 version 3.2.0
# Python bytecode 2.4 (62061)
# Decompiled from: Python 2.7.14 (v2.7.14:84471935ed, Sep 16 2017, 20:19:30) [MSC v.1500 32 bit (Intel)]
# Embedded file name: otp.launcher.DownloadWatcher
from direct.task import Task
from otp.otpbase import OTPLocalizer
from direct.gui.DirectGui import *
from pandac.PandaModules import *
from direct.showbase.DirectObject import DirectObject
class DownloadWatcher(DirectObject):
    """On-screen download progress display: a status label plus a percent bar.

    Subscribes to 'launcherPercentPhaseComplete' events and mirrors them in
    the widgets until cleanup() is called.
    """
    __module__ = __name__

    def __init__(self, phaseNames):
        # Map of phase id -> human-readable phase name.
        self.phaseNames = phaseNames
        white = (1, 1, 1, 1)
        self.text = DirectLabel(
            relief=None, guiId='DownloadWatcherText', pos=(-0.96, 0, -0.91),
            text=OTPLocalizer.DownloadWatcherInitializing, text_fg=white,
            text_scale=0.05, textMayChange=1, text_align=TextNode.ALeft,
            sortOrder=50)
        self.bar = DirectWaitBar(
            guiId='DownloadWatcherBar', pos=(-0.81, 0, -0.96),
            relief=DGG.SUNKEN, frameSize=(-0.6, 0.6, -0.1, 0.1),
            borderWidth=(0.02, 0.02), scale=0.25, range=100, sortOrder=50,
            frameColor=(0.5, 0.5, 0.5, 0.5), barColor=(0.2, 0.7, 0.2, 0.5),
            text='0%', text_scale=0.16, text_fg=white,
            text_align=TextNode.ACenter, text_pos=(0, -0.05))
        self.accept('launcherPercentPhaseComplete', self.update)

    def update(self, phase, percent, reqByteRate, actualByteRate):
        """Event handler: refresh the label and bar for the given phase."""
        phaseName = self.phaseNames[phase]
        self.text['text'] = OTPLocalizer.DownloadWatcherUpdate % phaseName
        self.bar['text'] = '%s %%' % percent
        self.bar['value'] = percent

    def cleanup(self):
        """Destroy both widgets and stop listening for events."""
        self.text.destroy()
        self.bar.destroy()
        self.ignoreAll()
| 99.604651
| 318
| 0.257063
|
from direct.task import Task
from otp.otpbase import OTPLocalizer
from direct.gui.DirectGui import *
from pandac.PandaModules import *
from direct.showbase.DirectObject import DirectObject
class DownloadWatcher(DirectObject):
__module__ = __name__
def __init__(self, phaseNames):
self.phaseNames = phaseNames
self.text = DirectLabel(relief=None, guiId='DownloadWatcherText', pos=(-0.96, 0, -0.91), text=OTPLocalizer.DownloadWatcherInitializing, text_fg=(1,
1,
1,
1), text_scale=0.05, textMayChange=1, text_align=TextNode.ALeft, sortOrder=50)
self.bar = DirectWaitBar(guiId='DownloadWatcherBar', pos=(-0.81, 0, -0.96), relief=DGG.SUNKEN, frameSize=(-0.6, 0.6, -0.1, 0.1), borderWidth=(0.02,
0.02), scale=0.25, range=100, sortOrder=50, frameColor=(0.5,
0.5,
0.5,
0.5), barColor=(0.2,
0.7,
0.2,
0.5), text='0%', text_scale=0.16, text_fg=(1,
1,
1,
1), text_align=TextNode.ACenter, text_pos=(0, -0.05))
self.accept('launcherPercentPhaseComplete', self.update)
return
def update(self, phase, percent, reqByteRate, actualByteRate):
phaseName = self.phaseNames[phase]
self.text['text'] = OTPLocalizer.DownloadWatcherUpdate % phaseName
self.bar['text'] = '%s %%' % percent
self.bar['value'] = percent
def cleanup(self):
self.text.destroy()
self.bar.destroy()
self.ignoreAll()
| true
| true
|
790667ab3346fc7bd8fc01edc7ecb50ef8669b01
| 9,962
|
py
|
Python
|
Allura/allura/model/index.py
|
rohankumardubey/allura
|
9c490a051ca912d28b81ce656441d6fed100cb24
|
[
"Apache-2.0"
] | null | null | null |
Allura/allura/model/index.py
|
rohankumardubey/allura
|
9c490a051ca912d28b81ce656441d6fed100cb24
|
[
"Apache-2.0"
] | null | null | null |
Allura/allura/model/index.py
|
rohankumardubey/allura
|
9c490a051ca912d28b81ce656441d6fed100cb24
|
[
"Apache-2.0"
] | null | null | null |
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import unicode_literals
from __future__ import absolute_import
import re
import logging
from itertools import groupby
from six.moves.cPickle import dumps, loads
from collections import defaultdict
from six.moves.urllib.parse import unquote
import bson
import pymongo
from tg import tmpl_context as c
from ming import collection, Field, Index
from ming import schema as S
from ming.utils import LazyProperty
from ming.orm import session, mapper
from ming.orm import ForeignIdProperty, RelationProperty
from allura.lib import helpers as h
from .session import main_doc_session, main_orm_session
from .project import Project
import six
log = logging.getLogger(__name__)
# Collection definitions
ArtifactReferenceDoc = collection(
    str('artifact_reference'), main_doc_session,
    Field('_id', str),  # the referenced artifact's index_id()
    # Locator for the referenced artifact: pickled class + ids to find it.
    Field('artifact_reference', dict(
        cls=S.Binary(),
        project_id=S.ObjectId(),
        app_config_id=S.ObjectId(),
        artifact_id=S.Anything(if_missing=None))),
    # index_id()s of artifacts that link TO this artifact
    Field('references', [str], index=True),
    Index('artifact_reference.project_id'),  # used in ReindexCommand
    )

ShortlinkDoc = collection(
    str('shortlink'), main_doc_session,
    Field('_id', S.ObjectId()),
    # index needed for from_artifact() and index_tasks.py:del_artifacts
    Field('ref_id', str, index=True),
    Field('project_id', S.ObjectId()),
    Field('app_config_id', S.ObjectId()),
    Field('link', str),  # the artifact's shorthand_id()
    Field('url', str),   # the artifact's url()
    # used by from_links() More helpful to have project_id first, for other
    # queries
    Index('project_id', 'link'),
    )
# Class definitions
class ArtifactReference(object):
    """ORM class over ArtifactReferenceDoc: one row per artifact index_id,
    holding a pickled locator for the artifact plus the index ids that
    reference it."""

    @classmethod
    def from_artifact(cls, artifact):
        '''Upsert logic to generate an ArtifactReference object from an artifact'''
        obj = cls.query.get(_id=artifact.index_id())
        if obj is not None:
            return obj
        try:
            obj = cls(
                _id=artifact.index_id(),
                artifact_reference=dict(
                    cls=bson.Binary(dumps(artifact.__class__, protocol=2)),
                    project_id=artifact.app_config.project_id,
                    app_config_id=artifact.app_config._id,
                    artifact_id=artifact._id))
            session(obj).flush(obj)
            return obj
        except pymongo.errors.DuplicateKeyError:  # pragma no cover
            # A concurrent insert won the race: discard ours, return the winner.
            session(obj).expunge(obj)
            return cls.query.get(_id=artifact.index_id())

    @LazyProperty
    def artifact(self):
        '''Look up the artifact referenced'''
        aref = self.artifact_reference
        try:
            # The class was pickled at reference-creation time; unpickle it
            # and query within the owning project's context.
            cls = loads(six.binary_type(aref.cls))
            with h.push_context(aref.project_id):
                return cls.query.get(_id=aref.artifact_id)
        except Exception:
            # Returns None on failure (artifact class gone, bad pickle, ...).
            log.exception('Error loading artifact for %s: %r',
                          self._id, aref)
class Shortlink(object):
    '''Collection mapping shorthand_ids for artifacts to ArtifactReferences'''

    # Regexes used to find shortlinks
    _core_re = r'''(\[
            (?:(?P<project_id>.*?):)? # optional project ID
            (?:(?P<app_id>.*?):)? # optional tool ID
            (?P<artifact_id>.*) # artifact ID
        \])'''
    re_link_1 = re.compile(r'\s' + _core_re, re.VERBOSE)
    re_link_2 = re.compile(r'^' + _core_re, re.VERBOSE)

    def __repr__(self):
        return '<Shortlink %s %s %s -> %s>' % (
            self.project_id,
            self.app_config_id,
            self.link,
            self.ref_id)

    @classmethod
    def lookup(cls, link):
        # Single-link convenience wrapper around from_links().
        return cls.from_links(link)[link]

    @classmethod
    def from_artifact(cls, a):
        # Upsert the shortlink row for artifact `a`, refreshing link/url.
        # Returns None (and deletes the row) when `a` has no shorthand_id.
        result = cls.query.get(ref_id=a.index_id())
        if result is None:
            try:
                result = cls(
                    ref_id=a.index_id(),
                    project_id=a.app_config.project_id,
                    app_config_id=a.app_config._id)
                session(result).flush(result)
            except pymongo.errors.DuplicateKeyError:  # pragma no cover
                # A concurrent insert won the race; reuse the winner.
                session(result).expunge(result)
                result = cls.query.get(ref_id=a.index_id())
        result.link = a.shorthand_id()
        result.url = a.url()
        if result.link is None:
            result.delete()
            return None
        return result

    @classmethod
    def from_links(cls, *links):
        '''Convert a sequence of shortlinks to the matching Shortlink objects'''
        if len(links):
            result = {}
            # Parse all the links
            parsed_links = dict((link, cls._parse_link(link))
                                for link in links)
            links_by_artifact = defaultdict(list)
            project_ids = set()
            for link, d in list(parsed_links.items()):
                if d:
                    project_ids.add(d['project_id'])
                    links_by_artifact[unquote(d['artifact'])].append(d)
                else:
                    # Unparseable link: leave it mapped to None in the result.
                    result[link] = parsed_links.pop(link)
            q = cls.query.find(
                dict(
                    link={'$in': list(links_by_artifact.keys())},
                    project_id={'$in': list(project_ids)}
                ),
                validate=False,
                sort=[('_id', pymongo.DESCENDING)],  # if happen to be multiple (ticket move?) have newest first
            )
            matches_by_artifact = dict(
                (link, list(matches))
                for link, matches in groupby(q, key=lambda s: unquote(s.link)))
            for link, d in six.iteritems(parsed_links):
                matches = matches_by_artifact.get(unquote(d['artifact']), [])
                # Keep only candidates whose project/neighborhood match and
                # whose tool is still installed.
                matches = (
                    m for m in matches
                    if m.project.shortname == d['project'] and
                    m.project.neighborhood_id == d['nbhd'] and
                    m.app_config is not None and
                    m.project.app_instance(m.app_config.options.mount_point))
                if d['app']:
                    matches = (
                        m for m in matches
                        if m.app_config.options.mount_point == d['app'])
                result[link] = cls._get_correct_match(link, list(matches))
            return result
        else:
            return {}

    @classmethod
    def _get_correct_match(cls, link, matches):
        # Disambiguate multiple matching shortlinks, preferring the current
        # app's (c.app) match; falls back to the first (newest) match.
        result = None
        if len(matches) == 1:
            result = matches[0]
        elif len(matches) > 1 and getattr(c, 'app', None):
            # use current app's link
            for m in matches:
                if m.app_config_id == c.app.config._id:
                    result = m
                    break
            if not result:
                cls.log_ambiguous_link('Can not remove ambiguity for link %s with c.app %s', matches, link, c.app)
                result = matches[0]
        elif len(matches) > 1 and not getattr(c, 'app', None):
            cls.log_ambiguous_link('Ambiguous link to %s and c.app is not present to remove ambiguity', matches, link)
            result = matches[0]
        return result

    @classmethod
    def log_ambiguous_link(cls, msg, matches, *args):
        log.warn(msg, *args)
        for m in matches:
            log.warn('... %r', m)

    @classmethod
    def _parse_link(cls, s):
        '''Parse a shortlink into its nbhd/project/app/artifact parts'''
        s = s.strip()
        if s.startswith('['):
            s = s[1:]
        if s.endswith(']'):
            s = s[:-1]
        parts = s.split(':')
        # Defaults come from the current request context, when present.
        p_shortname = None
        p_id = None
        p_nbhd = None
        if getattr(c, 'project', None):
            p_shortname = getattr(c.project, 'shortname', None)
            p_id = getattr(c.project, '_id', None)
            p_nbhd = c.project.neighborhood_id
        if len(parts) == 3:
            # project:app:artifact
            p = Project.query.get(shortname=parts[0], neighborhood_id=p_nbhd)
            if p:
                p_id = p._id
            return dict(
                nbhd=p_nbhd,
                project=parts[0],
                project_id=p_id,
                app=parts[1],
                artifact=parts[2])
        elif len(parts) == 2:
            # app:artifact within the current project
            return dict(
                nbhd=p_nbhd,
                project=p_shortname,
                project_id=p_id,
                app=parts[0],
                artifact=parts[1])
        elif len(parts) == 1:
            # bare artifact id within the current project
            return dict(
                nbhd=p_nbhd,
                project=p_shortname,
                project_id=p_id,
                app=None,
                artifact=parts[0])
        else:
            return None
# Mapper definitions
# Bind the ORM classes above to their ming collections in main_orm_session.
mapper(ArtifactReference, ArtifactReferenceDoc, main_orm_session)
# Shortlink gains relations to its ArtifactReference, Project and AppConfig.
mapper(Shortlink, ShortlinkDoc, main_orm_session, properties=dict(
    ref_id=ForeignIdProperty(ArtifactReference),
    project_id=ForeignIdProperty('Project'),
    app_config_id=ForeignIdProperty('AppConfig'),
    project=RelationProperty('Project'),
    app_config=RelationProperty('AppConfig'),
    ref=RelationProperty(ArtifactReference)))
| 36.225455
| 118
| 0.57639
|
from __future__ import unicode_literals
from __future__ import absolute_import
import re
import logging
from itertools import groupby
from six.moves.cPickle import dumps, loads
from collections import defaultdict
from six.moves.urllib.parse import unquote
import bson
import pymongo
from tg import tmpl_context as c
from ming import collection, Field, Index
from ming import schema as S
from ming.utils import LazyProperty
from ming.orm import session, mapper
from ming.orm import ForeignIdProperty, RelationProperty
from allura.lib import helpers as h
from .session import main_doc_session, main_orm_session
from .project import Project
import six
log = logging.getLogger(__name__)
ArtifactReferenceDoc = collection(
str('artifact_reference'), main_doc_session,
Field('_id', str),
Field('artifact_reference', dict(
cls=S.Binary(),
project_id=S.ObjectId(),
app_config_id=S.ObjectId(),
artifact_id=S.Anything(if_missing=None))),
Field('references', [str], index=True),
Index('artifact_reference.project_id'),
)
ShortlinkDoc = collection(
str('shortlink'), main_doc_session,
Field('_id', S.ObjectId()),
Field('ref_id', str, index=True),
Field('project_id', S.ObjectId()),
Field('app_config_id', S.ObjectId()),
Field('link', str),
Field('url', str),
Index('project_id', 'link'),
)
class ArtifactReference(object):
@classmethod
def from_artifact(cls, artifact):
obj = cls.query.get(_id=artifact.index_id())
if obj is not None:
return obj
try:
obj = cls(
_id=artifact.index_id(),
artifact_reference=dict(
cls=bson.Binary(dumps(artifact.__class__, protocol=2)),
project_id=artifact.app_config.project_id,
app_config_id=artifact.app_config._id,
artifact_id=artifact._id))
session(obj).flush(obj)
return obj
except pymongo.errors.DuplicateKeyError:
session(obj).expunge(obj)
return cls.query.get(_id=artifact.index_id())
@LazyProperty
def artifact(self):
aref = self.artifact_reference
try:
cls = loads(six.binary_type(aref.cls))
with h.push_context(aref.project_id):
return cls.query.get(_id=aref.artifact_id)
except Exception:
log.exception('Error loading artifact for %s: %r',
self._id, aref)
class Shortlink(object):
_core_re = r'''(\[
(?:(?P<project_id>.*?):)? # optional project ID
(?:(?P<app_id>.*?):)? # optional tool ID
(?P<artifact_id>.*) # artifact ID
\])'''
re_link_1 = re.compile(r'\s' + _core_re, re.VERBOSE)
re_link_2 = re.compile(r'^' + _core_re, re.VERBOSE)
def __repr__(self):
return '<Shortlink %s %s %s -> %s>' % (
self.project_id,
self.app_config_id,
self.link,
self.ref_id)
@classmethod
def lookup(cls, link):
return cls.from_links(link)[link]
@classmethod
def from_artifact(cls, a):
result = cls.query.get(ref_id=a.index_id())
if result is None:
try:
result = cls(
ref_id=a.index_id(),
project_id=a.app_config.project_id,
app_config_id=a.app_config._id)
session(result).flush(result)
except pymongo.errors.DuplicateKeyError:
session(result).expunge(result)
result = cls.query.get(ref_id=a.index_id())
result.link = a.shorthand_id()
result.url = a.url()
if result.link is None:
result.delete()
return None
return result
@classmethod
def from_links(cls, *links):
if len(links):
result = {}
parsed_links = dict((link, cls._parse_link(link))
for link in links)
links_by_artifact = defaultdict(list)
project_ids = set()
for link, d in list(parsed_links.items()):
if d:
project_ids.add(d['project_id'])
links_by_artifact[unquote(d['artifact'])].append(d)
else:
result[link] = parsed_links.pop(link)
q = cls.query.find(
dict(
link={'$in': list(links_by_artifact.keys())},
project_id={'$in': list(project_ids)}
),
validate=False,
sort=[('_id', pymongo.DESCENDING)],
)
matches_by_artifact = dict(
(link, list(matches))
for link, matches in groupby(q, key=lambda s: unquote(s.link)))
for link, d in six.iteritems(parsed_links):
matches = matches_by_artifact.get(unquote(d['artifact']), [])
matches = (
m for m in matches
if m.project.shortname == d['project'] and
m.project.neighborhood_id == d['nbhd'] and
m.app_config is not None and
m.project.app_instance(m.app_config.options.mount_point))
if d['app']:
matches = (
m for m in matches
if m.app_config.options.mount_point == d['app'])
result[link] = cls._get_correct_match(link, list(matches))
return result
else:
return {}
@classmethod
def _get_correct_match(cls, link, matches):
result = None
if len(matches) == 1:
result = matches[0]
elif len(matches) > 1 and getattr(c, 'app', None):
for m in matches:
if m.app_config_id == c.app.config._id:
result = m
break
if not result:
cls.log_ambiguous_link('Can not remove ambiguity for link %s with c.app %s', matches, link, c.app)
result = matches[0]
elif len(matches) > 1 and not getattr(c, 'app', None):
cls.log_ambiguous_link('Ambiguous link to %s and c.app is not present to remove ambiguity', matches, link)
result = matches[0]
return result
@classmethod
def log_ambiguous_link(cls, msg, matches, *args):
log.warn(msg, *args)
for m in matches:
log.warn('... %r', m)
@classmethod
def _parse_link(cls, s):
s = s.strip()
if s.startswith('['):
s = s[1:]
if s.endswith(']'):
s = s[:-1]
parts = s.split(':')
p_shortname = None
p_id = None
p_nbhd = None
if getattr(c, 'project', None):
p_shortname = getattr(c.project, 'shortname', None)
p_id = getattr(c.project, '_id', None)
p_nbhd = c.project.neighborhood_id
if len(parts) == 3:
p = Project.query.get(shortname=parts[0], neighborhood_id=p_nbhd)
if p:
p_id = p._id
return dict(
nbhd=p_nbhd,
project=parts[0],
project_id=p_id,
app=parts[1],
artifact=parts[2])
elif len(parts) == 2:
return dict(
nbhd=p_nbhd,
project=p_shortname,
project_id=p_id,
app=parts[0],
artifact=parts[1])
elif len(parts) == 1:
return dict(
nbhd=p_nbhd,
project=p_shortname,
project_id=p_id,
app=None,
artifact=parts[0])
else:
return None
# Mapper definitions
mapper(ArtifactReference, ArtifactReferenceDoc, main_orm_session)
mapper(Shortlink, ShortlinkDoc, main_orm_session, properties=dict(
ref_id=ForeignIdProperty(ArtifactReference),
project_id=ForeignIdProperty('Project'),
app_config_id=ForeignIdProperty('AppConfig'),
project=RelationProperty('Project'),
app_config=RelationProperty('AppConfig'),
ref=RelationProperty(ArtifactReference)))
| true
| true
|
790667b9e8d7f3a6d2c1475a4dbdaaabec469b79
| 3,463
|
py
|
Python
|
rustici_software_cloud_v2/models/destination_id_schema.py
|
ryanhope2/scormcloud-api-v2-client-python
|
fcc392933218d32b70987f8bfb1711f891f31c06
|
[
"Apache-2.0"
] | null | null | null |
rustici_software_cloud_v2/models/destination_id_schema.py
|
ryanhope2/scormcloud-api-v2-client-python
|
fcc392933218d32b70987f8bfb1711f891f31c06
|
[
"Apache-2.0"
] | null | null | null |
rustici_software_cloud_v2/models/destination_id_schema.py
|
ryanhope2/scormcloud-api-v2-client-python
|
fcc392933218d32b70987f8bfb1711f891f31c06
|
[
"Apache-2.0"
] | null | null | null |
# coding: utf-8
"""
SCORM Cloud Rest API
REST API used for SCORM Cloud integrations.
OpenAPI spec version: 2.0
Contact: systems@rusticisoftware.com
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from pprint import pformat
from six import iteritems
import re
class DestinationIdSchema(object):
    """
    NOTE: This class is auto generated by the swagger code generator program.
    Do not edit the class manually.
    """

    def __init__(self, id=None, data=None):
        """
        DestinationIdSchema - a model defined in Swagger

        :param dict swaggerTypes: The key is attribute name
                                  and the value is attribute type.
        :param dict attributeMap: The key is attribute name
                                  and the value is json key in definition.
        """
        self.swagger_types = {'id': 'str', 'data': 'DestinationSchema'}
        self.attribute_map = {'id': 'id', 'data': 'data'}
        self._id = id
        self._data = data

    @property
    def id(self):
        """
        Gets the id of this DestinationIdSchema.

        :return: The id of this DestinationIdSchema.
        :rtype: str
        """
        return self._id

    @id.setter
    def id(self, id):
        """
        Sets the id of this DestinationIdSchema.

        :param id: The id of this DestinationIdSchema.
        :type: str
        """
        self._id = id

    @property
    def data(self):
        """
        Gets the data of this DestinationIdSchema.

        :return: The data of this DestinationIdSchema.
        :rtype: DestinationSchema
        """
        return self._data

    @data.setter
    def data(self, data):
        """
        Sets the data of this DestinationIdSchema.

        :param data: The data of this DestinationIdSchema.
        :type: DestinationSchema
        """
        self._data = data

    def to_dict(self):
        """
        Returns the model properties as a dict
        """
        out = {}
        for name in self.swagger_types:
            value = getattr(self, name)
            if isinstance(value, list):
                out[name] = [v.to_dict() if hasattr(v, "to_dict") else v
                             for v in value]
            elif hasattr(value, "to_dict"):
                out[name] = value.to_dict()
            elif isinstance(value, dict):
                out[name] = dict(
                    (k, v.to_dict() if hasattr(v, "to_dict") else v)
                    for k, v in value.items())
            else:
                out[name] = value
        return out

    def to_str(self):
        """
        Returns the string representation of the model
        """
        return pformat(self.to_dict())

    def __repr__(self):
        """
        For `print` and `pprint`
        """
        return self.to_str()

    def __eq__(self, other):
        """
        Returns true if both objects are equal
        """
        return isinstance(other, DestinationIdSchema) and \
            self.__dict__ == other.__dict__

    def __ne__(self, other):
        """
        Returns true if both objects are not equal
        """
        return not self == other
| 24.387324
| 77
| 0.524978
|
from pprint import pformat
from six import iteritems
import re
class DestinationIdSchema(object):
def __init__(self, id=None, data=None):
self.swagger_types = {
'id': 'str',
'data': 'DestinationSchema'
}
self.attribute_map = {
'id': 'id',
'data': 'data'
}
self._id = id
self._data = data
@property
def id(self):
return self._id
@id.setter
def id(self, id):
self._id = id
@property
def data(self):
return self._data
@data.setter
def data(self, data):
self._data = data
def to_dict(self):
result = {}
for attr, _ in iteritems(self.swagger_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
return result
def to_str(self):
return pformat(self.to_dict())
def __repr__(self):
return self.to_str()
def __eq__(self, other):
if not isinstance(other, DestinationIdSchema):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
return not self == other
| true
| true
|
790667c757587de0f3d043867d3b48b90b9679be
| 2,324
|
py
|
Python
|
rest_waspmote/models.py
|
Infinityloopsistemas/SIVA
|
92b6c82f018d39ef405989639974d1f2757476ed
|
[
"BSD-3-Clause"
] | null | null | null |
rest_waspmote/models.py
|
Infinityloopsistemas/SIVA
|
92b6c82f018d39ef405989639974d1f2757476ed
|
[
"BSD-3-Clause"
] | 1
|
2018-09-27T12:07:19.000Z
|
2018-10-08T15:56:30.000Z
|
rest_waspmote/models.py
|
Infinityloopsistemas/SIVA
|
92b6c82f018d39ef405989639974d1f2757476ed
|
[
"BSD-3-Clause"
] | 1
|
2018-10-12T13:41:20.000Z
|
2018-10-12T13:41:20.000Z
|
from django.utils import timezone
from maestros.models import Unidades
from maestros_generales.models import Empresas
__author__ = 'julian'
from django.contrib.gis.db import models
import datetime
class WaspTypeSensor(models.Model):
name = models.CharField(max_length=50)
units = models.ForeignKey(Unidades)
fechaalta = models.DateField(auto_now_add=True,verbose_name=("Fecha Alta"),blank=True,null=True)
fechabaja = models.DateField(verbose_name=("Fecha Baja"), blank=True,null=True)
class WaspMote(models.Model):
DeviceName = models.CharField(max_length=30)
Imei = models.BigIntegerField()
fechaalta = models.DateField(auto_now_add=True,verbose_name=("Fecha Alta"),blank=True,null=True)
fechabaja = models.DateField(verbose_name=("Fecha Baja"), blank=True,null=True)
empresa = models.ForeignKey(Empresas,null=True, blank=True,verbose_name=('Empresa'),on_delete=models.PROTECT)
class WaspSensor(models.Model):
waspmote = models.ForeignKey(WaspMote, on_delete=models.PROTECT)
probestype = models.ForeignKey(WaspTypeSensor,on_delete=models.PROTECT)
fechaalta = models.DateField(auto_now_add=True,verbose_name=("Fecha Alta"),blank=True,null=True)
fechabaja = models.DateField(verbose_name=("Fecha Baja"), blank=True,null=True)
empresa = models.ForeignKey(Empresas,null=True, blank=True,verbose_name=('Empresa'),on_delete=models.PROTECT)
class WaspData(models.Model):
waspsensor = models.ForeignKey(WaspSensor)
timestamp_waspmote = models.DateTimeField()
status = models.CharField(max_length=1)
#loc = models.PointField(srid=4326)
alt = models.FloatField()
lat = models.FloatField()
long = models.FloatField()
speed = models.FloatField()
course = models.FloatField()
voltage = models.IntegerField()
notes = models.TextField()
#objects = models.GeoManager()
valorsensor = models.FloatField()
#timestamp_server = models.DateTimeField()
timestamp_server = models.DateTimeField(default= lambda: timezone.now() + datetime.timedelta(hours=1), blank=True)
| 45.568627
| 121
| 0.671687
|
from django.utils import timezone
from maestros.models import Unidades
from maestros_generales.models import Empresas
__author__ = 'julian'
from django.contrib.gis.db import models
import datetime
class WaspTypeSensor(models.Model):
name = models.CharField(max_length=50)
units = models.ForeignKey(Unidades)
fechaalta = models.DateField(auto_now_add=True,verbose_name=("Fecha Alta"),blank=True,null=True)
fechabaja = models.DateField(verbose_name=("Fecha Baja"), blank=True,null=True)
class WaspMote(models.Model):
DeviceName = models.CharField(max_length=30)
Imei = models.BigIntegerField()
fechaalta = models.DateField(auto_now_add=True,verbose_name=("Fecha Alta"),blank=True,null=True)
fechabaja = models.DateField(verbose_name=("Fecha Baja"), blank=True,null=True)
empresa = models.ForeignKey(Empresas,null=True, blank=True,verbose_name=('Empresa'),on_delete=models.PROTECT)
class WaspSensor(models.Model):
waspmote = models.ForeignKey(WaspMote, on_delete=models.PROTECT)
probestype = models.ForeignKey(WaspTypeSensor,on_delete=models.PROTECT)
fechaalta = models.DateField(auto_now_add=True,verbose_name=("Fecha Alta"),blank=True,null=True)
fechabaja = models.DateField(verbose_name=("Fecha Baja"), blank=True,null=True)
empresa = models.ForeignKey(Empresas,null=True, blank=True,verbose_name=('Empresa'),on_delete=models.PROTECT)
class WaspData(models.Model):
waspsensor = models.ForeignKey(WaspSensor)
timestamp_waspmote = models.DateTimeField()
status = models.CharField(max_length=1)
alt = models.FloatField()
lat = models.FloatField()
long = models.FloatField()
speed = models.FloatField()
course = models.FloatField()
voltage = models.IntegerField()
notes = models.TextField()
valorsensor = models.FloatField()
timestamp_server = models.DateTimeField(default= lambda: timezone.now() + datetime.timedelta(hours=1), blank=True)
| true
| true
|
7906682ea597efb6454e0a11ae1047ad087ef53b
| 1,700
|
py
|
Python
|
fuzzers/ECP5/143-bankref8/fuzzer.py
|
Keno/prjtrellis
|
3311e6d814e0001c8785d6d77a4c93e327875b6d
|
[
"ISC"
] | 256
|
2018-03-05T00:28:46.000Z
|
2022-03-04T22:33:29.000Z
|
fuzzers/ECP5/143-bankref8/fuzzer.py
|
Keno/prjtrellis
|
3311e6d814e0001c8785d6d77a4c93e327875b6d
|
[
"ISC"
] | 70
|
2018-03-12T21:55:02.000Z
|
2020-06-22T12:06:08.000Z
|
fuzzers/ECP5/143-bankref8/fuzzer.py
|
Keno/prjtrellis
|
3311e6d814e0001c8785d6d77a4c93e327875b6d
|
[
"ISC"
] | 68
|
2018-03-12T21:05:01.000Z
|
2021-03-14T21:08:33.000Z
|
from fuzzconfig import FuzzConfig
import nonrouting
import nets
import pytrellis
import re
import fuzzloops
jobs = [
{
"cfg": FuzzConfig(job="BANKREF8", family="ECP5", device="LFE5U-45F", ncl="empty.ncl",
tiles=["MIB_R71C3:BANKREF8"]),
"side": "B",
"pin": "R1"
},
]
def main():
pytrellis.load_database("../../../database")
for job in jobs:
cfg = job["cfg"]
side = job["side"]
pin = job["pin"]
cfg.setup()
empty_bitfile = cfg.build_design(cfg.ncl, {})
cfg.ncl = "pio.v"
def get_substs(iomode, vcc, extracfg=None):
if iomode == "NONE":
iodir, type = "NONE", ""
else:
iodir, type = iomode.split("_", 1)
substs = {
"dir": iodir,
"io_type": type,
"loc": pin,
"extra_attrs": "",
"vcc": vcc
}
if extracfg is not None:
substs["extra_attrs"] = '(* {}="{}" *)'.format(extracfg[0], extracfg[1])
return substs
vcco_opts = {
"1V2": "OUTPUT_LVCMOS12",
"1V5": "OUTPUT_LVCMOS15",
"1V8": "OUTPUT_LVCMOS18",
"2V5": "OUTPUT_LVCMOS25",
"3V3": "OUTPUT_LVCMOS33",
"NONE": "INPUT_LVCMOS12",
}
nonrouting.fuzz_enum_setting(cfg, "BANK.VCCIO", list(sorted(vcco_opts.keys())),
lambda x: get_substs(iomode=vcco_opts[x], vcc=x.replace("V", ".") if x != "NONE" else "2.5"),
empty_bitfile)
if __name__ == "__main__":
main()
| 26.984127
| 130
| 0.468235
|
from fuzzconfig import FuzzConfig
import nonrouting
import nets
import pytrellis
import re
import fuzzloops
jobs = [
{
"cfg": FuzzConfig(job="BANKREF8", family="ECP5", device="LFE5U-45F", ncl="empty.ncl",
tiles=["MIB_R71C3:BANKREF8"]),
"side": "B",
"pin": "R1"
},
]
def main():
pytrellis.load_database("../../../database")
for job in jobs:
cfg = job["cfg"]
side = job["side"]
pin = job["pin"]
cfg.setup()
empty_bitfile = cfg.build_design(cfg.ncl, {})
cfg.ncl = "pio.v"
def get_substs(iomode, vcc, extracfg=None):
if iomode == "NONE":
iodir, type = "NONE", ""
else:
iodir, type = iomode.split("_", 1)
substs = {
"dir": iodir,
"io_type": type,
"loc": pin,
"extra_attrs": "",
"vcc": vcc
}
if extracfg is not None:
substs["extra_attrs"] = '(* {}="{}" *)'.format(extracfg[0], extracfg[1])
return substs
vcco_opts = {
"1V2": "OUTPUT_LVCMOS12",
"1V5": "OUTPUT_LVCMOS15",
"1V8": "OUTPUT_LVCMOS18",
"2V5": "OUTPUT_LVCMOS25",
"3V3": "OUTPUT_LVCMOS33",
"NONE": "INPUT_LVCMOS12",
}
nonrouting.fuzz_enum_setting(cfg, "BANK.VCCIO", list(sorted(vcco_opts.keys())),
lambda x: get_substs(iomode=vcco_opts[x], vcc=x.replace("V", ".") if x != "NONE" else "2.5"),
empty_bitfile)
if __name__ == "__main__":
main()
| true
| true
|
7906684775fc8960a9d705006ec1d98ea3ffcb4c
| 7,715
|
py
|
Python
|
experiments/digit/unsupervised_digit_inspect.py
|
viniciusarruda/SHOT
|
46e122026805833df36b40d68fe8d815f8d614af
|
[
"MIT"
] | null | null | null |
experiments/digit/unsupervised_digit_inspect.py
|
viniciusarruda/SHOT
|
46e122026805833df36b40d68fe8d815f8d614af
|
[
"MIT"
] | null | null | null |
experiments/digit/unsupervised_digit_inspect.py
|
viniciusarruda/SHOT
|
46e122026805833df36b40d68fe8d815f8d614af
|
[
"MIT"
] | null | null | null |
import argparse
import os, sys
import os.path as osp
import torchvision
import numpy as np
import torch
import torch.nn as nn
import torch.optim as optim
from torchvision import transforms
import network, loss
from torch.utils.data import DataLoader
import random, pdb, math, copy
from tqdm import tqdm
from scipy.spatial.distance import cdist
import pickle
from data_load import mnist, svhn, usps
# inverse_transform = None
# class InverseTransform(torchvision.transforms.Normalize):
# """
# Undoes the normalization and returns the reconstructed images in the input domain.
# """
# def __init__(self, mean, std):
# mean = torch.as_tensor(mean)
# std = torch.as_tensor(std)
# std_inv = 1 / (std + 1e-7)
# mean_inv = -mean * std_inv
# super().__init__(mean=mean_inv, std=std_inv)
# def __call__(self, tensor):
# t = super().__call__(tensor.clone())
# # return transforms.ToPILImage()(t)
# return t
def digit_load(args):
global inverse_transform
train_bs = args.batch_size
if args.dset == 's':
test_source = svhn.SVHN('./data/svhn/', split='test', download=True,
transform=transforms.Compose([
transforms.Resize(32),
transforms.ToTensor(),
transforms.Normalize((0.5, 0.5, 0.5), (0.5, 0.5, 0.5))
]))
# assert inverse_transform == None
# inverse_transform = InverseTransform((0.5, 0.5, 0.5), (0.5, 0.5, 0.5))
elif args.dset == 'u':
test_source = usps.USPS('./data/usps/', train=False, download=True,
transform=transforms.Compose([
transforms.RandomCrop(28, padding=4),
transforms.RandomRotation(10),
transforms.ToTensor(),
transforms.Normalize((0.5,), (0.5,))
]))
# assert inverse_transform == None
# inverse_transform = InverseTransform((0.5,), (0.5,))
elif args.dset == 'm':
test_source = mnist.MNIST('./data/mnist/', train=False, download=True,
transform=transforms.Compose([
transforms.ToTensor(),
transforms.Normalize((0.5,), (0.5,))
]))
# assert inverse_transform == None
# inverse_transform = InverseTransform((0.5,), (0.5,))
dset_loaders = {}
dset_loaders["test"] = DataLoader(test_source, batch_size=train_bs*2, shuffle=False,
num_workers=args.worker, drop_last=False)
return dset_loaders
def cal_acc(loader, netF, netB, netC):
k = 0
start_test = True
with torch.no_grad():
iter_test = iter(loader)
for i in range(len(loader)):
data = iter_test.next()
input_images = []
inputs = data[0]
inputs_clone = inputs.clone()
for j in range(inputs_clone.size(0)):
x = transforms.Normalize((-1,), (2,))(inputs_clone[j])
input_images.append(transforms.ToPILImage()(x))
labels = data[1]
outputs = netC(netB(netF(inputs)))
#
_, predict = torch.max(outputs.float().cpu(), 1)
for j in range(inputs.size(0)):
folder = args.output_dir + '/inspect/label-{}'.format(labels[j])
if not osp.exists(folder):
os.makedirs(folder)
subfolder = folder + '/pred-{}'.format(predict[j])
if not osp.exists(subfolder):
os.makedirs(subfolder)
input_images[j].save(subfolder + '/{}.jpg'.format(k))
k += 1
#
if start_test:
all_output = outputs.float().cpu()
all_label = labels.float()
start_test = False
else:
all_output = torch.cat((all_output, outputs.float().cpu()), 0)
all_label = torch.cat((all_label, labels.float()), 0)
_, predict = torch.max(all_output, 1)
accuracy = torch.sum(torch.squeeze(predict).float() == all_label).item() / float(all_label.size()[0])
mean_ent = torch.mean(loss.Entropy(nn.Softmax(dim=1)(all_output))).cpu().data.item()
return accuracy*100, mean_ent
def test(args):
dset_loaders = digit_load(args)
## set base network
if args.dset == 'u':
netF = network.LeNetBase()#.cuda()
elif args.dset == 'm':
netF = network.LeNetBase()#.cuda()
elif args.dset == 's':
netF = network.DTNBase()#.cuda()
netB = network.feat_bootleneck(type=args.classifier, feature_dim=netF.in_features, bottleneck_dim=args.bottleneck)#.cuda()
netC = network.feat_classifier(type=args.layer, class_num = args.class_num, bottleneck_dim=args.bottleneck)#.cuda()
args.modelpath = args.output_dir + '/F.pt'
netF.load_state_dict(torch.load(args.modelpath))
args.modelpath = args.output_dir + '/B.pt'
netB.load_state_dict(torch.load(args.modelpath))
args.modelpath = args.output_dir + '/C.pt'
netC.load_state_dict(torch.load(args.modelpath))
netF.eval()
netB.eval()
netC.eval()
acc, _ = cal_acc(dset_loaders['test'], netF, netB, netC)
log_str = 'Task: {}, Accuracy = {:.2f}%'.format(args.dset, acc)
try:
args.out_file.write(log_str + '\n')
args.out_file.flush()
except:
pass
print(log_str+'\n')
def print_args(args):
s = "==========================================\n"
for arg, content in args.__dict__.items():
s += "{}:{}\n".format(arg, content)
return s
if __name__ == "__main__":
parser = argparse.ArgumentParser(description='SHOT')
parser.add_argument('--gpu_id', type=str, nargs='?', default='0', help="device id to run")
parser.add_argument('--s', type=int, default=0, help="source")
parser.add_argument('--t', type=int, default=1, help="target")
parser.add_argument('--max_epoch', type=int, default=30, help="maximum epoch")
parser.add_argument('--batch_size', type=int, default=64, help="batch_size")
parser.add_argument('--worker', type=int, default=4, help="number of workers")
parser.add_argument('--dset', type=str, default='s', choices=['u', 'm','s'])
parser.add_argument('--lr', type=float, default=0.01, help="learning rate")
parser.add_argument('--seed', type=int, default=2020, help="random seed")
parser.add_argument('--bottleneck', type=int, default=256)
parser.add_argument('--layer', type=str, default="wn", choices=["linear", "wn"])
parser.add_argument('--classifier', type=str, default="bn", choices=["ori", "bn"])
parser.add_argument('--output', type=str, default='')
parser.add_argument('--issave', type=bool, default=True)
args = parser.parse_args()
args.class_num = 10
# os.environ["CUDA_VISIBLE_DEVICES"] = args.gpu_id
SEED = args.seed
torch.manual_seed(SEED)
# torch.cuda.manual_seed(SEED)
np.random.seed(SEED)
random.seed(SEED)
# torch.backends.cudnn.deterministic = True
args.output_dir = osp.join(args.output, 'seed' + str(args.seed), args.dset)
test(args)
# python unsupervised_digit.py --dset m --gpu_id 0 --output ckps_unsupervised_digit
# python unsupervised_digit.py --dset m --gpu_id 0 --ent --output ckps_unsupervised_digit_ent
# python unsupervised_digit.py --dset m --gpu_id 0 --gent --output ckps_unsupervised_digit_gent
# python unsupervised_digit.py --dset m --gpu_id 0 --ent --gent --output ckps_unsupervised_digit_ent_gent
# na verdade n sem como saber qual classe vai sair .. ideal é ver tsne? ou mostrar as classificacoes primeiro?
# show classification + gradcam (versao mais rapida)
| 40.820106
| 126
| 0.61024
|
import argparse
import os, sys
import os.path as osp
import torchvision
import numpy as np
import torch
import torch.nn as nn
import torch.optim as optim
from torchvision import transforms
import network, loss
from torch.utils.data import DataLoader
import random, pdb, math, copy
from tqdm import tqdm
from scipy.spatial.distance import cdist
import pickle
from data_load import mnist, svhn, usps
# Undoes the normalization and returns the reconstructed images in the input domain.
# """
al inverse_transform
train_bs = args.batch_size
if args.dset == 's':
test_source = svhn.SVHN('./data/svhn/', split='test', download=True,
transform=transforms.Compose([
transforms.Resize(32),
transforms.ToTensor(),
transforms.Normalize((0.5, 0.5, 0.5), (0.5, 0.5, 0.5))
]))
elif args.dset == 'u':
test_source = usps.USPS('./data/usps/', train=False, download=True,
transform=transforms.Compose([
transforms.RandomCrop(28, padding=4),
transforms.RandomRotation(10),
transforms.ToTensor(),
transforms.Normalize((0.5,), (0.5,))
]))
elif args.dset == 'm':
test_source = mnist.MNIST('./data/mnist/', train=False, download=True,
transform=transforms.Compose([
transforms.ToTensor(),
transforms.Normalize((0.5,), (0.5,))
]))
dset_loaders = {}
dset_loaders["test"] = DataLoader(test_source, batch_size=train_bs*2, shuffle=False,
num_workers=args.worker, drop_last=False)
return dset_loaders
def cal_acc(loader, netF, netB, netC):
k = 0
start_test = True
with torch.no_grad():
iter_test = iter(loader)
for i in range(len(loader)):
data = iter_test.next()
input_images = []
inputs = data[0]
inputs_clone = inputs.clone()
for j in range(inputs_clone.size(0)):
x = transforms.Normalize((-1,), (2,))(inputs_clone[j])
input_images.append(transforms.ToPILImage()(x))
labels = data[1]
outputs = netC(netB(netF(inputs)))
_, predict = torch.max(outputs.float().cpu(), 1)
for j in range(inputs.size(0)):
folder = args.output_dir + '/inspect/label-{}'.format(labels[j])
if not osp.exists(folder):
os.makedirs(folder)
subfolder = folder + '/pred-{}'.format(predict[j])
if not osp.exists(subfolder):
os.makedirs(subfolder)
input_images[j].save(subfolder + '/{}.jpg'.format(k))
k += 1
if start_test:
all_output = outputs.float().cpu()
all_label = labels.float()
start_test = False
else:
all_output = torch.cat((all_output, outputs.float().cpu()), 0)
all_label = torch.cat((all_label, labels.float()), 0)
_, predict = torch.max(all_output, 1)
accuracy = torch.sum(torch.squeeze(predict).float() == all_label).item() / float(all_label.size()[0])
mean_ent = torch.mean(loss.Entropy(nn.Softmax(dim=1)(all_output))).cpu().data.item()
return accuracy*100, mean_ent
def test(args):
dset_loaders = digit_load(args)
== 'u':
netF = network.LeNetBase()
elif args.dset == 'm':
netF = network.LeNetBase()
elif args.dset == 's':
netF = network.DTNBase()
netB = network.feat_bootleneck(type=args.classifier, feature_dim=netF.in_features, bottleneck_dim=args.bottleneck)
netC = network.feat_classifier(type=args.layer, class_num = args.class_num, bottleneck_dim=args.bottleneck)
args.modelpath = args.output_dir + '/F.pt'
netF.load_state_dict(torch.load(args.modelpath))
args.modelpath = args.output_dir + '/B.pt'
netB.load_state_dict(torch.load(args.modelpath))
args.modelpath = args.output_dir + '/C.pt'
netC.load_state_dict(torch.load(args.modelpath))
netF.eval()
netB.eval()
netC.eval()
acc, _ = cal_acc(dset_loaders['test'], netF, netB, netC)
log_str = 'Task: {}, Accuracy = {:.2f}%'.format(args.dset, acc)
try:
args.out_file.write(log_str + '\n')
args.out_file.flush()
except:
pass
print(log_str+'\n')
def print_args(args):
s = "==========================================\n"
for arg, content in args.__dict__.items():
s += "{}:{}\n".format(arg, content)
return s
if __name__ == "__main__":
parser = argparse.ArgumentParser(description='SHOT')
parser.add_argument('--gpu_id', type=str, nargs='?', default='0', help="device id to run")
parser.add_argument('--s', type=int, default=0, help="source")
parser.add_argument('--t', type=int, default=1, help="target")
parser.add_argument('--max_epoch', type=int, default=30, help="maximum epoch")
parser.add_argument('--batch_size', type=int, default=64, help="batch_size")
parser.add_argument('--worker', type=int, default=4, help="number of workers")
parser.add_argument('--dset', type=str, default='s', choices=['u', 'm','s'])
parser.add_argument('--lr', type=float, default=0.01, help="learning rate")
parser.add_argument('--seed', type=int, default=2020, help="random seed")
parser.add_argument('--bottleneck', type=int, default=256)
parser.add_argument('--layer', type=str, default="wn", choices=["linear", "wn"])
parser.add_argument('--classifier', type=str, default="bn", choices=["ori", "bn"])
parser.add_argument('--output', type=str, default='')
parser.add_argument('--issave', type=bool, default=True)
args = parser.parse_args()
args.class_num = 10
SEED = args.seed
torch.manual_seed(SEED)
np.random.seed(SEED)
random.seed(SEED)
args.output_dir = osp.join(args.output, 'seed' + str(args.seed), args.dset)
test(args)
| true
| true
|
790668cb9fdd6ece569f358e157a257b12f0dcb2
| 1,291
|
py
|
Python
|
playlist_parser.py
|
MikeWent/applepd_bot
|
bf093d52f6f97f0e5bb2c969b4a31f0e44cb018f
|
[
"MIT"
] | null | null | null |
playlist_parser.py
|
MikeWent/applepd_bot
|
bf093d52f6f97f0e5bb2c969b4a31f0e44cb018f
|
[
"MIT"
] | null | null | null |
playlist_parser.py
|
MikeWent/applepd_bot
|
bf093d52f6f97f0e5bb2c969b4a31f0e44cb018f
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python3
# Data schema:
# (start) (12b junk) artist (5* byte) (1b junk) title (col) (1b junk) date and time (col) (1b junk) url (urldur) duration (col) (1b junk) thumbnail url (end)
keybytes = {
"row_start": "80 09 80 00 80", # row start
"col": "5F 10", # column delimeter
"urldur": "58", # url/duration delimeter
"urldur2": "D8",
"urldur3": "D2",
"row_end": "D8 00 0A 00 2A 00 2B 00 2C 00 2D 00 2E 00 2F 00 30 00 31 00 32 00" # row end
}
# convert hex to bytes
for k, v in keybytes.items():
keybytes[k] = bytearray.fromhex(v)
def get_urls_from_playlist(filename):
with open(filename, "rb") as f:
content = f.read()
for row in content.split(keybytes["row_start"])[1:]:
try:
row = row.split(keybytes["row_end"])[0] # cut off everything after the row end
columns = row.split(keybytes["col"])
for col in columns:
if "http" in str(col):
# cut off junk bytes
url = col.split(keybytes["urldur"])[0].split(keybytes["urldur2"])[0].split(keybytes["urldur3"])[0]
url = url[1:].decode("utf-8")
yield url
except Exception as e:
pass
| 37.970588
| 157
| 0.543765
|
keybytes = {
"row_start": "80 09 80 00 80",
"col": "5F 10",
"urldur": "58",
"urldur2": "D8",
"urldur3": "D2",
"row_end": "D8 00 0A 00 2A 00 2B 00 2C 00 2D 00 2E 00 2F 00 30 00 31 00 32 00"
}
for k, v in keybytes.items():
keybytes[k] = bytearray.fromhex(v)
def get_urls_from_playlist(filename):
with open(filename, "rb") as f:
content = f.read()
for row in content.split(keybytes["row_start"])[1:]:
try:
row = row.split(keybytes["row_end"])[0]
columns = row.split(keybytes["col"])
for col in columns:
if "http" in str(col):
url = col.split(keybytes["urldur"])[0].split(keybytes["urldur2"])[0].split(keybytes["urldur3"])[0]
url = url[1:].decode("utf-8")
yield url
except Exception as e:
pass
| true
| true
|
79066a340dfaa07ea5013899ead5c2fd00fc74b5
| 4,223
|
py
|
Python
|
examples/termui/termui.py
|
D4N/asyncclick
|
fa4decbe3537cf280c8e5371d7ab0b2db4b8a706
|
[
"BSD-3-Clause"
] | null | null | null |
examples/termui/termui.py
|
D4N/asyncclick
|
fa4decbe3537cf280c8e5371d7ab0b2db4b8a706
|
[
"BSD-3-Clause"
] | null | null | null |
examples/termui/termui.py
|
D4N/asyncclick
|
fa4decbe3537cf280c8e5371d7ab0b2db4b8a706
|
[
"BSD-3-Clause"
] | null | null | null |
# coding: utf-8
import math
import random
import time
import asyncclick as click
@click.group()
def cli():
"""This script showcases different terminal UI helpers in Click."""
pass
@cli.command()
def colordemo():
"""Demonstrates ANSI color support."""
for color in "red", "green", "blue":
click.echo(click.style("I am colored {}".format(color), fg=color))
click.echo(click.style("I am background colored {}".format(color), bg=color))
@cli.command()
def pager():
"""Demonstrates using the pager."""
lines = []
for x in range(200):
lines.append("{}. Hello World!".format(click.style(str(x), fg="green")))
click.echo_via_pager("\n".join(lines))
@cli.command()
@click.option(
"--count",
default=8000,
type=click.IntRange(1, 100000),
help="The number of items to process.",
)
def progress(count):
"""Demonstrates the progress bar."""
items = range(count)
def process_slowly(item):
time.sleep(0.002 * random.random())
def filter(items):
for item in items:
if random.random() > 0.3:
yield item
with click.progressbar(
items, label="Processing accounts", fill_char=click.style("#", fg="green")
) as bar:
for item in bar:
process_slowly(item)
def show_item(item):
if item is not None:
return "Item #{}".format(item)
with click.progressbar(
filter(items),
label="Committing transaction",
fill_char=click.style("#", fg="yellow"),
item_show_func=show_item,
) as bar:
for item in bar:
process_slowly(item)
with click.progressbar(
length=count,
label="Counting",
bar_template="%(label)s %(bar)s | %(info)s",
fill_char=click.style(u"█", fg="cyan"),
empty_char=" ",
) as bar:
for item in bar:
process_slowly(item)
with click.progressbar(
length=count,
width=0,
show_percent=False,
show_eta=False,
fill_char=click.style("#", fg="magenta"),
) as bar:
for item in bar:
process_slowly(item)
# 'Non-linear progress bar'
steps = [math.exp(x * 1.0 / 20) - 1 for x in range(20)]
count = int(sum(steps))
with click.progressbar(
length=count,
show_percent=False,
label="Slowing progress bar",
fill_char=click.style(u"█", fg="green"),
) as bar:
for item in steps:
time.sleep(item)
bar.update(item)
@cli.command()
@click.argument("url")
def open(url):
"""Opens a file or URL In the default application."""
click.launch(url)
@cli.command()
@click.argument("url")
def locate(url):
"""Opens a file or URL In the default application."""
click.launch(url, locate=True)
@cli.command()
def edit():
"""Opens an editor with some text in it."""
MARKER = "# Everything below is ignored\n"
message = click.edit("\n\n{}".format(MARKER))
if message is not None:
msg = message.split(MARKER, 1)[0].rstrip("\n")
if not msg:
click.echo("Empty message!")
else:
click.echo("Message:\n{}".format(msg))
else:
click.echo("You did not enter anything!")
@cli.command()
def clear():
"""Clears the entire screen."""
click.clear()
@cli.command()
def pause():
"""Waits for the user to press a button."""
click.pause()
@cli.command()
def menu():
"""Shows a simple menu."""
menu = "main"
while 1:
if menu == "main":
click.echo("Main menu:")
click.echo(" d: debug menu")
click.echo(" q: quit")
char = click.getchar()
if char == "d":
menu = "debug"
elif char == "q":
menu = "quit"
else:
click.echo("Invalid input")
elif menu == "debug":
click.echo("Debug menu")
click.echo(" b: back")
char = click.getchar()
if char == "b":
menu = "main"
else:
click.echo("Invalid input")
elif menu == "quit":
return
| 24.695906
| 85
| 0.550556
|
import math
import random
import time
import asyncclick as click
@click.group()
def cli():
pass
@cli.command()
def colordemo():
for color in "red", "green", "blue":
click.echo(click.style("I am colored {}".format(color), fg=color))
click.echo(click.style("I am background colored {}".format(color), bg=color))
@cli.command()
def pager():
lines = []
for x in range(200):
lines.append("{}. Hello World!".format(click.style(str(x), fg="green")))
click.echo_via_pager("\n".join(lines))
@cli.command()
@click.option(
"--count",
default=8000,
type=click.IntRange(1, 100000),
help="The number of items to process.",
)
def progress(count):
items = range(count)
def process_slowly(item):
time.sleep(0.002 * random.random())
def filter(items):
for item in items:
if random.random() > 0.3:
yield item
with click.progressbar(
items, label="Processing accounts", fill_char=click.style("#", fg="green")
) as bar:
for item in bar:
process_slowly(item)
def show_item(item):
if item is not None:
return "Item #{}".format(item)
with click.progressbar(
filter(items),
label="Committing transaction",
fill_char=click.style("#", fg="yellow"),
item_show_func=show_item,
) as bar:
for item in bar:
process_slowly(item)
with click.progressbar(
length=count,
label="Counting",
bar_template="%(label)s %(bar)s | %(info)s",
fill_char=click.style(u"█", fg="cyan"),
empty_char=" ",
) as bar:
for item in bar:
process_slowly(item)
with click.progressbar(
length=count,
width=0,
show_percent=False,
show_eta=False,
fill_char=click.style("#", fg="magenta"),
) as bar:
for item in bar:
process_slowly(item)
steps = [math.exp(x * 1.0 / 20) - 1 for x in range(20)]
count = int(sum(steps))
with click.progressbar(
length=count,
show_percent=False,
label="Slowing progress bar",
fill_char=click.style(u"█", fg="green"),
) as bar:
for item in steps:
time.sleep(item)
bar.update(item)
@cli.command()
@click.argument("url")
def open(url):
click.launch(url)
@cli.command()
@click.argument("url")
def locate(url):
click.launch(url, locate=True)
@cli.command()
def edit():
MARKER = "# Everything below is ignored\n"
message = click.edit("\n\n{}".format(MARKER))
if message is not None:
msg = message.split(MARKER, 1)[0].rstrip("\n")
if not msg:
click.echo("Empty message!")
else:
click.echo("Message:\n{}".format(msg))
else:
click.echo("You did not enter anything!")
@cli.command()
def clear():
click.clear()
@cli.command()
def pause():
click.pause()
@cli.command()
def menu():
menu = "main"
while 1:
if menu == "main":
click.echo("Main menu:")
click.echo(" d: debug menu")
click.echo(" q: quit")
char = click.getchar()
if char == "d":
menu = "debug"
elif char == "q":
menu = "quit"
else:
click.echo("Invalid input")
elif menu == "debug":
click.echo("Debug menu")
click.echo(" b: back")
char = click.getchar()
if char == "b":
menu = "main"
else:
click.echo("Invalid input")
elif menu == "quit":
return
| true
| true
|
79066bbfa4d827febdef30e544ab1b3979535a20
| 3,230
|
py
|
Python
|
feast_trino/trino_type_map.py
|
tpvasconcelos/feast-trino
|
d628182753c23aa2e851014e54f48aff28d43207
|
[
"MIT"
] | 810
|
2018-12-25T15:16:11.000Z
|
2020-05-14T09:49:40.000Z
|
feast_trino/trino_type_map.py
|
tpvasconcelos/feast-trino
|
d628182753c23aa2e851014e54f48aff28d43207
|
[
"MIT"
] | 701
|
2018-12-21T05:18:43.000Z
|
2020-05-16T01:30:21.000Z
|
feast_trino/trino_type_map.py
|
tpvasconcelos/feast-trino
|
d628182753c23aa2e851014e54f48aff28d43207
|
[
"MIT"
] | 155
|
2018-12-22T11:05:04.000Z
|
2020-05-14T07:33:41.000Z
|
from typing import Dict
import pyarrow as pa
import regex as re
from feast import ValueType
def trino_to_feast_value_type(trino_type_as_str: str) -> ValueType:
type_map: Dict[str, ValueType] = {
"tinyint": ValueType.INT32,
"smallint": ValueType.INT32,
"int": ValueType.INT32,
"integer": ValueType.INT32,
"bigint": ValueType.INT64,
"double": ValueType.DOUBLE,
"decimal": ValueType.FLOAT,
"timestamp": ValueType.UNIX_TIMESTAMP,
"char": ValueType.STRING,
"varchar": ValueType.STRING,
"boolean": ValueType.BOOL,
}
return type_map[trino_type_as_str.lower()]
def pa_to_trino_value_type(pa_type_as_str: str) -> str:
# PyArrow types: https://arrow.apache.org/docs/python/api/datatypes.html
# Trino type: https://trino.io/docs/current/language/types.html
pa_type_as_str = pa_type_as_str.lower()
trino_type = "{}"
if pa_type_as_str.startswith("list"):
trino_type = "array<{}>"
pa_type_as_str = re.search(r"^list<item:\s(.+)>$", pa_type_as_str).group(1)
if pa_type_as_str.startswith("date"):
return trino_type.format("date")
if pa_type_as_str.startswith("timestamp"):
if "tz=" in pa_type_as_str:
return trino_type.format("timestamp with time zone")
else:
return trino_type.format("timestamp")
if pa_type_as_str.startswith("decimal"):
return trino_type.format(pa_type_as_str)
type_map = {
"null": "null",
"bool": "boolean",
"int8": "tinyint",
"int16": "smallint",
"int32": "int",
"int64": "bigint",
"uint8": "smallint",
"uint16": "int",
"uint32": "bigint",
"uint64": "bigint",
"float": "double",
"double": "double",
"binary": "binary",
"string": "varchar",
}
return trino_type.format(type_map[pa_type_as_str])
_TRINO_TO_PA_TYPE_MAP = {
"null": pa.null(),
"boolean": pa.bool_(),
"date": pa.date32(),
"tinyint": pa.int8(),
"smallint": pa.int16(),
"integer": pa.int32(),
"bigint": pa.int64(),
"double": pa.float64(),
"binary": pa.binary(),
"char": pa.string(),
}
def trino_to_pa_value_type(trino_type_as_str: str) -> pa.DataType:
trino_type_as_str = trino_type_as_str.lower()
_is_list: bool = False
if trino_type_as_str.startswith("array"):
_is_list = True
trino_type_as_str = re.search(r"^array\((\w+)\)$", trino_type_as_str).group(1)
if trino_type_as_str.startswith("decimal"):
search_precision = re.search(
r"^decimal\((\d+)(?>,\s?\d+)?\)$", trino_type_as_str
)
if search_precision:
precision = int(search_precision.group(1))
if precision > 32:
pa_type = pa.float64()
else:
pa_type = pa.float32()
elif trino_type_as_str.startswith("timestamp"):
pa_type = pa.timestamp("us")
elif trino_type_as_str.startswith("varchar"):
pa_type = pa.string()
else:
pa_type = _TRINO_TO_PA_TYPE_MAP[trino_type_as_str]
if _is_list:
return pa.list_(pa_type)
else:
return pa_type
| 28.839286
| 86
| 0.603406
|
from typing import Dict
import pyarrow as pa
import regex as re
from feast import ValueType
def trino_to_feast_value_type(trino_type_as_str: str) -> ValueType:
    """Translate a Trino column type name into the matching Feast ValueType.

    The lookup is case-insensitive; an unmapped type name raises KeyError.
    """
    lowered = trino_type_as_str.lower()
    trino_to_feast: Dict[str, ValueType] = {
        # integral types: everything up to 32 bits widens to INT32
        "tinyint": ValueType.INT32,
        "smallint": ValueType.INT32,
        "int": ValueType.INT32,
        "integer": ValueType.INT32,
        "bigint": ValueType.INT64,
        # floating point: decimal narrows to FLOAT, double stays DOUBLE
        "decimal": ValueType.FLOAT,
        "double": ValueType.DOUBLE,
        # temporal / textual / boolean
        "timestamp": ValueType.UNIX_TIMESTAMP,
        "char": ValueType.STRING,
        "varchar": ValueType.STRING,
        "boolean": ValueType.BOOL,
    }
    return trino_to_feast[lowered]
def pa_to_trino_value_type(pa_type_as_str: str) -> str:
    """Render a pyarrow type string as the equivalent Trino type string.

    ``list<item: T>`` inputs are unwrapped and emitted as ``array<...>`` of
    the translated element type.  Date/timestamp/decimal inputs are handled
    by prefix match (decimal strings are passed through verbatim); every
    other primitive goes through a lookup table, raising KeyError for
    names that are not mapped.
    """
    lowered = pa_type_as_str.lower()
    template = "{}"
    if lowered.startswith("list"):
        # Unwrap the element type; the result gets wrapped in array<...>.
        template = "array<{}>"
        lowered = re.search(r"^list<item:\s(.+)>$", lowered).group(1)
    if lowered.startswith("date"):
        return template.format("date")
    if lowered.startswith("timestamp"):
        zoned = "tz=" in lowered
        return template.format("timestamp with time zone" if zoned else "timestamp")
    if lowered.startswith("decimal"):
        return template.format(lowered)
    primitive_map = {
        "null": "null",
        "bool": "boolean",
        "int8": "tinyint",
        "int16": "smallint",
        "int32": "int",
        "int64": "bigint",
        "uint8": "smallint",
        "uint16": "int",
        "uint32": "bigint",
        "uint64": "bigint",
        "float": "double",
        "double": "double",
        "binary": "binary",
        "string": "varchar",
    }
    return template.format(primitive_map[lowered])
# Direct Trino-name -> pyarrow-type lookup for primitives that need no extra
# parsing (decimal/timestamp/varchar/array are special-cased by the caller).
_TRINO_TO_PA_TYPE_MAP = dict(
    null=pa.null(),
    boolean=pa.bool_(),
    date=pa.date32(),
    tinyint=pa.int8(),
    smallint=pa.int16(),
    integer=pa.int32(),
    bigint=pa.int64(),
    double=pa.float64(),
    binary=pa.binary(),
    char=pa.string(),
)
def trino_to_pa_value_type(trino_type_as_str: str) -> pa.DataType:
    """Translate a Trino type string into the matching pyarrow DataType.

    ``array(T)`` inputs are unwrapped and returned as ``pa.list_`` of the
    translated element type.  Decimals map to float32, or float64 when the
    precision exceeds 32 digits; timestamps map to microsecond resolution;
    varchar maps to ``pa.string``.  Every other name goes through the
    ``_TRINO_TO_PA_TYPE_MAP`` lookup (KeyError for unmapped names).
    """
    lowered = trino_type_as_str.lower()
    wrap_in_list = False
    if lowered.startswith("array"):
        # Unwrap the element type; remember to re-wrap the result at the end.
        wrap_in_list = True
        lowered = re.search(r"^array\((\w+)\)$", lowered).group(1)
    if lowered.startswith("decimal"):
        # ``(?>...)`` is an atomic group (supported by the `regex` module
        # imported as ``re``); it matches an optional ", scale" suffix.
        precision_match = re.search(r"^decimal\((\d+)(?>,\s?\d+)?\)$", lowered)
        if precision_match:
            digits = int(precision_match.group(1))
            pa_type = pa.float64() if digits > 32 else pa.float32()
    elif lowered.startswith("timestamp"):
        pa_type = pa.timestamp("us")
    elif lowered.startswith("varchar"):
        pa_type = pa.string()
    else:
        pa_type = _TRINO_TO_PA_TYPE_MAP[lowered]
    return pa.list_(pa_type) if wrap_in_list else pa_type
| true
| true
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.