id stringlengths 1 8 | text stringlengths 6 1.05M | dataset_id stringclasses 1
value |
|---|---|---|
314212 | <reponame>davidryanshay/AIT-Core
# Advanced Multi-Mission Operations System (AMMOS) Instrument Toolkit (AIT)
# Bespoke Link to Instruments and Small Satellites (BLISS)
#
# Copyright 2013, by the California Institute of Technology. ALL RIGHTS
# RESERVED. United States Government Sponsorship acknowledged. Any
# commercial use must be negotiated with the Office of Technology Transfer
# at the California Institute of Technology.
#
# This software may be subject to U.S. export control laws. By accepting
# this software, the user agrees to comply with all applicable U.S. export
# laws and regulations. User has the responsibility to obtain export licenses,
# or other export authority as may be required before exporting such
# information to foreign countries or providing access to foreign persons.
"""AIT DeLorean Motor Company (DMC)
The ait.dmc module provides utilities to represent, translate, and
manipulate time, building upon Python's datetime and timedelta data
types.
Many functions assume the GPS (and ISS) epoch: January 6, 1980 at
midnight.
"""
import calendar
import datetime
import math
import os.path
import pickle
from typing import Optional, Tuple

import requests

import ait.core
from ait.core import log
GPS_Epoch = datetime.datetime(1980, 1, 6, 0, 0, 0)
TICs = []
TwoPi = 2 * math.pi
DOY_Format = "%Y-%jT%H:%M:%SZ"
ISO_8601_Format = "%Y-%m-%dT%H:%M:%SZ"
RFC3339_Format = "%Y-%m-%dT%H:%M:%S.%fZ"
_DEFAULT_FILE_NAME = "leapseconds.dat"
LeapSeconds = None
def get_timestamp_utc():
    """Return the current UTC time as integer seconds and microseconds.

    Returns:
        A tuple ``(ts_sec, ts_usec)`` where ``ts_sec`` is the number of whole
        seconds since the Unix epoch (UTC) and ``ts_usec`` is the microsecond
        component of the current time.
    """
    # datetime.utcnow() is deprecated as of Python 3.12; use an aware UTC
    # datetime instead.  calendar.timegm() interprets the timetuple as UTC,
    # so the result is identical to the previous implementation.
    utc = datetime.datetime.now(datetime.timezone.utc)
    ts_sec = calendar.timegm(utc.timetuple())
    ts_usec = utc.microsecond
    return ts_sec, ts_usec
def get_utc_datetime_doy(days=0, hours=0, minutes=0, seconds=0) -> str:
    """Return current UTC, plus an optional offset, in ISO 8601 DOY format.

    Arguments:
        days (int): Optional days offset from current UTC time
        hours (int): Optional hours offset from current UTC time
        minutes (int): Optional minutes offset from current UTC time
        seconds (int): Optional seconds offset from current UTC time

    Returns:
        String formatted datetime of the form "%Y-%jT%H:%M:%SZ"
    """
    offset = datetime.timedelta(
        days=days, hours=hours, minutes=minutes, seconds=seconds
    )
    when = datetime.datetime.utcnow() + offset
    return when.strftime(DOY_Format)
def tic():
    """tic()

    Records the current time for benchmarking purposes.  See also toc().
    """
    # Appending mutates the shared module-level list in place, so no
    # `global` declaration is required.
    TICs.append(datetime.datetime.now())
def toc():
    """toc() -> float | None

    Returns the total elapsed seconds since the most recent tic(), or
    None if tic() was not called.

    Examples:
        >>> import time
        >>> tic()
        >>> time.sleep(1.2)
        >>> elapsed = toc()
        >>> assert abs(elapsed - 1.2) <= 1e-2

    .. note:: The tic() and toc() functions are simplistic and may introduce
        significant overhead, especially in tight loops.  Their use should
        be limited to one-off experiments and rough numbers.  The Python
        profile package (i.e. 'import profile') should be used for serious
        and detailed profiling.
    """
    stop = datetime.datetime.now()
    if not TICs:
        return None
    start = TICs.pop()
    return (stop - start).total_seconds()
def to_gps_week_and_secs(timestamp=None) -> Tuple[int, float]:
    """Convert a timestamp (default current UTC) to GPS weeks / seconds.

    Arguments:
        timestamp (optional): An optional datetime value to convert.  Current
            UTC time is used if nothing is provided.

    Returns:
        A tuple of the form (GPS week number, GPS seconds within the week)
        for the timestamp.  The seconds component is a float since the
        modulo of total_seconds() plus the leap offset preserves any
        fractional second.
    """
    if timestamp is None:
        timestamp = datetime.datetime.utcnow()

    # GPS time does not observe leap seconds, so the leap seconds accumulated
    # between the GPS epoch and `timestamp` must be added back.
    # NOTE: the class defines get_gps_offset_for_date (lowercase); the
    # previous call to get_GPS_offset_for_date raised AttributeError.
    leap = LeapSeconds.get_gps_offset_for_date(timestamp)  # type: ignore

    secs_in_week = 604800
    delta = (timestamp - GPS_Epoch).total_seconds() + leap
    seconds = delta % secs_in_week
    week = int(math.floor(delta / secs_in_week))

    return (week, seconds)
def to_gps_seconds(timestamp) -> int:
    """Convert a datetime object to whole seconds since the GPS epoch.

    Arguments:
        timestamp (datetime.datetime): The datetime object to convert.

    Return:
        Number of seconds since the GPS epoch which the timestamp represents.

    Examples:
        >>> import datetime
        >>> to_gps_seconds(datetime.datetime(1980, 1, 6))
        0
        >>> to_gps_seconds(datetime.datetime(1980, 1, 7))
        86400
    """
    elapsed = timestamp - GPS_Epoch
    # Combine day and second components explicitly; microseconds are
    # intentionally dropped, matching the integer return type.
    return elapsed.days * 86400 + elapsed.seconds
def to_gmst(dt=None) -> float:
    """Convert a datetime / Julian date to GMST.

    Converts the given Python datetime or Julian date (float) to
    Greenwich Mean Sidereal Time (GMST) (in radians) using the formula
    from Vallado (2004).

    See:
        Vallado, Fundamentals of Astrodynamics and Applications, p. 192
        http://books.google.com/books?id=PJLlWzMBKjkC&lpg=PA956&vq=192&pg=PA192

    Arguments:
        dt (datetime.datetime or float): The datetime or Julian date (as a
            float) to convert to radians.
    """
    # Accept either a datetime (or None, meaning "now") or a precomputed
    # Julian date.
    jd = to_julian(dt) if (dt is None or type(dt) is datetime.datetime) else dt

    # Julian centuries of UT1 since the J2000.0 epoch.
    t_ut1 = (jd - 2451545.0) / 36525.0

    # GMST in seconds of time.
    gmst = (
        67310.54841
        + (876600 * 3600 + 8640184.812866) * t_ut1
        + 0.093104 * t_ut1 ** 2
        - 6.2e-6 * t_ut1 ** 3
    )

    # Convert from seconds to degrees, i.e.
    # 86400 seconds / 360 degrees = 240 seconds / degree, then to radians.
    gmst = math.radians(gmst / 240.0) % TwoPi

    # Defensive: Python's % with a positive modulus is already non-negative,
    # so this branch is a safeguard only (kept from the original).
    return gmst if gmst >= 0 else gmst + TwoPi
def to_julian(dt=None):
    """Convert a datetime to a Julian date.

    Converts a Python datetime to a Julian date, using the formula from
    Meeus (1991), as reproduced in Vallado (2004).

    See:
        Vallado, Fundamentals of Astrodynamics and Applications, p. 187
        http://books.google.com/books?id=PJLlWzMBKjkC&lpg=PA956&vq=187&pg=PA187

    Arguments:
        dt (datetime.datetime): The datetime to convert (current UTC time
            if omitted).

    Returns:
        The converted Julian date.
    """
    if dt is None:
        dt = datetime.datetime.utcnow()

    # In the Meeus algorithm, January and February count as months 13 and 14
    # of the preceding year.
    year, month = dt.year, dt.month
    if month < 3:
        year -= 1
        month += 12

    century = int(year / 100.0)
    gregorian_corr = 2 - century + int(century / 4.0)
    day_fraction = ((dt.second / 60.0 + dt.minute) / 60.0 + dt.hour) / 24.0

    julian = int(365.25 * (year + 4716))
    julian += int(30.6001 * (month + 1))
    julian += dt.day + gregorian_corr - 1524.5 + day_fraction
    return julian
def to_local_time(seconds: int, microseconds: int = 0) -> datetime.datetime:
    """Convert seconds / microseconds since the GPS epoch to a datetime.

    Converts the given number of seconds since the GPS Epoch (midnight
    on January 6th, 1980) to this computer's local time.

    Arguments:
        seconds: The number of seconds since the GPS epoch.
        microseconds (optional): The number of microseconds of the seconds
            since the GPS epoch.

    Returns:
        The datetime object defined as the GPS epoch + the supplied seconds
        and microseconds.

    Examples:
        >>> to_local_time(0)
        datetime.datetime(1980, 1, 6, 0, 0)

        >>> to_local_time(25 * 86400)
        datetime.datetime(1980, 1, 31, 0, 0)
    """
    return GPS_Epoch + datetime.timedelta(
        seconds=seconds, microseconds=microseconds
    )
def rfc3339_str_to_datetime(datestr: str) -> Optional[datetime.datetime]:
    """Convert an RFC 3339 string to a datetime.

    Convert an RFC 3339-formatted date string into a datetime object while
    attaching UTC timezone information.

    Arguments:
        datestr: The RFC 3339-formatted date string to convert.

    Returns:
        The datetime object, with UTC timezone information set, for the
        RFC 3339-formatted string, or None if datestr is None.

    Raises:
        ValueError: If datestr does not match RFC3339_Format.
    """
    if datestr is None:
        return None
    parsed = datetime.datetime.strptime(datestr, RFC3339_Format)
    return parsed.replace(tzinfo=datetime.timezone.utc)
class UTCLeapSeconds(object):
    """Leap second lookup table mapping UTC dates to GPS-UTC offsets.

    Loads leap second data from a pickled local file (``leapseconds.dat`` by
    default) and, when the local data is missing or expired, attempts to
    refresh it from the IETF leap-seconds list over HTTPS.
    """

    def __init__(self):
        # self._data is a dict with keys:
        #   "valid":       datetime until which the table is considered valid
        #   "leapseconds": list of (effective datetime, GPS offset) tuples
        self._data = None
        self._load_leap_second_data()

    @property
    def leapseconds(self):
        # List of (date, offset) tuples, oldest first.
        return self._data["leapseconds"]

    @property
    def valid_date(self):
        # Datetime after which the table should be refreshed.
        return self._data["valid"]

    def is_valid(self):
        """Return True if the loaded leap second data has not expired."""
        return datetime.datetime.now() < self._data["valid"]

    def get_current_gps_offset(self):
        """Return the most recently recorded GPS-UTC leap second offset."""
        return self._data["leapseconds"][-1][-1]

    def get_gps_offset_for_date(self, timestamp=None):
        """Return the GPS-UTC offset in effect at `timestamp`.

        Arguments:
            timestamp (datetime.datetime): The date to look up.  Current UTC
                time is used if nothing is provided.

        Raises:
            ValueError: If the timestamp is before the GPS epoch.
        """
        if timestamp is None:
            timestamp = datetime.datetime.utcnow()

        if timestamp < GPS_Epoch:
            e = "The timestamp date is before the GPS epoch"
            raise ValueError(e)

        for offset in reversed(self._data["leapseconds"]):
            # Offsets are stored as a tuple (date, offset)
            # indicating the `date` when `offset` took effect.
            if timestamp >= offset[0]:
                return offset[1]
        else:
            # Timestamp precedes the first recorded leap second after the
            # GPS epoch: no offset applies yet.
            return 0

    def _load_leap_second_data(self):
        """Load leap second data from disk, refreshing it if necessary.

        The file location comes from the ``leapseconds.filename`` config
        option, defaulting to ``leapseconds.dat`` in the AIT config
        directory.  If no usable local data is found, an update from the
        IETF is attempted via _update_leap_second_data().

        Raises:
            ValueError: If no local data exists and the update fails.
        """
        ls_file = ait.config.get(
            "leapseconds.filename",
            os.path.join(ait.config._directory, _DEFAULT_FILE_NAME),
        )
        try:
            log.info("Attempting to load leapseconds.dat")
            # NOTE(review): pickle.load of a local config file; safe only as
            # long as the file is trusted (it is written by this class).
            with open(ls_file, "rb") as outfile:
                self._data = pickle.load(outfile)
            log.info("Loaded leapseconds config file successfully")
        except IOError:
            log.info("Unable to locate leapseconds config file")
        # Refresh when nothing was loaded or the loaded table has expired.
        if not (self._data and self.is_valid()):
            try:
                self._update_leap_second_data()
            except ValueError:
                msg = (
                    "Leapsecond data update failed. "
                    "This may cause problems with some functionality"
                )
                log.warn(msg)
                if self._data:
                    # Stale data is better than none; keep it with a warning.
                    log.warn("Continuing with out of date leap second data")
                else:
                    raise ValueError("Could not load leap second data")
        else:
            t = self._data["valid"]
            log_t = t.strftime("%m/%d/%Y")
            log.info("Leapseconds data valid until %s", log_t)

    def _update_leap_second_data(self):
        """Updates the systems leap second information

        Pulls the latest leap second information from
        https://www.ietf.org/timezones/data/leap-seconds.list
        and updates the leapsecond config file.

        Raises:
            ValueError: If the connection to IETF does not return 200
            IOError: If the path to the leap seconds file is not valid
        """
        log.info("Attempting to acquire latest leapsecond data")
        ls_file = ait.config.get(
            "leapseconds.filename",
            os.path.join(ait.config._directory, _DEFAULT_FILE_NAME),
        )
        url = "https://www.ietf.org/timezones/data/leap-seconds.list"
        r = requests.get(url)
        if r.status_code != 200:
            msg = "Unable to locate latest timezone data. Connection to IETF failed"
            log.error(msg)
            raise ValueError(msg)
        text = r.text.split("\n")
        # Keep the "#@" expiry line and all tab-separated data lines; drop
        # every other comment line.
        lines = [line for line in text if line.startswith("#@") or not line.startswith("#")]
        data = {"valid": None, "leapseconds": []}
        # The "#@" line carries the table's expiry time, expressed (like all
        # timestamps in the file) as seconds since 1900-01-01.
        data["valid"] = datetime.datetime(1900, 1, 1) + datetime.timedelta(
            seconds=int(lines[0].split("\t")[1])
        )
        leap = 1
        for line in lines[1:-1]:
            t = datetime.datetime(1900, 1, 1) + datetime.timedelta(
                seconds=int(line.split("\t")[0])
            )
            if t < GPS_Epoch:
                continue
            # NOTE(review): the stored offset is the running count of leap
            # seconds since the GPS epoch (1, 2, 3, ...), not the raw
            # TAI-UTC value from the IETF file.
            data["leapseconds"].append((t, leap))
            leap += 1
        log.info("Leapsecond data processed")
        self._data = data
        with open(ls_file, "wb") as outfile:
            pickle.dump(data, outfile)
        log.info("Successfully generated leapseconds config file")
# Module-level singleton, constructed once on first import.  Construction may
# read leapseconds.dat and, if that data is missing or expired, perform a
# network fetch from the IETF.
if not LeapSeconds:
    LeapSeconds = UTCLeapSeconds()
| StarcoderdataPython |
4859441 | <filename>lndmanage/lib/lncli.py
"""
Handling lncli interaction.
"""
import os
import subprocess
import json
from pygments import highlight, lexers, formatters
from lndmanage import settings
import logging
logger = logging.getLogger(__name__)
logger.addHandler(logging.NullHandler())
class Lncli(object):
    """Thin wrapper around the lnd command line interface (lncli).

    Builds the base lncli invocation (RPC server, macaroon and TLS
    certificate flags) from the lndmanage config file and executes
    arbitrary lncli subcommands, pretty-printing JSON output.
    """

    def __init__(self, lncli_path, config_file):
        self.lncli_path = lncli_path

        config = settings.read_config(config_file)
        tls_cert = os.path.expanduser(config['network']['tls_cert_file'])
        macaroon = os.path.expanduser(
            config['network']['admin_macaroon_file'])
        host = config['network']['lnd_grpc_host']

        # Base command with connection flags; subcommand arguments are
        # appended per call in lncli().
        self.lncli_command = [
            self.lncli_path,
            '--rpcserver=' + host,
            '--macaroonpath=' + macaroon,
            '--tlscertpath=' + tls_cert
        ]

    def lncli(self, command):
        """
        Invokes the lncli command line interface for lnd.

        :param command: list of command line arguments
        :return:
            int: error code
        """
        full_command = self.lncli_command + command
        logger.debug('executing lncli %s', ' '.join(full_command))

        completed = subprocess.run(
            full_command,
            stdout=subprocess.PIPE,
            stderr=subprocess.PIPE,
        )

        try:
            # Validate that stdout is JSON before rendering it with
            # syntax highlighting.
            json.loads(completed.stdout)
            logger.info(highlight(
                completed.stdout,
                lexers.JsonLexer(),
                formatters.TerminalFormatter()
            ))
        except ValueError:
            # Errors and help text are usually not JSON; print verbatim.
            logger.info(completed.stdout.decode('utf-8'))
            logger.info(completed.stderr.decode('utf-8'))

        return completed.returncode
| StarcoderdataPython |
5135940 | <gh_stars>0
from .user import User
from .token import Token
from .client import Client
from .authorizationcode import AuthorizationCode
| StarcoderdataPython |
52873 | <gh_stars>1-10
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from enum import Enum
class THOST_TE_RESUME_TYPE(Enum):
    # Flow-resume mode constants (restart / resume / quick); names follow the
    # CTP/THOST trader API convention -- exact semantics defined by that API.
    TERT_RESTART = 0
    TERT_RESUME = 1
    TERT_QUICK = 2
class TThostFtdcExchangePropertyType(Enum):
    """Exchange property type (original: 交易所属性类型)."""

    THOST_FTDC_EXP_Normal = 48  # normal (正常)
    """正常"""
    THOST_FTDC_EXP_GenOrderByTrade = 49  # generate order from trade (根据成交生成报单)
    """根据成交生成报单"""
class TThostFtdcIdCardTypeType(Enum):
"""证件类型类型"""
THOST_FTDC_ICT_EID = 48
"""组织机构代码"""
THOST_FTDC_ICT_IDCard = 49
"""中国公民身份证"""
THOST_FTDC_ICT_OfficerIDCard = 50
"""军官证"""
THOST_FTDC_ICT_PoliceIDCard = 51
"""警官证"""
THOST_FTDC_ICT_SoldierIDCard = 52
"""士兵证"""
THOST_FTDC_ICT_HouseholdRegister = 53
"""户口簿"""
THOST_FTDC_ICT_Passport = 54
"""护照"""
THOST_FTDC_ICT_TaiwanCompatriotIDCard = 55
"""台胞证"""
THOST_FTDC_ICT_HomeComingCard = 56
"""回乡证"""
THOST_FTDC_ICT_LicenseNo = 57
"""营业执照号"""
THOST_FTDC_ICT_TaxNo = 65
"""税务登记号/当地纳税ID"""
THOST_FTDC_ICT_HMMainlandTravelPermit = 66
"""港澳居民来往内地通行证"""
THOST_FTDC_ICT_TwMainlandTravelPermit = 67
"""台湾居民来往大陆通行证"""
THOST_FTDC_ICT_DrivingLicense = 68
"""驾照"""
THOST_FTDC_ICT_SocialID = 70
"""当地社保ID"""
THOST_FTDC_ICT_LocalID = 71
"""当地身份证"""
THOST_FTDC_ICT_BusinessRegistration = 72
"""商业登记证"""
THOST_FTDC_ICT_HKMCIDCard = 73
"""港澳永久性居民身份证"""
THOST_FTDC_ICT_AccountsPermits = 74
"""人行开户许可证"""
THOST_FTDC_ICT_FrgPrmtRdCard = 75
"""外国人永久居留证"""
THOST_FTDC_ICT_CptMngPrdLetter = 76
"""资管产品备案函"""
THOST_FTDC_ICT_OtherCard = 120
"""其他证件"""
class TThostFtdcInvestorRangeType(Enum):
"""投资者范围类型"""
THOST_FTDC_IR_All = 49
"""所有"""
THOST_FTDC_IR_Group = 50
"""投资者组"""
THOST_FTDC_IR_Single = 51
"""单一投资者"""
class TThostFtdcDepartmentRangeType(Enum):
"""投资者范围类型"""
THOST_FTDC_DR_All = 49
"""所有"""
THOST_FTDC_DR_Group = 50
"""组织架构"""
THOST_FTDC_DR_Single = 51
"""单一投资者"""
class TThostFtdcDataSyncStatusType(Enum):
"""数据同步状态类型"""
THOST_FTDC_DS_Asynchronous = 49
"""未同步"""
THOST_FTDC_DS_Synchronizing = 50
"""同步中"""
THOST_FTDC_DS_Synchronized = 51
"""已同步"""
class TThostFtdcBrokerDataSyncStatusType(Enum):
"""经纪公司数据同步状态类型"""
THOST_FTDC_BDS_Synchronized = 49
"""已同步"""
THOST_FTDC_BDS_Synchronizing = 50
"""同步中"""
class TThostFtdcExchangeConnectStatusType(Enum):
"""交易所连接状态类型"""
THOST_FTDC_ECS_NoConnection = 49
"""没有任何连接"""
THOST_FTDC_ECS_QryInstrumentSent = 50
"""已经发出合约查询请求"""
THOST_FTDC_ECS_GotInformation = 57
"""已经获取信息"""
class TThostFtdcTraderConnectStatusType(Enum):
"""交易所交易员连接状态类型"""
THOST_FTDC_TCS_NotConnected = 49
"""没有任何连接"""
THOST_FTDC_TCS_Connected = 50
"""已经连接"""
THOST_FTDC_TCS_QryInstrumentSent = 51
"""已经发出合约查询请求"""
THOST_FTDC_TCS_SubPrivateFlow = 52
"""订阅私有流"""
class TThostFtdcFunctionCodeType(Enum):
"""功能代码类型"""
THOST_FTDC_FC_DataAsync = 49
"""数据异步化"""
THOST_FTDC_FC_ForceUserLogout = 50
"""强制用户登出"""
THOST_FTDC_FC_UserPasswordUpdate = 51
"""变更管理用户口令"""
THOST_FTDC_FC_BrokerPasswordUpdate = 52
"""变更经纪公司口令"""
THOST_FTDC_FC_InvestorPasswordUpdate = 53
"""变更投资者口令"""
THOST_FTDC_FC_OrderInsert = 54
"""报单插入"""
THOST_FTDC_FC_OrderAction = 55
"""报单操作"""
THOST_FTDC_FC_SyncSystemData = 56
"""同步系统数据"""
THOST_FTDC_FC_SyncBrokerData = 57
"""同步经纪公司数据"""
THOST_FTDC_FC_BachSyncBrokerData = 65
"""批量同步经纪公司数据"""
THOST_FTDC_FC_SuperQuery = 66
"""超级查询"""
THOST_FTDC_FC_ParkedOrderInsert = 67
"""预埋报单插入"""
THOST_FTDC_FC_ParkedOrderAction = 68
"""预埋报单操作"""
THOST_FTDC_FC_SyncOTP = 69
"""同步动态令牌"""
THOST_FTDC_FC_DeleteOrder = 70
"""删除未知单"""
class TThostFtdcBrokerFunctionCodeType(Enum):
"""经纪公司功能代码类型"""
THOST_FTDC_BFC_ForceUserLogout = 49
"""强制用户登出"""
THOST_FTDC_BFC_UserPasswordUpdate = 50
"""变更用户口令"""
THOST_FTDC_BFC_SyncBrokerData = 51
"""同步经纪公司数据"""
THOST_FTDC_BFC_BachSyncBrokerData = 52
"""批量同步经纪公司数据"""
THOST_FTDC_BFC_OrderInsert = 53
"""报单插入"""
THOST_FTDC_BFC_OrderAction = 54
"""报单操作"""
THOST_FTDC_BFC_AllQuery = 55
"""全部查询"""
THOST_FTDC_BFC_log = 97
"""系统功能:登入/登出/修改密码等"""
THOST_FTDC_BFC_BaseQry = 98
"""基本查询:查询基础数据,如合约,交易所等常量"""
THOST_FTDC_BFC_TradeQry = 99
"""交易查询:如查成交,委托"""
THOST_FTDC_BFC_Trade = 100
"""交易功能:报单,撤单"""
THOST_FTDC_BFC_Virement = 101
"""银期转账"""
THOST_FTDC_BFC_Risk = 102
"""风险监控"""
THOST_FTDC_BFC_Session = 103
"""查询/管理:查询会话,踢人等"""
THOST_FTDC_BFC_RiskNoticeCtl = 104
"""风控通知控制"""
THOST_FTDC_BFC_RiskNotice = 105
"""风控通知发送"""
THOST_FTDC_BFC_BrokerDeposit = 106
"""察看经纪公司资金权限"""
THOST_FTDC_BFC_QueryFund = 107
"""资金查询"""
THOST_FTDC_BFC_QueryOrder = 108
"""报单查询"""
THOST_FTDC_BFC_QueryTrade = 109
"""成交查询"""
THOST_FTDC_BFC_QueryPosition = 110
"""持仓查询"""
THOST_FTDC_BFC_QueryMarketData = 111
"""行情查询"""
THOST_FTDC_BFC_QueryUserEvent = 112
"""用户事件查询"""
THOST_FTDC_BFC_QueryRiskNotify = 113
"""风险通知查询"""
THOST_FTDC_BFC_QueryFundChange = 114
"""出入金查询"""
THOST_FTDC_BFC_QueryInvestor = 115
"""投资者信息查询"""
THOST_FTDC_BFC_QueryTradingCode = 116
"""交易编码查询"""
THOST_FTDC_BFC_ForceClose = 117
"""强平"""
THOST_FTDC_BFC_PressTest = 118
"""压力测试"""
THOST_FTDC_BFC_RemainCalc = 119
"""权益反算"""
THOST_FTDC_BFC_NetPositionInd = 120
"""净持仓保证金指标"""
THOST_FTDC_BFC_RiskPredict = 121
"""风险预算"""
THOST_FTDC_BFC_DataExport = 122
"""数据导出"""
THOST_FTDC_BFC_RiskTargetSetup = 65
"""风控指标设置"""
THOST_FTDC_BFC_MarketDataWarn = 66
"""行情预警"""
THOST_FTDC_BFC_QryBizNotice = 67
"""业务通知查询"""
THOST_FTDC_BFC_CfgBizNotice = 68
"""业务通知模板设置"""
THOST_FTDC_BFC_SyncOTP = 69
"""同步动态令牌"""
THOST_FTDC_BFC_SendBizNotice = 70
"""发送业务通知"""
THOST_FTDC_BFC_CfgRiskLevelStd = 71
"""风险级别标准设置"""
THOST_FTDC_BFC_TbCommand = 72
"""交易终端应急功能"""
THOST_FTDC_BFC_DeleteOrder = 74
"""删除未知单"""
THOST_FTDC_BFC_ParkedOrderInsert = 75
"""预埋报单插入"""
THOST_FTDC_BFC_ParkedOrderAction = 76
"""预埋报单操作"""
THOST_FTDC_BFC_ExecOrderNoCheck = 77
"""资金不够仍允许行权"""
THOST_FTDC_BFC_Designate = 78
"""指定"""
THOST_FTDC_BFC_StockDisposal = 79
"""证券处置"""
THOST_FTDC_BFC_BrokerDepositWarn = 81
"""席位资金预警"""
THOST_FTDC_BFC_CoverWarn = 83
"""备兑不足预警"""
THOST_FTDC_BFC_PreExecOrder = 84
"""行权试算"""
THOST_FTDC_BFC_ExecOrderRisk = 80
"""行权交收风险"""
THOST_FTDC_BFC_PosiLimitWarn = 85
"""持仓限额预警"""
THOST_FTDC_BFC_QryPosiLimit = 86
"""持仓限额查询"""
THOST_FTDC_BFC_FBSign = 87
"""银期签到签退"""
THOST_FTDC_BFC_FBAccount = 88
"""银期签约解约"""
class TThostFtdcOrderActionStatusType(Enum):
"""报单操作状态类型"""
THOST_FTDC_OAS_Submitted = 97
"""已经提交"""
THOST_FTDC_OAS_Accepted = 98
"""已经接受"""
THOST_FTDC_OAS_Rejected = 99
"""已经被拒绝"""
class TThostFtdcOrderStatusType(Enum):
"""报单状态类型"""
THOST_FTDC_OST_AllTraded = 48
"""全部成交"""
THOST_FTDC_OST_PartTradedQueueing = 49
"""部分成交还在队列中"""
THOST_FTDC_OST_PartTradedNotQueueing = 50
"""部分成交不在队列中"""
THOST_FTDC_OST_NoTradeQueueing = 51
"""未成交还在队列中"""
THOST_FTDC_OST_NoTradeNotQueueing = 52
"""未成交不在队列中"""
THOST_FTDC_OST_Canceled = 53
"""撤单"""
THOST_FTDC_OST_Unknown = 97
"""未知"""
THOST_FTDC_OST_NotTouched = 98
"""尚未触发"""
THOST_FTDC_OST_Touched = 99
"""已触发"""
class TThostFtdcOrderSubmitStatusType(Enum):
"""报单提交状态类型"""
THOST_FTDC_OSS_InsertSubmitted = 48
"""已经提交"""
THOST_FTDC_OSS_CancelSubmitted = 49
"""撤单已经提交"""
THOST_FTDC_OSS_ModifySubmitted = 50
"""修改已经提交"""
THOST_FTDC_OSS_Accepted = 51
"""已经接受"""
THOST_FTDC_OSS_InsertRejected = 52
"""报单已经被拒绝"""
THOST_FTDC_OSS_CancelRejected = 53
"""撤单已经被拒绝"""
THOST_FTDC_OSS_ModifyRejected = 54
"""改单已经被拒绝"""
class TThostFtdcPositionDateType(Enum):
"""持仓日期类型"""
THOST_FTDC_PSD_Today = 49
"""今日持仓"""
THOST_FTDC_PSD_History = 50
"""历史持仓"""
class TThostFtdcPositionDateTypeType(Enum):
"""持仓日期类型类型"""
THOST_FTDC_PDT_UseHistory = 49
"""使用历史持仓"""
THOST_FTDC_PDT_NoUseHistory = 50
"""不使用历史持仓"""
class TThostFtdcTradingRoleType(Enum):
"""交易角色类型"""
THOST_FTDC_ER_Broker = 49
"""代理"""
THOST_FTDC_ER_Host = 50
"""自营"""
THOST_FTDC_ER_Maker = 51
"""做市商"""
class TThostFtdcProductClassType(Enum):
"""产品类型类型"""
THOST_FTDC_PC_Futures = 49
"""期货"""
THOST_FTDC_PC_Options = 50
"""期货期权"""
THOST_FTDC_PC_Combination = 51
"""组合"""
THOST_FTDC_PC_Spot = 52
"""即期"""
THOST_FTDC_PC_EFP = 53
"""期转现"""
THOST_FTDC_PC_SpotOption = 54
"""现货期权"""
class TThostFtdcInstLifePhaseType(Enum):
"""合约生命周期状态类型"""
THOST_FTDC_IP_NotStart = 48
"""未上市"""
THOST_FTDC_IP_Started = 49
"""上市"""
THOST_FTDC_IP_Pause = 50
"""停牌"""
THOST_FTDC_IP_Expired = 51
"""到期"""
class TThostFtdcDirectionType(Enum):
"""买卖方向类型"""
THOST_FTDC_D_Buy = 48
"""买"""
THOST_FTDC_D_Sell = 49
"""卖"""
class TThostFtdcPositionTypeType(Enum):
"""持仓类型类型"""
THOST_FTDC_PT_Net = 49
"""净持仓"""
THOST_FTDC_PT_Gross = 50
"""综合持仓"""
class TThostFtdcPosiDirectionType(Enum):
"""持仓多空方向类型"""
THOST_FTDC_PD_Net = 49
"""净"""
THOST_FTDC_PD_Long = 50
"""多头"""
THOST_FTDC_PD_Short = 51
"""空头"""
class TThostFtdcSysSettlementStatusType(Enum):
"""系统结算状态类型"""
THOST_FTDC_SS_NonActive = 49
"""不活跃"""
THOST_FTDC_SS_Startup = 50
"""启动"""
THOST_FTDC_SS_Operating = 51
"""操作"""
THOST_FTDC_SS_Settlement = 52
"""结算"""
THOST_FTDC_SS_SettlementFinished = 53
"""结算完成"""
class TThostFtdcRatioAttrType(Enum):
"""费率属性类型"""
THOST_FTDC_RA_Trade = 48
"""交易费率"""
THOST_FTDC_RA_Settlement = 49
"""结算费率"""
class TThostFtdcHedgeFlagType(Enum):
"""投机套保标志类型"""
THOST_FTDC_HF_Speculation = 49
"""投机"""
THOST_FTDC_HF_Arbitrage = 50
"""套利"""
THOST_FTDC_HF_Hedge = 51
"""套保"""
THOST_FTDC_HF_MarketMaker = 53
"""做市商"""
THOST_FTDC_HF_SpecHedge = 54
"""第一腿投机第二腿套保 大商所专用"""
THOST_FTDC_HF_HedgeSpec = 55
"""第一腿套保第二腿投机 大商所专用"""
class TThostFtdcBillHedgeFlagType(Enum):
"""投机套保标志类型"""
THOST_FTDC_BHF_Speculation = 49
"""投机"""
THOST_FTDC_BHF_Arbitrage = 50
"""套利"""
THOST_FTDC_BHF_Hedge = 51
"""套保"""
class TThostFtdcClientIDTypeType(Enum):
"""交易编码类型类型"""
THOST_FTDC_CIDT_Speculation = 49
"""投机"""
THOST_FTDC_CIDT_Arbitrage = 50
"""套利"""
THOST_FTDC_CIDT_Hedge = 51
"""套保"""
THOST_FTDC_CIDT_MarketMaker = 53
"""做市商"""
class TThostFtdcOrderPriceTypeType(Enum):
"""报单价格条件类型"""
THOST_FTDC_OPT_AnyPrice = 49
"""任意价"""
THOST_FTDC_OPT_LimitPrice = 50
"""限价"""
THOST_FTDC_OPT_BestPrice = 51
"""最优价"""
THOST_FTDC_OPT_LastPrice = 52
"""最新价"""
THOST_FTDC_OPT_LastPricePlusOneTicks = 53
"""最新价浮动上浮1个ticks"""
THOST_FTDC_OPT_LastPricePlusTwoTicks = 54
"""最新价浮动上浮2个ticks"""
THOST_FTDC_OPT_LastPricePlusThreeTicks = 55
"""最新价浮动上浮3个ticks"""
THOST_FTDC_OPT_AskPrice1 = 56
"""卖一价"""
THOST_FTDC_OPT_AskPrice1PlusOneTicks = 57
"""卖一价浮动上浮1个ticks"""
THOST_FTDC_OPT_AskPrice1PlusTwoTicks = 65
"""卖一价浮动上浮2个ticks"""
THOST_FTDC_OPT_AskPrice1PlusThreeTicks = 66
"""卖一价浮动上浮3个ticks"""
THOST_FTDC_OPT_BidPrice1 = 67
"""买一价"""
THOST_FTDC_OPT_BidPrice1PlusOneTicks = 68
"""买一价浮动上浮1个ticks"""
THOST_FTDC_OPT_BidPrice1PlusTwoTicks = 69
"""买一价浮动上浮2个ticks"""
THOST_FTDC_OPT_BidPrice1PlusThreeTicks = 70
"""买一价浮动上浮3个ticks"""
THOST_FTDC_OPT_FiveLevelPrice = 71
"""五档价"""
class TThostFtdcOffsetFlagType(Enum):
"""开平标志类型"""
THOST_FTDC_OF_Open = 48
"""开仓"""
THOST_FTDC_OF_Close = 49
"""平仓"""
THOST_FTDC_OF_ForceClose = 50
"""强平"""
THOST_FTDC_OF_CloseToday = 51
"""平今"""
THOST_FTDC_OF_CloseYesterday = 52
"""平昨"""
THOST_FTDC_OF_ForceOff = 53
"""强减"""
THOST_FTDC_OF_LocalForceClose = 54
"""本地强平"""
class TThostFtdcForceCloseReasonType(Enum):
"""强平原因类型"""
THOST_FTDC_FCC_NotForceClose = 48
"""非强平"""
THOST_FTDC_FCC_LackDeposit = 49
"""资金不足"""
THOST_FTDC_FCC_ClientOverPositionLimit = 50
"""客户超仓"""
THOST_FTDC_FCC_MemberOverPositionLimit = 51
"""会员超仓"""
THOST_FTDC_FCC_NotMultiple = 52
"""持仓非整数倍"""
THOST_FTDC_FCC_Violation = 53
"""违规"""
THOST_FTDC_FCC_Other = 54
"""其它"""
THOST_FTDC_FCC_PersonDeliv = 55
"""自然人临近交割"""
class TThostFtdcOrderTypeType(Enum):
"""报单类型类型"""
THOST_FTDC_ORDT_Normal = 48
"""正常"""
THOST_FTDC_ORDT_DeriveFromQuote = 49
"""报价衍生"""
THOST_FTDC_ORDT_DeriveFromCombination = 50
"""组合衍生"""
THOST_FTDC_ORDT_Combination = 51
"""组合报单"""
THOST_FTDC_ORDT_ConditionalOrder = 52
"""条件单"""
THOST_FTDC_ORDT_Swap = 53
"""互换单"""
THOST_FTDC_ORDT_DeriveFromBlockTrade = 54
"""大宗交易成交衍生"""
THOST_FTDC_ORDT_DeriveFromEFPTrade = 55
"""期转现成交衍生"""
class TThostFtdcTimeConditionType(Enum):
"""有效期类型类型"""
THOST_FTDC_TC_IOC = 49
"""立即完成,否则撤销"""
THOST_FTDC_TC_GFS = 50
"""本节有效"""
THOST_FTDC_TC_GFD = 51
"""当日有效"""
THOST_FTDC_TC_GTD = 52
"""指定日期前有效"""
THOST_FTDC_TC_GTC = 53
"""撤销前有效"""
THOST_FTDC_TC_GFA = 54
"""集合竞价有效"""
class TThostFtdcVolumeConditionType(Enum):
"""成交量类型类型"""
THOST_FTDC_VC_AV = 49
"""任何数量"""
THOST_FTDC_VC_MV = 50
"""最小数量"""
THOST_FTDC_VC_CV = 51
"""全部数量"""
class TThostFtdcContingentConditionType(Enum):
"""触发条件类型"""
THOST_FTDC_CC_Immediately = 49
"""立即"""
THOST_FTDC_CC_Touch = 50
"""止损"""
THOST_FTDC_CC_TouchProfit = 51
"""止赢"""
THOST_FTDC_CC_ParkedOrder = 52
"""预埋单"""
THOST_FTDC_CC_LastPriceGreaterThanStopPrice = 53
"""最新价大于条件价"""
THOST_FTDC_CC_LastPriceGreaterEqualStopPrice = 54
"""最新价大于等于条件价"""
THOST_FTDC_CC_LastPriceLesserThanStopPrice = 55
"""最新价小于条件价"""
THOST_FTDC_CC_LastPriceLesserEqualStopPrice = 56
"""最新价小于等于条件价"""
THOST_FTDC_CC_AskPriceGreaterThanStopPrice = 57
"""卖一价大于条件价"""
THOST_FTDC_CC_AskPriceGreaterEqualStopPrice = 65
"""卖一价大于等于条件价"""
THOST_FTDC_CC_AskPriceLesserThanStopPrice = 66
"""卖一价小于条件价"""
THOST_FTDC_CC_AskPriceLesserEqualStopPrice = 67
"""卖一价小于等于条件价"""
THOST_FTDC_CC_BidPriceGreaterThanStopPrice = 68
"""买一价大于条件价"""
THOST_FTDC_CC_BidPriceGreaterEqualStopPrice = 69
"""买一价大于等于条件价"""
THOST_FTDC_CC_BidPriceLesserThanStopPrice = 70
"""买一价小于条件价"""
THOST_FTDC_CC_BidPriceLesserEqualStopPrice = 72
"""买一价小于等于条件价"""
class TThostFtdcActionFlagType(Enum):
"""操作标志类型"""
THOST_FTDC_AF_Delete = 48
"""删除"""
THOST_FTDC_AF_Modify = 51
"""修改"""
class TThostFtdcTradingRightType(Enum):
"""交易权限类型"""
THOST_FTDC_TR_Allow = 48
"""可以交易"""
THOST_FTDC_TR_CloseOnly = 49
"""只能平仓"""
THOST_FTDC_TR_Forbidden = 50
"""不能交易"""
class TThostFtdcOrderSourceType(Enum):
"""报单来源类型"""
THOST_FTDC_OSRC_Participant = 48
"""来自参与者"""
THOST_FTDC_OSRC_Administrator = 49
"""来自管理员"""
class TThostFtdcTradeTypeType(Enum):
"""成交类型类型"""
THOST_FTDC_TRDT_SplitCombinatio = 110
"""组合持仓拆分为单一持仓,初始化不应包含该类型的持仓"""
THOST_FTDC_TRDT_Common = 48
"""普通成交"""
THOST_FTDC_TRDT_OptionsExecution = 49
"""期权执行"""
THOST_FTDC_TRDT_OTC = 50
"""OTC成交"""
THOST_FTDC_TRDT_EFPDerived = 51
"""期转现衍生成交"""
THOST_FTDC_TRDT_CombinationDerived = 52
"""组合衍生成交"""
THOST_FTDC_TRDT_BlockTrade = 53
"""大宗交易成交"""
class TThostFtdcPriceSourceType(Enum):
"""成交价来源类型"""
THOST_FTDC_PSRC_LastPrice = 48
"""前成交价"""
THOST_FTDC_PSRC_Buy = 49
"""买委托价"""
THOST_FTDC_PSRC_Sell = 50
"""卖委托价"""
THOST_FTDC_PSRC_OTC = 51
"""场外成交价"""
class TThostFtdcInstrumentStatusType(Enum):
"""合约交易状态类型"""
THOST_FTDC_IS_BeforeTrading = 48
"""开盘前"""
THOST_FTDC_IS_NoTrading = 49
"""非交易"""
THOST_FTDC_IS_Continous = 50
"""连续交易"""
THOST_FTDC_IS_AuctionOrdering = 51
"""集合竞价报单"""
THOST_FTDC_IS_AuctionBalance = 52
"""集合竞价价格平衡"""
THOST_FTDC_IS_AuctionMatch = 53
"""集合竞价撮合"""
THOST_FTDC_IS_Closed = 54
"""收盘"""
class TThostFtdcInstStatusEnterReasonType(Enum):
"""品种进入交易状态原因类型"""
THOST_FTDC_IER_Automatic = 49
"""自动切换"""
THOST_FTDC_IER_Manual = 50
"""手动切换"""
THOST_FTDC_IER_Fuse = 51
"""熔断"""
class TThostFtdcBatchStatusType(Enum):
"""处理状态类型"""
THOST_FTDC_BS_NoUpload = 49
"""未上传"""
THOST_FTDC_BS_Uploaded = 50
"""已上传"""
THOST_FTDC_BS_Failed = 51
"""审核失败"""
class TThostFtdcReturnStyleType(Enum):
"""按品种返还方式类型"""
THOST_FTDC_RS_All = 49
"""按所有品种"""
THOST_FTDC_RS_ByProduct = 50
"""按品种"""
class TThostFtdcReturnPatternType(Enum):
"""返还模式类型"""
THOST_FTDC_RP_ByVolume = 49
"""按成交手数"""
THOST_FTDC_RP_ByFeeOnHand = 50
"""按留存手续费"""
class TThostFtdcReturnLevelType(Enum):
"""返还级别类型"""
THOST_FTDC_RL_Level1 = 49
"""级别1"""
THOST_FTDC_RL_Level2 = 50
"""级别2"""
THOST_FTDC_RL_Level3 = 51
"""级别3"""
THOST_FTDC_RL_Level4 = 52
"""级别4"""
THOST_FTDC_RL_Level5 = 53
"""级别5"""
THOST_FTDC_RL_Level6 = 54
"""级别6"""
THOST_FTDC_RL_Level7 = 55
"""级别7"""
THOST_FTDC_RL_Level8 = 56
"""级别8"""
THOST_FTDC_RL_Level9 = 57
"""级别9"""
class TThostFtdcReturnStandardType(Enum):
"""返还标准类型"""
THOST_FTDC_RSD_ByPeriod = 49
"""分阶段返还"""
THOST_FTDC_RSD_ByStandard = 50
"""按某一标准"""
class TThostFtdcMortgageTypeType(Enum):
"""质押类型类型"""
THOST_FTDC_MT_Out = 48
"""质出"""
THOST_FTDC_MT_In = 49
"""质入"""
class TThostFtdcInvestorSettlementParamIDType(Enum):
"""投资者结算参数代码类型"""
THOST_FTDC_ISPI_MortgageRatio = 52
"""质押比例"""
THOST_FTDC_ISPI_MarginWay = 53
"""保证金算法"""
THOST_FTDC_ISPI_BillDeposit = 57
"""结算单结存是否包含质押"""
class TThostFtdcExchangeSettlementParamIDType(Enum):
"""交易所结算参数代码类型"""
THOST_FTDC_ESPI_MortgageRatio = 49
"""质押比例"""
THOST_FTDC_ESPI_OtherFundItem = 50
"""分项资金导入项"""
THOST_FTDC_ESPI_OtherFundImport = 51
"""分项资金入交易所出入金"""
THOST_FTDC_ESPI_CFFEXMinPrepa = 54
"""中金所开户最低可用金额"""
THOST_FTDC_ESPI_CZCESettlementType = 55
"""郑商所结算方式"""
THOST_FTDC_ESPI_ExchDelivFeeMode = 57
"""交易所交割手续费收取方式"""
THOST_FTDC_ESPI_DelivFeeMode = 48
"""投资者交割手续费收取方式"""
THOST_FTDC_ESPI_CZCEComMarginType = 65
"""郑商所组合持仓保证金收取方式"""
THOST_FTDC_ESPI_DceComMarginType = 66
"""大商所套利保证金是否优惠"""
THOST_FTDC_ESPI_OptOutDisCountRate = 97
"""虚值期权保证金优惠比率"""
THOST_FTDC_ESPI_OptMiniGuarantee = 98
"""最低保障系数"""
class TThostFtdcSystemParamIDType(Enum):
"""系统参数代码类型"""
THOST_FTDC_SPI_InvestorIDMinLength = 49
"""投资者代码最小长度"""
THOST_FTDC_SPI_AccountIDMinLength = 50
"""投资者帐号代码最小长度"""
THOST_FTDC_SPI_UserRightLogon = 51
"""投资者开户默认登录权限"""
THOST_FTDC_SPI_SettlementBillTrade = 52
"""投资者交易结算单成交汇总方式"""
THOST_FTDC_SPI_TradingCode = 53
"""统一开户更新交易编码方式"""
THOST_FTDC_SPI_CheckFund = 54
"""结算是否判断存在未复核的出入金和分项资金"""
THOST_FTDC_SPI_CommModelRight = 55
"""是否启用手续费模板数据权限"""
THOST_FTDC_SPI_MarginModelRight = 57
"""是否启用保证金率模板数据权限"""
THOST_FTDC_SPI_IsStandardActive = 56
"""是否规范用户才能激活"""
THOST_FTDC_SPI_UploadSettlementFile = 85
"""上传的交易所结算文件路径"""
THOST_FTDC_SPI_DownloadCSRCFile = 68
"""上报保证金监控中心文件路径"""
THOST_FTDC_SPI_SettlementBillFile = 83
"""生成的结算单文件路径"""
THOST_FTDC_SPI_CSRCOthersFile = 67
"""证监会文件标识"""
THOST_FTDC_SPI_InvestorPhoto = 80
"""投资者照片路径"""
THOST_FTDC_SPI_CSRCData = 82
"""全结经纪公司上传文件路径"""
THOST_FTDC_SPI_InvestorPwdModel = 73
"""开户密码录入方式"""
THOST_FTDC_SPI_CFFEXInvestorSettleFile = 70
"""投资者中金所结算文件下载路径"""
THOST_FTDC_SPI_InvestorIDType = 97
"""投资者代码编码方式"""
THOST_FTDC_SPI_FreezeMaxReMain = 114
"""休眠户最高权益"""
THOST_FTDC_SPI_IsSync = 65
"""手续费相关操作实时上场开关"""
THOST_FTDC_SPI_RelieveOpenLimit = 79
"""解除开仓权限限制"""
THOST_FTDC_SPI_IsStandardFreeze = 88
"""是否规范用户才能休眠"""
THOST_FTDC_SPI_CZCENormalProductHedge = 66
"""郑商所是否开放所有品种套保交易"""
class TThostFtdcTradeParamIDType(Enum):
"""交易系统参数代码类型"""
THOST_FTDC_TPID_EncryptionStandard = 69
"""系统加密算法"""
THOST_FTDC_TPID_RiskMode = 82
"""系统风险算法"""
THOST_FTDC_TPID_RiskModeGlobal = 71
"""系统风险算法是否全局 0-否 1-是"""
THOST_FTDC_TPID_modeEncode = 80
"""密码加密算法"""
THOST_FTDC_TPID_tickMode = 84
"""价格小数位数参数"""
THOST_FTDC_TPID_SingleUserSessionMaxNum = 83
"""用户最大会话数"""
THOST_FTDC_TPID_LoginFailMaxNum = 76
"""最大连续登录失败数"""
THOST_FTDC_TPID_IsAuthForce = 65
"""是否强制认证"""
THOST_FTDC_TPID_IsPosiFreeze = 70
"""是否冻结证券持仓"""
THOST_FTDC_TPID_IsPosiLimit = 77
"""是否限仓"""
THOST_FTDC_TPID_ForQuoteTimeInterval = 81
"""郑商所询价时间间隔"""
THOST_FTDC_TPID_IsFuturePosiLimit = 66
"""是否期货限仓"""
THOST_FTDC_TPID_IsFutureOrderFreq = 67
"""是否期货下单频率限制"""
THOST_FTDC_TPID_IsExecOrderProfit = 72
"""行权冻结是否计算盈利"""
THOST_FTDC_TPID_IsCheckBankAcc = 73
"""银期开户是否验证开户银行卡号是否是预留银行账户"""
THOST_FTDC_TPID_PasswordDeadLine = 74
"""弱密码最后修改日期"""
THOST_FTDC_TPID_IsStrongPassword = 75
"""强密码校验"""
THOST_FTDC_TPID_BalanceMorgage = 97
"""自有资金质押比"""
THOST_FTDC_TPID_MinPwdLen = 79
"""最小密码长度"""
THOST_FTDC_TPID_LoginFailMaxNumForIP = 85
"""IP当日最大登陆失败次数"""
THOST_FTDC_TPID_PasswordPeriod = 86
"""密码有效期"""
class TThostFtdcFileIDType(Enum):
"""文件标识类型"""
THOST_FTDC_FI_SettlementFund = 70
"""资金数据"""
THOST_FTDC_FI_Trade = 84
"""成交数据"""
THOST_FTDC_FI_InvestorPosition = 80
"""投资者持仓数据"""
THOST_FTDC_FI_SubEntryFund = 79
"""投资者分项资金数据"""
THOST_FTDC_FI_CZCECombinationPos = 67
"""组合持仓数据"""
THOST_FTDC_FI_CSRCData = 82
"""上报保证金监控中心数据"""
THOST_FTDC_FI_CZCEClose = 76
"""郑商所平仓了结数据"""
THOST_FTDC_FI_CZCENoClose = 78
"""郑商所非平仓了结数据"""
THOST_FTDC_FI_PositionDtl = 68
"""持仓明细数据"""
THOST_FTDC_FI_OptionStrike = 83
"""期权执行文件"""
THOST_FTDC_FI_SettlementPriceComparison = 77
"""结算价比对文件"""
THOST_FTDC_FI_NonTradePosChange = 66
"""上期所非持仓变动明细"""
class TThostFtdcFileTypeType(Enum):
"""文件上传类型类型"""
THOST_FTDC_FUT_Settlement = 48
"""结算"""
THOST_FTDC_FUT_Check = 49
"""核对"""
class TThostFtdcFileFormatType(Enum):
"""文件格式类型"""
THOST_FTDC_FFT_Txt = 48
"""文本文件(.txt)"""
THOST_FTDC_FFT_Zip = 49
"""压缩文件(.zip)"""
THOST_FTDC_FFT_DBF = 50
"""DBF文件(.dbf)"""
class TThostFtdcFileUploadStatusType(Enum):
"""文件状态类型"""
THOST_FTDC_FUS_SucceedUpload = 49
"""上传成功"""
THOST_FTDC_FUS_FailedUpload = 50
"""上传失败"""
THOST_FTDC_FUS_SucceedLoad = 51
"""导入成功"""
THOST_FTDC_FUS_PartSucceedLoad = 52
"""导入部分成功"""
THOST_FTDC_FUS_FailedLoad = 53
"""导入失败"""
class TThostFtdcTransferDirectionType(Enum):
"""移仓方向类型"""
THOST_FTDC_TD_Out = 48
"""移出"""
THOST_FTDC_TD_In = 49
"""移入"""
class TThostFtdcSpecialCreateRuleType(Enum):
"""特殊的创建规则类型"""
THOST_FTDC_SC_NoSpecialRule = 48
"""没有特殊创建规则"""
THOST_FTDC_SC_NoSpringFestival = 49
"""不包含春节"""
class TThostFtdcBasisPriceTypeType(Enum):
"""挂牌基准价类型类型"""
THOST_FTDC_IPT_LastSettlement = 49
"""上一合约结算价"""
THOST_FTDC_IPT_LaseClose = 50
"""上一合约收盘价"""
class TThostFtdcProductLifePhaseType(Enum):
"""产品生命周期状态类型"""
THOST_FTDC_PLP_Active = 49
"""活跃"""
THOST_FTDC_PLP_NonActive = 50
"""不活跃"""
THOST_FTDC_PLP_Canceled = 51
"""注销"""
class TThostFtdcDeliveryModeType(Enum):
"""交割方式类型"""
THOST_FTDC_DM_CashDeliv = 49
"""现金交割"""
THOST_FTDC_DM_CommodityDeliv = 50
"""实物交割"""
class TThostFtdcFundIOTypeType(Enum):
"""出入金类型类型"""
THOST_FTDC_FIOT_FundIO = 49
"""出入金"""
THOST_FTDC_FIOT_Transfer = 50
"""银期转帐"""
THOST_FTDC_FIOT_SwapCurrency = 51
"""银期换汇"""
class TThostFtdcFundTypeType(Enum):
"""资金类型类型"""
THOST_FTDC_FT_Deposite = 49
"""银行存款"""
THOST_FTDC_FT_ItemFund = 50
"""分项资金"""
THOST_FTDC_FT_Company = 51
"""公司调整"""
THOST_FTDC_FT_InnerTransfer = 52
"""资金内转"""
class TThostFtdcFundDirectionType(Enum):
"""出入金方向类型"""
THOST_FTDC_FD_In = 49
"""入金"""
THOST_FTDC_FD_Out = 50
"""出金"""
class TThostFtdcFundStatusType(Enum):
"""资金状态类型"""
THOST_FTDC_FS_Record = 49
"""已录入"""
THOST_FTDC_FS_Check = 50
"""已复核"""
THOST_FTDC_FS_Charge = 51
"""已冲销"""
class TThostFtdcPublishStatusType(Enum):
"""发布状态类型"""
THOST_FTDC_PS_None = 49
"""未发布"""
THOST_FTDC_PS_Publishing = 50
"""正在发布"""
THOST_FTDC_PS_Published = 51
"""已发布"""
class TThostFtdcSystemStatusType(Enum):
"""系统状态类型"""
THOST_FTDC_ES_NonActive = 49
"""不活跃"""
THOST_FTDC_ES_Startup = 50
"""启动"""
THOST_FTDC_ES_Initialize = 51
"""交易开始初始化"""
THOST_FTDC_ES_Initialized = 52
"""交易完成初始化"""
THOST_FTDC_ES_Close = 53
"""收市开始"""
THOST_FTDC_ES_Closed = 54
"""收市完成"""
THOST_FTDC_ES_Settlement = 55
"""结算"""
class TThostFtdcSettlementStatusType(Enum):
"""结算状态类型"""
THOST_FTDC_STS_Initialize = 48
"""初始"""
THOST_FTDC_STS_Settlementing = 49
"""结算中"""
THOST_FTDC_STS_Settlemented = 50
"""已结算"""
THOST_FTDC_STS_Finished = 51
"""结算完成"""
class TThostFtdcInvestorTypeType(Enum):
"""投资者类型类型"""
THOST_FTDC_CT_Person = 48
"""自然人"""
THOST_FTDC_CT_Company = 49
"""法人"""
THOST_FTDC_CT_Fund = 50
"""投资基金"""
THOST_FTDC_CT_SpecialOrgan = 51
"""特殊法人"""
THOST_FTDC_CT_Asset = 52
"""资管户"""
class TThostFtdcBrokerTypeType(Enum):
"""经纪公司类型类型"""
THOST_FTDC_BT_Trade = 48
"""交易会员"""
THOST_FTDC_BT_TradeSettle = 49
"""交易结算会员"""
class TThostFtdcRiskLevelType(Enum):
"""风险等级类型"""
THOST_FTDC_FAS_Low = 49
"""低风险客户"""
THOST_FTDC_FAS_Normal = 50
"""普通客户"""
THOST_FTDC_FAS_Focus = 51
"""关注客户"""
THOST_FTDC_FAS_Risk = 52
"""风险客户"""
class TThostFtdcFeeAcceptStyleType(Enum):
"""手续费收取方式类型"""
THOST_FTDC_FAS_ByTrade = 49
"""按交易收取"""
THOST_FTDC_FAS_ByDeliv = 50
"""按交割收取"""
THOST_FTDC_FAS_None = 51
"""不收"""
THOST_FTDC_FAS_FixFee = 52
"""按指定手续费收取"""
class TThostFtdcPasswordTypeType(Enum):
"""密码类型类型"""
THOST_FTDC_PWDT_Trade = 49
"""交易密码"""
THOST_FTDC_PWDT_Account = 50
"""资金密码"""
class TThostFtdcAlgorithmType(Enum):
"""盈亏算法类型"""
THOST_FTDC_AG_All = 49
"""浮盈浮亏都计算"""
THOST_FTDC_AG_OnlyLost = 50
"""浮盈不计,浮亏计"""
THOST_FTDC_AG_OnlyGain = 51
"""浮盈计,浮亏不计"""
THOST_FTDC_AG_None = 52
"""浮盈浮亏都不计算"""
class TThostFtdcIncludeCloseProfitType(Enum):
    """Whether close profit is included (是否包含平仓盈利).

    NOTE(review): values are 48 ('0') and 50 ('2'); 49 ('1') is unused.
    The gap mirrors the upstream single-character constants, so it is
    presumably intentional rather than a typo -- confirm against the
    CTP header before relying on contiguity.
    """
    THOST_FTDC_ICP_Include = 48
    """Include close profit (包含平仓盈利)."""
    THOST_FTDC_ICP_NotInclude = 50
    """Exclude close profit (不包含平仓盈利)."""
class TThostFtdcAllWithoutTradeType(Enum):
    """Whether subject to the withdrawable-ratio limit (是否受可提比例限制).

    NOTE(review): values are 48 ('0'), 50 ('2') and 51 ('3'); 49 ('1')
    is skipped, mirroring the upstream single-character constants.
    """
    THOST_FTDC_AWT_Enable = 48
    """No position and no trade: exempt from the withdrawable-ratio limit (无仓无成交不受可提比例限制)."""
    THOST_FTDC_AWT_Disable = 50
    """Subject to the withdrawable-ratio limit (受可提比例限制)."""
    THOST_FTDC_AWT_NoHoldEnable = 51
    """No position: exempt from the withdrawable-ratio limit (无仓不受可提比例限制)."""
class TThostFtdcFuturePwdFlagType(Enum):
"""资金密码核对标志类型"""
THOST_FTDC_FPWD_UnCheck = 48
"""不核对"""
THOST_FTDC_FPWD_Check = 49
"""核对"""
class TThostFtdcTransferTypeType(Enum):
"""银期转账类型类型"""
THOST_FTDC_TT_BankToFuture = 48
"""银行转期货"""
THOST_FTDC_TT_FutureToBank = 49
"""期货转银行"""
class TThostFtdcTransferValidFlagType(Enum):
"""转账有效标志类型"""
THOST_FTDC_TVF_Invalid = 48
"""无效或失败"""
THOST_FTDC_TVF_Valid = 49
"""有效"""
THOST_FTDC_TVF_Reverse = 50
"""冲正"""
class TThostFtdcReasonType(Enum):
"""事由类型"""
THOST_FTDC_RN_CD = 48
"""错单"""
THOST_FTDC_RN_ZT = 49
"""资金在途"""
THOST_FTDC_RN_QT = 50
"""其它"""
class TThostFtdcSexType(Enum):
"""性别类型"""
THOST_FTDC_SEX_None = 48
"""未知"""
THOST_FTDC_SEX_Man = 49
"""男"""
THOST_FTDC_SEX_Woman = 50
"""女"""
class TThostFtdcUserTypeType(Enum):
"""用户类型类型"""
THOST_FTDC_UT_Investor = 48
"""投资者"""
THOST_FTDC_UT_Operator = 49
"""操作员"""
THOST_FTDC_UT_SuperUser = 50
"""管理员"""
class TThostFtdcRateTypeType(Enum):
"""费率类型类型"""
THOST_FTDC_RATETYPE_MarginRate = 50
"""保证金率"""
class TThostFtdcNoteTypeType(Enum):
"""通知类型类型"""
THOST_FTDC_NOTETYPE_TradeSettleBill = 49
"""交易结算单"""
THOST_FTDC_NOTETYPE_TradeSettleMonth = 50
"""交易结算月报"""
THOST_FTDC_NOTETYPE_CallMarginNotes = 51
"""追加保证金通知书"""
THOST_FTDC_NOTETYPE_ForceCloseNotes = 52
"""强行平仓通知书"""
THOST_FTDC_NOTETYPE_TradeNotes = 53
"""成交通知书"""
THOST_FTDC_NOTETYPE_DelivNotes = 54
"""交割通知书"""
class TThostFtdcSettlementStyleType(Enum):
"""结算单方式类型"""
THOST_FTDC_SBS_Day = 49
"""逐日盯市"""
THOST_FTDC_SBS_Volume = 50
"""逐笔对冲"""
class TThostFtdcSettlementBillTypeType(Enum):
"""结算单类型类型"""
THOST_FTDC_ST_Day = 48
"""日报"""
THOST_FTDC_ST_Month = 49
"""月报"""
class TThostFtdcUserRightTypeType(Enum):
"""客户权限类型类型"""
THOST_FTDC_URT_Logon = 49
"""登录"""
THOST_FTDC_URT_Transfer = 50
"""银期转帐"""
THOST_FTDC_URT_EMail = 51
"""邮寄结算单"""
THOST_FTDC_URT_Fax = 52
"""传真结算单"""
THOST_FTDC_URT_ConditionOrder = 53
"""条件单"""
class TThostFtdcMarginPriceTypeType(Enum):
"""保证金价格类型类型"""
THOST_FTDC_MPT_PreSettlementPrice = 49
"""昨结算价"""
THOST_FTDC_MPT_SettlementPrice = 50
"""最新价"""
THOST_FTDC_MPT_AveragePrice = 51
"""成交均价"""
THOST_FTDC_MPT_OpenPrice = 52
"""开仓价"""
class TThostFtdcBillGenStatusType(Enum):
"""结算单生成状态类型"""
THOST_FTDC_BGS_None = 48
"""未生成"""
THOST_FTDC_BGS_NoGenerated = 49
"""生成中"""
THOST_FTDC_BGS_Generated = 50
"""已生成"""
class TThostFtdcAlgoTypeType(Enum):
"""算法类型类型"""
THOST_FTDC_AT_HandlePositionAlgo = 49
"""持仓处理算法"""
THOST_FTDC_AT_FindMarginRateAlgo = 50
"""寻找保证金率算法"""
class TThostFtdcHandlePositionAlgoIDType(Enum):
"""持仓处理算法编号类型"""
THOST_FTDC_HPA_Base = 49
"""基本"""
THOST_FTDC_HPA_DCE = 50
"""大连商品交易所"""
THOST_FTDC_HPA_CZCE = 51
"""郑州商品交易所"""
class TThostFtdcFindMarginRateAlgoIDType(Enum):
"""寻找保证金率算法编号类型"""
THOST_FTDC_FMRA_Base = 49
"""基本"""
THOST_FTDC_FMRA_DCE = 50
"""大连商品交易所"""
THOST_FTDC_FMRA_CZCE = 51
"""郑州商品交易所"""
class TThostFtdcHandleTradingAccountAlgoIDType(Enum):
"""资金处理算法编号类型"""
THOST_FTDC_HTAA_Base = 49
"""基本"""
THOST_FTDC_HTAA_DCE = 50
"""大连商品交易所"""
THOST_FTDC_HTAA_CZCE = 51
"""郑州商品交易所"""
class TThostFtdcPersonTypeType(Enum):
"""联系人类型类型"""
THOST_FTDC_PST_Order = 49
"""指定下单人"""
THOST_FTDC_PST_Open = 50
"""开户授权人"""
THOST_FTDC_PST_Fund = 51
"""资金调拨人"""
THOST_FTDC_PST_Settlement = 52
"""结算单确认人"""
THOST_FTDC_PST_Company = 53
"""法人"""
THOST_FTDC_PST_Corporation = 54
"""法人代表"""
THOST_FTDC_PST_LinkMan = 55
"""投资者联系人"""
THOST_FTDC_PST_Ledger = 56
"""分户管理资产负责人"""
THOST_FTDC_PST_Trustee = 57
"""托(保)管人"""
THOST_FTDC_PST_TrusteeCorporation = 65
"""托(保)管机构法人代表"""
THOST_FTDC_PST_TrusteeOpen = 66
"""托(保)管机构开户授权人"""
THOST_FTDC_PST_TrusteeContact = 67
"""托(保)管机构联系人"""
THOST_FTDC_PST_ForeignerRefer = 68
"""境外自然人参考证件"""
THOST_FTDC_PST_CorporationRefer = 69
"""法人代表参考证件"""
class TThostFtdcQueryInvestorRangeType(Enum):
"""查询范围类型"""
THOST_FTDC_QIR_All = 49
"""所有"""
THOST_FTDC_QIR_Group = 50
"""查询分类"""
THOST_FTDC_QIR_Single = 51
"""单一投资者"""
class TThostFtdcInvestorRiskStatusType(Enum):
"""投资者风险状态类型"""
THOST_FTDC_IRS_Normal = 49
"""正常"""
THOST_FTDC_IRS_Warn = 50
"""警告"""
THOST_FTDC_IRS_Call = 51
"""追保"""
THOST_FTDC_IRS_Force = 52
"""强平"""
THOST_FTDC_IRS_Exception = 53
"""异常"""
class TThostFtdcUserEventTypeType(Enum):
"""用户事件类型类型"""
THOST_FTDC_UET_Login = 49
"""登录"""
THOST_FTDC_UET_Logout = 50
"""登出"""
THOST_FTDC_UET_Trading = 51
"""交易成功"""
THOST_FTDC_UET_TradingError = 52
"""交易失败"""
THOST_FTDC_UET_UpdatePassword = 53
"""修改密码"""
THOST_FTDC_UET_Authenticate = 54
"""客户端认证"""
THOST_FTDC_UET_Other = 57
"""其他"""
class TThostFtdcCloseStyleType(Enum):
"""平仓方式类型"""
THOST_FTDC_ICS_Close = 48
"""先开先平"""
THOST_FTDC_ICS_CloseToday = 49
"""先平今再平昨"""
class TThostFtdcStatModeType(Enum):
"""统计方式类型"""
THOST_FTDC_SM_Non = 48
"""----"""
THOST_FTDC_SM_Instrument = 49
"""按合约统计"""
THOST_FTDC_SM_Product = 50
"""按产品统计"""
THOST_FTDC_SM_Investor = 51
"""按投资者统计"""
class TThostFtdcParkedOrderStatusType(Enum):
"""预埋单状态类型"""
THOST_FTDC_PAOS_NotSend = 49
"""未发送"""
THOST_FTDC_PAOS_Send = 50
"""已发送"""
THOST_FTDC_PAOS_Deleted = 51
"""已删除"""
class TThostFtdcVirDealStatusType(Enum):
"""处理状态类型"""
THOST_FTDC_VDS_Dealing = 49
"""正在处理"""
THOST_FTDC_VDS_DeaclSucceed = 50
"""处理成功"""
class TThostFtdcOrgSystemIDType(Enum):
"""原有系统代码类型"""
THOST_FTDC_ORGS_Standard = 48
"""综合交易平台"""
THOST_FTDC_ORGS_ESunny = 49
"""易盛系统"""
THOST_FTDC_ORGS_KingStarV6 = 50
"""金仕达V6系统"""
class TThostFtdcVirTradeStatusType(Enum):
"""交易状态类型"""
THOST_FTDC_VTS_NaturalDeal = 48
"""正常处理中"""
THOST_FTDC_VTS_SucceedEnd = 49
"""成功结束"""
THOST_FTDC_VTS_FailedEND = 50
"""失败结束"""
THOST_FTDC_VTS_Exception = 51
"""异常中"""
THOST_FTDC_VTS_ManualDeal = 52
"""已人工异常处理"""
THOST_FTDC_VTS_MesException = 53
"""通讯异常 ,请人工处理"""
THOST_FTDC_VTS_SysException = 54
"""系统出错,请人工处理"""
class TThostFtdcVirBankAccTypeType(Enum):
"""银行帐户类型类型"""
THOST_FTDC_VBAT_BankBook = 49
"""存折"""
THOST_FTDC_VBAT_BankCard = 50
"""储蓄卡"""
THOST_FTDC_VBAT_CreditCard = 51
"""信用卡"""
class TThostFtdcVirementStatusType(Enum):
"""银行帐户类型类型"""
THOST_FTDC_VMS_Natural = 48
"""正常"""
THOST_FTDC_VMS_Canceled = 57
"""销户"""
class TThostFtdcVirementAvailAbilityType(Enum):
"""有效标志类型"""
THOST_FTDC_VAA_NoAvailAbility = 48
"""未确认"""
THOST_FTDC_VAA_AvailAbility = 49
"""有效"""
THOST_FTDC_VAA_Repeal = 50
"""冲正"""
class TThostFtdcVirementTradeCodeType(Enum):
    """Transfer trade code (交易代码类型).

    NOTE(review): the bank-initiated and futures-initiated members share
    the same values (49 and 50).  Under Python Enum semantics the two
    ``Future*`` names are therefore silent *aliases* of the ``Bank*``
    members: ``THOST_FTDC_VTC_FutureBankToFuture is
    THOST_FTDC_VTC_BankBankToFuture``, and only the ``Bank*`` members
    appear in iteration or value lookup.  In the upstream CTP header
    these are reportedly distinct multi-character codes ('102001',
    '202001', ...), so the collision looks like a code-generation
    artifact -- confirm with the generator before relying on the
    ``Future*`` names being distinct.  Values are not changed here
    because they are part of the wire protocol.
    """
    THOST_FTDC_VTC_BankBankToFuture = 49
    """Bank-initiated: bank funds to futures (银行发起银行资金转期货)."""
    THOST_FTDC_VTC_BankFutureToBank = 50
    """Bank-initiated: futures funds to bank (银行发起期货资金转银行)."""
    THOST_FTDC_VTC_FutureBankToFuture = 49
    """Futures-initiated: bank funds to futures (期货发起银行资金转期货) -- alias of BankBankToFuture (see class note)."""
    THOST_FTDC_VTC_FutureFutureToBank = 50
    """Futures-initiated: futures funds to bank (期货发起期货资金转银行) -- alias of BankFutureToBank (see class note)."""
class TThostFtdcAMLGenStatusType(Enum):
"""Aml生成方式类型"""
THOST_FTDC_GEN_Program = 48
"""程序生成"""
THOST_FTDC_GEN_HandWork = 49
"""人工生成"""
class TThostFtdcCFMMCKeyKindType(Enum):
    """Dynamic-key category for CFMMC margin monitoring (动态密钥类别(保证金监管)).

    Values are ASCII codes of mnemonic letters: 82 == 'R', 65 == 'A',
    77 == 'M'.
    """
    THOST_FTDC_CFMMCKK_REQUEST = 82
    """Update actively requested (主动请求更新) -- 'R'."""
    THOST_FTDC_CFMMCKK_AUTO = 65
    """Automatic update by CFMMC (CFMMC自动更新) -- 'A'."""
    THOST_FTDC_CFMMCKK_MANUAL = 77
    """Manual update by CFMMC (CFMMC手动更新) -- 'M'."""
class TThostFtdcCertificationTypeType(Enum):
"""证件类型类型"""
THOST_FTDC_CFT_IDCard = 48
"""身份证"""
THOST_FTDC_CFT_Passport = 49
"""护照"""
THOST_FTDC_CFT_OfficerIDCard = 50
"""军官证"""
THOST_FTDC_CFT_SoldierIDCard = 51
"""士兵证"""
THOST_FTDC_CFT_HomeComingCard = 52
"""回乡证"""
THOST_FTDC_CFT_HouseholdRegister = 53
"""户口簿"""
THOST_FTDC_CFT_LicenseNo = 54
"""营业执照号"""
THOST_FTDC_CFT_InstitutionCodeCard = 55
"""组织机构代码证"""
THOST_FTDC_CFT_TempLicenseNo = 56
"""临时营业执照号"""
THOST_FTDC_CFT_NoEnterpriseLicenseNo = 57
"""民办非企业登记证书"""
THOST_FTDC_CFT_OtherCard = 120
"""其他证件"""
THOST_FTDC_CFT_SuperDepAgree = 97
"""主管部门批文"""
class TThostFtdcFileBusinessCodeType(Enum):
"""文件业务功能类型"""
THOST_FTDC_FBC_Others = 48
"""其他"""
THOST_FTDC_FBC_TransferDetails = 49
"""转账交易明细对账"""
THOST_FTDC_FBC_CustAccStatus = 50
"""客户账户状态对账"""
THOST_FTDC_FBC_AccountTradeDetails = 51
"""账户类交易明细对账"""
THOST_FTDC_FBC_FutureAccountChangeInfoDetails = 52
"""期货账户信息变更明细对账"""
THOST_FTDC_FBC_CustMoneyDetail = 53
"""客户资金台账余额明细对账"""
THOST_FTDC_FBC_CustCancelAccountInfo = 54
"""客户销户结息明细对账"""
THOST_FTDC_FBC_CustMoneyResult = 55
"""客户资金余额对账结果"""
THOST_FTDC_FBC_OthersExceptionResult = 56
"""其它对账异常结果文件"""
THOST_FTDC_FBC_CustInterestNetMoneyDetails = 57
"""客户结息净额明细"""
THOST_FTDC_FBC_CustMoneySendAndReceiveDetails = 97
"""客户资金交收明细"""
THOST_FTDC_FBC_CorporationMoneyTotal = 98
"""法人存管银行资金交收汇总"""
THOST_FTDC_FBC_MainbodyMoneyTotal = 99
"""主体间资金交收汇总"""
THOST_FTDC_FBC_MainPartMonitorData = 100
"""总分平衡监管数据"""
THOST_FTDC_FBC_PreparationMoney = 101
"""存管银行备付金余额"""
THOST_FTDC_FBC_BankMoneyMonitorData = 102
"""协办存管银行资金监管数据"""
class TThostFtdcCashExchangeCodeType(Enum):
"""汇钞标志类型"""
THOST_FTDC_CEC_Exchange = 49
"""汇"""
THOST_FTDC_CEC_Cash = 50
"""钞"""
class TThostFtdcYesNoIndicatorType(Enum):
"""是或否标识类型"""
THOST_FTDC_YNI_Yes = 48
"""是"""
THOST_FTDC_YNI_No = 49
"""否"""
class TThostFtdcBanlanceTypeType(Enum):
"""余额类型类型"""
THOST_FTDC_BLT_CurrentMoney = 48
"""当前余额"""
THOST_FTDC_BLT_UsableMoney = 49
"""可用余额"""
THOST_FTDC_BLT_FetchableMoney = 50
"""可取余额"""
THOST_FTDC_BLT_FreezeMoney = 51
"""冻结余额"""
class TThostFtdcGenderType(Enum):
"""性别类型"""
THOST_FTDC_GD_Unknown = 48
"""未知状态"""
THOST_FTDC_GD_Male = 49
"""男"""
THOST_FTDC_GD_Female = 50
"""女"""
class TThostFtdcFeePayFlagType(Enum):
"""费用支付标志类型"""
THOST_FTDC_FPF_BEN = 48
"""由受益方支付费用"""
THOST_FTDC_FPF_OUR = 49
"""由发送方支付费用"""
THOST_FTDC_FPF_SHA = 50
"""由发送方支付发起的费用,受益方支付接受的费用"""
class TThostFtdcPassWordKeyTypeType(Enum):
"""密钥类型类型"""
THOST_FTDC_PWKT_ExchangeKey = 48
"""交换密钥"""
THOST_FTDC_PWKT_PassWordKey = 49
"""密码密钥"""
THOST_FTDC_PWKT_MACKey = 50
"""MAC密钥"""
THOST_FTDC_PWKT_MessageKey = 51
"""报文密钥"""
class TThostFtdcFBTPassWordTypeType(Enum):
"""密码类型类型"""
THOST_FTDC_PWT_Query = 48
"""查询"""
THOST_FTDC_PWT_Fetch = 49
"""取款"""
THOST_FTDC_PWT_Transfer = 50
"""转帐"""
THOST_FTDC_PWT_Trade = 51
"""交易"""
class TThostFtdcFBTEncryModeType(Enum):
"""加密方式类型"""
THOST_FTDC_EM_NoEncry = 48
"""不加密"""
THOST_FTDC_EM_DES = 49
"""DES"""
THOST_FTDC_EM_3DES = 50
"""3DES"""
class TThostFtdcBankRepealFlagType(Enum):
"""银行冲正标志类型"""
THOST_FTDC_BRF_BankNotNeedRepeal = 48
"""银行无需自动冲正"""
THOST_FTDC_BRF_BankWaitingRepeal = 49
"""银行待自动冲正"""
THOST_FTDC_BRF_BankBeenRepealed = 50
"""银行已自动冲正"""
class TThostFtdcBrokerRepealFlagType(Enum):
"""期商冲正标志类型"""
THOST_FTDC_BRORF_BrokerNotNeedRepeal = 48
"""期商无需自动冲正"""
THOST_FTDC_BRORF_BrokerWaitingRepeal = 49
"""期商待自动冲正"""
THOST_FTDC_BRORF_BrokerBeenRepealed = 50
"""期商已自动冲正"""
class TThostFtdcInstitutionTypeType(Enum):
"""机构类别类型"""
THOST_FTDC_TS_Bank = 48
"""银行"""
THOST_FTDC_TS_Future = 49
"""期商"""
THOST_FTDC_TS_Store = 50
"""券商"""
class TThostFtdcLastFragmentType(Enum):
"""最后分片标志类型"""
THOST_FTDC_LF_Yes = 48
"""是最后分片"""
THOST_FTDC_LF_No = 49
"""不是最后分片"""
class TThostFtdcBankAccStatusType(Enum):
"""银行账户状态类型"""
THOST_FTDC_BAS_Normal = 48
"""正常"""
THOST_FTDC_BAS_Freeze = 49
"""冻结"""
THOST_FTDC_BAS_ReportLoss = 50
"""挂失"""
class TThostFtdcMoneyAccountStatusType(Enum):
"""资金账户状态类型"""
THOST_FTDC_MAS_Normal = 48
"""正常"""
THOST_FTDC_MAS_Cancel = 49
"""销户"""
class TThostFtdcManageStatusType(Enum):
"""存管状态类型"""
THOST_FTDC_MSS_Point = 48
"""指定存管"""
THOST_FTDC_MSS_PrePoint = 49
"""预指定"""
THOST_FTDC_MSS_CancelPoint = 50
"""撤销指定"""
class TThostFtdcSystemTypeType(Enum):
"""应用系统类型类型"""
THOST_FTDC_SYT_FutureBankTransfer = 48
"""银期转帐"""
THOST_FTDC_SYT_StockBankTransfer = 49
"""银证转帐"""
THOST_FTDC_SYT_TheThirdPartStore = 50
"""第三方存管"""
class TThostFtdcTxnEndFlagType(Enum):
"""银期转帐划转结果标志类型"""
THOST_FTDC_TEF_NormalProcessing = 48
"""正常处理中"""
THOST_FTDC_TEF_Success = 49
"""成功结束"""
THOST_FTDC_TEF_Failed = 50
"""失败结束"""
THOST_FTDC_TEF_Abnormal = 51
"""异常中"""
THOST_FTDC_TEF_ManualProcessedForException = 52
"""已人工异常处理"""
THOST_FTDC_TEF_CommuFailedNeedManualProcess = 53
"""通讯异常 ,请人工处理"""
THOST_FTDC_TEF_SysErrorNeedManualProcess = 54
"""系统出错,请人工处理"""
class TThostFtdcProcessStatusType(Enum):
"""银期转帐服务处理状态类型"""
THOST_FTDC_PSS_NotProcess = 48
"""未处理"""
THOST_FTDC_PSS_StartProcess = 49
"""开始处理"""
THOST_FTDC_PSS_Finished = 50
"""处理完成"""
class TThostFtdcCustTypeType(Enum):
"""客户类型类型"""
THOST_FTDC_CUSTT_Person = 48
"""自然人"""
THOST_FTDC_CUSTT_Institution = 49
"""机构户"""
class TThostFtdcFBTTransferDirectionType(Enum):
"""银期转帐方向类型"""
THOST_FTDC_FBTTD_FromBankToFuture = 49
"""入金,银行转期货"""
THOST_FTDC_FBTTD_FromFutureToBank = 50
"""出金,期货转银行"""
class TThostFtdcOpenOrDestroyType(Enum):
    """Account open/close category (开销户类别).

    Declared out of numeric order on purpose: Open is '1' (49) and
    Destroy is '0' (48), following the upstream constants.
    """
    THOST_FTDC_OOD_Open = 49
    """Open an account (开户) -- char '1'."""
    THOST_FTDC_OOD_Destroy = 48
    """Close an account (销户) -- char '0'."""
class TThostFtdcAvailabilityFlagType(Enum):
"""有效标志类型"""
THOST_FTDC_AVAF_Invalid = 48
"""未确认"""
THOST_FTDC_AVAF_Valid = 49
"""有效"""
THOST_FTDC_AVAF_Repeal = 50
"""冲正"""
class TThostFtdcOrganTypeType(Enum):
"""机构类型类型"""
THOST_FTDC_OT_Bank = 49
"""银行代理"""
THOST_FTDC_OT_Future = 50
"""交易前置"""
THOST_FTDC_OT_PlateForm = 57
"""银期转帐平台管理"""
class TThostFtdcOrganLevelType(Enum):
"""机构级别类型"""
THOST_FTDC_OL_HeadQuarters = 49
"""银行总行或期商总部"""
THOST_FTDC_OL_Branch = 50
"""银行分中心或期货公司营业部"""
class TThostFtdcProtocalIDType(Enum):
"""协议类型类型"""
THOST_FTDC_PID_FutureProtocal = 48
"""期商协议"""
THOST_FTDC_PID_ICBCProtocal = 49
"""工行协议"""
THOST_FTDC_PID_ABCProtocal = 50
"""农行协议"""
THOST_FTDC_PID_CBCProtocal = 51
"""中国银行协议"""
THOST_FTDC_PID_CCBProtocal = 52
"""建行协议"""
THOST_FTDC_PID_BOCOMProtocal = 53
"""交行协议"""
THOST_FTDC_PID_FBTPlateFormProtocal = 88
"""银期转帐平台协议"""
class TThostFtdcConnectModeType(Enum):
"""套接字连接方式类型"""
THOST_FTDC_CM_ShortConnect = 48
"""短连接"""
THOST_FTDC_CM_LongConnect = 49
"""长连接"""
class TThostFtdcSyncModeType(Enum):
"""套接字通信方式类型"""
THOST_FTDC_SRM_ASync = 48
"""异步"""
THOST_FTDC_SRM_Sync = 49
"""同步"""
class TThostFtdcBankAccTypeType(Enum):
"""银行帐号类型类型"""
THOST_FTDC_BAT_BankBook = 49
"""银行存折"""
THOST_FTDC_BAT_SavingCard = 50
"""储蓄卡"""
THOST_FTDC_BAT_CreditCard = 51
"""信用卡"""
class TThostFtdcFutureAccTypeType(Enum):
"""期货公司帐号类型类型"""
THOST_FTDC_FAT_BankBook = 49
"""银行存折"""
THOST_FTDC_FAT_SavingCard = 50
"""储蓄卡"""
THOST_FTDC_FAT_CreditCard = 51
"""信用卡"""
class TThostFtdcOrganStatusType(Enum):
"""接入机构状态类型"""
THOST_FTDC_OS_Ready = 48
"""启用"""
THOST_FTDC_OS_CheckIn = 49
"""签到"""
THOST_FTDC_OS_CheckOut = 50
"""签退"""
THOST_FTDC_OS_CheckFileArrived = 51
"""对帐文件到达"""
THOST_FTDC_OS_CheckDetail = 52
"""对帐"""
THOST_FTDC_OS_DayEndClean = 53
"""日终清理"""
THOST_FTDC_OS_Invalid = 57
"""注销"""
class TThostFtdcCCBFeeModeType(Enum):
"""建行收费模式类型"""
THOST_FTDC_CCBFM_ByAmount = 49
"""按金额扣收"""
THOST_FTDC_CCBFM_ByMonth = 50
"""按月扣收"""
class TThostFtdcCommApiTypeType(Enum):
"""通讯API类型类型"""
THOST_FTDC_CAPIT_Client = 49
"""客户端"""
THOST_FTDC_CAPIT_Server = 50
"""服务端"""
THOST_FTDC_CAPIT_UserApi = 51
"""交易系统的UserApi"""
class TThostFtdcLinkStatusType(Enum):
"""连接状态类型"""
THOST_FTDC_LS_Connected = 49
"""已经连接"""
THOST_FTDC_LS_Disconnected = 50
"""没有连接"""
class TThostFtdcPwdFlagType(Enum):
"""密码核对标志类型"""
THOST_FTDC_BPWDF_NoCheck = 48
"""不核对"""
THOST_FTDC_BPWDF_BlankCheck = 49
"""明文核对"""
THOST_FTDC_BPWDF_EncryptCheck = 50
"""密文核对"""
class TThostFtdcSecuAccTypeType(Enum):
"""期货帐号类型类型"""
THOST_FTDC_SAT_AccountID = 49
"""资金帐号"""
THOST_FTDC_SAT_CardID = 50
"""资金卡号"""
THOST_FTDC_SAT_SHStockholderID = 51
"""上海股东帐号"""
THOST_FTDC_SAT_SZStockholderID = 52
"""深圳股东帐号"""
class TThostFtdcTransferStatusType(Enum):
"""转账交易状态类型"""
THOST_FTDC_TRFS_Normal = 48
"""正常"""
THOST_FTDC_TRFS_Repealed = 49
"""被冲正"""
class TThostFtdcSponsorTypeType(Enum):
"""发起方类型"""
THOST_FTDC_SPTYPE_Broker = 48
"""期商"""
THOST_FTDC_SPTYPE_Bank = 49
"""银行"""
class TThostFtdcReqRspTypeType(Enum):
"""请求响应类别类型"""
THOST_FTDC_REQRSP_Request = 48
"""请求"""
THOST_FTDC_REQRSP_Response = 49
"""响应"""
class TThostFtdcFBTUserEventTypeType(Enum):
"""银期转帐用户事件类型类型"""
THOST_FTDC_FBTUET_SignIn = 48
"""签到"""
THOST_FTDC_FBTUET_FromBankToFuture = 49
"""银行转期货"""
THOST_FTDC_FBTUET_FromFutureToBank = 50
"""期货转银行"""
THOST_FTDC_FBTUET_OpenAccount = 51
"""开户"""
THOST_FTDC_FBTUET_CancelAccount = 52
"""销户"""
THOST_FTDC_FBTUET_ChangeAccount = 53
"""变更银行账户"""
THOST_FTDC_FBTUET_RepealFromBankToFuture = 54
"""冲正银行转期货"""
THOST_FTDC_FBTUET_RepealFromFutureToBank = 55
"""冲正期货转银行"""
THOST_FTDC_FBTUET_QueryBankAccount = 56
"""查询银行账户"""
THOST_FTDC_FBTUET_QueryFutureAccount = 57
"""查询期货账户"""
THOST_FTDC_FBTUET_SignOut = 65
"""签退"""
THOST_FTDC_FBTUET_SyncKey = 66
"""密钥同步"""
THOST_FTDC_FBTUET_ReserveOpenAccount = 67
"""预约开户"""
THOST_FTDC_FBTUET_CancelReserveOpenAccount = 68
"""撤销预约开户"""
THOST_FTDC_FBTUET_ReserveOpenAccountConfirm = 69
"""预约开户确认"""
THOST_FTDC_FBTUET_Other = 90
"""其他"""
class TThostFtdcDBOperationType(Enum):
"""记录操作类型类型"""
THOST_FTDC_DBOP_Insert = 48
"""插入"""
THOST_FTDC_DBOP_Update = 49
"""更新"""
THOST_FTDC_DBOP_Delete = 50
"""删除"""
class TThostFtdcSyncFlagType(Enum):
"""同步标记类型"""
THOST_FTDC_SYNF_Yes = 48
"""已同步"""
THOST_FTDC_SYNF_No = 49
"""未同步"""
class TThostFtdcSyncTypeType(Enum):
"""同步类型类型"""
THOST_FTDC_SYNT_OneOffSync = 48
"""一次同步"""
THOST_FTDC_SYNT_TimerSync = 49
"""定时同步"""
THOST_FTDC_SYNT_TimerFullSync = 50
"""定时完全同步"""
class TThostFtdcExDirectionType(Enum):
"""换汇方向类型"""
THOST_FTDC_FBEDIR_Settlement = 48
"""结汇"""
THOST_FTDC_FBEDIR_Sale = 49
"""售汇"""
class TThostFtdcFBEResultFlagType(Enum):
"""换汇成功标志类型"""
THOST_FTDC_FBERES_Success = 48
"""成功"""
THOST_FTDC_FBERES_InsufficientBalance = 49
"""账户余额不足"""
THOST_FTDC_FBERES_UnknownTrading = 56
"""交易结果未知"""
THOST_FTDC_FBERES_Fail = 120
"""失败"""
class TThostFtdcFBEExchStatusType(Enum):
"""换汇交易状态类型"""
THOST_FTDC_FBEES_Normal = 48
"""正常"""
THOST_FTDC_FBEES_ReExchange = 49
"""交易重发"""
class TThostFtdcFBEFileFlagType(Enum):
"""换汇文件标志类型"""
THOST_FTDC_FBEFG_DataPackage = 48
"""数据包"""
THOST_FTDC_FBEFG_File = 49
"""文件"""
class TThostFtdcFBEAlreadyTradeType(Enum):
"""换汇已交易标志类型"""
THOST_FTDC_FBEAT_NotTrade = 48
"""未交易"""
THOST_FTDC_FBEAT_Trade = 49
"""已交易"""
class TThostFtdcFBEUserEventTypeType(Enum):
"""银期换汇用户事件类型类型"""
THOST_FTDC_FBEUET_SignIn = 48
"""签到"""
THOST_FTDC_FBEUET_Exchange = 49
"""换汇"""
THOST_FTDC_FBEUET_ReExchange = 50
"""换汇重发"""
THOST_FTDC_FBEUET_QueryBankAccount = 51
"""银行账户查询"""
THOST_FTDC_FBEUET_QueryExchDetial = 52
"""换汇明细查询"""
THOST_FTDC_FBEUET_QueryExchSummary = 53
"""换汇汇总查询"""
THOST_FTDC_FBEUET_QueryExchRate = 54
"""换汇汇率查询"""
THOST_FTDC_FBEUET_CheckBankAccount = 55
"""对账文件通知"""
THOST_FTDC_FBEUET_SignOut = 56
"""签退"""
THOST_FTDC_FBEUET_Other = 90
"""其他"""
class TThostFtdcFBEReqFlagType(Enum):
"""换汇发送标志类型"""
THOST_FTDC_FBERF_UnProcessed = 48
"""未处理"""
THOST_FTDC_FBERF_WaitSend = 49
"""等待发送"""
THOST_FTDC_FBERF_SendSuccess = 50
"""发送成功"""
THOST_FTDC_FBERF_SendFailed = 51
"""发送失败"""
THOST_FTDC_FBERF_WaitReSend = 52
"""等待重发"""
class TThostFtdcNotifyClassType(Enum):
"""风险通知类型类型"""
THOST_FTDC_NC_NOERROR = 48
"""正常"""
THOST_FTDC_NC_Warn = 49
"""警示"""
THOST_FTDC_NC_Call = 50
"""追保"""
THOST_FTDC_NC_Force = 51
"""强平"""
THOST_FTDC_NC_CHUANCANG = 52
"""穿仓"""
THOST_FTDC_NC_Exception = 53
"""异常"""
class TThostFtdcForceCloseTypeType(Enum):
"""强平单类型类型"""
THOST_FTDC_FCT_Manual = 48
"""手工强平"""
THOST_FTDC_FCT_Single = 49
"""单一投资者辅助强平"""
THOST_FTDC_FCT_Group = 50
"""批量投资者辅助强平"""
class TThostFtdcRiskNotifyMethodType(Enum):
"""风险通知途径类型"""
THOST_FTDC_RNM_System = 48
"""系统通知"""
THOST_FTDC_RNM_SMS = 49
"""短信通知"""
THOST_FTDC_RNM_EMail = 50
"""邮件通知"""
THOST_FTDC_RNM_Manual = 51
"""人工通知"""
class TThostFtdcRiskNotifyStatusType(Enum):
"""风险通知状态类型"""
THOST_FTDC_RNS_NotGen = 48
"""未生成"""
THOST_FTDC_RNS_Generated = 49
"""已生成未发送"""
THOST_FTDC_RNS_SendError = 50
"""发送失败"""
THOST_FTDC_RNS_SendOk = 51
"""已发送未接收"""
THOST_FTDC_RNS_Received = 52
"""已接收未确认"""
THOST_FTDC_RNS_Confirmed = 53
"""已确认"""
class TThostFtdcRiskUserEventType(Enum):
"""风控用户操作事件类型"""
THOST_FTDC_RUE_ExportData = 48
"""导出数据"""
class TThostFtdcConditionalOrderSortTypeType(Enum):
"""条件单索引条件类型"""
THOST_FTDC_COST_LastPriceAsc = 48
"""使用最新价升序"""
THOST_FTDC_COST_LastPriceDesc = 49
"""使用最新价降序"""
THOST_FTDC_COST_AskPriceAsc = 50
"""使用卖价升序"""
THOST_FTDC_COST_AskPriceDesc = 51
"""使用卖价降序"""
THOST_FTDC_COST_BidPriceAsc = 52
"""使用买价升序"""
THOST_FTDC_COST_BidPriceDesc = 53
"""使用买价降序"""
class TThostFtdcSendTypeType(Enum):
"""报送状态类型"""
THOST_FTDC_UOAST_NoSend = 48
"""未发送"""
THOST_FTDC_UOAST_Sended = 49
"""已发送"""
THOST_FTDC_UOAST_Generated = 50
"""已生成"""
THOST_FTDC_UOAST_SendFail = 51
"""报送失败"""
THOST_FTDC_UOAST_Success = 52
"""接收成功"""
THOST_FTDC_UOAST_Fail = 53
"""接收失败"""
THOST_FTDC_UOAST_Cancel = 54
"""取消报送"""
class TThostFtdcClientIDStatusType(Enum):
"""交易编码状态类型"""
THOST_FTDC_UOACS_NoApply = 49
"""未申请"""
THOST_FTDC_UOACS_Submited = 50
"""已提交申请"""
THOST_FTDC_UOACS_Sended = 51
"""已发送申请"""
THOST_FTDC_UOACS_Success = 52
"""完成"""
THOST_FTDC_UOACS_Refuse = 53
"""拒绝"""
THOST_FTDC_UOACS_Cancel = 54
"""已撤销编码"""
class TThostFtdcQuestionTypeType(Enum):
"""特有信息类型类型"""
THOST_FTDC_QT_Radio = 49
"""单选"""
THOST_FTDC_QT_Option = 50
"""多选"""
THOST_FTDC_QT_Blank = 51
"""填空"""
class TThostFtdcBusinessTypeType(Enum):
"""业务类型类型"""
THOST_FTDC_BT_Request = 49
"""请求"""
THOST_FTDC_BT_Response = 50
"""应答"""
THOST_FTDC_BT_Notice = 51
"""通知"""
class TThostFtdcCfmmcReturnCodeType(Enum):
"""监控中心返回码类型"""
THOST_FTDC_CRC_Success = 48
"""成功"""
THOST_FTDC_CRC_Working = 49
"""该客户已经有流程在处理中"""
THOST_FTDC_CRC_InfoFail = 50
"""监控中客户资料检查失败"""
THOST_FTDC_CRC_IDCardFail = 51
"""监控中实名制检查失败"""
THOST_FTDC_CRC_OtherFail = 52
"""其他错误"""
class TThostFtdcClientTypeType(Enum):
"""客户类型类型"""
THOST_FTDC_CfMMCCT_All = 48
"""所有"""
THOST_FTDC_CfMMCCT_Person = 49
"""个人"""
THOST_FTDC_CfMMCCT_Company = 50
"""单位"""
THOST_FTDC_CfMMCCT_Other = 51
"""其他"""
THOST_FTDC_CfMMCCT_SpecialOrgan = 52
"""特殊法人"""
THOST_FTDC_CfMMCCT_Asset = 53
"""资管户"""
class TThostFtdcExchangeIDTypeType(Enum):
    """Exchange identifier (交易所编号).

    Each value is the ASCII code of a one-letter mnemonic for the
    exchange: 83 == 'S', 90 == 'Z', 68 == 'D', 74 == 'J', 78 == 'N'.
    """
    THOST_FTDC_EIDT_SHFE = 83
    """Shanghai Futures Exchange (上海期货交易所) -- 'S'."""
    THOST_FTDC_EIDT_CZCE = 90
    """Zhengzhou Commodity Exchange (郑州商品交易所) -- 'Z'."""
    THOST_FTDC_EIDT_DCE = 68
    """Dalian Commodity Exchange (大连商品交易所) -- 'D'."""
    THOST_FTDC_EIDT_CFFEX = 74
    """China Financial Futures Exchange (中国金融期货交易所) -- 'J'."""
    THOST_FTDC_EIDT_INE = 78
    """Shanghai International Energy Exchange (上海国际能源交易中心股份有限公司) -- 'N'."""
class TThostFtdcExClientIDTypeType(Enum):
"""交易编码类型类型"""
THOST_FTDC_ECIDT_Hedge = 49
"""套保"""
THOST_FTDC_ECIDT_Arbitrage = 50
"""套利"""
THOST_FTDC_ECIDT_Speculation = 51
"""投机"""
class TThostFtdcUpdateFlagType(Enum):
"""更新状态类型"""
THOST_FTDC_UF_NoUpdate = 48
"""未更新"""
THOST_FTDC_UF_Success = 49
"""更新全部信息成功"""
THOST_FTDC_UF_Fail = 50
"""更新全部信息失败"""
THOST_FTDC_UF_TCSuccess = 51
"""更新交易编码成功"""
THOST_FTDC_UF_TCFail = 52
"""更新交易编码失败"""
THOST_FTDC_UF_Cancel = 53
"""已丢弃"""
class TThostFtdcApplyOperateIDType(Enum):
"""申请动作类型"""
THOST_FTDC_AOID_OpenInvestor = 49
"""开户"""
THOST_FTDC_AOID_ModifyIDCard = 50
"""修改身份信息"""
THOST_FTDC_AOID_ModifyNoIDCard = 51
"""修改一般信息"""
THOST_FTDC_AOID_ApplyTradingCode = 52
"""申请交易编码"""
THOST_FTDC_AOID_CancelTradingCode = 53
"""撤销交易编码"""
THOST_FTDC_AOID_CancelInvestor = 54
"""销户"""
THOST_FTDC_AOID_FreezeAccount = 56
"""账户休眠"""
THOST_FTDC_AOID_ActiveFreezeAccount = 57
"""激活休眠账户"""
class TThostFtdcApplyStatusIDType(Enum):
"""申请状态类型"""
THOST_FTDC_ASID_NoComplete = 49
"""未补全"""
THOST_FTDC_ASID_Submited = 50
"""已提交"""
THOST_FTDC_ASID_Checked = 51
"""已审核"""
THOST_FTDC_ASID_Refused = 52
"""已拒绝"""
THOST_FTDC_ASID_Deleted = 53
"""已删除"""
class TThostFtdcSendMethodType(Enum):
    """Send method (发送方式类型).

    NOTE(review): the labels look swapped relative to the member names
    (``ByAPI`` is labelled "file send", ``ByFile`` "electronic send").
    The same pairing appears in some revisions of the upstream CTP
    header, so this may be faithful to upstream -- verify against the
    exact header revision before "fixing" either side.
    """
    THOST_FTDC_UOASM_ByAPI = 49
    """文件发送 -- literally "file send" (see class note)."""
    THOST_FTDC_UOASM_ByFile = 50
    """电子发送 -- literally "electronic send" (see class note)."""
class TThostFtdcEventModeType(Enum):
"""操作方法类型"""
THOST_FTDC_EvM_ADD = 49
"""增加"""
THOST_FTDC_EvM_UPDATE = 50
"""修改"""
THOST_FTDC_EvM_DELETE = 51
"""删除"""
THOST_FTDC_EvM_CHECK = 52
"""复核"""
THOST_FTDC_EvM_COPY = 53
"""复制"""
THOST_FTDC_EvM_CANCEL = 54
"""注销"""
THOST_FTDC_EvM_Reverse = 55
"""冲销"""
class TThostFtdcUOAAutoSendType(Enum):
"""统一开户申请自动发送类型"""
THOST_FTDC_UOAA_ASR = 49
"""自动发送并接收"""
THOST_FTDC_UOAA_ASNR = 50
"""自动发送,不自动接收"""
THOST_FTDC_UOAA_NSAR = 51
"""不自动发送,自动接收"""
THOST_FTDC_UOAA_NSR = 52
"""不自动发送,也不自动接收"""
class TThostFtdcFlowIDType(Enum):
"""流程ID类型"""
THOST_FTDC_EvM_InvestorGroupFlow = 49
"""投资者对应投资者组设置"""
THOST_FTDC_EvM_InvestorRate = 50
"""投资者手续费率设置"""
THOST_FTDC_EvM_InvestorCommRateModel = 51
"""投资者手续费率模板关系设置"""
class TThostFtdcCheckLevelType(Enum):
"""复核级别类型"""
THOST_FTDC_CL_Zero = 48
"""零级复核"""
THOST_FTDC_CL_One = 49
"""一级复核"""
THOST_FTDC_CL_Two = 50
"""二级复核"""
class TThostFtdcCheckStatusType(Enum):
"""复核级别类型"""
THOST_FTDC_CHS_Init = 48
"""未复核"""
THOST_FTDC_CHS_Checking = 49
"""复核中"""
THOST_FTDC_CHS_Checked = 50
"""已复核"""
THOST_FTDC_CHS_Refuse = 51
"""拒绝"""
THOST_FTDC_CHS_Cancel = 52
"""作废"""
class TThostFtdcUsedStatusType(Enum):
"""生效状态类型"""
THOST_FTDC_CHU_Unused = 48
"""未生效"""
THOST_FTDC_CHU_Used = 49
"""已生效"""
THOST_FTDC_CHU_Fail = 50
"""生效失败"""
class TThostFtdcBankAcountOriginType(Enum):
"""账户来源类型"""
THOST_FTDC_BAO_ByAccProperty = 48
"""手工录入"""
THOST_FTDC_BAO_ByFBTransfer = 49
"""银期转账"""
class TThostFtdcMonthBillTradeSumType(Enum):
"""结算单月报成交汇总方式类型"""
THOST_FTDC_MBTS_ByInstrument = 48
"""同日同合约"""
THOST_FTDC_MBTS_ByDayInsPrc = 49
"""同日同合约同价格"""
THOST_FTDC_MBTS_ByDayIns = 50
"""同合约"""
class TThostFtdcFBTTradeCodeEnumType(Enum):
    """Bank-futures trade code enumeration (银期交易代码枚举类型).

    NOTE(review): the bank-launched and broker-launched members share
    values (49 and 50).  Under Python Enum semantics
    ``THOST_FTDC_FTC_BrokerLaunchBankToBroker`` and
    ``THOST_FTDC_FTC_BrokerLaunchBrokerToBank`` are therefore silent
    *aliases* of the corresponding ``BankLaunch*`` members and never
    appear in iteration or value lookup.  Upstream these are reportedly
    distinct multi-character codes ('102001', '202001', ...), so the
    collision looks like a code-generation artifact -- confirm with the
    generator.  Values are not changed here because they are part of the
    wire protocol.
    """
    THOST_FTDC_FTC_BankLaunchBankToBroker = 49
    """Bank-launched: bank to futures broker (银行发起银行转期货)."""
    THOST_FTDC_FTC_BrokerLaunchBankToBroker = 49
    """Broker-launched: bank to futures broker (期货发起银行转期货) -- alias of BankLaunchBankToBroker (see class note)."""
    THOST_FTDC_FTC_BankLaunchBrokerToBank = 50
    """Bank-launched: futures broker to bank (银行发起期货转银行)."""
    THOST_FTDC_FTC_BrokerLaunchBrokerToBank = 50
    """Broker-launched: futures broker to bank (期货发起期货转银行) -- alias of BankLaunchBrokerToBank (see class note)."""
class TThostFtdcOTPTypeType(Enum):
"""动态令牌类型类型"""
THOST_FTDC_OTP_NONE = 48
"""无动态令牌"""
THOST_FTDC_OTP_TOTP = 49
"""时间令牌"""
class TThostFtdcOTPStatusType(Enum):
"""动态令牌状态类型"""
THOST_FTDC_OTPS_Unused = 48
"""未使用"""
THOST_FTDC_OTPS_Used = 49
"""已使用"""
THOST_FTDC_OTPS_Disuse = 50
"""注销"""
class TThostFtdcBrokerUserTypeType(Enum):
"""经济公司用户类型类型"""
THOST_FTDC_BUT_Investor = 49
"""投资者"""
THOST_FTDC_BUT_BrokerUser = 50
"""操作员"""
class TThostFtdcFutureTypeType(Enum):
"""期货类型类型"""
THOST_FTDC_FUTT_Commodity = 49
"""商品期货"""
THOST_FTDC_FUTT_Financial = 50
"""金融期货"""
class TThostFtdcFundEventTypeType(Enum):
"""资金管理操作类型类型"""
THOST_FTDC_FET_Restriction = 48
"""转账限额"""
THOST_FTDC_FET_TodayRestriction = 49
"""当日转账限额"""
THOST_FTDC_FET_Transfer = 50
"""期商流水"""
THOST_FTDC_FET_Credit = 51
"""资金冻结"""
THOST_FTDC_FET_InvestorWithdrawAlm = 52
"""投资者可提资金比例"""
THOST_FTDC_FET_BankRestriction = 53
"""单个银行帐户转账限额"""
THOST_FTDC_FET_Accountregister = 54
"""银期签约账户"""
THOST_FTDC_FET_ExchangeFundIO = 55
"""交易所出入金"""
THOST_FTDC_FET_InvestorFundIO = 56
"""投资者出入金"""
class TThostFtdcAccountSourceTypeType(Enum):
"""资金账户来源类型"""
THOST_FTDC_AST_FBTransfer = 48
"""银期同步"""
THOST_FTDC_AST_ManualEntry = 49
"""手工录入"""
class TThostFtdcCodeSourceTypeType(Enum):
"""交易编码来源类型"""
THOST_FTDC_CST_UnifyAccount = 48
"""统一开户(已规范)"""
THOST_FTDC_CST_ManualEntry = 49
"""手工录入(未规范)"""
class TThostFtdcUserRangeType(Enum):
"""操作员范围类型"""
THOST_FTDC_UR_All = 48
"""所有"""
THOST_FTDC_UR_Single = 49
"""单一操作员"""
class TThostFtdcByGroupType(Enum):
    """Per-client statistics mode for the trade-summary report (交易统计表按客户统计方式).

    Declared out of numeric order on purpose: Investor is '2' (50) and
    Group is '1' (49), following the upstream constants.
    """
    THOST_FTDC_BG_Investor = 50
    """Statistics per investor (按投资者统计)."""
    THOST_FTDC_BG_Group = 49
    """Statistics per category/group (按类统计)."""
class TThostFtdcTradeSumStatModeType(Enum):
"""交易统计表按范围统计方式类型"""
THOST_FTDC_TSSM_Instrument = 49
"""按合约统计"""
THOST_FTDC_TSSM_Product = 50
"""按产品统计"""
THOST_FTDC_TSSM_Exchange = 51
"""按交易所统计"""
class TThostFtdcExprSetModeType(Enum):
"""日期表达式设置类型类型"""
THOST_FTDC_ESM_Relative = 49
"""相对已有规则设置"""
THOST_FTDC_ESM_Typical = 50
"""典型设置"""
class TThostFtdcRateInvestorRangeType(Enum):
"""投资者范围类型"""
THOST_FTDC_RIR_All = 49
"""公司标准"""
THOST_FTDC_RIR_Model = 50
"""模板"""
THOST_FTDC_RIR_Single = 51
"""单一投资者"""
class TThostFtdcSyncDataStatusType(Enum):
"""主次用系统数据同步状态类型"""
THOST_FTDC_SDS_Initialize = 48
"""未同步"""
THOST_FTDC_SDS_Settlementing = 49
"""同步中"""
THOST_FTDC_SDS_Settlemented = 50
"""已同步"""
class TThostFtdcTradeSourceType(Enum):
"""成交来源类型"""
THOST_FTDC_TSRC_NORMAL = 48
"""来自交易所普通回报"""
THOST_FTDC_TSRC_QUERY = 49
"""来自查询"""
class TThostFtdcFlexStatModeType(Enum):
"""产品合约统计方式类型"""
THOST_FTDC_FSM_Product = 49
"""产品统计"""
THOST_FTDC_FSM_Exchange = 50
"""交易所统计"""
THOST_FTDC_FSM_All = 51
"""统计所有"""
class TThostFtdcByInvestorRangeType(Enum):
"""投资者范围统计方式类型"""
THOST_FTDC_BIR_Property = 49
"""属性统计"""
THOST_FTDC_BIR_All = 50
"""统计所有"""
class TThostFtdcPropertyInvestorRangeType(Enum):
"""投资者范围类型"""
THOST_FTDC_PIR_All = 49
"""所有"""
THOST_FTDC_PIR_Property = 50
"""投资者属性"""
THOST_FTDC_PIR_Single = 51
"""单一投资者"""
class TThostFtdcFileStatusType(Enum):
"""文件状态类型"""
THOST_FTDC_FIS_NoCreate = 48
"""未生成"""
THOST_FTDC_FIS_Created = 49
"""已生成"""
THOST_FTDC_FIS_Failed = 50
"""生成失败"""
class TThostFtdcFileGenStyleType(Enum):
"""文件生成方式类型"""
THOST_FTDC_FGS_FileTransmit = 48
"""下发"""
THOST_FTDC_FGS_FileGen = 49
"""生成"""
class TThostFtdcSysOperModeType(Enum):
"""系统日志操作方法类型"""
THOST_FTDC_SoM_Add = 49
"""增加"""
THOST_FTDC_SoM_Update = 50
"""修改"""
THOST_FTDC_SoM_Delete = 51
"""删除"""
THOST_FTDC_SoM_Copy = 52
"""复制"""
THOST_FTDC_SoM_AcTive = 53
"""激活"""
THOST_FTDC_SoM_CanCel = 54
"""注销"""
THOST_FTDC_SoM_ReSet = 55
"""重置"""
class TThostFtdcSysOperTypeType(Enum):
"""系统日志操作类型类型"""
THOST_FTDC_SoT_UpdatePassword = 48
"""修改操作员密码"""
THOST_FTDC_SoT_UserDepartment = 49
"""操作员组织架构关系"""
THOST_FTDC_SoT_RoleManager = 50
"""角色管理"""
THOST_FTDC_SoT_RoleFunction = 51
"""角色功能设置"""
THOST_FTDC_SoT_BaseParam = 52
"""基础参数设置"""
THOST_FTDC_SoT_SetUserID = 53
"""设置操作员"""
THOST_FTDC_SoT_SetUserRole = 54
"""用户角色设置"""
THOST_FTDC_SoT_UserIpRestriction = 55
"""用户IP限制"""
THOST_FTDC_SoT_DepartmentManager = 56
"""组织架构管理"""
THOST_FTDC_SoT_DepartmentCopy = 57
"""组织架构向查询分类复制"""
THOST_FTDC_SoT_Tradingcode = 65
"""交易编码管理"""
THOST_FTDC_SoT_InvestorStatus = 66
"""投资者状态维护"""
THOST_FTDC_SoT_InvestorAuthority = 67
"""投资者权限管理"""
THOST_FTDC_SoT_PropertySet = 68
"""属性设置"""
THOST_FTDC_SoT_ReSetInvestorPasswd = 69
"""重置投资者密码"""
THOST_FTDC_SoT_InvestorPersonalityInfo = 70
"""投资者个性信息维护"""
class TThostFtdcCSRCDataQueyTypeType(Enum):
"""上报数据查询类型类型"""
THOST_FTDC_CSRCQ_Current = 48
"""查询当前交易日报送的数据"""
THOST_FTDC_CSRCQ_History = 49
"""查询历史报送的代理经纪公司的数据"""
class TThostFtdcFreezeStatusType(Enum):
    """Account dormancy status type (values are ASCII character codes)."""
    THOST_FTDC_FRS_Normal = 49
    """Active (ASCII '1')"""
    THOST_FTDC_FRS_Freeze = 48
    """Dormant (ASCII '0')"""
class TThostFtdcStandardStatusType(Enum):
"""规范状态类型"""
THOST_FTDC_STST_Standard = 48
"""已规范"""
THOST_FTDC_STST_NonStandard = 49
"""未规范"""
class TThostFtdcRightParamTypeType(Enum):
"""配置类型类型"""
THOST_FTDC_RPT_Freeze = 49
"""休眠户"""
THOST_FTDC_RPT_FreezeActive = 50
"""激活休眠户"""
THOST_FTDC_RPT_OpenLimit = 51
"""开仓权限限制"""
THOST_FTDC_RPT_RelieveOpenLimit = 52
"""解除开仓权限限制"""
class TThostFtdcDataStatusType(Enum):
"""反洗钱审核表数据状态类型"""
THOST_FTDC_AMLDS_Normal = 48
"""正常"""
THOST_FTDC_AMLDS_Deleted = 49
"""已删除"""
class TThostFtdcAMLCheckStatusType(Enum):
"""审核状态类型"""
THOST_FTDC_AMLCHS_Init = 48
"""未复核"""
THOST_FTDC_AMLCHS_Checking = 49
"""复核中"""
THOST_FTDC_AMLCHS_Checked = 50
"""已复核"""
THOST_FTDC_AMLCHS_RefuseReport = 51
"""拒绝上报"""
class TThostFtdcAmlDateTypeType(Enum):
"""日期类型类型"""
THOST_FTDC_AMLDT_DrawDay = 48
"""检查日期"""
THOST_FTDC_AMLDT_TouchDay = 49
"""发生日期"""
class TThostFtdcAmlCheckLevelType(Enum):
"""审核级别类型"""
THOST_FTDC_AMLCL_CheckLevel0 = 48
"""零级审核"""
THOST_FTDC_AMLCL_CheckLevel1 = 49
"""一级审核"""
THOST_FTDC_AMLCL_CheckLevel2 = 50
"""二级审核"""
THOST_FTDC_AMLCL_CheckLevel3 = 51
"""三级审核"""
class TThostFtdcExportFileTypeType(Enum):
"""导出文件类型类型"""
THOST_FTDC_EFT_CSV = 48
"""CSV"""
THOST_FTDC_EFT_EXCEL = 49
"""Excel"""
THOST_FTDC_EFT_DBF = 50
"""DBF"""
class TThostFtdcSettleManagerTypeType(Enum):
"""结算配置类型类型"""
THOST_FTDC_SMT_Before = 49
"""结算前准备"""
THOST_FTDC_SMT_Settlement = 50
"""结算"""
THOST_FTDC_SMT_After = 51
"""结算后核对"""
THOST_FTDC_SMT_Settlemented = 52
"""结算后处理"""
class TThostFtdcSettleManagerLevelType(Enum):
"""结算配置等级类型"""
THOST_FTDC_SML_Must = 49
"""必要"""
THOST_FTDC_SML_Alarm = 50
"""警告"""
THOST_FTDC_SML_Prompt = 51
"""提示"""
THOST_FTDC_SML_Ignore = 52
"""不检查"""
class TThostFtdcSettleManagerGroupType(Enum):
"""模块分组类型"""
THOST_FTDC_SMG_Exhcange = 49
"""交易所核对"""
THOST_FTDC_SMG_ASP = 50
"""内部核对"""
THOST_FTDC_SMG_CSRC = 51
"""上报数据核对"""
class TThostFtdcLimitUseTypeType(Enum):
"""保值额度使用类型类型"""
THOST_FTDC_LUT_Repeatable = 49
"""可重复使用"""
THOST_FTDC_LUT_Unrepeatable = 50
"""不可重复使用"""
class TThostFtdcDataResourceType(Enum):
"""数据来源类型"""
THOST_FTDC_DAR_Settle = 49
"""本系统"""
THOST_FTDC_DAR_Exchange = 50
"""交易所"""
THOST_FTDC_DAR_CSRC = 51
"""报送数据"""
class TThostFtdcMarginTypeType(Enum):
"""保证金类型类型"""
THOST_FTDC_MGT_ExchMarginRate = 48
"""交易所保证金率"""
THOST_FTDC_MGT_InstrMarginRate = 49
"""投资者保证金率"""
THOST_FTDC_MGT_InstrMarginRateTrade = 50
"""投资者交易保证金率"""
class TThostFtdcActiveTypeType(Enum):
"""生效类型类型"""
THOST_FTDC_ACT_Intraday = 49
"""仅当日生效"""
THOST_FTDC_ACT_Long = 50
"""长期生效"""
class TThostFtdcMarginRateTypeType(Enum):
"""冲突保证金率类型类型"""
THOST_FTDC_MRT_Exchange = 49
"""交易所保证金率"""
THOST_FTDC_MRT_Investor = 50
"""投资者保证金率"""
THOST_FTDC_MRT_InvestorTrade = 51
"""投资者交易保证金率"""
class TThostFtdcBackUpStatusType(Enum):
"""备份数据状态类型"""
THOST_FTDC_BUS_UnBak = 48
"""未生成备份数据"""
THOST_FTDC_BUS_BakUp = 49
"""备份数据生成中"""
THOST_FTDC_BUS_BakUped = 50
"""已生成备份数据"""
THOST_FTDC_BUS_BakFail = 51
"""备份数据失败"""
class TThostFtdcInitSettlementType(Enum):
"""结算初始化状态类型"""
THOST_FTDC_SIS_UnInitialize = 48
"""结算初始化未开始"""
THOST_FTDC_SIS_Initialize = 49
"""结算初始化中"""
THOST_FTDC_SIS_Initialized = 50
"""结算初始化完成"""
class TThostFtdcReportStatusType(Enum):
"""报表数据生成状态类型"""
THOST_FTDC_SRS_NoCreate = 48
"""未生成报表数据"""
THOST_FTDC_SRS_Create = 49
"""报表数据生成中"""
THOST_FTDC_SRS_Created = 50
"""已生成报表数据"""
THOST_FTDC_SRS_CreateFail = 51
"""生成报表数据失败"""
class TThostFtdcSaveStatusType(Enum):
"""数据归档状态类型"""
THOST_FTDC_SSS_UnSaveData = 48
"""归档未完成"""
THOST_FTDC_SSS_SaveDatad = 49
"""归档完成"""
class TThostFtdcSettArchiveStatusType(Enum):
"""结算确认数据归档状态类型"""
THOST_FTDC_SAS_UnArchived = 48
"""未归档数据"""
THOST_FTDC_SAS_Archiving = 49
"""数据归档中"""
THOST_FTDC_SAS_Archived = 50
"""已归档数据"""
THOST_FTDC_SAS_ArchiveFail = 51
"""归档数据失败"""
class TThostFtdcCTPTypeType(Enum):
"""CTP交易系统类型类型"""
THOST_FTDC_CTPT_Unkown = 48
"""未知类型"""
THOST_FTDC_CTPT_MainCenter = 49
"""主中心"""
THOST_FTDC_CTPT_BackUp = 50
"""备中心"""
class TThostFtdcCloseDealTypeType(Enum):
"""平仓处理类型类型"""
THOST_FTDC_CDT_Normal = 48
"""正常"""
THOST_FTDC_CDT_SpecFirst = 49
"""投机平仓优先"""
class TThostFtdcMortgageFundUseRangeType(Enum):
"""货币质押资金可用范围类型"""
THOST_FTDC_MFUR_None = 48
"""不能使用"""
THOST_FTDC_MFUR_Margin = 49
"""用于保证金"""
THOST_FTDC_MFUR_All = 50
"""用于手续费、盈亏、保证金"""
THOST_FTDC_MFUR_CNY3 = 51
"""人民币方案3"""
class TThostFtdcSpecProductTypeType(Enum):
"""特殊产品类型类型"""
THOST_FTDC_SPT_CzceHedge = 49
"""郑商所套保产品"""
THOST_FTDC_SPT_IneForeignCurrency = 50
"""货币质押产品"""
THOST_FTDC_SPT_DceOpenClose = 51
"""大连短线开平仓产品"""
class TThostFtdcFundMortgageTypeType(Enum):
"""货币质押类型类型"""
THOST_FTDC_FMT_Mortgage = 49
"""质押"""
THOST_FTDC_FMT_Redemption = 50
"""解质"""
class TThostFtdcAccountSettlementParamIDType(Enum):
"""投资者账户结算参数代码类型"""
THOST_FTDC_ASPI_BaseMargin = 49
"""基础保证金"""
THOST_FTDC_ASPI_LowestInterest = 50
"""最低权益标准"""
class TThostFtdcFundMortDirectionType(Enum):
"""货币质押方向类型"""
THOST_FTDC_FMD_In = 49
"""货币质入"""
THOST_FTDC_FMD_Out = 50
"""货币质出"""
class TThostFtdcBusinessClassType(Enum):
    """FX-conversion business class type (values are ASCII character codes)."""
    THOST_FTDC_BT_Profit = 48
    """Profit (ASCII '0')"""
    THOST_FTDC_BT_Loss = 49
    """Loss (ASCII '1')"""
    THOST_FTDC_BT_Other = 90
    """Other (ASCII 'Z')"""
class TThostFtdcSwapSourceTypeType(Enum):
"""换汇数据来源类型"""
THOST_FTDC_SST_Manual = 48
"""手工"""
THOST_FTDC_SST_Automatic = 49
"""自动生成"""
class TThostFtdcCurrExDirectionType(Enum):
"""换汇类型类型"""
THOST_FTDC_CED_Settlement = 48
"""结汇"""
THOST_FTDC_CED_Sale = 49
"""售汇"""
class TThostFtdcCurrencySwapStatusType(Enum):
"""申请状态类型"""
THOST_FTDC_CSS_Entry = 49
"""已录入"""
THOST_FTDC_CSS_Approve = 50
"""已审核"""
THOST_FTDC_CSS_Refuse = 51
"""已拒绝"""
THOST_FTDC_CSS_Revoke = 52
"""已撤销"""
THOST_FTDC_CSS_Send = 53
"""已发送"""
THOST_FTDC_CSS_Success = 54
"""换汇成功"""
THOST_FTDC_CSS_Failure = 55
"""换汇失败"""
class TThostFtdcReqFlagType(Enum):
"""换汇发送标志类型"""
THOST_FTDC_REQF_NoSend = 48
"""未发送"""
THOST_FTDC_REQF_SendSuccess = 49
"""发送成功"""
THOST_FTDC_REQF_SendFailed = 50
"""发送失败"""
THOST_FTDC_REQF_WaitReSend = 51
"""等待重发"""
class TThostFtdcResFlagType(Enum):
"""换汇返回成功标志类型"""
THOST_FTDC_RESF_Success = 48
"""成功"""
THOST_FTDC_RESF_InsuffiCient = 49
"""账户余额不足"""
THOST_FTDC_RESF_UnKnown = 56
"""交易结果未知"""
class TThostFtdcExStatusType(Enum):
"""修改状态类型"""
THOST_FTDC_EXS_Before = 48
"""修改前"""
THOST_FTDC_EXS_After = 49
"""修改后"""
class TThostFtdcClientRegionType(Enum):
"""开户客户地域类型"""
THOST_FTDC_CR_Domestic = 49
"""国内客户"""
THOST_FTDC_CR_GMT = 50
"""港澳台客户"""
THOST_FTDC_CR_Foreign = 51
"""国外客户"""
class TThostFtdcHasBoardType(Enum):
"""是否有董事会类型"""
THOST_FTDC_HB_No = 48
"""没有"""
THOST_FTDC_HB_Yes = 49
"""有"""
class TThostFtdcStartModeType(Enum):
"""启动模式类型"""
THOST_FTDC_SM_Normal = 49
"""正常"""
THOST_FTDC_SM_Emerge = 50
"""应急"""
THOST_FTDC_SM_Restore = 51
"""恢复"""
class TThostFtdcTemplateTypeType(Enum):
"""模型类型类型"""
THOST_FTDC_TPT_Full = 49
"""全量"""
THOST_FTDC_TPT_Increment = 50
"""增量"""
THOST_FTDC_TPT_BackUp = 51
"""备份"""
class TThostFtdcLoginModeType(Enum):
"""登录模式类型"""
THOST_FTDC_LM_Trade = 48
"""交易"""
THOST_FTDC_LM_Transfer = 49
"""转账"""
class TThostFtdcPromptTypeType(Enum):
"""日历提示类型类型"""
THOST_FTDC_CPT_Instrument = 49
"""合约上下市"""
THOST_FTDC_CPT_Margin = 50
"""保证金分段生效"""
class TThostFtdcHasTrusteeType(Enum):
"""是否有托管人类型"""
THOST_FTDC_HT_Yes = 49
"""有"""
THOST_FTDC_HT_No = 48
"""没有"""
class TThostFtdcAmTypeType(Enum):
"""机构类型类型"""
THOST_FTDC_AMT_Bank = 49
"""银行"""
THOST_FTDC_AMT_Securities = 50
"""证券公司"""
THOST_FTDC_AMT_Fund = 51
"""基金公司"""
THOST_FTDC_AMT_Insurance = 52
"""保险公司"""
THOST_FTDC_AMT_Trust = 53
"""信托公司"""
THOST_FTDC_AMT_Other = 57
"""其他"""
class TThostFtdcCSRCFundIOTypeType(Enum):
"""出入金类型类型"""
THOST_FTDC_CFIOT_FundIO = 48
"""出入金"""
THOST_FTDC_CFIOT_SwapCurrency = 49
"""银期换汇"""
class TThostFtdcCusAccountTypeType(Enum):
"""结算账户类型类型"""
THOST_FTDC_CAT_Futures = 49
"""期货结算账户"""
THOST_FTDC_CAT_AssetmgrFuture = 50
"""纯期货资管业务下的资管结算账户"""
THOST_FTDC_CAT_AssetmgrTrustee = 51
"""综合类资管业务下的期货资管托管账户"""
THOST_FTDC_CAT_AssetmgrTransfer = 52
"""综合类资管业务下的资金中转账户"""
class TThostFtdcLanguageTypeType(Enum):
"""通知语言类型类型"""
THOST_FTDC_LT_Chinese = 49
"""中文"""
THOST_FTDC_LT_English = 50
"""英文"""
class TThostFtdcAssetmgrClientTypeType(Enum):
"""资产管理客户类型类型"""
THOST_FTDC_AMCT_Person = 49
"""个人资管客户"""
THOST_FTDC_AMCT_Organ = 50
"""单位资管客户"""
THOST_FTDC_AMCT_SpecialOrgan = 52
"""特殊单位资管客户"""
class TThostFtdcAssetmgrTypeType(Enum):
"""投资类型类型"""
THOST_FTDC_ASST_Futures = 51
"""期货类"""
THOST_FTDC_ASST_SpecialOrgan = 52
"""综合类"""
class TThostFtdcCheckInstrTypeType(Enum):
"""合约比较类型类型"""
THOST_FTDC_CIT_HasExch = 48
"""合约交易所不存在"""
THOST_FTDC_CIT_HasATP = 49
"""合约本系统不存在"""
THOST_FTDC_CIT_HasDiff = 50
"""合约比较不一致"""
class TThostFtdcDeliveryTypeType(Enum):
"""交割类型类型"""
THOST_FTDC_DT_HandDeliv = 49
"""手工交割"""
THOST_FTDC_DT_PersonDeliv = 50
"""到期交割"""
class TThostFtdcMaxMarginSideAlgorithmType(Enum):
"""大额单边保证金算法类型"""
THOST_FTDC_MMSA_NO = 48
"""不使用大额单边保证金算法"""
THOST_FTDC_MMSA_YES = 49
"""使用大额单边保证金算法"""
class TThostFtdcDAClientTypeType(Enum):
"""资产管理客户类型类型"""
THOST_FTDC_CACT_Person = 48
"""自然人"""
THOST_FTDC_CACT_Company = 49
"""法人"""
THOST_FTDC_CACT_Other = 50
"""其他"""
class TThostFtdcUOAAssetmgrTypeType(Enum):
"""投资类型类型"""
THOST_FTDC_UOAAT_Futures = 49
"""期货类"""
THOST_FTDC_UOAAT_SpecialOrgan = 50
"""综合类"""
class TThostFtdcDirectionEnType(Enum):
"""买卖方向类型"""
THOST_FTDC_DEN_Buy = 48
"""Buy"""
THOST_FTDC_DEN_Sell = 49
"""Sell"""
class TThostFtdcOffsetFlagEnType(Enum):
"""开平标志类型"""
THOST_FTDC_OFEN_Open = 48
"""Position Opening"""
THOST_FTDC_OFEN_Close = 49
"""Position Close"""
THOST_FTDC_OFEN_ForceClose = 50
"""Forced Liquidation"""
THOST_FTDC_OFEN_CloseToday = 51
"""Close Today"""
THOST_FTDC_OFEN_CloseYesterday = 52
"""Close Prev."""
THOST_FTDC_OFEN_ForceOff = 53
"""Forced Reduction"""
THOST_FTDC_OFEN_LocalForceClose = 54
"""Local Forced Liquidation"""
class TThostFtdcHedgeFlagEnType(Enum):
"""投机套保标志类型"""
THOST_FTDC_HFEN_Speculation = 49
"""Speculation"""
THOST_FTDC_HFEN_Arbitrage = 50
"""Arbitrage"""
THOST_FTDC_HFEN_Hedge = 51
"""Hedge"""
class TThostFtdcFundIOTypeEnType(Enum):
"""出入金类型类型"""
THOST_FTDC_FIOTEN_FundIO = 49
"""Deposit/Withdrawal"""
THOST_FTDC_FIOTEN_Transfer = 50
"""Bank-Futures Transfer"""
THOST_FTDC_FIOTEN_SwapCurrency = 51
"""Bank-Futures FX Exchange"""
class TThostFtdcFundTypeEnType(Enum):
"""资金类型类型"""
THOST_FTDC_FTEN_Deposite = 49
"""Bank Deposit"""
THOST_FTDC_FTEN_ItemFund = 50
"""Payment/Fee"""
THOST_FTDC_FTEN_Company = 51
"""Brokerage Adj"""
THOST_FTDC_FTEN_InnerTransfer = 52
"""Internal Transfer"""
class TThostFtdcFundDirectionEnType(Enum):
"""出入金方向类型"""
THOST_FTDC_FDEN_In = 49
"""Deposit"""
THOST_FTDC_FDEN_Out = 50
"""Withdrawal"""
class TThostFtdcFundMortDirectionEnType(Enum):
"""货币质押方向类型"""
THOST_FTDC_FMDEN_In = 49
"""Pledge"""
THOST_FTDC_FMDEN_Out = 50
"""Redemption"""
class TThostFtdcOptionsTypeType(Enum):
"""期权类型类型"""
THOST_FTDC_CP_CallOptions = 49
"""看涨"""
THOST_FTDC_CP_PutOptions = 50
"""看跌"""
class TThostFtdcStrikeModeType(Enum):
"""执行方式类型"""
THOST_FTDC_STM_Continental = 48
"""欧式"""
THOST_FTDC_STM_American = 49
"""美式"""
THOST_FTDC_STM_Bermuda = 50
"""百慕大"""
class TThostFtdcStrikeTypeType(Enum):
"""执行类型类型"""
THOST_FTDC_STT_Hedge = 48
"""自身对冲"""
THOST_FTDC_STT_Match = 49
"""匹配执行"""
class TThostFtdcApplyTypeType(Enum):
"""中金所期权放弃执行申请类型类型"""
THOST_FTDC_APPT_NotStrikeNum = 52
"""不执行数量"""
class TThostFtdcGiveUpDataSourceType(Enum):
"""放弃执行申请数据来源类型"""
THOST_FTDC_GUDS_Gen = 48
"""系统生成"""
THOST_FTDC_GUDS_Hand = 49
"""手工添加"""
class TThostFtdcExecResultType(Enum):
"""执行结果类型"""
THOST_FTDC_OER_NoExec = 110
"""没有执行"""
THOST_FTDC_OER_Canceled = 99
"""已经取消"""
THOST_FTDC_OER_OK = 48
"""执行成功"""
THOST_FTDC_OER_NoPosition = 49
"""期权持仓不够"""
THOST_FTDC_OER_NoDeposit = 50
"""资金不够"""
THOST_FTDC_OER_NoParticipant = 51
"""会员不存在"""
THOST_FTDC_OER_NoClient = 52
"""客户不存在"""
THOST_FTDC_OER_NoInstrument = 54
"""合约不存在"""
THOST_FTDC_OER_NoRight = 55
"""没有执行权限"""
THOST_FTDC_OER_InvalidVolume = 56
"""不合理的数量"""
THOST_FTDC_OER_NoEnoughHistoryTrade = 57
"""没有足够的历史成交"""
THOST_FTDC_OER_Unknown = 97
"""未知"""
class TThostFtdcCombinationTypeType(Enum):
"""组合类型类型"""
THOST_FTDC_COMBT_Future = 48
"""期货组合"""
THOST_FTDC_COMBT_BUL = 49
"""垂直价差BUL"""
THOST_FTDC_COMBT_BER = 50
"""垂直价差BER"""
THOST_FTDC_COMBT_STD = 51
"""跨式组合"""
THOST_FTDC_COMBT_STG = 52
"""宽跨式组合"""
THOST_FTDC_COMBT_PRT = 53
"""备兑组合"""
THOST_FTDC_COMBT_CLD = 54
"""时间价差组合"""
class TThostFtdcDceCombinationTypeType(Enum):
"""组合类型类型"""
THOST_FTDC_DCECOMBT_SPL = 48
"""期货对锁组合"""
THOST_FTDC_DCECOMBT_OPL = 49
"""期权对锁组合"""
THOST_FTDC_DCECOMBT_SP = 50
"""期货跨期组合"""
THOST_FTDC_DCECOMBT_SPC = 51
"""期货跨品种组合"""
THOST_FTDC_DCECOMBT_BLS = 52
"""买入期权垂直价差组合"""
THOST_FTDC_DCECOMBT_BES = 53
"""卖出期权垂直价差组合"""
THOST_FTDC_DCECOMBT_CAS = 54
"""期权日历价差组合"""
THOST_FTDC_DCECOMBT_STD = 55
"""期权跨式组合"""
THOST_FTDC_DCECOMBT_STG = 56
"""期权宽跨式组合"""
THOST_FTDC_DCECOMBT_BFO = 57
"""买入期货期权组合"""
THOST_FTDC_DCECOMBT_SFO = 97
"""卖出期货期权组合"""
class TThostFtdcOptionRoyaltyPriceTypeType(Enum):
"""期权权利金价格类型类型"""
THOST_FTDC_ORPT_PreSettlementPrice = 49
"""昨结算价"""
THOST_FTDC_ORPT_OpenPrice = 52
"""开仓价"""
THOST_FTDC_ORPT_MaxPreSettlementPrice = 53
"""最新价与昨结算价较大值"""
class TThostFtdcBalanceAlgorithmType(Enum):
"""权益算法类型"""
THOST_FTDC_BLAG_Default = 49
"""不计算期权市值盈亏"""
THOST_FTDC_BLAG_IncludeOptValLost = 50
"""计算期权市值亏损"""
class TThostFtdcActionTypeType(Enum):
"""执行类型类型"""
THOST_FTDC_ACTP_Exec = 49
"""执行"""
THOST_FTDC_ACTP_Abandon = 50
"""放弃"""
class TThostFtdcForQuoteStatusType(Enum):
"""询价状态类型"""
THOST_FTDC_FQST_Submitted = 97
"""已经提交"""
THOST_FTDC_FQST_Accepted = 98
"""已经接受"""
THOST_FTDC_FQST_Rejected = 99
"""已经被拒绝"""
class TThostFtdcValueMethodType(Enum):
"""取值方式类型"""
THOST_FTDC_VM_Absolute = 48
"""按绝对值"""
THOST_FTDC_VM_Ratio = 49
"""按比率"""
class TThostFtdcExecOrderPositionFlagType(Enum):
"""期权行权后是否保留期货头寸的标记类型"""
THOST_FTDC_EOPF_Reserve = 48
"""保留"""
THOST_FTDC_EOPF_UnReserve = 49
"""不保留"""
class TThostFtdcExecOrderCloseFlagType(Enum):
"""期权行权后生成的头寸是否自动平仓类型"""
THOST_FTDC_EOCF_AutoClose = 48
"""自动平仓"""
THOST_FTDC_EOCF_NotToClose = 49
"""免于自动平仓"""
class TThostFtdcProductTypeType(Enum):
"""产品类型类型"""
THOST_FTDC_PTE_Futures = 49
"""期货"""
THOST_FTDC_PTE_Options = 50
"""期权"""
class TThostFtdcCZCEUploadFileNameType(Enum):
"""郑商所结算文件名类型"""
THOST_FTDC_CUFN_CUFN_O = 79
"""^\d{8}_zz_\d{4}"""
THOST_FTDC_CUFN_CUFN_T = 84
"""^\d{8}成交表"""
THOST_FTDC_CUFN_CUFN_P = 80
"""^\d{8}单腿持仓表new"""
THOST_FTDC_CUFN_CUFN_N = 78
"""^\d{8}非平仓了结表"""
THOST_FTDC_CUFN_CUFN_L = 76
"""^\d{8}平仓表"""
THOST_FTDC_CUFN_CUFN_F = 70
"""^\d{8}资金表"""
THOST_FTDC_CUFN_CUFN_C = 67
"""^\d{8}组合持仓表"""
THOST_FTDC_CUFN_CUFN_M = 77
"""^\d{8}保证金参数表"""
class TThostFtdcDCEUploadFileNameType(Enum):
"""大商所结算文件名类型"""
THOST_FTDC_DUFN_DUFN_O = 79
"""^\d{8}_dl_\d{3}"""
THOST_FTDC_DUFN_DUFN_T = 84
"""^\d{8}_成交表"""
THOST_FTDC_DUFN_DUFN_P = 80
"""^\d{8}_持仓表"""
THOST_FTDC_DUFN_DUFN_F = 70
"""^\d{8}_资金结算表"""
THOST_FTDC_DUFN_DUFN_C = 67
"""^\d{8}_优惠组合持仓明细表"""
THOST_FTDC_DUFN_DUFN_D = 68
"""^\d{8}_持仓明细表"""
THOST_FTDC_DUFN_DUFN_M = 77
"""^\d{8}_保证金参数表"""
THOST_FTDC_DUFN_DUFN_S = 83
"""^\d{8}_期权执行表"""
class TThostFtdcSHFEUploadFileNameType(Enum):
"""上期所结算文件名类型"""
THOST_FTDC_SUFN_SUFN_O = 79
"""^\d{4}_\d{8}_\d{8}_DailyFundChg"""
THOST_FTDC_SUFN_SUFN_T = 84
"""^\d{4}_\d{8}_\d{8}_Trade"""
THOST_FTDC_SUFN_SUFN_P = 80
"""^\d{4}_\d{8}_\d{8}_SettlementDetail"""
THOST_FTDC_SUFN_SUFN_F = 70
"""^\d{4}_\d{8}_\d{8}_Capital"""
class TThostFtdcCFFEXUploadFileNameType(Enum):
"""中金所结算文件名类型"""
THOST_FTDC_CFUFN_SUFN_T = 84
"""^\d{4}_SG\d{1}_\d{8}_\d{1}_Trade"""
THOST_FTDC_CFUFN_SUFN_P = 80
"""^\d{4}_SG\d{1}_\d{8}_\d{1}_SettlementDetail"""
THOST_FTDC_CFUFN_SUFN_F = 70
"""^\d{4}_SG\d{1}_\d{8}_\d{1}_Capital"""
THOST_FTDC_CFUFN_SUFN_S = 83
"""^\d{4}_SG\d{1}_\d{8}_\d{1}_OptionExec"""
class TThostFtdcCombDirectionType(Enum):
"""组合指令方向类型"""
THOST_FTDC_CMDR_Comb = 48
"""申请组合"""
THOST_FTDC_CMDR_UnComb = 49
"""申请拆分"""
class TThostFtdcStrikeOffsetTypeType(Enum):
"""行权偏移类型类型"""
THOST_FTDC_STOV_RealValue = 49
"""实值额"""
THOST_FTDC_STOV_ProfitValue = 50
"""盈利额"""
THOST_FTDC_STOV_RealRatio = 51
"""实值比例"""
THOST_FTDC_STOV_ProfitRatio = 52
"""盈利比例"""
class TThostFtdcReserveOpenAccStasType(Enum):
"""预约开户状态类型"""
THOST_FTDC_ROAST_Processing = 48
"""等待处理中"""
THOST_FTDC_ROAST_Cancelled = 49
"""已撤销"""
THOST_FTDC_ROAST_Opened = 50
"""已开户"""
THOST_FTDC_ROAST_Invalid = 51
"""无效请求"""
class TThostFtdcNewsUrgencyType(Enum):
"""紧急程度类型"""
class TThostFtdcWeakPasswordSourceType(Enum):
"""弱密码来源类型"""
THOST_FTDC_WPSR_Lib = 49
"""弱密码库"""
THOST_FTDC_WPSR_Manual = 50
"""手工录入"""
class TThostFtdcOptSelfCloseFlagType(Enum):
"""期权行权的头寸是否自对冲类型"""
THOST_FTDC_OSCF_CloseSelfOptionPosition = 49
"""自对冲期权仓位"""
THOST_FTDC_OSCF_ReserveOptionPosition = 50
"""保留期权仓位"""
THOST_FTDC_OSCF_SellCloseSelfFuturePosition = 51
"""自对冲卖方履约后的期货仓位"""
THOST_FTDC_OSCF_ReserveFuturePosition = 52
"""保留卖方履约后的期货仓位"""
class TThostFtdcBizTypeType(Enum):
"""业务类型类型"""
THOST_FTDC_BZTP_Future = 49
"""期货"""
THOST_FTDC_BZTP_Stock = 50
"""证券"""
class TThostFtdcAppTypeType(Enum):
"""用户App类型类型"""
THOST_FTDC_APP_TYPE_Investor = 49
"""直连的投资者"""
THOST_FTDC_APP_TYPE_InvestorRelay = 50
"""为每个投资者都创建连接的中继"""
THOST_FTDC_APP_TYPE_OperatorRelay = 51
"""所有投资者共享一个操作员连接的中继"""
THOST_FTDC_APP_TYPE_UnKnown = 52
"""未知"""
class TThostFtdcResponseValueType(Enum):
"""应答类型类型"""
THOST_FTDC_RV_Right = 48
"""检查成功"""
THOST_FTDC_RV_Refuse = 49
"""检查失败"""
class TThostFtdcOTCTradeTypeType(Enum):
"""OTC成交类型类型"""
THOST_FTDC_OTC_TRDT_Block = 48
"""大宗交易"""
THOST_FTDC_OTC_TRDT_EFP = 49
"""期转现"""
class TThostFtdcMatchTypeType(Enum):
"""期现风险匹配方式类型"""
THOST_FTDC_OTC_MT_DV01 = 49
"""基点价值"""
THOST_FTDC_OTC_MT_ParValue = 50
"""面值"""
| StarcoderdataPython |
9652490 | """
Interface module to the business logic
"""
from app.logic.model_manager import ModelLoader, ModelsIndex
import numpy as np
# Initialize db for docker.csv
# Module-level model index shared by OperationController._add_model_state.
# NOTE(review): path_to_cache appears to select the cache layout
# (0 = docker, 1 = local fixture) -- confirm against ModelsIndex.
db = ModelsIndex('docker.csv', path_to_cache=0)
# Init db for local
#db = ModelsIndex('fixture.csv', path_to_cache=1)
class OperationController:
    """Thin dispatcher between the web layer and the model operations.

    Currently only validates that an operation name is present; the
    actual 'edit'/'load' handlers are stubbed out.
    """

    def __init__(self):
        # Cache of loaded geo-models, keyed by model identifier.
        self.geo_models_dict = {}

    def parse(self, **kwargs):
        """Dispatch an operation described by keyword arguments.

        Raises:
            KeyError: if no 'op_name' keyword was supplied.
        """
        operation_name = kwargs.get('op_name')
        if operation_name is None:
            raise KeyError('Operations needs a name.')
        if operation_name == 'edit':
            # self.editGemPy(**kwargs['parameters'])
            pass
        elif operation_name == 'load':
            # self.loadGemPy(**kwargs['parameters'])
            pass

    @staticmethod
    def _add_model_state(modelUrn):
        """Increment the usage counter of the model in the shared index.

        Notes:
            At the moment 23.07.2020 is not persistent
        """
        db.df.loc[modelUrn, "m_state"] += 1
| StarcoderdataPython |
5005685 | import requests
from functools import partial
from datetime import datetime, timedelta
import calendar
import math
import nanome
import utils
from nanome.util import Logs
class Calendar:
    """Date/time picker backed by a nanome menu.

    Wires month/hour/minute controls and a 6x7 day grid, keeps the current
    selection clamped to [_min_datetime, _max_datetime], and invokes a
    registered callback whenever the datetime changes.

    Fix: removed a stray ``| StarcoderdataPython`` token that had been fused
    onto the final callback call (it raised NameError at runtime).
    """
    def __init__(self, _plugin, container):
        self._plugin = _plugin
        menu = nanome.ui.Menu.io.from_json('components/json/calendar.json')
        self._menu = menu
        calendar = menu.root.find_node('Container')
        self._datetime = datetime.now()
        # Wide-open default bounds; callers narrow them via the setters.
        self._min_datetime = datetime.fromtimestamp(0)
        self._max_datetime = datetime.fromtimestamp(1e10)
        self._input_month_year = calendar.find_node('Month Year').get_content()
        self._input_month_year.register_submitted_callback(self.set_date_from_text)
        self._button_inc_month = calendar.find_node('Inc Month').get_content()
        self._button_inc_month.register_pressed_callback(partial(self.change_month, 1))
        self._button_dec_month = calendar.find_node('Dec Month').get_content()
        self._button_dec_month.register_pressed_callback(partial(self.change_month, -1))
        self._input_hour = calendar.find_node('Hour Input').get_content()
        self._input_hour.register_submitted_callback(partial(self.set_time, self._datetime, True))
        self._input_min = calendar.find_node('Minute Input').get_content()
        self._input_min.register_submitted_callback(partial(self.set_time, self._datetime, False))
        self._button_inc_hour = calendar.find_node('Inc Hour').get_content()
        self._button_inc_hour.register_pressed_callback(partial(self.change_hour, 1))
        self._button_dec_hour = calendar.find_node('Dec Hour').get_content()
        self._button_dec_hour.register_pressed_callback(partial(self.change_hour, -1))
        self._button_inc_min = calendar.find_node('Inc Min').get_content()
        self._button_inc_min.register_pressed_callback(partial(self.change_min, 1))
        self._button_dec_min = calendar.find_node('Dec Min').get_content()
        self._button_dec_min.register_pressed_callback(partial(self.change_min, -1))
        self._button_AM_PM = calendar.find_node('AMPM Button').get_content()
        self._button_AM_PM.register_pressed_callback(partial(self.toggle_am_pm))
        self._label_AM_PM = calendar.find_node('AMPM Label').get_content()
        # 6 rows x 7 columns of day cells, named 'Day 1' .. 'Day 42'.
        self._calendar_day_buttons = []
        for i in range(1, 43):
            self._calendar_day_buttons.append(calendar.find_node('Day %d' % i))
        self._readonly = False
        container.add_child(calendar)
        self.populate_buttons()

    def populate_buttons(self):
        """Refresh the 42-cell day grid for the currently displayed month."""
        dt = self._datetime
        cal_btns = self._calendar_day_buttons
        first_day, num_days = calendar.monthrange(dt.year, dt.month)
        # monthrange() reports Monday=0; shift so the grid starts on Sunday.
        first_day = (first_day + 1) % 7
        for i in range(0, 42):
            btn = cal_btns[i].get_content()
            if i < first_day or i >= first_day + num_days:
                # Cell falls outside this month: blank and disable it.
                btn.unusable = True
                btn.set_all_text('')
            else:
                day = 1 + i - first_day
                btn.set_all_text(str(day))
                btn.selected = day == dt.day
                date = datetime(dt.year, dt.month, day, 0, 0)
                btn.register_pressed_callback(partial(self.set_date, date))
                min_date, max_date = self._min_datetime.date(), self._max_datetime.date()
                invalid_date = date.date() < min_date or date.date() > max_date
                btn.unusable = not btn.selected and (self._readonly or invalid_date)

    def _datetime_callback(self, dt):
        # Default no-op; replaced via register_changed_callback().
        pass

    def register_changed_callback(self, func):
        """Register func(datetime) to be called on every datetime change."""
        self._datetime_callback = func

    def set_readonly(self, readonly):
        """Enable/disable every editing control at once."""
        self._readonly = readonly
        self._button_inc_month.unusable = readonly
        self._button_dec_month.unusable = readonly
        self._button_inc_hour.unusable = readonly
        self._button_dec_hour.unusable = readonly
        self._button_inc_min.unusable = readonly
        self._button_dec_min.unusable = readonly
        self._button_AM_PM.unusable = readonly
        self.populate_buttons()

    def set_datetime(self, dt):
        self._datetime = dt
        self.update_datetime(True)

    def set_min_datetime(self, dt):
        self._min_datetime = dt
        self.update_datetime(False)

    def set_max_datetime(self, dt):
        self._max_datetime = dt
        self.update_datetime(False)

    def set_date(self, dt, button):
        """Day-cell pressed: keep the time of day, replace the date."""
        time = self._datetime.time()
        self._datetime = datetime.combine(dt.date(), time)
        self._input_month_year.input_text = self._datetime.strftime('%B %Y')
        self.update_datetime(True)

    def set_date_from_text(self, button):
        """Parse the 'Month Year' text field (e.g. 'July 2020')."""
        update = True
        txt = self._input_month_year.input_text
        try:
            date = datetime.strptime(txt, '%B %Y').date()
            sdt = self._datetime
            self._datetime = datetime(date.year, date.month, date.day, sdt.hour, sdt.minute)
        except Exception:
            update = False
            self._plugin._modal.show_error('invalid date')
        if update:
            self.update_datetime(True)

    def set_time(self, dt, is_hour, field):
        # NOTE(review): the bound 'dt' argument is stale (captured at
        # registration time) and intentionally ignored below.
        dt = self._datetime
        value = int(field.input_text)
        if is_hour:
            value = min(11, max(0, value % 12))
            # keep am pm from before
            if dt.hour >= 12: value += 12
            self._datetime = datetime(dt.year, dt.month, dt.day, value, dt.minute)
        else:
            value = min(59, max(0, value))
            self._datetime = datetime(dt.year, dt.month, dt.day, dt.hour, value)
        self.update_datetime(False)

    def change_month(self, dir, button):
        dt = self._datetime
        # Wrap month into 1..12; bump the year when crossing Dec/Jan.
        month = ((dt.month + dir - 1) % 12) + 1
        year_change = (dt.month == 1 and dir == -1) or (dt.month == 12 and dir == 1)
        year_inc = dir if year_change else 0
        self._datetime = datetime(dt.year + year_inc, month, dt.day, dt.hour, dt.minute)
        self.update_datetime(True)

    def change_hour(self, dir, button):
        day_before = self._datetime.day
        self._datetime += dir * timedelta(hours=1)
        day_after = self._datetime.day
        # Redraw the grid only if the hour change rolled the date over.
        update_buttons = day_before != day_after
        self.update_datetime(update_buttons)

    def change_min(self, dir, button):
        day_before = self._datetime.day
        self._datetime += dir * timedelta(minutes=1)
        day_after = self._datetime.day
        update_buttons = day_before != day_after
        self.update_datetime(update_buttons)

    def toggle_am_pm(self, button):
        dt = self._datetime
        self._datetime = datetime(dt.year, dt.month, dt.day, (dt.hour + 12) % 24, dt.minute)
        # Label shows the pre-toggle value here; update_datetime() below
        # immediately rewrites it from the new datetime.
        self._label_AM_PM.text_value = dt.strftime('%p')
        self.update_datetime(False)

    def update_datetime(self, update_buttons):
        """Clamp the selection to the allowed range and refresh the UI."""
        dt = min(self._max_datetime, max(self._min_datetime, self._datetime))
        update_buttons |= dt != self._datetime
        self._datetime = dt
        self._input_month_year.input_text = dt.strftime('%B %Y')
        self._input_hour.input_text = dt.strftime('%I')
        self._input_min.input_text = dt.strftime('%M')
        self._label_AM_PM.text_value = dt.strftime('%p')
        if not self._readonly:
            self._button_inc_month.unusable = dt.month == self._max_datetime.month and dt.year == self._max_datetime.year
            self._button_dec_month.unusable = dt.month == self._min_datetime.month and dt.year == self._min_datetime.year
            self._button_inc_hour.unusable = dt + timedelta(hours=1) > self._max_datetime
            self._button_dec_hour.unusable = dt - timedelta(hours=1) < self._min_datetime
            self._button_inc_min.unusable = dt + timedelta(minutes=1) > self._max_datetime
            self._button_dec_min.unusable = dt - timedelta(minutes=1) < self._min_datetime
        if update_buttons:
            self.populate_buttons()
        self._datetime_callback(dt)
5033046 | # Licensed under the terms of http://www.apache.org/licenses/LICENSE-2.0
# Author (©): <NAME>
import logging
import math
from mcpi.vec3 import Vec3
import mcpi.block
from mcthings.utils import size_region, find_min_max_cuboid_vertex
class BlockMemory:
    """In-memory record of a single Minecraft block: type id, extra data and position."""
    def __init__(self, block_id, block_data, pos):
        self.id = block_id  # mcpi block type id
        self.data = block_data  # block metadata value (may be None)
        self.pos = pos  # Vec3 world position of the block
class BlocksMemory:
    """
    Blocks memory for a Thing

    Holds a flat list of BlockMemory records plus a lazily built
    position->block cache, and provides geometric transforms
    (flip/rotate), fills, and NBT/Schematic export helpers.

    Fix: build_schematic() passed a spurious extra ``self`` argument to
    to_nbt(), which always raised TypeError.
    """
    def __init__(self, ):
        self.blocks = []
        # str(pos) -> BlockMemory cache, built on first find_block_at_pos().
        self._blocks_pos = {}

    def add(self, block_memory):
        """
        Add a new block to the memory.
        :param block_memory: memory for a block
        :return:
        """
        self.blocks.append(block_memory)

    def find_init_end_pos(self):
        """ Find the init and end cuboid positions (bounding box) from all the blocks in the memory """
        first_pos = self.blocks[0].pos
        init_pos = Vec3(first_pos.x, first_pos.y, first_pos.z)
        end_pos = Vec3(first_pos.x, first_pos.y, first_pos.z)
        for block in self.blocks:
            pos = block.pos
            # Track the per-axis minimum in init_pos ...
            if pos.x < init_pos.x:
                init_pos = Vec3(pos.x, init_pos.y, init_pos.z)
            if pos.y < init_pos.y:
                init_pos = Vec3(init_pos.x, pos.y, init_pos.z)
            if pos.z < init_pos.z:
                init_pos = Vec3(init_pos.x, init_pos.y, pos.z)
            # ... and the per-axis maximum in end_pos.
            if pos.x > end_pos.x:
                end_pos = Vec3(pos.x, end_pos.y, end_pos.z)
            if pos.y > end_pos.y:
                end_pos = Vec3(end_pos.x, pos.y, end_pos.z)
            if pos.z > end_pos.z:
                end_pos = Vec3(end_pos.x, end_pos.y, pos.z)
        return init_pos, end_pos

    def is_cuboid(self):
        """ Check if the memory is a filled cuboid """
        cuboid = False
        # Check that the number of blocks needed for the filled cuboid is the same that the blocks
        init_pos, vertex_max = self.find_init_end_pos()
        size = size_region(init_pos, vertex_max)
        if size.x * size.y * size.z == len(self.blocks):
            cuboid = True
        return cuboid

    def memory_equal(self):
        """ Check if all the blocks in the memory are equal (same id and data); False when empty """
        equal = True
        if self.blocks:
            last_block = self.blocks[0]
            for block in self.blocks:
                if block.id != last_block.id or block.data != last_block.data:
                    equal = False
                    break
                last_block = block
        else:
            equal = False
        return equal

    def flip_x(self, position):
        """
        Flip based on x-axis the blocks in memory using position as base position from which to rotate
        :param position: base position from which to rotate
        :return:
        """
        for block in self.blocks:
            # Find the x position and flip it
            width = abs(block.pos.x - position.x)
            # TODO: the flip could be done in two directions (left or right)
            # This one the the flip to the right
            x_flipped = position.x - width
            block.pos.x = x_flipped

    def fill(self, fill_block):
        """
        Fill all blocks in memory with fill_block
        :param fill_block: block to be used to fill all memory
        :return:
        """
        for block in self.blocks:
            block.id = fill_block.id
            block.data = fill_block.data

    def rotate(self, degrees, position):
        """
        Rotate degrees the blocks in memory using position as base position from which to rotate
        :param degrees: degrees to rotate (90, 180, 270)
        :param position: base position from which to rotate
        :return:
        """
        valid_degrees = [90, 180, 270]
        if degrees not in [90, 180, 270]:
            raise RuntimeError("Invalid degrees: %s (valid: %s) " % (degrees, valid_degrees))
        cos_degrees = math.cos(math.radians(degrees))
        sin_degrees = math.sin(math.radians(degrees))

        # Standard 2D rotation in the x/z plane around the origin.
        def rotate_x(pos_x, pos_z):
            return pos_x * cos_degrees - pos_z * sin_degrees

        def rotate_z(pos_x, pos_z):
            return pos_z * cos_degrees + pos_x * sin_degrees

        # Base position for the rotation
        init_pos = position
        rotated_blocks = []
        # Rotate all blocks with respect the initial position and add them.
        # round() keeps block coordinates on the integer grid.
        for block in self.blocks:
            b = block.id
            d = block.data
            x = block.pos.x - init_pos.x
            z = block.pos.z - init_pos.z
            rotated_x = round(init_pos.x + rotate_x(x, z))
            rotated_z = round(init_pos.z + rotate_z(x, z))
            rotated_blocks.append(BlockMemory(b, d, Vec3(rotated_x, block.pos.y, rotated_z)))
        # Replace all blocks in memory with the rotated ones
        self.blocks = []
        for rotated_block in rotated_blocks:
            self.set_block(rotated_block.pos, rotated_block.id, rotated_block.data)

    def set_block(self, pos, block_id, block_data=None):
        """Append a single block record at pos."""
        self.add(BlockMemory(block_id, block_data, pos))

    def set_blocks(self, vertex, vertex_opposite, block_id):
        """ Add a cuboid with the same block for all blocks and without specific data """
        block_data = None
        width = abs(vertex_opposite.x - vertex.x) + 1
        height = abs(vertex_opposite.y - vertex.y) + 1
        length = abs(vertex_opposite.z - vertex.z) + 1
        vertex_min, vertex_max = find_min_max_cuboid_vertex(vertex, vertex_opposite)
        for y in range(0, height):
            for z in range(0, length):
                for x in range(0, width):
                    block_pos = Vec3(vertex_min.x + x, vertex_min.y + y, vertex_min.z + z)
                    self.set_block(block_pos, block_id, block_data)

    def _create_blocks_pos(self):
        # NOTE(review): the cache is built once and never invalidated, so
        # blocks added after the first find_block_at_pos() call are not
        # found -- confirm whether callers rely on that ordering.
        logging.info("Creating the memory cache with positions")
        for block in self.blocks:
            self._blocks_pos[str(block.pos)] = block
        logging.info("Done memory cache with positions")

    def find_block_at_pos(self, pos):
        """
        Find a block in memory give its position
        TODO: Improve performance
        :param pos: position for the block
        :return: the block found or None
        """
        if not self._blocks_pos:
            self._create_blocks_pos()
        block_found = None
        if str(pos) in self._blocks_pos:
            block_found = self._blocks_pos[str(pos)]
        return block_found

    def to_nbt(self, init_pos, end_pos):
        """
        Convert the blocks of memory to NBT format for exporting as Schematic
        The NBT must be a complete cuboid with air in the positions where
        there are no data in blocks memory.
        :return: bytearrays for blocks ids and block data
        """
        size = size_region(init_pos, end_pos)
        blocks_bytes = bytearray()
        data_bytes = bytearray()
        # Use the same loop than reading Schematic format: x -> z -> y
        for y in range(0, size.y):
            for z in range(0, size.z):
                for x in range(0, size.x):
                    block_data = 0
                    block_id = mcpi.block.AIR.id
                    block_pos = Vec3(init_pos.x + x, init_pos.y + y, init_pos.z + z)
                    # Find if there is a block at block_pos
                    mem_block = self.find_block_at_pos(block_pos)
                    if mem_block:
                        block_id = mem_block.id
                        block_data = mem_block.data
                    blocks_bytes.append(block_id)
                    data_bytes.append(block_data)
        return blocks_bytes, data_bytes

    def build_schematic(self):
        """Export the whole bounding cuboid; returns (blocks_bytes, data_bytes)."""
        init_pos, end_pos = self.find_init_end_pos()
        # Fixed: the original passed an extra `self` argument, raising TypeError.
        return self.to_nbt(init_pos, end_pos)

    def to_schematic(self, file_path):
        """
        Convert the blocks memory to a Schematic Object
        :file_path: file in which to export the memory in Schematic format
        :return: the Schematic object
        """
        # NOTE(review): to_nbt() returns a tuple of bytearrays, which has no
        # write_file(); a real NBT/Schematic object must wrap these bytes
        # before this call can work -- confirm against the nbt library used.
        self.build_schematic().write_file(file_path)
| StarcoderdataPython |
71099 | <reponame>alexmlamb/blocks_rl_gru_setup<gh_stars>0
from .device import Device
from .log import Logger
from .sample import sample_indices
| StarcoderdataPython |
3417166 | <reponame>asterfusion/sonic-swss-1<filename>neighsyncd/restore_neighbors.py
#!/usr/bin/env python
""""
Description: restore_neighbors.py -- restoring neighbor table into kernel during system warm reboot.
The script is started by supervisord in swss docker when the docker is started.
It does not do anything in case neither system nor swss warm restart is enabled.
In case swss warm restart enabled only, it sets the stateDB flag so neighsyncd can continue
the reconciation process.
In case system warm reboot is enabled, it will try to restore the neighbor table into kernel
through netlink API calls and update the neighbor table in kernel by sending arp/ns requests
to all neighbor entries, then it sets the stateDB flag for neighsyncd to continue the
reconciliation process.
"""
import sys
import swsssdk
import netifaces
import time
import monotonic
from pyroute2 import IPRoute, NetlinkError
from pyroute2.netlink.rtnl import ndmsg
from socket import AF_INET,AF_INET6
import logging
logging.getLogger("scapy.runtime").setLevel(logging.ERROR)
from scapy.all import conf, in6_getnsma, inet_pton, inet_ntop, in6_getnsmac, get_if_hwaddr, Ether, ARP, IPv6, ICMPv6ND_NS, ICMPv6NDOptSrcLLAddr
from swsscommon import swsscommon
import errno
logger = logging.getLogger(__name__)
logger.setLevel(logging.WARNING)
logger.addHandler(logging.NullHandler())
# timeout the restore process in 110 seconds if not finished
# This is mostly to wait for interfaces to be created and up after system warm-reboot
# and this process is started by supervisord in swss docker.
# There had been devices taking close to 70 seconds to complete restoration, setting
# default timeout to 110 seconds.
DEF_TIME_OUT = 110
# every 5 seconds to check interfaces states
CHECK_INTERVAL = 5
ip_family = {"IPv4": AF_INET, "IPv6": AF_INET6}
# return the first ipv4/ipv6 address assigned on intf
def first_ip_on_intf(intf, family):
    """Return the first IPv4/IPv6 address assigned on *intf*, or None.

    Link-local addresses are covered too: a '%scope' suffix, if present,
    is stripped from the returned address.
    """
    if intf not in netifaces.interfaces():
        return None
    af = ip_family[family]
    addresses = netifaces.ifaddresses(intf)
    if af not in addresses:
        return None
    # split("%") drops the zone index of link-local addresses
    return addresses[af][0]['addr'].split("%")[0]
# check if the intf is operational up
def is_intf_oper_state_up(intf):
    """Return True if *intf* is operationally up (carrier detected).

    Reads /sys/class/net/<intf>/carrier; any read error (e.g. the interface
    does not exist yet) is treated as "not up".
    """
    oper_file = '/sys/class/net/{0}/carrier'
    try:
        # Use a context manager so the sysfs file handle is always closed;
        # the previous version opened the file and never closed it.
        with open(oper_file.format(intf), 'r') as state_file:
            state = state_file.readline().rstrip()
    except Exception as e:
        logger.info('Error: {}'.format(str(e)))
        return False
    return state == '1'
# read the neigh table from AppDB to memory, format as below
# build map as below, this can efficiently access intf and family groups later
# { intf1 -> { { family1 -> [[ip1, mac1], [ip2, mac2] ...] }
# { family2 -> [[ipM, macM], [ipN, macN] ...] } },
# ...
# intfA -> { { family1 -> [[ipW, macW], [ipX, macX] ...] }
# { family2 -> [[ipY, macY], [ipZ, macZ] ...] } }
# }
#
# Alternatively:
# 1, we can build:
# { intf1 -> [[family1, ip1, mac1], [family2, ip2, mac2] ...]},
# ...
# { intfA -> [[family1, ipX, macX], [family2, ipY, macY] ...]}
#
# 2, Or simply build two maps based on families
# These alternative solutions would have worse performance because:
# 1, need iterate the whole list if only one family is up.
# 2, need check interface state twice due to the split map
def read_neigh_table_to_maps():
    """Load the APPL_DB neighbor table into a nested map.

    The returned map is shaped as:
        { intf1 -> { family1 -> [[ip1, mac1], [ip2, mac2], ...],
                     family2 -> [[ipM, macM], ...] },
          ... }
    so interface and family groups can be accessed efficiently later.

    :raises RuntimeError: if a neighbor entry is missing fields or has an
        unknown address family.
    """
    db = swsssdk.SonicV2Connector(host='127.0.0.1')
    db.connect(db.APPL_DB, False)

    intf_neigh_map = {}

    # Key format: "NEIGH_TABLE:intf-name:ipv4/ipv6", examples below:
    # "NEIGH_TABLE:Ethernet122:192.168.3.11"
    # "NEIGH_TABLE:Ethernet122:fe80::2e0:ecff:fe3b:d6ac"
    # Value fields: "neigh" -> MAC address, "family" -> "IPv4" or "IPv6"
    keys = db.keys(db.APPL_DB, 'NEIGH_TABLE:*')
    if keys is None:
        keys = []
    for key in keys:
        # Split only twice: an IPv6 address itself contains colons.
        _, intf_name, dst_ip = key.split(':', 2)
        if intf_name == 'lo':
            continue
        value = db.get_all(db.APPL_DB, key)
        if 'neigh' not in value or 'family' not in value:
            raise RuntimeError('Neigh table format is incorrect')
        dmac = value['neigh']
        family = value['family']
        if family not in ip_family:
            raise RuntimeError('Neigh table format is incorrect')
        intf_neigh_map.setdefault(intf_name, {}).setdefault(family, []).append([dst_ip, dmac])

    db.close(db.APPL_DB)
    return intf_neigh_map
# Use netlink to set neigh table into kernel, not overwrite the existing ones
def set_neigh_in_kernel(ipclass, family, intf_idx, dst_ip, dmac):
    """Add one neighbor entry into the kernel via netlink.

    Existing entries are not overwritten: an EEXIST error is logged and
    ignored; any other netlink error is re-raised.

    :param ipclass: pyroute2 IPRoute instance
    :param family: 'IPv4' or 'IPv6' (anything else is silently ignored)
    :param intf_idx: kernel ifindex of the interface
    :param dst_ip: neighbor IP address
    :param dmac: neighbor MAC address
    """
    # Use the module-level logger for consistency with the rest of this file
    # (was logging.info, which bypassed this module's logger configuration).
    logger.info('Add neighbor entries: family: {}, intf_idx: {}, ip: {}, mac: {}'.format(
        family, intf_idx, dst_ip, dmac))
    if family not in ip_family:
        return
    family_af_inet = ip_family[family]
    # Add neighbor to kernel with "stale" state, we will send arp/ns packet later
    # so if the neighbor is active, it will become "reachable", otherwise, it will
    # stay at "stale" state and get aged out by kernel.
    try:
        ipclass.neigh('add',
                      family=family_af_inet,
                      dst=dst_ip,
                      lladdr=dmac,
                      ifindex=intf_idx,
                      state=ndmsg.states['stale'])
    # If neigh exists, log it but no exception raise, other exceptions, raise
    except NetlinkError as e:
        # NetlinkError exposes the errno as .code — works on both Python 2
        # and 3, unlike indexing the exception object (e[0]).
        if e.code == errno.EEXIST:
            logger.warning('Neigh exists in kernel with family: {}, intf_idx: {}, ip: {}, mac: {}'.format(
                family, intf_idx, dst_ip, dmac))
        else:
            raise
# build ARP or NS packets depending on family
def build_arp_ns_pkt(family, smac, src_ip, dst_ip):
    """Build a broadcast ARP request (IPv4) or a multicast NS packet (IPv6).

    :param family: 'IPv4' or 'IPv6'
    :param smac: source MAC address
    :param src_ip: source IP address (used for the IPv6 NS only)
    :param dst_ip: target IP address being resolved
    :return: a scapy packet ready to be sent on the wire
    :raises ValueError: if *family* is not a known address family
    """
    if family == 'IPv4':
        eth = Ether(src=smac, dst='ff:ff:ff:ff:ff:ff')
        pkt = eth/ARP(op=ARP.who_has, pdst=dst_ip)
    elif family == 'IPv6':
        # Neighbor Solicitation is sent to the solicited-node multicast
        # group derived from the target address.
        nsma = in6_getnsma(inet_pton(AF_INET6, dst_ip))
        mcast_dst_ip = inet_ntop(AF_INET6, nsma)
        dmac = in6_getnsmac(nsma)
        eth = Ether(src=smac, dst=dmac)
        ipv6 = IPv6(src=src_ip, dst=mcast_dst_ip)
        ns = ICMPv6ND_NS(tgt=dst_ip)
        ns_opt = ICMPv6NDOptSrcLLAddr(lladdr=smac)
        pkt = eth/ipv6/ns/ns_opt
    else:
        # Previously an unknown family fell through and raised
        # UnboundLocalError on 'pkt'; fail explicitly instead.
        raise ValueError('unknown address family: {}'.format(family))
    return pkt
# Set the statedb "NEIGH_RESTORE_TABLE|Flags", so neighsyncd can start reconciliation
def set_statedb_neigh_restore_done():
    """Set 'NEIGH_RESTORE_TABLE|Flags' in STATE_DB so neighsyncd can start
    its reconciliation process."""
    state_db = swsssdk.SonicV2Connector(host='127.0.0.1')
    state_db.connect(state_db.STATE_DB, False)
    state_db.set(state_db.STATE_DB, 'NEIGH_RESTORE_TABLE|Flags', 'restored', 'true')
    state_db.close(state_db.STATE_DB)
# This function is to restore the kernel neighbors based on the saved neighbor map
# It iterates through the map, and work on interface by interface basis.
# If the interface is operational up and has IP configured per IP family,
# it will restore the neighbors per family.
# The restoring process is done by setting the neighbors in kernel from saved entries
# first, then sending arp/nd packets to update the neighbors.
# Once all the entries are restored, this function is returned.
# The interfaces' states were checked in a loop with an interval (CHECK_INTERVAL)
# The function will timeout in case interfaces' states never meet the condition
# after some time (DEF_TIME_OUT).
def restore_update_kernel_neighbors(intf_neigh_map, timeout=DEF_TIME_OUT):
    """Restore the neighbors in *intf_neigh_map* into the kernel.

    Works interface by interface: once an interface is operationally up and
    has an IP address for a family, its neighbors for that family are
    installed via netlink and refreshed with ARP/NS probes, then removed
    from the map. Interface states are re-checked every CHECK_INTERVAL
    seconds until the map is empty or *timeout* seconds have elapsed.

    NOTE(review): entries are deleted from intf_neigh_map while iterating
    over .items(); this is safe on Python 2 (items() returns a list) but
    would raise RuntimeError if this script were ported to Python 3 — confirm
    before porting.
    """
    # create object for netlink calls to kernel
    ipclass = IPRoute()
    mtime = monotonic.time.time
    start_time = mtime()
    while (mtime() - start_time) < timeout:
        for intf, family_neigh_map in intf_neigh_map.items():
            # only try to restore to kernel when link is up
            if is_intf_oper_state_up(intf):
                src_mac = get_if_hwaddr(intf)
                intf_idx = ipclass.link_lookup(ifname=intf)[0]
                # create socket per intf to send packets
                s = conf.L2socket(iface=intf)
                # Only two families: 'IPv4' and 'IPv6'
                for family in ip_family.keys():
                    # if ip address assigned and if we have neighs in this family, restore them
                    src_ip = first_ip_on_intf(intf, family)
                    if src_ip and (family in family_neigh_map):
                        neigh_list = family_neigh_map[family]
                        for dst_ip, dmac in neigh_list:
                            # use netlink to set neighbor entries
                            set_neigh_in_kernel(ipclass, family, intf_idx, dst_ip, dmac)
                            # sending arp/ns packet to update kernel neigh info
                            s.send(build_arp_ns_pkt(family, src_mac, src_ip, dst_ip))
                        # delete this family on the intf
                        del intf_neigh_map[intf][family]
                # close the pkt socket
                s.close()
                # if all families are deleted, remove the key
                if len(intf_neigh_map[intf]) == 0:
                    del intf_neigh_map[intf]
        # map is empty, all neigh entries are restored
        if not intf_neigh_map:
            break
        time.sleep(CHECK_INTERVAL)
def main():
    """Entry point: decide, based on warm-restart configuration, whether to
    restore kernel neighbors, and signal neighsyncd via STATE_DB when done."""
    print "restore_neighbors service is started"

    # Use warmstart python binding to check warmstart information
    warmstart = swsscommon.WarmStart()
    warmstart.initialize("neighsyncd", "swss")
    warmstart.checkWarmStart("neighsyncd", "swss", False)

    # if swss or system warm reboot not enabled, don't run
    if not warmstart.isWarmStart():
        print "restore_neighbors service is skipped as warm restart not enabled"
        return

    # swss restart not system warm reboot, set statedb directly
    if not warmstart.isSystemWarmRebootEnabled():
        set_statedb_neigh_restore_done()
        print "restore_neighbors service is done as system warm reboot not enabled"
        return

    # read the neigh table from appDB to internal map
    try:
        intf_neigh_map = read_neigh_table_to_maps()
    except RuntimeError as e:
        logger.exception(str(e))
        sys.exit(1)

    try:
        restore_update_kernel_neighbors(intf_neigh_map)
    except Exception as e:
        logger.exception(str(e))
        sys.exit(1)

    # set statedb to signal other processes like neighsyncd
    set_statedb_neigh_restore_done()
    print "restore_neighbor service is done for system warmreboot"
    return
# Entry point when executed directly (started by supervisord in the swss docker).
if __name__ == '__main__':
    main()
| StarcoderdataPython |
def selection_sort(input_list):
    """Sort *input_list* in place using selection sort and return it.

    :param input_list: list of mutually comparable items
    :return: the same list object, sorted in ascending order
    :raises TypeError: if *input_list* is not a list
    """
    if not isinstance(input_list, list):
        raise TypeError('input needs to be a list')

    n = len(input_list)
    for k in range(n):
        # Index of the first smallest remaining element; using min over a
        # range avoids the per-pass list-slice copies of the old version.
        lowest = min(range(k, n), key=input_list.__getitem__)
        if lowest != k:
            # Tuple assignment swaps without a temporary variable.
            input_list[k], input_list[lowest] = input_list[lowest], input_list[k]
    return input_list
| StarcoderdataPython |
4881952 | <gh_stars>10-100
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi SDK Generator. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from .. import _utilities
from . import outputs
__all__ = [
'GetBackupPlanResult',
'AwaitableGetBackupPlanResult',
'get_backup_plan',
'get_backup_plan_output',
]
@pulumi.output_type
class GetBackupPlanResult:
    """Typed result of the AWS::Backup::BackupPlan `getBackupPlan` invoke.

    All fields are optional; each constructor argument is type-checked and
    stored via pulumi.set. (Generated code — do not edit by hand.)
    """

    def __init__(__self__, backup_plan=None, backup_plan_arn=None, backup_plan_id=None, backup_plan_tags=None, version_id=None):
if backup_plan and not isinstance(backup_plan, dict):
raise TypeError("Expected argument 'backup_plan' to be a dict")
pulumi.set(__self__, "backup_plan", backup_plan)
if backup_plan_arn and not isinstance(backup_plan_arn, str):
raise TypeError("Expected argument 'backup_plan_arn' to be a str")
pulumi.set(__self__, "backup_plan_arn", backup_plan_arn)
if backup_plan_id and not isinstance(backup_plan_id, str):
raise TypeError("Expected argument 'backup_plan_id' to be a str")
pulumi.set(__self__, "backup_plan_id", backup_plan_id)
if backup_plan_tags and not isinstance(backup_plan_tags, dict):
raise TypeError("Expected argument 'backup_plan_tags' to be a dict")
pulumi.set(__self__, "backup_plan_tags", backup_plan_tags)
if version_id and not isinstance(version_id, str):
raise TypeError("Expected argument 'version_id' to be a str")
pulumi.set(__self__, "version_id", version_id)
@property
@pulumi.getter(name="backupPlan")
def backup_plan(self) -> Optional['outputs.BackupPlanResourceType']:
return pulumi.get(self, "backup_plan")
@property
@pulumi.getter(name="backupPlanArn")
def backup_plan_arn(self) -> Optional[str]:
return pulumi.get(self, "backup_plan_arn")
@property
@pulumi.getter(name="backupPlanId")
def backup_plan_id(self) -> Optional[str]:
return pulumi.get(self, "backup_plan_id")
@property
@pulumi.getter(name="backupPlanTags")
def backup_plan_tags(self) -> Optional[Any]:
return pulumi.get(self, "backup_plan_tags")
@property
@pulumi.getter(name="versionId")
def version_id(self) -> Optional[str]:
return pulumi.get(self, "version_id")
class AwaitableGetBackupPlanResult(GetBackupPlanResult):
    """Awaitable wrapper over GetBackupPlanResult for use with `await`."""
    # pylint: disable=using-constant-test
    def __await__(self):
        # The unreachable `yield` makes __await__ a generator, as required.
        if False:
            yield self
return GetBackupPlanResult(
backup_plan=self.backup_plan,
backup_plan_arn=self.backup_plan_arn,
backup_plan_id=self.backup_plan_id,
backup_plan_tags=self.backup_plan_tags,
version_id=self.version_id)
def get_backup_plan(backup_plan_id: Optional[str] = None,
                    opts: Optional[pulumi.InvokeOptions] = None) -> AwaitableGetBackupPlanResult:
    """
    Resource Type definition for AWS::Backup::BackupPlan

    :param backup_plan_id: id of the backup plan to look up
    :param opts: options that control the behavior of this invoke
    """
    __args__ = dict()
    __args__['backupPlanId'] = backup_plan_id
    if opts is None:
        opts = pulumi.InvokeOptions()
    if opts.version is None:
        # Default to the SDK's own version when the caller did not pin one.
        opts.version = _utilities.get_version()
    __ret__ = pulumi.runtime.invoke('aws-native:backup:getBackupPlan', __args__, opts=opts, typ=GetBackupPlanResult).value

    return AwaitableGetBackupPlanResult(
        backup_plan=__ret__.backup_plan,
        backup_plan_arn=__ret__.backup_plan_arn,
        backup_plan_id=__ret__.backup_plan_id,
        backup_plan_tags=__ret__.backup_plan_tags,
        version_id=__ret__.version_id)
@_utilities.lift_output_func(get_backup_plan)
def get_backup_plan_output(backup_plan_id: Optional[pulumi.Input[str]] = None,
                           opts: Optional[pulumi.InvokeOptions] = None) -> pulumi.Output[GetBackupPlanResult]:
    """
    Resource Type definition for AWS::Backup::BackupPlan

    Output-typed variant of get_backup_plan; the decorator supplies the body.
    """
    ...
| StarcoderdataPython |
1632018 | # Copyright (c) 2021 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import unittest
import os
import json
import paddle
from paddle.distributed.auto_parallel.cluster import Cluster
from paddle.distributed.auto_parallel.cost import CommContext
from paddle.distributed.auto_parallel.cost import build_comm_desc
from paddle.distributed.auto_parallel.cost import AllreduceSumOpCost
from paddle.distributed.auto_parallel.cost import AllgatherOpCost
from paddle.distributed.auto_parallel.cost import BroadcastOpCost
from paddle.distributed.auto_parallel.cost import SendOpCost
from paddle.distributed.auto_parallel.cost import RecvOpCost
from paddle.distributed.auto_parallel.cost import IdentityOpCost
from test_cluster import cluster_json, multi_cluster_json
class TestCommOpCost(unittest.TestCase):
    """Smoke tests for the communication op cost models: every op cost built
    from a cluster description should report a positive time estimate."""

    def test_comm_cost(self):
        """Single-machine cluster: all comm op costs must be positive."""
        # Build cluster
        file_dir = os.path.dirname(os.path.abspath(__file__))
        cluster_json_path = os.path.join(file_dir, "auto_parallel_cluster.json")
        cluster_json_object = json.loads(cluster_json)
        with open(cluster_json_path, "w") as cluster_json_file:
            json.dump(cluster_json_object, cluster_json_file)
        cluster = Cluster()
        cluster.build_from_file(cluster_json_path)

        # Build CommConetxt
        CommContext._has_instance = None
        CommContext._instance = None
        comm_context = CommContext(cluster)

        # Check AllreduceSumCost 128MB ring cost
        allreduce_sum_op_desc = build_comm_desc(
            "c_allreduce_sum", [0, 1, 2, 3, 4, 5, 6, 7], paddle.float32,
            [1, 32 * (10**6)])
        allreduce_sum_op_cost = AllreduceSumOpCost(
            op_desc=allreduce_sum_op_desc, comm_context=comm_context)
        # Bug fix: this cost was computed but never checked, unlike every
        # other op cost below.
        self.assertTrue(allreduce_sum_op_cost.time > 0)

        # Check AllgatherOpCost cost
        allgather_op_desc = build_comm_desc("c_allgather",
                                            [0, 1, 2, 3, 4, 5, 6, 7],
                                            paddle.float32, [1, 32 * (10**6)])
        allgather_op_cost = AllgatherOpCost(
            op_desc=allgather_op_desc, comm_context=comm_context)
        self.assertTrue(allgather_op_cost.time > 0)

        # Check BroadcastOpCost cost
        broadcast_op_desc = build_comm_desc("c_broadcast",
                                            [0, 1, 2, 3, 4, 5, 6, 7],
                                            paddle.float32, [1, 32 * (10**6)])
        broadcast_op_cost = BroadcastOpCost(
            op_desc=broadcast_op_desc, comm_context=comm_context)
        self.assertTrue(broadcast_op_cost.time > 0)

        # Check SendOpCost cost
        send_op_desc = build_comm_desc("send_v2", [0, 1], paddle.float32,
                                       [1, 32 * (10**6)])
        send_op_cost = SendOpCost(
            op_desc=send_op_desc, comm_context=comm_context)
        self.assertTrue(send_op_cost.time > 0)

        # Check RecvOpCost cost
        recv_op_desc = build_comm_desc("recv_v2", [0, 1], paddle.float32,
                                       [1, 32 * (10**6)])
        recv_op_cost = RecvOpCost(
            op_desc=recv_op_desc, comm_context=comm_context)
        self.assertTrue(recv_op_cost.time > 0)

        # Check IdentityOpCost cost (identity may legitimately cost 0)
        identity_op_desc = build_comm_desc("c_identity", [0, 1], paddle.float32,
                                           [1, 32 * (10**6)])
        identity_op_cost = IdentityOpCost(
            op_desc=identity_op_desc, comm_context=comm_context)
        self.assertTrue(identity_op_cost.time >= 0)

        # Remove unnecessary files
        if os.path.exists(cluster_json_path):
            os.remove(cluster_json_path)

    def test_cross_machine_comm_cost(self):
        """Two-machine cluster: cross-machine comm op costs must be positive."""
        # Build cluster
        file_dir = os.path.dirname(os.path.abspath(__file__))
        cluster_json_path = os.path.join(file_dir, "auto_parallel_cluster.json")
        cluster_json_object = json.loads(multi_cluster_json)
        with open(cluster_json_path, "w") as cluster_json_file:
            json.dump(cluster_json_object, cluster_json_file)
        cluster = Cluster()
        cluster.build_from_file(cluster_json_path)

        # Build CommConetxt
        CommContext._has_instance = None
        CommContext._instance = None
        comm_context = CommContext(cluster)

        # Check AllreduceSumCost 128MB ring cost
        allreduce_sum_op_desc = build_comm_desc(
            "c_allreduce_sum",
            [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15],
            paddle.float32, [1, 32 * (10**6)])
        allreduce_sum_op_cost = AllreduceSumOpCost(
            op_desc=allreduce_sum_op_desc, comm_context=comm_context)
        # Bug fix: assert the allreduce cost here too (was unchecked).
        self.assertTrue(allreduce_sum_op_cost.time > 0)

        # Check AllgatherOpCost cost
        allgather_op_desc = build_comm_desc(
            "c_allgather",
            [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15],
            paddle.float32, [1, 32 * (10**6)])
        allgather_op_cost = AllgatherOpCost(
            op_desc=allgather_op_desc, comm_context=comm_context)
        self.assertTrue(allgather_op_cost.time > 0)

        # Check BroadcastOpCost cost
        broadcast_op_desc = build_comm_desc(
            "c_broadcast",
            [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15],
            paddle.float32, [1, 32 * (10**6)])
        broadcast_op_cost = BroadcastOpCost(
            op_desc=broadcast_op_desc, comm_context=comm_context)
        self.assertTrue(broadcast_op_cost.time > 0)

        # Check SendOpCost cost
        send_op_desc = build_comm_desc("send_v2", [0, 1], paddle.float32,
                                       [1, 32 * (10**6)])
        send_op_cost = SendOpCost(
            op_desc=send_op_desc, comm_context=comm_context)
        self.assertTrue(send_op_cost.time > 0)

        # Check RecvOpCost cost
        recv_op_desc = build_comm_desc("recv_v2", [0, 1], paddle.float32,
                                       [1, 32 * (10**6)])
        recv_op_cost = RecvOpCost(
            op_desc=recv_op_desc, comm_context=comm_context)
        self.assertTrue(recv_op_cost.time > 0)

        # Remove unnecessary files
        if os.path.exists(cluster_json_path):
            os.remove(cluster_json_path)
# Allow running this test module directly from the command line.
if __name__ == "__main__":
    unittest.main()
| StarcoderdataPython |
5079236 | # coding: utf-8
# Copyright (c) <NAME>. MIT license.
import os
import sys
import json
import logging
import tornado
import tornado.web
import tornado.ioloop
import tornado.options
import tornado.httpserver
from tornado.options import define, options
from centrifuge.log import logger
from centrifuge.utils import namedAny
define(
"debug", default=False, help="tornado debug mode", type=bool
)
define(
"port", default=8000, help="app port", type=int
)
define(
"address", default="", help="address to bind to", type=str
)
define(
"config", default='config.json', help="JSON config file", type=str
)
define(
"name", default='', help="unique node name", type=str
)
define(
"web", default='', help="path to web app directory", type=str
)
# Resolve the engine class from the CENTRIFUGE_ENGINE environment variable:
# unset or 'memory' -> bundled in-memory engine, 'redis' -> bundled Redis
# engine; any other value is treated as a dotted path to a custom engine class.
engine = os.environ.get('CENTRIFUGE_ENGINE')
if not engine or engine == 'memory':
    engine_class_path = 'centrifuge.engine.memory.Engine'
elif engine == "redis":
    engine_class_path = 'centrifuge.engine.redis.Engine'
else:
    engine_class_path = engine

engine_class = namedAny(engine_class_path)
tornado.options.parse_command_line()
def setup_logging_level(level):
    """Apply the command-line logging level to the Centrifuge logger.

    A value of 'none' leaves the logger configuration untouched.
    """
    if level != 'none':
        logger.setLevel(getattr(logging, level.upper()))
setup_logging_level(options.logging)
from sockjs.tornado import SockJSRouter
from centrifuge.core import Application
from centrifuge.handlers import ApiHandler
from centrifuge.handlers import SockjsConnection
from centrifuge.handlers import Client
from centrifuge.web.handlers import InfoHandler
from centrifuge.web.handlers import AuthHandler
from centrifuge.web.handlers import AdminWebSocketHandler
from centrifuge.web.handlers import ActionHandler
def stop_running(msg):
    """Log *msg* as an error and terminate the process with exit code 1.

    Called only during initialization when a critical error occurred.
    """
    logger.error(msg)
    raise SystemExit(1)
def create_application_handlers(sockjs_settings):
    """Assemble the Tornado URL spec list.

    SockJS client-connection routes are placed first, followed by the
    API/admin endpoints and, when --web is given, a static file handler
    serving the web application.
    """
    url = tornado.web.url
    routes = [
        url(r'/api/([^/]+)/?$', ApiHandler, name="api"),
        url(r'/info/$', InfoHandler, name="info"),
        url(r'/action/$', ActionHandler, name="action"),
        url(r'/auth/$', AuthHandler, name="auth"),
        (r'/socket', AdminWebSocketHandler),
    ]

    if options.web:
        logger.info("serving web application from {0}".format(os.path.abspath(options.web)))
        static_route = (
            r'/(.*)',
            tornado.web.StaticFileHandler,
            {"path": options.web, "default_filename": "index.html"},
        )
        routes.append(static_route)

    # SockJS route for client connections; its URLs go first in the list.
    sock_router = SockJSRouter(
        SockjsConnection, '/connection', user_settings=sockjs_settings
    )
    return sock_router.urls + routes
def create_centrifuge_application():
    """Load configuration, build the Centrifuge Application and start its
    HTTP server.

    Security-related options may be overridden via CENTRIFUGE_* environment
    variables. On any fatal error the process is stopped via stop_running().

    :return: the initialized Application instance
    """
    try:
        # Use a context manager so the config file handle is closed;
        # json.load(open(...)) leaked it.
        with open(options.config, 'r') as config_file:
            custom_settings = json.load(config_file)
    except IOError:
        return stop_running("No configuration file found.")

    # override security related options using environment variable
    # value if exists
    for option_name in ["password", "cookie_secret", "api_secret"]:
        environment_var_name = "CENTRIFUGE_{0}".format(option_name.upper())
        environment_value = os.environ.get(environment_var_name)
        if environment_value:
            logger.debug("using {0} environment variable for {1} option value".format(
                environment_var_name, option_name
            ))
            custom_settings[option_name] = environment_value

    if os.environ.get("CENTRIFUGE_INSECURE") == "1":
        custom_settings["insecure"] = True

    settings = dict(
        cookie_secret=custom_settings.get("cookie_secret", "bad secret"),
        template_path=os.path.join(
            os.path.dirname(__file__),
            os.path.join("web/frontend", "templates")
        ),
        xsrf_cookies=False,
        autoescape="xhtml_escape",
        debug=options.debug,
        options=options,
        config=custom_settings
    )

    sockjs_settings = custom_settings.get("sockjs_settings", {})
    if not sockjs_settings or not sockjs_settings.get("sockjs_url"):
        # SockJS CDN will be retired
        # see https://github.com/sockjs/sockjs-client/issues/198
        # if no explicit SockJS url provided in configuration file
        # then we use jsdelivr CDN instead of default cdn.sockjs.org
        # this can be fixed directly in SockJS-Tornado soon
        sockjs_settings["sockjs_url"] = "https://cdn.jsdelivr.net/sockjs/1.0/sockjs.min.js"

    handlers = create_application_handlers(sockjs_settings)

    # custom settings to configure the tornado HTTPServer
    tornado_settings = custom_settings.get("tornado_settings", {})
    logger.debug("tornado_settings: %s", tornado_settings)
    if 'io_loop' in tornado_settings:
        stop_running(
            "The io_loop in tornado_settings is not supported for now."
        )

    try:
        app = Application(handlers=handlers, **settings)
        server = tornado.httpserver.HTTPServer(app, **tornado_settings)
        server.listen(options.port, address=options.address)
    except Exception as e:
        return stop_running(str(e))

    logger.info("Engine class: {0}".format(engine_class_path))
    app.engine = engine_class(app)

    # create reference to application from Client
    Client.application = app

    app.initialize()

    logger.info("Tornado port: {0}, address: {1}".format(options.port, options.address))
    return app
def main():
    """Build the Centrifuge application and run the Tornado IO loop until
    the process is interrupted (Ctrl-C)."""
    io_loop = tornado.ioloop.IOLoop.instance()
    create_centrifuge_application()
    try:
        io_loop.start()
    except KeyboardInterrupt:
        logger.info('interrupted')
# Entry point when the node is launched directly.
if __name__ == '__main__':
    main()
| StarcoderdataPython |
3240710 | expected_output = {
    # Golden parser output (unit-test fixture): 'show inventory' entries for
    # a Cisco C1111-8PLTELA chassis, indexed by inventory item position.
    'inventory_item_index': {
0: {
'description': 'cisco c1111-8pltela chassis',
'name': 'chassis',
'pid': 'C1111-8PLTELA',
'sn': 'FGL221190VF',
'vid': 'V01',
},
1: {
'description': 'external power supply module',
'name': 'power supply module 0',
'pid': 'PWR-12V',
'sn': 'JAB0929092D',
'vid': 'V01',
},
2: {
'description': 'cisco c1111-8pltela built-in nim controller',
'name': 'module 0',
'pid': 'C1111-8PLTELA',
},
3: {
'description': 'front panel 2 port gigabitethernet module',
'name': 'nim subslot 0/0',
'pid': 'C1111-2x1GE',
'vid': 'V01',
},
4: {
'description': 'c1111-es-8',
'name': 'nim subslot 0/1',
'pid': 'C1111-ES-8',
'vid': 'V01',
},
5: {
'description': 'c1111-lte module',
'name': 'nim subslot 0/2',
'pid': 'C1111-LTE',
'vid': 'V01',
},
6: {
'description': 'sierra wireless em7455/em7430',
'name': 'modem 0 on cellular0/2/0',
'pid': 'EM7455/EM7430',
'sn': '355813070074072',
'vid': '1.0',
},
7: {
'description': 'cisco c1111-8pltela route processor',
'name': 'module r0',
'pid': 'C1111-8PLTELA',
'sn': 'FOC21520MF1',
'vid': 'V01',
},
8: {
'description': 'cisco c1111-8pltela forwarding processor',
'name': 'module f0',
'pid': 'C1111-8PLTELA',
},
},
} | StarcoderdataPython |
124306 | <reponame>obespoir/echecs<gh_stars>10-100
# coding=utf-8
import random
from db.desk import Desk as DBDesk
from service.room.common_define import DeskType
from service.room.handlers.basehandler import BaseHandler, RegisterEvent
from service.room.models.room_desk import RoomDesk
from service.room.models.roomdesk_manager import desk_mgr
from service.room.models.user_manager import UserManager
from service.room.validators.match_desk.join_desk_by_room_type import JoinMatchDeskByTypeValidator
# from service.session_gate_rel import session_gate_ins
from config.globalconfig import GlobalConfig
from share.commontool import weight_choice
from share.notify_web_server import notify_web_server_join_room
from share.message_ids import *
@RegisterEvent(JOIN_MATCH_DESK_BY_TYPE)
class JoinMatchDeskByTypeHandler(BaseHandler):
    """Handles requests to join a match desk of a given room type."""

    def execute(self, *args, **kwargs):
        """
        Handle a "join match desk of the specified room type" request:
        validate the request, seat the user at a suitable (or newly created)
        desk, notify the desk, and return the desk state to the caller.

        :param args: unused
        :param kwargs: unused
        :return: dict with the desk id, current seat info and room type
        """
        validator = JoinMatchDeskByTypeValidator(handler=self)
        desk, user = _apply_match_desk(validator.user_id.data, validator.session_id.data, max_player_num=2,
                                       room_type=validator.room_type.data)
        # session_gate_ins.update_rel(validator.session_id.data, self.gate_name)
        data = user.to_dict()
        # NOTE(review): the third argument looks like a list of user ids to
        # exclude (or target) for the notification — confirm against
        # notify_desk_some_user's contract.
        desk.notify_desk_some_user(PUSH_USER_JOIN_DESK, data, [user.user_id])
        return {"desk_id": desk.desk_id, "seat_info": desk.get_users_info(), "room_type": validator.room_type.data}
def _apply_match_desk(user_id, session_id, max_player_num=4, room_type=0):
    """Find or create a match desk for the user and seat them at it.

    Non-full desks of the requested room type are bucketed by how many
    players are already seated; a weighted random choice then prefers
    fuller desks (so games start sooner) while always keeping a chance of
    opening a brand-new desk (bucket 0).

    :param user_id: id of the joining user
    :param session_id: id of the user's session
    :param max_player_num: desk capacity used when creating a new desk
    :param room_type: room configuration index
    :return: (desk, user) tuple
    """
    # Buckets of candidate desks indexed by current player count:
    # cur_desk_player_count[n] holds the non-full desks seating n players.
    cur_desk_player_count = [[] for _ in range(max_player_num)]
    for _id, desk in desk_mgr.match_desks.items():
        max_player_num = desk.max_player_num  # match desks keep their own capacity
        if desk.is_full():
            continue
        if desk.room_type == room_type:
            # BUG FIX: the previous membership test ("num in
            # cur_desk_player_count") compared an int against a list of
            # lists and was always False, so each bucket retained only the
            # last desk seen. Append into the pre-built bucket instead.
            cur_desk_player_count[desk.people_count].append(desk)

    # Weight each bucket: empty buckets get 0, otherwise fuller desks are
    # favored linearly; index 0 ("create a new desk") is pinned at 100.
    desk_weight = [100 for _ in range(max_player_num)]
    for i, bucket in enumerate(cur_desk_player_count):
        if not bucket:
            desk_weight[i] = 0
        else:
            desk_weight[i] = desk_weight[i] * (1 + 10 * i)
    desk_weight[0] = 100

    index = weight_choice(desk_weight)
    if 0 == index:
        # Create a fresh desk and seat the user at seat 0.
        desk_id = DBDesk.get_a_id()
        custom_config = GlobalConfig().room_cfg_list[room_type]
        desk = RoomDesk(desk_id, max_player_num=max_player_num, desk_type=DeskType.MATCH_DESK, custom_config=custom_config)
        desk_mgr.add_room_desk(desk)
        user = UserManager().add_user(user_id, desk_id, session_id)
        desk.user_sit(user, seat_id=0)
        return desk, user
    else:
        # Pick a random desk from the chosen occupancy bucket.
        desk = random.choice(cur_desk_player_count[index])
        user = UserManager().add_user(user_id, desk.desk_id, session_id)
        desk.user_sit(user)
        return desk, user
| StarcoderdataPython |
1825963 | <gh_stars>1-10
import time
from threading import Timer
from zone_api import platform_encapsulator as pe
from zone_api.core.device import Device
from zone_api import time_utilities
class Switch(Device):
"""
Represents a light or fan switch. Each switch contains an internal timer.
When the switch is turned on, the timer is started. As the timer expires,
the switch is turned off (if it is not off already). If the
switch is turned off not by the timer, the timer is cancelled.
"""
# The maximum duration between the last turned-off time and the current turned-on time, during which the
# turned-off timer will be set to 1.5 times longer than the initial value. This simulates the scenario when the
# user is relatively immobile and thus no motion event was triggered to renew the timer. Once the previous timer is
# triggered and turn off the light, the user manually reaches out to turn on the light again. This indicates that
# the user is in the middle of something and as such we will increase the timer duration.
STICKY_SWITCH_DURATION_IN_SECS = 30
# Related to STICKY_SWITCH_DURATION_IN_SECS, how long should the default timer duration be extended.
EXTENDED_TIMER_DURATION_FACTOR = 1.5
def __init__(self, switch_item, duration_in_minutes: float):
"""
Ctor
:param SwitchItem switch_item:
:param int duration_in_minutes: how long the switch will be kept on
:raise ValueError: if any parameter is invalid
"""
Device.__init__(self, switch_item)
self.lastOffTimestampInSeconds = -1
self.duration_in_minutes = duration_in_minutes
self.timer = None
def _start_timer(self, events):
"""
Creates and returns the timer to turn off the switch.
"""
def turn_off_switch():
zone = self.get_zone_manager().get_containing_zone(self)
(occupied, device) = zone.is_occupied([Switch], 60)
if not occupied:
events.send_command(self.get_item_name(), "OFF")
pe.log_debug("{}: turning off {}.".format(
zone.get_name(), self.get_item_name()))
else:
self.timer = Timer(self.duration_in_minutes * 60, turn_off_switch)
self.timer.start()
pe.log_debug("{}: {} is in use by {}.".format(
zone.get_name(), self.get_item_name(), device))
self._cancel_timer() # cancel the previous timer, if any.
duration = self.duration_in_minutes * 60
if (time.time() - self.get_last_off_timestamp_in_seconds()) <= Switch.STICKY_SWITCH_DURATION_IN_SECS:
duration = duration * Switch.EXTENDED_TIMER_DURATION_FACTOR
self.timer = Timer(duration, turn_off_switch)
self.timer.start()
def _cancel_timer(self):
"""
Cancel the turn-off-switch timer.
"""
if self.timer is not None and self.timer.is_alive():
self.timer.cancel()
self.timer = None
def _is_timer_active(self):
return self.timer is not None and self.timer.is_alive()
def turn_on(self, events):
"""
Turns on this light, if it is not on yet. In either case, the associated
timer item is also turned on.
"""
if self.is_on(): # already on, renew timer
self._start_timer(events)
else:
events.send_command(self.get_item_name(), "ON")
def turn_off(self, events):
"""
Turn off this light.
"""
if self.is_on():
events.send_command(self.get_item_name(), "OFF")
self._cancel_timer()
def is_on(self):
"""
Returns true if the switch is turned on; false otherwise.
"""
return pe.is_in_on_state(self.get_item())
def on_switch_turned_on(self, events, item_name):
"""
Invoked when a switch on event is triggered. Note that a switch can be
turned on through this class' turnOn method, or through the event bus, or
manually by the user.
The following actions are done:
- the on timestamp is set;
- the timer is started or renewed.
:param events:
:param str item_name: the name of the item triggering the event
:return True: if itemName refers to this switch; False otherwise
"""
is_processed = (self.get_item_name() == item_name)
if is_processed:
self._handle_common_on_action(events)
return is_processed
def on_switch_turned_off(self, events, item_name):
"""
Invoked when a switch off event is triggered. Note that a switch can be
turned off through this class' turnOff method, or through the event bus,
or manually by the user.
The following actions are done:
- the timer is cancelled.
:param scope.events events:
:param string item_name: the name of the item triggering the event
:return: True if itemName refers to this switch; False otherwise
"""
is_processed = (self.get_item_name() == item_name)
if is_processed:
self.lastOffTimestampInSeconds = time.time()
self._cancel_timer()
return is_processed
def get_last_off_timestamp_in_seconds(self):
"""
Returns the timestamp in epoch seconds the switch was last turned off.
:return: -1 if the timestamp is not available, or an integer presenting\
the epoch seconds
"""
return self.lastOffTimestampInSeconds
    # Misc common things to do when a switch is turned on.
    def _handle_common_on_action(self, events):
        # Starting the timer also renews it when one is already running.
        self._start_timer(events)  # start or renew timer
    def is_low_illuminance(self, current_illuminance):
        """ Always return False. """
        # Base switches ignore illuminance; Light overrides this with a real
        # threshold comparison.
        return False
def __str__(self):
""" @override """
return u"{}, duration: {} minutes".format(
super(Switch, self).__str__(), self.duration_in_minutes)
class Light(Switch):
    """A regular light: a timed Switch that may additionally be gated on an
    illuminance threshold and protected from premature turn-off."""

    def __init__(self, switch_item, duration_in_minutes: float, illuminance_level: int = None,
                 no_premature_turn_off_time_range: str = None):
        """
        :param int illuminance_level: the illuminance level in LUX unit. The \
        light should only be turned on if the light level is below this unit.
        :param str no_premature_turn_off_time_range: optional parameter to define \
        the time range when the light should not be turned off before its \
        expiry time.
        """
        Switch.__init__(self, switch_item, duration_in_minutes)
        self.illuminance_level = illuminance_level
        self.no_premature_turn_off_time_range = no_premature_turn_off_time_range

    def get_illuminance_threshold(self):
        """
        Return the LUX threshold below which the light should turn on, or
        None when not applicable.
        :rtype: int or None
        """
        return self.illuminance_level

    def is_low_illuminance(self, current_illuminance):
        """
        True when a threshold is configured, the sensor value is available
        (non-negative) and below the threshold; False otherwise.
        @override
        """
        threshold = self.get_illuminance_threshold()
        if threshold is None or current_illuminance < 0:
            # No threshold configured, or no current sensor reading.
            return False
        return current_illuminance < threshold

    def can_be_turned_off_by_adjacent_zone(self):
        """
        True if this light may be turned off when an adjacent zone's light
        turns on. A False value avoids the light being switched off
        unexpectedly while the protected time range is active.
        :rtype: bool
        """
        time_range = self.no_premature_turn_off_time_range
        if time_range is None:
            return True
        return not time_utilities.is_in_time_range(time_range)

    def is_occupied(self, seconds_from_last_event=5 * 60):
        """
        Occupancy is approximated by the light being on.
        @override
        :rtype: bool
        """
        return self.is_on()

    def __str__(self):
        """ @override """
        suffix = ""
        if self.no_premature_turn_off_time_range is not None:
            suffix = ", no premature turn-off time range: {}".format(
                self.no_premature_turn_off_time_range)
        return u"{}, illuminance: {}{}".format(
            super(Light, self).__str__(), self.illuminance_level, suffix)
class Fan(Switch):
    """ Represents a fan switch. """
    def __init__(self, switch_item, duration_in_minutes):
        # A fan is simply a timed Switch; it adds no illuminance or
        # time-range behaviour of its own.
        Switch.__init__(self, switch_item, duration_in_minutes)
| StarcoderdataPython |
5071176 | <reponame>joaompinto/kubesh
import io
import yaml
import pkgutil
import importlib
from os.path import dirname
from pathlib import Path
import inspect
import traceback
from wasabi import TracebackPrinter
from .mapper import table_from_list
# https://packaging.python.org/guides/creating-and-discovering-plugins/
class YAMLCommand:
    """A shell command whose name, metadata and API call are declared in a
    YAML document rather than in code."""

    def __init__(self, yaml_data):
        """Keep the raw document and mirror the well-known fields as attributes."""
        self.yaml = yaml_data
        for field in ("Name", "Aliases", "Description", "When"):
            if field in yaml_data:
                setattr(self, field, yaml_data[field])

    def run(self, console, api):
        """Evaluate the declared API call and render the result on *console*."""
        # NOTE: the YAML "API" value is evaluated verbatim against the ``api``
        # object -- the YAML files are trusted input here.
        api_call = "api.{}".format(self.yaml["API"])
        response = eval(api_call)
        output_type = self.yaml["Output"]["Type"]
        content = self.yaml["Output"]["Content"]
        if output_type.lower() == "list":
            console.table(table_from_list(response, content))
def iter_namespace(ns_pkg):
    """Yield ``(finder, name, ispkg)`` for every module under *ns_pkg*,
    with names prefixed by the package's dotted path."""
    prefix = ns_pkg.__name__ + "."
    return pkgutil.iter_modules(ns_pkg.__path__, prefix)
def yaml2cmds(filename):
    """Parse every YAML document in *filename* into a YAMLCommand.

    A single file may contain several ``---``-separated documents; one
    command object is built per document.
    """
    with io.open(filename, encoding="utf8") as yaml_file:
        # load_all is lazy, so the documents must be consumed before the
        # file handle is closed.
        documents = yaml.load_all(yaml_file, Loader=yaml.FullLoader)
        return [YAMLCommand(doc) for doc in documents]
def load_yaml_commands():
    """Collect the commands declared in every ``*.yaml`` file of the
    package's ``commands`` directory."""
    commands_dir = Path(dirname(__file__)).joinpath("commands")
    all_commands = []
    for yaml_path in commands_dir.glob("*.yaml"):
        all_commands.extend(yaml2cmds(yaml_path))
    return all_commands
def load_module_commands():
    """Import every module under the package's ``commands`` namespace and
    instantiate each class whose name starts with ``Command``.

    Raises ValueError (with a pretty wasabi traceback) when a command class
    is missing one of the required ``Name`` / ``Description`` attributes.
    """
    from . import commands

    cmd_list = []
    for _finder, module_name, _ispkg in iter_namespace(commands):
        module = importlib.import_module(module_name)
        command_classes = [
            cls for _name, cls in inspect.getmembers(module, inspect.isclass)
            if cls.__name__.startswith("Command")
        ]
        for command_cls in command_classes:
            instance = command_cls()
            for attr in ("Name", "Description"):
                if hasattr(instance, attr):
                    continue
                # Build a readable traceback pointing at the offending module.
                printer = TracebackPrinter(
                    tb_base="kubesh", tb_exclude=("core.py", "runpy.py")
                )
                message = printer(
                    f"Command missing required '{attr}' attribute",
                    f"File: {module.__file__}",
                    highlight="kwargs",
                    tb=traceback.extract_stack(),
                )
                raise ValueError(message)
            cmd_list.append(instance)
    return cmd_list
def load_commands():
    """Return all available commands: YAML-declared first, then module-defined.

    Bug fix: the previous implementation both ``extend``-ed the module
    commands into the YAML list AND concatenated them again on return
    (``yaml_cmds.extend(module_cmds); return yaml_cmds + module_cmds``),
    so every module command appeared twice in the result.
    """
    return load_yaml_commands() + load_module_commands()
| StarcoderdataPython |
9626294 | import numpy as np
from itertools import permutations
def calc(list1):
    """Solve the 24 game for the four numbers in *list1*.

    Tries every permutation of the numbers, every choice of the operators
    ``+ - * /`` and every parenthesisation, looking for an expression that
    evaluates to 24.

    :param list1: sequence of four numbers.
    :return: a list containing at most one solution expression (with spaces
        removed); prints a message and returns ``[]`` when no solution exists.
    """
    # Bug fix: the original test was ``abs(eval(e) - 24) == 0``, which misses
    # valid solutions involving division because of floating-point rounding,
    # e.g. 5 * (5 - 1/5) evaluates to 24.000000000000004.
    EPSILON = 1e-6
    symbols = ["+", "-", "*", "/"]
    solutions = set()  # de-duplicated winning expressions
    for one, two, three, four in permutations(list1, 4):
        for s1 in symbols:
            for s2 in symbols:
                for s3 in symbols:
                    if s1 + s2 + s3 in ("+++", "***"):
                        # All-addition / all-multiplication: parentheses are
                        # irrelevant, so test a single flat expression.
                        express = ["{0} {1} {2} {3} {4} {5} {6}".format(one, s1, two, s2, three, s3, four)]
                    else:
                        # The five distinct parenthesisations of a b c d.
                        express = [
                            "(({0} {1} {2}) {3} {4}) {5} {6}".format(one, s1, two, s2, three, s3, four),
                            "({0} {1} {2}) {3} ({4} {5} {6})".format(one, s1, two, s2, three, s3, four),
                            "(({0} {1} ({2} {3} {4})) {5} {6})".format(one, s1, two, s2, three, s3, four),
                            "{0} {1} (({2} {3} {4}) {5} {6})".format(one, s1, two, s2, three, s3, four),
                            "{0} {1} ({2} {3} ({4} {5} {6}))".format(one, s1, two, s2, three, s3, four),
                        ]
                    for e in express:
                        try:
                            if abs(eval(e) - 24) < EPSILON:
                                solutions.add(e)
                        except ZeroDivisionError:
                            pass
    if not solutions:
        print("无法算出")
        return []
    # Keep the original contract: return a single solution, spaces stripped.
    first = next(iter(solutions))
    return [first.replace(' ', '')]
| StarcoderdataPython |
118747 | r"""
*****
Array
*****
.. autofunction:: is_all_equal
.. autofunction:: is_all_finite
.. autofunction:: is_crescent
"""
from numpy import asarray, isfinite, mgrid, prod, rollaxis
from numpy import sum as _sum
from numpy import unique as _unique
try:
from numba import boolean, char, float64, int32, int64, jit
_NUMBA = True
except ImportError:
_NUMBA = False
def is_crescent(arr):
    r"""Check if the array values are in non-decreasing order.

    Args:
        arr (array_like): sequence of values.

    Returns:
        bool: ``True`` for non-decreasing order.
    """
    # Conversion up front lets the (possibly numba-compiled) kernel assume
    # an ndarray.
    return _is_crescent(asarray(arr))
def is_all_equal(arr):
    r"""Check if the array values are all equal.

    Args:
        arr (array_like): sequence of values.

    Returns:
        bool: ``True`` if values are all equal.
    """
    # Delegate to the (possibly numba-compiled) kernel after coercion.
    return _is_all_equal(asarray(arr))
def is_all_finite(arr):
    r"""Check if the array values are all finite.

    Args:
        arr (array_like): sequence of values.

    Returns:
        bool: ``True`` if values are all finite.
    """
    # A single reduction suffices: any inf/nan poisons the sum, so testing
    # the total is equivalent to testing every element.
    total = _sum(asarray(arr))
    return isfinite(total)
def _is_crescent(arr):
i = 0
while i < arr.shape[0] - 1:
if arr[i] > arr[i + 1]:
return False
i += 1
return True
if _NUMBA:
    # Eagerly compile the kernel for the common dtypes; nogil lets callers
    # release the GIL, cache=True persists the compilation across runs.
    signature = jit(
        [boolean(float64[:]), boolean(int64[:]), boolean(char[:]), boolean(int32[:])],
        nogil=True,
        nopython=True,
        cache=True,
    )
    _is_crescent = signature(_is_crescent)
def _is_all_equal(arr):
arr = arr.ravel()
v = arr[0]
i = 1
while i < arr.shape[0]:
if arr[i] != v:
return False
i += 1
return True
if _NUMBA:
    # Reuse the same dispatcher factory built above for ``_is_crescent``.
    _is_all_equal = signature(_is_all_equal)
def cartesian(shape):
    r"""Cartesian indexing.

    Returns a sequence of n-tuples indexing each element of a hypothetical
    matrix of the given shape.

    Args:
        shape (tuple): tuple of dimensions.

    Returns:
        array_like: indices, one row per element, in row-major order.

    Example
    -------

    .. doctest::

        >>> from numpy_sugar import cartesian
        >>> print(cartesian((2, 3)))
        [[0 0]
         [0 1]
         [0 2]
         [1 0]
         [1 1]
         [1 2]]

    Reference:
        [1] http://stackoverflow.com/a/27286794
    """
    ndim = len(shape)
    slices = tuple(slice(0, extent) for extent in shape)
    # mgrid builds one coordinate grid per axis; moving axis 0 to the end
    # turns it into an (d0, d1, ..., ndim) array of index tuples.
    grid = rollaxis(mgrid[slices], 0, ndim + 1)
    return grid.reshape((prod(shape), ndim))
def unique(ar):
    r"""Find the unique elements of an array.

    It uses ``dask.array.unique`` if necessary.

    Args:
        ar (array_like): Input array.

    Returns:
        array_like: the sorted unique elements.
    """
    # Imported lazily so dask is only required when this function is used.
    import dask.array as da

    if isinstance(ar, da.core.Array):
        # Stay lazy: return a dask result for dask input.
        return da.unique(ar)
    return _unique(ar)
| StarcoderdataPython |
53977 | <reponame>Dan-Patterson/Tools_for_ArcGIS_Pro<gh_stars>10-100
def strip_concatenate(in_flds, strip_list=["", ",", None]):
    """Concatenate field values, skipping empty strings, bare commas and nulls.

    Reconstructed from a whitespace-mangled paste (every character in the
    original source was space-separated). Intended for the ArcGIS field
    calculator with the Python parser, e.g.
    ``strip_concatenate([!a!, !b!, !c!])`` with assumed field names.

    :param in_flds: sequence of field values to combine.
    :param strip_list: values treated as "empty" and skipped
        (never mutated, so the shared default list is safe).
    :return: the remaining values formatted back-to-back as one string.
    """
    kept = []
    placeholders = []
    for value in in_flds:
        if value not in strip_list:
            kept.append(value)
            placeholders.append("{}")
    # One "{}" placeholder per kept value, joined without separators.
    template = "".join(placeholders)
    return template.format(*kept)
_ _ e s r i _ f i e l d _ c a l c u l a t o r _ s p l i t t e r _ _
s t r i p _ c o n c a t e n a t e ( ) | StarcoderdataPython |
4954176 | <reponame>chinasaur/bokeh
from __future__ import absolute_import, print_function
import pytest
from selenium.webdriver.common.action_chains import ActionChains
from selenium.webdriver.common.keys import Keys
from bokeh.io import save
from bokeh.plotting import figure
from tests.integration.utils import has_no_console_errors, wait_for_canvas_resize
pytestmark = pytest.mark.integration
def make_plot(tools='box_select'):
    """Build the 800x800 test figure: three 1x1 rects at x = 1, 3, 5, y = 3,
    on a fixed (0, 6) range so selection coordinates are predictable."""
    fig = figure(id='plot-id', height=800, width=800, x_range=(0, 6),
                 y_range=(0, 6), tools=tools)
    fig.rect(name='rect-glyph', x=[1, 3, 5], y=[3, 3, 3], width=1, height=1)
    return fig
def perform_box_selection(selenium, start, end, hold_shift=False):
    """Drag a box-select gesture on the plot canvas.

    *start* is an (x, y) offset within the canvas; *end* is a relative
    move from there. With ``hold_shift`` the gesture extends the current
    selection instead of replacing it.
    """
    canvas = selenium.find_element_by_tag_name('canvas')
    # The canvas resizes asynchronously after load; interact only once stable.
    wait_for_canvas_resize(canvas, selenium)
    chain = ActionChains(selenium)
    if hold_shift:
        chain.key_down(Keys.LEFT_SHIFT)
    chain.move_to_element_with_offset(canvas, *start)
    chain.click_and_hold()
    chain.move_by_offset(*end)
    chain.release()
    if hold_shift:
        chain.key_up(Keys.LEFT_SHIFT)
    chain.perform()
def test_selection_tool_make_selection(output_file_url, selenium):
    """Box-selecting over the first two rects marks indices [0, 1] as selected."""
    plot = make_plot()
    save(plot)
    selenium.get(output_file_url)
    assert has_no_console_errors(selenium)

    perform_box_selection(selenium, (50, 200), (450, 400))

    # Read the selection back out of the in-page BokehJS model.
    code = "return Bokeh.index['plot-id'].model.select_one('rect-glyph').data_source.selected['1d'].indices"
    selected = selenium.execute_script(code)
    assert selected == [0, 1]
def test_selection_tool_selection_ending_outside_frame_makes_selection(output_file_url, selenium):
    """A drag that ends beyond the plot frame still selects everything inside it."""
    plot = make_plot()
    save(plot)
    selenium.get(output_file_url)
    assert has_no_console_errors(selenium)

    # make selection ending outside of frame
    perform_box_selection(selenium, (50, 50), (1000, 1000))

    # All three rects fall inside the (clipped) selection box.
    code = "return Bokeh.index['plot-id'].model.select_one('rect-glyph').data_source.selected['1d'].indices"
    selected = selenium.execute_script(code)
    assert selected == [0,1,2]
def test_selection_tool_non_selection_clears_selected(output_file_url, selenium):
    """Box-selecting an empty region clears a previously made selection."""
    plot = make_plot()
    save(plot)
    selenium.get(output_file_url)
    assert has_no_console_errors(selenium)

    # make first selection
    perform_box_selection(selenium, (50, 200), (200, 400))

    # make selection over no glyphs
    perform_box_selection(selenium, (50, 50), (50, 50))

    code = "return Bokeh.index['plot-id'].model.select_one('rect-glyph').data_source.selected['1d'].indices"
    selected = selenium.execute_script(code)
    assert selected == []
def test_selection_tool_new_selection_clears_old_selection(output_file_url, selenium):
    """A second (non-shift) box selection replaces the first one entirely."""
    plot = make_plot()
    save(plot)
    selenium.get(output_file_url)
    assert has_no_console_errors(selenium)

    # make first selection
    perform_box_selection(selenium, (50, 200), (250, 400))

    # make second selection
    perform_box_selection(selenium, (250, 200), (300, 400))

    # Only the rect under the second box (index 1) remains selected.
    code = "return Bokeh.index['plot-id'].model.select_one('rect-glyph').data_source.selected['1d'].indices"
    selected = selenium.execute_script(code)
    assert selected == [1]
def test_selection_tool_multiselection_with_shift(output_file_url, selenium):
    """Holding shift during the second box selection extends the first one."""
    plot = make_plot()

    # Save the plot and start the test
    save(plot)
    selenium.get(output_file_url)
    assert has_no_console_errors(selenium)

    # make first selection
    perform_box_selection(selenium, (50, 200), (250, 400))

    # make second, multi-selection with shift
    perform_box_selection(selenium, (475, 200), (275, 400), hold_shift=True)

    # Both the first and the third rects are now selected.
    code = "return Bokeh.index['plot-id'].model.select_one('rect-glyph').data_source.selected['1d'].indices"
    selected = selenium.execute_script(code)
    assert selected == [0, 2]
| StarcoderdataPython |
11252336 | import nltk
import random
import igraph as ig
import matplotlib.pyplot as plt
from gensim.models import Doc2Vec
from gensim import utils
from plot_chat_path import plot_sentiment, plot_temporal_cluster_using_path, load_tsne_coordinates_from
class ConversationGraph:
    """Walks a conversation corpus as a graph of sentence clusters.

    Vertices come from a pre-computed edge file, Doc2Vec supplies
    sentence embeddings, and replies are produced by following shortest
    paths toward a goal vertex.
    """

    def __init__(self, conversation_file, edges_filename, doc2vec_filename):
        """Load the graph, the Doc2Vec model and the raw conversation lines.

        :param conversation_file: text file, one conversation line per row
        :param edges_filename: ';'-separated edge list (see _load_edges)
        :param doc2vec_filename: path to a trained gensim Doc2Vec model
        """
        self.g, self.vertex_to_lines_dict, self.lines_to_vertex_dict = self._load_edges(edges_filename)
        self.doc2vec_model = Doc2Vec.load(doc2vec_filename)
        # Hard-coded default goal; define_endpoint() can replace it.
        self.goal_line = 'LINES_106702'
        self.goal = self.lines_to_vertex_dict[self.goal_line]
        self.goal_vector = self.doc2vec_model.docvecs[self.goal_line]
        # Map 'LINES_<i>' tags back to the raw text of line i.
        self.lines_dict = {'LINES_' + str(i): line.replace('\n', '')
                           for i, line in enumerate(open(conversation_file,
                                                    encoding="ISO-8859-1").readlines())}
        self.current_node = -1
        self.current_vector = []
        self.path = []
        self.lines_in_path = []

    def _load_edges(self, filename):
        """Parse the edge file into an igraph Graph plus two lookup dicts.

        Each row is ``from_vertex;from_lines;to_vertex;to_lines;weight``.
        Edges with weight < 2 are dropped.
        """
        file = open(filename)
        lines = file.readlines()
        g = ig.Graph(directed=False)
        vertex_to_lines_dict = {}
        lines_to_vertex_dict = {}
        vertices = set()
        edges = []
        for line in lines:
            row = line.split(';')
            weight = row[4]
            if int(weight) < 2:
                # Prune weak edges to keep the graph sparse.
                continue
            from_vertex = row[0]
            to_vertex = row[2]
            from_lines = row[1].replace(' ', '').replace('\'', '').split(',')
            to_lines = row[3].replace(' ', '').replace('\'', '').split(',')
            vertex_to_lines_dict[from_vertex] = from_lines
            vertex_to_lines_dict[to_vertex] = to_lines
            # NOTE(review): the first assignment stores a plain string, so the
            # later ``.append`` always raises AttributeError and the except
            # branch OVERWRITES the entry -- net effect: each line maps to the
            # LAST vertex seen. Downstream code (__init__ and
            # _get_most_similar_vertex_from_string) relies on the value being
            # a single vertex id, so confirm intent before "fixing" this.
            for line in from_lines:
                try:
                    lines_to_vertex_dict[line].append(from_vertex)
                except:
                    lines_to_vertex_dict[line] = from_vertex
            for line in to_lines:
                try:
                    lines_to_vertex_dict[line].append(to_vertex)
                except:
                    lines_to_vertex_dict[line] = to_vertex
            vertices.add(from_vertex)
            vertices.add(to_vertex)
            edges.append((from_vertex, to_vertex))
        g.add_vertices(list(vertices))
        g.add_edges(edges)
        return g, vertex_to_lines_dict, lines_to_vertex_dict

    def _get_most_similar_vertex_from_string(self, string):
        """Embed *string* and return (vertex, vector, line-tag) of the most
        similar corpus line that actually belongs to a graph vertex.

        Returns (-1, '', '') when none of the top-1000 neighbours is mapped.
        """
        model = self.doc2vec_model
        tokenizer = nltk.tokenize.TweetTokenizer()
        words = tokenizer.tokenize(utils.to_unicode(string))
        words = [word.lower() for word in words]
        vector = model.infer_vector(words)
        pairs = model.docvecs.most_similar([vector], topn=1000)
        best_node = -1
        best_vector = ''
        best_line = ''
        for pair in pairs:
            line = pair[0]
            # Take the first (= most similar) neighbour that has a vertex;
            # KeyError for unmapped lines simply moves on to the next pair.
            try:
                vertex = self.lines_to_vertex_dict[line]
                best_node = vertex
                best_vector = model.docvecs[line]
                best_line = line
                break
            except:
                pass
        return best_node, best_vector, best_line

    def define_endpoint(self, end_string):
        """Re-target the walk: the goal becomes the vertex most similar to *end_string*."""
        self.goal, self.goal_vector, _ = self._get_most_similar_vertex_from_string(end_string)

    def _get_shortest_paths(self, start, end):
        """Return the remaining shortest path from *start* to *end*.

        If *start* already lies on the cached path, just drop everything up
        to and including it; otherwise recompute via igraph.
        NOTE(review): the bare except also swallows the int() conversion
        failures for non-numeric vertex ids -- confirm that is intended.
        """
        try:
            position = self.path.index(int(start))
            self.path = self.path[position + 1:]
            return self.path
        except:
            pass
        self.path = self.g.get_shortest_paths(start, end)[0]
        return self.path

    def _find_next_node_in_path(self, node):
        """Next vertex on the path from *node* to the goal, or -1 if none."""
        path = self._get_shortest_paths(node, self.goal)
        if path:
            return str(path[0])
        return -1

    def _find_random_line_number_in_node(self, node):
        """Pick a random corpus line tag belonging to *node*'s cluster."""
        lines_in_node = self.vertex_to_lines_dict[node]
        return random.choice(lines_in_node)

    def find_next_line_in_path(self, string):
        """Produce the next reply for *string*, or 'END!' once the goal is reached."""
        current_node, current_vector, current_line = self._get_most_similar_vertex_from_string(string)
        if not self.lines_in_path:
            # First turn: also record the opening line itself.
            self.lines_in_path.append(current_line)
        if current_node == self.goal:
            return 'END!'
        node = self._find_next_node_in_path(current_node)
        if node == self.goal:
            return 'END!'
        if node == -1:
            # No path found: jump straight to the goal cluster.
            node = self.goal
        self.current_node = node
        self.current_vector = current_vector
        chosen_line_number = self._find_random_line_number_in_node(node)
        chosen_line = self.lines_dict[chosen_line_number]
        self.lines_in_path.append(chosen_line_number)
        return chosen_line

    def get_line_numbers_in_path(self):
        """Return the visited line tags as plain integer line numbers."""
        return [int(item.replace('LINES_', '')) for item in self.lines_in_path]

    def start_new_path(self):
        """Forget the visited lines so a fresh conversation can begin."""
        self.lines_in_path = []
if __name__ == '__main__':
    # Demo: simulate 100 Alice/Bob conversations that steer from FIRST_LINE
    # toward LAST_LINE, plotting each traversed path over the t-SNE map.
    conversation_graph = ConversationGraph('ordered_lines.txt',
                                           'results/edges.txt',
                                           'lines-150.d2v')
    FIRST_LINE = 'Hello! How are you?'
    LAST_LINE = 'Goodbye!'
    conversation_graph.define_endpoint(LAST_LINE)
    line_xy_dict, _ = load_tsne_coordinates_from('results/tsne_coordinates.txt')
    for _ in range(100):
        lines = []  # NOTE(review): never used -- presumably leftover.
        conversation_graph.start_new_path()
        next_line = FIRST_LINE
        while next_line != 'END!':
            # Alternate speakers; each turn feeds the previous reply back in.
            print('Alice: ', next_line)
            next_line = conversation_graph.find_next_line_in_path(next_line)
            if next_line == 'END!':
                print('Bob:', LAST_LINE)
                break
            print('Bob:', next_line)
            next_line = conversation_graph.find_next_line_in_path(next_line)
            if next_line == 'END!':
                print('Alice:', LAST_LINE)
                break
        print('')
        # Overlay the walked line numbers on the sentiment/t-SNE scatter.
        nodes_in_path = conversation_graph.get_line_numbers_in_path()
        plot_sentiment(line_xy_dict)
        plot_temporal_cluster_using_path(line_xy_dict, nodes_in_path)
        plt.show()
        print('--')
9694470 | <filename>super_sac/nets/cnns.py
import math
import numpy as np
import torch
import torch.nn.functional as F
from torch import distributions as pyd
from torch import nn
from . import weight_init
def compute_conv_output(
    inp_shape, kernel_size, padding=(0, 0), dilation=(1, 1), stride=(1, 1)
):
    """
    Compute the output (height, width) of a torch ``Conv2d`` layer using the
    formula from the PyTorch docs:

        out = floor((in + 2*pad - dilation*(kernel - 1) - 1) / stride) + 1

    Every argument is a tuple ordered as (height, width),
    e.g. ``kernel_size=(3, 4)``.
    """

    def _axis_out(size, kernel, pad, dil, strd):
        # Single-axis version of the Conv2d size formula.
        return math.floor((size + 2 * pad - dil * (kernel - 1) - 1) / strd) + 1

    height_out = _axis_out(
        inp_shape[0], kernel_size[0], padding[0], dilation[0], stride[0]
    )
    width_out = _axis_out(
        inp_shape[1], kernel_size[1], padding[1], dilation[1], stride[1]
    )
    return height_out, width_out
class BigPixelEncoder(nn.Module):
    """Four-layer CNN image encoder producing a tanh-squashed embedding.

    Pipeline: scale pixels to [-0.5, 0.5] -> 4 conv layers -> linear ->
    LayerNorm -> tanh, yielding a state vector of size ``out_dim``.
    """

    def __init__(self, obs_shape, out_dim=50):
        # obs_shape is (channels, height, width); spatial dims feed the
        # conv-output-size computation below.
        super().__init__()
        channels = obs_shape[0]
        self.conv1 = nn.Conv2d(channels, 32, kernel_size=3, stride=2)
        self.conv2 = nn.Conv2d(32, 32, kernel_size=3, stride=1)
        self.conv3 = nn.Conv2d(32, 32, kernel_size=3, stride=1)
        self.conv4 = nn.Conv2d(32, 32, kernel_size=3, stride=1)

        # Track the spatial size through the stack so the fc layer can be
        # sized exactly: one stride-2 layer, then three stride-1 layers.
        output_height, output_width = compute_conv_output(
            obs_shape[1:], kernel_size=(3, 3), stride=(2, 2)
        )
        for _ in range(3):
            output_height, output_width = compute_conv_output(
                (output_height, output_width), kernel_size=(3, 3), stride=(1, 1)
            )

        self.fc = nn.Linear(output_height * output_width * 32, out_dim)
        self.ln = nn.LayerNorm(out_dim)
        # weight_init is imported from the sibling module (see top of file).
        self.apply(weight_init)
        self.embedding_dim = out_dim

    def forward(self, obs):
        # Normalize uint8 pixels [0, 255] to roughly zero-centered [-0.5, 0.5].
        img = (obs / 255.0) - 0.5
        x = F.relu(self.conv1(img))
        x = F.relu(self.conv2(x))
        x = F.relu(self.conv3(x))
        x = F.relu(self.conv4(x))
        x = x.view(x.size(0), -1)
        x = self.fc(x)
        x = self.ln(x)
        # tanh bounds the embedding to (-1, 1).
        state = torch.tanh(x)
        return state
class SmallPixelEncoder(nn.Module):
    """Three-layer CNN image encoder (DQN-Nature style: 8/4, 4/2, 3/1).

    NOTE(review): unlike BigPixelEncoder this only scales pixels to [0, 1]
    and returns the raw linear output (no LayerNorm / tanh) -- confirm the
    asymmetry is intended.
    """

    def __init__(self, obs_shape, out_dim=50):
        # obs_shape is (channels, height, width).
        super().__init__()
        channels = obs_shape[0]
        self.conv1 = nn.Conv2d(channels, 32, kernel_size=8, stride=4)
        self.conv2 = nn.Conv2d(32, 64, kernel_size=4, stride=2)
        self.conv3 = nn.Conv2d(64, 64, kernel_size=3, stride=1)

        # Propagate the spatial size through the three conv layers so the
        # fully-connected layer can be sized exactly.
        output_height, output_width = compute_conv_output(
            obs_shape[1:], kernel_size=(8, 8), stride=(4, 4)
        )

        output_height, output_width = compute_conv_output(
            (output_height, output_width), kernel_size=(4, 4), stride=(2, 2)
        )

        output_height, output_width = compute_conv_output(
            (output_height, output_width), kernel_size=(3, 3), stride=(1, 1)
        )

        self.fc = nn.Linear(output_height * output_width * 64, out_dim)
        # weight_init is imported from the sibling module (see top of file).
        self.apply(weight_init)
        self.embedding_dim = out_dim

    def forward(self, obs):
        # Scale uint8 pixels to [0, 1].
        img = obs / 255.0
        x = F.relu(self.conv1(img))
        x = F.relu(self.conv2(x))
        x = F.relu(self.conv3(x))
        x = x.view(x.size(0), -1)
        state = self.fc(x)
        return state
| StarcoderdataPython |
8135806 | <filename>set_cases_single.py
from singlezone_diss import main
import os
import pandas as pd
print('FIM DA IMPORTACAO')
def set_cases(N_CLUSTERS):
    """Generate single-zone EnergyPlus cases from the Sobol sample file.

    Creates ``sobol/cluster<i>`` output folders, rescales every sampled
    variable from the Sobol [-1, 1] range into its physical range, decodes
    the categorical columns, and calls ``main`` once per sample row to
    write one .epJSON file.

    Bug fix: the original assigned ``MIN_FLOOR_HEIGHT`` twice (``= 0`` then
    ``= 50``; ``MAX_FLOOR_HEIGHT`` was never defined) and used
    ``MIN_FLOOR_HEIGHT`` for both bounds in BOUNDS, so floor_height was a
    constant 50 instead of being sampled in [0, 50].

    :param N_CLUSTERS: number of sub-folders the sample rows are split into.
    """
    FOLDER = 'sobol'
    NAME_STDRD = 'sobol'
    os.system('mkdir '+FOLDER)
    for i in range(N_CLUSTERS):
        os.system('mkdir '+FOLDER+'/cluster'+str(i))
    # load samples
    # sample = pd.read_csv('sample_sobol_11-26.csv')
    sample = pd.read_csv('sample_sobol_06-11.csv')
    sample = sample[:100]  # for testing
    samples_x_cluster = sample.shape[0]/N_CLUSTERS
    # min's and max's
    MIN_AREA = 12
    MAX_AREA = 100
    MIN_RATIO = .4
    MAX_RATIO = 2.5
    MIN_ZONE_HEIGHT = 2.4
    MAX_ZONE_HEIGHT = 3.2
    MIN_ABS = .1
    MAX_ABS = .9
    MIN_SHADING = 0
    MAX_SHADING = 1.5
    MIN_AZIMUTH = 0
    MAX_AZIMUTH = 359.9
    MIN_WALL_U = 0.5
    MAX_WALL_U = 4.7
    MIN_WALL_CT = 20
    MAX_WALL_CT = 400
    MIN_WWR = .1
    MAX_WWR = .9
    MIN_OPENFAC = 0.1
    MAX_OPENFAC = 1
    MIN_THERMALLOAD = 0
    MAX_THERMALLOAD = 30
    MIN_PEOPLE = .05
    MAX_PEOPLE = 1
    MIN_FS = .3
    MAX_FS = .87
    MIN_FLOOR_HEIGHT = 0
    MAX_FLOOR_HEIGHT = 50  # BUG FIX: was a second assignment to MIN_FLOOR_HEIGHT
    MIN_BLDG_RATIO = .3
    MAX_BLDG_RATIO = 1
    BOUNDS = {
        'area':{
            'min': MIN_AREA,
            'max': MAX_AREA
        },
        'ratio':{
            'min': MIN_RATIO,
            'max': MAX_RATIO
        },
        'zone_height':{
            'min': MIN_ZONE_HEIGHT,
            'max': MAX_ZONE_HEIGHT
        },
        'abs':{
            'min': MIN_ABS,
            'max': MAX_ABS
        },
        'shading':{
            'min': MIN_SHADING,
            'max': MAX_SHADING
        },
        'azimuth':{
            'min': MIN_AZIMUTH,
            'max': MAX_AZIMUTH
        },
        'wall_u':{
            'min': MIN_WALL_U,
            'max': MAX_WALL_U
        },
        'wall_ct':{
            'min': MIN_WALL_CT,
            'max': MAX_WALL_CT
        },
        'wwr':{
            'min': MIN_WWR,
            'max': MAX_WWR
        },
        'open_fac':{
            'min': MIN_OPENFAC,
            'max': MAX_OPENFAC
        },
        'thermal_loads':{
            'min': MIN_THERMALLOAD,
            'max': MAX_THERMALLOAD
        },
        'people':{
            'min': MIN_PEOPLE,
            'max': MAX_PEOPLE
        },
        'glass':{
            'min': MIN_FS,
            'max': MAX_FS
        },
        'floor_height':{
            'min': MIN_FLOOR_HEIGHT,
            'max': MAX_FLOOR_HEIGHT  # BUG FIX: was MIN_FLOOR_HEIGHT
        },
        'bldg_ratio':{
            'min': MIN_BLDG_RATIO,
            'max': MAX_BLDG_RATIO
        }
    }
    # means and sd (half-range), so value = mean + sd * sobol_sample
    for parameter in BOUNDS:
        mean = (BOUNDS[parameter]['max']+BOUNDS[parameter]['min'])*.5
        sd = BOUNDS[parameter]['max']-mean
        BOUNDS[parameter]['mean'] = mean
        BOUNDS[parameter]['sd'] = sd
    # prepare inputs: rescale every bounded column from [-1, 1] to physical units
    for col in sample:
        if col in list(BOUNDS):
            sample[col] = BOUNDS[col]['mean']+BOUNDS[col]['sd']*sample[col]
    # start iteration over the sample rows
    for line in range(len(sample)):
        # prepare inputs
        area = sample['area'][line]
        ratio = sample['ratio'][line]
        zone_height = sample['zone_height'][line]
        absorptance = sample['abs'][line]
        shading = sample['shading'][line]
        azimuth = sample['azimuth'][line]
        wall_u = sample['wall_u'][line]
        wall_ct = sample['wall_ct'][line]
        wwr = sample['wwr'][line]
        open_fac = sample['open_fac'][line]
        thermal_loads = sample['thermal_loads'][line]
        people = sample['people'][line]
        glass = sample['glass'][line]
        floor_height = sample['floor_height'][line]
        bldg_ratio = sample['bldg_ratio'][line]
        # Decode the categorical room-type column (5 equal bins over [-1, 1]).
        if sample['room_type'][line] < -.6:
            room_type = '1_window'
            zn = 1
            corner_window = True
        elif sample['room_type'][line] < -.2:
            room_type = '3_window'
            zn = 0
            corner_window = True
        elif sample['room_type'][line] < .2:
            room_type = '1_wall'
            zn = 1
            corner_window = False
        elif sample['room_type'][line] < .6:
            room_type = '3_wall'
            zn = 0
            corner_window = False
        else:
            room_type = '0_window'
            zn = 2
            corner_window = False
        # Binary flags: negative sample -> 0, otherwise 1.
        if sample['ground'][line] < 0:
            ground = 0
        else:
            ground = 1
        if sample['roof'][line] < 0:
            roof = 0
        else:
            roof = 1
        # Rows are split evenly over the cluster folders, in order.
        cluster_n = int(line//samples_x_cluster)
        caso = '{:05.0f}'.format(line)
        output = (FOLDER+'/cluster'+'{:01.0f}'.format(cluster_n)+'/'+NAME_STDRD+'_{}.epJSON'.format(caso))
        main(zone_area=area, zone_ratio=ratio, zone_height=zone_height,
             absorptance=absorptance, shading=shading, azimuth=azimuth,
             bldg_ratio=bldg_ratio, wall_u=wall_u, wall_ct=wall_ct,
             zn=zn, floor_height=floor_height, corner_window=corner_window,
             ground=ground, roof=roof, people=people, glass_fs=glass, wwr=wwr,
             door=False, cp_eq = True, open_fac=open_fac,
             input_file="seed_single_U-conc-eps.json", output=output)
| StarcoderdataPython |
9738489 | <filename>gym-sigmoid/gym_sigmoid/envs/sigmoidworld_env.py
import gym
from gym import spaces
from gym import utils
import numpy as np
import logging
import numpy.random as rn
import math
from itertools import product
logger = logging.getLogger(__name__)
class SigmoidWorldEnv(gym.Env, utils.EzPickle):
    """Gridworld gym environment whose rewards are a sigmoid of per-state
    features, with three goal states that transition into a terminal sink.

    States 0..grid_size**2 - 1 are grid cells; the extra last state is the
    sink. Actions are the five moves right/up/left/down/stay.
    """
    metadata = {'render.modes': ['human']}
    def __init__(self,grid_size=6,goals = [7, 20, 31],discount=0.9,weights=[1.25, 5.0, 0.],horizon =15):
        # NOTE(review): mutable default arguments (goals, weights) are shared
        # across instances -- safe only while they are never mutated.
        debug = True  # NOTE(review): unused.
        self.gtweights=np.array(weights)
        # (dx, dy) moves: right, up, left, down, stay.
        self.actions = ((1, 0), (0, 1),(-1, 0),(0, -1),(0, 0))
        self.n_actions = len(self.actions)
        self.horizon = horizon
        self.grid_size = grid_size
        self.n_states = (grid_size**2)+1#last state is the sink state
        self.discount = discount
        self.goals = goals
        self.weights = weights
        # Per-state scalar feature; the reward is a sigmoid of this value.
        self.features = np.zeros((self.n_states))
        self.features[goals[0]] = 10. # high positive reward
        self.features[self.goals[1]] = 2. #close to zero reward
        self.features[self.goals[2]] = 1. #almost zero reward
        self.features[len(self.features)-1] = -50.
        #np.save("sigmoid_obs.npy",self.features)
        # Preconstruct the transition probability array.
        self.transition_probability = np.zeros((self.n_states,self.n_actions,self.n_states))
        for i in range(self.n_states-1):
            for j in range(self.n_actions):
                self._better_transition_probability(i,j)
        #Automatic transition from any goal to sink state regardless of action taken
        #Sink state only transitions to itself
        for g in self.goals:
            self.transition_probability[g,:,:] = 0
            self.transition_probability[g,:,self.grid_size**2] = 1
        self.transition_probability[self.grid_size**2,:,:] = 0
        self.transition_probability[self.grid_size**2,:,self.grid_size**2] = 1
        self.rewards = None
        self.set_reward(self.weights)
        self.viewer = None
        self.server_process = None
        self.server_port = None
        # Observations are the scalar feature value of the current state.
        self.observation_space = spaces.Box(low=np.min(self.features), high=np.max(self.features),
                                            shape=(1,))
        # NOTE(review): comment below is inherited from another environment;
        # the action space here is a 1-D Box in [0, 5] that step() casts to
        # int and clips to the 5 discrete moves.
        #self.action_space = spaces.Discrete(len(self.actions))
        self.action_space = spaces.Box(low=0, high=5,
                                       shape=(1,))
        self.reset()
        print("init over")
    def int_to_point(self, i):
        """
        Convert a state int into the corresponding coordinate.
        i: State int.
        -> (x, y) int tuple.
        """
        return (i % self.grid_size, i // self.grid_size)
    def point_to_int(self, p):
        """
        Convert a coordinate into the corresponding state int.
        p: (x, y) tuple.
        -> State int.
        """
        return p[0] + p[1]*self.grid_size
    def _better_transition_probability(self, i, j):
        """Fill the deterministic transition row for state *i*, action *j*.

        Moves that would leave the grid (or would enter the sink from a
        non-goal state) keep the agent in place.
        """
        x, y = self.int_to_point(i)
        kpoint = [x+self.actions[j][0],y+self.actions[j][1]]
        if (kpoint[0]<0 or kpoint[0]>=self.grid_size or kpoint[1]<0 or kpoint[1]>=self.grid_size):
            k = i
        else:
            k = self.point_to_int(kpoint)
        if k == self.grid_size**2 and i not in self.goals:
            k = i
        self.transition_probability[i,j,k] = 1
    def set_reward(self, w):
        """Recompute per-state rewards as w[2] + 10 / (1 + exp(-w[0]*(features - w[1])))."""
        self.weights = w
        rewards = 10./(1. + np.exp(-1.*w[0]*(self.features-w[1])))
        #rewards[len(self.features)-1] = 0
        rewards += w[2]
        self.rewards = rewards
    def step(self, action):
        """Take one environment step; *action* is a length-1 float array.

        Returns (observation, reward, done, info); done is always False
        (the sink state absorbs instead of terminating the episode).
        """
        # NOTE(review): np.int was removed in NumPy 1.24, so this line
        # requires an older NumPy -- confirm the pinned version.
        action = action.astype(np.int)[0]
        action = min(max(action,0),4)
        #print("#### Action: %d####" %action)
        obs,obsind = self._take_action(action)
        self.obs = obs
        self.obsind = obsind
        reward = self.rewards[obsind]
        episode_over = False
        #if obsind == self.n_states-1:
        #    episode_over = True
        return obs, reward, episode_over, {}
    def _take_action(self, action):
        """Sample the next state from the transition row of (current state, action)."""
        next_state_prob = self.transition_probability[self.obsind,action]
        next_state_ind = np.random.choice(np.arange(self.n_states),1,p=next_state_prob)[0]
        next_state = self.features[next_state_ind]
        return next_state,next_state_ind
    def reset(self):
        """Restart in a uniformly random state and return its observation."""
        stateind = np.random.randint(0,len(self.features),1)[0]
        obs = self.features[stateind]
        self.obs = obs
        self.obsind = stateind
        return obs
    def _render(self, mode='human', close=False):
        """ Viewer only supports human mode currently. """
        raise NotImplementedError
| StarcoderdataPython |
1821848 | from django.urls import re_path
from .views import ServeMediaView
# URL namespace, e.g. reverse('gridfs_storage:media_url').
app_name = 'gridfs_storage'

# Serve any GridFS-stored file: the pattern greedily captures everything
# before the mandatory trailing slash into ``file_path``.
urlpatterns = [
    re_path(r'^(?P<file_path>.*)/$', ServeMediaView.as_view(), name='media_url')
]
| StarcoderdataPython |
6572944 | from functools import reduce
import random
from collections import defaultdict
from MiniFlow.nn.core import Placeholder
def topological(graph):
    """Return one topological ordering of *graph*.

    *graph* maps each node to the list of nodes it feeds into. A node with
    no incoming edges is repeatedly removed (chosen at random among the
    candidates); the successors of the final node are appended at the end.

    Raises TypeError when the graph contains a cycle.
    """
    remaining = graph.copy()
    ordering = []
    while remaining:
        nodes_with_inputs = set(reduce(lambda a, b: a + b, list(remaining.values())))
        source_nodes = set(remaining.keys()) - nodes_with_inputs
        if not source_nodes:
            raise TypeError("This graph has circle, which cannot get topoligical order!")
        node = random.choice(list(source_nodes))
        ordering.append(node)
        if len(remaining) == 1:
            # Last key standing: its successors close out the ordering.
            ordering += remaining[node]
        remaining.pop(node)
    return ordering
def convert_feed_dict_to_graph(feed_dict):
    """BFS from the Placeholder inputs, recording node -> outputs adjacency.

    Placeholder nodes also get their value assigned from *feed_dict* along
    the way. Returns a defaultdict(list) mapping each visited node to the
    nodes it feeds into.
    """
    computing_graph = defaultdict(list)
    frontier = [node for node in feed_dict]
    while frontier:
        current = frontier.pop(0)
        if current in computing_graph:
            # Already expanded; avoid re-walking shared sub-graphs.
            continue
        if isinstance(current, Placeholder):
            current.value = feed_dict[current]
        for downstream in current.outputs:
            frontier.append(downstream)
            computing_graph[current].append(downstream)
    return computing_graph
def forward(sorted_nodes):
    """Run the forward pass over nodes already in topological order."""
    for current in sorted_nodes:
        current.forward()
def backward(sorted_nodes):
    """Run the backward pass, visiting the topological order in reverse."""
    for current in reversed(sorted_nodes):
        current.backward()
def forward_and_backward(sorted_nodes):
    """Convenience wrapper: one full forward pass, then one backward pass."""
    forward(sorted_nodes)
    backward(sorted_nodes)
def topological_sort_feed_dict(feed_dict):
    """Build the dependency graph for *feed_dict* and topologically sort it."""
    return topological(convert_feed_dict_to_graph(feed_dict))
def optimize(trainables, learning_rate=1e-1):
    """Vanilla SGD step: each trainable's value moves against its gradient."""
    for param in trainables:
        param.value -= learning_rate * param.gradients[param]
| StarcoderdataPython |
1672301 | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
# Copyright 2013-2020 CERN
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Authors:
# - <NAME> <<EMAIL>>, 2013-2015
# - <NAME> <<EMAIL>>, 2014-2020
# - <NAME> <<EMAIL>>, 2014
# - <NAME> <<EMAIL>>, 2014-2017
# - <NAME> <<EMAIL>>, 2017
# - <NAME> <<EMAIL>>, 2017-2018
# - <NAME> <<EMAIL>>, 2019
# - <NAME> <<EMAIL>>, 2019
# - <NAME> <<EMAIL>>, 2020
# - <NAME> <<EMAIL>>, 2020
# - <NAME>' <<EMAIL>>, 2020
import sys
import os.path
# Bootstrap: put the repository root on sys.path and cd into it so the
# 'rucio' package and any relative paths resolve no matter where the
# script is launched from.
base_path = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
sys.path.append(base_path)
os.chdir(base_path)
# Imports must come after the sys.path manipulation above, hence the noqa.
from rucio.api.vo import add_vo  # noqa: E402
from rucio.client import Client  # noqa: E402
from rucio.common.config import config_get, config_get_bool  # noqa: E402
from rucio.common.exception import Duplicate  # noqa: E402
from rucio.core.account import add_account_attribute  # noqa: E402
from rucio.common.types import InternalAccount  # noqa: E402
if __name__ == '__main__':
    # Bootstrap a Rucio test deployment: create a test VO (in multi-VO mode),
    # the service accounts, scopes and identities used by the test suite.
    # The script is idempotent: existing entries raise Duplicate and are
    # reported and skipped.
    if config_get_bool('common', 'multi_vo', raise_exception=False, default=False):
        vo = {'vo': config_get('client', 'vo', raise_exception=False, default='tst')}
        try:
            add_vo(new_vo=vo['vo'], issuer='super_root', description='A VO to test multi-vo features', email='N/A', vo='def')
        except Duplicate:
            # Fix: the original appended "% locals()" to these messages; on an
            # already-formatted string that is a no-op at best and raises
            # ValueError if the message ever contains a literal '%'.
            print('VO {} already added'.format(vo['vo']))
    else:
        vo = {}
    c = Client()
    try:
        c.add_account('jdoe', 'SERVICE', '<EMAIL>')
    except Duplicate:
        print('Account jdoe already added')
    try:
        # Bypass the client because schema validation fails at the API level.
        add_account_attribute(account=InternalAccount('root', **vo), key='admin', value=True)
    except Exception as error:
        print(error)
    try:
        c.add_account('panda', 'SERVICE', '<EMAIL>')
    except Duplicate:
        print('Account panda already added')
    try:
        # Set the admin attribute in its own try-block so it is applied even
        # when the panda account already existed (previously the Duplicate
        # raised by add_account skipped the attribute on re-runs).
        add_account_attribute(account=InternalAccount('panda', **vo), key='admin', value=True)
    except Exception as error:
        print(error)
    try:
        c.add_scope('jdoe', 'mock')
    except Duplicate:
        print('Scope mock already added')
    try:
        c.add_scope('root', 'archive')
    except Duplicate:
        print('Scope archive already added')
    # add your accounts here, if you test against CERN authed nodes
    additional_test_accounts = [('/DC=ch/DC=cern/OU=Organic Units/OU=Users/CN=mlassnig/CN=663551/CN=<NAME>', 'x509', '<EMAIL>'),
                                ('/DC=ch/DC=cern/OU=Organic Units/OU=Users/CN=barisits/CN=692443/CN=<NAME>', 'x509', '<EMAIL>'),
                                ('/DC=ch/DC=cern/OU=Organic Units/OU=Users/CN=tbeerman/CN=722011/CN=<NAME>', 'x509', '<EMAIL>'),
                                ('/DC=ch/DC=cern/OU=Organic Units/OU=Users/CN=ruciobuildbot/CN=692443/CN=Robot: Rucio build bot', 'x509', '<EMAIL>'),
                                ('/CN=docker client', 'x509', '<EMAIL>'),
                                ('<EMAIL>', 'GSS', '<EMAIL>')]
    for i in additional_test_accounts:
        try:
            c.add_identity(account='root', identity=i[0], authtype=i[1], email=i[2])
        except Exception:
            print('Already added: ', i)
| StarcoderdataPython |
1659687 | <filename>scripts/addons/auto_mirror_ex.py<gh_stars>1-10
# b2.80~ update by Bookyakuno
####################################################################
# Quickly set up mirror modifiers and assist mirror modifiers
# Detailed authorship:
# Bookyakuno(Current support b2.80~)
# Lapineige(AutoMirror ~b2.79),
# <NAME> & MX(MirrorMirror ~b2.79),
####################################################################
# ##### BEGIN GPL LICENSE BLOCK #####
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software Foundation,
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#
# ##### END GPL LICENSE BLOCK #####
# Add-on metadata read by Blender's add-on manager (Preferences > Add-ons).
bl_info = {
    "name": "Auto Mirror EX",
    "description": "Super fast cutting and mirroring for mesh",
    "author": "Lapineige, <NAME> & MX, Bookyakuno(Blender2.8 Update)",
    "version": (2, 9, 4),
    "blender": (2, 93, 0),
    "location": "View 3D > Sidebar > Tools tab > AutoMirror (panel)",
    "warning": "",
    "wiki_url": "https://bookyakuno.com/auto-mirror/",
    "category": "Mesh"}
import bpy
import rna_keymap_ui # Required for displaying keymap list Menu
from mathutils import Vector
from bpy.props import *
from bpy.types import (
Operator,
Menu,
Panel,
PropertyGroup,
AddonPreferences,
)
# Items for the 'mod_type' EnumProperty shared by the operators below.
# Each tuple is (identifier, UI name, description, icon, unique number),
# following Blender's EnumProperty items convention and mirroring the
# built-in modifier-type enum.
# NOTE(review): the "____" separator entry is repeated three times with the
# same identifier; duplicate identifiers in an EnumProperty are dubious —
# confirm Blender tolerates this before relying on those entries.
mod_items = [
    ("DATA_TRANSFER", "Data Transfer","","MOD_DATA_TRANSFER",0),
    ("MESH_CACHE", "Mesh Cache","","MOD_MESH_CACHE",1),
    ("MESH_SEQUENCE_CACHE", "Mesh Sequence Cache","","MOD_MESH_SEQUENCE_CACHE",2),
    ("NORMAL_EDIT", "Normal Edit","","MOD_NORMALEDIT",3),
    ("WEIGHTED_NORMAL", "Weighted Normal","","MOD_NORMALEDIT",4),
    ("UV_PROJECT", "UV Project","","MOD_UVPROJECT",5),
    ("UV_WARP", "UV Warp","","MOD_UVPROJECT",6),
    ("VERTEX_WEIGHT_EDIT", "Vertex Weight Edit","","MOD_VERTEX_WEIGHT",7),
    ("VERTEX_WEIGHT_MIX", "Vertex Weight Mix","","MOD_VERTEX_WEIGHT",8),
    ("VERTEX_WEIGHT_PROXIMITY", "Vertex Weight Proximity","","MOD_VERTEX_WEIGHT",9),
    ("____", "____","","BLANK1",10),
    ("ARRAY", "Array","","MOD_ARRAY",11),
    ("BEVEL", "Bevel","","MOD_BEVEL",12),
    ("BOOLEAN", "Boolean","","MOD_BOOLEAN",13),
    ("BUILD", "Build","","MOD_BUILD",14),
    ("DECIMATE", "Decimate","","MOD_DECIM",15),
    ("EDGE_SPLIT", "Edge Split","","MOD_EDGESPLIT",16),
    ("MASK", "Mask","","MOD_MASK",17),
    ("MIRROR", "Mirror","","MOD_MIRROR",18),
    ("MULTIRES", "Multiresolution","","MOD_MULTIRES",19),
    ("REMESH", "Remesh","","MOD_REMESH",20),
    ("SCREW", "Screw","","MOD_SCREW",21),
    ("SKIN", "Skin","","MOD_SKIN",22),
    ("SOLIDIFY", "Solidify","","MOD_SOLIDIFY",23),
    ("SUBSURF", "Subdivision Surface","","MOD_SUBSURF",24),
    ("TRIANGULATE", "Triangulate","","MOD_TRIANGULATE",25),
    ("WIREFRAME", "Wireframe","","MOD_WIREFRAME",26),
    ("____", "____","","BLANK1",27),
    ("WELD", "Weld","","MOD_WELD",28),
    ("ARMATURE", "Armature","","MOD_ARMATURE",29),
    ("CAST", "Cast","","MOD_CAST",30),
    ("CURVE", "Curve","","MOD_CURVE",31),
    ("DISPLACE", "Displace","","MOD_DISPLACE",32),
    ("HOOK", "Hook","","HOOK",33),
    ("LAPLACIANDEFORM", "Laplacian Deform","","MOD_LAPLACIANDEFORM",34),
    ("LATTICE", "Lattice","","MOD_LATTICE",35),
    ("MESH_DEFORM", "Mesh Deform","","MOD_MESH_DEFORM",36),
    ("SHRINKWRAP", "Shrinkwrap","","MOD_SHRINKWRAP",37),
    ("SIMPLE_DEFORM", "Simple Deform","","MOD_SIMPLEDEFORM",38),
    ("SMOOTH", "Smooth","","MOD_SMOOTH",39),
    ("CORRECTIVE_SMOOTH", "Smooth Corrective","","MOD_SMOOTH",40),
    ("LAPLACIANSMOOTH", "Smooth Laplacian","","MOD_SMOOTH",41),
    ("SURFACE_DEFORM", "Surface Deform","","MOD_MESHDEFORM",42),
    ("WARP", "Warp","","MOD_WARP",43),
    ("WAVE", "Wave","","MOD_WAVE",44),
    ("____", "____","","BLANK1",45),
    ("CLOTH", "Cloth","","MOD_CLOTH",46),
    ("COLLISION", "Collision","","MOD_COLLISION",47),
    ("DYNAMIC_PAINT", "Dynamic Paint","","MOD_DYNAMIC_PAINT",48),
    ("EXPLODE", "Explode","","MOD_EXPLODE",49),
    ("OCEAN", "Ocean","","MOD_OCEAN",50),
    ("PARTICLE_INSTANCE", "Particle Instance","","MOD_PARTICLE_INSTANCE",51),
    ("PARTICLE_SYSTEM", "Particle System","","PARTICLES",52),
    ("FLUID", "Fluid Simulation","","MOD_FLUID",53),
    ("SOFT_BODY", "Soft Body","","MOD_SOFT",54),
    ("SURFACE", "Surface","","MOD_SURFACE",55),
    ]
# Modifier type identifier -> human-readable UI name. Built once at import
# time; replaces a 55-branch if-chain (which also contained dead duplicate
# '____' branches) with a single O(1) lookup.
_MOD_TYPE_NAMES = {
    "DATA_TRANSFER": "Data Transfer",
    "MESH_CACHE": "Mesh Cache",
    "MESH_SEQUENCE_CACHE": "Mesh Sequence Cache",
    "NORMAL_EDIT": "Normal Edit",
    "WEIGHTED_NORMAL": "Weighted Normal",
    "UV_PROJECT": "UV Project",
    "UV_WARP": "UV Warp",
    "VERTEX_WEIGHT_EDIT": "Vertex Weight Edit",
    "VERTEX_WEIGHT_MIX": "Vertex Weight Mix",
    "VERTEX_WEIGHT_PROXIMITY": "Vertex Weight Proximity",
    "____": "____",
    "ARRAY": "Array",
    "BEVEL": "Bevel",
    "BOOLEAN": "Boolean",
    "BUILD": "Build",
    "DECIMATE": "Decimate",
    "EDGE_SPLIT": "Edge Split",
    "MASK": "Mask",
    "MIRROR": "Mirror",
    "MULTIRES": "Multiresolution",
    "REMESH": "Remesh",
    "SCREW": "Screw",
    "SKIN": "Skin",
    "SOLIDIFY": "Solidify",
    "SUBSURF": "Subdivision Surface",
    "TRIANGULATE": "Triangulate",
    "WIREFRAME": "Wireframe",
    "WELD": "Weld",
    "ARMATURE": "Armature",
    "CAST": "Cast",
    "CURVE": "Curve",
    "DISPLACE": "Displace",
    "HOOK": "Hook",
    "LAPLACIANDEFORM": "Laplacian Deform",
    "LATTICE": "Lattice",
    "MESH_DEFORM": "Mesh Deform",
    "SHRINKWRAP": "Shrinkwrap",
    "SIMPLE_DEFORM": "Simple Deform",
    "SMOOTH": "Smooth",
    "CORRECTIVE_SMOOTH": "Smooth Corrective",
    "LAPLACIANSMOOTH": "Smooth Laplacian",
    "SURFACE_DEFORM": "Surface Deform",
    "WARP": "Warp",
    "WAVE": "Wave",
    "CLOTH": "Cloth",
    "COLLISION": "Collision",
    "DYNAMIC_PAINT": "Dynamic Paint",
    "EXPLODE": "Explode",
    "OCEAN": "Ocean",
    "PARTICLE_INSTANCE": "Particle Instance",
    "PARTICLE_SYSTEM": "Particle System",
    "FLUID": "Fluid Simulation",
    "SOFT_BODY": "Soft Body",
    "SURFACE": "Surface",
}


def get_mode_name(mod_type):
    """Return the display name for a modifier 'type' enum identifier.

    Returns None for unknown identifiers, matching the original if-chain
    which fell through without an explicit return.
    """
    return _MOD_TYPE_NAMES.get(mod_type)
# Changing the category in which the panel menu is displayed
# Executed when Blender starts or when a checkbox is changed
def update_panel(self, context):
    """Update callback for the 'category' preference.

    Re-registers each panel class under the newly chosen sidebar tab.
    NOTE(review): `panels` is a module-level sequence defined elsewhere in
    this file (not visible here) — presumably the add-on's Panel classes;
    verify before modifying.
    """
    message = ": Updating Panel locations has failed"
    try:
        for panel in panels:
            # Only unregister panels that are currently registered
            # (registered classes carry a 'bl_rna' attribute).
            if "bl_rna" in panel.__dict__:
                bpy.utils.unregister_class(panel)
        for panel in panels:
            panel.bl_category = context.preferences.addons[__name__].preferences.category
            bpy.utils.register_class(panel)
    except Exception as e:
        print("\n[{}]\n{}\n\nError:\n{}".format(__name__, message, e))
        pass
# Add / Remove keymap
# Executed when Blender starts or when a checkbox is changed
def update_keymap(self, context):
    """Update callback for the 'keymap_automirror' preference.

    Registers or unregisters the add-on keymap depending on the checkbox
    state. Failures are ignored because preferences may not be fully
    initialised while Blender is starting up.
    """
    try:
        addon_prefs = bpy.context.preferences.addons[__name__].preferences
        if addon_prefs.keymap_automirror:
            add_keymap_automirror()
        else:
            remove_keymap_automirror()
    except Exception:
        # Narrowed from a bare 'except:' so KeyboardInterrupt/SystemExit are
        # no longer swallowed; everything else stays best-effort.
        pass
class AUTOMIRROR_MT_AddonPreferences(AddonPreferences):
    """Add-on preferences: sidebar tab name, optional keymap, and links."""
    bl_idname = __name__

    # Sidebar tab to show the panel in; changing it re-registers the panels.
    category : StringProperty(name="Tab Category", description="Choose a name for the category of the panel", default="Edit", update=update_panel)
    # Enabling this installs the add-on's shortcut keys.
    keymap_automirror : BoolProperty(name = "Add Keymap (X/Y/Z/F + Shift + alt)", update = update_keymap)
    tab_addon_menu : EnumProperty(name="Tab", description="", items=[('Option', "Option", "","DOT",0),('Keymap', "Keymap", "","KEYINGSET",1), ('Link', "Link", "","URL",2)], default='Option')

    def draw(self, context):
        """Draw the preferences UI: one sub-page per tab."""
        layout = self.layout
        row = layout.row(align=True)
        row.prop(self, "tab_addon_menu",expand=True)
        if self.tab_addon_menu=="Option":
            box = layout.box()
            row = box.row()
            col = row.column()
            col.label(text="Tab Category:")
            col.prop(self, "category", text="")
        # Keymap
        # Search the keymap that matches the operator name and the property status of the keymap registered with the add-on from the Blender keymap setting and display it in the menu
        if self.tab_addon_menu=="Keymap":
            box = layout.box()
            box.prop(self, "keymap_automirror")
            if self.keymap_automirror:
                col = box.column()
                col.label(text="Keymap List:",icon="KEYINGSET")
                wm = bpy.context.window_manager
                kc = wm.keyconfigs.user
                old_km_name = ""
                get_kmi_l = []
                # NOTE(review): addon_keymaps is a module-level list defined
                # elsewhere in this file (not visible here); it holds the
                # (keymap, keymap_item) pairs registered by the add-on.
                for km_add, kmi_add in addon_keymaps:
                    # Find the user keyconfig's keymap with the same name ...
                    for km_con in kc.keymaps:
                        if km_add.name == km_con.name:
                            km = km_con
                            break
                    # ... then collect the user-level items matching ours.
                    for kmi_con in km.keymap_items:
                        if kmi_add.idname == kmi_con.idname:
                            if kmi_add.name == kmi_con.name:
                                get_kmi_l.append((km,kmi_con))
                # Deduplicate while preserving the original order.
                get_kmi_l = sorted(set(get_kmi_l), key=get_kmi_l.index)
                for km, kmi in get_kmi_l:
                    # Print the keymap name only when it changes.
                    if not km.name == old_km_name:
                        col.label(text=str(km.name),icon="DOT")
                    col.context_pointer_set("keymap", km)
                    rna_keymap_ui.draw_kmi([], kc, km, kmi, col, 0)
                    col.separator()
                    old_km_name = km.name
        if self.tab_addon_menu=="Link":
            box = layout.box()
            col = box.column(align=True)
            col.label(text="Store Site")
            row = col.row()
            row.operator("wm.url_open", text="gumroad", icon="URL").url = "https://bookyakuno.com/auto-mirror/"
            col.separator()
            col.label(text="Description")
            row = col.row()
            row.operator("wm.url_open", text="Blender Artists", icon="URL").url = "https://blenderartists.org/t/auto-mirror-blender2-8-ver/1151539"
            row.operator("wm.url_open", text="Japanese - bookyakuno.com", icon="URL").url = "https://bookyakuno.com/auto-mirror/"
            col.separator()
            col.label(text="Old version")
            row = col.row()
            row.operator("wm.url_open", text="Old version 2.7x - github", icon="URL").url = "https://github.com/lapineige/Blender_add-ons/blob/master/AutoMirror/AutoMirror_V2-4.py"
            # row.operator("wm.url_open", text="MirrorMirror - github", icon="URL").url = "https://github.com/fornof/BlenderAddons/blob/master/MirrorMirrorTool.py"
#
class AUTOMIRROR_OT_MirrorMirror(Operator):
    """Mirror to the selected object"""
    bl_idname = "automirror.mirror_mirror"
    bl_label = "MirrorMirror"
    bl_description = "Mirror another object to an axis.\n First select the objects you want to mirror,\n Second select the objects you want to be axis and then execute.\n Set up a regular mirror if there is only one selected object"
    bl_options = {'REGISTER', 'UNDO'}

    # Operator options (exposed in the redo panel).
    sort_top_mod : BoolProperty(name="Sort first Modifier",default=True)
    use_existing_mod : BoolProperty(name="Use existing mirror modifier",description="Use existing mirror modifier")
    axis_x : BoolProperty(default=True,name="Axis X")
    axis_y : BoolProperty(name="Axis Y")
    axis_z : BoolProperty(name="Axis Z")
    use_bisect_axis : BoolProperty(name="Bisect")
    use_bisect_flip_axis : BoolProperty(name="Bisect Flip")
    apply_mirror : BoolProperty(description="Apply the mirror modifier (useful to symmetrise the mesh)")

    def invoke(self, context, event):
        # Remember the active object as the mirror target in the scene
        # settings, then run immediately.
        props = bpy.context.scene.automirror
        props.mm_target_obj = bpy.context.view_layer.objects.active
        return self.execute(context)

    @classmethod
    def poll(cls, context):
        # At least one object must be selected.
        return len(bpy.context.selected_objects)

    def draw(self, context):
        layout = self.layout
        row = layout.row(align=True)
        row.prop(self,"axis_x",text="X",icon="BLANK1",emboss=True)
        row.prop(self,"axis_y",text="Y",icon="BLANK1",emboss=True)
        row.prop(self,"axis_z",text="Z",icon="BLANK1",emboss=True)
        layout.prop(self,"sort_top_mod")
        layout.prop(self,"use_bisect_axis")
        layout.prop(self,"use_bisect_flip_axis")
        row = layout.row(align=True)
        row.label(text="",icon="CHECKBOX_HLT")
        row.prop(self, "apply_mirror", text="Apply Modifier")

    def run_mirror_mirror_mesh(self, context, obj, tgt_obj):
        """Add (and optionally apply) a mirror modifier on mesh *obj*.

        *tgt_obj* (may be None) becomes the modifier's mirror object.
        Returns the created modifier.
        """
        mod = obj.modifiers.new("mirror_mirror","MIRROR")
        # Start from a clean state, then enable only the requested axes.
        mod.use_axis[0] = False
        mod.use_axis[1] = False
        mod.use_axis[2] = False
        if self.axis_x:
            mod.use_axis[0] = True
            if self.use_bisect_axis:
                mod.use_bisect_axis[0] = True
            if self.use_bisect_flip_axis:
                mod.use_bisect_flip_axis[0] = True
        if self.axis_y:
            mod.use_axis[1] = True
            if self.use_bisect_axis:
                mod.use_bisect_axis[1] = True
            if self.use_bisect_flip_axis:
                mod.use_bisect_flip_axis[1] = True
        if self.axis_z:
            mod.use_axis[2] = True
            if self.use_bisect_axis:
                mod.use_bisect_axis[2] = True
            if self.use_bisect_flip_axis:
                mod.use_bisect_flip_axis[2] = True
        if tgt_obj:
            mod.mirror_object = tgt_obj
        sort_top_mod(self,context,obj,mod,1)
        if self.apply_mirror:
            # modifier_apply only works in object mode; restore afterwards.
            current_mode = obj.mode
            bpy.ops.object.mode_set(mode='OBJECT')
            bpy.ops.object.modifier_apply(modifier= mod.name)
            bpy.ops.object.mode_set(mode=current_mode)
        return mod

    def run_mirror_mirror_gp(self, context, obj, tgt_obj):
        """Grease-pencil variant of run_mirror_mirror_mesh (GP_MIRROR modifier)."""
        mod = obj.grease_pencil_modifiers.new("mirror_mirror","GP_MIRROR")
        x = False
        y = False
        z = False
        if self.axis_x:
            x = True
        if self.axis_y:
            y = True
        if self.axis_z:
            z = True
        mod.use_axis_x = x
        mod.use_axis_y = y
        mod.use_axis_z = z
        if tgt_obj:
            mod.object = tgt_obj
        sort_top_mod(self,context,obj,mod,1)
        if self.apply_mirror:
            current_mode = obj.mode
            bpy.ops.object.mode_set(mode='OBJECT')
            bpy.ops.object.gpencil_modifier_apply(modifier= mod.name)
            bpy.ops.object.mode_set(mode=current_mode)
        return mod

    def execute(self, context):
        props = bpy.context.scene.automirror
        # info text
        info_text = "Add"
        X = "*"
        Y = "*"
        Z = "*"
        if self.axis_x:
            X = "X"
        if self.axis_y:
            Y = "Y"
        if self.axis_z:
            Z = "Z"
        if self.apply_mirror:
            info_text = "Apply"
        # main run
        if len(bpy.context.selected_objects) == 1:
            # Single selection: plain mirror with no target object.
            obj = bpy.context.object
            if obj.type=="GPENCIL":
                self.run_mirror_mirror_gp(context, obj, None)
            else:
                self.run_mirror_mirror_mesh(context, obj, None)
            self.report({'INFO'}, "%s Mirror Modifier [%s,%s,%s]" % (info_text, X, Y, Z))
            return{'FINISHED'}
        else:
            # Multi selected object: the active object is the mirror target,
            # every other selected object gets mirrored around it.
            tgt_obj = bpy.context.view_layer.objects.active
            tgt_obj.select_set(False)
            for obj in bpy.context.selected_objects:
                bpy.context.view_layer.objects.active = obj
                if obj.type=="GPENCIL":
                    self.run_mirror_mirror_gp(context, obj, tgt_obj)
                else:
                    self.run_mirror_mirror_mesh(context, obj, tgt_obj)
            sel_obj = str(len(bpy.context.selected_objects))
            self.report({'INFO'}, "%s Mirror Modifier [%s,%s,%s] to %s object" % (info_text, X, Y, Z, sel_obj))
            return {'FINISHED'}
class AUTOMIRROR_OT_mirror_toggle(Operator):
    '''Switch on / off the Mirror Modifier'''
    bl_idname = "automirror.toggle_mirror"
    bl_label = "Toggle Mirror"
    bl_options = {'REGISTER', 'UNDO'}
    bl_description = "Switch on / off the Mirror Modifier"

    # Which visibility flags should be flipped.
    show_viewport : BoolProperty(name="Viewport",default=True)
    show_render : BoolProperty(name="Render")
    show_in_editmode : BoolProperty(name="In edit mode")
    show_on_cage : BoolProperty(name="On cage")
    # Match modifiers by name instead of by type.
    only_same_name : BoolProperty(name="Only Modifier Name")
    mod_name : StringProperty(default="Mirror",name="Modifier Name")
    global mod_items
    mod_type : EnumProperty(default="MIRROR",name="Modifier Type", description="",items = mod_items)

    @classmethod
    def poll(cls, context):
        return not len(bpy.context.selected_objects) == 0

    def draw(self, context):
        layout = self.layout
        col = layout.column(align=True)
        row = col.row(align=True)
        row.active=(not self.only_same_name)
        row.prop(self, "mod_type")
        col.prop(self, "only_same_name")
        row = col.row(align=True)
        row.active=self.only_same_name
        row.prop(self, "mod_name")
        # row = col.row(align=True)
        # row.prop(self, "move_count")
        row = layout.row(align=True)
        row.prop(self, "show_render",text="",icon="RESTRICT_RENDER_OFF" if self.show_render else "RESTRICT_RENDER_ON")
        row.prop(self, "show_viewport",text="",icon="RESTRICT_VIEW_OFF" if self.show_viewport else "RESTRICT_VIEW_ON")
        row.prop(self, "show_in_editmode",text="",icon="EDITMODE_HLT")
        row.prop(self, "show_on_cage",text="",icon="OUTLINER_DATA_MESH")

    def toggle_status(self, context, mod):
        """Flip every enabled show_* visibility flag on *mod*.

        Simplified from per-flag if/else blocks to `flag = not flag`;
        behaviour is identical.
        """
        if self.show_viewport:
            mod.show_viewport = not mod.show_viewport
        if self.show_render:
            mod.show_render = not mod.show_render
        if self.show_in_editmode:
            mod.show_in_editmode = not mod.show_in_editmode
        if self.show_on_cage:
            mod.show_on_cage = not mod.show_on_cage

    def execute(self, context):
        """Toggle the matching modifiers on every selected object."""
        old_act = bpy.context.view_layer.objects.active
        for obj in bpy.context.selected_objects:
            bpy.context.view_layer.objects.active = obj
            if not len(obj.modifiers):
                continue  # nothing to toggle on this object
            for mod in obj.modifiers:
                if self.only_same_name:
                    if mod.name == self.mod_name:
                        self.toggle_status(context,mod)
                else:
                    if mod.type == self.mod_type:
                        self.toggle_status(context,mod)
        bpy.context.view_layer.objects.active = old_act
        return {'FINISHED'}
class AUTOMIRROR_OT_mirror_target_set(Operator):
    '''Set the Active object as the 'mirror target object' of the mirror modifier'''
    bl_idname = "automirror.target_set"
    bl_label = "Target Set"
    bl_options = {'REGISTER', 'UNDO'}
    bl_description = "Set the Active object as the 'mirror target object' of the mirror modifier"

    # Clear targets instead of setting them.
    clear : BoolProperty(name="Clear")
    # Match modifiers by name instead of touching every mirror modifier.
    only : BoolProperty(name="Only Modifier Name")
    mod_name : StringProperty(default="mirror_mirror",name="Modifier Name")

    @classmethod
    def poll(cls, context):
        return context.active_object is not None

    def draw(self, context):
        layout = self.layout
        row = layout.row(align=True)
        row.prop(self, "clear")
        col = layout.column(align=True)
        col.prop(self, "only")
        row = col.row(align=True)
        if not self.only:
            row.active=False
        row.prop(self, "mod_name")

    def _matches(self, mod):
        """True when *mod* is a mirror modifier this operator should touch.

        Factors out the duplicated type/name condition from the two
        execute() branches.
        """
        if mod.type != "MIRROR":
            return False
        return (not self.only) or mod.name == self.mod_name

    def execute(self, context):
        """Set (or clear) the mirror_object on matching modifiers of every
        selected object; the active object is the target."""
        if self.clear:
            try:
                for obj in bpy.context.selected_objects:
                    for mod in obj.modifiers:
                        if self._matches(mod):
                            mod.mirror_object = None
            except Exception:
                # Narrowed from a bare 'except:'; stay best-effort but do
                # not swallow KeyboardInterrupt/SystemExit.
                pass
            return {'FINISHED'}
        act_obj = bpy.context.object
        try:
            for obj in bpy.context.selected_objects:
                if obj == act_obj:
                    continue  # never mirror an object to itself
                for mod in obj.modifiers:
                    if self._matches(mod):
                        mod.mirror_object = act_obj
        except Exception:
            pass
        return {'FINISHED'}
class AUTOMIRROR_OT_modifier_add(Operator):
    """Add a modifier of the chosen type to every selected object."""
    bl_idname = "automirror.modifier_add"
    bl_label = "Add Modifier"
    bl_description = "Add a specific modifier on the selected object"
    bl_options = {'REGISTER', 'UNDO'}

    # Optional custom name; defaults to the type's display name.
    only_same_name : BoolProperty(name="Only Modifier Name")
    mod_name : StringProperty(default="",name="Modifier Name")
    global mod_items
    mod_type : EnumProperty(default="MIRROR",name="Modifier Type", description="",items = mod_items)

    @classmethod
    def poll(cls, context):
        return not len(bpy.context.selected_objects) == 0

    def draw(self, context):
        layout = self.layout
        col = layout.column(align=True)
        row = col.row(align=True)
        row.active=(not self.only_same_name)
        row.prop(self, "mod_type")
        col.prop(self, "only_same_name")
        row = col.row(align=True)
        row.active=self.only_same_name
        row.prop(self, "mod_name")

    def execute(self, context):
        """Add the modifier; the name falls back to get_mode_name(type).

        NOTE(review): the loop leaves the last selected object active (the
        restore line is deliberately commented out) — confirm that is the
        intended behaviour before changing it.
        """
        # old_act = bpy.context.view_layer.objects.active
        if self.mod_name:
            mod_name = self.mod_name
        else:
            mod_name = get_mode_name(self.mod_type)
        for obj in bpy.context.selected_objects:
            bpy.context.view_layer.objects.active = obj
            mod = obj.modifiers.new(mod_name,self.mod_type)
        # bpy.context.view_layer.objects.active = old_act
        return {'FINISHED'}
class AUTOMIRROR_OT_mirror_apply(Operator):
    '''Apply a specific modifier on the selected object'''
    bl_idname = "automirror.apply"
    bl_label = "Apply Modifier"
    bl_description = "Apply a specific modifier on the selected object"
    bl_options = {'REGISTER', 'UNDO'}

    # Match modifiers by name instead of by type.
    only_same_name : BoolProperty(name="Only Modifier Name")
    mod_name : StringProperty(default="Mirror",name="Modifier Name")
    global mod_items
    mod_type : EnumProperty(default="MIRROR",name="Modifier Type", description="",items = mod_items)

    @classmethod
    def poll(cls, context):
        return not len(bpy.context.selected_objects) == 0

    def draw(self, context):
        layout = self.layout
        col = layout.column(align=True)
        row = col.row(align=True)
        row.active=(not self.only_same_name)
        row.prop(self, "mod_type")
        col.prop(self, "only_same_name")
        row = col.row(align=True)
        row.active=self.only_same_name
        row.prop(self, "mod_name")

    def execute(self, context):
        """Apply every matching modifier on each selected object."""
        old_act = bpy.context.view_layer.objects.active
        for obj in bpy.context.selected_objects:
            # modifier_apply acts on the active object.
            bpy.context.view_layer.objects.active = obj
            # Fix: iterate over a snapshot. modifier_apply removes the
            # modifier from obj.modifiers, and mutating the collection
            # while looping over it can skip entries.
            for mod in list(obj.modifiers):
                if self.only_same_name:
                    if mod.name == self.mod_name:
                        bpy.ops.object.modifier_apply(modifier=mod.name)
                else:
                    if mod.type == self.mod_type:
                        bpy.ops.object.modifier_apply(modifier=mod.name)
        bpy.context.view_layer.objects.active = old_act
        return {'FINISHED'}
class AUTOMIRROR_OT_mirror_remove(Operator):
    '''Remove a specific modifier on the selected object'''
    bl_idname = "automirror.remove"
    bl_label = "Remove Modifier"
    bl_options = {'REGISTER', 'UNDO'}
    bl_description = "Remove a specific modifier on the selected object"

    # Match modifiers by name instead of by type.
    only_same_name : BoolProperty(name="Only Modifier Name")
    mod_name : StringProperty(default="Mirror",name="Modifier Name")
    global mod_items
    mod_type : EnumProperty(default="MIRROR",name="Modifier Type", description="",items = mod_items)

    @classmethod
    def poll(cls, context):
        return not len(bpy.context.selected_objects) == 0

    def draw(self, context):
        layout = self.layout
        col = layout.column(align=True)
        row = col.row(align=True)
        row.active=(not self.only_same_name)
        row.prop(self, "mod_type")
        col.prop(self, "only_same_name")
        row = col.row(align=True)
        row.active=self.only_same_name
        row.prop(self, "mod_name")

    def execute(self, context):
        """Remove every matching modifier from each selected object."""
        for obj in bpy.context.selected_objects:
            # Fix: iterate over a snapshot. Removing from obj.modifiers
            # while iterating the live collection can skip entries (e.g.
            # two matching modifiers in a row).
            for mod in list(obj.modifiers):
                if self.only_same_name:
                    if mod.name == self.mod_name:
                        obj.modifiers.remove(modifier=mod)
                else:
                    if mod.type == self.mod_type:
                        obj.modifiers.remove(modifier=mod)
        return {'FINISHED'}
class AUTOMIRROR_OT_modifier_sort(Operator):
    """Move matching modifiers up or down the stack of every selected object."""
    bl_idname = "automirror.modifier_sort"
    bl_label = "Modifier Sort"
    bl_description = ""
    bl_options = {'REGISTER', 'UNDO'}

    # Move toward the bottom of the stack instead of the top.
    is_down : BoolProperty(name="Down")
    only_same_name : BoolProperty(name="Only Modifier Name")
    mod_name : StringProperty(default="Mirror",name="Modifier Name")
    # Number of steps to move; 0 means "as far as possible".
    move_count : IntProperty(default=0,name="Move Count",min=0)
    global mod_items
    mod_type : EnumProperty(default="MIRROR",name="Modifier Type", description="",items = mod_items)

    @classmethod
    def poll(cls, context):
        return not len(bpy.context.selected_objects) == 0

    def draw(self, context):
        layout = self.layout
        col = layout.column(align=True)
        row = col.row(align=True)
        row.active=(not self.only_same_name)
        row.prop(self, "mod_type")
        col.prop(self, "only_same_name")
        row = col.row(align=True)
        row.active=self.only_same_name
        row.prop(self, "mod_name")
        col.separator()
        row = col.row(align=True)
        row.label(text="",icon="SORT_ASC")
        row.prop(self, "is_down")
        row = col.row(align=True)
        row.label(text="",icon="BLANK1")
        row.prop(self, "move_count")

    def execute(self, context):
        old_act = bpy.context.view_layer.objects.active
        for obj in bpy.context.selected_objects:
            # modifier_move_up/down operate on the active object.
            bpy.context.view_layer.objects.active = obj
            if not len(obj.modifiers) > 1:
                continue  # nothing to reorder
            for mod in obj.modifiers:
                if self.only_same_name:
                    if mod.name == self.mod_name:
                        self.mod_sort_fanc(context,obj,mod)
                else:
                    if mod.type == self.mod_type:
                        self.mod_sort_fanc(context,obj,mod)
        bpy.context.view_layer.objects.active = old_act
        return {'FINISHED'}

    def mod_sort_fanc(self, context, obj, mod):
        """Move *mod* by move_count steps (0 = as many steps as there are
        modifiers, i.e. all the way to the top/bottom of the stack)."""
        if self.move_count == 0:
            # move_count is 0 here, so this is simply len(obj.modifiers).
            modindex = len(obj.modifiers) - self.move_count
        else:
            modindex = self.move_count
        for i in range(modindex):
            if self.is_down:
                bpy.ops.object.modifier_move_down(modifier=mod.name)
            else:
                bpy.ops.object.modifier_move_up(modifier=mod.name)

    # Repeat as many times as there are modifiers to move it to the very top.
    # modindex = len(obj.modifiers)
    #
    # # if self.move_count < 0:
    # #     modindex = len(obj.modifiers) - self.move_count
    # # else:
    # #     modindex = len(obj.modifiers) - self.move_count
    # #
    # #
    # for i in range(modindex):
    #     # Stop once the order is already right.
    #     tgt_mod_index = obj.modifiers.find(mod.name)
    #     if tgt_mod_index == len(obj.modifiers):
    #         return
    #     if self.move_count < 0:
    #
    #
    #         bpy.ops.object.modifier_move_down(modifier=mod.name)
    #     else:
    #         if tgt_mod_index == 0:
    #             return
    #         bpy.ops.object.modifier_move_up(modifier=mod.name)
class AUTOMIRROR_OT_main(Operator):
    """ Automatically cut an object along an axis """
    bl_idname = "automirror.automirror"
    bl_label = "AutoMirror"
    bl_options = {'REGISTER', 'UNDO'}

    # Operator options; invoke() initialises them from the scene settings.
    sort_top_mod : BoolProperty(name="Sort first Modifier",default=True)
    apply_mirror : BoolProperty(description="Apply the mirror modifier (useful to symmetrise the mesh)")
    cut : BoolProperty(default= True, description="If enabeled, cut the mesh in two parts and mirror it. If not, just make a loopcut")
    show_on_cage : BoolProperty(description="Enable to edit the cage (it's the classical modifier's option)")
    threshold : FloatProperty(default= 0.001, min= 0, description="Vertices closer than this distance are merged on the loopcut")
    toggle_edit : BoolProperty(default= False, description="If not in edit mode, change mode to edit")
    use_clip : BoolProperty(default=True, description="Use clipping for the mirror modifier")
    axis_x : BoolProperty(name="Axis X",default=True)
    axis_y : BoolProperty(name="Axis Y")
    axis_z : BoolProperty(name="Axis Z")
    # Set by the "Quick Axis" panel buttons: use their axis choice once
    # without writing it back to the scene settings.
    axis_quick_override : BoolProperty(name="axis_quick_override", description="Axis used by the mirror modifier")
    orientation : EnumProperty(description="Choose the side along the axis of the editable part (+/- coordinates)",items = [
        ("positive", "Positive", "", "ADD", 0),
        ("negative", "Negative","", "REMOVE", 1)])

    @classmethod
    def poll(cls, context):
        # Only meaningful for mesh objects.
        obj = context.active_object
        return obj and obj.type == "MESH"

    def invoke(self, context, event):
        # Pull defaults from the per-scene AutoMirror settings, then run.
        props = bpy.context.scene.automirror
        self.apply_mirror = props.apply_mirror
        self.cut = props.cut
        self.show_on_cage = props.show_on_cage
        self.threshold = props.threshold
        self.toggle_edit = props.toggle_edit
        self.use_clip = props.use_clip
        self.orientation = props.orientation
        if not self.axis_quick_override:
            self.axis_x = props.axis_x
            self.axis_y = props.axis_y
            self.axis_z = props.axis_z
        return self.execute(context)

    def draw(self, context):
        layout = self.layout
        row = layout.row(align=True)
        row.prop(self,"axis_x",text="X",toggle=True)
        row.prop(self,"axis_y",text="Y",toggle=True)
        row.prop(self,"axis_z",text="Z",toggle=True)
        row = layout.row(align=True)
        row.prop(self, "orientation", text="Orientation", expand=True)
        layout.prop(self, "threshold", text="Threshold")
        layout.prop(self, "toggle_edit", text="Toggle Edit")
        layout.prop(self, "cut", text="Cut and Mirror")
        if self.cut:
            layout.label(text="Mirror Modifier:")
            row = layout.row(align=True)
            row.label(text="",icon="AUTOMERGE_ON")
            row.prop(self, "use_clip", text="Use Clip")
            row = layout.row(align=True)
            row.label(text="",icon="OUTLINER_DATA_MESH")
            row.prop(self, "show_on_cage", text="Editable")
            row = layout.row(align=True)
            row.label(text="",icon="SORT_DESC")
            row.prop(self, "sort_top_mod")
            row = layout.row(align=True)
            row.label(text="",icon="CHECKBOX_HLT")
            row.prop(self, "apply_mirror", text="Apply Modifier")
        else:
            layout.label(text="Only Bisect")

    def get_local_axis_vector(self, context, X, Y, Z, orientation,obj):
        """Return the cut-plane normal for the requested local axis.

        Translates the object one unit along its local axis and back,
        sampling the location before and after; the difference is the
        world-space direction of that local axis.
        """
        loc = obj.location
        bpy.ops.object.mode_set(mode="OBJECT") # Needed to avoid to translate vertices
        v1 = Vector((loc[0],loc[1],loc[2]))
        bpy.ops.transform.translate(value=(X*orientation, Y*orientation, Z*orientation),
            constraint_axis=((X==1), (Y==1), (Z==1)),
            orient_type='LOCAL')
        v2 = Vector((loc[0],loc[1],loc[2]))
        # Undo the probe translation.
        bpy.ops.transform.translate(value=(-X*orientation, -Y*orientation, -Z*orientation),
            constraint_axis=((X==1), (Y==1), (Z==1)),
            orient_type='LOCAL')
        bpy.ops.object.mode_set(mode="EDIT")
        return v2-v1

    def bisect_main(self, context, X, Y, Z, orientation,obj):
        """Bisect the mesh through the object origin along one local axis.

        When self.cut is set the inner half is deleted, ready for the
        mirror modifier to rebuild it.
        """
        cut_normal = self.get_local_axis_vector(context, X, Y, Z, orientation,obj)
        # plane_no=[X*orientation,Y*orientation,Z*orientation],
        # Cut the mesh
        bpy.ops.mesh.bisect(
            plane_co=(
                obj.location[0],
                obj.location[1],
                obj.location[2]
            ),
            plane_no=cut_normal,
            use_fill= False,
            clear_inner= self.cut,
            clear_outer= 0,
            threshold= self.threshold)

    def execute(self, context):
        props = bpy.context.scene.automirror
        sc = bpy.context.scene
        if not (self.axis_x or self.axis_y or self.axis_z):
            self.report({'WARNING'}, "No axis")
            return {'FINISHED'}
        # info text
        info_text = "Add"
        text_X = "*"
        text_Y = "*"
        text_Z = "*"
        if self.axis_x:
            text_X = "X"
        if self.axis_y:
            text_Y = "Y"
        if self.axis_z:
            text_Z = "Z"
        if self.apply_mirror:
            info_text = "Apply"
        if self.orientation == 'positive':
            orientation = 1
        else:
            orientation = -1
        # Iterate over the selected objects, processing one at a time
        # (only one object selected/active while bisect/modifier ops run).
        old_obj = bpy.context.view_layer.objects.active
        old_sel = bpy.context.selected_objects
        for obj in bpy.context.selected_objects:
            obj.select_set(False)
        for obj in old_sel:
            if obj.type == "MESH":
                obj.select_set(True)
                bpy.context.view_layer.objects.active = obj
                X,Y,Z = 0,0,0
                current_mode = obj.mode # Save the current mode
                if obj.mode != "EDIT":
                    bpy.ops.object.mode_set(mode="EDIT") # Go to edit mode
                ##############################################
                # Delete the opposite side (one bisect per enabled axis)
                if self.axis_x:
                    bpy.ops.mesh.select_all(action='SELECT')
                    X = 1
                    Y = 0
                    Z = 0
                    self.bisect_main(context, X, Y, Z, orientation,obj)
                if self.axis_y:
                    bpy.ops.mesh.select_all(action='SELECT')
                    X = 0
                    Y = 1
                    Z = 0
                    self.bisect_main(context, X, Y, Z, orientation,obj)
                if self.axis_z:
                    bpy.ops.mesh.select_all(action='SELECT')
                    X = 0
                    Y = 0
                    Z = 1
                    self.bisect_main(context, X, Y, Z, orientation,obj)
                ##############################################
                # Modifier
                if self.cut:
                    mod = obj.modifiers.new("Mirror","MIRROR")
                    mod.use_axis[0] = self.axis_x # Choose the axis to use, based on the cut's axis
                    mod.use_axis[1] = self.axis_y
                    mod.use_axis[2] = self.axis_z
                    mod.use_clip = self.use_clip
                    mod.show_on_cage = self.show_on_cage
                    sort_top_mod(self,context,obj,mod,1)
                    if self.apply_mirror:
                        bpy.ops.object.mode_set(mode='OBJECT')
                        bpy.ops.object.modifier_apply(modifier= mod.name)
                        if self.toggle_edit:
                            bpy.ops.object.mode_set(mode='EDIT')
                        else:
                            bpy.ops.object.mode_set(mode=current_mode)
                    if not self.toggle_edit:
                        bpy.ops.object.mode_set(mode=current_mode) # Reload previous mode
                obj.select_set(False)
        # Restore the original selection and active object.
        for obj in old_sel:
            obj.select_set(True)
        bpy.context.view_layer.objects.active = old_obj
        # Write the (possibly changed) options back to the scene settings.
        props.apply_mirror = self.apply_mirror
        props.cut = self.cut
        props.show_on_cage = self.show_on_cage
        props.threshold = self.threshold
        props.toggle_edit = self.toggle_edit
        props.use_clip = self.use_clip
        props.orientation = self.orientation
        if not self.axis_quick_override:
            props.axis_x = self.axis_x
            props.axis_y = self.axis_y
            props.axis_z = self.axis_z
        else:
            # Quick Axis buttons are one-shot; reset the override flag.
            self.axis_quick_override = False
        if len(bpy.context.selected_objects) == 1:
            self.report({'INFO'}, "%s Mirror Modifier [%s,%s,%s]" % (info_text, text_X, text_Y, text_Z))
        else:
            sel_obj = str(len(bpy.context.selected_objects))
            self.report({'INFO'}, "%s Mirror Modifier [%s,%s,%s] to %s object" % (info_text, text_X, text_Y, text_Z, sel_obj))
        return {'FINISHED'}
def sort_top_mod(self, context, obj, mod, move_count):
    """Move *mod* toward the top of obj's modifier stack.

    Does nothing unless the user enabled the "sort first modifier" option
    or the object has more than one modifier.
    """
    # Bail out BEFORE touching any state: the original changed the active
    # object first and then returned early, leaving the view layer's
    # active object switched to `obj` instead of restoring it.
    if not self.sort_top_mod:
        return
    if not len(obj.modifiers) > 1:
        return
    old_act = bpy.context.view_layer.objects.active
    # modifier_move_up operates on the active object, so activate obj.
    bpy.context.view_layer.objects.active = obj
    try:
        # modifier_move_up shifts one slot per call; repeat until the
        # modifier sits `move_count` slots from the top.
        modindex = len(obj.modifiers) - move_count
        for _ in range(modindex):
            bpy.ops.object.modifier_move_up(modifier=mod.name)
    finally:
        # Always restore the previously active object, even if an
        # operator call raises.
        bpy.context.view_layer.objects.active = old_act
class AUTOMIRROR_PT_panel(Panel):
    """Sidebar UI for the Auto Mirror add-on (3D View > Sidebar > Tools)."""
    bl_space_type = 'VIEW_3D'
    bl_region_type = 'UI'
    bl_category = 'Tools'
    bl_label = "Auto Mirror"
    bl_options = {'DEFAULT_CLOSED'}

    @classmethod
    def poll(cls, context):
        # Only show the panel while some object is active.
        obj = bpy.context.object
        return obj is not None

    def draw(self, context):
        layout = self.layout
        props = bpy.context.scene.automirror

        # --- Main operator row plus view-toggle / target-set shortcuts ---
        box = layout.box()
        row = box.row(align=True)
        row.scale_y = 1.2
        row.operator("automirror.automirror")
        row.separator()
        rows = row.row(align=True)
        # NOTE(review): this scales `row`, not the `rows` sub-row created
        # just above — possibly unintended; confirm before changing.
        row.scale_x = 1.5
        rows.operator("automirror.toggle_mirror", text="", icon="RESTRICT_VIEW_OFF")
        rows.operator("automirror.target_set", text="", icon="OBJECT_DATAMODE")

        # --- Quick single-axis buttons (override the saved axis options) ---
        sp = box.split(align=True, factor=0.3)
        sp.label(text="Quick Axis")
        row = sp.row()
        row.scale_y = 1.2
        am = row.operator("automirror.automirror", text="X")
        am.axis_quick_override = True
        am.axis_x = True
        am.axis_y = False
        am.axis_z = False
        am = row.operator("automirror.automirror", text="Y")
        am.axis_quick_override = True
        am.axis_x = False
        am.axis_y = True
        am.axis_z = False
        am = row.operator("automirror.automirror", text="Z")
        am.axis_quick_override = True
        am.axis_x = False
        am.axis_y = False
        am.axis_z = True

        # --- Collapsible option section ---
        row = box.row(align=True)
        row.alignment = "LEFT"
        row.prop(props, "toggle_option", text="Option", icon="TRIA_DOWN" if props.toggle_option else "TRIA_RIGHT", emboss=False)
        if props.toggle_option:
            box = box.box()
            col = box.column()
            row = col.row(align=True)
            row.prop(props, "axis_x", text="X", toggle=True)
            row.prop(props, "axis_y", text="Y", toggle=True)
            row.prop(props, "axis_z", text="Z", toggle=True)
            row = col.row(align=True)
            row.prop(props, "orientation", text="Orientation", expand=True)
            col.prop(props, "threshold", text="Threshold")
            col.prop(props, "toggle_edit", text="Toggle Edit")
            col.prop(props, "cut", text="Cut and Mirror")
            if props.cut:
                # Mirror-modifier options only matter when the mesh is
                # actually cut and mirrored.
                col = box.column(align=True)
                col.label(text="Mirror Modifier:")
                row = col.row(align=True)
                row.label(text="", icon="AUTOMERGE_ON")
                row.prop(props, "use_clip", text="Use Clip")
                row = col.row(align=True)
                row.label(text="", icon="OUTLINER_DATA_MESH")
                row.prop(props, "show_on_cage", text="Editable")
                row = col.row(align=True)
                row.label(text="", icon="SORT_DESC")
                row.prop(props, "sort_top_mod")
                row = col.row(align=True)
                row.label(text="", icon="CHECKBOX_HLT")
                row.prop(props, "apply_mirror", text="Apply Mirror")
            else:
                box.label(text="Only Bisect")

        # --- "Mirror Mirror" single-axis operators ---
        box = layout.box()
        sp = box.split(align=True, factor=0.3)
        sp.label(text="Mirror Mirror")
        row = sp.row()
        row.scale_y = 1.2
        mm = row.operator("automirror.mirror_mirror", text="X")
        mm.axis_x = True
        mm.axis_y = False
        mm.axis_z = False
        mm = row.operator("automirror.mirror_mirror", text="Y")
        mm.axis_x = False
        mm.axis_y = True
        mm.axis_z = False
        mm = row.operator("automirror.mirror_mirror", text="Z")
        mm.axis_x = False
        # BUG FIX: the "Z" button previously set axis_y = True and
        # axis_z = False (copy-paste from the "Y" button), so pressing
        # "Z" mirrored along the Y axis.
        mm.axis_y = False
        mm.axis_z = True

        # --- Modifier management row ---
        row = layout.row(align=True)
        row.scale_x = 1.2
        row.menu("AUTOMIRROR_MT_modifier_add", text="", icon="ADD")
        row.separator()
        row.operator("automirror.apply", text="Apply", icon="FILE_TICK")
        row.operator("automirror.remove", text="Remove", icon="X")
        row.separator()
        row.operator("automirror.modifier_sort", text="", icon="SORT_DESC")
class AUTOMIRROR_MT_modifier_add(Menu):
    """Menu listing every modifier type that Auto Mirror can add."""
    bl_label = "Add"

    def draw(self, context):
        layout = self.layout
        global mod_items
        # Each entry is indexable as (identifier, label, ...).
        for item in mod_items:
            entry = layout.operator("automirror.modifier_add", text=item[1], icon="ADD")
            entry.mod_type = item[0]
class AUTOMIRROR_Props(PropertyGroup):
    """Scene-level settings for Auto Mirror, attached as
    ``bpy.types.Scene.automirror`` in register()."""
    # Whether newly created Mirror modifiers are moved to the top of the stack.
    sort_top_mod : BoolProperty(name="Sort first Modifier",default=True)
    # UI state: expands/collapses the option sub-panel.
    toggle_option : BoolProperty(name="Toggle Option")
    apply_mirror : BoolProperty(description="Apply the mirror modifier (useful to symmetrise the mesh)")
    # Tooltip typo fixed: "enabeled" -> "enabled".
    cut : BoolProperty(default= True, description="If enabled, cut the mesh in two parts and mirror it. If not, just make a loopcut")
    show_on_cage : BoolProperty(description="Enable to edit the cage (it's the classical modifier's option)")
    threshold : FloatProperty(default= 0.001, min= 0.001, description="Vertices closer than this distance are merged on the loopcut")
    toggle_edit : BoolProperty(description="If not in edit mode, change mode to edit")
    use_clip : BoolProperty(default=True, description="Use clipping for the mirror modifier")
    # Which axes the bisect/mirror operates on.
    axis_x : BoolProperty(name="Axis X",default=True)
    axis_y : BoolProperty(name="Axis Y")
    axis_z : BoolProperty(name="Axis Z")
    axis : EnumProperty(name="Axis", description="Axis used by the mirror modifier",items = [
        ("x", "X", "", 1),
        ("y", "Y", "", 2),
        ("z", "Z", "", 3)])
    orientation : EnumProperty(description="Choose the side along the axis of the editable part (+/- coordinates)",items = [
        ("positive", "Positive", "", "ADD", 1),
        ("negative", "Negative","", "REMOVE", 2)])
    # Object used by the "mirror mirror" target-set operator.
    mm_target_obj : PointerProperty(name="Target Object", type=bpy.types.Object)
# Add-ons Preferences Update Panel. Define Panel classes for updating
panels = (
    AUTOMIRROR_PT_panel,
)
# define classes for registration
# (menus, operators, the property group and the panel; registered in this
# order by register() and unregistered in reverse by unregister())
classes = (
    AUTOMIRROR_MT_AddonPreferences,
    AUTOMIRROR_MT_modifier_add,
    AUTOMIRROR_OT_main,
    AUTOMIRROR_OT_mirror_apply,
    AUTOMIRROR_OT_mirror_remove,
    AUTOMIRROR_OT_mirror_target_set,
    AUTOMIRROR_OT_mirror_toggle,
    AUTOMIRROR_OT_MirrorMirror,
    AUTOMIRROR_OT_modifier_add,
    AUTOMIRROR_OT_modifier_sort,
    AUTOMIRROR_Props,
    AUTOMIRROR_PT_panel,
)
# (keymap, keymap_item) pairs created by add_keymap_automirror(); kept so
# unregister() can remove them again.
addon_keymaps = []
# Keymap List
def add_keymap_automirror():
    """Register the add-on's default shortcuts on the 3D View keymap."""
    wm = bpy.context.window_manager
    km = wm.keyconfigs.addon.keymaps.new(name='3D View Generic', space_type='VIEW_3D')

    # Ctrl+Alt+Shift+X: run the main Auto Mirror operator.
    kmi = km.keymap_items.new("automirror.automirror", 'X', 'PRESS', alt=True, shift=True, ctrl=True)
    addon_keymaps.append((km, kmi))

    # Alt+Shift+<axis key>: "mirror mirror" along that single axis.
    for key in ('X', 'Y', 'Z'):
        kmi = km.keymap_items.new("automirror.mirror_mirror", key, 'PRESS', alt=True, shift=True)
        kmi.properties.axis_x = (key == 'X')
        kmi.properties.axis_y = (key == 'Y')
        kmi.properties.axis_z = (key == 'Z')
        addon_keymaps.append((km, kmi))

    # Alt+Shift+F: toggle mirror visibility.
    kmi = km.keymap_items.new("automirror.toggle_mirror", 'F', 'PRESS', alt=True, shift=True)
    addon_keymaps.append((km, kmi))
def remove_keymap_automirror():
    """Remove every keymap item this add-on registered and forget them.

    BUG FIX: the original iterated each (km, kmi) tuple as if it were a
    list of pairs (`for km, kmi in l`) and then called `l.clear()` and
    `del l[:]` on the tuple itself — both raise at runtime.  addon_keymaps
    holds (keymap, keymap_item) pairs appended by add_keymap_automirror().
    """
    for km, kmi in addon_keymaps:
        km.keymap_items.remove(kmi)
    addon_keymaps.clear()
def register():
    """Register all add-on classes, refresh panel/keymap preferences and
    attach the settings PropertyGroup to the scene."""
    for cls in classes:
        bpy.utils.register_class(cls)
    # update_panel/update_keymap are defined earlier in this file and
    # re-apply the user's category / shortcut preferences — assumed; confirm
    # against their definitions.
    update_panel(None, bpy.context)
    update_keymap(None, bpy.context)
    # Per-scene settings live under bpy.context.scene.automirror.
    bpy.types.Scene.automirror = PointerProperty(type=AUTOMIRROR_Props)
def unregister():
    """Unregister all classes (in reverse order) and drop the add-on's
    keymap items."""
    for cls in reversed(classes):
        bpy.utils.unregister_class(cls)
    # Remove the shortcuts recorded by add_keymap_automirror().
    for km, kmi in addon_keymaps:
        km.keymap_items.remove(kmi)
    addon_keymaps.clear()
# Allow running the add-on directly from Blender's text editor.
if __name__ == "__main__":
    register()
| StarcoderdataPython |
92449 |
class UIColors:
    """Color indices for the UI (C64 original) palette.

    Values are indices into that palette, not RGB values.
    """
    white = 2
    lightgrey = 16
    medgrey = 13
    darkgrey = 12
    black = 1
    yellow = 8
    red = 3
    brightred = 11
| StarcoderdataPython |
3440108 | <filename>software/pynguin/pynguin/setup/testcluster.py
# This file is part of Pynguin.
#
# Pynguin is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Pynguin is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with Pynguin. If not, see <https://www.gnu.org/licenses/>.
"""Provides a test cluster."""
from __future__ import annotations
from typing import Any, Dict, List, Optional, Set, Type, cast
from typing_inspect import get_args, is_union_type
from pynguin.utils import randomness, type_utils
from pynguin.utils.exceptions import ConstructionFailedException
from pynguin.utils.generic.genericaccessibleobject import GenericAccessibleObject
from pynguin.utils.type_utils import PRIMITIVES
class TestCluster:
    """A test cluster which contains all methods/constructors/functions
    and all required transitive dependencies.
    """

    def __init__(self):
        """Create a new, empty test cluster."""
        # Maps a generated type to the accessibles able to produce it.
        # (Plain dict literals replace the original's redundant
        # cast(..., dict()) wrappers.)
        self._generators: Dict[Type, Set[GenericAccessibleObject]] = {}
        # Maps a type to the accessibles (e.g. methods) that can modify it.
        self._modifiers: Dict[Type, Set[GenericAccessibleObject]] = {}
        # Accessibles that belong to the unit under test itself.
        self._accessible_objects_under_test: Set[GenericAccessibleObject] = set()

    def add_generator(self, generator: GenericAccessibleObject) -> None:
        """Add the given accessible as a generator, if the type is known, not
        primitive and not NoneType."""
        type_ = generator.generated_type()
        if (
            type_ is None
            or type_utils.is_none_type(type_)
            or type_utils.is_primitive_type(type_)
        ):
            return
        # setdefault replaces the explicit membership check + two branches.
        self._generators.setdefault(type_, set()).add(generator)

    def add_accessible_object_under_test(self, obj: GenericAccessibleObject) -> None:
        """Add accessible object to the objects under test."""
        self._accessible_objects_under_test.add(obj)

    def add_modifier(self, type_: Type, obj: GenericAccessibleObject) -> None:
        """Add a modifier, e.g. something that can be used to modify the given
        type, such as a method."""
        self._modifiers.setdefault(type_, set()).add(obj)

    @property
    def accessible_objects_under_test(self) -> Set[GenericAccessibleObject]:
        """Provides all accessible objects that are under test."""
        return self._accessible_objects_under_test

    def num_accessible_objects_under_test(self) -> int:
        """Provide the number of accessible objects under test.

        This is useful to check if there even is something to test."""
        return len(self._accessible_objects_under_test)

    def get_generators_for(self, for_type: Type) -> Set[GenericAccessibleObject]:
        """Retrieve all generators known for the given type within the test
        cluster; an empty set if the type is unknown."""
        return self._generators.get(for_type, set())

    def get_modifiers_for(self, for_type: Type) -> Set[GenericAccessibleObject]:
        """Get all known modifiers of a type; an empty set if unknown.

        This currently does not take inheritance into account."""
        return self._modifiers.get(for_type, set())

    @property
    def generators(self) -> Dict[Type, Set[GenericAccessibleObject]]:
        """Provides all available generators."""
        return self._generators

    @property
    def modifiers(self) -> Dict[Type, Set[GenericAccessibleObject]]:
        """Provides all available modifiers."""
        return self._modifiers

    def get_random_accessible(self) -> Optional[GenericAccessibleObject]:
        """Provide a random accessible of the unit under test, or None when
        there are none."""
        if self.num_accessible_objects_under_test() == 0:
            return None
        return randomness.choice(list(self._accessible_objects_under_test))

    def get_random_call_for(self, type_: Type) -> GenericAccessibleObject:
        """Get a random modifier for the given type.

        Raises:
            ConstructionFailedException: if no modifier is known for the type.
        """
        accessible_objects = self.get_modifiers_for(type_)
        if len(accessible_objects) == 0:
            raise ConstructionFailedException("No modifiers for " + str(type_))
        return randomness.choice(list(accessible_objects))

    def get_all_generatable_types(self) -> List[Type]:
        """Provides all types that can be generated, including primitives."""
        generatable = list(self._generators.keys())
        generatable.extend(PRIMITIVES)
        return generatable

    def select_concrete_type(self, select_from: Optional[Type]) -> Optional[Type]:
        """Select a concrete type from the given type.

        This is required e.g. when handling union types.
        Currently only unary types, Any and Union are handled."""
        if select_from == Any:
            return randomness.choice(self.get_all_generatable_types())
        if is_union_type(select_from):
            possible_types = get_args(select_from)
            if possible_types is not None and len(possible_types) > 0:
                return randomness.choice(possible_types)
            # Union with no extractable arguments — nothing to choose from.
            return None
        return select_from
| StarcoderdataPython |
6459048 | from distutils.core import setup
from catkin_pkg.python_setup import generate_distutils_setup
# catkin helper: builds the distutils setup() arguments — presumably
# merging package metadata from package.xml with the Python-specific
# fields listed here (confirm against catkin_pkg docs).
setup_args = generate_distutils_setup(
    packages=['coord_transforms', 'xpc', 'traj_x', 'xplane_ros_utils'],
    package_dir={'':'src'}
)
setup(**setup_args) | StarcoderdataPython |
68910 | <reponame>alibaba/FederatedScope
import logging
from collections import deque
from federatedscope.core.worker import Server, Client
from federatedscope.core.gpu_manager import GPUManager
from federatedscope.core.auxiliaries.model_builder import get_model
logger = logging.getLogger(__name__)
class FedRunner(object):
    """
    This class is used to construct an FL course, which includes `_set_up` and `run`.
    Arguments:
        data: The data used in the FL courses, which are formatted as {'ID':data} for standalone mode. More details can be found in federatedscope.core.auxiliaries.data_builder .
        server_class: The server class is used for instantiating a (customized) server.
        client_class: The client class is used for instantiating a (customized) client.
        config: The configurations of the FL course.
        client_config: The clients' configurations.
    """
    def __init__(self,
                 data,
                 server_class=Server,
                 client_class=Client,
                 config=None,
                 client_config=None):
        self.data = data
        self.server_class = server_class
        self.client_class = client_class
        self.cfg = config
        self.client_cfg = client_config
        self.mode = self.cfg.federate.mode.lower()
        self.gpu_manager = GPUManager(gpu_available=self.cfg.use_gpu,
                                      specified_device=self.cfg.device)
        if self.mode == 'standalone':
            # In standalone mode all participants live in one process and
            # exchange messages through this shared in-memory queue.
            self.shared_comm_queue = deque()
            self._setup_for_standalone()
            # in standalone mode, by default, we print the trainer info only once for better logs readability
            trainer_representative = self.client[1].trainer
            if trainer_representative is not None:
                trainer_representative.print_trainer_meta_info()
        elif self.mode == 'distributed':
            self._setup_for_distributed()

    def _setup_for_standalone(self):
        """
        To set up server and client for standalone mode.
        """
        self.server = self._setup_server()
        self.client = dict()
        assert self.cfg.federate.client_num != 0, \
            "In standalone mode, self.cfg.federate.client_num should be non-zero. " \
            "This is usually cased by using synthetic data and users not specify a non-zero value for client_num"
        # assume the client-wise data are consistent in their input&output shape
        self._shared_client_model = get_model(
            self.cfg.model, self.data[1], backend=self.cfg.backend
        ) if self.cfg.federate.share_local_model else None
        if self.cfg.federate.method == "global":
            assert 0 in self.data and self.data[
                0] is not None, "In global training mode, we will use a proxy client to hold all the data. Please put the whole dataset in data[0], i.e., the same style with global evaluation mode"
            # Local import mirrors the original: avoids a module-level
            # dependency unless global training is actually used.
            from federatedscope.core.auxiliaries.data_builder import merge_data
            self.data[1] = merge_data(all_data=self.data)
        # Client IDs are 1-based; ID 0 is reserved for the server.
        for client_id in range(1, self.cfg.federate.client_num + 1):
            self.client[client_id] = self._setup_client(
                client_id=client_id, client_model=self._shared_client_model)

    def _setup_for_distributed(self):
        """
        To set up server or client for distributed mode.
        """
        self.server_address = {
            'host': self.cfg.distribute.server_host,
            'port': self.cfg.distribute.server_port
        }
        if self.cfg.distribute.role == 'server':
            self.server = self._setup_server()
        elif self.cfg.distribute.role == 'client':
            # When we set up the client in the distributed mode, we assume the server has been set up and number with #0
            self.client_address = {
                'host': self.cfg.distribute.client_host,
                'port': self.cfg.distribute.client_port
            }
            self.client = self._setup_client()

    def run(self):
        """
        To run an FL course, which is called after server/client has been set up.
        For the standalone mode, a shared message queue will be set up to simulate ``receiving message``.
        """
        if self.mode == 'standalone':
            # trigger the FL course
            for each_client in self.client:
                self.client[each_client].join_in()
            if self.cfg.federate.online_aggr:
                # any broadcast operation would be executed client-by-client to avoid the existence of #clients messages at the same time.
                # currently, only consider centralized topology
                def is_broadcast(msg):
                    # The server (sender 0) is the only broadcaster here.
                    return len(msg.receiver) >= 1 and msg.sender == 0
                # FIFO of broadcast messages still being delivered;
                # cur_idx tracks which receiver of the head message is next.
                cached_bc_msgs = []
                cur_idx = 0
                while True:
                    if len(self.shared_comm_queue) > 0:
                        msg = self.shared_comm_queue.popleft()
                        if is_broadcast(msg):
                            cached_bc_msgs.append(msg)
                            # assume there is at least one client
                            msg = cached_bc_msgs[0]
                            self._handle_msg(msg, rcv=msg.receiver[cur_idx])
                            cur_idx += 1
                            if cur_idx >= len(msg.receiver):
                                del cached_bc_msgs[0]
                                cur_idx = 0
                        else:
                            self._handle_msg(msg)
                    elif len(cached_bc_msgs) > 0:
                        # Queue drained: keep delivering the pending
                        # broadcast one receiver at a time.
                        msg = cached_bc_msgs[0]
                        self._handle_msg(msg, rcv=msg.receiver[cur_idx])
                        cur_idx += 1
                        if cur_idx >= len(msg.receiver):
                            del cached_bc_msgs[0]
                            cur_idx = 0
                    else:
                        # finished
                        break
            else:
                # Plain mode: drain the queue, delivering each message to
                # all of its receivers at once.
                while len(self.shared_comm_queue) > 0:
                    msg = self.shared_comm_queue.popleft()
                    self._handle_msg(msg)
            self.server._monitor.finish_fed_runner(fl_mode=self.mode)
            return self.server.best_results
        elif self.mode == 'distributed':
            if self.cfg.distribute.role == 'server':
                self.server.run()
                return self.server.best_results
            elif self.cfg.distribute.role == 'client':
                # NOTE: distributed clients block in run() and return None.
                self.client.join_in()
                self.client.run()

    def _setup_server(self):
        """
        Set up the server (ID 0) and return it.
        """
        self.server_id = 0
        if self.mode == 'standalone':
            if self.server_id in self.data:
                server_data = self.data[self.server_id]
                model = get_model(self.cfg.model,
                                  server_data,
                                  backend=self.cfg.backend)
            else:
                server_data = None
                model = get_model(
                    self.cfg.model, self.data[1], backend=self.cfg.backend
                )  # get the model according to client's data if the server does not own data
            kw = {'shared_comm_queue': self.shared_comm_queue}
        elif self.mode == 'distributed':
            server_data = self.data
            model = get_model(self.cfg.model,
                              server_data,
                              backend=self.cfg.backend)
            kw = self.server_address
        else:
            raise ValueError('Mode {} is not provided'.format(
                self.cfg.mode.type))
        if self.server_class:
            server = self.server_class(
                ID=self.server_id,
                config=self.cfg,
                data=server_data,
                model=model,
                client_num=self.cfg.federate.client_num,
                total_round_num=self.cfg.federate.total_round_num,
                device=self.gpu_manager.auto_choice(),
                **kw)
            if self.cfg.nbafl.use:
                # Optional NbAFL (differential privacy) wrapping.
                from federatedscope.core.trainers.trainer_nbafl import wrap_nbafl_server
                wrap_nbafl_server(server)
        else:
            raise ValueError
        logger.info('Server #{:d} has been set up ... '.format(self.server_id))
        return server

    def _setup_client(self, client_id=-1, client_model=None):
        """
        Set up a client and return it; client_id == -1 means a distributed
        client whose identity is assigned by the server later.
        """
        self.server_id = 0
        if self.mode == 'standalone':
            client_data = self.data[client_id]
            kw = {'shared_comm_queue': self.shared_comm_queue}
        elif self.mode == 'distributed':
            client_data = self.data
            kw = self.client_address
            kw['server_host'] = self.server_address['host']
            kw['server_port'] = self.server_address['port']
        else:
            raise ValueError('Mode {} is not provided'.format(
                self.cfg.mode.type))
        if self.client_class:
            # Start from the global config and overlay any per-client
            # overrides provided via client_config.
            client_specific_config = self.cfg.clone()
            if self.client_cfg:
                client_specific_config.defrost()
                client_specific_config.merge_from_other_cfg(
                    self.client_cfg.get('client_{}'.format(client_id)))
                client_specific_config.freeze()
            client = self.client_class(
                ID=client_id,
                server_id=self.server_id,
                config=client_specific_config,
                data=client_data,
                model=client_model or get_model(
                    client_specific_config.model, client_data, backend=self.cfg.backend),
                device=self.gpu_manager.auto_choice(),
                **kw)
        else:
            raise ValueError
        if client_id == -1:
            logger.info('Client (address {}:{}) has been set up ... '.format(
                self.client_address['host'], self.client_address['port']))
        else:
            logger.info(f'Client {client_id} has been set up ... ')
        return client

    def _handle_msg(self, msg, rcv=-1):
        """
        To simulate the message handling process (used only for the standalone mode)
        """
        if rcv != -1:
            # simulate broadcast one-by-one
            self.client[rcv].msg_handlers[msg.msg_type](msg)
            return
        sender, receiver = msg.sender, msg.receiver
        download_bytes, upload_bytes = msg.count_bytes()
        if not isinstance(receiver, list):
            receiver = [receiver]
        for each_receiver in receiver:
            # Receiver 0 is the server; everyone else is a client.
            if each_receiver == 0:
                self.server.msg_handlers[msg.msg_type](msg)
                self.server._monitor.track_download_bytes(download_bytes)
            else:
                self.client[each_receiver].msg_handlers[msg.msg_type](msg)
                self.client[each_receiver]._monitor.track_download_bytes(
                    download_bytes)
| StarcoderdataPython |
9737548 | <reponame>MartinKondor/leap-of-time
import pygame
from pygame.locals import *
import sys, os, traceback
if sys.platform in ["win32","win64"]: os.environ["SDL_VIDEO_CENTERED"]="1"
import PAdLib.occluder as occluder
import PAdLib.particles as particles
import PAdLib.shadow as shadow
pygame.display.init()
pygame.font.init()
screen_size = [512,512]
# A fully transparent 1x1 surface effectively hides the window icon.
icon = pygame.Surface((1,1)); icon.set_alpha(0); pygame.display.set_icon(icon)
pygame.display.set_caption("Shadows/Particles Demo with PAdLib - <NAME> - 2013")
surface = pygame.display.set_mode(screen_size)
# Off-screen surface the lighting pass renders into before compositing.
surf_lighting = pygame.Surface(screen_size)
# Shadow caster: point light in the centre of the screen.
shad = shadow.Shadow()
shad.set_radius(100.0)
shad.set_light_position([screen_size[0]/2,screen_size[1]/2])
# Radial falloff texture multiplied onto the shadow mask in draw().
surf_falloff = pygame.image.load("light_falloff100.png").convert()
# Particle emitter (red/yellow/orange fading to black — presumably a
# fire-like effect); it follows the mouse via get_input().
emitter = particles.Emitter()
emitter.set_density(200)
emitter.set_angle(90.0,360.0)
emitter.set_speed([50.0,50.0])
emitter.set_life([1.0,1.0])
emitter.set_colors([(255,0,0),(255,255,0),(255,128,0),(0,0,0)])
particle_system = particles.ParticleSystem()
particle_system.add_emitter(emitter)
def get_input():
    """Pump the event queue and move the emitter to the mouse position.

    Returns False when the user asked to quit (window close or Escape),
    True otherwise.
    """
    pos = pygame.mouse.get_pos()
    for event in pygame.event.get():
        quit_requested = (
            event.type == QUIT
            or (event.type == KEYDOWN and event.key == K_ESCAPE)
        )
        if quit_requested:
            return False
    emitter.set_position(pos)
    return True
def update(dt):
    """Advance the simulation by dt seconds."""
    #Update the particle system.
    particle_system.update(dt)
def draw():
    """Render one frame: lighting pass, scene pass, then composite."""
    # --- Lighting pass ---
    # Every particle occludes the light as a 1x1 quad at its position.
    light_blockers = [
        occluder.Occluder([[px, py], [px, py + 1], [px + 1, py + 1], [px + 1, py]])
        for px, py in (p.position for p in particle_system.particles)
    ]
    shad.set_occluders(light_blockers)
    mask, draw_pos = shad.get_mask_and_position(False)
    mask.blit(surf_falloff, (0, 0), special_flags=BLEND_MULT)
    surf_lighting.fill((50, 50, 50))
    surf_lighting.blit(mask, draw_pos, special_flags=BLEND_MAX)

    # --- Scene pass ---
    surface.fill((128, 128, 128))
    particle_system.draw(surface)

    # --- Composite pass ---
    surface.blit(surf_lighting, (0, 0), special_flags=BLEND_MULT)
    # Redraw the particle system. In this case we don't want it shadowed.
    particle_system.draw(surface)

    # Flip
    pygame.display.flip()
def main():
    """Run the demo loop at a fixed target frame rate until quit."""
    target_fps = 60
    clock = pygame.time.Clock()
    # get_input() returns False once the user asks to quit.
    while get_input():
        update(1.0 / float(target_fps))
        draw()
        clock.tick(target_fps)
    pygame.quit()
if __name__ == "__main__":
    try:
        main()
    except Exception:
        # Narrowed from a bare `except:` so KeyboardInterrupt/SystemExit
        # still terminate normally.  On a crash, print the traceback, shut
        # pygame down, and keep the console open until Enter is pressed.
        traceback.print_exc()
        pygame.quit()
        input()
| StarcoderdataPython |
4947171 | import sublime
import sys
from imp import reload
# Collect the OpenSees submodules currently loaded so they can be
# refreshed after the plugin files change.  (Idiom fixes: `is not None`
# instead of `!= None`; comprehension instead of a loop + append.)
reload_mods = [
    mod for mod in sys.modules
    if mod.startswith("OpenSees") and sys.modules[mod] is not None
]

# define order of dependencies for proper reloading
mods_load_order = [
    "lib",
    "lib.helpers",

    "commands",
    "commands.run_sequential",
    "commands.run_single_parallel",
    "commands.run_multiple_parallel"
]

# reload modules in dependency order if they were present
for suffix in mods_load_order:
    mod = "OpenSees." + suffix
    if mod in reload_mods:
        reload(sys.modules[mod])
3444803 | """
Copyright 2019 Trustees of the University of Pennsylvania
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
from typing import Any
class BasicSchema:
    """Describes a named record type as an ordered list of (field, type) pairs."""

    # Class-level declarations kept for reference; instances assign their
    # own name/fields/types in the initializers below.
    count = 0
    fields = []
    types = []

    def __init__(self, *args):
        """Dispatch on arity: (name), (name, {field: type}) or
        (name, fields, types)."""
        if len(args) == 1:
            self.init1(args[0])
        elif len(args) == 2:
            self.init2(args[0], args[1])
        elif len(args) == 3:
            self.init3(args[0], args[1], args[2])
        else:
            raise TypeError('Illegal inputs')

    def init1(self, name):
        # type: (str) -> None
        """Initialize with a name and no fields."""
        self.name = name
        self.fields = []
        self.types = []

    def init2(self, name, fields_types):
        # type: (str, dict) -> None
        """ Initialize with a name and a dictionary of attributes to types """
        self.name = name
        self.fields = []
        self.types = []
        for k in fields_types.keys():
            self.fields.append(k)
            self.types.append(fields_types[k])

    def init3(self, name, fields, types):
        # type: (str, list, list) -> None
        """ Initialize with a name and a list of field-type pairs """
        self.name = name
        self.fields = fields
        self.types = types

    def add_field(self, k, v):
        # type: (str, Any) -> None
        """Append a field named k with type v."""
        self.fields.append(k)
        self.types.append(v)

    def get_field(self, k):
        # type: (str) -> Any
        """Return the type of field k, or None if the field is unknown.

        BUG FIX: the original tested the truthiness of the index returned
        by list.index(), so the first field (index 0) wrongly reported
        None, and an unknown field raised ValueError instead of returning
        None.
        """
        try:
            return self.types[self.fields.index(k)]
        except ValueError:
            return None

    def get_name(self):
        """Return the schema's name."""
        return self.name

    def create_tuple(self, *args):
        """Build a BasicTuple conforming to this schema.

        Dispatches on the argument: no args -> blank tuple, a list of
        values in field order, or a dict of field -> value."""
        if len(args) == 0:
            return self.create_tuple_blank()
        elif isinstance(args[0], list):
            return self.create_tuple_list(args[0])
        else:
            return self.create_tuple_dict(args[0])

    def create_tuple_blank(self):
        """Create a tuple with every field unset."""
        return BasicTuple(self)

    def create_tuple_dict(self, content):
        # type: (dict) -> BasicTuple
        """Create a tuple from a dict of field -> value."""
        return BasicTuple(self, content)

    def create_tuple_list(self, content):
        # type: (list) -> BasicTuple
        """Create a tuple from a list of values given in field order."""
        dict2 = {}
        for i, k in enumerate(self.fields):
            dict2[k] = content[i]
        return BasicTuple(self, dict2)

    def __str__(self):
        """Render as Name(field:type,...)."""
        ret = self.name + '('
        for i, f in enumerate(self.fields):
            if i > 0:
                ret = ret + ','
            ret = ret + f + ':' + self.types[i]
        return ret + ")"
class BasicTuple:
    """A record instance whose fields are described by a BasicSchema."""

    # Class-level declarations kept for reference; every instance assigns
    # its own schema and data in the initializers below.
    schema = None
    data = {}

    def __init__(self, *args):
        """Dispatch on arity: (schema) -> blank tuple, or
        (schema, values dict)."""
        if len(args) == 1:
            self.init1(args[0])
        elif len(args) == 2:
            self.init2(args[0], args[1])
        else:
            raise TypeError('Illegal inputs')

    def init1(self, schema):
        # type: (BasicSchema) -> None
        """Initialize a blank tuple with every field set to None.

        BUG FIX: the original never assigned self.data, so it populated
        the *class-level* `data` dict and every BasicTuple instance
        silently shared a single field store.
        """
        self.schema = schema
        self.data = {}
        for name in schema.fields:
            self.data[name] = None

    def init2(self, schema, values):
        # type: (BasicSchema, dict) -> None
        """Initialize from an existing field -> value dict."""
        self.schema = schema
        self.data = values

    def __str__(self):
        """Render as Name(field:value,...); unset fields print as bare names."""
        ret = self.schema.get_name() + '('
        for i, f in enumerate(self.schema.fields):
            if i > 0:
                ret = ret + ','
            if f is None:
                raise TypeError("Can't have a null key")
            elif self.data is None or self.data[f] is None:
                ret = ret + f
            else:
                ret = ret + f + ':' + str(self.data[f])
        return ret + ")"

    def __getitem__(self, item):
        """Look up a field by name, or by position when given an int."""
        if isinstance(item, int):
            return self.data[self.schema.fields[item]]
        else:
            return self.data[item]

    def __setitem__(self, key, value):
        """Set a field by name, or by position when given an int.

        NOTE: unknown string keys are silently ignored (preserved from the
        original behavior).
        """
        if key in self.schema.fields:
            self.data[key] = value
        elif isinstance(key, int):
            self.data[self.schema.fields[key]] = value
| StarcoderdataPython |
6545469 | """
Created on Mon October 02 11:02:10 2017+5:30
@author: <NAME>
Code uses Python 2.7, packaged with Anaconda 4.4.0
Code developed on Elementary OS with Ubuntu 16.04 variant, with a Linux 4.10.0-33-generic as the Kernel.
Project 4:
-- Steps:
1.Create a csv file with a list of all presidents, their parties from 1920 onwards
2.Using Pandas load the .csv file into a Pandas dataframe.
3.Download data from an appropriate financial website such as Google Finance, Yahoo Finance, Quandl, CityFALCON, or another similar source
4.Calculate yearly returns for both the downloaded indices from 1920 onwards
5.Calculate measures of central tendency (mean return, median return, variance of returns) for each of the two groups
6.Represent the findings through suitable comparative graphical studies
"""
# Some Metadata about the script
__author__ = '<NAME> (<EMAIL>)'
__license__ = 'MIT'
__vcs_id__ = '$Id$'
__version__ = '1.0.0' # Versioning: http://www.python.org/dev/peps/pep-0386/
import logging # Logging class for logging in the case of an error, makes debugging easier
import sys # For exiting gracefully
import quandl # For fetching the Market Data
import pandas as pd # For fetching the data in a DataFrame
import matplotlib.pyplot as plt
def plot_group_bar_chart(plotting_dataframe):
    """
    This function is used for plotting a grouped bar chart
    :param plotting_dataframe: The DataFrame to plot; must contain the
        columns 'Party', 'Mean Annual Return', 'Median Annual Return'
        and 'Annual Variance'
    :return: None
    """
    # Setting the positions and width for the bars
    pos = list(range(len(plotting_dataframe['Mean Annual Return'])))
    width = 0.25
    # Plotting the bars
    fig, ax = plt.subplots(figsize=(10, 5))
    # NOTE(review): the label= passed to each plt.bar call below is a
    # Party value, but the explicit plt.legend(...) call at the end
    # overrides these, so the per-bar labels are effectively unused.
    # Create a bar with Mean Annual Return data,
    # in position pos,
    plt.bar(pos, plotting_dataframe['Mean Annual Return'], width, alpha=0.5,
            color='#EE3224', label=plotting_dataframe['Party'][0])
    # Create a bar with Annual Median Return data,
    # in position pos + some width buffer,
    plt.bar([p + width for p in pos], plotting_dataframe['Median Annual Return'], width, alpha=0.5,
            color='#F78F1E', label=plotting_dataframe['Party'][1])
    # Create a bar with Annual Variance data,
    # in position pos + some width buffer,
    plt.bar([p + width * 2 for p in pos], plotting_dataframe['Annual Variance'], width, alpha=0.5,
            color='#FFC222', label=plotting_dataframe['Party'][2])
    # Set the y axis label
    ax.set_ylabel('Values')
    # Set the chart's title
    ax.set_title('Annual Equity Index Performance since 1920 - Democrats vs Republicans')
    # Set the position of the x ticks (centred under each group of 3 bars)
    ax.set_xticks([p + 1.5 * width for p in pos])
    # Set the labels for the x ticks
    ax.set_xticklabels(plotting_dataframe['Party'])
    # Setting the x-axis and y-axis limits
    plt.xlim(min(pos) - width, max(pos) + width * 4)
    plt.ylim([0, max(plotting_dataframe['Mean Annual Return'] + plotting_dataframe['Median Annual Return']
                     + plotting_dataframe['Annual Variance'])])
    # Adding the legend and showing the plot
    plt.legend(['Mean Annual Return', 'Median Annual Return', 'Annual Variance'], loc='upper left')
    plt.grid()
    plt.show()
def main():
    """
    This function is called from the main block. The purpose of this function is to contain all the calls to
    business logic functions
    :return: int - Return 0 or 1, which is used as the exit code, depending on successful or erroneous flow
    """
    # Wrap in a try block so that we catch any exceptions thrown by other functions and return a 1 for graceful exit
    try:
        # ===== Step 1: Load the CSV as a pandas DataFrame =====
        # pd.DataFrame.from_csv was deprecated and later removed from pandas;
        # pd.read_csv with index_col=0 is the documented replacement.
        presidents_dataframe = pd.read_csv('presidents.csv', index_col=0)
        # Convert the Start and End columns to DateTime objects
        presidents_dataframe['Start'] = pd.to_datetime(presidents_dataframe['Start'])
        presidents_dataframe['End'] = pd.to_datetime(presidents_dataframe['End'])
        # ===== Step 2: Download the two Indices =====
        # By using the collapse parameter, we directly get yearly data instead fetching daily/monthly
        # data and performing calculations on that dataset
        djia_dataframe = quandl.get("BCB/UDJIAD1", collapse='annual')
        snp_dataframe = quandl.get("MULTPL/SP500_REAL_PRICE_MONTH", collapse='annual')
        # Filter so that they are 1920 onwards
        djia_dataframe = djia_dataframe.loc['1920':]
        snp_dataframe = snp_dataframe.loc['1920':'2016']  # Because the DJIA datasource does not have 2017's data
        # ===== Step 3: Calculate the yearly returns =====
        # Since our data is already in yearly format, we need to call pct_change() with 1 as the
        # parameter. Assuming our data would have been daily or monthly, then we would have called
        # pct_change(252) for yearly returns from daily, and pct_change(21) for yearly returns from monthly data
        djia_yearly_returns = djia_dataframe['Value'].pct_change(1)
        snp_yearly_returns = snp_dataframe['Value'].pct_change(1)
        # Join the DataFrames
        djia_dataframe = djia_dataframe.join(djia_yearly_returns, rsuffix='_Yearly_Returns')
        snp_dataframe = snp_dataframe.join(snp_yearly_returns, rsuffix='_Yearly_Returns')
        # Drop the first year since it contains a NaN value
        djia_dataframe.dropna(inplace=True)
        snp_dataframe.dropna(inplace=True)
        # ===== Step 4: Segregate returns in terms of Presidency =====
        presidency_list = []
        for date_value in snp_dataframe.index:
            for p_index in range(0, len(presidents_dataframe)):
                if presidents_dataframe.iloc[p_index]['End'] > date_value > presidents_dataframe.iloc[p_index]['Start']:
                    presidency_list.append(presidents_dataframe.iloc[p_index]['Party'])
                    break
        # Map Yearly Returns to Party
        djia_dataframe = djia_dataframe.join(pd.DataFrame(presidency_list, columns=['Party']).set_index(
            djia_dataframe.index))
        snp_dataframe = snp_dataframe.join(pd.DataFrame(presidency_list, columns=['Party']).set_index(
            snp_dataframe.index))
        # Combine SnP and DJIA frame
        combined_dataframe = pd.DataFrame({'DJIA': djia_dataframe['Value_Yearly_Returns'],
                                           'SNP': snp_dataframe['Value_Yearly_Returns'],
                                           'Party': snp_dataframe['Party']}).reset_index(drop=True)
        democrats_dataframe = combined_dataframe[combined_dataframe['Party'] == 'Democratic']
        republicans_dataframe = combined_dataframe[combined_dataframe['Party'] == 'Republican']
        # ===== Step 5: Calculate Central Tendency for each of the groups =====
        democrats_central_tendency = democrats_dataframe.describe()
        republicans_central_tendency = republicans_dataframe.describe()
        # Create Plotting DataFrame (variance is the square of the std reported by describe())
        plotting_dataframe = pd.DataFrame([['Democrats-DJIA', democrats_central_tendency.loc['mean']['DJIA'],
                                            democrats_central_tendency.loc['50%']['DJIA'],
                                            democrats_central_tendency.loc['std']['DJIA'] ** 2],
                                           ['Democrats-SnP', democrats_central_tendency.loc['mean']['SNP'],
                                            democrats_central_tendency.loc['50%']['SNP'],
                                            democrats_central_tendency.loc['std']['SNP'] ** 2],
                                           ['Republicans-DJIA', republicans_central_tendency.loc['mean']['DJIA'],
                                            republicans_central_tendency.loc['50%']['DJIA'],
                                            republicans_central_tendency.loc['std']['DJIA'] ** 2],
                                           ['Republicans-SNP', republicans_central_tendency.loc['mean']['SNP'],
                                            republicans_central_tendency.loc['50%']['SNP'],
                                            republicans_central_tendency.loc['std']['SNP'] ** 2]],
                                          columns=
                                          ['Party', 'Mean Annual Return', 'Median Annual Return', 'Annual Variance'])
        # Plot the data
        plot_group_bar_chart(plotting_dataframe)
    except BaseException as e:
        # Fixed: "except BaseException, e" is Python 2 syntax and a SyntaxError in Python 3.
        # Casting a wide net to catch all exceptions
        print('\n%s' % str(e))
        return 1
    # Success path now returns an explicit 0 so the caller's sys.exit() gets a real exit code.
    return 0
# Main block of the program. The program begins execution from this block when called from a cmd
if __name__ == '__main__':
    # Initialize Logger
    # NOTE(review): the trailing ': ' in the format string looks unintended - confirm.
    logging.basicConfig(format='%(asctime)s %(message)s: ')
    logging.info('Application Started')
    # NOTE(review): ensure main() returns an int on every path so sys.exit()
    # receives a proper process exit code.
    exit_code = main()
    logging.info('Application Ended')
    sys.exit(exit_code)
| StarcoderdataPython |
9663822 |
from ..utils import (
update_url_query,
int_or_none
)
from ..utilsEX import url_result
from ..extractor.pluralsight import PluralsightCourseIE as Old
class PluralsightCourseIE(Old):
    """Course extractor overriding the stock PluralsightCourseIE so that each
    playlist entry also carries the clip's duration and the module title."""

    def _real_extract(self, url):
        course_id = self._match_id(url)
        # TODO: PSM cookie
        course = self._download_course(course_id, url, course_id)
        title = course['title']
        course_name = course['name']
        course_data = course['modules']
        description = course.get('description') or course.get('shortDescription')
        entries = []
        for num, module in enumerate(course_data, 1):
            author = module.get('author')
            module_name = module.get('name')
            # Both author and module name are required to build the clip URL.
            if not author or not module_name:
                continue
            for clip in module.get('clips', []):
                clip_index = int_or_none(clip.get('index'))
                if clip_index is None:
                    continue
                clip_url = update_url_query(
                    '%s/player' % self._API_BASE, query={
                        'mode': 'live',
                        'course': course_name,
                        'author': author,
                        'name': module_name,
                        'clip': clip_index,
                    })
                entries.append({
                    '_type': 'url_transparent',
                    'url': clip_url,
                    'duration': clip['duration'],
                    'title': module.get('title'),
                })
        return self.playlist_result(entries, course_id, title, description)
336230 | """
Test that you can set breakpoint and hit the C++ language exception breakpoint
"""
from __future__ import print_function
import lldb
from lldbsuite.test.decorators import *
from lldbsuite.test.lldbtest import *
from lldbsuite.test import lldbutil
class TestCPPExceptionBreakpoint (TestBase):
    """Checks that the C++ exception breakpoint can be created (directly or via
    the dummy target) and that exactly one thread stops on it."""

    mydir = TestBase.compute_mydir(__file__)
    my_var = 10  # NOTE(review): appears unused within this class - confirm.

    @add_test_categories(['pyapi'])
    @expectedFailureAll(oslist=["windows"], bugnumber="llvm.org/pr24538")
    @expectedFailureNetBSD
    def test_cpp_exception_breakpoint(self):
        """Test setting and hitting the C++ exception breakpoint."""
        self.build()
        self.do_cpp_exception_bkpt()

    @add_test_categories(['pyapi'])
    @expectedFailureAll(oslist=["windows"], bugnumber="llvm.org/pr24538")
    @expectedFailureNetBSD
    def test_dummy_target_cpp_exception_breakpoint(self):
        """Test setting and hitting the C++ exception breakpoint from dummy target."""
        self.build()
        self.do_dummy_target_cpp_exception_bkpt()

    def setUp(self):
        # Record the source file used by the inferior before each test.
        TestBase.setUp(self)
        self.main_source = "main.c"
        self.main_source_spec = lldb.SBFileSpec(self.main_source)

    def do_cpp_exception_bkpt(self):
        # Create the target, set the C++ exception breakpoint on it directly,
        # launch, and verify one thread stops at that breakpoint.
        exe = self.getBuildArtifact("a.out")
        error = lldb.SBError()
        self.target = self.dbg.CreateTarget(exe)
        self.assertTrue(self.target, VALID_TARGET)
        exception_bkpt = self.target.BreakpointCreateForException(
            lldb.eLanguageTypeC_plus_plus, False, True)
        self.assertTrue(
            exception_bkpt.IsValid(),
            "Created exception breakpoint.")
        process = self.target.LaunchSimple(
            None, None, self.get_process_working_directory())
        self.assertTrue(process, PROCESS_IS_VALID)
        thread_list = lldbutil.get_threads_stopped_at_breakpoint(
            process, exception_bkpt)
        self.assertTrue(len(thread_list) == 1,
                        "One thread stopped at the exception breakpoint.")

    def do_dummy_target_cpp_exception_bkpt(self):
        # Set the breakpoint on the dummy target first; a target created
        # afterwards should inherit it ("primed") at index 0.
        exe = self.getBuildArtifact("a.out")
        error = lldb.SBError()
        dummy_exception_bkpt = self.dbg.GetDummyTarget().BreakpointCreateForException(
            lldb.eLanguageTypeC_plus_plus, False, True)
        self.assertTrue(
            dummy_exception_bkpt.IsValid(),
            "Created exception breakpoint in dummy target.")
        self.target = self.dbg.CreateTarget(exe)
        self.assertTrue(self.target, VALID_TARGET)
        exception_bkpt = self.target.GetBreakpointAtIndex(0)
        self.assertTrue(
            exception_bkpt.IsValid(),
            "Target primed with exception breakpoint from dummy target.")
        process = self.target.LaunchSimple(
            None, None, self.get_process_working_directory())
        self.assertTrue(process, PROCESS_IS_VALID)
        thread_list = lldbutil.get_threads_stopped_at_breakpoint(
            process, exception_bkpt)
        self.assertTrue(len(thread_list) == 1,
                        "One thread stopped at the exception breakpoint.")
| StarcoderdataPython |
11280957 | <gh_stars>0
""" (Sample) workload definition file.
"""
# One task dictionary per trajectory replica (rep1.traj .. rep50.traj).
# Replace the paths with your input file paths on stampede.
WORKLOAD = [
    {
        "runtime": 60,  # minutes per task
        "nmode": "/home1/00988/tg802352/MMPBSASampleDATA/nmode.5h.py",
        "com": "/home1/00988/tg802352/MMPBSASampleDATA/com.top.2",
        "rec": "/home1/00988/tg802352/MMPBSASampleDATA/rec.top.2",
        "lig": "/home1/00988/tg802352/MMPBSASampleDATA/lig.top",
        "traj": "/home1/00988/tg802352/MMPBSASampleDATA/trajectories/rep%s.traj" % tj,
    }
    for tj in range(1, 51)
]
| StarcoderdataPython |
4989485 | <reponame>dhermes/hvplot
from distutils.version import LooseVersion
from . import patch, _hv
try:
    import intake.plotting  # noqa
    patch('intake', extension='bokeh')
except Exception:
    # Fixed: the bare "except:" also swallowed SystemExit/KeyboardInterrupt;
    # "except Exception" still covers the expected ImportError fallback.
    # Older intake releases lack intake.plotting: patch by version instead.
    import intake
    if LooseVersion(intake.__version__) <= '0.1.5':
        patch('intake', extension='bokeh')
        patch('intake', 'plot')
    else:
        if not _hv.extension._loaded:
            _hv.extension('bokeh', logo=False)
| StarcoderdataPython |
1680223 | import pytest
from plenum.common.constants import DOMAIN_LEDGER_ID
from plenum.common.txn_util import reqToTxn
from plenum.test.helper import sdk_signed_random_requests
NUM_BATCHES = 3
TXNS_IN_BATCH = 5
def create_txns(looper, sdk_wallet_client):
    """Sign TXNS_IN_BATCH random requests and convert each into a txn."""
    signed_requests = sdk_signed_random_requests(looper, sdk_wallet_client, TXNS_IN_BATCH)
    return list(map(reqToTxn, signed_requests))
@pytest.fixture(scope='module')
def ledger(txnPoolNodeSet):
    # Domain ledger of the first node in the pool, shared by all tests in the module.
    return txnPoolNodeSet[0].ledgers[DOMAIN_LEDGER_ID]
@pytest.fixture(scope='module')
def ledger_with_batches_appended(ledger,
                                 looper, sdk_wallet_client):
    # Append NUM_BATCHES batches of TXNS_IN_BATCH txns each to the domain ledger.
    for i in range(NUM_BATCHES):
        txns = create_txns(looper, sdk_wallet_client)
        # Metadata is attached before the txns themselves are appended.
        ledger.append_txns_metadata(txns)
        ledger.appendTxns(txns)
    return ledger
| StarcoderdataPython |
6525477 | import types
class MyClass:
    """Demo: build a function at runtime with exec() and bind it to this
    instance as a method via types.MethodType."""

    def __init__(self):
        l = {}
        s = '''
def func(self, x):
    print(self, ".", x)
'''
        # SECURITY NOTE: exec() runs arbitrary code. It is harmless here with a
        # hard-coded string, but must never be used on untrusted input.
        exec(s, globals(), l)
        # l['func'] is the plain function; MethodType binds it so self is passed.
        self.func = types.MethodType(l['func'], self)


MyClass().func('test')
| StarcoderdataPython |
3385921 | <gh_stars>0
# pdoc configuration: hide these names from the generated API documentation.
__pdoc__ = {}
__pdoc__['setup'] = False
__pdoc__['initialize_db_script'] = False
__pdoc__['test'] = False
8113498 | # Copyright 2021 NREL
# Licensed under the Apache License, Version 2.0 (the "License"); you may not
# use this file except in compliance with the License. You may obtain a copy of
# the License at http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations under
# the License.
# See https://floris.readthedocs.io for documentation
import numpy as np
import matplotlib.pyplot as plt
from scipy.optimize import minimize
from .layout import LayoutOptimization
class PowerDensityOptimization(LayoutOptimization):
"""
PowerDensityOptimization is a subclass of the
:py:class:`~.tools.optimization.scipy.layout.LayoutOptimization` class
that performs power density optimization.
"""
    def __init__(
        self,
        fi,
        boundaries,
        wd,
        ws,
        freq,
        AEP_initial,
        yawbnds=None,
        x0=None,
        bnds=None,
        min_dist=None,
        opt_method="SLSQP",
        opt_options=None,
    ):
        """
        Instantiate PowerDensityOptimization object with a FlorisInterface
        object and assigns parameter values.
        Args:
            fi (:py:class:`floris.tools.floris_interface.FlorisInterface`):
                Interface used to interact with the Floris object.
            boundaries (iterable(float, float)): Pairs of x- and y-coordinates
                that represent the boundary's vertices (m).
            wd (np.array): An array of wind directions (deg).
            ws (np.array): An array of wind speeds (m/s).
            freq (np.array): An array of the frequencies of occurrence
                corresponding to each pair of wind direction and wind speed
                values.
            AEP_initial (float): The initial Annual Energy
                Production used for normalization in the optimization (Wh)
                (TODO: Is Watt-hours the correct unit?).
            yawbnds: TODO: This parameter isn't used. Remove it?
            x0 (iterable, optional): The initial turbine locations,
                ordered by x-coordinate and then y-coordinate
                (ie. [x1, x2, ..., xn, y1, y2, ..., yn]) (m). If none are
                provided, x0 initializes to the current turbine locations.
                Defaults to None.
            bnds (iterable, optional): Bounds for the optimization
                variables (pairs of min/max values for each variable (m)). If
                none are specified, they are set to (0, 1) for each turbine.
                Defaults to None. TODO: Explain significance of (0, 1).
            min_dist (float, optional): The minimum distance to be
                maintained between turbines during the optimization (m). If not
                specified, initializes to 4 rotor diameters. Defaults to None.
            opt_method (str, optional): The optimization method used by
                scipy.optimize.minimize. Defaults to 'SLSQP'.
            opt_options (dict, optional): Optimization options used by
                scipy.optimize.minimize. If none are specified, they are set to
                {'maxiter': 100, 'disp': True, 'iprint': 2, 'ftol': 1e-9}.
                Defaults to None.
        """
        super().__init__(
            fi,
            boundaries,
            wd,
            ws,
            freq,
            AEP_initial,
            x0=x0,
            bnds=bnds,
            min_dist=min_dist,
            opt_method=opt_method,
            opt_options=opt_options,
        )
        # Machine epsilon; not referenced by this class's visible methods.
        self.epsilon = np.finfo(float).eps
        self.counter = 0
        # Override the base-class default options when none were supplied.
        if opt_options is None:
            self.opt_options = {"maxiter": 100, "disp": True, "iprint": 2, "ftol": 1e-9}
    def _generate_constraints(self):
        """Build the SLSQP inequality constraints: minimum turbine spacing,
        containment within the normalized boundary, and no loss of AEP
        relative to the initial layout."""
        # grad_constraint1 = grad(self._space_constraint)
        # grad_constraint2 = grad(self._distance_from_boundaries)
        tmp1 = {
            "type": "ineq",
            "fun": lambda x, *args: self._space_constraint(x, self.min_dist),
            "args": (self.min_dist,),
        }
        tmp2 = {
            "type": "ineq",
            "fun": lambda x, *args: self._distance_from_boundaries(
                x, self.boundaries_norm
            ),
            "args": (self.boundaries_norm,),
        }
        tmp3 = {"type": "ineq", "fun": lambda x, *args: self._AEP_constraint(x)}
        self.cons = [tmp1, tmp2, tmp3]
def _set_opt_bounds(self):
self.bnds = [
(0.0, 1.0) for _ in range(2 * self.nturbs + self.nturbs * len(self.wd))
]
def _change_coordinates(self, locsx, locsy):
# Parse the layout coordinates
layout_array = [locsx, locsy]
# Update the turbine map in floris
self.fi.reinitialize_flow_field(layout_array=layout_array)
    def _powDens_opt(self, optVars):
        """Objective function: negative normalized AEP scaled by the ratio of
        initial to candidate layout area, so denser layouts that preserve AEP
        score better (lower).

        Args:
            optVars (iterable): Normalized design vector; first nturbs entries
                are x locations, the next nturbs are y locations, followed by
                nturbs yaw variables for each wind direction.

        Returns:
            float: Objective value to be minimized.
        """
        locsx = optVars[0 : self.nturbs]
        locsy = optVars[self.nturbs : 2 * self.nturbs]
        # Un-normalize the locations back to meters.
        locsx_unnorm = [
            self._unnorm(valx, self.bndx_min, self.bndx_max) for valx in locsx
        ]
        locsy_unnorm = [
            self._unnorm(valy, self.bndy_min, self.bndy_max) for valy in locsy
        ]
        # One slice of nturbs yaw variables per wind direction.
        turb_controls = [
            optVars[
                2 * self.nturbs + i * self.nturbs : 3 * self.nturbs + i * self.nturbs
            ]
            for i in range(len(self.wd))
        ]
        turb_controls_unnorm = [
            self._unnorm(yaw, self.yaw_min, self.yaw_max) for yaw in turb_controls
        ]
        self._change_coordinates(locsx_unnorm, locsy_unnorm)
        opt_area = self.find_layout_area(locsx_unnorm + locsy_unnorm)
        AEP_sum = 0.0
        for i in range(len(self.wd)):
            # Apply this wind direction's yaw settings before evaluating AEP.
            for j, turbine in enumerate(self.fi.floris.farm.turbine_map.turbines):
                turbine.yaw_angle = turb_controls_unnorm[i][j]
            AEP_sum = AEP_sum + self._AEP_single_wd(
                self.wd[i], self.ws[i], self.freq[i]
            )
        # print('AEP ratio: ', AEP_sum/self.AEP_initial)
        return -1 * AEP_sum / self.AEP_initial * self.initial_area / opt_area
    def _AEP_constraint(self, optVars):
        """Inequality constraint: (candidate AEP / initial AEP) - 1.
        SLSQP requires this to be >= 0, i.e. the optimized layout may not
        produce less energy than the initial one."""
        locsx = optVars[0 : self.nturbs]
        locsy = optVars[self.nturbs : 2 * self.nturbs]
        locsx_unnorm = [
            self._unnorm(valx, self.bndx_min, self.bndx_max) for valx in locsx
        ]
        locsy_unnorm = [
            self._unnorm(valy, self.bndy_min, self.bndy_max) for valy in locsy
        ]
        # One slice of nturbs yaw variables per wind direction.
        turb_controls = [
            optVars[
                2 * self.nturbs + i * self.nturbs : 3 * self.nturbs + i * self.nturbs
            ]
            for i in range(len(self.wd))
        ]
        turb_controls_unnorm = [
            self._unnorm(yaw, self.yaw_min, self.yaw_max) for yaw in turb_controls
        ]
        self._change_coordinates(locsx_unnorm, locsy_unnorm)
        AEP_sum = 0.0
        for i in range(len(self.wd)):
            # Apply this wind direction's yaw settings before evaluating AEP.
            for j, turbine in enumerate(self.fi.floris.farm.turbine_map.turbines):
                turbine.yaw_angle = turb_controls_unnorm[i][j]
            AEP_sum = AEP_sum + self._AEP_single_wd(
                self.wd[i], self.ws[i], self.freq[i]
            )
        return AEP_sum / self.AEP_initial - 1.0
    def _optimize(self):
        """Run scipy.optimize.minimize on the power-density objective and
        return the optimal (still normalized) design vector."""
        self.residual_plant = minimize(
            self._powDens_opt,
            self.x0,
            method=self.opt_method,
            bounds=self.bnds,
            constraints=self.cons,
            options=self.opt_options,
        )
        opt_results = self.residual_plant.x
        return opt_results
    def optimize(self):
        """
        This method finds the optimized layout of wind turbines for power
        production given the provided frequencies of occurrence of wind
        conditions (wind speed, direction).
        TODO: update the doc
        Returns:
            iterable: A list of the optimized x, y locations of each
                turbine (m), as [[x1..xn], [y1..yn]].
        """
        print("=====================================================")
        print("Optimizing turbine layout...")
        print("Number of parameters to optimize = ", len(self.x0))
        print("=====================================================")
        opt_locs_norm = self._optimize()
        print("Optimization complete.")
        # Un-normalize the optimal x and y coordinates back to meters.
        opt_locs = [
            [
                self._unnorm(valx, self.bndx_min, self.bndx_max)
                for valx in opt_locs_norm[0 : self.nturbs]
            ],
            [
                self._unnorm(valy, self.bndy_min, self.bndy_max)
                for valy in opt_locs_norm[self.nturbs : 2 * self.nturbs]
            ],
        ]
        return opt_locs
    def reinitialize_opt(
        self,
        boundaries=None,
        yawbnds=None,
        wd=None,
        ws=None,
        freq=None,
        AEP_initial=None,
        x0=None,
        bnds=None,
        min_dist=None,
        opt_method=None,
        opt_options=None,
    ):
        """
        This method reinitializes any optimization parameters that are
        specified. Otherwise, the current parameter values are kept.
        Args:
            boundaries (iterable(float, float)): Pairs of x- and y-coordinates
                that represent the boundary's vertices (m).
            yawbnds (iterable): A list of the min. and max. yaw offset that is
                allowed during the optimization (deg). If none are specified,
                initialized to (0, 25.0). Defaults to None.
            wd (np.array): An array of wind directions (deg). Defaults to None.
            ws (np.array): An array of wind speeds (m/s). Defaults to None.
            freq (np.array): An array of the frequencies of occurrence
                corresponding to each pair of wind direction and wind speed
                values. Defaults to None.
            AEP_initial (float): The initial Annual Energy
                Production used for normalization in the optimization (Wh)
                (TODO: Is Watt-hours the correct unit?). If not specified,
                initializes to the AEP of the current Floris object. Defaults
                to None.
            x0 (iterable, optional): The initial turbine locations,
                ordered by x-coordinate and then y-coordinate
                (ie. [x1, x2, ..., xn, y1, y2, ..., yn]) (m). If none are
                provided, x0 initializes to the current turbine locations.
                Defaults to None.
            bnds (iterable, optional): Bounds for the optimization
                variables (pairs of min/max values for each variable (m)). If
                none are specified, they are set to (0, 1) for each turbine.
                Defaults to None.
            min_dist (float, optional): The minimum distance to be
                maintained between turbines during the optimization (m). If not
                specified, initializes to 4 rotor diameters. Defaults to None.
            opt_method (str, optional): The optimization method used by
                scipy.optimize.minimize. Defaults to None.
            opt_options (dict, optional): Optimization options used by
                scipy.optimize.minimize. Defaults to None.
        """
        if boundaries is not None:
            self.boundaries = boundaries
            self.bndx_min = np.min([val[0] for val in boundaries])
            self.bndy_min = np.min([val[1] for val in boundaries])
            self.bndx_max = np.max([val[0] for val in boundaries])
            self.bndy_max = np.max([val[1] for val in boundaries])
            # Normalize the boundary vertices into the [0, 1] optimization space.
            self.boundaries_norm = [
                [
                    self._norm(val[0], self.bndx_min, self.bndx_max),
                    self._norm(val[1], self.bndy_min, self.bndy_max),
                ]
                for val in self.boundaries
            ]
        if yawbnds is not None:
            self.yaw_min = yawbnds[0]
            self.yaw_max = yawbnds[1]
        else:
            self.yaw_min = 0.0
            self.yaw_max = 25.0
        if wd is not None:
            self.wd = wd
        if ws is not None:
            self.ws = ws
        if freq is not None:
            self.freq = freq
        if AEP_initial is not None:
            self.AEP_initial = AEP_initial
        else:
            self.AEP_initial = self.fi.get_farm_AEP(self.wd, self.ws, self.freq)
        if x0 is not None:
            self.x0 = x0
        else:
            # Default design vector: normalized current turbine locations plus
            # a normalized 5-degree yaw guess per turbine per wind direction.
            self.x0 = (
                [
                    self._norm(coord.x1, self.bndx_min, self.bndx_max)
                    for coord in self.fi.floris.farm.turbine_map.coords
                ]
                + [
                    self._norm(coord.x2, self.bndy_min, self.bndy_max)
                    for coord in self.fi.floris.farm.turbine_map.coords
                ]
                + [self._norm(5.0, self.yaw_min, self.yaw_max)]
                * len(self.wd)
                * self.nturbs
            )
        if bnds is not None:
            self.bnds = bnds
        else:
            self._set_opt_bounds()
        if min_dist is not None:
            self.min_dist = min_dist
        else:
            self.min_dist = 4 * self.fi.floris.farm.turbines[0].rotor_diameter
        if opt_method is not None:
            self.opt_method = opt_method
        if opt_options is not None:
            self.opt_options = opt_options
        # Remember the starting layout; its area normalizes the objective.
        self.layout_x_orig = [
            coord.x1 for coord in self.fi.floris.farm.turbine_map.coords
        ]
        self.layout_y_orig = [
            coord.x2 for coord in self.fi.floris.farm.turbine_map.coords
        ]
        self._generate_constraints()
        self.initial_area = self.find_layout_area(
            self.layout_x_orig + self.layout_y_orig
        )
def find_layout_area(self, locs):
"""
This method returns the area occupied by the wind farm.
Args:
locs (iterable): A list of the turbine coordinates, organized as
[x1, x2, ..., xn, y1, y2, ..., yn] (m).
Returns:
float: The area occupied by the wind farm (m^2).
"""
locsx = locs[0 : self.nturbs]
locsy = locs[self.nturbs :]
points = zip(locsx, locsy)
points = np.array(list(points))
hull = self.convex_hull(points)
area = self.polygon_area(
np.array([val[0] for val in hull]), np.array([val[1] for val in hull])
)
return area
    def convex_hull(self, points):
        """
        Finds the vertices that describe the convex hull shape given the input
        coordinates (quickhull-style divide and conquer).

        Args:
            points (iterable((float, float))): Coordinates of interest.

        Returns:
            list: Vertices describing convex hull shape.
        """
        # find two hull points, U, V, and split to left and right search
        u = min(points, key=lambda p: p[0])
        v = max(points, key=lambda p: p[0])
        left, right = self.split(u, v, points), self.split(v, u, points)
        # find convex hull on each side
        return [v] + self.extend(u, v, left) + [u] + self.extend(v, u, right) + [v]
def polygon_area(self, x, y):
"""
Calculates the area of a polygon defined by its (x, y) vertices.
Args:
x (iterable(float)): X-coordinates of polygon vertices.
y (iterable(float)): Y-coordinates of polygon vertices.
Returns:
float: Area of polygon.
"""
# coordinate shift
x_ = x - x.mean()
y_ = y - y.mean()
correction = x_[-1] * y_[0] - y_[-1] * x_[0]
main_area = np.dot(x_[:-1], y_[1:]) - np.dot(y_[:-1], x_[1:])
return 0.5 * np.abs(main_area + correction)
    def split(self, u, v, points):
        """Return the subset of *points* lying to the left of the directed
        line u->v, i.e. those with a negative 2-D cross product
        (p - u) x (v - u)."""
        # return points on left side of UV
        return [p for p in points if np.cross(p - u, v - u) < 0]
    def extend(self, u, v, points):
        """Quickhull recursion: given the points left of edge u->v, pick the
        point w farthest from the edge and recurse on the two sub-edges,
        returning the hull vertices strictly between v and u."""
        if not points:
            return []
        # find furthest point W, and split search to WV, UW
        w = min(points, key=lambda p: np.cross(p - u, v - u))
        p1, p2 = self.split(w, v, points), self.split(u, w, points)
        return self.extend(w, v, p1) + [w] + self.extend(u, w, p2)
    def plot_opt_results(self):
        """
        This method plots the original and new locations of the turbines in a
        wind farm after layout optimization.
        """
        # Un-normalize the initial (x0) and optimized (residual_plant.x) layouts.
        locsx_old = [
            self._unnorm(valx, self.bndx_min, self.bndx_max)
            for valx in self.x0[0 : self.nturbs]
        ]
        locsy_old = [
            self._unnorm(valy, self.bndy_min, self.bndy_max)
            for valy in self.x0[self.nturbs : 2 * self.nturbs]
        ]
        locsx = [
            self._unnorm(valx, self.bndx_min, self.bndx_max)
            for valx in self.residual_plant.x[0 : self.nturbs]
        ]
        locsy = [
            self._unnorm(valy, self.bndy_min, self.bndy_max)
            for valy in self.residual_plant.x[self.nturbs : 2 * self.nturbs]
        ]
        plt.figure(figsize=(9, 6))
        fontsize = 16
        plt.plot(locsx_old, locsy_old, "ob")
        plt.plot(locsx, locsy, "or")
        # plt.title('Layout Optimization Results', fontsize=fontsize)
        plt.xlabel("x (m)", fontsize=fontsize)
        plt.ylabel("y (m)", fontsize=fontsize)
        plt.axis("equal")
        plt.grid()
        plt.tick_params(which="both", labelsize=fontsize)
        plt.legend(
            ["Old locations", "New locations"],
            loc="lower center",
            bbox_to_anchor=(0.5, 1.01),
            ncol=2,
            fontsize=fontsize,
        )
        # Draw the boundary polygon, closing it back to the first vertex.
        verts = self.boundaries
        for i in range(len(verts)):
            if i == len(verts) - 1:
                plt.plot([verts[i][0], verts[0][0]], [verts[i][1], verts[0][1]], "b")
            else:
                plt.plot(
                    [verts[i][0], verts[i + 1][0]], [verts[i][1], verts[i + 1][1]], "b"
                )
| StarcoderdataPython |
5034561 | #!/usr/bin/python3
# -*- coding: utf-8 -*-
import argparse
import websocket
import logging
import time
import requests
import threading
from tornado import gen
from tornado.httpclient import AsyncHTTPClient, HTTPRequest
def handle_request(response):
    """Callback for a completed async HTTP request; only logs that it ran."""
    logging.info("handle_request")
def run_socket(*args):
    """POST an empty JSON payload to the process-request endpoint.

    NOTE(review): despite the name this uses plain HTTP via requests, not a
    websocket, and it relies on the module-global sock_addr being set by
    main() before the thread starts - confirm that ordering.
    """
    logging.info("run_socket")
    # Publish the session as a module global (read nowhere else in this chunk).
    global sess
    sess = requests.Session()
    resp = sess.post(sock_addr, json='{}')
    print('{}'.format(resp))
    time.sleep(3)
def main():
    """Parse CLI options, configure logging, set the target URL, and fire a
    background thread that POSTs to the process-monitor server."""
    parser = argparse.ArgumentParser(description='Start the process monitor server.')
    parser.add_argument('-port', metavar='port', type=int,
                        default=8184,
                        help='port to run the server on.')
    parser.add_argument('-loglevel', metavar='loglevel',
                        default='INFO',
                        help='log level (default = INFO)')
    options = parser.parse_args()
    logging.getLogger().setLevel(options.loglevel)
    # run_socket() reads this module global to know where to POST.
    global sock_addr
    sock_addr = "http://localhost:{}/process_request".format(options.port)
    logging.info(sock_addr)
    socket_thread = threading.Thread(
        target=run_socket,
        name='Socket-Thread'
    )
    socket_thread.start()
    time.sleep(2)
    # sock_addr = "ws://localhost:{}/process_socket".format(options.port)
    # ws = websocket.WebSocketApp(sock_addr)
    # ws.run_forever(ping_interval=0)
if __name__ == "__main__":
    main()
# sess.close()
# NOTE(review): this sleep sits at module level, outside the __main__ guard,
# so it also runs when the module is imported - confirm that is intended.
time.sleep(10)
| StarcoderdataPython |
4917616 | <gh_stars>0
# Copyright 2021 Huawei Technologies Co., Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
"""eval file"""
import argparse
from mindspore import load_checkpoint, load_param_into_net, context
from mindspore.ops import Squeeze, Argmax
from mindspore.common import dtype as mstype
from mindspore import numpy as mnp
from src.config import Config
from src.dataset import build_dataset, build_dataloader
from src.hypertext import HModel
# --- CLI configuration for the HyperText evaluation run ---
parser = argparse.ArgumentParser(description='HyperText Text Classification')
parser.add_argument('--model', type=str, default='HyperText',
                    help='HyperText')
parser.add_argument('--modelPath', default='./output/hypertext_iflytek.ckpt', type=str, help='save model path')
parser.add_argument('--datasetdir', default='./data/iflytek_public', type=str,
                    help='dataset dir iflytek_public tnews_public')
parser.add_argument('--batch_size', default=32, type=int, help='batch_size')
parser.add_argument('--datasetType', default='iflytek', type=str, help='iflytek/tnews')
parser.add_argument('--device', default='GPU', type=str, help='device GPU Ascend')
args = parser.parse_args()
# Select dataset-specific hyperparameters.
config = Config(args.datasetdir, None, args.device)
if args.datasetType == 'tnews':
    config.useTnews()
else:
    config.useIflyek()
# Configure the MindSpore execution backend.
if config.device == 'GPU':
    context.set_context(mode=context.GRAPH_MODE, device_target='GPU')
elif config.device == 'Ascend':
    context.set_context(mode=context.GRAPH_MODE, device_target='Ascend')
# Build vocab/datasets and a dataloader over the test split.
# NOTE(review): args.batch_size is parsed but config.batch_size is used here -
# confirm which one should drive the dataloader.
vocab, train_data, dev_data, test_data = build_dataset(config, use_word=True, min_freq=int(config.min_freq))
test_iter = build_dataloader(test_data, config.batch_size, config.max_length)
config.n_vocab = len(vocab)
# Restore the trained model weights from the checkpoint.
model_path = args.modelPath
hmodel = HModel(config).to_float(mstype.float16)
param_dict = load_checkpoint(model_path)
load_param_into_net(hmodel, param_dict)
squ = Squeeze(-1)
argmax = Argmax(output_type=mstype.int32)
# Accumulate correct predictions (cur) and total samples (total).
cur, total = 0, 0
print('----------start test model-------------')
for d in test_iter.create_dict_iterator():
    hmodel.set_train(False)
    out = hmodel(d['ids'], d['ngrad_ids'])
    predict = argmax(out)
    acc = predict == squ(d['label'])
    acc = mnp.array(acc, dtype=mnp.float16)
    cur += (mnp.sum(acc, -1))
    total += len(acc)
# Final accuracy = correct predictions / total samples.
print('acc:', cur / total)
201826 | <filename>predictor1.py
import requests
import math
import http.client
import json
from pprint import pprint
from firebase import firebase,FirebaseAuthentication
# Connect to the Firebase database holding the schedule data.
firebase_db = firebase.FirebaseApplication('https://nbasort.firebaseio.com/', authentication=None)
##GET TEAMS PLAYING
result = firebase_db.get('/Schedule/-KnFSyixTJlx59nX6C1C/games/0', None)
# Round-trip through JSON to normalize the Firebase response into plain
# dicts/lists before indexing into it.
data_str = json.dumps(result)
current_game = json.loads(data_str)
away_team_alias = current_game['away']['alias']
home_team_alias = current_game['home']['alias']
# Fixed: the originals were Python 2 "print x" statements, which are a
# SyntaxError in Python 3 (this file already imports the Python 3 http.client).
print(away_team_alias)
print(home_team_alias)
| StarcoderdataPython |
9646177 | from typing import Protocol
from app.core.models import ProductCategory
class CategoriesService(Protocol):
    """Structural (duck-typed) interface for async read access to product categories."""

    async def get_all(self) -> list[ProductCategory]:
        """Return every known product category."""
        ...
| StarcoderdataPython |
5109918 | <reponame>Magicboomliu/Vowe-Format-Detection
__author__ = "<NAME>"
#encoding="utf-8"
import os
import numpy as np
import librosa
import glob
from explore_data import PixelShiftSound
if __name__ == "__main__":
    '''
    datatype = 0 : 10 only :frame length is 10ms, No frame shift, sample rate is 16K
    datatype = 1 : 16 only :frame length is 16ms, No frame shift, sample rate is 16K
    datatype = 2 : 16-8: frame length is 16ms, frame shift is 8ms. sample rate is 16K
    '''
    # Frame the corpus at 10 ms with no overlap (datatype=0; see table above).
    ps = PixelShiftSound(sample_rate=16000,frame_duration=0.010,frame_shift_duration=0,datatype=0)
    wav_data,wav_label = ps.get_all_wav_data()
    # Report the shapes of the framed audio data and its labels.
    print("Wav Frame data:",wav_data.shape)
    print("Wav Frame label:",wav_label.shape)
| StarcoderdataPython |
# For each test case: split the sentence at '!', '?' or '.', then count the
# "proper" words per sub-sentence (alphabetic, first letter uppercase, the
# rest lowercase).
test_cases = int(input().strip())
for t in range(1, test_cases + 1):
    # NOTE(review): N is read but never used - presumably the sentence length
    # from the problem statement; confirm it can be ignored.
    N = int(input().strip())
    sentence = input().strip()
    check = ('!', '?', '.')
    idx = 0
    result = []
    # my name is <NAME>. my id is Rhs0266. what your id Bro?
    for i in range(len(sentence)):
        # Split the sentence at each terminator
        if not sentence[i] in check:
            continue
        temp = sentence[idx:i]
        # Skip past the terminator and the following space.
        idx = i + 2
        """
        my name is <NAME>
        my id is Rhs0266
        what your id Bro
        """
        # Count the properly capitalized words
        cnt = 0
        words = temp.split()
        for word in words:
            if not word[0].isupper():
                continue
            if not word.isalpha():
                continue
            if len(word) == 1 or word[1:].islower():
                cnt += 1
        result.append(cnt)
    print('#{}'.format(t), end=' ')
    print(*result)
| StarcoderdataPython |
3209610 | <filename>scripts/discovery_tests_with_setup.py
# Need to import path to test/fixtures and test/scripts/
# Ex : export PYTHONPATH='$PATH:/root/test/fixtures/:/root/test/scripts/'
#
# To run tests, you can do 'python -m testtools.run tests'. To run specific tests,
# You can do 'python -m testtools.run -l tests'
# Set the env variable PARAMS_FILE to point to your ini file. Else it will try to pick params.ini in PWD
#
import os
import signal
from novaclient import client as mynovaclient
from novaclient import exceptions as novaException
import unittest
import fixtures
import testtools
import traceback
import traffic_tests
from contrail_test_init import *
from vn_test import *
from quantum_test import *
from vnc_api_test import *
from nova_test import *
from vm_test import *
from connections import ContrailConnections
from floating_ip import *
from policy_test import *
from multiple_vn_vm_test import *
from contrail_fixtures import *
from tcutils.wrappers import preposttest_wrapper
import uuid
#from analytics_tests import *
class TestDiscoveryFixture(testtools.TestCase, fixtures.TestWithFixtures):
    # @classmethod
    def setUp(self):
        # Build all connection/inspection handles the tests rely on, from the
        # ini file pointed to by PARAMS_FILE (falls back to ./params.ini).
        super(TestDiscoveryFixture, self).setUp()
        if 'PARAMS_FILE' in os.environ:
            self.ini_file = os.environ.get('PARAMS_FILE')
        else:
            self.ini_file = 'params.ini'
        self.inputs = self.useFixture(ContrailTestInit(self.ini_file))
        self.connections = ContrailConnections(self.inputs)
        self.quantum_fixture = self.connections.quantum_fixture
        self.nova_fixture = self.connections.nova_fixture
        self.vnc_lib = self.connections.vnc_lib
        self.logger = self.inputs.logger
        self.agent_inspect = self.connections.agent_inspect
        self.cn_inspect = self.connections.cn_inspect
        self.analytics_obj = self.connections.analytics_obj
        # Discovery-service verification helper used by all tests below.
        self.ds_obj = self.connections.ds_verification_obj
    # end setUpClass
    def cleanUp(self):
        """Delegate cleanup to the testtools/fixtures base classes."""
        super(TestDiscoveryFixture, self).cleanUp()
    # end cleanUp
    def runTest(self):
        # Intentionally a no-op: the individual test_* methods are invoked
        # directly by the test runner.
        pass
    # end runTest
@preposttest_wrapper
def test_all_publishers_registered_to_discovery_service(self):
'''
Description:Validate all services are registered to discovery service
Steps:
1.Gets expected services to be published to discovery from testbed.py
2.Gets actually published services to discovery from <ip>:5998/services.json
3.Find out any diff between expected and actual list of publishers - fails test case if there is any diff
4.Checkes all the published services are up from discovery - fails if any of them down
Maintainer: <EMAIL>
'''
for ip in self.inputs.cfgm_ips:
self.logger.info("Verifying for ip %s" % (ip))
assert self.ds_obj.verify_registered_services_to_discovery_service(
ip)
return True
@preposttest_wrapper
def test_agent_gets_control_nodes_from_discovery(self):
'''
Description:Validate agents subscribed to control node service
Steps:
1.Get all xmpp-clients from connected to a xmpp server from discovery
2.From introspect of each of those xmpp-clients,verify if that client connected to the same xmpp server and connection established- fails otherwise
Maintainer: <EMAIL>
'''
for ip in self.inputs.cfgm_ips:
self.logger.info("Verifying for ip %s" % (ip))
assert self.ds_obj.verify_bgp_connection(ip)
return True
@preposttest_wrapper
def test_agents_connected_to_dns_service(self):
''' Validate agents subscribed to dns service
'''
for ip in self.inputs.cfgm_ips:
self.logger.info("Verifying for ip %s" % (ip))
assert self.ds_obj.verify_agents_connected_to_dns_service(ip)
return True
@preposttest_wrapper
def test_agents_connected_to_collector_service(self):
'''
Description: Validate agents subscribed to collector service
1.Verify all agents subscribed to collector service from discovery - fails otherwise
Maintainer: <EMAIL>
'''
for ip in self.inputs.cfgm_ips:
self.logger.info("Verifying for ip %s" % (ip))
assert self.ds_obj.verify_agents_connected_to_collector_service(ip)
return True
@preposttest_wrapper
def test_dns_agents_connected_to_collector_service(self):
''' Validate dns agents subscribed to collector service
'''
for ip in self.inputs.cfgm_ips:
self.logger.info("Verifying for ip %s" % (ip))
assert self.ds_obj.verify_dns_agent_connected_to_collector_service(
ip)
return True
@preposttest_wrapper
def test_control_nodes_connected_to_collector_service(self):
''' Validate control nodes subscribed to collector service
'''
for ip in self.inputs.cfgm_ips:
self.logger.info("Verifying for ip %s" % (ip))
assert self.ds_obj.verify_control_nodes_connected_to_collector_service(
ip)
return True
@preposttest_wrapper
def test_control_nodes_subscribed_to_ifmap_service(self):
'''
Description: Validate control nodes subscribed to ifmap service
1.Verify that control-node subscribed to ifmap server and the get the ifmap server info from discovery - fails otherwise
2.Go to control node introspect to verify if control node actually connected to that ifmap - fails otherwise
Maintainer: <EMAIL>
'''
for ip in self.inputs.cfgm_ips:
self.logger.info("Verifying for ip %s" % (ip))
assert self.ds_obj.verify_control_nodes_subscribed_to_ifmap_service(
ip)
return True
@preposttest_wrapper
def test_dns_agents_subscribed_to_ifmap_service(self):
''' Validate dns agents subscribed to ifmap service
'''
for ip in self.inputs.cfgm_ips:
self.logger.info("Verifying for ip %s" % (ip))
assert self.ds_obj.verify_dns_agent_subscribed_to_ifmap_service(ip)
return True
@preposttest_wrapper
def test_ApiServer_subscribed_to_collector_service(self):
''' Validate apiserver subscribed to collector service
'''
for ip in self.inputs.cfgm_ips:
self.logger.info("Verifying for ip %s" % (ip))
assert self.ds_obj.verify_ApiServer_subscribed_to_collector_service(
ip)
return True
@preposttest_wrapper
def test_Schema_subscribed_to_collector_service(self):
''' Validate schema subscribed to collector service
'''
assert self.ds_obj.verify_Schema_subscribed_to_collector_service()
return True
@preposttest_wrapper
def itest_cross_verification_objects_in_all_discovery(self):
''' cross verification objects in all discovery
'''
assert self.ds_obj.cross_verification_objects_in_all_discovery()
return True
@preposttest_wrapper
def test_ServiceMonitor_subscribed_to_collector_service(self):
''' Validate service monitor subscribed to collector service
'''
assert self.ds_obj.verify_ServiceMonitor_subscribed_to_collector_service(
)
return True
@preposttest_wrapper
def test_control_node_restart_and_validate_status_of_the_service(self):
''' Validate restart of control node services
'''
result = True
svc_lst = []
svc_lst = self.ds_obj.get_all_control_services(self.inputs.cfgm_ip)
for elem in svc_lst:
if (self.ds_obj.get_service_status(self.inputs.cfgm_ip, service_touple=elem) == 'up'):
self.logger.info("Service %s is up" % (elem,))
result = result and True
else:
self.logger.warn("Service %s is down" % (elem,))
result = result and False
svc_lst.remove(elem)
# Stopping the control node service
for elem in svc_lst:
ip = elem[0]
self.logger.info("Stopping service %s.." % (elem,))
self.inputs.stop_service('contrail-control', [ip])
time.sleep(20)
for elem in svc_lst:
ip = elem[0]
if (self.ds_obj.get_service_status(self.inputs.cfgm_ip, service_touple=elem) == 'up'):
self.logger.warn("Service %s is still up" % (elem,))
result = result and False
else:
self.logger.info("Service %s is down" % (elem,))
result = result and True
# Starting the control node service
for elem in svc_lst:
ip = elem[0]
self.logger.info("Starting service %s.." % (elem,))
self.inputs.start_service('contrail-control', [ip])
time.sleep(6)
for elem in svc_lst:
ip = elem[0]
if (self.ds_obj.get_service_status(self.inputs.cfgm_ip, service_touple=elem) == 'up'):
self.logger.info(
"Service %s came up after service was started" % (elem,))
result = result and True
else:
self.logger.info(
"Service %s is down even after service was started" % (elem,))
result = result and False
assert result
return True
    @preposttest_wrapper
    def test_agent_restart(self):
        """Validate discovery in-use accounting across an agent stop/start.

        Shrinks the discovery TTLs (so subscription refreshes happen fast),
        then for every compute node: records the xmpp-server in-use count,
        stops contrail-vrouter and expects the count to drop by one, starts
        it again and expects the count to grow back by one.  Finally the
        original TTLs are restored.
        """
        assert self.ds_obj.verify_bgp_connection()
        result = True
        # Shrink subscription TTLs so changes are reflected quickly.
        cmd = 'cd /etc/contrail;sed -i \'/ttl_min.*=.*/c\\ttl_min = 5\' discovery.conf'
        for ip in self.inputs.cfgm_ips:
            self.inputs.run_cmd_on_server(
                ip, cmd, username='root', password='<PASSWORD>')
        cmd = 'cd /etc/contrail;sed -i \'/ttl_max.*=.*/c\\ttl_max = 10\' discovery.conf'
        for ip in self.inputs.cfgm_ips:
            self.inputs.run_cmd_on_server(
                ip, cmd, username='root', password='<PASSWORD>')
        for ip in self.inputs.cfgm_ips:
            self.inputs.restart_service('contrail-discovery', [ip])
        time.sleep(2)
        assert self.analytics_obj.verify_cfgm_uve_module_state(
            self.inputs.collector_ips[0], self.inputs.cfgm_names[0], 'contrail-discovery')
        for ip in self.inputs.compute_ips:
            self.inputs.restart_service('contrail-vrouter', [ip])
        time.sleep(20)
        for ip in self.inputs.compute_ips:
            in_use_initial = {}
            in_use_after_stop = {}
            in_use_after_start = {}
            lst_in_use = []
            lst_svc_id = []
            t = {}
            svc_id = []
            svc_id = self.ds_obj.get_subscribed_service_id(
                self.inputs.cfgm_ip, client=(ip, 'VRouterAgent'), service='xmpp-server')
            # Baseline in-use count per subscribed xmpp-server service id.
            for service in svc_id:
                t = self.ds_obj.get_service_status_by_service_id(
                    self.inputs.cfgm_ip, service_id=service)
                in_use_initial[service] = t['in_use']
                self.logger.info(
                    "%s service id in use before agent %s restart: %s" %
                    (service, ip, t['in_use']))
            compute_node_process = ['contrail-vrouter']
            for process in compute_node_process:
                try:
                    self.inputs.stop_service(process, [ip])
                    time.sleep(50)
                    for service in svc_id:
                        t = self.ds_obj.get_service_status_by_service_id(
                            self.inputs.cfgm_ip, service_id=service)
                        in_use_after_stop[service] = t['in_use']
                        self.logger.info(
                            "%s service id in use after agent %s restart: %s" %
                            (service, ip, t['in_use']))
                    # NOTE: dict.iteritems() exists only on Python 2.
                    for k, v in in_use_after_stop.iteritems():
                        for k1, v1 in in_use_initial.iteritems():
                            if (k1 == k):
                                if (int(v1) - int(v) == 1):
                                    self.logger.info(
                                        "in-use decremented for %s service-id after %s agent stopped" % (k1, ip))
                                    result = result and True
                                else:
                                    self.logger.warn(
                                        "in-use not decremented for %s service-id after %s agent stopped" % (k1, ip))
                                    result = result and False
                except Exception as e:
                    print e
                finally:
                    # Always restart the agent, even if the checks blew up.
                    self.inputs.start_service(process, [ip])
                    time.sleep(10)
                    svc_id = self.ds_obj.get_subscribed_service_id(
                        self.inputs.cfgm_ip, client=(ip, 'VRouterAgent'), service='xmpp-server')
                    for service in svc_id:
                        t = self.ds_obj.get_service_status_by_service_id(
                            self.inputs.cfgm_ip, service_id=service)
                        in_use_after_start[service] = t['in_use']
                        self.logger.info(
                            "%s service id in use after agent %s restart: %s" %
                            (service, ip, t['in_use']))
                    for k, v in in_use_after_start.iteritems():
                        for k1, v1 in in_use_after_stop.iteritems():
                            if (k1 == k):
                                if (int(v) - int(v1) == 1):
                                    self.logger.info(
                                        "in-use incremented for %s service-id after %s agent started" % (k1, ip))
                                    result = result and True
                                else:
                                    self.logger.warn(
                                        "in-use not incremented for %s service-id after %s agent started" % (k1, ip))
                                    result = result and False
                    self.logger.info(
                        "************ END for %s *************" % (ip))
        # reverting back the changes in discovery.conf
        cmd = 'cd /etc/contrail;sed -i \'/ttl_min.*=.*/c\\ttl_min = 300\' discovery.conf'
        for ip in self.inputs.cfgm_ips:
            self.inputs.run_cmd_on_server(
                ip, cmd, username='root', password='<PASSWORD>')
        cmd = 'cd /etc/contrail;sed -i \'/ttl_max.*=.*/c\\ttl_max = 1800\' discovery.conf'
        for ip in self.inputs.cfgm_ips:
            self.inputs.run_cmd_on_server(
                ip, cmd, username='root', password='<PASSWORD>')
        for ip in self.inputs.cfgm_ips:
            self.inputs.restart_service('contrail-discovery', [ip])
        time.sleep(2)
        assert self.analytics_obj.verify_cfgm_uve_module_state(
            self.inputs.collector_ips[0], self.inputs.cfgm_names[0], 'contrail-discovery')
        assert self.ds_obj.verify_bgp_connection()
        assert result
        time.sleep(300)
        return True
@preposttest_wrapper
def test_change_parameters_in_discovery_conf(self):
''' Validate parameters in discovery.conf
-ttl_min
-ttl_max
-hc_max_miss
-policy
'''
# Changing the hc_max_miss=5 and verifying that the services are down
# after 25 sec
try:
cmd = 'cd /etc/contrail;sed -i \'/hc_max_miss.*=.*/c\hc_max_miss = 10\' discovery.conf'
for ip in self.inputs.cfgm_ips:
self.inputs.run_cmd_on_server(
ip, cmd, username='root', password='<PASSWORD>')
self.inputs.restart_service('contrail-discovery', [ip])
assert self.analytics_obj.verify_cfgm_uve_module_state(
self.inputs.collector_ips[0], self.inputs.cfgm_names[0], 'contrail-discovery')
result = True
svc_lst = []
svc_lst = self.ds_obj.get_all_control_services(self.inputs.cfgm_ip)
for elem in svc_lst:
if (self.ds_obj.get_service_status(self.inputs.cfgm_ip, service_touple=elem) == 'up'):
self.logger.info("Service %s is up" % (elem,))
result = result and True
else:
self.logger.warn("Service %s is down" % (elem,))
result = result and False
svc_lst.remove(elem)
# Stopping the control node service
for elem in svc_lst:
ip = elem[0]
self.logger.info("Stopping service %s.." % (elem,))
self.inputs.stop_service('contrail-control', [ip])
time.sleep(15)
for elem in svc_lst:
ip = elem[0]
if (self.ds_obj.get_service_status(self.inputs.cfgm_ip, service_touple=elem) == 'up'):
self.logger.info("Service %s is still up" % (elem,))
result = result and True
else:
self.logger.warn("Service %s is down before 25 sec" %
(elem,))
result = result and False
time.sleep(45)
for elem in svc_lst:
ip = elem[0]
if (self.ds_obj.get_service_status(self.inputs.cfgm_ip, service_touple=elem) == 'up'):
self.logger.warn("Service %s is still up after 30 secs" %
(elem,))
result = result and False
else:
self.logger.info("Service %s is down after 30 sec" %
(elem,))
result = result and True
# Starting the control node service
for elem in svc_lst:
ip = elem[0]
self.logger.info("Starting service %s.." % (elem,))
self.inputs.start_service('contrail-control', [ip])
time.sleep(6)
except Exception as e:
print e
finally:
# Changing the hc_max_miss=3
cmd = 'cd /etc/contrail;sed -i \'/hc_max_miss.*=.*/c\hc_max_miss = 3\' discovery.conf'
for ip in self.inputs.cfgm_ips:
self.inputs.run_cmd_on_server(
ip, cmd, username='root', password='<PASSWORD>')
self.inputs.restart_service('contrail-discovery', [ip])
assert self.analytics_obj.verify_cfgm_uve_module_state(
self.inputs.collector_ips[0], self.inputs.cfgm_names[0], 'contrail-discovery')
time.sleep(40) # workarond for bug 2489
assert result
# Change policy and verify discovery functionality: policy =
# [load-balance | round-robin | fixed]
self.logger.info("Changing the discovery policy to round-robin")
cmd = 'cd /etc/contrail;echo \'policy = round-robin \'>> discovery.conf'
self.inputs.run_cmd_on_server(
self.inputs.cfgm_ip, cmd, username='root', password='<PASSWORD>')
self.inputs.restart_service(
'contrail-discovery', [self.inputs.cfgm_ip])
assert self.analytics_obj.verify_cfgm_uve_module_state(
self.inputs.collector_ips[0], self.inputs.cfgm_names[0], 'contrail-discovery')
assert self.ds_obj.verify_bgp_connection()
self.logger.info("Changing the discovery policy to fixed")
cmd = 'cd /etc/contrail;sed -i \'/policy = round-robin/c\policy = fixed\' discovery.conf'
self.inputs.run_cmd_on_server(
self.inputs.cfgm_ip, cmd, username='root', password='<PASSWORD>')
self.inputs.restart_service(
'contrail-discovery', [self.inputs.cfgm_ip])
assert self.analytics_obj.verify_cfgm_uve_module_state(
self.inputs.collector_ips[0], self.inputs.cfgm_names[0], 'contrail-discovery')
assert self.ds_obj.verify_bgp_connection()
self.logger.info("Reverting back policy to default")
cmd = 'cd /etc/contrail;sed -i \'/policy = fixed/c\ \' discovery.conf'
self.inputs.run_cmd_on_server(
self.inputs.cfgm_ip, cmd, username='root', password='<PASSWORD>')
self.inputs.restart_service(
'contrail-discovery', [self.inputs.cfgm_ip])
assert self.analytics_obj.verify_cfgm_uve_module_state(
self.inputs.collector_ips[0], self.inputs.cfgm_names[0], 'contrail-discovery')
assert self.ds_obj.verify_bgp_connection()
return True
@preposttest_wrapper
def test_publish(self):
''' Validate short ttl
'''
self.logger.info(
"********TEST WILL FAIL IF RAN MORE THAN ONCE WITHOUT CLEARING THE ZOOKEEPER DATABASE*********")
service = 'dummy_service23'
port = 658093
result = True
try:
# Changing the hc_max_miss=3000 and verifying that the services are
# down after 25 mins
cmd = 'cd /etc/contrail;sed -i \'/hc_max_miss/c\hc_max_miss = 3000\' discovery.conf'
for ip in self.inputs.cfgm_ips:
self.inputs.run_cmd_on_server(
ip, cmd, username='root', password='<PASSWORD>')
cmd = 'cd /etc/contrail;sed -i \'/ttl_short/c\\ttl_short = 2\' discovery.conf'
for ip in self.inputs.cfgm_ips:
self.inputs.run_cmd_on_server(
ip, cmd, username='root', password='<PASSWORD>')
cmd = 'cd /etc/contrail;cat discovery.conf'
for ip in self.inputs.cfgm_ips:
out_put = self.inputs.run_cmd_on_server(
ip, cmd, username='root', password='<PASSWORD>')
self.logger.info("%s" % (out_put))
self.inputs.restart_service('contrail-discovery', [ip])
assert self.analytics_obj.verify_cfgm_uve_module_state(
self.inputs.collector_ips[0], self.inputs.cfgm_names[0], 'contrail-discovery')
time.sleep(40) # workaround for bug 2489
base_ip = '192.168.1.'
expected_ttl = 2
cuuid = uuid.uuid4()
while(expected_ttl <= 32):
resp = None
resp = self.ds_obj.subscribe_service_from_discovery(
self.inputs.cfgm_ip, service=service, instances=1, client_id=str(cuuid))
ttl = resp['ttl']
self.logger.info("ttl : %s" % (ttl))
if (ttl <= expected_ttl):
result = result and True
else:
result = result and False
self.logger.info("Waiting for %s sec..." % (expected_ttl))
time.sleep(expected_ttl)
expected_ttl = expected_ttl * 2
self.logger.info("Verifying that the ttl sablizes at 32 sec..")
resp = None
resp = self.ds_obj.subscribe_service_from_discovery(
self.inputs.cfgm_ip, service=service, instances=1, client_id=str(cuuid))
ttl = resp['ttl']
self.logger.info("ttl : %s" % (ttl))
if (ttl <= 32):
result = result and True
else:
result = result and False
# Bringing up services
self.logger.info("Bringing up services...")
for x in range(1, 4):
svc_ip = base_ip + str(x)
svc = 'svc' + str(x)
self.logger.info("Publishing service with ip %s and port %s" %
(svc_ip, port))
svc = self.ds_obj.publish_service_to_discovery(
self.inputs.cfgm_ip, service=service, ip=svc_ip, port=port)
time.sleep(5)
self.logger.info("Verifying that the nornal ttl sent..")
resp = None
resp = self.ds_obj.subscribe_service_from_discovery(
self.inputs.cfgm_ip, service=service, instances=1, client_id=str(cuuid))
ttl = resp['ttl']
self.logger.info("ttl : %s" % (ttl))
if (ttl in range(300, 1800)):
result = result and True
else:
result = result and False
# Verify instnaces == 0 will send all services
cuuid = uuid.uuid4()
resp = self.ds_obj.subscribe_service_from_discovery(
iself.inputs.cfgm_ip, service=service, instances=0, client_id=str(cuuid))
resp = resp[service]
if len(resp) < 3:
result = result and False
self.logger.warn("Not all services returned")
expected_ip_list = ['192.168.1.1', '192.168.1.2', '192.168.1.3']
result1 = True
for elem in resp:
self.logger.info("%s" % (elem))
if (elem['ip-address'] in expected_ip_list and elem['port'] == port):
result1 = result1 and True
expected_ip_list.remove(elem['ip-address'])
else:
self.logger.info('inside else')
result1 = result1 and False
if result1:
self.logger.info(
"All services correctly received by subscriber")
result = result and result1
else:
self.logger.warn("All services not received by subscriber")
result = result and result1
self.logger.warn("Missing service as %s" % (expected_ip_list))
except Exception as e:
print e
finally:
cmd = 'cd /etc/contrail;sed -i \'/hc_max_miss/c\hc_max_miss = 3\' discovery.conf'
for ip in self.inputs.cfgm_ips:
self.inputs.run_cmd_on_server(
ip, cmd, username='root', password='<PASSWORD>')
cmd = 'cd /etc/contrail;sed -i \'/ttl_short/c\\ttl_short = 1\' discovery.conf'
for ip in self.inputs.cfgm_ips:
self.inputs.run_cmd_on_server(
ip, cmd, username='root', password='<PASSWORD>')
cmd = 'cd /etc/contrail;cat discovery.conf'
for ip in self.inputs.cfgm_ips:
out_put = self.inputs.run_cmd_on_server(
ip, cmd, username='root', password='<PASSWORD>')
self.logger.info("%s" % (out_put))
self.inputs.restart_service('contrail-discovery', [ip])
assert self.analytics_obj.verify_cfgm_uve_module_state(
self.inputs.collector_ips[0], self.inputs.cfgm_names[0], 'contrail-discovery')
time.sleep(40)
resp = None
resp = self.ds_obj.cleanup_service_from_discovery(
self.inputs.cfgm_ip)
assert result
return True
@preposttest_wrapper
def test_cleanup(self):
''' cleanup service from discovery
'''
resp = None
resp = self.ds_obj.cleanup_service_from_discovery(self.inputs.cfgm_ip)
return True
    @preposttest_wrapper
    def test_scale_test(self):
        """Publish 100 services in parallel, subscribe to them, then delete.

        Uses one thread per publish/subscribe call; `threading` is expected
        to come in via the wildcard imports at the top of this module
        (TODO confirm).
        """
        try:
            service = 'dummy_service'
            port = 658093
            base_ip = '192.168.1.'
            result = True
            # Changing the hc_max_miss=3000 and verifying that the services are
            # down after 25 mins
            cmd = 'cd /etc/contrail;sed -i \'/hc_max_miss/c\hc_max_miss = 3000\' discovery.conf'
            for ip in self.inputs.cfgm_ips:
                self.inputs.run_cmd_on_server(
                    ip, cmd, username='root', password='<PASSWORD>')
            cmd = 'cd /etc/contrail;sed -i \'/ttl_short/c\\ttl_short = 2\' discovery.conf'
            for ip in self.inputs.cfgm_ips:
                self.inputs.run_cmd_on_server(
                    ip, cmd, username='root', password='<PASSWORD>')
            cmd = 'cd /etc/contrail;cat discovery.conf'
            for ip in self.inputs.cfgm_ips:
                out_put = self.inputs.run_cmd_on_server(
                    ip, cmd, username='root', password='<PASSWORD>')
                self.logger.info("%s" % (out_put))
                self.inputs.restart_service('contrail-discovery', [ip])
            assert self.analytics_obj.verify_cfgm_uve_module_state(
                self.inputs.collector_ips[0], self.inputs.cfgm_names[0], 'contrail-discovery')
            time.sleep(40)  # workarond for bug 2489
            # Bringing up services
            self.logger.info("Bringing up services...")
            threads = []
            published_service_lst = []
            for x in range(1, 101):
                svc_ip = base_ip + str(x)
                svc = 'svc' + str(x)
                # self.logger.info("Publishing service with ip %s and port %s"%(svc_ip,port))
                t = threading.Thread(target=self.ds_obj.publish_service_to_discovery, args=(
                    self.inputs.cfgm_ip, service, svc_ip, port))
                threads.append(t)
            # NOTE: svc_ip here is left over from the last loop iteration, so
            # this log line repeats the same ip for every thread started.
            for th in threads:
                self.logger.info("Publishing service with ip %s and port %s" %
                                 (svc_ip, port))
                th.start()
            for th in threads:
                th.join()
            # svc = self.ds_obj.publish_service_to_discovery(service=service,ip=svc_ip,port=port)
            time.sleep(5)
            self.logger.info("Verifying all services are up...")
            svc = self.ds_obj.get_all_services_by_service_name(
                self.inputs.cfgm_ip, service=service)
            for elem in svc:
                ip = elem[0]
                self.logger.info("ip: %s" % (ip))
                if (ip in (base_ip + str(x) for x in range(1, 101))):
                    self.logger.info("%s is added to discovery service" %
                                     (elem,))
                    result = result and True
                    self.logger.info("Verifying if the service is up")
                    svc_status = self.ds_obj.get_service_status(
                        self.inputs.cfgm_ip, service_touple=elem)
                    if (svc_status == 'up'):
                        self.logger.info("svc is up")
                        result = result and True
                    else:
                        result = result and False
                        self.logger.warn("svc not up")
                else:
                    self.logger.warn("%s is NOT added to discovery service" %
                                     (elem,))
                    result = result and False
            # Verify instnaces == 0 will send all services
            cuuid = uuid.uuid4()
            resp = self.ds_obj.subscribe_service_from_discovery(
                self.inputs.cfgm_ip, service=service, instances=0, client_id=str(cuuid))
            resp = resp[service]
            if len(resp) < 100:
                result = result and False
                self.logger.warn("Not all services returned")
            self.logger.info(
                "Sending 100 subscription message to discovery..")
            subs_threads = []
            for i in range(100):
                cuuid = uuid.uuid4()
                t = threading.Thread(target=self.ds_obj.subscribe_service_from_discovery, args=(
                    self.inputs.cfgm_ip, service, 2, str(cuuid)))
                subs_threads.append(t)
            for th in subs_threads:
                th.start()
            time.sleep(3)
            for th in subs_threads:
                th.join()
            # assert result
        except Exception as e:
            print e
        finally:
            # Chaging the discovery.conf to default
            cmd = 'cd /etc/contrail;sed -i \'/hc_max_miss/c\hc_max_miss = 3\' discovery.conf'
            for ip in self.inputs.cfgm_ips:
                self.inputs.run_cmd_on_server(
                    ip, cmd, username='root', password='<PASSWORD>')
            cmd = 'cd /etc/contrail;sed -i \'/ttl_short/c\\ttl_short = 1\' discovery.conf'
            for ip in self.inputs.cfgm_ips:
                self.inputs.run_cmd_on_server(
                    ip, cmd, username='root', password='<PASSWORD>')
            cmd = 'cd /etc/contrail;cat discovery.conf'
            for ip in self.inputs.cfgm_ips:
                out_put = self.inputs.run_cmd_on_server(
                    ip, cmd, username='root', password='<PASSWORD>')
                self.logger.info("%s" % (out_put))
                self.inputs.restart_service('contrail-discovery', [ip])
            assert self.analytics_obj.verify_cfgm_uve_module_state(
                self.inputs.collector_ips[0], self.inputs.cfgm_names[0], 'contrail-discovery')
            time.sleep(40)  # workarond for bug 2489
            resp = None
            resp = self.ds_obj.cleanup_service_from_discovery(
                self.inputs.cfgm_ip)
        assert result
        return True
    # End test test_scale_test
# End test test_scale_test
# end TestDiscoveryFixture
| StarcoderdataPython |
11340355 | #
# lines.py
#
# purpose: Reproduce LineCurvature2D.m and LineNormals2D.m
# author: <NAME>
# e-mail: <EMAIL>
# web: http://ocefpaf.tiddlyspot.com/
# created: 17-Jul-2012
# modified: Mon 02 Mar 2015 10:07:06 AM BRT
#
# obs:
#
import numpy as np
def LineNormals2D(Vertices, Lines=None):
    r"""Calculate the unit normal at each vertex of a 2D polyline.

    The normal at a vertex is estimated from the tangents of the line
    pieces touching it, weighted by 1/distance (weighted central
    differences: closer neighbours give a more accurate estimate), then
    rotated 90 degrees and normalized to unit length.

    Parameters
    ----------
    Vertices : (M, 2) float array of line points.
    Lines : (N, 2) array-like of line pieces as 1-based vertex indices,
        optional.  If None or empty, consecutive vertices are assumed
        connected: [[1, 2], [2, 3], ..., [M-1, M]].

    Returns
    -------
    N : (M, 2) array with the normal of each vertex.

    Function based on LineNormals2D.m written by
    D.Kroon University of Twente (August 2011)
    """
    eps = np.spacing(1)
    if Lines is None or len(Lines) == 0:
        Lines = np.c_[np.arange(1, Vertices.shape[0]),
                      np.arange(2, Vertices.shape[0] + 1)]
    else:
        # Generalization: accept any sequence of index pairs, not only
        # ndarrays (the original printed a warning and then crashed).
        Lines = np.asarray(Lines)
    # Tangent vector of every line piece (1-based indices -> 0-based).
    DT = Vertices[Lines[:, 0] - 1, :] - Vertices[Lines[:, 1] - 1, :]
    # Weight each tangent by 1/distance; eps guards against zero-length
    # pieces (coincident points).
    LL = np.sqrt(DT[:, 0] ** 2 + DT[:, 1] ** 2)
    DT[:, 0] = DT[:, 0] / np.maximum(LL ** 2, eps)
    DT[:, 1] = DT[:, 1] / np.maximum(LL ** 2, eps)
    # Accumulate the weighted tangents of the pieces touching each vertex.
    D1 = np.zeros_like(Vertices)
    D2 = np.zeros_like(Vertices)
    D1[Lines[:, 0] - 1, :] = DT
    D2[Lines[:, 1] - 1, :] = DT
    D = D1 + D2
    # Rotate the accumulated tangent by 90 degrees and normalize.
    LL = np.sqrt(D[:, 0] ** 2 + D[:, 1] ** 2)
    N = np.zeros_like(D)
    N[:, 0] = -D[:, 1] / LL
    N[:, 1] = D[:, 0] / LL
    return N
def LineCurvature2D(Vertices, Lines=None):
    r"""Calculate the signed curvature of a 2D polyline at every vertex.

    A quadratic polynomial x(t), y(t) is fitted through each vertex and its
    two neighbours (parameterized by signed arc distance t), and the
    analytic curvature of that fit is evaluated at the vertex.

    Parameters
    ----------
    Vertices : (M, 2) float array of line points.
    Lines : (N, 2) array-like of line pieces as 1-based vertex indices,
        optional.  If None or empty, consecutive vertices are assumed
        connected: [[1, 2], [2, 3], ..., [M-1, M]].

    Returns
    -------
    k : (M,) array of curvature values.

    Function based on LineCurvature2D.m written by
    D.Kroon University of Twente (August 2011)
    """
    # If no line-indices, assume a x[0] connected with x[1], x[2] with x[3].
    if Lines is None or len(Lines) == 0:
        Lines = np.c_[np.arange(1, Vertices.shape[0]),
                      np.arange(2, Vertices.shape[0] + 1)]
    else:
        Lines = np.asarray(Lines)
    # Get left and right neighbor of each points (values are 1-based).
    # BUG FIX: dtype=np.int -- the np.int alias was removed in NumPy 1.24;
    # the builtin int gives the same platform-default integer dtype.
    Na = np.zeros(Vertices.shape[0], dtype=int)
    Nb = np.zeros_like(Na)
    # As int because we use it to index an array...
    Na[Lines[:, 0] - 1] = Lines[:, 1]
    Nb[Lines[:, 1] - 1] = Lines[:, 0]
    # Check for end of line points, without a left or right neighbor.
    checkNa = Na == 0
    checkNb = Nb == 0
    # NOTE(review): Naa/Nbb alias Na/Nb (no copy is made), so the next two
    # assignments also mutate Na/Nb, and the 1-based values are then used
    # below as 0-based indices.  End-of-line neighbors may therefore differ
    # from the MATLAB original -- TODO confirm against LineCurvature2D.m
    # before changing; interior vertices are unaffected.
    Naa, Nbb = Na, Nb
    Naa[checkNa] = np.where(checkNa)[0]
    Nbb[checkNb] = np.where(checkNb)[0]
    # If no left neighbor use two right neighbors, and the same for right.
    Na[checkNa] = Nbb[Nbb[checkNa]]
    Nb[checkNb] = Naa[Naa[checkNb]]
    # ... Also, I remove `1` to get python indexing correctly.
    Na -= 1
    Nb -= 1
    # Signed arc distances to the two neighbors (left negative).
    Ta = -np.sqrt(np.sum((Vertices - Vertices[Na, :]) ** 2, axis=1))
    Tb = np.sqrt(np.sum((Vertices - Vertices[Nb, :]) ** 2, axis=1))
    # If no left neighbor use two right neighbors, and the same for right.
    Ta[checkNa] = -Ta[checkNa]
    Tb[checkNb] = -Tb[checkNb]
    # Fit quadratics through each vertex and its neighbors: rows of M are
    # the flattened 3x3 Vandermonde systems [1, -Ta, Ta^2; 1, 0, 0; 1, -Tb, Tb^2].
    x = np.c_[Vertices[Na, 0], Vertices[:, 0], Vertices[Nb, 0]]
    y = np.c_[Vertices[Na, 1], Vertices[:, 1], Vertices[Nb, 1]]
    M = np.c_[np.ones_like(Tb),
              -Ta,
              Ta ** 2,
              np.ones_like(Tb),
              np.zeros_like(Tb),
              np.zeros_like(Tb),
              np.ones_like(Tb),
              -Tb,
              Tb ** 2]
    invM = inverse3(M)
    # Polynomial coefficients: a for x(t), b for y(t).
    a = np.zeros_like(x)
    b = np.zeros_like(a)
    a[:, 0] = (invM[:, 0, 0] * x[:, 0] +
               invM[:, 1, 0] * x[:, 1] +
               invM[:, 2, 0] * x[:, 2])
    a[:, 1] = (invM[:, 0, 1] * x[:, 0] +
               invM[:, 1, 1] * x[:, 1] +
               invM[:, 2, 1] * x[:, 2])
    a[:, 2] = (invM[:, 0, 2] * x[:, 0] +
               invM[:, 1, 2] * x[:, 1] +
               invM[:, 2, 2] * x[:, 2])
    b[:, 0] = (invM[:, 0, 0] * y[:, 0] +
               invM[:, 1, 0] * y[:, 1] +
               invM[:, 2, 0] * y[:, 2])
    b[:, 1] = (invM[:, 0, 1] * y[:, 0] +
               invM[:, 1, 1] * y[:, 1] +
               invM[:, 2, 1] * y[:, 2])
    b[:, 2] = (invM[:, 0, 2] * y[:, 0] +
               invM[:, 1, 2] * y[:, 1] +
               invM[:, 2, 2] * y[:, 2])
    # Calculate the curvature from the fitted polygon.
    k = (2 * (a[:, 1] * b[:, 2] - a[:, 2] * b[:, 1]) /
         ((a[:, 1] ** 2 + b[:, 1] ** 2) ** (3 / 2)))
    return k
def inverse3(M):
r"""This function does inv(M), but then for an array of 3x3 matrices."""
adjM = np.zeros((M.shape[0], 3, 3))
adjM[:, 0, 0] = M[:, 4] * M[:, 8] - M[:, 7] * M[:, 5]
adjM[:, 0, 1] = -(M[:, 3] * M[:, 8] - M[:, 6] * M[:, 5])
adjM[:, 0, 2] = M[:, 3] * M[:, 7] - M[:, 6] * M[:, 4]
adjM[:, 1, 0] = -(M[:, 1] * M[:, 8] - M[:, 7] * M[:, 2])
adjM[:, 1, 1] = M[:, 0] * M[:, 8] - M[:, 6] * M[:, 2]
adjM[:, 1, 2] = -(M[:, 0] * M[:, 7] - M[:, 6] * M[:, 1])
adjM[:, 2, 0] = M[:, 1] * M[:, 5] - M[:, 4] * M[:, 2]
adjM[:, 2, 1] = -(M[:, 0] * M[:, 5] - M[:, 3] * M[:, 2])
adjM[:, 2, 2] = M[:, 0] * M[:, 4] - M[:, 3] * M[:, 1]
detM = (M[:, 0] * M[:, 4] * M[:, 8] - M[:, 0] * M[:, 7] * M[:, 5] -
M[:, 3] * M[:, 1] * M[:, 8] + M[:, 3] * M[:, 7] * M[:, 2] +
M[:, 6] * M[:, 1] * M[:, 5] - M[:, 6] * M[:, 4] * M[:, 2])
return adjM / detM[:, None, None]
if __name__ == '__main__':
    # Run the doctest examples embedded in the docstrings above (they
    # expect a 'testdata.npz' fixture next to this file).
    import doctest
    doctest.testmod()
| StarcoderdataPython |
def areSimilar(a, b):
    """Return True if b equals a, or can be obtained from a by swapping
    exactly one pair of elements."""
    if a == b:
        return True
    # A single swap preserves the multiset of elements.
    if sorted(a) != sorted(b):
        return False
    # With equal multisets, exactly two mismatched positions means the
    # two sequences differ by one swap.
    mismatches = [x for x, y in zip(a, b) if x != y]
    return len(mismatches) == 2
| StarcoderdataPython |
3360378 | <filename>tests/caos/internal/utils/test_yaml.py
import os
import sys
import shutil
from io import StringIO
import unittest
from caos._internal.utils.working_directory import get_current_dir
from caos._internal.utils.yaml import get_virtual_environment_from_yaml, get_dependencies_from_yaml, get_tasks_from_yaml
class TestYamlUtil(unittest.TestCase):
    """Tests for the caos.yml helpers: virtual-env name, dependencies, tasks."""
    def setUp(self) -> None:
        # Swap stdout/stderr for in-memory buffers so helper output does
        # not pollute the test run; restored in tearDown.
        self.new_stdout, self.old_stdout = StringIO(), sys.stdout
        self.new_stderr, self.old_stderr = StringIO(), sys.stderr
        sys.stdout, sys.stderr = self.new_stdout, self.new_stderr
        # Start from a clean slate if a previous run left a tmp dir behind.
        if os.path.isdir("tmp"):
            shutil.rmtree("tmp")
    def tearDown(self) -> None:
        # Restore the real stdout/stderr captured in setUp.
        sys.stdout, sys.stderr = self.old_stdout, self.old_stderr
    def test_yaml_util_get_virtual_environment_from_yaml_default(self):
        """A caos.yml naming the default 'venv' is read back verbatim."""
        test_caos_yaml = "virtual_environment: 'venv'"
        with open(file=os.path.abspath(get_current_dir() + "/" + "caos.yml"), mode="w") as file:
            file.write(test_caos_yaml)
        self.assertEqual("venv", get_virtual_environment_from_yaml())
    def test_yaml_util_get_virtual_environment_from_yaml_custom(self):
        """A custom virtual-environment name is read back verbatim."""
        test_caos_yaml = "virtual_environment: 'custom_venv'"
        with open(file=os.path.abspath(get_current_dir() + "/" + "caos.yml"), mode="w") as file:
            file.write(test_caos_yaml)
        self.assertEqual("custom_venv", get_virtual_environment_from_yaml())
    def test_yaml_util_get_dependencies_from_yaml(self):
        """Dependency specifiers are translated to pip-style requirements."""
        test_caos_yaml = """\
dependencies:
    dep1: "latest"
    dep2: "^1.5.0"
    dep3: "~2"
    dep4: "./dep4-1.0.0-py3-none-any.whl"
    dep5: "./dep5-1.0.0-py3-none-any.dist-info"
"""
        with open(file=os.path.abspath(get_current_dir()+"/"+"caos.yml"), mode="w") as file:
            file.write(test_caos_yaml)
        dependencies = get_dependencies_from_yaml()
        # 'latest' -> bare name, caret/tilde -> compatible-release pins,
        # local wheel/dist-info paths pass through untouched.
        expected_result = {
            "dep1": "dep1",
            "dep2": "~=1.5",
            "dep3": "~=2.0.0",
            "dep4": "./dep4-1.0.0-py3-none-any.whl",
            "dep5": "./dep5-1.0.0-py3-none-any.dist-info"
        }
        self.assertEqual(expected_result, dependencies)
    def test_yaml_util_get_tasks_from_yaml(self):
        """Task definitions come back as ordered lists of plain strings."""
        test_caos_yaml = """\
tasks:
    test:
        - "caos unittest ./"
    run:
        - "caos python ./main.py"
    test_and_run:
        - test
        - run
        - "echo 'Done'"
"""
        with open(file=os.path.abspath(get_current_dir()+"/"+"caos.yml"), mode="w") as file:
            file.write(test_caos_yaml)
        tasks = get_tasks_from_yaml()
        expected_result = {
            'test': ['caos unittest ./'],
            'run': ['caos python ./main.py'],
            'test_and_run': ['test', 'run', "echo 'Done'"]
        }
        self.assertEqual(expected_result, tasks)
if __name__ == '__main__':
    # Allow running this test module directly with the unittest runner.
    unittest.main()
| StarcoderdataPython |
1849685 | <reponame>abhabongse/aoc2020
from __future__ import annotations
import collections
import itertools
import os
import more_itertools
def main():
    """Solve both parts of AoC 2020 day 10 for the bundled input file."""
    here = os.path.dirname(os.path.abspath(__file__))
    adapters = read_input_files(os.path.join(here, 'input.txt'))

    # Part 1: adapter sanity check — product of the 1- and 3-jolt diff counts.
    diffs = diff_counts_in_jolt_chain(adapters, gap=3)
    print(diffs[1] * diffs[3])

    # Part 2: number of valid adapter configurations.
    print(count_valid_jolt_chains(adapters, gap=3))
def diff_counts_in_jolt_chain(adapters: list[int], gap: int) -> collections.Counter:
    """
    Counts the number of joltage differences between two consecutive power devices
    when all adapters are connected in chain from the charging outlet (0) to built-in adapter
    (assuming that the built-in adapter has the rating of the largest adapter plus gap).

    Raises ValueError if *adapters* is empty (max() of an empty sequence).
    """
    builtin_adapter = max(adapters) + gap
    # Materialize the chain so consecutive pairs can be formed with plain zip;
    # this removes the previous third-party more_itertools.windowed dependency.
    jolt_chain = [0] + sorted(adapters) + [builtin_adapter]
    return collections.Counter(hi - lo for lo, hi in zip(jolt_chain, jolt_chain[1:]))
def count_valid_jolt_chains(adapters: list[int], gap: int) -> int:
    """
    Count the number of configurations of functioning jolt chains
    from the charging outlet (0) to the built-in adapter
    (where two consecutive power devices must be within the given gap).

    Dynamic programming over joltages in ascending order: the number of ways
    to reach a joltage is the sum of the ways to reach each of the `gap`
    joltages directly below it. O(n*k) for n adapters and gap k.
    """
    builtin_adapter = max(adapters) + gap
    # Counter returns 0 for missing joltages, which covers unreachable values.
    ways = collections.Counter({0: 1})
    for jolt in itertools.chain([0], sorted(adapters), [builtin_adapter]):
        ways[jolt] += sum(ways[jolt - step] for step in range(1, gap + 1))
    return ways[builtin_adapter]
def read_input_files(input_file: str) -> list[int]:
    """
    Extracts a list of adapter joltages, one integer per line of *input_file*.
    """
    with open(input_file) as input_fobj:
        return [int(line) for line in input_fobj]
# Script entry point.
if __name__ == '__main__':
    main()
| StarcoderdataPython |
1754652 | <filename>vimms_gym/agents.py
import numpy as np
from vimms.Agent import AbstractAgent
class DataDependantAction:
    """A single acquisition decision.

    A freshly constructed instance represents an MS1 survey scan; calling
    target() promotes the action to an MS2 scan aimed at one precursor.
    """

    def __init__(self):
        # No precursor selected yet: default to a (valid) MS1 survey action.
        self.mz = None
        self.rt = None
        self.original_intensity = None
        self.scaled_intensity = None
        self.ms_level = 1
        self.idx = None
        self.valid = True

    def target(self, mz, rt, original_intensity, scaled_intensity, idx):
        """Aim this action at one precursor and mark it as an MS2 scan."""
        self.mz = mz
        self.rt = rt
        self.original_intensity = original_intensity
        self.scaled_intensity = scaled_intensity
        self.idx = idx
        self.ms_level = 2
        # An all-zero target (m/z, RT and raw intensity) encodes "no real
        # precursor", so the resulting MS2 action is flagged invalid.
        self.valid = not (np.isclose(mz, 0) and np.isclose(rt, 0)
                          and np.isclose(original_intensity, 0))
class DataDependantAcquisitionAgent(AbstractAgent):
    """Agent that turns DataDependantAction decisions into instrument scan requests.

    Tracks the most recent MS1 scan so MS2 (fragmentation) requests can name
    their precursor scan id. act() is a hook for subclasses to pick targets;
    this base implementation leaves the pending action untouched (MS1 survey).
    """
    def __init__(self, isolation_window):
        # isolation_window: window width handed to the controller for MS2 scans.
        super().__init__()
        self._initial_state()
        self.isolation_window = isolation_window
    def _initial_state(self):
        # Start with a pending MS1 survey action; no MS1 scan seen yet.
        self.target_ms1()
        self.last_ms1_scan = None
    def target_ms1(self):
        """Replace the pending action with a fresh MS1 survey action."""
        self.dda_action = DataDependantAction()
        return self.dda_action
    def target_ms2(self, mz, rt, original_intensity, scaled_intensity, idx):
        """Replace the pending action with an MS2 action aimed at (mz, rt)."""
        self.dda_action = DataDependantAction()
        self.dda_action.target(mz, rt, original_intensity, scaled_intensity, idx)
        return self.dda_action
    def next_tasks(self, scan_to_process, controller, current_task_id):
        """Build scan parameters for the pending action.

        Returns (new_tasks, current_task_id, next_processed_scan_id).
        """
        self.act(scan_to_process)
        if scan_to_process.ms_level == 1:
            self.last_ms1_scan = scan_to_process
        new_tasks = []
        if self.dda_action.ms_level == 1:
            scan_params = controller.get_ms1_scan_params()
        elif self.dda_action.ms_level == 2:
            # NOTE(review): if an MS2 action is ever pending before any MS1
            # scan has been processed, last_ms1_scan is None and this raises;
            # presumably an MS1 scan always arrives first — confirm.
            precursor_scan_id = self.last_ms1_scan.scan_id
            scan_params = controller.get_ms2_scan_params(
                self.dda_action.mz, self.dda_action.original_intensity, precursor_scan_id,
                self.isolation_window, 0, 0)
        # NOTE(review): indentation was lost in this copy; the increment and
        # append are reconstructed at method level (one task per call) —
        # confirm against the original source.
        current_task_id += 1
        new_tasks.append(scan_params)
        next_processed_scan_id = current_task_id
        return new_tasks, current_task_id, next_processed_scan_id
    def update(self, last_scan, controller):
        # Hook for subclasses; the base agent keeps no per-scan state here.
        pass
    def act(self, scan_to_process):
        # Hook for subclasses to choose MS1 vs MS2 before tasks are built.
        pass
    def reset(self):
        """Return the agent to its initial (MS1 survey, no last scan) state."""
        self._initial_state()
    def _get_mzs_rt_intensities(self, scan_to_process):
        """Unpack (mzs, rt, intensities) from a scan; arrays must align."""
        mzs = scan_to_process.mzs
        intensities = scan_to_process.intensities
        rt = scan_to_process.rt
        assert mzs.shape == intensities.shape
        return mzs, rt, intensities
| StarcoderdataPython |
9699563 | <filename>SimPPS/PPSPixelDigiProducer/test/testDigi3.py<gh_stars>1-10
import FWCore.ParameterSet.Config as cms

# CMSSW configuration for the PPS pixel digitizer analyzer ("testDigi").
process = cms.Process("testDigi")
# Specify the maximum events to simulate
process.maxEvents = cms.untracked.PSet(
    input = cms.untracked.int32(-1)
)
process.load('FWCore.MessageService.MessageLogger_cfi')
# Only report framework progress every 5000 events.
process.MessageLogger.cerr.FwkReport.reportEvery=cms.untracked.int32(5000)
################## STEP 1 - process.generator
# Read previously simulated events; alternative inputs kept commented out.
process.source = cms.Source("PoolSource",
    duplicateCheckMode = cms.untracked.string("noDuplicateCheck"),
    fileNames =
    cms.untracked.vstring(
#        'file:/tmp/andriusj/6EC8FCC8-E2A8-E411-9506-002590596468.root'
#        '/store/relval/CMSSW_7_4_0_pre6/RelValPhotonJets_Pt_10_13/GEN-SIM-RECO/MCRUN2_74_V1-v1/00000/6EC8FCC8-E2A8-E411-9506-002590596468.root'
#        'file:/home/ferro/ferroCMS/CMSSW_7_5_0/src/MYtest441_.root',
#        'file:/home/ferro/ferroCMS/CMSSW_7_5_0/src/MYtest442_.root',
#        'file:/home/ferro/ferroCMS/CMSSW_7_5_0/src/MYtest444_.root',
#        'file:/home/ferro/ferroCMS/CMSSW_7_5_0/src/MYtest445_.root',
#        'file:/home/ferro/ferroCMS/CMSSW_7_5_0/src/MYtest447_.root',
#        'file:/home/ferro/ferroCMS/CMSSW_7_5_0/src/MYtest448_.root',
        'file:/home/ferro/ferroCMS/CMSSW_7_5_0_myCTPPS/CMSSW_7_5_0/src/MYtest44_.root'
    )
)
# Use random number generator service
process.load("Configuration.TotemCommon.RandomNumbers_cfi")
# Fixed seed for the pixel digitizer so runs are reproducible.
process.RandomNumberGeneratorService.RPixDetDigitizer = cms.PSet(initialSeed =cms.untracked.uint32(137137))
process.load("TotemAnalysis.TotemNtuplizer.TotemNtuplizer_cfi")
process.TotemNtuplizer.outputFileName = "test.ntuple.root"
process.TotemNtuplizer.RawEventLabel = 'source'
process.TotemNtuplizer.RPReconstructedProtonCollectionLabel = cms.InputTag('RP220Reconst')
process.TotemNtuplizer.RPReconstructedProtonPairCollectionLabel = cms.InputTag('RP220Reconst')
process.TotemNtuplizer.RPMulFittedTrackCollectionLabel = cms.InputTag("RPMulTrackNonParallelCandCollFit")
process.TotemNtuplizer.includeDigi = cms.bool(True)
process.TotemNtuplizer.includePatterns = cms.bool(True)
# Analyzer consuming the digis produced by RPixDetDigitizer.
process.digiAnal = cms.EDAnalyzer("CTPPSPixelDigiAnalyzer",
    label=cms.untracked.string("RPixDetDigitizer"),
    Verbosity = cms.int32(0),
    RPixVerbosity = cms.int32(0),
    RPixActiveEdgeSmearing = cms.double(0.020),
    RPixActiveEdgePosition = cms.double(0.150)
)
# Single-step path: run only the digi analyzer.
process.p1 = cms.Path( process.digiAnal )
| StarcoderdataPython |
1861181 | import unittest
from src.aws.response_handler import ResponseHandler as AWSResponseHandler
from tests.common.constants import AWS_TEST_RESOURCES_LOCATION, \
TEST_CONFIDENCE_THRESHOLD, TRANSCRIBE_ITEMS_SUSPICIOUS_AWS, \
TRANSCRIBE_ITEMS_NOT_SUSPICIOUS_AWS
class TestAWSResponseHandler(unittest.TestCase):
    """Exercises AWSResponseHandler.extract_results on canned JSON fixtures."""

    @classmethod
    def setUpClass(cls) -> None:
        # One shared handler: extract_results is stateless per file.
        cls.response_handler = AWSResponseHandler(
            confidence_threshold=TEST_CONFIDENCE_THRESHOLD)

    def test_extract_results_with_suspicion(self):
        extracted = self.response_handler.extract_results(
            json_file=AWS_TEST_RESOURCES_LOCATION + 'suspicious.json')
        self.assertListEqual(extracted, TRANSCRIBE_ITEMS_SUSPICIOUS_AWS)

    def test_extract_results_without_suspicion(self):
        extracted = self.response_handler.extract_results(
            json_file=AWS_TEST_RESOURCES_LOCATION + 'not_suspicious.json')
        self.assertListEqual(extracted, TRANSCRIBE_ITEMS_NOT_SUSPICIOUS_AWS)
# Allow running this test module directly.
if __name__ == '__main__':
    unittest.main()
| StarcoderdataPython |
3510983 | <gh_stars>100-1000
from pygears.lib import reduce, drv, check
from pygears.typing import Queue, Uint
# Feed a queue of bits [0,1,0,1,0,1,0] plus an initial Uint[8] seed of 1 into
# a reduce that shifts the accumulator left and ORs in each bit. Starting
# from 1, the seven bits build 0b10101010 == 0xaa, which check() verifies.
(drv(t=Queue[Uint[8]], seq=[[0, 1, 0, 1, 0, 1, 0]]),
 drv(t=Uint[8], seq=[1])) \
    | reduce(f=lambda x, y: (x << 1) | y) \
    | check(ref=[0xaa])
| StarcoderdataPython |
1931179 | <reponame>xlab-si/iac-scan-runner
from typing import Optional
import iac_scan_runner.vars as env
from iac_scan_runner.check import Check
from iac_scan_runner.check_output import CheckOutput
from iac_scan_runner.check_target_entity_type import CheckTargetEntityType
from iac_scan_runner.utils import run_command
from pydantic import SecretStr
class ESLintCheck(Check):
    """Wrapper around the ESLint CLI for scanning JavaScript IaC projects."""

    def __init__(self):
        super().__init__("es-lint", "ESLint is a tool for identifying and reporting on patterns found in "
                                    "ECMAScript/JavaScript code", CheckTargetEntityType.iac)

    def configure(self, config_filename: Optional[str], secret: Optional[SecretStr]) -> CheckOutput:
        """Remember the ESLint config file name; a config file is mandatory."""
        if not config_filename:
            raise Exception(f'Check: {self.name} requires you to pass a configuration file.')
        self._config_filename = config_filename
        return CheckOutput(f'Check: {self.name} has been configured successfully.', 0)

    def run(self, directory: str) -> CheckOutput:
        """Run ESLint over the .js files in *directory*, honoring any stored config."""
        if self._config_filename:
            command = (f'{env.ES_LINT_CHECK_PATH} -c {env.CONFIG_DIR}/{self._config_filename} '
                       f'--no-error-on-unmatched-pattern --ext .js .')
        else:
            command = f'{env.ES_LINT_CHECK_PATH} --no-error-on-unmatched-pattern --ext .js .'
        return run_command(command, directory)
| StarcoderdataPython |
5001718 | <reponame>multimodalspectroscopy/hvapy
from .HVA import network_optim
| StarcoderdataPython |
3283361 | """
Code to run all the post-processing jobs for a forecast day.
It is designed only to run for a single day.
Testing on mac:
python driver_post.py -gtx cas6_v3_lo8b -r backfill -d 2019.07.04 -ro 2 -test True
To run for real on apogee
python driver_post.py -gtx cas6_v0_u0kb -r forecast -ro 0 < /dev/null > post.log &
NOTE: the "< /dev/null" appears to be necessary when running by hand and you stay
logged on because (maybe) the daymovie0 job is somehow expecting standard input,
and shen it doesn't get it the job is "Stopped" and you have to use "fg" to start it again.
"""
import sys
import argparse
from datetime import datetime, timedelta
import subprocess
from time import time, sleep
from lo_tools import Lfun, zfun
# Command-line interface for the post-processing driver.
parser = argparse.ArgumentParser()
# which run to use
parser.add_argument('-gtx', '--gtagex', type=str) # e.g. cas6_v3_l08b
parser.add_argument('-ro', '--roms_out_num', type=int, default=0) # 1 = Ldir['roms_out1'], etc.
# select time period and frequency
parser.add_argument('-r', '--run_type', type=str) # backfill or forecast
parser.add_argument('-d', '--date_string', default='', type=str) # e.g. 2019.07.04
parser.add_argument('-test', '--testing', default=False, type=Lfun.boolean_string)
# get the args and put into Ldir
args = parser.parse_args()
argsd = args.__dict__
for a in ['gtagex']:
    # Identity check with "is None" (was "== None", a non-idiomatic equality
    # test that can misbehave for objects overriding __eq__).
    if argsd[a] is None:
        # NOTE(review): the message names forcing_argfun.intro(); this looks
        # copy-pasted from another driver — confirm the intended wording.
        print('*** Missing required argument to forcing_argfun.intro(): ' + a)
        sys.exit()
# Split the gtagex tag into its three components, e.g. "cas6_v3_lo8b".
gridname, tag, ex_name = args.gtagex.split('_')
# get the dict Ldir
Ldir = Lfun.Lstart(gridname=gridname, tag=tag, ex_name=ex_name)
# add more entries to Ldir (command-line args that Lstart did not set)
for a in argsd.keys():
    if a not in Ldir.keys():
        Ldir[a] = argsd[a]
# set where to look for model output; 0 keeps the default roms_out location
if Ldir['roms_out_num'] == 0:
    pass
elif Ldir['roms_out_num'] > 0:
    Ldir['roms_out'] = Ldir['roms_out' + str(Ldir['roms_out_num'])]
# set day to process
if args.run_type == 'forecast':
    # Forecasts always process "today", formatted per the standard LO format.
    Ldir['date_string'] = datetime.now().strftime(Lfun.ds_fmt)
elif args.run_type == 'backfill':
    # NOTE(review): length check presumably validates a YYYY.MM.DD-style
    # string against the ds_fmt pattern length — confirm the +2 offset.
    if len(Ldir['date_string'])==len(Lfun.ds_fmt)+2:
        pass # assume a valid date was given
    else:
        print('Error: date_string needed for run_type = backfill')
        sys.exit()
else:
    print('Error: Unknown run_type')
    sys.exit()
print((' Post-processing %s for %s' % (Ldir['run_type'], Ldir['date_string'])).center(60,'-'))
# check that all history files are in place, waiting up to
# maxcount * sleeptime seconds before giving up.
maxcount=480
sleeptime=60
his_dir = Ldir['roms_out'] / Ldir['gtagex'] / ('f' + Ldir['date_string'])
if Ldir['run_type'] == 'forecast':
    ndays = Ldir['forecast_days']
else:
    ndays = 1
# One file per hour plus the initial one: ocean_his_0001.nc ... ocean_his_NNNN.nc
his_fn_list = []
for his_num in range(1, int(24*ndays) + 2):
    his_string = ('0000' + str(his_num))[-4:]
    his_fn_list.append(his_dir / ('ocean_his_' + his_string + '.nc'))
# Replaces the original flag-and-break loop (with its redundant
# "elif not his_fn.is_file()" branch) by a direct all() test; behavior is
# unchanged: retry until every file exists, exiting after maxcount attempts.
ntries = 0
while not all(his_fn.is_file() for his_fn in his_fn_list):
    ntries += 1
    if ntries >= maxcount:
        print('Never found all history files.')
        sys.exit()
    sleep(sleeptime)
sleep(60) # make sure all copying is able to finish
tt0 = time()
# loop over all jobs
if Ldir['testing'] == True:
    job_list = ['daymovie0']
else:
    #job_list = ['drifters0']
    job_list = ['split0', 'surface0', 'layers0', 'ubc0', 'sequim0', 'daymovie0', 'critfc0', 'drifters0', 'archive0']
for job in job_list:
    # make clean output directories (often just a place for Info)
    out_dir = Ldir['LOo'] / 'post' / Ldir['gtagex'] / ('f' + Ldir['date_string']) / job
    Lfun.make_dir(out_dir, clean=True)
    Lfun.make_dir(out_dir / 'Data')
    Lfun.make_dir(out_dir / 'Info')
    # Run each job's post_main.py as a subprocess, forwarding the CLI args.
    j_fn = Ldir['LO'] / 'post' / job / 'post_main.py'
    cmd_list = ['python3', str(j_fn),
                '-gtx', Ldir['gtagex'], '-ro', str(Ldir['roms_out_num']),
                '-r', args.run_type, '-d', Ldir['date_string'],
                '-job', job, '-test', str(args.testing)]
    proc = subprocess.Popen(cmd_list, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    stdout, stderr = proc.communicate()
    # Persist the job's stdout (and stderr, when any) under Info/.
    with open(out_dir / 'Info' / 'screen_output.txt', 'w') as fout:
        fout.write(stdout.decode())
    if len(stderr) > 0:
        with open(out_dir / 'Info' / 'subprocess_error.txt', 'w') as ffout:
            ffout.write(stderr.decode())
    # this screen output is intended to end up in the log that the cron job makes
    res_fn = out_dir / 'Info' / 'results.txt'
    if res_fn.is_file():
        with open(res_fn, 'r') as fout:
            for line in fout:
                print(line.replace('\n',''))
    else:
        print('ERROR: missing results.txt file')
    print('')
    sys.stdout.flush()
print('Total time for all post jobs = %0.1f sec' % (time()-tt0))
| StarcoderdataPython |
1838929 | # Copyright 2021 UW-IT, University of Washington
# SPDX-License-Identifier: Apache-2.0
""" This provides a management command to django's manage.py called
create_consumer that will generate a oauth key and secret based on the consumer
name.
"""
import hashlib
import random
import secrets
import string
import time
from optparse import make_option

from django.core.management.base import BaseCommand, CommandError
from oauth_provider.models import Consumer

from spotseeker_server.models import TrustedOAuthClient
class Command(BaseCommand):
    """Management command that registers a new OAuth consumer.

    Generates a random key/secret pair, optionally marks the consumer as
    trusted, and prints the credentials unless --silent was passed.
    """
    help = (
        "Creates a unique key and secret for clients "
        "connecting to the server"
    )

    def add_arguments(self, parser):
        parser.add_argument(
            "--name",
            dest="consumer_name",
            default=False,
            help="A name for the consumer",
        )
        parser.add_argument(
            "--trusted",
            dest="trusted",
            action="store_true",
            default=False,
            help="Makes this consumer trusted "
            "(Adds a TrustedOAuthClient for it)",
        )
        parser.add_argument(
            "--silent",
            dest="silent",
            action="store_true",
            default=False,
            help="With silent set, the command will generate no output",
        )

    def handle(self, *args, **options):
        if options["consumer_name"]:
            consumer_name = options["consumer_name"]
        else:
            consumer_name = input("Enter consumer name: ")

        # 40-hex-char consumer key. secrets.token_hex draws from the OS
        # CSPRNG; the previous sha1(random() + time()) scheme was predictable.
        key = secrets.token_hex(20)

        # django-oauth-plus now wants secrets to be 16 chars.
        # Use the secrets module rather than random, which is not suitable
        # for security-sensitive tokens.
        secret = "".join(
            secrets.choice(string.ascii_letters + string.digits)
            for _ in range(16)
        )

        consumer = Consumer.objects.create(
            name=consumer_name, key=key, secret=secret
        )

        if options["trusted"]:
            # The created TrustedOAuthClient row is all that matters; no need
            # to keep a reference to it.
            TrustedOAuthClient.objects.create(
                consumer=consumer,
                is_trusted=1,
                bypasses_user_authorization=False,
            )

        if not options["silent"]:
            self.stdout.write("Key: %s\n" % key)
            self.stdout.write("Secret: %s\n" % secret)
| StarcoderdataPython |
3280560 | from django.utils.translation import gettext_lazy as _
# Argentine CUIT tax id: known two-digit prefix, 8-digit body, check digit.
CUIT_REGEX = r'^(20|23|24|27|30|33|34)-[0-9]{8}-[0-9]$'
# Permissions codenames to create groups, assign, and test.
CAN_CLOSE_SPONSORING_CODENAME = 'close_sponsoring'
CAN_VIEW_EVENT_ORGANIZERS_CODENAME = 'view_event_organizers'
CAN_VIEW_ORGANIZERS_CODENAME = 'view_organizers'
CAN_VIEW_EVENTS_CODENAME = 'view_events'
CAN_VIEW_SPONSORS_CODENAME = 'view_sponsors'
CAN_VIEW_EXPENSES_CODENAME = 'view_expenses'
CAN_VIEW_PROVIDERS_CODENAME = 'view_providers'
CAN_SET_SPONSORS_ENABLED_CODENAME = 'set_sponsors_enabled'
CAN_SET_APPROVED_INVOICE_CODENAME = 'set_invoice_approved'
CAN_SET_COMPLETE_PAYMENT_CODENAME = 'set_invoice_complete_payment'
CAN_SET_PARTIAL_PAYMENT_CODENAME = 'set_invoice_partial_payment'
# Messages constants, to use on views and test. User-facing text is Spanish
# and wrapped in gettext_lazy for translation.
# NOTE(review): several constant NAMES below are misspelled (ORGANIZAER,
# NOTOFICATION, APPOVED); they are referenced elsewhere, so renaming them
# would be an interface change — left as-is.
MUST_BE_EVENT_ORGANIZAER_MESSAGE = _(
    'Para poder acceder a detalles del evento debe ser organizador del mismo.'
)
MUST_BE_ACCOUNT_OWNER_MESSAGE = _(
    'Para poder modificar los datos de la cuenta debe ser dueño de la misma.'
)
MUST_BE_ORGANIZER_MESSAGE = _(
    'Para realizar la acción requerida debe ser un organizador registrado.'
)
CANT_CHANGE_CLOSE_EVENT_MESSAGE = _(
    "No se puede modificar un evento cerrado. Pida a un administrador que vuelva "
    "a abrirlo, desde el administrador de eventos."
)
ORGANIZER_MAIL_NOTOFICATION_MESSAGE = _(
    'Se le envio un mail al usuario organizador para que pueda '
    'ingresar sus credenciales de autenticación'
)
DUPLICATED_SPONSOR_CATEGORY_MESSAGE = _(
    'Ya tiene registrada una categoria de sponsor con este '
    'nombre para el evento actual. '
    'Las categorias de sponsor para un evento deben ser únicas.'
)
MUST_BE_APPROVED_INVOICE_MESSAGE = _(
    'La factura debe estar aprobada para poder realizar la acción seleccionada'
)
MUST_EXISTS_SPONSOR_MESSAGE = _(
    'No se puede asociar patrocinios sin sponsors habilitados'
)
MUST_EXISTS_SPONSOR_CATEGORY_MESSAGE = _(
    'No se puede asociar patrocinios sin categorias de sponsor en el evento'
)
MUST_EXISTS_PROVIDERS_MESSAGE = _(
    'No se puede crear un gasto de proveedor sin antes dar de alta proveedores'
)
CANT_CHANGE_PROVIDER_EXPENSE_WITH_PAYMENT = _(
    'No se puede modificar un gasto con pago asociado al mismo'
)
INVOICE_APPOVED_MESSAGE = _('Factura aprobada exitosamente ')
INVOICE_SET_COMPLETE_PAYMENT_MESSAGE = _('Factura marcada como pago completo ')
INVOICE_SET_PARTIAL_PAYMENT_MESSAGE = _('Factura marcada como pago parcial ')
SPONSORING_SUCCESSFULLY_CLOSE_MESSAGE = _('Patrocinio cerrado exitosamente')
# Sponsoring/invoice states
SPONSOR_STATE_UNBILLED = _('no facturado')
SPONSOR_STATE_INVOICED = _('facturado')
SPONSOR_STATE_CHECKED = _('pendiente de pago')
SPONSOR_STATE_PARTIALLY_PAID = _('pago parcial')
SPONSOR_STATE_COMPLETELY_PAID = _('pago completo')
SPONSOR_STATE_CLOSED = _('cerrado')
# Image extensions accepted for upload: formats renderable by an <img> tag.
IMAGE_FORMATS = ['.jpeg', '.jpg', '.gif', '.png', '.svg', '.bmp']
# Page sizes for list views.
DEFAULT_PAGINATION = 15
BIG_PAGINATION = 20
188444 | <filename>mobile/tests/utils/test_cache_helper.py
from unittest.mock import MagicMock
from django.core.cache import cache
from django.test import override_settings
from common.tests.core import SimpleTestCase
from mobile.utils.cache_helper import CacheHelper, get_or_set
@override_settings(CACHES={
    'default': {
        'BACKEND': 'django.core.cache.backends.locmem.LocMemCache',
        'LOCATION': 'unique-snowflake',
    }
})
class CacheHelperTest(SimpleTestCase):
    """CacheHelper.get_or_set must only write when the key is absent."""

    def tearDown(self):
        # Isolate tests from one another: empty the local-memory cache.
        cache.clear()

    def test_get_or_set_when_already_have_value(self):
        cache.set('cache_key', 'other_value')
        CacheHelper.get_or_set('cache_key', 'value')
        self.assertEqual(cache.get('cache_key'), 'other_value')

    def test_get_or_set_when_not_set_yet(self):
        CacheHelper.get_or_set('cache_key', 'value')
        self.assertEqual(cache.get('cache_key'), 'value')
@override_settings(CACHES={
    'default': {
        'BACKEND': 'django.core.cache.backends.locmem.LocMemCache',
        'LOCATION': 'unique-snowflake',
    }
})
class GetOrSetDecoratorTest(SimpleTestCase):
    """The get_or_set decorator must prefer cached values over calling through."""

    def tearDown(self):
        # Isolate tests from one another: empty the local-memory cache.
        cache.clear()

    def test_get_or_set_when_already_have_value(self):
        cache.set('cache_key', 'other_value')
        decorated = get_or_set('cache_key')(MagicMock(return_value='value'))
        self.assertEqual(decorated(), 'other_value')

    def test_get_or_set_when_not_set_yet(self):
        decorated = get_or_set('cache_key')(MagicMock(return_value='value'))
        self.assertEqual(decorated(), 'value')
| StarcoderdataPython |
6500511 | <filename>imagenet-dataset-downloader.py
#!/usr/bin/env python
# TODO: find the proper way to acknowledge that this is based on https://github.com/itf/imagenet-download
import argparse
import urllib.request, urllib.error, urllib.parse
from socket import timeout as TimeoutError
from socket import error as SocketError
import time
import http.client
from ssl import CertificateError
import random
import os
import imghdr
import sys
# ImageNet API endpoints: full hyponym subtree, image URL list, and
# wnid -> words lookup. A wnid is appended directly to each query string.
WN_FULL_SUBTREE_WNIDS_URL = 'http://image-net.org/api/text/wordnet.structure.hyponym?full=1&wnid='
WN_IMG_LIST_URL = 'http://www.image-net.org/api/text/imagenet.synset.geturls?wnid='
WN_WNID_TO_WORD_URL = 'http://www.image-net.org/api/text/wordnet.synset.getwords?wnid='
class DownloadError(Exception):
    """Raised when a URL cannot be fetched or yields an invalid payload."""

    def __init__(self, message=""):
        # Forward the message to Exception so str(exc) and exc.args carry it
        # (previously the base class was never initialized with the message,
        # so str() of this error was always the empty string).
        super().__init__(message)
        self.message = message
def download(url, timeout=5, retry=3, sleep=0.8):
    """Download *url* and return the response body as bytes.

    Retries up to *retry* extra times on HTTP/SSL errors and on
    network/timeout errors, sleeping *sleep* seconds between attempts,
    then raises DownloadError.
    """
    count = 0
    while True:
        try:
            # Context manager guarantees the connection is closed even if
            # read() fails (the old code leaked it on a read error). The old
            # "f is None" check was dead: urlopen never returns None.
            with urllib.request.urlopen(url, timeout=timeout) as f:
                return f.read()
        except (urllib.error.HTTPError, http.client.HTTPException, CertificateError):
            count += 1
            if count > retry:
                # Previously raised with no message at all.
                raise DownloadError('Cannot open URL ' + url)
        except (urllib.error.URLError, TimeoutError, SocketError, IOError):
            count += 1
            if count > retry:
                raise DownloadError('failed to open ' + url + ' after ' + str(retry) + ' retries')
        # Back off before every retry (previously only network errors slept).
        time.sleep(sleep)
def get_url_request_list_function(request_url):
    """Build a helper that appends a wnid to *request_url*, downloads the
    response, and returns its whitespace-separated tokens as a list."""
    def get_url_request_list(wnid, timeout=5, retry=3):
        response = download(request_url + wnid, timeout, retry).decode()
        return response.split()
    return get_url_request_list


# Pre-built request helpers for the two ImageNet API endpoints used below.
get_full_subtree_wnid = get_url_request_list_function(WN_FULL_SUBTREE_WNIDS_URL)
get_image_urls = get_url_request_list_function(WN_IMG_LIST_URL)
def get_words_wnid(wnid, timeout=5, retry=3):
    """Return the stripped words text for *wnid* from the ImageNet API."""
    return download(WN_WNID_TO_WORD_URL + wnid, timeout, retry).decode().strip()
def make_directory(path):
    """Create *path* (with parents) unless it already exists as a directory."""
    os.makedirs(path, exist_ok=True)
# TODO: raise error if directory already exists and isn't empty
# with new version of fastai, I don't need to split train/valid/test
def set_up_directories(rootdir, classname):
    """rootdir is the root directory,
    classname is the wnid or human-readable name,
    directories will be rootdir/train/classname and rootdir/valid/classname.
    Creates directories if they don't exist;
    throws an error if classname directories exist and aren't empty.

    Returns the (train_path, valid_path) pair.
    """
    # This was an empty stub; implement the contract its docstring documents.
    # TODO: might be nice if train and valid weren't hard-coded
    paths = []
    for split in ("train", "valid"):
        split_dir = os.path.join(rootdir, split, classname)
        if os.path.isdir(split_dir) and os.listdir(split_dir):
            raise FileExistsError("directory exists and is not empty: " + split_dir)
        os.makedirs(split_dir, exist_ok=True)
        paths.append(split_dir)
    return tuple(paths)
def set_up_directory_simple(rootdir, classname):
    """rootdir is the root directory,
    classname is the wnid or human-readable name,
    directory will be rootdir/classname
    Creates directories if they don't exist;
    throws an error if classname directories exist and aren't empty."""
    target = os.path.join(rootdir, classname)
    # Inlined make_directory(): create the tree only when it is missing.
    if not os.path.isdir(target):
        os.makedirs(target)
    return target
# Magic-byte signatures for the image formats we keep. Replaces the
# deprecated imghdr module (removed in Python 3.13).
_IMAGE_SIGNATURES = (
    (b'\xff\xd8\xff', 'jpg'),
    (b'\x89PNG\r\n\x1a\n', 'png'),
    (b'GIF87a', 'gif'),
    (b'GIF89a', 'gif'),
    (b'BM', 'bmp'),
    (b'II*\x00', 'tiff'),
    (b'MM\x00*', 'tiff'),
)


def _image_extension(data):
    """Return a file extension for *data* based on its magic bytes, or None.

    Also recognizes WEBP (RIFF....WEBP). Narrower than the old imghdr check:
    exotic formats (pbm/xbm/rast/...) are now rejected.
    """
    for signature, extension in _IMAGE_SIGNATURES:
        if data.startswith(signature):
            return extension
    if data[:4] == b'RIFF' and data[8:12] == b'WEBP':
        return 'webp'
    return None


def download_images(image_url_list, n_images, dir_path, min_size, timeout, retry):
    """Download up to *n_images* images from *image_url_list* into *dir_path*.

    Payloads that are not recognizable images, or smaller than *min_size*
    bytes, are skipped; failed downloads are logged and skipped. Files are
    named image_<count>.<ext>.
    """
    image_count = 0
    for url in image_url_list:
        if image_count >= n_images:
            break
        try:
            image = download(url, timeout, retry)
            extension = _image_extension(image)
            if extension is None:
                # Previously a bare `except:` around imghdr swallowed every
                # exception type; now only DownloadError drives this flow.
                raise DownloadError('payload is not a recognized image')
            if sys.getsizeof(image) > min_size:
                image_name = "image_" + str(image_count) + '.' + extension
                image_path = os.path.join(dir_path, image_name)
                with open(image_path, 'wb') as image_file:
                    image_file.write(image)
                image_count += 1
                print(f"downloaded {url} as {image_path}")
        except DownloadError as e:
            # Include the reason (the old message ended at the colon).
            print('Could not download ' + url + ' : ' + e.message)
def main(wnid,
         out_dir,
         nimages,
         timeout,
         retry,
         human_readable,
         min_size):
    """Collect image URLs for *wnid* and its full hyponym subtree, shuffle
    them, and download up to *nimages* into a class folder under *out_dir*."""
    # Subtree wnids come back prefixed with '-' except the root; strip it.
    # TODO: wrap this in a try/except with a good error message
    subtree_wnids = get_full_subtree_wnid(wnid, timeout, retry)
    subtree_wnids = subtree_wnids[:1] + [s[1:] for s in subtree_wnids[1:]]
    print("subtree_wnids: ", subtree_wnids)

    # Gather the image URL lists of every wnid in the subtree.
    all_urls = []
    for child_wnid in subtree_wnids:
        # TODO: wrap this in a try/except with a good error message
        all_urls.extend(get_image_urls(child_wnid, timeout, retry))
    print(len(all_urls), "image urls retrieved. Randomizing...")

    # Shuffle so the downloaded subset is a random sample of the subtree.
    random.shuffle(all_urls)

    # Folder name: the wnid itself, or its human-readable words on request.
    dir_name = get_words_wnid(wnid, timeout, retry) if human_readable else wnid
    dir_path = set_up_directory_simple(out_dir, dir_name)
    download_images(all_urls, nimages, dir_path, min_size, timeout, retry)
# Command-line entry point: parse downloader options and run main().
if __name__ == '__main__':
    p = argparse.ArgumentParser()
    p.add_argument('wnid', help='Imagenet wnid, example n03489162')
    p.add_argument('outdir', help='Output directory')
    p.add_argument('--nimages', '-n', type=int, default=20,
                   metavar='N_IMAGES',
                   help='Number of images per class to download')
    #p.add_argument('--valid', type=float, default=0.2,
    #               help='Percentage of images in validation set')
    p.add_argument('--timeout', '-t', type=float, default=15,
                   help='Timeout per request in seconds')
    p.add_argument('--retry', '-r', type=int, default=3,
                   help='Max count of retry for each request')
    p.add_argument('--humanreadable', '-H', action='store_true',
                   help='Makes the folders human readable')
    p.add_argument('--minsize', '-m', type=float, default=7000,
                   help='Min size of the images in bytes')
    args = p.parse_args()
    main(wnid = args.wnid,
         out_dir = args.outdir,
         nimages = args.nimages,
         timeout = args.timeout,
         retry = args.retry,
         human_readable = args.humanreadable,
         min_size = args.minsize)
| StarcoderdataPython |
1863921 | #/usr/bin#/python
import boto3
from elasticsearch import Elasticsearch
from datetime import datetime, timezone, timedelta
# Design note (translated): each CSV line was meant to be sent to an SQS queue.
def open_file(file_path):
    """Return all lines of *file_path* (UTF-8, tolerating a leading BOM)."""
    # "r+" kept from the original code even though the file is only read.
    with open(file_path, "r+", encoding='utf-8-sig') as data_file:
        return data_file.readlines()
# Design note (translated): parse each line in a Lambda, with a DLQ for errors.
def csv_parser(lines):
    """Parse semicolon-separated *lines* (first line is the header) into dicts.

    Keys and values are stripped of double quotes and surrounding whitespace;
    any column whose header contains "keywords" is split on commas into a
    list. Rows whose field count differs from the header are skipped
    (previously such rows produced spurious empty dicts in the output).
    """
    header = [(col.replace('"', "")).strip() for col in lines[0].split(";")]
    parsed_rows = []
    for raw_line in lines[1:]:
        fields = raw_line.split(";")
        if len(fields) != len(header):
            # Malformed row; skip it entirely.
            continue
        row = {}
        for key, value in zip(header, fields):
            value = (value.replace('"', "")).strip()
            if "keywords" in key:
                row[key] = value.split(",")
            else:
                row[key] = value
        parsed_rows.append(row)
    return parsed_rows
# Design note (translated): another Lambda to perform the text enrichment.
def comprehend_enrich_text(text):
    """Extract key phrases from text["descricaoTipo"] via AWS Comprehend (pt).

    Returns a list of phrase strings, or None when the record carries no
    description.
    """
    client = boto3.client('comprehend', region_name="us-east-1")
    if not text.get("descricaoTipo"):
        return
    response = client.detect_key_phrases(
        Text=text.get("descricaoTipo"),
        LanguageCode='pt'
    )
    # Keep just the phrase strings from Comprehend's response entries.
    return [key_phrase.get("Text") for key_phrase in response["KeyPhrases"]]
def feed_data_into_es(es, data):
    """Index one document into the 'proposicoes' index, keyed by data['id']."""
    result = es.index(index="proposicoes", doc_type='_doc', id=data.get("id"), body=data)
    print(result)
def main():
    """Read the proposals CSV, enrich each row with Comprehend key phrases
    and a UTC timestamp, then index it into Elasticsearch."""
    file_path = "data/proposicoes-2020.csv"
    es = Elasticsearch(
        [{'host': 'search-es-for-demo-o7wbu5722qanrtbbula3lglv4e.us-east-1.es.amazonaws.com',
          'port': 443}],
        use_ssl=True,
        verify_certs=True,
    )
    for record in csv_parser(open_file(file_path)):
        stamp = datetime.utcnow()
        if not record:
            # Empty rows (e.g. from malformed CSV lines) are skipped.
            continue
        print("-" * 20)
        record["key_phrases"] = comprehend_enrich_text(record)
        record["feed_date"] = stamp
        feed_data_into_es(es, record)
if __name__ == "__main__":
main() | StarcoderdataPython |
6645014 | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Sat Jan 11 19:22:53 2020
@author: mavroudo
"""
# NOTE(review): this file appears to have lost its original indentation; the
# nesting below is reconstructed from statement order and variable usage —
# confirm against version control before relying on it.
import outlierDistanceActivities,outlierPairsCurveFitting, outlierPairWise
logFile="../BPI_Challenge_2012.xes"
# Grid search over neighbor counts and thresholds, comparing the outlier
# pairs found by the distance method against those of the distribution
# (curve-fitting) method; hits/misses per combination go to tests.txt.
with open("tests.txt","w") as f:
    f.write("Neighbors Threshold Hits Misses\n")
    #for neighbors in [20,50,100,250,1000]:
    with open("timeDistanceTest.txt","w") as timeTest:
        for neighbors in [10,25,50,75,100,150,200]:
            distanceOutlierPairsPre,t1=outlierDistanceActivities.main(logFile,neighbors,stdDeviationTimes=4,threshold=None)
            with open("timeDistributionTest.txt","w") as dtimeTest:
                for threshold in [0.1,0.075,0.05,0.025,0.01,0.0075,0.005,0.0025,0.001]:
                    timeTest.write(str(neighbors)+","+str(t1)+"\n")
                    print(neighbors,threshold)
                    #[traceIndex,activity1,activity2,time1,time2,over/under/ok,over/under/ok,eventIndexInTrace1]
                    # NOTE: "time" here shadows nothing imported, but the name
                    # holds the method's elapsed time, not the time module.
                    distributionOutlierPairs,time=outlierPairsCurveFitting.main(logFile,threshold)
                    dtimeTest.write(str(threshold)+","+str(time)+"\n")
                    #[traceIndex,activity1,activity2,scaledTime1,scaledTime2,eventIndexInTrace1,score]
                    distanceOutlierPairs=distanceOutlierPairsPre[:len(distributionOutlierPairs)]
                    hits=[]
                    miss=[]
                    for do in distanceOutlierPairs:
                        flag=False
                        for io in distributionOutlierPairs:
                            # Same trace, activity pair and event index -> hit.
                            if do[0]==io[0] and do[1]==io[1] and do[2]==io[2] and do[7]==io[7]:
                                hits.append(do)
                                flag=True
                                break
                        if not flag:
                            miss.append(do)
                    f.write(str(neighbors)+" "+str(threshold)+" "+str(len(hits))+" "+str(len(miss))+"\n")
# Ad-hoc single runs kept from interactive experimentation.
neighbors=10
distanceOutlierPairsPre,t1=outlierDistanceActivities.main(logFile,neighbors,stdDeviationTimes=4,threshold=None)
neighbors=50
distanceOutlierPairsPre50,t50=outlierDistanceActivities.main(logFile,neighbors,stdDeviationTimes=4,threshold=None)
neighbors=10
logFile="../BPI_Challenge_2012.xes"
import outlierDistanceActivities,outlierPairsCurveFitting
# Timing of the distance method across neighbor counts; per-run outliers are
# appended to outliersDistanceEvents.txt.
# NOTE(review): indentation reconstructed — confirm nesting of the inner
# append-mode open against the original source.
n=[20,50,100,250,500,750,1000]
d1=[outlierDistanceActivities.main(logFile,neighbors,stdDeviationTimes=4,threshold=None) for neighbors in n]
with open("outlierEventTime.txt","w") as f:
    for index,x in enumerate(d1):
        f.write(str(n[index])+","+str(x[1])+"\n")
        with open("outliersDistanceEvents.txt","a+") as outFile:
            outFile.write(str(n[index])+"\n")
            for outlier in x[0]:
                outFile.write(str(outlier)+"\n")
# Timing of the pair-wise (R-tree) method on the same neighbor counts.
d=[outlierPairWise.main(logFile,neighbors,2524) for neighbors in n]
with open("rTreeDistance.txt","w") as f:
    for index,x in enumerate(d):
        f.write(str(n[index])+","+str(x[1])+","+str(x[2])+"\n")
#fig which is in the center of the distribution
import random
# Synthetic example: two clusters (0-40 and 80-120) of 30 points each, plus a
# single point at 60 sitting between them.
x=[random.randint(0,40) for i in range(30)]
y=[random.randint(80,120) for _ in range(30)]
k=x+[60]+y
#thresholds = [0.1,0.05,0.02,0.01,0.0075,0.005,0.0025,0.001]
thresholds=[0.0025,0.005,0.0075,0.01,0.0125,0.0150,0.0175,0.02]
# Timing of the distribution (curve-fitting) method per threshold; per-run
# outliers are appended to outliersDistribution.txt.
d=[outlierPairsCurveFitting.main(logFile,threshold) for threshold in thresholds]
with open("distributionTimes.txt","w") as f:
    for index,x in enumerate(d):
        f.write(str(thresholds[index])+","+str(x[1])+"\n")
        with open("outliersDistribution.txt","a+") as outFile:
            # BUG FIX: was `threshold[index]` — `threshold` is not defined at
            # this scope (comprehension variables do not leak in Python 3),
            # which raised NameError; the list is named `thresholds`.
            outFile.write(str(thresholds[index])+"\n")
            for outlier in x[0]:
                outFile.write(str(outlier)+"\n")
#compare the number of outliers in d (distributions) and d1 (distance)
n=[100,250,500,750,1000,1500,2000]
d1=[outlierDistanceActivities.main(logFile,neighbors,stdDeviationTimes=4,threshold=None) for neighbors in n]
import matplotlib.pyplot as plt
import numpy as np
thresholds=[0.001,0.0025,0.005,0.0075,0.01,0.0125,0.0150,0.0175,0.02]
d=[outlierPairsCurveFitting.main(logFile,threshold) for threshold in thresholds]
# Number of outlier pairs found by each method per parameter value.
d_length=[len(x[0]) for x in d]
d1_length=[len(x[0]) for x in d1]
# NOTE(review): yaxis is computed but never used below.
yaxis=np.arange(min(min(d_length),min(d1_length)),max(max(d1_length),max(d_length))+200,200)
minimum=min(min(d_length),min(d1_length))
maximum=max(max(d1_length),max(d_length))
# Overlay the two curves on twin x-axes sharing one y-range: neighbors on
# the bottom axis (distance method), thresholds on the top (distribution).
fig=plt.figure()
ax=fig.add_subplot(111, label="distance")
ax2=fig.add_subplot(111, label="distribution", frame_on=False)
ax.plot(n, d1_length, color="C0")
ax.set_xlabel("Number of Neighbors", color="C0")
ax.set_ylabel("Number of outliers", color="C0")
ax.set_ylim([minimum,maximum])
ax.tick_params(axis='x', colors="C0")
#ax.tick_params(axis='y', colors="C0")
ax2.plot(thresholds, d_length, color="C1")
ax2.xaxis.tick_top()
ax2.set_xlabel('Threshold', color="C1")
ax2.xaxis.set_label_position('top')
ax2.tick_params(axis='x', colors="C1")
ax2.set_ylim([minimum,maximum])
plt.grid(True)
plt.savefig("numberofoutliers.png")
plt.show()
k=outlierPairsCurveFitting.main(logFile,0.003)
k1=outlierDistanceActivities.main(logFile,300,stdDeviationTimes=4,threshold=None)
hits=[]
miss=[]
for distrO in k[0]:
flag=False
for distanceO in k1[0]:
if distrO[0]==distanceO[0] and distrO[1]==distanceO[1] and distrO[2]==distanceO[2] and distrO[7]==distanceO[7]:
hits.append(distrO)
flag=True
break
if not flag:
miss.append(distrO)
| StarcoderdataPython |
3456347 | <reponame>Aledan862/core<gh_stars>0
"""The Smart Life Comfort integration."""
import asyncio
import logging
import socket
import requests
import sys
import homeassistant.helpers.config_validation as cv
import voluptuous as vol
from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant
from homeassistant.helpers.discovery import async_load_platform
# from homeassistant.helpers.entity_registry import (
# async_get_registry,
# EntityRegistry
# )
from .const import (
DOMAIN,
DEFAULT_PORT,
EVENT,
SLC_START,
SLC_SYNC
)
from homeassistant.const import (
CONF_HOST,
# CONF_PASSWORD,
CONF_PORT,
CONF_TYPE,
# CONF_USERNAME,
# EVENT_COMPONENT_LOADED,
EVENT_HOMEASSISTANT_START,
EVENT_HOMEASSISTANT_STOP,
)
# YAML configuration: the domain requires `host` (local bind address) and
# `controller` (SLC gateway address); `port` is optional.
CONFIG_SCHEMA = vol.Schema(
    {
        DOMAIN: vol.Schema(
            {
                vol.Required(CONF_HOST): cv.string,
                vol.Required("controller"): cv.string,
                vol.Optional(CONF_PORT, default=DEFAULT_PORT): cv.port,
            }
        ),
    },
    extra=vol.ALLOW_EXTRA,
)
# TODO List the platforms that you want to support.
# For your initial PR, limit it to 1 platform.
# PLATFORMS = ["light", "switch", "sensor"]
PLATFORMS = ["switch", "sensor"]
_LOGGER = logging.getLogger(__name__)
async def async_setup(hass: HomeAssistant, config: dict):
    """Set up the Smart Life Comfort component.

    Probes the controller over HTTP, publishes the domain config through
    ``hass.data``, loads the platforms, then binds the UDP client and wires
    its events onto the Home Assistant event bus.  Returns True only when
    both the HTTP probe and the UDP listener initialisation succeed.
    """
    # Data that you want to share with your platforms
    host = config[DOMAIN][CONF_HOST]
    controllerip = config[DOMAIN]["controller"]
    port = config[DOMAIN][CONF_PORT]
    _LOGGER.info("SLC Host: %s", controllerip)
    hostname = socket.gethostname()
    ip_addr = socket.gethostbyname(hostname)
    _LOGGER.info("Your Computer Name is:%s", hostname)
    _LOGGER.info("Your Computer IP Address is:%s", ip_addr)
    res = False
    try:
        # Cheap reachability probe before committing to platform setup.
        url = "http://" + str(controllerip) + ":" + str(port)
        _LOGGER.debug("url: %s", url)
        response_code = requests.get(url).status_code
        _LOGGER.debug("response_code: %s", response_code)
        if response_code == 200 or response_code == "200":
            hass.data[DOMAIN] = config[DOMAIN]
            _LOGGER.info("SLC platform started!")
            res = True
            hass.bus.async_fire(DOMAIN, {CONF_TYPE: "loaded"})
            for platform in PLATFORMS:
                _LOGGER.debug("starting SLC %s...", platform)
                hass.async_create_task(
                    async_load_platform(hass, platform, DOMAIN, {}, config)
                )
        else:
            _LOGGER.error("unable to connect to LoxoneRio")
    except Exception as err:  # was a bare `except:`; keep HA alive but log the cause
        _LOGGER.error(err)
        return False

    slc = SLCclient(host=host, controllerip=controllerip)

    async def message_callback(event_type, message):
        # Fan parsed UDP events out onto the HA event bus.
        if event_type == 2:
            hass.bus.async_fire(SLC_START, message)
        elif event_type == 3:
            hass.bus.async_fire(SLC_SYNC, message)
        elif event_type == 4:
            hass.bus.async_fire(EVENT, message)

    async def connect_handler(event):
        # Answer the controller's CONNECTING handshake.
        if event.data["State"] == "CONNECTING":
            await slc.connect(event.data)

    async def start_slc_rio(event):
        await slc.start()

    async def stop_slc_rio(event):
        _ = await slc.stop()
        _LOGGER.debug(_)

    res = False
    try:
        res = await slc.async_init()
    except ConnectionError:
        _LOGGER.error("Connection Error")
    if res is True:
        slc.message_call_back = message_callback
        hass.bus.async_listen(SLC_START, connect_handler)
        hass.bus.async_listen_once(EVENT_HOMEASSISTANT_START, start_slc_rio)
        hass.bus.async_listen_once(EVENT_HOMEASSISTANT_STOP, stop_slc_rio)
    else:
        res = False
        _LOGGER.error("Error")  # was logged at INFO level despite being a failure
    return res
async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry):
    """Set up Smart Life Comfort from a config entry."""
    # Forward the entry to every supported platform; each setup runs as its
    # own Home Assistant task.
    for platform in PLATFORMS:
        forward = hass.config_entries.async_forward_entry_setup(entry, platform)
        hass.async_create_task(forward)
    return True
async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry):
    """Unload a config entry."""
    # Ask every platform to unload; the entry is only dropped from
    # hass.data when all of them report success.
    unload_tasks = [
        hass.config_entries.async_forward_entry_unload(entry, platform)
        for platform in PLATFORMS
    ]
    unload_ok = all(await asyncio.gather(*unload_tasks))
    if unload_ok:
        hass.data[DOMAIN].pop(entry.entry_id)
    return unload_ok
class SLCLink:
    """Fire-and-forget UDP link to the SLC gateway.

    The gateway address is kept on the class, so every instance shares it.
    """

    SOCKET_TIMEOUT = 2.0
    RX_PORT = 5555
    TX_PORT = 4445
    link_ip = ""

    def __init__(self, link_ip=None):
        # Remember the gateway address class-wide when one is supplied.
        if link_ip is not None:
            SLCLink.link_ip = link_ip

    def send_not_reliable_message(self, msg):
        """Send *msg* once over UDP; no delivery guarantee."""
        udp_socket = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)  # Internet / UDP
        udp_socket.sendto(msg.encode(), (SLCLink.link_ip, SLCLink.TX_PORT))
        _LOGGER.debug("Send data: %s", msg)
        return True
class SLCclient:
    """Asyncio UDP client for the SLC controller.

    Datagrams received on ``host:rxport`` are parsed into
    ``(event_type, payload)`` pairs and handed to ``message_call_back``;
    outgoing messages are sent to ``controllerip:txport``.
    """

    def __init__(self,
                 host="192.168.100.158",
                 port="5555",
                 controllerip="192.168.100.147"):
        self._host = host
        self._rxport = port
        self._txport = 4445
        self._controllerip = controllerip
        self.message_call_back = None  # async callable(event_type, payload); set by async_setup
        self._pending = []
        self.connect_retries = 10
        self.connect_delay = 30
        self.state = "CLOSED"  # CLOSED -> CONNECTED -> STOPPING
        self.stream = None  # asyncio_dgram stream, bound in async_init()

    async def async_init(self):
        # Bind the UDP listener; returns True once the socket is up.
        import asyncio_dgram
        self.stream = await asyncio_dgram.bind((self._host, self._rxport))
        _LOGGER.debug(f"Serving on {self.stream.sockname}")
        self.state = "CONNECTED"
        return True

    async def slc_listen(self, stream):
        # Receive loop: parse each datagram and forward non-empty payloads.
        # NOTE(review): the `stream` argument is unused — the loop reads
        # self.stream instead; confirm whether that is intentional.
        try:
            while True:
                data, remote_addr = await self.stream.recv()
                _LOGGER.debug(f"Echoing {data.decode()!r}")
                event_type, parsed_data = await self.parse_slc_data(data.decode())
                _LOGGER.debug(f"event_type {event_type}")
                _LOGGER.debug(f"parsed_data {parsed_data}")
                if self.message_call_back is not None:
                    if parsed_data != {}:
                        await self.message_call_back(event_type, parsed_data)
                await asyncio.sleep(0)
        except:
            # NOTE(review): bare except silently ends the listener on any
            # error (including CancelledError) — consider logging here.
            pass

    async def parse_slc_data(self, data_string: str):
        """Map a raw datagram to ``(event_type, payload)``.

        2 = CONNECTING handshake, 3 = STARTING, 4 = "Channel:Number:Value"
        state update.
        """
        event_dict = {}
        keys = ["Channel", "Number", "Value"]
        if data_string.strip().upper() == "CONNECTING":
            event_dict = {"State" : "CONNECTING"}
            event_type = 2
        elif data_string.strip().upper() == "STARTING":
            event_dict = {"State" : "STARTING"}
            event_type = 3
        else:
            data_string = data_string.strip().split(':')
            if len(data_string) > 1:
                event_dict = dict(zip(keys, data_string))
                event_type = 4
        # NOTE(review): a payload with no ':' appears to leave event_type
        # unbound and would raise UnboundLocalError here — TODO confirm.
        return (event_type, event_dict)

    async def keep_alive(self, future, interval_seconds):
        # Poll until the listener task finishes.
        while not future.done():
            # print("waiting...")
            await asyncio.sleep(interval_seconds)
        print("done!")

    async def start(self):
        # Run the listener plus a watchdog until the listener stops.
        server_task = asyncio.create_task(self.slc_listen(self.stream))
        keep_alive_task = asyncio.create_task(self.keep_alive(server_task, 1.0))
        await asyncio.wait([server_task, keep_alive_task])
        # self._pending.append(consumer_task)
        # for task in pending:
        #     task.cancel()
        # if self.state != "STOPPING":
        #     self.state == "CONNECTING"
        #     self._pending = []
        #     for i in range(self.connect_retries):
        #         _LOGGER.debug("reconnect: {} from {}".format(i + 1, self.connect_retries))
        #         await self.stop()
        #         await asyncio.sleep(self.connect_delay)
        #         res = await self.reconnect()
        #         if res is True:
        #             await self.start()
        #             break

    async def reconnect(self):
        return await self.async_init()

    async def connect(self, data):
        # Acknowledge the controller's CONNECTING handshake.
        self.send_not_reliable_message_to("Connected#")

    async def stop(self):
        # Returns 1 on clean shutdown, -1 on failure.
        try:
            self.state = "STOPPING"
            self.stream.close()
            return 1
        except:
            return -1

    # methods
    def send_not_reliable_message_to(self, msg):
        """Send *msg* once over UDP to the controller; no delivery guarantee."""
        sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)  # Internet # UDP
        sock.sendto(msg.encode(), (self._controllerip, self._txport))
        _LOGGER.info(msg)
        return True
| StarcoderdataPython |
3503152 | """
Login/Logout APIs
"""
from flask import request, jsonify, abort
from flask_jwt_extended import jwt_required
from .. import app
from ..auth import authenticate, get_user
@app.route('/api/login', methods=['POST'])
def login():
    """
    Authenticate user and return token.

    Expects a JSON body with ``username`` and ``password``.
    Responds 400 when either field (or the JSON body) is missing and
    403 when the credentials are rejected.
    """
    # silent=True: a missing/malformed JSON body yields None instead of a
    # 500; fall back to an empty dict so the field checks below work.
    data = request.get_json(silent=True) or {}
    if 'username' in data and 'password' in data:
        access_token = authenticate(data['username'], data['password'])
        if access_token is not None:
            # Security fix: the token was previously printed to stdout —
            # never log credentials.
            return jsonify({'access_token': access_token})
        abort(403)
    abort(400)
@app.route('/api/auth', methods=['GET'])
@jwt_required
def authcheck():
    """Report the username bound to the presented JWT (auth smoke test)."""
    current = get_user()
    return jsonify({'current_identity': current.username})
| StarcoderdataPython |
4910357 | <filename>io_scene_vrm/importer/gltf2_addon_importer_user_extension.py<gh_stars>10-100
import secrets
import string
from collections import abc
from typing import Any, Optional
import bpy
class Gltf2AddonImporterUserExtension:
    """Importer hook that tags each imported Blender image with the index of
    the glTF image it came from, keyed by the current import session id."""

    # Session id of the in-flight import; class-wide, None when no import
    # is running.
    __current_import_id: Optional[str] = None

    @classmethod
    def update_current_import_id(cls) -> str:
        """Generate, store and return a fresh random session id."""
        import_id = "BlenderVrmAddonImport" + (
            "".join(secrets.choice(string.digits) for _ in range(10))
        )
        cls.__current_import_id = import_id
        return import_id

    @classmethod
    def clear_current_import_id(cls) -> None:
        cls.__current_import_id = None

    # https://github.com/KhronosGroup/glTF-Blender-IO/blob/6f9d0d9fc1bb30e2b0bb019342ffe86bd67358fc/addons/io_scene_gltf2/blender/imp/gltf2_blender_image.py#L51
    def gather_import_image_after_hook(
        self, img: Any, blender_image: Any, gltf_importer: Any
    ) -> None:
        # Do nothing unless an import session is active.
        if self.__current_import_id is None:
            return
        # Defensive checks: gltf_importer comes from the glTF-Blender-IO
        # addon, so validate its shape before indexing into it.
        if (
            not hasattr(gltf_importer, "data")
            or not hasattr(gltf_importer.data, "images")
            or not isinstance(gltf_importer.data.images, abc.Sequence)
        ):
            print(
                f"WARNING: gather_import_image_after_hook: gltf_importer is unexpected structure: {gltf_importer}"
            )
            return
        if img not in gltf_importer.data.images:
            print(
                f"WARNING: gather_import_image_after_hook: {img} not in {gltf_importer.data.images}"
            )
            return
        index = gltf_importer.data.images.index(img)
        if not isinstance(blender_image, bpy.types.Image):
            return
        # Store the glTF image index as a custom property on the datablock,
        # keyed by the session id.
        blender_image[self.__current_import_id] = index
| StarcoderdataPython |
11264078 | from hamcrest.core.base_matcher import BaseMatcher
from hamcrest.core.core.isequal import IsEqual
from pyhamcrest_toolbox.multicomponent import MatcherPlugin
class MatcherPluginWrapper(MatcherPlugin):
    """Adapt a classic hamcrest matcher so it can act as a plugin inside a
    MulticomponentMatcher."""

    # Class of the matcher to wrap.  Defaults to IsEqual (the matcher behind
    # `equal_to`).  Note: this must be a matcher *class*, not a factory
    # function.
    matcher_class = IsEqual

    # Text prepended to the wrapped matcher's description.
    description_prefix = ""

    # Text prepended to the wrapped matcher's mismatch description.
    mismatch_description_prefix = ""

    def convert_item(self, item):
        """Extract the object this plugin matches from *item*.

        The MulticomponentMatcher hands over the whole item it received;
        override this hook to pull out the part this plugin is responsible
        for.  The default is the identity.
        """
        return item

    def __init__(self, *args, **kwargs):
        if not issubclass(self.matcher_class, BaseMatcher):
            raise RuntimeError(
                "type_ must be set and must inherit from BaseMatcher")
        super().__init__()
        self.matcher = self.matcher_class(*args, **kwargs)

    def component_matches(self, item):
        return self.matcher._matches(self.convert_item(item))

    def describe_to(self, description):
        description.append_text(self.description_prefix)
        self.matcher.describe_to(description)

    def describe_component_mismatch(self, item, mismatch_description):
        mismatch_description.append_text(self.mismatch_description_prefix)
        self.matcher.describe_mismatch(
            self.convert_item(item), mismatch_description)
| StarcoderdataPython |
12848430 | __author__ = 'shuai'
class Solution:
    # @param {integer[]} nums
    # @return {string}
    def largestNumber(self, nums):
        """Arrange nums to form the largest possible number, as a string.

        Sorts the decimal strings with the comparator a+b vs b+a in
        O(n log n), replacing the original O(n^2) in-place selection sort,
        and no longer mutates the caller's list.
        """
        from functools import cmp_to_key  # works on both Python 2.7 and 3

        def compare(a, b):
            # a goes before b when the concatenation a+b is the bigger one
            if a + b > b + a:
                return -1
            if a + b < b + a:
                return 1
            return 0

        digits = sorted((str(x) for x in nums), key=cmp_to_key(compare))
        result = "".join(digits)
        # A leading '0' after a descending sort means every entry was 0.
        if result and result[0] == "0":
            return "0"
        return result
# Quick manual check (Python 2 print statement).
sol = Solution()
print sol.largestNumber([3, 30, 34, 5, 9])
| StarcoderdataPython |
336634 | <filename>common_utils.py
'''This file contains utilities common to all type of files and associated with the editor'''
from tkinter import *
from threading import *
from utils import *
def TellPos(command, editor, count, activity_log, code_input):
    '''
    Tells the current position of the cursor
    '''
    # tkinter reports the index as "line.column"; turn it into speech.
    line_col = editor.index(INSERT)
    Speak("at line " + line_col.replace('.', ' and column '))
def DeletePosLeft(index, editor, count, activity_log):
    '''
    Deletes <x> characters left of the cursor
    '''
    # `index` arrives as e.g. "3c"; keep the numeric part for the log entry.
    ind = index.split('c')
    index = 'insert-'+index
    # Capture the doomed text before deleting so it can be logged.
    deleted = editor.get(index, INSERT)
    pos = editor.index(INSERT)
    editor.delete(index, INSERT)
    edit = str(count)+ '::' + pos + '->' + 'Deleted ' + ind[0] + ' characters towards left ' + deleted + '\n'
    UpdateActivity(edit, activity_log)
def DeletePosRight(index, editor, count, activity_log):
    '''
    Deletes <x> characters right of the cursor
    '''
    # `index` arrives as e.g. "3c"; keep the numeric part for the log entry.
    ind = index.split('c')
    index = 'insert+'+index
    pos = editor.index(INSERT)
    # Capture the doomed text before deleting so it can be logged.
    deleted = editor.get(INSERT, index)
    editor.delete(INSERT, index)
    edit = str(count)+ '::' + pos + '->' + 'Deleted ' + ind[0]+ ' characters towards right ' + deleted + '\n'
    UpdateActivity(edit, activity_log)
def DeletePosAll(index, editor, count, activity_log):
    '''
    Clears the editor
    '''
    # `index` is accepted only so the signature matches the other Delete*
    # handlers dispatched from DeletePos; it is unused here.
    pos = editor.index(INSERT)
    editor.delete('1.0',END)
    edit = str(count)+ '::' + pos + '->' + 'Cleared editor\n'
    UpdateActivity(edit, activity_log)
def MovePosUp(command, pos, editor):
    '''
    Moves <x> lines up from the current position
    '''
    editor.mark_set(INSERT, 'insert-{}l'.format(pos))
def MovePosTo(command, pos, editor):
    '''
    Moves the cursor to line <x> and column <y>
    Can be used in 3 ways
    1.Move to line x
    2.Move to column y
    3.Move to line x column y
    '''
    # Collect the numbers spoken in the command, in order of appearance.
    # (The `pos` parameter is kept only for dispatch-signature parity with
    # the other Move* handlers; the numbers come from the command itself.)
    numbers = [word for word in command.split(' ') if word.isdigit()]
    if 'line' not in command:
        # Only a column given: stay on the current line.
        current_line = editor.index(INSERT).split('.')[0]
        # Bug fix: the '.' separator was missing, producing e.g. "34"
        # instead of "3.4".  Debug prints removed as well.
        index = current_line + '.' + numbers[0]
    elif 'column' not in command:
        # Only a line given: jump to its first column.
        index = numbers[0] + '.' + '0'
    else:
        index = numbers[0] + '.' + numbers[1]
    editor.mark_set(INSERT, index)
def MovePosLeft(command, pos, editor):
    '''
    Moves <x> positions left of the current position or move left
    '''
    editor.mark_set(INSERT, 'insert-{}c'.format(pos))
def MovePosRight(command, pos, editor):
    '''
    Moves <x> characters right of the current position or move right
    '''
    editor.mark_set(INSERT, 'insert+{}c'.format(pos))
def MovePosDown(command, pos, editor):
    '''
    Moves <x> lines down or move down
    '''
    editor.mark_set(INSERT, 'insert+{}l'.format(pos))
def MovePos(command, editor, count, activity_log, code_input):
    '''
    Common function for invoking specific actions associated with the cursor
    '''
    # A three-word command ("move <dir>") implies an amount of 1; otherwise
    # the amount is the second-to-last spoken word.
    tokens = command.split(' ')
    amount = 1 if len(tokens) == 3 else tokens[-2]
    # Dispatch on the first matching keyword; order matters because 'line'
    # and 'column' both route to MovePosTo.
    handlers = {'up': MovePosUp, 'line': MovePosTo, 'column': MovePosTo,
                'left': MovePosLeft, 'right': MovePosRight, 'down': MovePosDown}
    for keyword, handler in handlers.items():
        if keyword in command:
            handler(command, amount, editor)
            break
def DeletePos(command, editor, count, activity_log, code_input):
    '''
    Common function for invoking specific actions associated with the deletion
    '''
    # A three-word command ("delete <where>") implies an amount of 1;
    # otherwise the amount is the second-to-last spoken word.
    tokens = command.split(' ')
    amount = 1 if len(tokens) == 3 else tokens[-2]
    index = str(amount) + 'c'
    handlers = {'left': DeletePosLeft, 'right': DeletePosRight, 'all': DeletePosAll}
    for keyword, handler in handlers.items():
        if keyword in command:
            handler(index, editor, count, activity_log)
def Brackets(command, editor, count, activity_log):
    '''
    For adding the brackets
    '''
    pairs = {'square': '[]', 'round': '()', 'curly': '{}'}
    for shape, pair in pairs.items():
        if shape in command:
            code = pair
            break
    editor.insert(INSERT, code)
    # Park the cursor between the freshly inserted pair.
    editor.mark_set('insert', 'insert-1c')
def Extra(command, editor, count, activity_log):
    '''
    For adding anything onto the editor, just say 'add' followed by anything you want to add to the editor
    '''
    # Bug fix: strip only the leading 'add' keyword.  The old
    # replace('add', '') removed *every* occurrence, mangling dictated
    # words such as 'ladder' -> 'l er'.
    if command.startswith('add'):
        command = command[len('add'):]
    pos = editor.index(INSERT)
    editor.insert(INSERT, command)
    edit = str(count)+ '::' + pos + '-> ' + 'Adding ' + command + '\n'
    UpdateActivity(edit, activity_log)
def Enter(command, editor, count, activity_log, code_input):
    '''
    Just say enter to invoke an action same as pressing enter
    '''
    editor.insert(INSERT, '\n')
    # Audible confirmation; `beepy` is presumably provided by the star
    # imports at the top of the file — TODO confirm.
    beepy.beep(sound=3)
def Escape(command, editor, count, activity_log, code_input):
    '''
    Utility function for escaping from the local region defined by curly braces
    For example someone added a for loop and added conditions and code to the for block, now he doesn't know (if he can't see) how many positions towards the right he should move to add code below the for block so he can just say escape and get right next to '} - closing brace'
    '''
    # Scan forward from the cursor for the first '}' that closes the
    # *current* block, i.e. one not matched by a '{' opened after the cursor.
    code = editor.get(INSERT, END)
    count = 0        # nesting depth of '{' opened after the cursor
    count_char = 0   # characters scanned so far
    for char in code:
        if char=='{':
            count+=1
        elif char=='}' and count==0:
            break
        elif char=='}':
            count-=1
        count_char+=1
    # Step past the closing brace itself.
    count_char+=1
    # NOTE(review): if no unmatched '}' exists, the cursor is moved past the
    # end of the scanned text — confirm this is acceptable.
    ind = 'insert+' + str(count_char) + 'c'
    editor.mark_set('insert',ind)
def ReadLine(command, editor, count, activity_log, code_input):
    '''
    For reading a line normally or character by character if the person isn't able to still understand what's in the line
    1.Read line <x>
    2.Read line <x> char by char
    '''
    lst = command.split(' ')
    code = editor.get('1.0', END)
    if len(lst) <= 2:
        # NOTE(review): with no line number ("read line") the *entire*
        # buffer is spoken in a background thread — confirm intended.
        multi_thread(Speak, code)
    else:
        # Line numbers in the spoken command are 1-based.
        ind = int(lst[2])
        lines = code.split('\n')
        line = lines[ind-1]
        if 'character' in command:
            # Spell the line out one character at a time.
            for char in line:
                Speak(char)
        else:
            Speak(line)
6451008 | import os
def find_file_above(file_to_find: str, path: str = None):
    """Walk upwards from *path* (default: the cwd) looking for *file_to_find*.

    Returns the match as ``<directory>/<file_to_find>``, or None when no
    ancestor directory contains it.
    """
    tree = FileTree(path or os.getcwd())
    while True:
        if file_to_find in tree.current_files:
            return f"{tree.current_directory}/{file_to_find}"
        if tree.is_at_filesystem_root:
            # Bug fix: the old loop stopped *before* inspecting the root
            # directory, so a file living at the filesystem root was never
            # found.  It also now returns None explicitly on failure.
            return None
        tree.move_up_one_level()


class FileTree:
    """Cursor over a directory and its chain of parent directories."""

    def __init__(self, starting_directory):
        self.current_directory = starting_directory

    @property
    def current_files(self):
        # Names of the entries directly inside the current directory.
        return os.listdir(self.current_directory)

    @property
    def parent_dir(self):
        return os.path.dirname(self.current_directory)

    @property
    def is_at_filesystem_root(self):
        # At the root, dirname() is a fixed point (e.g. '/' -> '/').
        return self.current_directory == self.parent_dir

    def move_up_one_level(self):
        self.current_directory = self.parent_dir
| StarcoderdataPython |
4996441 | """
With these settings, tests run faster.
"""
from dj_easy_log import load_loguru
from .base import * # noqa
from .base import env
# GENERAL
# ------------------------------------------------------------------------------
# https://docs.djangoproject.com/en/dev/ref/settings/#secret-key
SECRET_KEY = env(
    "DJANGO_SECRET_KEY",
    default="<KEY>",
)
# https://docs.djangoproject.com/en/dev/ref/settings/#test-runner
TEST_RUNNER = "django.test.runner.DiscoverRunner"
REST_FRAMEWORK["TEST_REQUEST_DEFAULT_FORMAT"] = "json"  # noqa
# PASSWORDS
# ------------------------------------------------------------------------------
# https://docs.djangoproject.com/en/dev/ref/settings/#password-hashers
# MD5 is weak but fast — acceptable only because these settings are for
# running the test suite (see module docstring).
PASSWORD_HASHERS = ["django.contrib.auth.hashers.MD5PasswordHasher"]
# EMAIL
# ------------------------------------------------------------------------------
# https://docs.djangoproject.com/en/dev/ref/settings/#email-backend
EMAIL_BACKEND = "django.core.mail.backends.locmem.EmailBackend"
# Your stuff...
# ------------------------------------------------------------------------------
LOGGING["loggers"] = {  # noqa
    "rules": {
        "handlers": ["console"],
        "level": "DEBUG",
        "propagate": True,
    }
}
load_loguru(globals())
| StarcoderdataPython |
158565 | <filename>src/scopes/nonlocal.py
# Lexical scoping in Python and an interesting scenario with resolving value in class.
# Author: <NAME>
# Test suite 1
def test1():
    """
    Try to change a nonlocal variable of a parent frame.
    """
    def noeffect():
        # Uncomment the following line to show that local variable referenced before assignment
        # NB: UnboundLocalError will be raised
        # print(a)
        a = 2 # Does not affect `a` in the frame of `test1()`
        print(a)
    a = 1
    print(a)
    noeffect()
    print(a)  # still 1: noeffect() only rebound its own local `a`
# Test suite 2
def test2():
    """
    Correct way to change a nonlocal variable.
    """
    def affect():
        # Uncomment the following lines to show use of local variable before calling nonlocal
        # NB: SyntaxWarning will be raised on that `a` is assigned before nonlocal declaration
        # print(a)
        # a = 3
        # print(a)
        # Use of nonlocal statement to modify variable from parent frame (excluding global)
        nonlocal a
        print(a)
        a = 2
        print(a)
    a = 1
    print(a)
    affect()
    print(a)  # now 2: affect() rebound the enclosing `a` via `nonlocal`
| StarcoderdataPython |
135144 | from typing import List
class TestResult:
    """Immutable value object describing the outcome of a single test.

    All fields are supplied at construction time and exposed through
    read-only properties.
    """

    def __init__(self, name, kind, type_name, method, duration, result, exception_type, failure_message, stack_trace,
                 skip_reason, attachments):
        """
        :type name: unicode
        :type kind: unicode
        :type type_name: unicode
        :type method: unicode
        :type duration: float
        :type result: unicode
        :type exception_type: unicode
        :type failure_message: unicode
        :type stack_trace: unicode
        :type skip_reason: unicode
        :type attachments: List[TestResultAttachment]
        """
        self._name = name
        self._kind = kind
        self._type = type_name
        self._method = method
        self._duration_seconds = duration
        self._result = result
        self._exception_type = exception_type
        self._failure_message = failure_message
        self._stack_trace = stack_trace
        self._skip_reason = skip_reason
        # Bug fix: _output was never assigned, so the `output` property
        # raised AttributeError.  There is no constructor argument for it,
        # so it defaults to None.
        self._output = None
        self._attachments = attachments

    @property
    def name(self):
        return self._name

    @property
    def kind(self):
        return self._kind

    @property
    def type(self):
        return self._type

    @property
    def method(self):
        return self._method

    @property
    def duration_seconds(self):
        return self._duration_seconds

    @property
    def result(self):
        return self._result

    @property
    def exception_type(self):
        return self._exception_type

    @property
    def failure_message(self):
        return self._failure_message

    @property
    def stack_trace(self):
        return self._stack_trace

    @property
    def skip_reason(self):
        return self._skip_reason

    @property
    def output(self):
        return self._output

    @property
    def attachments(self):
        return self._attachments
class TestResultAttachment:
    """Named blob of text attached to a test result."""

    def __init__(self, name, text):
        """
        :type name: unicode
        :type text: unicode
        """
        self._name = name
        self._text = text

    @property
    def name(self):
        """Attachment name."""
        return self._name

    @property
    def text(self):
        """Attachment payload."""
        return self._text
| StarcoderdataPython |
1844955 | #!/usr/bin/python
########################################################################################################################
#
# Copyright (c) 2014, Regents of the University of California
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without modification, are permitted provided that the
# following conditions are met:
#
# 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following
# disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the
# following disclaimer in the documentation and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
# INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
########################################################################################################################
"""Quick start script for bag flow - nand gate layout generator"""
__author__ = "<NAME>"
__maintainer__ = "<NAME>"
__email__ = "<EMAIL>"
__status__ = "Prototype"
if __name__ == '__main__':
    import laygo
    import numpy as np
    #initialize
    laygen = laygo.GridLayoutGenerator(config_file="laygo_config.yaml")
    #template and grid load
    utemplib = laygen.tech+'_microtemplates_dense' #device template library name
    laygen.load_template(filename=utemplib+'_templates.yaml', libname=utemplib)
    laygen.load_grid(filename=utemplib+'_grids.yaml', libname=utemplib)
    laygen.templates.sel_library(utemplib)
    laygen.grids.sel_library(utemplib)
    # library & cell creation
    laygen.add_library('laygo_working')
    laygen.add_cell('nand_test')
    # grid variables
    pg = 'placement_basic'
    rg12 = 'route_M1_M2_cmos'
    rg23 = 'route_M2_M3_cmos'
    #placements
    # One NMOS row and one PMOS row (mirrored via transform='MX'), each
    # built as boundary / 2-finger-center / boundary triplets.
    nr = ['nmos4_fast_boundary', 'nmos4_fast_center_nf2', 'nmos4_fast_boundary',
          'nmos4_fast_boundary', 'nmos4_fast_center_nf2', 'nmos4_fast_boundary']
    pr = ['pmos4_fast_boundary', 'pmos4_fast_center_nf2', 'pmos4_fast_boundary',
          'pmos4_fast_boundary', 'pmos4_fast_center_nf2', 'pmos4_fast_boundary']
    pd = ['top']+['right']*5
    nrow = laygen.relplace(templatename=nr, gridname=pg)
    prow = laygen.relplace(templatename=pr, gridname=pg, refobj=nrow[0], direction=pd, transform='MX')
    #routes
    # a
    laygen.route(xy0=[0, 0], xy1=[0, 0], gridname0=rg12, refobj0=prow[4].pins['G0'], refobj1=nrow[4].pins['G0'], via0=[0, 0])
    laygen.route(xy0=[-2, 0], xy1=[0, 0], gridname0=rg12, refobj0=prow[4].pins['G0'], refobj1=prow[4].pins['G0'])
    ra0 = laygen.route(xy0=[0, 0], xy1=[0, 2], gridname0=rg23, refobj0=prow[4].pins['G0'], refobj1=prow[4].pins['G0'],
                       via0=[0, 0])
    # b
    laygen.route(xy0=[0, 0], xy1=[0, 0], gridname0=rg12, refobj0=nrow[1].pins['G0'], refobj1=prow[1].pins['G0'], via0=[0, 0])
    laygen.route(xy0=[0, 0], xy1=[2, 0], gridname0=rg12, refobj0=nrow[1].pins['G0'], refobj1=nrow[1].pins['G0'])
    rb0 = laygen.route(xy0=[0, 0], xy1=[0, 2], gridname0=rg23, refobj0=nrow[1].pins['G0'], refobj1=nrow[1].pins['G0'],
                       via0=[0, 0])
    # internal
    laygen.route(xy0=[0, 2], xy1=[0, 2], gridname0=rg12, refobj0=nrow[1].pins['D0'], refobj1=nrow[4].pins['S1'],
                 via0=[0, 0], via1=[[-2, 0], [0, 0]])
    # output
    laygen.route(xy0=[0, 2], xy1=[1, 2], gridname0=rg12, refobj0=prow[1].pins['D0'], refobj1=prow[4].pins['D0'],
                 via0=[0, 0], via1=[-1, 0])
    laygen.route(xy0=[-1, 0], xy1=[1, 0], gridname0=rg12, refobj0=nrow[4].pins['D0'], refobj1=nrow[4].pins['D0'], via0=[1, 0])
    ro0 = laygen.route(xy0=[1, 0], xy1=[1, 2], gridname0=rg23, refobj0=nrow[4].pins['D0'], via0=[0, 0],
                       refobj1=prow[4].pins['D0'], via1=[0, 0])
    # power and ground route - vertical
    for d in [nrow[1], prow[1], prow[4]]:
        for s in ['S0', 'S1']:
            laygen.route(None, gridname0=rg12, refobj0=d.pins[s], refobj1=d, via1=[0, 0], direction='y')
    # power and ground route - horizontal
    xy = laygen.get_template_xy(name=nrow[-1].cellname, gridname=rg12) * np.array([1, 0])
    rvdd=laygen.route(None, xy0=[0, 0], xy1=xy, gridname0=rg12, refobj0=prow[0], refobj1=prow[-1])
    rvss=laygen.route(None, xy0=[0, 0], xy1=xy, gridname0=rg12, refobj0=nrow[0], refobj1=nrow[-1])
    #pins
    for pn, rp in zip(['A', 'B', 'O'], [ra0, rb0, ro0]):
        laygen.pin(name=pn, layer=laygen.layers['pin'][3], refobj=rp, gridname=rg23)
    for pn, rp in zip(['VDD', 'VSS'], [rvdd, rvss]):
        laygen.pin(name=pn, layer=laygen.layers['pin'][2], refobj=rp, gridname=rg12)
    laygen.display()
    # export
    import bag
    prj = bag.BagProject()
    laygen.export_BAG(prj)
| StarcoderdataPython |
324177 | from typing import TYPE_CHECKING
from groupy.client import Groupy
from itests.setup import api_server
if TYPE_CHECKING:
from py.path import LocalPath
from tests.setup import SetupTest
def test_get_permissions(tmpdir, setup):
    # type: (LocalPath, SetupTest) -> None
    # Listing permissions via the API returns them sorted by name.
    with setup.transaction():
        setup.create_permission("some-permission")
        setup.create_permission("another-permission")
    with api_server(tmpdir) as api_url:
        api_client = Groupy(api_url)
        assert sorted(api_client.permissions) == ["another-permission", "some-permission"]
def test_get_permission(tmpdir, setup):
    # type: (LocalPath, SetupTest) -> None
    # A single permission lookup exposes every group it was granted to,
    # regardless of the granted argument ("foo", "bar", "*").
    with setup.transaction():
        setup.grant_permission_to_group("ssh", "foo", "sad-team")
        setup.grant_permission_to_group("ssh", "bar", "team-sre")
        setup.grant_permission_to_group("ssh", "*", "tech-ops")
    with api_server(tmpdir) as api_url:
        api_client = Groupy(api_url)
        permission = api_client.permissions.get("ssh")
        assert sorted(permission.groups) == ["sad-team", "team-sre", "tech-ops"]
| StarcoderdataPython |
3320691 | <filename>data/test/python/bf5d2878ae4d4260ba7cffcc0ed4562278239decurls.py
from django.conf.urls import patterns, include, url
from django.contrib import admin
from tastypie.api import Api
from progress.apps.food import api
admin.autodiscover()
# Tastypie API v1.0 exposing the food app's day and meal resources.
v1_api = Api(api_name='1.0')
v1_api.register(api.DayResource())
v1_api.register(api.MealResource())
# NOTE(review): unlike the other patterns, r'api/' has no leading '^'
# anchor — confirm that is intentional.
urlpatterns = patterns('',
    url(r'api/',
        include(v1_api.urls)),
    url(r'^food/',
        include('progress.apps.food.urls',
                namespace='food')),
    url(r'^$',
        'progress.apps.home.views.home',
        name='home'),
    url(r'^admin/',
        include(admin.site.urls)),
)
| StarcoderdataPython |
361612 | <gh_stars>0
from django.db import models
from django.test import TestCase
from hashlookup.querysets import gen_hash
from models import HashLookupTestModel
# Fixture URL long enough to make hash-based lookup worthwhile.
TEST_URL = 'https://test.com/help/about/article?date=2016-02-14&tool=main&from=producthunt'


class HashLookupTestCase(TestCase):
    """Verify that the hash-lookup manager rewrites equality filters on
    `url` into lookups against the indexed `url_hash` column, while the
    default manager and non-equality lookups keep using `url`."""

    def setUp(self):
        HashLookupTestModel.objects.create(url=TEST_URL, url_hash=gen_hash(TEST_URL))

    def test_filter_queryset(self):
        # Default manager: filters hit the raw `url` column.
        queryset = HashLookupTestModel.objects.filter(url=TEST_URL)
        sql_with_params = queryset.query.sql_with_params()
        #(u'SELECT "tests_hashlookuptestmodel"."id", "tests_hashlookuptestmodel"."url", "tests_hashlookuptestmodel"."url_hash" FROM "tests_hashlookuptestmodel" WHERE "tests_hashlookuptestmodel"."url" = %s', ('https://test.com/help/about/article?date=2016-02-14&tool=main&from=producthunt',))
        self.assertEqual(queryset.count(), 1)
        self.assertEqual(sql_with_params[1][0], TEST_URL)
        self.assertIn('"tests_hashlookuptestmodel"."url" = %s', sql_with_params[0])

    def test_filter_queryset_hash(self):
        # Hash manager: the equality filter is rewritten to url_hash.
        queryset = HashLookupTestModel.objects_hash.filter(url=TEST_URL)
        sql_with_params = queryset.query.sql_with_params()
        self.assertEqual(queryset.count(), 1)
        self.assertEqual(sql_with_params[1][0], gen_hash(TEST_URL))
        self.assertIn('"tests_hashlookuptestmodel"."url_hash" = %s', sql_with_params[0])

    def test_filter_queryset_hash_contains(self):
        # Non-equality lookups (contains -> LIKE) stay on the raw column.
        queryset = HashLookupTestModel.objects_hash.filter(url__contains='test.com')
        sql_with_params = queryset.query.sql_with_params()
        self.assertEqual(queryset.count(), 1)
        self.assertEqual(sql_with_params[1][0], "%test.com%")
        self.assertIn('"tests_hashlookuptestmodel"."url" LIKE %s', sql_with_params[0])
| StarcoderdataPython |
3596228 | #
# Copyright (c) SAS Institute Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import os
import unittest
import difflib
from conary.lib.patch import patch, reverse, unifiedDiff
from conary_test import resources
class PatchTest(unittest.TestCase):
    """Exercises conary's unified-diff helpers: patch(), reverse() and
    unifiedDiff().

    Most tests build a diff between an original file and a sed-edited copy,
    apply that diff to a (possibly locally modified) target, and compare
    the merge result with the expected outcome.  Note: Python 2 code
    (print statements, iterator .next(), xrange).
    """

    def runTest(self, fileName, sedCmd, changeCmd = None):
        """Diff fileName against `sed sedCmd` output and re-apply the diff.

        When changeCmd is given, the diff is applied on top of a locally
        changed copy (produced by `sed changeCmd`) and the result compared
        with the commuted application of both sed scripts.  Otherwise the
        diff is applied back onto the original file.  Mismatches are
        printed rather than raised; only non-commuting sed scripts fail
        the test outright.
        """
        new = os.popen("sed '%s' < %s" % (sedCmd, fileName)).readlines()
        orig = open(fileName).readlines()
        if changeCmd:
            target = os.popen("sed '%s' < %s" % (changeCmd, fileName)).readlines()
            final = os.popen("sed '%s' < %s | sed '%s'" % (sedCmd, fileName,
                                                           changeCmd)).readlines()
            check = os.popen("sed '%s' < %s | sed '%s'" % (changeCmd, fileName,
                                                           sedCmd)).readlines()
            # The two sed edits must commute, or the expected merge result
            # is ill-defined.
            if "".join(final) != "".join(check):
                self.fail("sed scripts have conflicting results")
        else:
            target = orig
            final = new
        diff = difflib.unified_diff(orig, new)
        # advance past header
        diff.next()
        diff.next()
        d = [ x for x in diff ]
        (new2, conflicts) = patch(target, d)
        diff = difflib.unified_diff(final, new2)
        try:
            # If the merged result differs from the expectation, the diff
            # iterator yields lines; print them.  StopIteration means the
            # patch applied exactly as expected.
            diff.next()
            print "%s '%s' failed:" % (fileName, sedCmd)
            diff.next()
            for line in diff:
                line = line[:-1]
                print "\t%s" % line
        except StopIteration:
            pass

    def test1(self):
        # Single word substitution applied back to the original file.
        self.runTest(resources.get_archive() + "/services", "s/ircd/foobard/")

    def test2(self):
        # Two substitutions in one sed script.
        self.runTest(resources.get_archive() + "/services", "s/chargen/mygen/;s/tftp/mytftp/")

    def test3(self):
        # Line appended after a match.
        self.runTest(resources.get_archive() + "/services", "/chargen/ahello there")

    def test4(self):
        # Matched lines deleted.
        self.runTest(resources.get_archive() + "/services", "/tftp/d")

    def test5(self):
        # Diff applied on top of an independent local substitution.
        self.runTest(resources.get_archive() + "/services", "s/tftp/mytftp/", "s/telnet/mytelnet/")

    def test6(self):
        # Diff applied on top of a local deletion.
        self.runTest(resources.get_archive() + "/services", "s/tftp/mytftp/", "/chargen/d")

    def test7(self):
        # Diff applied on top of a local append.
        self.runTest(resources.get_archive() + "/services", "s/tftp/mytftp/",
                     "/chargen/asome new lines")

    def test8(self):
        # Local append combined with an upstream substitution.
        self.runTest(resources.get_archive() + "/tmpwatch", "/done/a # local changes",
                     "s/720/360/")

    def test9(self):
        """Patching an empty file should reproduce the new content exactly."""
        old = []
        new = [ "some lines", "of text", "to be patched" ]
        diff = difflib.unified_diff(old, new, lineterm = "")
        # advance past header
        diff.next()
        diff.next()
        (new2, conflicts) = patch(old, diff)
        assert(not conflicts)
        diff = difflib.unified_diff(new, new2)
        self.assertRaises(StopIteration, diff.next)

    def test10(self):
        """test reversing a diff and applying it to the new file, check
        to make sure you get the old file"""
        old = [ "a", "b", "c", "same" ]
        new = [ "1", "2", "3", "same" ]
        diff = difflib.unified_diff(old, new, lineterm = "")
        # advance past header
        diff.next()
        diff.next()
        diff = reverse(diff)
        (old2, conflicts) = patch(new, diff)
        if old != old2:
            raise AssertionError

    def test11(self):
        """
        test that a patch that appends to a file which has shrunk
        works
        """
        # orig file is 10 lines of "1"
        old = [ '1' ] * 10
        # local modification removes two of the lines of "1"
        oldchanged = [ '1' ] * 8
        # new file adds two lines
        new = old + [ '2', '3' ]
        # we expect for the result to be the local change plus two lines
        newmerged = oldchanged + [ '2', '3' ]
        diff = difflib.unified_diff(old, new, lineterm = "")
        # advance past header
        diff.next()
        diff.next()
        (results, conflicts) = patch(oldchanged, diff)
        assert(results == newmerged)
        assert(not conflicts)

    def testConverge(self):
        """
        tests applying a patch to a file that already has the change.
        This is used in cvc merge
        """
        # orig file is 10 lines of "1"
        base = [ '1' ] * 10
        # the file changes the middle line to "2" on the branch
        tip = base[:]
        tip[5] = '2'
        # the file on my local branch already has the change
        trunk = tip[:]
        diff = difflib.unified_diff(base, tip, lineterm = "")
        # advance past header
        diff.next()
        diff.next()
        # this should be like "patch appears to already be applied"
        (results, conflicts) = patch(trunk, diff)
        assert(results == tip)
        assert(not conflicts)

    def testEraseConflict(self):
        """Erasing a region that was modified locally must conflict and
        leave the local copy untouched."""
        base = """\
useradd
${UIDARG}
${HOMEDIR:+-d "${HOMEDIR}"}
$USER >/dev/null 2>&1 || :
;;
1
2
3
""".split('\n')
        tip = """\
useradd
${UIDARG}
-M -d "${HOMEDIR}"
$USER >/dev/null 2>&1
;;
1
2
3
""".split('\n')
        local = """\
useradd
${UIDARG} \
${HOMEDIR:+-d "${HOMEDIR}"}
${PASSWORD:+-s "${PASSWORD}"}
$USER >/dev/null 2>&1 || :
;;
1
2
3
""".split('\n')
        diff = list(difflib.unified_diff(base, tip))
        (results, conflicts) = patch(local, diff[2:])
        # On conflict, the target must come back unchanged.
        assert(results == local)
        assert(conflicts)

    def testEraseAlreadyApplied(self):
        """Re-applying a deletion that already happened should be a no-op."""
        first = [ "%s\n" % x for x in xrange(10) ]
        second = first[:]
        second.remove("4\n")
        diff = list(difflib.unified_diff(first, second))
        (results, conflicts) = patch(second, diff[2:])
        assert(results == second)
        assert(not conflicts)

    def testMergeAtEnd(self):
        """A deletion near the end merges with an independent local
        deletion of the final line."""
        first = [ "%s\n" % x for x in xrange(10) ]
        second = first[:]
        second.remove("7\n")
        diff = list(difflib.unified_diff(first, second))
        third = first[:]
        third.remove("9\n")
        result = patch(third, diff[2:])
        assert(not result[1])
        assert(result[0] == [ '0\n', '1\n', '2\n', '3\n', '4\n', '5\n',
                              '6\n', '8\n'])

    def testNoTrailingNewline(self):
        """unifiedDiff() must emit the '\\ No newline at end of file'
        marker and patch() must round-trip files without a final newline."""
        first = [ "line\n" ]
        second = [ "line\n", "another" ]
        diff = list(unifiedDiff(first, second))
        assert(diff[-1] == '\ No newline at end of file\n')
        assert(diff[-2][-1] == '\n')
        result = patch(first, diff[2:])
        assert(not result[1])
        assert(result[0] == second)
        first = [ "first" ]
        second = [ "second" ]
        diff = list(unifiedDiff(first, second))
        result = patch(first, diff[2:])
        assert(not result[1])
        assert(result[0] == second)
        first = [ "first" ]
        second = [ "second\n" ]
        diff = list(unifiedDiff(first, second))
        result = patch(first, diff[2:])
        assert(not result[1])
        assert(result[0] == second)
| StarcoderdataPython |
3417715 | <gh_stars>1-10
from setuptools import setup
import os
# Package root; metadata is read from the package itself so it lives in
# exactly one place.
root = os.path.dirname(os.path.abspath(__file__))

about = {}
# Execute shadow_loop/__version__.py to populate __title__, __author__,
# __version__ and __license__.
with open(os.path.join(root, 'shadow_loop', '__version__.py'), 'r') as f:
    exec(f.read(), about)

# README.md becomes the long description shown on PyPI.
readme = ''
with open(os.path.join(root, 'README.md')) as f:
    readme = f.read()

setup(name=about['__title__'],
      author=about['__author__'],
      # NOTE(review): '<EMAIL>' looks like an anonymization placeholder —
      # restore a real address before publishing.
      author_email='<EMAIL>',
      url='http://github.com/isanich/shadow-loop',
      version=about['__version__'],
      license=about['__license__'],
      description='Submit _awaitable_ objects to a shadow event loop in a separate thread '
                  'and wait for their execution from synchronous code if needed.',
      long_description=readme,
      packages=['shadow_loop'],
      setup_requires=['pytest-runner'],
      tests_require=['pytest'],
      include_package_data=True,
      platforms='any',
      classifiers=[
          'Development Status :: 3 - Alpha',
          'License :: OSI Approved :: MIT License',
          'Intended Audience :: Developers',
          'Natural Language :: English',
          'Operating System :: OS Independent',
          'Programming Language :: Python :: 3.6',
          'Programming Language :: Python :: 3.7',
          'Topic :: Software Development :: Libraries',
          'Topic :: Software Development :: Libraries :: Python Modules',
      ]
      )
| StarcoderdataPython |
9767035 | <gh_stars>0
import os.path
import sys
from PyQt5.QtCore import QUrl, pyqtSignal
from PyQt5.QtGui import QShowEvent, QCloseEvent, QTextCursor
from PyQt5.QtWidgets import QApplication, QMainWindow, QWidget, QPushButton, QTextEdit
from PyQt5.QtWidgets import QVBoxLayout, QHBoxLayout, QSizePolicy
from PyQt5.QtWebEngineWidgets import QWebEngineView, QWebEnginePage
class CustomWebEnginePage(QWebEnginePage):
    """QWebEnginePage that forwards JavaScript console output as a Qt signal.

    The signal carries (level, message, line, source) exactly as delivered
    to javaScriptConsoleMessage(), so the UI can display JS output instead
    of it going to stderr.
    """

    # (level, message, line number, source URL) of a JS console message.
    sig_console_message = pyqtSignal(object, object, object, object)

    def __init__(self, parent=None):
        super().__init__(parent)

    def javaScriptConsoleMessage(self, level, message, line, source):
        # Re-emit instead of printing so listeners can render the message.
        self.sig_console_message.emit(level, message, line, source)
class DartCrawlerWindow(QMainWindow):
    """Main window that loads the DART corporate-disclosure site in an
    embedded web view and drives a JavaScript crawling script inside it."""

    # Entry page of the DART company-search site to crawl.
    _dart_url: str = "https://dart.fss.or.kr/dsae001/main.do"

    def __init__(self):
        super().__init__()
        self._webview = QWebEngineView()
        self._btnStartCrawl = QPushButton('START')
        self._btnGetResult = QPushButton('GET RESULT')
        self._editConsole = QTextEdit()
        self.initControl()
        self.initLayout()

    def initLayout(self):
        """Arrange buttons (top), web view (middle) and console (bottom)."""
        widget = QWidget()
        self.setCentralWidget(widget)
        vbox = QVBoxLayout(widget)
        vbox.setContentsMargins(0, 0, 0, 0)
        vbox.setSpacing(4)
        subwgt = QWidget()
        subwgt.setSizePolicy(QSizePolicy.MinimumExpanding, QSizePolicy.Fixed)
        hbox = QHBoxLayout(subwgt)
        hbox.setContentsMargins(2, 4, 2, 0)
        hbox.addWidget(self._btnStartCrawl)
        hbox.addWidget(self._btnGetResult)
        vbox.addWidget(subwgt)
        vbox.addWidget(self._webview)
        vbox.addWidget(self._editConsole)

    def initControl(self):
        """Wire up signals and configure the web page and console widgets."""
        self._btnStartCrawl.clicked.connect(self.startCrawl)
        self._btnGetResult.clicked.connect(self.getResult)
        # Custom page forwards JS console output to our console pane.
        webpage = CustomWebEnginePage(self._webview)
        webpage.sig_console_message.connect(self.onWebPageConsoleMessage)
        self._webview.setPage(webpage)
        self._editConsole.setReadOnly(True)
        self._editConsole.setFixedHeight(100)
        self._editConsole.setLineWrapColumnOrWidth(-1)
        self._editConsole.setLineWrapMode(QTextEdit.FixedPixelWidth)

    def startCrawl(self):
        """Load run_code.js from the working directory and run it in the page."""
        script_path = os.path.abspath('./run_code.js')
        with open(script_path, 'r', encoding='utf-8') as fp:
            script = fp.read()
        self._editConsole.clear()
        self._webview.page().runJavaScript(script, self.callbackJavascript)

    def getResult(self):
        """Fetch the crawl results accumulated by the script in the page's
        global ``arr_leaf_nodes`` variable."""
        self._webview.page().runJavaScript("arr_leaf_nodes;", self.callbackResult)

    def showEvent(self, a0: QShowEvent) -> None:
        # Start loading the DART page as soon as the window appears.
        self._webview.load(QUrl(self._dart_url))

    def closeEvent(self, a0: QCloseEvent) -> None:
        self._webview.close()
        self.deleteLater()

    def addTextMessage(self, message: str):
        """Append *message* to the console and keep it scrolled to the bottom."""
        cursor = QTextCursor(self._editConsole.textCursor())
        cursor.movePosition(QTextCursor.End)
        cursor.insertText(message + '\n')
        vscroll = self._editConsole.verticalScrollBar()
        vscroll.setValue(vscroll.maximum())

    def onWebPageConsoleMessage(self, level, message, line, source):
        """Mirror a JavaScript console message into the console pane."""
        text = f'{message} (lv:{level}, line:{line})'
        self.addTextMessage(text)

    def callbackJavascript(self, result: object):
        """Log the return value of the injected crawl script."""
        text = f'>> {result}'
        self.addTextMessage(text)

    def callbackResult(self, result: object):
        """Handle the crawl result list returned from JavaScript."""
        if isinstance(result, list):
            # self._editConsole.clear()
            ############################################################################################################
            # for test
            import pickle
            with open('result_list.pkl', 'wb') as fp:
                pickle.dump(result, fp)
            ############################################################################################################

        # NOTE(review): parse_dict is defined but never called — presumably
        # a stub for processing the pickled result; confirm intent.
        def parse_dict(obj: dict):
            """
            [dict structure]
            node_id: str = jstree node id
            node_text: str = jstree node text = industry classification
            parents: list of dict, dict element = {'id': node id, 'text': node text = industry classification}
            corp_info_arr: list of dict, dict element = {'name': company name, 'code': unique company code, 'sector': sector id}
            """
            pass
if __name__ == '__main__':
    # Launch the crawler window inside a Qt event loop.
    app = QApplication(sys.argv)
    wnd = DartCrawlerWindow()
    wnd.resize(600, 800)
    wnd.show()
    app.exec_()
| StarcoderdataPython |
1884121 | '''
done
'''
import _thread
import time
class PenampungThread:
    """Simple worker whose run() prints an incrementing counter forever.

    Intended to be scheduled via _thread.start_new_thread; the name labels
    each worker's output lines.
    """

    def __init__(self, threadName):
        # Label identifying this worker in the printed output.
        self.name = threadName

    def run(self, delay):
        """Loop forever, sleeping *delay* seconds between numbered lines."""
        counter = 0
        while True:
            time.sleep(delay)
            print ("%s: %s" % (self.name, str(counter)))
            counter += 1
th1 = PenampungThread("thread-pisang")
th2 = PenampungThread("thread-mangga")

try:
    # Spawn both workers with different print intervals (seconds).
    _thread.start_new_thread(th1.run, (2,))
    _thread.start_new_thread(th2.run, (4,))
except:
    # Error message is user-facing (Indonesian): "cannot start thread".
    print ("Error: tidak bisa memulai thread")

# Busy-wait forever so the worker threads keep running; _thread workers
# die when the main thread exits.
while 1:
    pass
11248823 | from adaptador import MeuTaxi
from datetime import datetime
import gym
env = gym.make("Taxi-v3").env
def _run_taxi_case(taxi_row, taxi_col, passenger_loc, destination):
    """Solve one encoded Taxi-v3 state with MeuTaxi and check the solution.

    Mirrors the original per-test bodies exactly: reset the environment,
    encode the requested state, render, time the solver, print the elapsed
    time, and assert that the last action of the returned path is 5
    (the drop-off action).

    :param taxi_row: taxi row in the 5x5 grid.
    :param taxi_col: taxi column in the 5x5 grid.
    :param passenger_loc: passenger location index (0-4).
    :param destination: destination index (0-3).
    """
    env.reset()
    state = env.encode(taxi_row, taxi_col, passenger_loc, destination)
    env.render()
    inicio = datetime.now()
    result = MeuTaxi(env.desc, env.decode(state))
    fim = datetime.now()
    print(fim - inicio)
    assert result.path()[-1] == 5


def test_1():
    _run_taxi_case(3, 2, 1, 0)


def test_2():
    _run_taxi_case(3, 1, 2, 0)


def test_3():
    _run_taxi_case(3, 1, 3, 0)


def test_4():
    _run_taxi_case(3, 3, 0, 1)


def test_5():
    _run_taxi_case(3, 1, 1, 2)


def test_6():
    _run_taxi_case(3, 1, 3, 3)
4875956 | <gh_stars>1-10
from django.urls.conf import include, re_path
from core.tests.api import Api, NoteResource, UserResource
# Register the test resources on a single Api instance and expose their
# URLs under the /api/ prefix.
api = Api()
api.register(NoteResource())
api.register(UserResource())

urlpatterns = [
    re_path(r'^api/', include(api.urls)),
]
| StarcoderdataPython |
3305351 | # type: ignore
# -*- coding: utf-8 -*-
#
# ramstk.analyses.milhdk217f.models.inductor.py is part of the RAMSTK Project
#
# All rights reserved.
# Copyright since 2007 Doyle "weibullguy" Rowland doyle.rowland <AT> reliaqual <DOT> com
"""Inductor MIL-HDBK-217F Constants and Calculations Module."""
# Standard Library Imports
from math import exp
from typing import Dict, Union
# MIL-HDBK-217F parts-count base hazard rates (lambda b).  Outer key is the
# subcategory ID (1 = transformer, 2 = coil), inner key the family ID, and
# each list holds one rate per active-environment ID (14 environments,
# indexed by environment_active_id - 1).
PART_COUNT_LAMBDA_B = {
    1: {
        1: [0.0035, 0.023, 0.049, 0.019, 0.065, 0.027, 0.037, 0.041, 0.052,
            0.11, 0.0018, 0.053, 0.16, 2.3],
        2: [0.0071, 0.046, 0.097, 0.038, 0.13, 0.055, 0.073, 0.081, 0.10,
            0.22, 0.035, 0.11, 0.31, 4.7],
        3: [0.023, 0.16, 0.35, 0.13, 0.45, 0.21, 0.27, 0.35, 0.45,
            0.82, 0.011, 0.37, 1.2, 16.0],
        4: [0.028, 0.18, 0.39, 0.15, 0.52, 0.22, 0.29, 0.33, 0.42,
            0.88, 0.015, 0.42, 1.2, 19.0],
    },
    2: {
        1: [0.0017, 0.0073, 0.023, 0.0091, 0.031, 0.011, 0.015, 0.016, 0.022,
            0.052, 0.00083, 0.25, 0.073, 1.1],
        2: [0.0033, 0.015, 0.046, 0.018, 0.061, 0.022, 0.03, 0.033, 0.044,
            0.10, 0.0017, 0.05, 0.15, 2.2],
    },
}

# Parts-count quality factors (piQ), indexed by quality_id - 1.
PART_COUNT_PI_Q = [0.25, 1.0, 10.0]

# Part-stress quality factors (piQ).  Subcategory 1 (transformer) is keyed
# by family ID and then indexed by quality_id - 1; subcategory 2 (coil) is
# indexed directly by quality_id - 1.
PART_STRESS_PI_Q = {
    1: {1: [1.5, 5.0], 2: [3.0, 7.5], 3: [8.0, 30.0], 4: [12.0, 30.0]},
    2: [0.03, 0.1, 0.3, 1.0, 4.0, 20.0],
}

# Environment factors (piE) per subcategory, indexed by
# environment_active_id - 1.
PI_E = {
    1: [1.0, 6.0, 12.0, 5.0, 16.0, 6.0, 8.0, 7.0, 9.0, 24.0, 0.5, 13.0,
        34.0, 610.0],
    2: [1.0, 4.0, 12.0, 5.0, 16.0, 5.0, 7.0, 6.0, 8.0, 24.0, 0.5, 13.0,
        34.0, 610.0],
}

# Per-insulation-class reference temperatures, keyed by subcategory then
# insulation ID; used as the denominator of (T_hot_spot + 273) in the
# base hazard rate exponent (see calculate_part_stress_lambda_b).
REF_TEMPS = {
    1: {1: 329.0, 2: 352.0, 3: 364.0, 4: 400.0, 5: 398.0, 6: 477.0},
    2: {1: 329.0, 2: 352.0, 3: 364.0, 4: 409.0},
}
def calculate_part_count(**attributes: Dict[str, Union[float, int, str]]) -> float:
    """Wrap get_part_count_lambda_b() for the generic parts-count driver.

    This wrapper allows a generic parts-count function to pass a whole
    attribute dict instead of individual arguments.

    :param attributes: the attributes for the inductive device being calculated.
    :return: the parts-count base hazard rate.
    :rtype: float
    """
    _subcategory = attributes["subcategory_id"]
    _environment = attributes["environment_active_id"]
    _family = attributes["family_id"]
    return get_part_count_lambda_b(_subcategory, _environment, _family)
def calculate_part_stress(
    **attributes: Dict[str, Union[float, int, str]]
) -> Dict[str, Union[float, int, str]]:
    """Calculate the part stress hazard rate for an inductive device.

    This function calculates the MIL-HDBK-217F hazard rate using the part
    stress method.  The attribute dict is updated in place (piC, piQ,
    temperature_rise, temperature_hot_spot, lambda_b, hazard_rate_active).

    NOTE(review): reads attributes["piE"] but never sets it here — the
    environment factor is presumably filled in by the caller; confirm.

    :return: attributes; the keyword argument (hardware attribute)
        dictionary with updated values.
    :rtype: dict
    """
    # Construction factor piC is numerically equal to the construction ID.
    attributes["piC"] = float(attributes["construction_id"])
    attributes["piQ"] = get_part_stress_quality_factor(
        attributes["subcategory_id"],
        attributes["quality_id"],
        attributes["family_id"],
    )
    _power_input = attributes["voltage_dc_operating"] * attributes["current_operating"]
    # Pick the best available estimate of the temperature rise, in order of
    # preference: spec sheet, power loss / surface area, power loss /
    # weight, input power / weight; otherwise assume no rise.
    if attributes["subcategory_id"] == 2 and attributes["specification_id"] == 2:
        attributes["temperature_rise"] = get_temperature_rise_spec_sheet(
            int(attributes["page_number"])
        )
    elif attributes["power_operating"] > 0.0 and attributes["area"] > 0.0:
        attributes["temperature_rise"] = calculate_temperature_rise_power_loss_surface(
            attributes["power_operating"], attributes["area"]
        )
    elif attributes["power_operating"] > 0.0 and attributes["weight"] > 0.0:
        attributes["temperature_rise"] = calculate_temperature_rise_power_loss_weight(
            attributes["power_operating"], attributes["weight"]
        )
    elif _power_input > 0.0 and attributes["weight"] > 0.0:
        attributes["temperature_rise"] = calculate_temperature_rise_input_power_weight(
            _power_input, attributes["weight"]
        )
    else:
        attributes["temperature_rise"] = 0.0
    attributes["temperature_hot_spot"] = calculate_hot_spot_temperature(
        attributes["temperature_active"], attributes["temperature_rise"]
    )
    attributes["lambda_b"] = calculate_part_stress_lambda_b(
        attributes["subcategory_id"],
        attributes["insulation_id"],
        attributes["temperature_hot_spot"],
    )
    attributes["hazard_rate_active"] = (
        attributes["lambda_b"] * attributes["piQ"] * attributes["piE"]
    )
    # Coils (subcategory 2) additionally apply the construction factor.
    if attributes["subcategory_id"] == 2:
        attributes["hazard_rate_active"] = (
            attributes["hazard_rate_active"] * attributes["piC"]
        )
    return attributes
def calculate_hot_spot_temperature(
    temperature_active: float,
    temperature_rise: float,
) -> float:
    """Return the coil or transformer hot spot temperature.

    The hot spot is estimated as the active (ambient) temperature plus
    110% of the temperature rise.

    :param temperature_active: active (ambient) temperature in C.
    :param temperature_rise: estimated temperature rise in C.
    :return: the calculated hot spot temperature.
    :rtype: float
    """
    _hot_spot = temperature_active + 1.1 * temperature_rise
    return _hot_spot
def calculate_part_stress_lambda_b(
    subcategory_id: int,
    insulation_id: int,
    temperature_hot_spot: float,
) -> float:
    """Calculate the MIL-HDBK-217F part stress base hazard rate (lambda b).

    The rate follows an Arrhenius-style form: scale * exp(((T + 273) / T_ref) ** k)
    with the scale, exponent and reference temperature selected by the
    subcategory and insulation class.

    :param subcategory_id: the subcategory ID for the inductive device.
    :param insulation_id: the insulation class ID for the inductive device.
    :param temperature_hot_spot: the hot spot temperature in C.
    :return: the calculated part stress lambda_b.
    :rtype: float
    :raise: KeyError when passed an unknown subcategory ID or insulation ID.
    """
    _factors = {
        1: {1: (0.0018, 15.6), 2: (0.002, 14.0), 3: (0.0018, 8.7),
            4: (0.002, 10.0), 5: (0.00125, 3.8), 6: (0.00159, 8.4)},
        2: {1: (0.000335, 15.6), 2: (0.000379, 14.0), 3: (0.000319, 8.7),
            4: (0.00035, 10.0)},
    }
    _scale, _exponent = _factors[subcategory_id][insulation_id]
    _ratio = (temperature_hot_spot + 273.0) / REF_TEMPS[subcategory_id][insulation_id]
    return _scale * exp(_ratio ** _exponent)
def calculate_temperature_rise_input_power_weight(
    power_input: float,
    weight: float,
) -> float:
    """Estimate the transformer temperature rise from input power and weight.

    .. attention:: the input power must be computed by the caller from
       voltage and current; it is not an attribute of an inductive device.

    :param power_input: the input power in W.
    :param weight: the weight of the transformer in lbf.
    :return: the calculated temperature rise in C.
    :rtype: float
    :raise: ZeroDivisionError if passed a weight=0.0.
    """
    _weight_term = weight ** 0.6766
    return 2.1 * (power_input / _weight_term)
def calculate_temperature_rise_power_loss_surface(
    power_operating: float,
    area: float,
) -> float:
    """Estimate the temperature rise from power loss and radiating surface.

    :param power_operating: the power loss in W.
    :param area: the radiating surface area of the case in sq. inches.
    :return: the calculated temperature rise in C.
    :rtype: float
    :raise: ZeroDivisionError if passed an area=0.0.
    """
    _scaled_loss = 125.0 * power_operating
    return _scaled_loss / area
def calculate_temperature_rise_power_loss_weight(
    power_operating: float,
    weight: float,
) -> float:
    """Estimate the temperature rise from power loss and device weight.

    :param power_operating: the power loss in W.
    :param weight: the weight of the device in lbf.
    :return: the calculated temperature rise in C.
    :rtype: float
    :raise: ZeroDivisionError if passed a weight=0.0.
    """
    _weight_term = weight ** 0.6766
    return 11.5 * (power_operating / _weight_term)
def get_part_count_lambda_b(
    subcategory_id: int,
    environment_active_id: int,
    family_id: int,
) -> float:
    """Look up the MIL-HDBK-217F parts-count base hazard rate (lambda b).

    Rates come from PART_COUNT_LAMBDA_B, keyed first by subcategory
    (1 = transformer, MIL-HDBK-217F section 11.1; 2 = coil, section 11.2)
    and then by family ID; the resulting list is indexed by the active
    environment.

    :param subcategory_id: the subcategory ID of the inductive device.
    :param environment_active_id: the active operating environment ID.
    :param family_id: the family ID of the inductive device.
    :return: the parts-count base hazard rate.
    :rtype: float
    :raise: KeyError if passed an unknown subcategory ID or family ID.
    :raise: IndexError if passed an unknown active environment ID.
    """
    _environment_rates = PART_COUNT_LAMBDA_B[subcategory_id][family_id]
    return _environment_rates[environment_active_id - 1]
def get_part_stress_quality_factor(
    subcategory_id: int,
    quality_id: int,
    family_id: int,
) -> float:
    """Select the MIL-HDBK-217F part stress quality factor (piQ).

    Transformers (subcategory 1) are keyed by family before the quality
    index; coils (subcategory 2) are indexed by quality alone.

    :param subcategory_id: the subcategory identifier.
    :param quality_id: the quality level identifier.
    :param family_id: the device family identifier.
    :return: the selected quality factor.
    :rtype: float
    :raise: IndexError if passed an unknown quality ID.
    :raise: KeyError if passed an unknown subcategory ID or family ID.
    """
    if subcategory_id == 1:
        return PART_STRESS_PI_Q[subcategory_id][family_id][quality_id - 1]
    return PART_STRESS_PI_Q[subcategory_id][quality_id - 1]
def get_temperature_rise_spec_sheet(page_number: int) -> float:
    """Retrieve the MIL-C-39010 spec-sheet temperature rise.

    Sheets 4, 6, 8, 11 and 12 specify a 35C rise; all other sheets in
    the 1-14 range specify 15C.

    :param page_number: the spec sheet to retrieve the temperature rise for.
    :return: the spec sheet temperature rise.
    :rtype: float
    :raise: KeyError if an unknown spec sheet is passed.
    """
    _rise_by_sheet = {
        1: 15.0, 2: 15.0, 3: 15.0, 4: 35.0, 5: 15.0, 6: 35.0, 7: 15.0,
        8: 35.0, 9: 15.0, 10: 15.0, 11: 35.0, 12: 35.0, 13: 15.0, 14: 15.0,
    }
    return _rise_by_sheet[page_number]
def set_default_values(
    **attributes: Dict[str, Union[float, int, str]],
) -> Dict[str, Union[float, int, str]]:
    """Fill in default values for missing inductor parameters.

    Any non-positive maximum rated temperature or temperature rise is
    replaced by the subcategory/family default; the dict is updated in
    place and returned.

    :param attributes: the attribute dict for the inductive device.
    :return: attributes; the updated attribute dict.
    :rtype: dict
    """
    _subcategory = attributes["subcategory_id"]
    if attributes["rated_temperature_max"] <= 0.0:
        attributes["rated_temperature_max"] = _set_default_max_rated_temperature(
            _subcategory
        )
    if attributes["temperature_rise"] <= 0.0:
        attributes["temperature_rise"] = _set_default_temperature_rise(
            _subcategory, attributes["family_id"]
        )
    return attributes
def _set_default_max_rated_temperature(subcategory_id: int) -> float:
"""Set the default maximum rated temperature.
:param subcategory_id: the subcategory ID of the inductive device with missing
defaults.
:return: _rated_temperature_max
:rtype: float
"""
return 130.0 if subcategory_id == 1 else 125.0
def _set_default_temperature_rise(
subcategory_id: int,
family_id: int,
) -> float:
"""Set the default temperature rise.
:param subcategory_id: the subcategory ID of the inductive device with missing
defaults.
:param family_id: the family ID of the inductive device with missing defaults.
:return: _temperature_rise
:rtype: float
"""
return 30.0 if subcategory_id == 1 and family_id == 3 else 10.0
| StarcoderdataPython |
1943829 | <filename>yolov4_1.py
import tensorflow as tf
import matplotlib.pyplot as plt
import cv2
import numpy as np
from fastnms import fastnms
import colorsys
import random
import time
def draw(image, boxes, scores, classes, all_classes, colors):
    """Draw detection boxes with score labels on *image*, display it, and
    save it to ``result.jpg``.

    :param image: RGB image array; drawn on in place.
    :param boxes: iterable of (x0, y0, x1, y1) box corners in pixels.
    :param scores: per-box confidence scores.
    :param classes: per-box class indices used to pick a color.
    :param all_classes: list of class names (currently unused in the label).
    :param colors: per-class color tuples indexed by class.
    """
    # NOTE: removed unused image_h/image_w locals (hard-coded 416); box
    # clamping below uses the actual image.shape instead.
    for box, score, cl in zip(boxes, scores, classes):
        x0, y0, x1, y1 = box

        # Round to the nearest pixel and clamp the box to the image bounds.
        left = max(0, np.floor(x0 + 0.5).astype(int))
        top = max(0, np.floor(y0 + 0.5).astype(int))
        right = min(image.shape[1], np.floor(x1 + 0.5).astype(int))
        bottom = min(image.shape[0], np.floor(y1 + 0.5).astype(int))

        bbox_color = colors[cl]
        bbox_thick = 1
        cv2.rectangle(image, (left, top), (right, bottom), bbox_color, bbox_thick)

        # Filled background behind the score text for readability.
        bbox_mess = '%.2f' % score
        t_size = cv2.getTextSize(bbox_mess, 0, 0.5, thickness=1)[0]
        cv2.rectangle(image, (left, top), (left + t_size[0], top - t_size[1] - 3), bbox_color, -1)
        cv2.putText(image, bbox_mess, (left, top - 2), cv2.FONT_HERSHEY_SIMPLEX,
                    0.5, (0, 0, 0), 1, lineType=cv2.LINE_AA)

    plt.imshow(image)
    plt.show()
    # OpenCV expects BGR on disk; the in-memory image is RGB.
    image = cv2.cvtColor(image, cv2.COLOR_RGB2BGR)
    cv2.imwrite('result.jpg', image)
def decode(conv_output, anchors, stride, num_class):
    """Decode a raw YOLO head tensor into boxes, objectness and class scores.

    NOTE(review): relies on the module-level ``sess`` and ``feed_dict1``
    globals and always evaluates tensors with ``feed_dict1``, even though
    the caller in ``__main__`` runs the network with ``feed_dict2`` —
    confirm this is intended.

    :param conv_output: raw head output tensor, shape (batch, grid*grid*anchors, 5+num_class).
    :param anchors: anchor (w, h) pairs for this scale.
    :param stride: pixel stride of this feature-map scale.
    :param num_class: number of classes.
    :return: (pred_xywh, pred_conf, pred_prob) as NumPy arrays.
    """
    anchor_per_scale = len(anchors)
    conv_shape = tf.shape(conv_output)
    batch_size = int(sess.run(conv_shape[0], feed_dict1))
    # Grid side length recovered from the flattened dimension.
    output_size = int((sess.run(conv_shape[1], feed_dict1) / anchor_per_scale) ** 0.5)
    conv_output = tf.reshape(conv_output, (batch_size, output_size, output_size, anchor_per_scale, 5 + num_class))
    conv_raw_dxdy = conv_output[:, :, :, :, 0:2]
    conv_raw_dwdh = conv_output[:, :, :, :, 2:4]
    conv_raw_conf = conv_output[:, :, :, :, 4:5]
    conv_raw_prob = conv_output[:, :, :, :, 5:]
    # Build per-cell (x, y) grid offsets, tiled over batch and anchors.
    y = tf.tile(tf.range(output_size, dtype=tf.int32)[:, tf.newaxis], [1, output_size])
    x = tf.tile(tf.range(output_size, dtype=tf.int32)[tf.newaxis, :], [output_size, 1])
    xy_grid = tf.concat([x[:, :, tf.newaxis], y[:, :, tf.newaxis]], axis=-1)
    xy_grid = tf.tile(xy_grid[tf.newaxis, :, :, tf.newaxis, :], [batch_size, 1, 1, anchor_per_scale, 1])
    xy_grid = tf.cast(xy_grid, tf.float32)
    # Standard YOLO decoding: sigmoid offsets plus grid, exponential sizes
    # scaled by anchors.
    pred_xy = (tf.sigmoid(conv_raw_dxdy) + xy_grid) * stride
    pred_wh = (tf.exp(conv_raw_dwdh) * anchors)  # exp
    pred_xywh = tf.concat([pred_xy, pred_wh], axis=-1)
    pred_conf = tf.sigmoid(conv_raw_conf)
    pred_prob = tf.sigmoid(conv_raw_prob)
    pred_xywh = tf.reshape(pred_xywh, (batch_size, -1, 4))  # [-1, -1, 4]
    pred_conf = tf.reshape(pred_conf, (batch_size, -1, 1))  # [-1, -1, 1]
    pred_prob = tf.reshape(pred_prob, (batch_size, -1, num_class))  # [-1, -1, 80]
    # Materialize the decoded tensors as NumPy arrays.
    pred_xywh = sess.run(pred_xywh, feed_dict1)
    pred_conf = sess.run(pred_conf, feed_dict1)
    pred_prob = sess.run(pred_prob, feed_dict1)
    return pred_xywh, pred_conf, pred_prob
if __name__ == '__main__':
    # Score threshold and NMS IoU threshold used at evaluation time.
    conf_thresh = 0.40
    nms_thresh = 0.60
    keep_top_k = 50
    nms_top_k = 50
    input_shape = (416, 416)
    all_classes = ['face']
    # YOLOv4 anchors, one (w, h) triple per feature-map scale.
    anchors = np.array([
        [[12, 16], [19, 36], [40, 28]],
        [[36, 75], [76, 55], [72, 146]],
        [[142, 110], [192, 243], [459, 401]]
    ])
    dellist = []
    timelist = []
    # Define per-class colors (deterministic shuffle for reproducibility).
    hsv_tuples = [(1.0 * x / 1, 1., 1.) for x in range(1)]
    colors = list(map(lambda x: colorsys.hsv_to_rgb(*x), hsv_tuples))
    colors = list(map(lambda x: (int(x[0] * 255), int(x[1] * 255), int(x[2] * 255)), colors))
    random.seed(0)
    random.shuffle(colors)
    random.seed(None)
    # Load and preprocess the two test images (resize to 416x416, BGR->RGB,
    # add a batch dimension).
    image_origin1 = cv2.imread('image1.jpg')
    assert image_origin1 is not None, 'Image is not found, No such file or directory'
    image_origin1 = cv2.resize(image_origin1, input_shape, interpolation=cv2.INTER_CUBIC)
    image_origin1 = cv2.cvtColor(image_origin1, cv2.COLOR_BGR2RGB)
    # plt.imshow(image_origin1)
    # plt.show()
    image1 = image_origin1.reshape(1, input_shape[0], input_shape[1], 3)
    image_origin2 = cv2.imread('image2.jpg')
    assert image_origin2 is not None, 'Image is not found, No such file or directory'
    image_origin2 = cv2.resize(image_origin2, input_shape, interpolation=cv2.INTER_CUBIC)
    image_origin2 = cv2.cvtColor(image_origin2, cv2.COLOR_BGR2RGB)
    # plt.imshow(image_origin2)
    # plt.show()
    image2 = image_origin2.reshape(1, input_shape[0], input_shape[1], 3)
    # Load the frozen TF1 graph.
    with tf.gfile.GFile('yolov4.pb', "rb") as pb:
        graph_def = tf.GraphDef()
        graph_def.ParseFromString(pb.read())
    # We load the graph_def in the default graph
    with tf.Graph().as_default() as graph:
        tf.import_graph_def(
            graph_def,
            name="",  # name can be customized; if you change it, update the tensor names below too
        )
    # Print the network structure.
    for op in graph.get_operations():
        print(op.name, op.values())
    node_in = graph.get_tensor_by_name('inputs:0')  # shape=(?, 416, 416, 3)
    output_s = graph.get_tensor_by_name('detector/yolo-v4/Reshape_8:0')  # small: (?,507,6) ->13*13*3=507
    output_m = graph.get_tensor_by_name('detector/yolo-v4/Reshape_4:0')  # medium: (?,2028,6) ->26*26*3=2028
    output_l = graph.get_tensor_by_name('detector/yolo-v4/Reshape:0')  # large: (?,8112,6) ->52*52*3=8112
    """
    Following the reference GitHub example: use all three feature-map
    scales (small/medium/large) at once; roughly 0.12 fps.
    """
    with tf.Session(graph=graph) as sess:
        feed_dict1 = {node_in: image1}
        feed_dict2 = {node_in: image2}
        # warm up
        sess.run([output_s, output_m, output_l], feed_dict1)
        timelist.append(time.time())
        output_s, output_m, output_l = sess.run([output_s, output_m, output_l], feed_dict2)
        timelist.append(time.time()), print('1', timelist[-1] - timelist[-2])  # time 1
        # Decode each scale (stride 8/16/32) into boxes + scores.
        pred_xywh_s, pred_conf_s, pred_prob_s = decode(output_s, anchors[0], 8, 1)
        pred_xywh_m, pred_conf_m, pred_prob_m = decode(output_m, anchors[1], 16, 1)
        pred_xywh_l, pred_conf_l, pred_prob_l = decode(output_l, anchors[2], 32, 1)
        # print(pred_xywh_s, pred_conf_m, pred_prob_l)
        pred_score_s = pred_conf_s * pred_prob_s
        pred_score_m = pred_conf_m * pred_prob_m
        pred_score_l = pred_conf_l * pred_prob_l
        timelist.append(time.time()), print('2', timelist[-1] - timelist[-2])  # time 2
        # all_pred_boxes = tf.concat([pred_xywh_s, pred_xywh_m, pred_xywh_l], axis=1)  # [batch_size, -1, 4]
        # all_pred_scores = tf.concat([pred_score_s, pred_score_m, pred_score_l], axis=1)  # [batch_size, -1, 80]
        all_pred_boxes = np.concatenate((pred_xywh_s, pred_xywh_m, pred_xywh_l), axis=1)
        all_pred_scores = np.concatenate((pred_score_s, pred_score_m, pred_score_l), axis=1)
        timelist.append(time.time()), print('3', timelist[-1] - timelist[-2])  # time 3 tf: 0.004s, np: 0.0009s
        output = fastnms(all_pred_boxes, all_pred_scores, conf_thresh, nms_thresh, keep_top_k, nms_top_k)
        timelist.append(time.time()), print('4', timelist[-1] - timelist[-2])  # time 4
        output = sess.run(output, feed_dict2)
        boxes, scores, classes = output[0][0], output[1][0], output[2][0]
        print(f"boxes: {boxes} \nscores: {scores}\nclasses: {classes}")
        timelist.append(time.time()), print('5', timelist[-1] - timelist[-2])  # time 5
        # (0, 0) is the top-left corner.  Drop boxes that are too small or
        # extend more than 30px outside the image.
        for idx1, value1 in enumerate(boxes):
            if value1[2] - value1[0] < 20 or value1[3] - value1[1] < 20:
                dellist.append(idx1)
                continue
            for value2 in value1:
                if value2 > input_shape[0]+30 or value2 < -30:
                    dellist.append(idx1)
                    break
        boxes = np.delete(boxes, dellist, 0)
        scores = np.delete(scores, dellist)
        classes = np.delete(classes, dellist)
        # stop timing
        timelist.append(time.time()), print('6', timelist[-1] - timelist[-2])  # time 6
        print('fps: ', 1 / (timelist[-1] - timelist[1]))
        sess.close()
    if boxes is not None:
        draw(image_origin2, boxes, scores, classes, all_classes, colors)
| StarcoderdataPython |
3213823 | '''
config.py
Holds severals datastructures for configs of Editing UI, Hugo and IPFS
Author: <NAME>
eMail: <EMAIL>
GPG-Key-ID: <KEY>
GPG-Fingerprint: A757 5741 FD1E 63E8 357D 48E2 3C68 AE70 B2F8 AA17
License: MIT License
'''
import json
import toml
import logging
import os, os.path
logger = logging.getLogger("LibreBlogging")
configfile = "config/libreblogging.json"

# Prepare LibreBlogging config
logger.info("Reading LibreBlogging config file")
# Ensure the config directory ("config/") exists.
try:
    os.mkdir(configfile.split('/')[0])
except FileExistsError as e:
    logger.info("Config directory is already there.")
libreblogging = {}
if os.path.isfile(configfile):
    # Config file exists, read it!
    # NOTE(review): bare except silently swallows parse errors and leaves
    # libreblogging empty — consider narrowing to json.JSONDecodeError/OSError.
    try:
        with open(configfile, 'r') as cfile:
            libreblogging = json.load(cfile)
    except:
        logger.warning(f"Error reading config file: {configfile}")
else:
    # Config file does not exist. Create it and fill with defaults!
    libreblogging = {
        "name": "LibreBlogging",
        "version": "0.0.1",
        "description": "Web interface to create a static blog",
        "date_format": "%B %d, %Y - %T %Z",
        "timezone": "Europe/Berlin",
        "env": {
            "VIRTUAL_HOST": "localhost"
        },
        "hugo": {
            "basedir": "hugo-site/",
            "postsdir": "hugo-site/content/posts/",
            "configfile": "hugo-site/config.toml"
        },
        "ipfs": {
            "basedir": "ipfs-data/",
            "configfile": "ipfs-data/config"
        }
    }
    with open(configfile, 'w') as cfile:
        json.dump(libreblogging, cfile)

# Override VIRTUAL_HOST from the environment, falling back to localhost
# (also recreates the 'env' section if the loaded config lacked it).
try:
    libreblogging['env']['VIRTUAL_HOST'] = os.environ['VIRTUAL_HOST']
except KeyError:
    libreblogging['env'] = {}
    libreblogging['env']['VIRTUAL_HOST'] = "localhost"

# Read hugo config
logger.info("Reading hugo config file")
hugo = {}
try:
    hugo = toml.load(libreblogging['hugo']['configfile'])
except:
    logger.warning("Error loading hugo config file. Is it there?")
# If params section does not exist, create it
try:
    x = hugo['params']
except KeyError:
    hugo['params'] = {}

# Read IPFS config
ipfs = {}
logger.info("Reading IPFS config file")
try:
    with open(libreblogging['ipfs']['configfile'], 'r') as f:
        ipfs = json.load(f)
except:
    logger.warning("Error loading IPFS config file. Is it there?")
| StarcoderdataPython |
12817730 | """!
@brief Running an experiment with the improved version of SuDoRmRf on
universal source separation with multiple sources.
@author <NAME> {<EMAIL>}
@copyright University of Illinois at Urbana-Champaign
"""
import os
import sys
current_dir = os.path.dirname(os.path.abspath('__file__'))
root_dir = os.path.abspath(os.path.join(current_dir, '../../../'))
sys.path.append(root_dir)
from __config__ import API_KEY
from comet_ml import Experiment, OfflineExperiment
import torch
from torch.nn import functional as F
from tqdm import tqdm
from pprint import pprint
import sudo_rm_rf.dnn.experiments.utils.improved_cmd_args_parser_v2 as parser
import sudo_rm_rf.dnn.experiments.utils.mixture_consistency \
as mixture_consistency
import sudo_rm_rf.dnn.experiments.utils.dataset_setup as dataset_setup
import sudo_rm_rf.dnn.losses.sisdr as sisdr_lib
import sudo_rm_rf.dnn.losses.snr as snr_lib
import sudo_rm_rf.dnn.losses.norm as norm_lib
import sudo_rm_rf.dnn.models.improved_sudormrf as improved_sudormrf
import sudo_rm_rf.dnn.models.groupcomm_sudormrf_v2 as sudormrf_gc_v2
import sudo_rm_rf.dnn.models.causal_improved_sudormrf_v3 as \
causal_improved_sudormrf
import sudo_rm_rf.dnn.models.sudormrf as initial_sudormrf
import sudo_rm_rf.dnn.utils.cometml_loss_report as cometml_report
import sudo_rm_rf.dnn.utils.cometml_log_audio as cometml_audio_logger
import sudo_rm_rf.dnn.utils.log_audio as offline_audio_logger
# torch.backends.cudnn.enabled = False
# ---- Experiment setup: CLI args, data generators, losses, model, optimizer. ----
args = parser.get_args()
hparams = vars(args)  # all hyperparameters as a plain dict
generators = dataset_setup.setup(hparams)
# Hardcode n_sources for all the experiments with musdb
assert hparams['n_channels'] == 1, 'Mono source separation is available for now'
# One comet.ml audio logger per possible number of sources (1..max).
audio_loggers = dict(
    [(n_src,
      cometml_audio_logger.AudioLogger(fs=hparams["fs"],
                                       bs=1,
                                       n_sources=n_src))
     for n_src in range(1, hparams['max_num_sources'] + 1)])
# offline_savedir = os.path.join('/home/thymios/offline_exps',
#                                hparams["project_name"],
#                                '_'.join(hparams['cometml_tags']))
# if not os.path.exists(offline_savedir):
#     os.makedirs(offline_savedir)
# audio_logger = offline_audio_logger.AudioLogger(dirpath=offline_savedir,
#     fs=hparams["fs"], bs=hparams["batch_size"], n_sources=4)
# Hardcode the test generator for each one of the number of sources
# (one FUSS val/test loader per fixed source count, keyed '<split>_<n>_srcs').
for n_src in range(hparams['min_num_sources'], hparams['max_num_sources']+1):
    for split_name in ['val', 'test']:
        loader = dataset_setup.create_loader_for_simple_dataset(
            dataset_name='FUSS',
            separation_task=hparams['separation_task'],
            data_split=split_name, sample_rate=hparams['fs'],
            n_channels=hparams['n_channels'], min_or_max=hparams['min_or_max'],
            zero_pad=hparams['zero_pad_audio'],
            # NOTE(review): 'timelegth' is the keyword the loader API expects —
            # confirm against create_loader_for_simple_dataset before renaming.
            timelegth=hparams['audio_timelength'],
            normalize_audio=hparams['normalize_audio'],
            n_samples=0, min_num_sources=n_src, max_num_sources=n_src)
        gen_name = '{}_{}_srcs'.format(split_name, n_src)
        generators[gen_name] = loader.get_generator(
            batch_size=hparams['batch_size'], num_workers=hparams['n_jobs'])
# experiment = OfflineExperiment(API_KEY, offline_directory=offline_savedir)
# Comet.ml experiment used below for parameter, metric and audio logging.
experiment = Experiment(API_KEY, project_name=hparams['project_name'])
experiment.log_parameters(hparams)
experiment_name = '_'.join(hparams['cometml_tags'])
for tag in hparams['cometml_tags']:
    experiment.add_tag(tag)
if hparams['experiment_name'] is not None:
    experiment.set_name(hparams['experiment_name'])
else:
    experiment.set_name(experiment_name)
# Restrict visible GPUs to the devices requested on the command line.
os.environ['CUDA_VISIBLE_DEVICES'] = ','.join(
    [cad for cad in hparams['cuda_available_devices']])
# Training loss: permutation-invariant SNR (snr_lib); alternatives kept below.
back_loss_tr_loss_name, back_loss_tr_loss = (
    'tr_back_loss_SNR',
    # norm_lib.L1(return_individual_results=False)
    # norm_lib.PermInvariantL1(n_sources=hparams["n_sources"],
    #                          weighted_norm=True)
    # 'tr_back_loss_SISDRi',
    snr_lib.PermInvariantSNRwithZeroRefs(
        n_sources=hparams["max_num_sources"],
        zero_mean=False,
        backward_loss=True,
        inactivity_threshold=-40.)
)
# Build one SI-SDR (or SI-SDR improvement) validation metric per non-train
# generator; single-source sets are scored without the improvement term.
val_losses = {}
all_losses = []
for val_set in [x for x in generators if not x == 'train']:
    if generators[val_set] is None:
        continue
    n_actual_sources = int(val_set.split('_')[1])
    if n_actual_sources == 1:
        single_source = False
        improvement = False
        metric_name = 'SISDR'
        n_estimated_sources = 1
    else:
        single_source = False
        improvement = True
        n_estimated_sources = hparams['max_num_sources']
        metric_name = 'SISDRi'
    val_losses[val_set] = {}
    all_losses.append(val_set + '_{}'.format(metric_name))
    val_losses[val_set][val_set + '_{}'.format(metric_name)] = \
        sisdr_lib.StabilizedPermInvSISDRMetric(
            zero_mean=True,
            single_source=single_source,
            n_estimated_sources=n_estimated_sources,
            n_actual_sources=n_actual_sources,
            backward_loss=False,
            improvement=improvement,
            return_individual_results=True)
all_losses.append(back_loss_tr_loss_name)
# Select the separation architecture by the CLI 'model_type' flag.
if hparams['model_type'] == 'relu':
    model = improved_sudormrf.SuDORMRF(out_channels=hparams['out_channels'],
                                       in_channels=hparams['in_channels'],
                                       num_blocks=hparams['num_blocks'],
                                       upsampling_depth=hparams['upsampling_depth'],
                                       enc_kernel_size=hparams['enc_kernel_size'],
                                       enc_num_basis=hparams['enc_num_basis'],
                                       num_sources=hparams['max_num_sources'])
elif hparams['model_type'] == 'causal':
    model = causal_improved_sudormrf.CausalSuDORMRF(
        in_audio_channels=1,
        out_channels=hparams['out_channels'],
        in_channels=hparams['in_channels'],
        num_blocks=hparams['num_blocks'],
        upsampling_depth=hparams['upsampling_depth'],
        enc_kernel_size=hparams['enc_kernel_size'],
        enc_num_basis=hparams['enc_num_basis'],
        num_sources=hparams['max_num_sources'])
elif hparams['model_type'] == 'softmax':
    model = initial_sudormrf.SuDORMRF(out_channels=hparams['out_channels'],
                                      in_channels=hparams['in_channels'],
                                      num_blocks=hparams['num_blocks'],
                                      upsampling_depth=hparams['upsampling_depth'],
                                      enc_kernel_size=hparams['enc_kernel_size'],
                                      enc_num_basis=hparams['enc_num_basis'],
                                      num_sources=hparams['max_num_sources'])
elif hparams['model_type'] == 'groupcomm_v2':
    model = sudormrf_gc_v2.GroupCommSudoRmRf(
        in_audio_channels=hparams['n_channels'],
        out_channels=hparams['out_channels'],
        in_channels=hparams['in_channels'],
        num_blocks=hparams['num_blocks'],
        upsampling_depth=hparams['upsampling_depth'],
        enc_kernel_size=hparams['enc_kernel_size'],
        enc_num_basis=hparams['enc_num_basis'],
        num_sources=hparams['max_num_sources'],
        group_size=16)
else:
    raise ValueError('Invalid model: {}.'.format(hparams['model_type']))
# Count trainable parameters, purely for logging.
numparams = 0
for f in model.parameters():
    if f.requires_grad:
        numparams += f.numel()
experiment.log_parameter('Parameters', numparams)
print('Trainable Parameters: {}'.format(numparams))
# Multi-GPU data-parallel training; requires CUDA.
model = torch.nn.DataParallel(model).cuda()
opt = torch.optim.Adam(model.parameters(), lr=hparams['learning_rate'])
# lr_scheduler = torch.optim.lr_scheduler.ReduceLROnPlateau(
#     optimizer=opt, mode='max', factor=1. / hparams['divide_lr_by'],
#     patience=hparams['patience'], verbose=True)
def normalize_tensor_wav(wav_tensor, eps=1e-8, std=None):
    """Standardize a waveform tensor along its last (time) axis.

    Subtracts the per-waveform mean and divides by the per-waveform standard
    deviation (or by the caller-supplied ``std``), with ``eps`` added to the
    divisor for numerical stability.

    Args:
        wav_tensor: tensor whose last dimension is time.
        eps: small constant added to the divisor to avoid division by zero.
        std: optional precomputed scale; when None, the tensor's own std
            along the last axis is used.

    Returns:
        The normalized tensor, same shape as ``wav_tensor``.
    """
    centered = wav_tensor - wav_tensor.mean(-1, keepdim=True)
    scale = wav_tensor.std(-1, keepdim=True) if std is None else std
    return centered / (scale + eps)
def online_augment(clean_sources):
    """Online augmentation of a batch of separated sources.

    For each source index, shuffles which batch item contributes that source
    (so mixtures are re-combined across the batch), then permutes the source
    order and applies a random per-source gain in [0.5, 1.5).

    Args:
        clean_sources: tensor of shape (batch, n_sources, time).

    Returns:
        Augmented tensor of the same shape.
    """
    batch, n_src = clean_sources.shape[0], clean_sources.shape[1]
    # Kept for the commented-out renormalization variant below.
    initial_biases = torch.mean(clean_sources, dim=-1, keepdim=True)
    initial_energies = torch.std(clean_sources, dim=-1, keepdim=True)
    # Independently shuffle the batch dimension for every source slot.
    shuffled = torch.stack(
        [clean_sources[torch.randperm(batch), src] for src in range(n_src)], 1)
    # shuffled = normalize_tensor_wav(shuffled)
    # shuffled = (shuffled * initial_energies) + initial_biases
    # Randomize the source order, then rescale each source.
    shuffled = shuffled[:, torch.randperm(n_src)]
    shuffled *= (torch.rand(batch, n_src).unsqueeze(-1) + 0.5)
    return shuffled
# ---- Train / validation loop over epochs. ----
tr_step = 0   # completed training epochs; drives the step-wise LR decay
val_step = 0  # bumped once per epoch; passed to the comet.ml loggers
prev_epoch_val_loss = 0.  # NOTE(review): appears unused in this loop
for i in range(hparams['n_epochs']):
    # Fresh per-epoch accumulators, one entry per tracked loss/metric.
    res_dic = {}
    for loss_name in all_losses:
        res_dic[loss_name] = {'mean': 0., 'std': 0., 'median': 0., 'acc': []}
    print("FUSS Sudo-RM-RF: {} - {} || Epoch: {}/{}".format(
        experiment.get_key(), experiment.get_tags(), i+1, hparams['n_epochs']))
    model.train()
    sum_loss = 0.
    train_tqdm_gen = tqdm(generators['train'], desc='Training')
    for cnt, data in enumerate(train_tqdm_gen):
        opt.zero_grad()
        # data shape: (batch, n_sources, time_samples)
        clean_wavs = online_augment(data)
        clean_wavs = clean_wavs.cuda()
        # Sum the sources into a mixture and standardize it per waveform.
        input_mixture = torch.sum(clean_wavs, -2, keepdim=True)
        # input_mixture = normalize_tensor_wav(input_mixture)
        input_mix_std = input_mixture.std(-1, keepdim=True)
        input_mix_mean = input_mixture.mean(-1, keepdim=True)
        input_mixture = (input_mixture - input_mix_mean) / (
            input_mix_std + 1e-9)
        # input_mix_std = input_mixture.std(-1, keepdim=True)
        # input_mix_mean = input_mixture.mean(-1, keepdim=True)
        # input_mixture = (input_mixture - input_mix_mean) / (input_mix_std + 1e-9)
        # clean_wavs = normalize_tensor_wav(clean_wavs, std=input_mix_std)
        rec_sources_wavs = model(input_mixture)
        # rec_sources_wavs = (rec_sources_wavs * input_mix_std) + input_mix_mean
        # Post-process the estimates with the mixture_consistency helper.
        rec_sources_wavs = mixture_consistency.apply(rec_sources_wavs,
                                                     input_mixture)
        # l = back_loss_tr_loss(normalize_tensor_wav(rec_sources_wavs),
        #                       normalize_tensor_wav(clean_wavs))
        l = back_loss_tr_loss(rec_sources_wavs,
                              clean_wavs)
        l.backward()
        if hparams['clip_grad_norm'] > 0:
            torch.nn.utils.clip_grad_norm_(model.parameters(),
                                           hparams['clip_grad_norm'])
        opt.step()
        sum_loss += l.detach().item()
        train_tqdm_gen.set_description(
            "Training, Running Avg Loss: {}".format(sum_loss / (cnt + 1)))
    # Step-wise LR decay: every `patience` epochs, divide the *initial* LR
    # by divide_lr_by once more (recomputed from tr_step, not from opt state).
    if hparams['patience'] > 0:
        if tr_step % hparams['patience'] == 0:
            new_lr = (hparams['learning_rate']
                      / (hparams['divide_lr_by'] ** (tr_step // hparams['patience'])))
            print('Reducing Learning rate to: {}'.format(new_lr))
            for param_group in opt.param_groups:
                param_group['lr'] = new_lr
    tr_step += 1
    # Evaluate on every non-train generator; metrics accumulate in res_dic.
    for val_set in [x for x in generators if not x == 'train']:
        if generators[val_set] is not None:
            n_actual_sources = int(val_set.split('_')[1])
            model.eval()
            n_songs_written = 10  # NOTE(review): appears unused here
            with torch.no_grad():
                for data in tqdm(generators[val_set],
                                 desc='Validation on {}'.format(val_set)):
                    clean_wavs = data.cuda()
                    input_mixture = torch.sum(clean_wavs, -2, keepdim=True)
                    # input_mixture = normalize_tensor_wav(input_mixture)
                    input_mix_std = input_mixture.std(-1, keepdim=True)
                    input_mix_mean = input_mixture.mean(-1, keepdim=True)
                    input_mixture = (input_mixture - input_mix_mean) / (
                        input_mix_std + 1e-9)
                    rec_sources_wavs = model(input_mixture)
                    # rec_sources_wavs = (rec_sources_wavs * input_mix_std) + input_mix_mean
                    rec_sources_wavs = mixture_consistency.apply(
                        rec_sources_wavs,
                        input_mixture)
                    for loss_name, loss_func in val_losses[val_set].items():
                        # l, best_perm = loss_func(
                        #     normalize_tensor_wav(rec_sources_wavs),
                        #     normalize_tensor_wav(clean_wavs),
                        #     return_best_permutation=True)
                        l, best_perm = loss_func(
                            rec_sources_wavs,
                            clean_wavs,
                            return_best_permutation=True)
                        res_dic[loss_name]['acc'] += l.tolist()
                # Log example audio from the last validation batch of this set.
                audio_loggers[n_actual_sources].log_batch(
                    rec_sources_wavs[:, best_perm.long().cuda()][0, 0].unsqueeze(0),
                    clean_wavs[0].unsqueeze(0),
                    input_mixture[0].unsqueeze(0),
                    experiment, step=val_step, tag=val_set)
    val_step += 1
    # Push per-epoch statistics to comet.ml, then reset the accumulators.
    res_dic = cometml_report.report_losses_mean_and_std(res_dic,
                                                        experiment,
                                                        tr_step,
                                                        val_step)
    for loss_name in res_dic:
        res_dic[loss_name]['acc'] = []
    pprint(res_dic)
| StarcoderdataPython |
3375939 | # -*- coding: utf-8 -*-
import os
import sys
import json
import tccli.options_define as OptionsDefine
import tccli.format_output as FormatOutput
from tccli import __version__
from tccli.utils import Utils
from tccli.exceptions import ConfigurationError, ClientError, ParamError
from tencentcloud.common import credential
from tencentcloud.common.profile.http_profile import HttpProfile
from tencentcloud.common.profile.client_profile import ClientProfile
from tencentcloud.cws.v20180312 import cws_client as cws_client_v20180312
from tencentcloud.cws.v20180312 import models as models_v20180312
from jmespath import search
import time
from tccli import six
def doCreateVulsReport(args, parsed_globals):
    """Invoke the cws CreateVulsReport API and print the formatted response.

    args: request parameters, serialized into the SDK request model.
    parsed_globals: parsed global CLI options (credentials, endpoint, region,
        output format, optional waiter configuration, ...).
    """
    params = parse_global_arg(parsed_globals)
    # Credential precedence: CVM role, then STS assume-role, then static keys.
    if params[OptionsDefine.UseCVMRole.replace('-', '_')]:
        cred = credential.CVMRoleCredential()
    elif params[OptionsDefine.RoleArn.replace('-', '_')] \
            and params[OptionsDefine.RoleSessionName.replace('-', '_')]:
        cred = credential.STSAssumeRoleCredential(
            params[OptionsDefine.SecretId], params[OptionsDefine.SecretKey],
            params[OptionsDefine.RoleArn.replace('-', '_')],
            params[OptionsDefine.RoleSessionName.replace('-', '_')])
    else:
        cred = credential.Credential(
            params[OptionsDefine.SecretId], params[OptionsDefine.SecretKey],
            params[OptionsDefine.Token])
    timeout = params[OptionsDefine.Timeout]
    http_profile = HttpProfile(
        reqTimeout=60 if timeout is None else int(timeout),
        reqMethod="POST",
        endpoint=params[OptionsDefine.Endpoint],
        proxy=params[OptionsDefine.HttpsProxy.replace('-', '_')])
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    api_version = params[OptionsDefine.Version]
    client = CLIENT_MAP[api_version].CwsClient(
        cred, params[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    model = MODELS_MAP[api_version].CreateVulsReportRequest()
    model.from_json_string(json.dumps(args))
    start_time = time.time()
    # Issue the request; when a waiter is configured, poll until the JMESPath
    # expression reaches the target value or the waiter timeout elapses.
    while True:
        rsp = client.CreateVulsReport(model)
        result = rsp.to_json_string()
        try:
            json_obj = json.loads(result)
        except TypeError:
            # Some interpreters hand back bytes; decode before parsing.
            json_obj = json.loads(result.decode('utf-8'))
        if not params[OptionsDefine.Waiter]:
            break
        waiter = params['OptionsDefine.WaiterInfo']
        if search(waiter['expr'], json_obj) == waiter['to']:
            break
        if time.time() - start_time >= waiter['timeout']:
            raise ClientError('Request timeout, wait `%s` to `%s` timeout, last request is %s' %
                              (waiter['expr'], waiter['to'],
                               search(waiter['expr'], json_obj)))
        print('Inquiry result is %s.' % search(waiter['expr'], json_obj))
        time.sleep(waiter['interval'])
    FormatOutput.output("action", json_obj, params[OptionsDefine.Output], params[OptionsDefine.Filter])
def doDescribeVuls(args, parsed_globals):
    """Invoke the cws DescribeVuls API and print the formatted response.

    args: request parameters, serialized into the SDK request model.
    parsed_globals: parsed global CLI options (credentials, endpoint, region,
        output format, optional waiter configuration, ...).
    """
    params = parse_global_arg(parsed_globals)
    # Credential precedence: CVM role, then STS assume-role, then static keys.
    if params[OptionsDefine.UseCVMRole.replace('-', '_')]:
        cred = credential.CVMRoleCredential()
    elif params[OptionsDefine.RoleArn.replace('-', '_')] \
            and params[OptionsDefine.RoleSessionName.replace('-', '_')]:
        cred = credential.STSAssumeRoleCredential(
            params[OptionsDefine.SecretId], params[OptionsDefine.SecretKey],
            params[OptionsDefine.RoleArn.replace('-', '_')],
            params[OptionsDefine.RoleSessionName.replace('-', '_')])
    else:
        cred = credential.Credential(
            params[OptionsDefine.SecretId], params[OptionsDefine.SecretKey],
            params[OptionsDefine.Token])
    timeout = params[OptionsDefine.Timeout]
    http_profile = HttpProfile(
        reqTimeout=60 if timeout is None else int(timeout),
        reqMethod="POST",
        endpoint=params[OptionsDefine.Endpoint],
        proxy=params[OptionsDefine.HttpsProxy.replace('-', '_')])
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    api_version = params[OptionsDefine.Version]
    client = CLIENT_MAP[api_version].CwsClient(
        cred, params[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    model = MODELS_MAP[api_version].DescribeVulsRequest()
    model.from_json_string(json.dumps(args))
    start_time = time.time()
    # Issue the request; when a waiter is configured, poll until the JMESPath
    # expression reaches the target value or the waiter timeout elapses.
    while True:
        rsp = client.DescribeVuls(model)
        result = rsp.to_json_string()
        try:
            json_obj = json.loads(result)
        except TypeError:
            # Some interpreters hand back bytes; decode before parsing.
            json_obj = json.loads(result.decode('utf-8'))
        if not params[OptionsDefine.Waiter]:
            break
        waiter = params['OptionsDefine.WaiterInfo']
        if search(waiter['expr'], json_obj) == waiter['to']:
            break
        if time.time() - start_time >= waiter['timeout']:
            raise ClientError('Request timeout, wait `%s` to `%s` timeout, last request is %s' %
                              (waiter['expr'], waiter['to'],
                               search(waiter['expr'], json_obj)))
        print('Inquiry result is %s.' % search(waiter['expr'], json_obj))
        time.sleep(waiter['interval'])
    FormatOutput.output("action", json_obj, params[OptionsDefine.Output], params[OptionsDefine.Filter])
def doModifyMonitorAttribute(args, parsed_globals):
    """Invoke the cws ModifyMonitorAttribute API and print the formatted response.

    args: request parameters, serialized into the SDK request model.
    parsed_globals: parsed global CLI options (credentials, endpoint, region,
        output format, optional waiter configuration, ...).
    """
    params = parse_global_arg(parsed_globals)
    # Credential precedence: CVM role, then STS assume-role, then static keys.
    if params[OptionsDefine.UseCVMRole.replace('-', '_')]:
        cred = credential.CVMRoleCredential()
    elif params[OptionsDefine.RoleArn.replace('-', '_')] \
            and params[OptionsDefine.RoleSessionName.replace('-', '_')]:
        cred = credential.STSAssumeRoleCredential(
            params[OptionsDefine.SecretId], params[OptionsDefine.SecretKey],
            params[OptionsDefine.RoleArn.replace('-', '_')],
            params[OptionsDefine.RoleSessionName.replace('-', '_')])
    else:
        cred = credential.Credential(
            params[OptionsDefine.SecretId], params[OptionsDefine.SecretKey],
            params[OptionsDefine.Token])
    timeout = params[OptionsDefine.Timeout]
    http_profile = HttpProfile(
        reqTimeout=60 if timeout is None else int(timeout),
        reqMethod="POST",
        endpoint=params[OptionsDefine.Endpoint],
        proxy=params[OptionsDefine.HttpsProxy.replace('-', '_')])
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    api_version = params[OptionsDefine.Version]
    client = CLIENT_MAP[api_version].CwsClient(
        cred, params[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    model = MODELS_MAP[api_version].ModifyMonitorAttributeRequest()
    model.from_json_string(json.dumps(args))
    start_time = time.time()
    # Issue the request; when a waiter is configured, poll until the JMESPath
    # expression reaches the target value or the waiter timeout elapses.
    while True:
        rsp = client.ModifyMonitorAttribute(model)
        result = rsp.to_json_string()
        try:
            json_obj = json.loads(result)
        except TypeError:
            # Some interpreters hand back bytes; decode before parsing.
            json_obj = json.loads(result.decode('utf-8'))
        if not params[OptionsDefine.Waiter]:
            break
        waiter = params['OptionsDefine.WaiterInfo']
        if search(waiter['expr'], json_obj) == waiter['to']:
            break
        if time.time() - start_time >= waiter['timeout']:
            raise ClientError('Request timeout, wait `%s` to `%s` timeout, last request is %s' %
                              (waiter['expr'], waiter['to'],
                               search(waiter['expr'], json_obj)))
        print('Inquiry result is %s.' % search(waiter['expr'], json_obj))
        time.sleep(waiter['interval'])
    FormatOutput.output("action", json_obj, params[OptionsDefine.Output], params[OptionsDefine.Filter])
def doCreateSitesScans(args, parsed_globals):
    """Invoke the cws CreateSitesScans API and print the formatted response.

    args: request parameters, serialized into the SDK request model.
    parsed_globals: parsed global CLI options (credentials, endpoint, region,
        output format, optional waiter configuration, ...).
    """
    params = parse_global_arg(parsed_globals)
    # Credential precedence: CVM role, then STS assume-role, then static keys.
    if params[OptionsDefine.UseCVMRole.replace('-', '_')]:
        cred = credential.CVMRoleCredential()
    elif params[OptionsDefine.RoleArn.replace('-', '_')] \
            and params[OptionsDefine.RoleSessionName.replace('-', '_')]:
        cred = credential.STSAssumeRoleCredential(
            params[OptionsDefine.SecretId], params[OptionsDefine.SecretKey],
            params[OptionsDefine.RoleArn.replace('-', '_')],
            params[OptionsDefine.RoleSessionName.replace('-', '_')])
    else:
        cred = credential.Credential(
            params[OptionsDefine.SecretId], params[OptionsDefine.SecretKey],
            params[OptionsDefine.Token])
    timeout = params[OptionsDefine.Timeout]
    http_profile = HttpProfile(
        reqTimeout=60 if timeout is None else int(timeout),
        reqMethod="POST",
        endpoint=params[OptionsDefine.Endpoint],
        proxy=params[OptionsDefine.HttpsProxy.replace('-', '_')])
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    api_version = params[OptionsDefine.Version]
    client = CLIENT_MAP[api_version].CwsClient(
        cred, params[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    model = MODELS_MAP[api_version].CreateSitesScansRequest()
    model.from_json_string(json.dumps(args))
    start_time = time.time()
    # Issue the request; when a waiter is configured, poll until the JMESPath
    # expression reaches the target value or the waiter timeout elapses.
    while True:
        rsp = client.CreateSitesScans(model)
        result = rsp.to_json_string()
        try:
            json_obj = json.loads(result)
        except TypeError:
            # Some interpreters hand back bytes; decode before parsing.
            json_obj = json.loads(result.decode('utf-8'))
        if not params[OptionsDefine.Waiter]:
            break
        waiter = params['OptionsDefine.WaiterInfo']
        if search(waiter['expr'], json_obj) == waiter['to']:
            break
        if time.time() - start_time >= waiter['timeout']:
            raise ClientError('Request timeout, wait `%s` to `%s` timeout, last request is %s' %
                              (waiter['expr'], waiter['to'],
                               search(waiter['expr'], json_obj)))
        print('Inquiry result is %s.' % search(waiter['expr'], json_obj))
        time.sleep(waiter['interval'])
    FormatOutput.output("action", json_obj, params[OptionsDefine.Output], params[OptionsDefine.Filter])
def doCreateSites(args, parsed_globals):
    """Invoke the cws CreateSites API and print the formatted response.

    args: request parameters, serialized into the SDK request model.
    parsed_globals: parsed global CLI options (credentials, endpoint, region,
        output format, optional waiter configuration, ...).
    """
    params = parse_global_arg(parsed_globals)
    # Credential precedence: CVM role, then STS assume-role, then static keys.
    if params[OptionsDefine.UseCVMRole.replace('-', '_')]:
        cred = credential.CVMRoleCredential()
    elif params[OptionsDefine.RoleArn.replace('-', '_')] \
            and params[OptionsDefine.RoleSessionName.replace('-', '_')]:
        cred = credential.STSAssumeRoleCredential(
            params[OptionsDefine.SecretId], params[OptionsDefine.SecretKey],
            params[OptionsDefine.RoleArn.replace('-', '_')],
            params[OptionsDefine.RoleSessionName.replace('-', '_')])
    else:
        cred = credential.Credential(
            params[OptionsDefine.SecretId], params[OptionsDefine.SecretKey],
            params[OptionsDefine.Token])
    timeout = params[OptionsDefine.Timeout]
    http_profile = HttpProfile(
        reqTimeout=60 if timeout is None else int(timeout),
        reqMethod="POST",
        endpoint=params[OptionsDefine.Endpoint],
        proxy=params[OptionsDefine.HttpsProxy.replace('-', '_')])
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    api_version = params[OptionsDefine.Version]
    client = CLIENT_MAP[api_version].CwsClient(
        cred, params[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    model = MODELS_MAP[api_version].CreateSitesRequest()
    model.from_json_string(json.dumps(args))
    start_time = time.time()
    # Issue the request; when a waiter is configured, poll until the JMESPath
    # expression reaches the target value or the waiter timeout elapses.
    while True:
        rsp = client.CreateSites(model)
        result = rsp.to_json_string()
        try:
            json_obj = json.loads(result)
        except TypeError:
            # Some interpreters hand back bytes; decode before parsing.
            json_obj = json.loads(result.decode('utf-8'))
        if not params[OptionsDefine.Waiter]:
            break
        waiter = params['OptionsDefine.WaiterInfo']
        if search(waiter['expr'], json_obj) == waiter['to']:
            break
        if time.time() - start_time >= waiter['timeout']:
            raise ClientError('Request timeout, wait `%s` to `%s` timeout, last request is %s' %
                              (waiter['expr'], waiter['to'],
                               search(waiter['expr'], json_obj)))
        print('Inquiry result is %s.' % search(waiter['expr'], json_obj))
        time.sleep(waiter['interval'])
    FormatOutput.output("action", json_obj, params[OptionsDefine.Output], params[OptionsDefine.Filter])
def doCreateVulsMisinformation(args, parsed_globals):
    """Invoke the cws CreateVulsMisinformation API and print the formatted response.

    args: request parameters, serialized into the SDK request model.
    parsed_globals: parsed global CLI options (credentials, endpoint, region,
        output format, optional waiter configuration, ...).
    """
    params = parse_global_arg(parsed_globals)
    # Credential precedence: CVM role, then STS assume-role, then static keys.
    if params[OptionsDefine.UseCVMRole.replace('-', '_')]:
        cred = credential.CVMRoleCredential()
    elif params[OptionsDefine.RoleArn.replace('-', '_')] \
            and params[OptionsDefine.RoleSessionName.replace('-', '_')]:
        cred = credential.STSAssumeRoleCredential(
            params[OptionsDefine.SecretId], params[OptionsDefine.SecretKey],
            params[OptionsDefine.RoleArn.replace('-', '_')],
            params[OptionsDefine.RoleSessionName.replace('-', '_')])
    else:
        cred = credential.Credential(
            params[OptionsDefine.SecretId], params[OptionsDefine.SecretKey],
            params[OptionsDefine.Token])
    timeout = params[OptionsDefine.Timeout]
    http_profile = HttpProfile(
        reqTimeout=60 if timeout is None else int(timeout),
        reqMethod="POST",
        endpoint=params[OptionsDefine.Endpoint],
        proxy=params[OptionsDefine.HttpsProxy.replace('-', '_')])
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    api_version = params[OptionsDefine.Version]
    client = CLIENT_MAP[api_version].CwsClient(
        cred, params[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    model = MODELS_MAP[api_version].CreateVulsMisinformationRequest()
    model.from_json_string(json.dumps(args))
    start_time = time.time()
    # Issue the request; when a waiter is configured, poll until the JMESPath
    # expression reaches the target value or the waiter timeout elapses.
    while True:
        rsp = client.CreateVulsMisinformation(model)
        result = rsp.to_json_string()
        try:
            json_obj = json.loads(result)
        except TypeError:
            # Some interpreters hand back bytes; decode before parsing.
            json_obj = json.loads(result.decode('utf-8'))
        if not params[OptionsDefine.Waiter]:
            break
        waiter = params['OptionsDefine.WaiterInfo']
        if search(waiter['expr'], json_obj) == waiter['to']:
            break
        if time.time() - start_time >= waiter['timeout']:
            raise ClientError('Request timeout, wait `%s` to `%s` timeout, last request is %s' %
                              (waiter['expr'], waiter['to'],
                               search(waiter['expr'], json_obj)))
        print('Inquiry result is %s.' % search(waiter['expr'], json_obj))
        time.sleep(waiter['interval'])
    FormatOutput.output("action", json_obj, params[OptionsDefine.Output], params[OptionsDefine.Filter])
def doDescribeConfig(args, parsed_globals):
    """Invoke the cws DescribeConfig API and print the formatted response.

    args: request parameters, serialized into the SDK request model.
    parsed_globals: parsed global CLI options (credentials, endpoint, region,
        output format, optional waiter configuration, ...).
    """
    params = parse_global_arg(parsed_globals)
    # Credential precedence: CVM role, then STS assume-role, then static keys.
    if params[OptionsDefine.UseCVMRole.replace('-', '_')]:
        cred = credential.CVMRoleCredential()
    elif params[OptionsDefine.RoleArn.replace('-', '_')] \
            and params[OptionsDefine.RoleSessionName.replace('-', '_')]:
        cred = credential.STSAssumeRoleCredential(
            params[OptionsDefine.SecretId], params[OptionsDefine.SecretKey],
            params[OptionsDefine.RoleArn.replace('-', '_')],
            params[OptionsDefine.RoleSessionName.replace('-', '_')])
    else:
        cred = credential.Credential(
            params[OptionsDefine.SecretId], params[OptionsDefine.SecretKey],
            params[OptionsDefine.Token])
    timeout = params[OptionsDefine.Timeout]
    http_profile = HttpProfile(
        reqTimeout=60 if timeout is None else int(timeout),
        reqMethod="POST",
        endpoint=params[OptionsDefine.Endpoint],
        proxy=params[OptionsDefine.HttpsProxy.replace('-', '_')])
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    api_version = params[OptionsDefine.Version]
    client = CLIENT_MAP[api_version].CwsClient(
        cred, params[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    model = MODELS_MAP[api_version].DescribeConfigRequest()
    model.from_json_string(json.dumps(args))
    start_time = time.time()
    # Issue the request; when a waiter is configured, poll until the JMESPath
    # expression reaches the target value or the waiter timeout elapses.
    while True:
        rsp = client.DescribeConfig(model)
        result = rsp.to_json_string()
        try:
            json_obj = json.loads(result)
        except TypeError:
            # Some interpreters hand back bytes; decode before parsing.
            json_obj = json.loads(result.decode('utf-8'))
        if not params[OptionsDefine.Waiter]:
            break
        waiter = params['OptionsDefine.WaiterInfo']
        if search(waiter['expr'], json_obj) == waiter['to']:
            break
        if time.time() - start_time >= waiter['timeout']:
            raise ClientError('Request timeout, wait `%s` to `%s` timeout, last request is %s' %
                              (waiter['expr'], waiter['to'],
                               search(waiter['expr'], json_obj)))
        print('Inquiry result is %s.' % search(waiter['expr'], json_obj))
        time.sleep(waiter['interval'])
    FormatOutput.output("action", json_obj, params[OptionsDefine.Output], params[OptionsDefine.Filter])
def doDescribeSites(args, parsed_globals):
    """Invoke the cws DescribeSites API and print the formatted response.

    args: request parameters, serialized into the SDK request model.
    parsed_globals: parsed global CLI options (credentials, endpoint, region,
        output format, optional waiter configuration, ...).
    """
    params = parse_global_arg(parsed_globals)
    # Credential precedence: CVM role, then STS assume-role, then static keys.
    if params[OptionsDefine.UseCVMRole.replace('-', '_')]:
        cred = credential.CVMRoleCredential()
    elif params[OptionsDefine.RoleArn.replace('-', '_')] \
            and params[OptionsDefine.RoleSessionName.replace('-', '_')]:
        cred = credential.STSAssumeRoleCredential(
            params[OptionsDefine.SecretId], params[OptionsDefine.SecretKey],
            params[OptionsDefine.RoleArn.replace('-', '_')],
            params[OptionsDefine.RoleSessionName.replace('-', '_')])
    else:
        cred = credential.Credential(
            params[OptionsDefine.SecretId], params[OptionsDefine.SecretKey],
            params[OptionsDefine.Token])
    timeout = params[OptionsDefine.Timeout]
    http_profile = HttpProfile(
        reqTimeout=60 if timeout is None else int(timeout),
        reqMethod="POST",
        endpoint=params[OptionsDefine.Endpoint],
        proxy=params[OptionsDefine.HttpsProxy.replace('-', '_')])
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    api_version = params[OptionsDefine.Version]
    client = CLIENT_MAP[api_version].CwsClient(
        cred, params[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    model = MODELS_MAP[api_version].DescribeSitesRequest()
    model.from_json_string(json.dumps(args))
    start_time = time.time()
    # Issue the request; when a waiter is configured, poll until the JMESPath
    # expression reaches the target value or the waiter timeout elapses.
    while True:
        rsp = client.DescribeSites(model)
        result = rsp.to_json_string()
        try:
            json_obj = json.loads(result)
        except TypeError:
            # Some interpreters hand back bytes; decode before parsing.
            json_obj = json.loads(result.decode('utf-8'))
        if not params[OptionsDefine.Waiter]:
            break
        waiter = params['OptionsDefine.WaiterInfo']
        if search(waiter['expr'], json_obj) == waiter['to']:
            break
        if time.time() - start_time >= waiter['timeout']:
            raise ClientError('Request timeout, wait `%s` to `%s` timeout, last request is %s' %
                              (waiter['expr'], waiter['to'],
                               search(waiter['expr'], json_obj)))
        print('Inquiry result is %s.' % search(waiter['expr'], json_obj))
        time.sleep(waiter['interval'])
    FormatOutput.output("action", json_obj, params[OptionsDefine.Output], params[OptionsDefine.Filter])
def doDescribeSitesVerification(args, parsed_globals):
    """Invoke the cws DescribeSitesVerification API and print the formatted response.

    args: request parameters, serialized into the SDK request model.
    parsed_globals: parsed global CLI options (credentials, endpoint, region,
        output format, optional waiter configuration, ...).
    """
    params = parse_global_arg(parsed_globals)
    # Credential precedence: CVM role, then STS assume-role, then static keys.
    if params[OptionsDefine.UseCVMRole.replace('-', '_')]:
        cred = credential.CVMRoleCredential()
    elif params[OptionsDefine.RoleArn.replace('-', '_')] \
            and params[OptionsDefine.RoleSessionName.replace('-', '_')]:
        cred = credential.STSAssumeRoleCredential(
            params[OptionsDefine.SecretId], params[OptionsDefine.SecretKey],
            params[OptionsDefine.RoleArn.replace('-', '_')],
            params[OptionsDefine.RoleSessionName.replace('-', '_')])
    else:
        cred = credential.Credential(
            params[OptionsDefine.SecretId], params[OptionsDefine.SecretKey],
            params[OptionsDefine.Token])
    timeout = params[OptionsDefine.Timeout]
    http_profile = HttpProfile(
        reqTimeout=60 if timeout is None else int(timeout),
        reqMethod="POST",
        endpoint=params[OptionsDefine.Endpoint],
        proxy=params[OptionsDefine.HttpsProxy.replace('-', '_')])
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    api_version = params[OptionsDefine.Version]
    client = CLIENT_MAP[api_version].CwsClient(
        cred, params[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    model = MODELS_MAP[api_version].DescribeSitesVerificationRequest()
    model.from_json_string(json.dumps(args))
    start_time = time.time()
    # Issue the request; when a waiter is configured, poll until the JMESPath
    # expression reaches the target value or the waiter timeout elapses.
    while True:
        rsp = client.DescribeSitesVerification(model)
        result = rsp.to_json_string()
        try:
            json_obj = json.loads(result)
        except TypeError:
            # Some interpreters hand back bytes; decode before parsing.
            json_obj = json.loads(result.decode('utf-8'))
        if not params[OptionsDefine.Waiter]:
            break
        waiter = params['OptionsDefine.WaiterInfo']
        if search(waiter['expr'], json_obj) == waiter['to']:
            break
        if time.time() - start_time >= waiter['timeout']:
            raise ClientError('Request timeout, wait `%s` to `%s` timeout, last request is %s' %
                              (waiter['expr'], waiter['to'],
                               search(waiter['expr'], json_obj)))
        print('Inquiry result is %s.' % search(waiter['expr'], json_obj))
        time.sleep(waiter['interval'])
    FormatOutput.output("action", json_obj, params[OptionsDefine.Output], params[OptionsDefine.Filter])
def doModifySiteAttribute(args, parsed_globals):
    """Call the cws ModifySiteAttribute API and print the JSON response.

    Builds credentials (CVM role, STS assume-role, or static keys) from the
    parsed global options, sends the request, optionally polls until the
    --waiter expression matches, then renders the result.
    """
    g_param = parse_global_arg(parsed_globals)
    # CLI option names use '-', but g_param keys use '_'.
    if g_param[OptionsDefine.UseCVMRole.replace('-', '_')]:
        cred = credential.CVMRoleCredential()
    elif g_param[OptionsDefine.RoleArn.replace('-', '_')] and g_param[OptionsDefine.RoleSessionName.replace('-', '_')]:
        cred = credential.STSAssumeRoleCredential(
            g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey],
            g_param[OptionsDefine.RoleArn.replace('-', '_')],
            g_param[OptionsDefine.RoleSessionName.replace('-', '_')]
        )
    else:
        cred = credential.Credential(
            g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
        )
    http_profile = HttpProfile(
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
        proxy=g_param[OptionsDefine.HttpsProxy.replace('-', '_')]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.CwsClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.ModifySiteAttributeRequest()
    model.from_json_string(json.dumps(args))
    start_time = time.time()
    while True:
        rsp = client.ModifySiteAttribute(model)
        result = rsp.to_json_string()
        try:
            json_obj = json.loads(result)
        except TypeError:
            # Some interpreter versions return bytes here (python 3.3).
            json_obj = json.loads(result.decode('utf-8'))
        if not g_param[OptionsDefine.Waiter]:
            break
        # Waiter info was validated and normalized by parse_global_arg.
        waiter = g_param['OptionsDefine.WaiterInfo']
        if search(waiter['expr'], json_obj) == waiter['to']:
            break
        if time.time() - start_time >= waiter['timeout']:
            raise ClientError('Request timeout, wait `%s` to `%s` timeout, last request is %s' %
                              (waiter['expr'], waiter['to'],
                               search(waiter['expr'], json_obj)))
        print('Inquiry result is %s.' % search(waiter['expr'], json_obj))
        time.sleep(waiter['interval'])
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doModifyConfigAttribute(args, parsed_globals):
    """Call the cws ModifyConfigAttribute API and print the JSON response.

    Builds credentials from the parsed global options, sends the request,
    optionally polls until the --waiter expression matches, then renders
    the result.
    """
    g_param = parse_global_arg(parsed_globals)
    # CLI option names use '-', but g_param keys use '_'.
    if g_param[OptionsDefine.UseCVMRole.replace('-', '_')]:
        cred = credential.CVMRoleCredential()
    elif g_param[OptionsDefine.RoleArn.replace('-', '_')] and g_param[OptionsDefine.RoleSessionName.replace('-', '_')]:
        cred = credential.STSAssumeRoleCredential(
            g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey],
            g_param[OptionsDefine.RoleArn.replace('-', '_')],
            g_param[OptionsDefine.RoleSessionName.replace('-', '_')]
        )
    else:
        cred = credential.Credential(
            g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
        )
    http_profile = HttpProfile(
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
        proxy=g_param[OptionsDefine.HttpsProxy.replace('-', '_')]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.CwsClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.ModifyConfigAttributeRequest()
    model.from_json_string(json.dumps(args))
    start_time = time.time()
    while True:
        rsp = client.ModifyConfigAttribute(model)
        result = rsp.to_json_string()
        try:
            json_obj = json.loads(result)
        except TypeError:
            # Some interpreter versions return bytes here (python 3.3).
            json_obj = json.loads(result.decode('utf-8'))
        if not g_param[OptionsDefine.Waiter]:
            break
        # Waiter info was validated and normalized by parse_global_arg.
        waiter = g_param['OptionsDefine.WaiterInfo']
        if search(waiter['expr'], json_obj) == waiter['to']:
            break
        if time.time() - start_time >= waiter['timeout']:
            raise ClientError('Request timeout, wait `%s` to `%s` timeout, last request is %s' %
                              (waiter['expr'], waiter['to'],
                               search(waiter['expr'], json_obj)))
        print('Inquiry result is %s.' % search(waiter['expr'], json_obj))
        time.sleep(waiter['interval'])
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDescribeVulsNumberTimeline(args, parsed_globals):
    """Call the cws DescribeVulsNumberTimeline API and print the JSON response.

    Builds credentials from the parsed global options, sends the request,
    optionally polls until the --waiter expression matches, then renders
    the result.
    """
    g_param = parse_global_arg(parsed_globals)
    # CLI option names use '-', but g_param keys use '_'.
    if g_param[OptionsDefine.UseCVMRole.replace('-', '_')]:
        cred = credential.CVMRoleCredential()
    elif g_param[OptionsDefine.RoleArn.replace('-', '_')] and g_param[OptionsDefine.RoleSessionName.replace('-', '_')]:
        cred = credential.STSAssumeRoleCredential(
            g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey],
            g_param[OptionsDefine.RoleArn.replace('-', '_')],
            g_param[OptionsDefine.RoleSessionName.replace('-', '_')]
        )
    else:
        cred = credential.Credential(
            g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
        )
    http_profile = HttpProfile(
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
        proxy=g_param[OptionsDefine.HttpsProxy.replace('-', '_')]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.CwsClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.DescribeVulsNumberTimelineRequest()
    model.from_json_string(json.dumps(args))
    start_time = time.time()
    while True:
        rsp = client.DescribeVulsNumberTimeline(model)
        result = rsp.to_json_string()
        try:
            json_obj = json.loads(result)
        except TypeError:
            # Some interpreter versions return bytes here (python 3.3).
            json_obj = json.loads(result.decode('utf-8'))
        if not g_param[OptionsDefine.Waiter]:
            break
        # Waiter info was validated and normalized by parse_global_arg.
        waiter = g_param['OptionsDefine.WaiterInfo']
        if search(waiter['expr'], json_obj) == waiter['to']:
            break
        if time.time() - start_time >= waiter['timeout']:
            raise ClientError('Request timeout, wait `%s` to `%s` timeout, last request is %s' %
                              (waiter['expr'], waiter['to'],
                               search(waiter['expr'], json_obj)))
        print('Inquiry result is %s.' % search(waiter['expr'], json_obj))
        time.sleep(waiter['interval'])
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDescribeMonitors(args, parsed_globals):
    """Call the cws DescribeMonitors API and print the JSON response.

    Builds credentials from the parsed global options, sends the request,
    optionally polls until the --waiter expression matches, then renders
    the result.
    """
    g_param = parse_global_arg(parsed_globals)
    # CLI option names use '-', but g_param keys use '_'.
    if g_param[OptionsDefine.UseCVMRole.replace('-', '_')]:
        cred = credential.CVMRoleCredential()
    elif g_param[OptionsDefine.RoleArn.replace('-', '_')] and g_param[OptionsDefine.RoleSessionName.replace('-', '_')]:
        cred = credential.STSAssumeRoleCredential(
            g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey],
            g_param[OptionsDefine.RoleArn.replace('-', '_')],
            g_param[OptionsDefine.RoleSessionName.replace('-', '_')]
        )
    else:
        cred = credential.Credential(
            g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
        )
    http_profile = HttpProfile(
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
        proxy=g_param[OptionsDefine.HttpsProxy.replace('-', '_')]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.CwsClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.DescribeMonitorsRequest()
    model.from_json_string(json.dumps(args))
    start_time = time.time()
    while True:
        rsp = client.DescribeMonitors(model)
        result = rsp.to_json_string()
        try:
            json_obj = json.loads(result)
        except TypeError:
            # Some interpreter versions return bytes here (python 3.3).
            json_obj = json.loads(result.decode('utf-8'))
        if not g_param[OptionsDefine.Waiter]:
            break
        # Waiter info was validated and normalized by parse_global_arg.
        waiter = g_param['OptionsDefine.WaiterInfo']
        if search(waiter['expr'], json_obj) == waiter['to']:
            break
        if time.time() - start_time >= waiter['timeout']:
            raise ClientError('Request timeout, wait `%s` to `%s` timeout, last request is %s' %
                              (waiter['expr'], waiter['to'],
                               search(waiter['expr'], json_obj)))
        print('Inquiry result is %s.' % search(waiter['expr'], json_obj))
        time.sleep(waiter['interval'])
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDeleteMonitors(args, parsed_globals):
    """Call the cws DeleteMonitors API and print the JSON response.

    Builds credentials from the parsed global options, sends the request,
    optionally polls until the --waiter expression matches, then renders
    the result.
    """
    g_param = parse_global_arg(parsed_globals)
    # CLI option names use '-', but g_param keys use '_'.
    if g_param[OptionsDefine.UseCVMRole.replace('-', '_')]:
        cred = credential.CVMRoleCredential()
    elif g_param[OptionsDefine.RoleArn.replace('-', '_')] and g_param[OptionsDefine.RoleSessionName.replace('-', '_')]:
        cred = credential.STSAssumeRoleCredential(
            g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey],
            g_param[OptionsDefine.RoleArn.replace('-', '_')],
            g_param[OptionsDefine.RoleSessionName.replace('-', '_')]
        )
    else:
        cred = credential.Credential(
            g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
        )
    http_profile = HttpProfile(
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
        proxy=g_param[OptionsDefine.HttpsProxy.replace('-', '_')]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.CwsClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.DeleteMonitorsRequest()
    model.from_json_string(json.dumps(args))
    start_time = time.time()
    while True:
        rsp = client.DeleteMonitors(model)
        result = rsp.to_json_string()
        try:
            json_obj = json.loads(result)
        except TypeError:
            # Some interpreter versions return bytes here (python 3.3).
            json_obj = json.loads(result.decode('utf-8'))
        if not g_param[OptionsDefine.Waiter]:
            break
        # Waiter info was validated and normalized by parse_global_arg.
        waiter = g_param['OptionsDefine.WaiterInfo']
        if search(waiter['expr'], json_obj) == waiter['to']:
            break
        if time.time() - start_time >= waiter['timeout']:
            raise ClientError('Request timeout, wait `%s` to `%s` timeout, last request is %s' %
                              (waiter['expr'], waiter['to'],
                               search(waiter['expr'], json_obj)))
        print('Inquiry result is %s.' % search(waiter['expr'], json_obj))
        time.sleep(waiter['interval'])
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doCreateMonitors(args, parsed_globals):
    """Call the cws CreateMonitors API and print the JSON response.

    Builds credentials from the parsed global options, sends the request,
    optionally polls until the --waiter expression matches, then renders
    the result.
    """
    g_param = parse_global_arg(parsed_globals)
    # CLI option names use '-', but g_param keys use '_'.
    if g_param[OptionsDefine.UseCVMRole.replace('-', '_')]:
        cred = credential.CVMRoleCredential()
    elif g_param[OptionsDefine.RoleArn.replace('-', '_')] and g_param[OptionsDefine.RoleSessionName.replace('-', '_')]:
        cred = credential.STSAssumeRoleCredential(
            g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey],
            g_param[OptionsDefine.RoleArn.replace('-', '_')],
            g_param[OptionsDefine.RoleSessionName.replace('-', '_')]
        )
    else:
        cred = credential.Credential(
            g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
        )
    http_profile = HttpProfile(
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
        proxy=g_param[OptionsDefine.HttpsProxy.replace('-', '_')]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.CwsClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.CreateMonitorsRequest()
    model.from_json_string(json.dumps(args))
    start_time = time.time()
    while True:
        rsp = client.CreateMonitors(model)
        result = rsp.to_json_string()
        try:
            json_obj = json.loads(result)
        except TypeError:
            # Some interpreter versions return bytes here (python 3.3).
            json_obj = json.loads(result.decode('utf-8'))
        if not g_param[OptionsDefine.Waiter]:
            break
        # Waiter info was validated and normalized by parse_global_arg.
        waiter = g_param['OptionsDefine.WaiterInfo']
        if search(waiter['expr'], json_obj) == waiter['to']:
            break
        if time.time() - start_time >= waiter['timeout']:
            raise ClientError('Request timeout, wait `%s` to `%s` timeout, last request is %s' %
                              (waiter['expr'], waiter['to'],
                               search(waiter['expr'], json_obj)))
        print('Inquiry result is %s.' % search(waiter['expr'], json_obj))
        time.sleep(waiter['interval'])
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDeleteSites(args, parsed_globals):
    """Call the cws DeleteSites API and print the JSON response.

    Builds credentials from the parsed global options, sends the request,
    optionally polls until the --waiter expression matches, then renders
    the result.
    """
    g_param = parse_global_arg(parsed_globals)
    # CLI option names use '-', but g_param keys use '_'.
    if g_param[OptionsDefine.UseCVMRole.replace('-', '_')]:
        cred = credential.CVMRoleCredential()
    elif g_param[OptionsDefine.RoleArn.replace('-', '_')] and g_param[OptionsDefine.RoleSessionName.replace('-', '_')]:
        cred = credential.STSAssumeRoleCredential(
            g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey],
            g_param[OptionsDefine.RoleArn.replace('-', '_')],
            g_param[OptionsDefine.RoleSessionName.replace('-', '_')]
        )
    else:
        cred = credential.Credential(
            g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
        )
    http_profile = HttpProfile(
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
        proxy=g_param[OptionsDefine.HttpsProxy.replace('-', '_')]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.CwsClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.DeleteSitesRequest()
    model.from_json_string(json.dumps(args))
    start_time = time.time()
    while True:
        rsp = client.DeleteSites(model)
        result = rsp.to_json_string()
        try:
            json_obj = json.loads(result)
        except TypeError:
            # Some interpreter versions return bytes here (python 3.3).
            json_obj = json.loads(result.decode('utf-8'))
        if not g_param[OptionsDefine.Waiter]:
            break
        # Waiter info was validated and normalized by parse_global_arg.
        waiter = g_param['OptionsDefine.WaiterInfo']
        if search(waiter['expr'], json_obj) == waiter['to']:
            break
        if time.time() - start_time >= waiter['timeout']:
            raise ClientError('Request timeout, wait `%s` to `%s` timeout, last request is %s' %
                              (waiter['expr'], waiter['to'],
                               search(waiter['expr'], json_obj)))
        print('Inquiry result is %s.' % search(waiter['expr'], json_obj))
        time.sleep(waiter['interval'])
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDescribeVulsNumber(args, parsed_globals):
    """Call the cws DescribeVulsNumber API and print the JSON response.

    Builds credentials from the parsed global options, sends the request,
    optionally polls until the --waiter expression matches, then renders
    the result.
    """
    g_param = parse_global_arg(parsed_globals)
    # CLI option names use '-', but g_param keys use '_'.
    if g_param[OptionsDefine.UseCVMRole.replace('-', '_')]:
        cred = credential.CVMRoleCredential()
    elif g_param[OptionsDefine.RoleArn.replace('-', '_')] and g_param[OptionsDefine.RoleSessionName.replace('-', '_')]:
        cred = credential.STSAssumeRoleCredential(
            g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey],
            g_param[OptionsDefine.RoleArn.replace('-', '_')],
            g_param[OptionsDefine.RoleSessionName.replace('-', '_')]
        )
    else:
        cred = credential.Credential(
            g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
        )
    http_profile = HttpProfile(
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
        proxy=g_param[OptionsDefine.HttpsProxy.replace('-', '_')]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.CwsClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.DescribeVulsNumberRequest()
    model.from_json_string(json.dumps(args))
    start_time = time.time()
    while True:
        rsp = client.DescribeVulsNumber(model)
        result = rsp.to_json_string()
        try:
            json_obj = json.loads(result)
        except TypeError:
            # Some interpreter versions return bytes here (python 3.3).
            json_obj = json.loads(result.decode('utf-8'))
        if not g_param[OptionsDefine.Waiter]:
            break
        # Waiter info was validated and normalized by parse_global_arg.
        waiter = g_param['OptionsDefine.WaiterInfo']
        if search(waiter['expr'], json_obj) == waiter['to']:
            break
        if time.time() - start_time >= waiter['timeout']:
            raise ClientError('Request timeout, wait `%s` to `%s` timeout, last request is %s' %
                              (waiter['expr'], waiter['to'],
                               search(waiter['expr'], json_obj)))
        print('Inquiry result is %s.' % search(waiter['expr'], json_obj))
        time.sleep(waiter['interval'])
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doVerifySites(args, parsed_globals):
    """Call the cws VerifySites API and print the JSON response.

    Builds credentials from the parsed global options, sends the request,
    optionally polls until the --waiter expression matches, then renders
    the result.
    """
    g_param = parse_global_arg(parsed_globals)
    # CLI option names use '-', but g_param keys use '_'.
    if g_param[OptionsDefine.UseCVMRole.replace('-', '_')]:
        cred = credential.CVMRoleCredential()
    elif g_param[OptionsDefine.RoleArn.replace('-', '_')] and g_param[OptionsDefine.RoleSessionName.replace('-', '_')]:
        cred = credential.STSAssumeRoleCredential(
            g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey],
            g_param[OptionsDefine.RoleArn.replace('-', '_')],
            g_param[OptionsDefine.RoleSessionName.replace('-', '_')]
        )
    else:
        cred = credential.Credential(
            g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
        )
    http_profile = HttpProfile(
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
        proxy=g_param[OptionsDefine.HttpsProxy.replace('-', '_')]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.CwsClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.VerifySitesRequest()
    model.from_json_string(json.dumps(args))
    start_time = time.time()
    while True:
        rsp = client.VerifySites(model)
        result = rsp.to_json_string()
        try:
            json_obj = json.loads(result)
        except TypeError:
            # Some interpreter versions return bytes here (python 3.3).
            json_obj = json.loads(result.decode('utf-8'))
        if not g_param[OptionsDefine.Waiter]:
            break
        # Waiter info was validated and normalized by parse_global_arg.
        waiter = g_param['OptionsDefine.WaiterInfo']
        if search(waiter['expr'], json_obj) == waiter['to']:
            break
        if time.time() - start_time >= waiter['timeout']:
            raise ClientError('Request timeout, wait `%s` to `%s` timeout, last request is %s' %
                              (waiter['expr'], waiter['to'],
                               search(waiter['expr'], json_obj)))
        print('Inquiry result is %s.' % search(waiter['expr'], json_obj))
        time.sleep(waiter['interval'])
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDescribeSiteQuota(args, parsed_globals):
    """Call the cws DescribeSiteQuota API and print the JSON response.

    Builds credentials from the parsed global options, sends the request,
    optionally polls until the --waiter expression matches, then renders
    the result.
    """
    g_param = parse_global_arg(parsed_globals)
    # CLI option names use '-', but g_param keys use '_'.
    if g_param[OptionsDefine.UseCVMRole.replace('-', '_')]:
        cred = credential.CVMRoleCredential()
    elif g_param[OptionsDefine.RoleArn.replace('-', '_')] and g_param[OptionsDefine.RoleSessionName.replace('-', '_')]:
        cred = credential.STSAssumeRoleCredential(
            g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey],
            g_param[OptionsDefine.RoleArn.replace('-', '_')],
            g_param[OptionsDefine.RoleSessionName.replace('-', '_')]
        )
    else:
        cred = credential.Credential(
            g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
        )
    http_profile = HttpProfile(
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
        proxy=g_param[OptionsDefine.HttpsProxy.replace('-', '_')]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.CwsClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.DescribeSiteQuotaRequest()
    model.from_json_string(json.dumps(args))
    start_time = time.time()
    while True:
        rsp = client.DescribeSiteQuota(model)
        result = rsp.to_json_string()
        try:
            json_obj = json.loads(result)
        except TypeError:
            # Some interpreter versions return bytes here (python 3.3).
            json_obj = json.loads(result.decode('utf-8'))
        if not g_param[OptionsDefine.Waiter]:
            break
        # Waiter info was validated and normalized by parse_global_arg.
        waiter = g_param['OptionsDefine.WaiterInfo']
        if search(waiter['expr'], json_obj) == waiter['to']:
            break
        if time.time() - start_time >= waiter['timeout']:
            raise ClientError('Request timeout, wait `%s` to `%s` timeout, last request is %s' %
                              (waiter['expr'], waiter['to'],
                               search(waiter['expr'], json_obj)))
        print('Inquiry result is %s.' % search(waiter['expr'], json_obj))
        time.sleep(waiter['interval'])
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
# Maps API version string -> generated SDK client module.
CLIENT_MAP = {
    "v20180312": cws_client_v20180312,
}

# Maps API version string -> generated request/response model module.
MODELS_MAP = {
    "v20180312": models_v20180312,
}

# Maps CLI action name -> handler function defined above in this module.
ACTION_MAP = {
    "CreateVulsReport": doCreateVulsReport,
    "DescribeVuls": doDescribeVuls,
    "ModifyMonitorAttribute": doModifyMonitorAttribute,
    "CreateSitesScans": doCreateSitesScans,
    "CreateSites": doCreateSites,
    "CreateVulsMisinformation": doCreateVulsMisinformation,
    "DescribeConfig": doDescribeConfig,
    "DescribeSites": doDescribeSites,
    "DescribeSitesVerification": doDescribeSitesVerification,
    "ModifySiteAttribute": doModifySiteAttribute,
    "ModifyConfigAttribute": doModifyConfigAttribute,
    "DescribeVulsNumberTimeline": doDescribeVulsNumberTimeline,
    "DescribeMonitors": doDescribeMonitors,
    "DeleteMonitors": doDeleteMonitors,
    "CreateMonitors": doCreateMonitors,
    "DeleteSites": doDeleteSites,
    "DescribeVulsNumber": doDescribeVulsNumber,
    "VerifySites": doVerifySites,
    "DescribeSiteQuota": doDescribeSiteQuota,
}

# API versions this command group supports (checked in parse_global_arg).
AVAILABLE_VERSION_LIST = [
    "v20180312",
]
def action_caller():
    """Return the mapping of CLI action names to their handler functions."""
    return ACTION_MAP
def parse_global_arg(parsed_globals):
    """Merge CLI global options with profile files and environment variables.

    Resolution order is: explicit CLI arguments, then (only when no
    --profile was given) environment variables, then the
    ``~/.tccli/<profile>.configure`` / ``<profile>.credential`` files.
    Also normalizes the API version and validates/normalizes the --waiter
    specification into ``g_param['OptionsDefine.WaiterInfo']``.

    Raises:
        ConfigurationError: if a required option cannot be resolved or a
            profile file is not valid JSON.
    """
    g_param = parsed_globals
    is_exist_profile = True
    if not parsed_globals["profile"]:
        is_exist_profile = False
        g_param["profile"] = "default"
    configure_path = os.path.join(os.path.expanduser("~"), ".tccli")
    is_conf_exist, conf_path = Utils.file_existed(configure_path, g_param["profile"] + ".configure")
    is_cred_exist, cred_path = Utils.file_existed(configure_path, g_param["profile"] + ".credential")
    conf = {}
    cred = {}
    if is_conf_exist:
        conf = Utils.load_json_msg(conf_path)
    if is_cred_exist:
        cred = Utils.load_json_msg(cred_path)
    if not (isinstance(conf, dict) and isinstance(cred, dict)):
        raise ConfigurationError(
            "file: %s or %s is not json format"
            % (g_param["profile"] + ".configure", g_param["profile"] + ".credential"))
    if OptionsDefine.Token not in cred:
        cred[OptionsDefine.Token] = None
    # Environment variables only apply when the user did not pick a profile.
    if not is_exist_profile:
        if os.environ.get(OptionsDefine.ENV_SECRET_ID) and os.environ.get(OptionsDefine.ENV_SECRET_KEY):
            cred[OptionsDefine.SecretId] = os.environ.get(OptionsDefine.ENV_SECRET_ID)
            cred[OptionsDefine.SecretKey] = os.environ.get(OptionsDefine.ENV_SECRET_KEY)
            cred[OptionsDefine.Token] = os.environ.get(OptionsDefine.ENV_TOKEN)
        if os.environ.get(OptionsDefine.ENV_REGION):
            conf[OptionsDefine.Region] = os.environ.get(OptionsDefine.ENV_REGION)
        if os.environ.get(OptionsDefine.ENV_ROLE_ARN) and os.environ.get(OptionsDefine.ENV_ROLE_SESSION_NAME):
            cred[OptionsDefine.RoleArn] = os.environ.get(OptionsDefine.ENV_ROLE_ARN)
            cred[OptionsDefine.RoleSessionName] = os.environ.get(OptionsDefine.ENV_ROLE_SESSION_NAME)
    # Fill in any option the CLI left as None from the profile files.
    for param in g_param.keys():
        if g_param[param] is None:
            if param in [OptionsDefine.SecretKey, OptionsDefine.SecretId, OptionsDefine.Token]:
                if param in cred:
                    g_param[param] = cred[param]
                elif not g_param[OptionsDefine.UseCVMRole.replace('-', '_')]:
                    raise ConfigurationError("%s is invalid" % param)
            elif param in [OptionsDefine.Region, OptionsDefine.Output]:
                if param in conf:
                    g_param[param] = conf[param]
                else:
                    raise ConfigurationError("%s is invalid" % param)
            elif param.replace('_', '-') in [OptionsDefine.RoleArn, OptionsDefine.RoleSessionName]:
                if param.replace('_', '-') in cred:
                    g_param[param] = cred[param.replace('_', '-')]
    try:
        if g_param[OptionsDefine.ServiceVersion]:
            g_param[OptionsDefine.Version] = "v" + g_param[OptionsDefine.ServiceVersion].replace('-', '')
        else:
            version = conf["cws"][OptionsDefine.Version]
            g_param[OptionsDefine.Version] = "v" + version.replace('-', '')
        if g_param[OptionsDefine.Endpoint] is None:
            g_param[OptionsDefine.Endpoint] = conf["cws"][OptionsDefine.Endpoint]
    except Exception as err:
        raise ConfigurationError("config file:%s error, %s" % (conf_path, str(err)))
    if g_param[OptionsDefine.Version] not in AVAILABLE_VERSION_LIST:
        raise Exception("available versions: %s" % " ".join(AVAILABLE_VERSION_LIST))
    if g_param[OptionsDefine.Waiter]:
        # SECURITY NOTE: eval() on a user-supplied CLI string. This only ever
        # runs on trusted local input, but ast.literal_eval would be safer.
        param = eval(g_param[OptionsDefine.Waiter])
        if 'expr' not in param:
            raise Exception('`expr` in `--waiter` must be defined')
        if 'to' not in param:
            raise Exception('`to` in `--waiter` must be defined')
        if 'timeout' not in param:
            if 'waiter' in conf and 'timeout' in conf['waiter']:
                param['timeout'] = conf['waiter']['timeout']
            else:
                param['timeout'] = 180
        if 'interval' not in param:
            if 'waiter' in conf and 'interval' in conf['waiter']:
                param['interval'] = conf['waiter']['interval']
            else:
                # BUG FIX: this branch previously assigned param['timeout'] = 5,
                # leaving 'interval' unset (KeyError below) and clobbering the
                # already-resolved timeout.
                param['interval'] = 5
        param['interval'] = min(param['interval'], param['timeout'])
        g_param['OptionsDefine.WaiterInfo'] = param
    # On Python 2 json.load yields unicode values; callers expect str,
    # so re-encode every text value as utf-8 bytes.
    if six.PY2:
        for key, value in g_param.items():
            if isinstance(value, six.text_type):
                g_param[key] = value.encode('utf-8')
    return g_param
| StarcoderdataPython |
1787779 | <gh_stars>1-10
"""
Lost_unfound
"""
import logging
import time
from tasks import ceph_manager
from tasks.util.rados import rados
from teuthology import misc as teuthology
from teuthology.orchestra import run
log = logging.getLogger(__name__)
def task(ctx, config):
    """
    Test handling of lost objects on an ec pool.

    A pretty rigid 4-OSD cluster is brought up and tested by this task:
    objects are written while two OSDs holding necessary shards are down,
    one OSD is then marked lost, and the resulting unfound objects are
    reverted with ``mark_unfound_lost delete``. Finally the deleted
    objects are verified to be gone and the cluster to recover cleanly.
    """
    if config is None:
        config = {}
    assert isinstance(config, dict), \
        'lost_unfound task only accepts a dict for configuration'
    first_mon = teuthology.get_first_mon(ctx, config)
    (mon,) = ctx.cluster.only(first_mon).remotes.keys()

    manager = ceph_manager.CephManager(
        mon,
        ctx=ctx,
        logger=log.getChild('ceph_manager'),
        )

    manager.wait_for_clean()

    # Default EC profile: k=2 data + m=2 coding shards, failure domain per OSD,
    # so losing two OSDs makes some objects unrecoverable.
    profile = config.get('erasure_code_profile', {
        'k': '2',
        'm': '2',
        'crush-failure-domain': 'osd'
    })
    profile_name = profile.get('name', 'lost_unfound')
    manager.create_erasure_code_profile(profile_name, profile)
    pool = manager.create_pool_with_unique_name(
        erasure_code_profile_name=profile_name,
        min_size=2)

    # something that is always there, readable and never empty
    dummyfile = '/etc/group'

    # kludge to make sure they get a map
    rados(ctx, mon, ['-p', pool, 'put', 'dummy', dummyfile])

    manager.flush_pg_stats([0, 1])
    manager.wait_for_recovery()

    # create old objects
    for f in range(1, 10):
        rados(ctx, mon, ['-p', pool, 'put', 'existing_%d' % f, dummyfile])
        rados(ctx, mon, ['-p', pool, 'put', 'existed_%d' % f, dummyfile])
        rados(ctx, mon, ['-p', pool, 'rm', 'existed_%d' % f])

    # delay recovery, and make the pg log very long (to prevent backfill)
    manager.raw_cluster_cmd(
            'tell', 'osd.1',
            'injectargs',
            '--osd-recovery-delay-start 1000 --osd-min-pg-log-entries 100000000'
            )

    # Take osd.0 and osd.3 down so the writes below land on only two shards.
    manager.kill_osd(0)
    manager.mark_down_osd(0)
    manager.kill_osd(3)
    manager.mark_down_osd(3)

    for f in range(1, 10):
        rados(ctx, mon, ['-p', pool, 'put', 'new_%d' % f, dummyfile])
        rados(ctx, mon, ['-p', pool, 'put', 'existed_%d' % f, dummyfile])
        rados(ctx, mon, ['-p', pool, 'put', 'existing_%d' % f, dummyfile])

    # take out osd.1 and a necessary shard of those objects.
    manager.kill_osd(1)
    manager.mark_down_osd(1)
    manager.raw_cluster_cmd('osd', 'lost', '1', '--yes-i-really-mean-it')
    manager.revive_osd(0)
    manager.wait_till_osd_is_up(0)
    manager.revive_osd(3)
    manager.wait_till_osd_is_up(3)
    manager.flush_pg_stats([0, 2, 3])
    manager.wait_till_active()
    manager.flush_pg_stats([0, 2, 3])

    # verify that there are unfound objects
    unfound = manager.get_num_unfound_objects()
    log.info("there are %d unfound objects" % unfound)
    assert unfound

    testdir = teuthology.get_testdir(ctx)
    procs = []
    # Optionally run a background rados bench to exercise the cluster
    # while unfound objects are being handled (waited on at the end).
    if config.get('parallel_bench', True):
        procs.append(mon.run(
            args=[
                "/bin/sh", "-c",
                " ".join(['adjust-ulimits',
                          'ceph-coverage',
                          '{tdir}/archive/coverage',
                          'rados',
                          '--no-log-to-stderr',
                          '--name', 'client.admin',
                          '-b', str(4<<10),
                          '-p' , pool,
                          '-t', '20',
                          'bench', '240', 'write',
                      ]).format(tdir=testdir),
            ],
            logger=log.getChild('radosbench.{id}'.format(id='client.admin')),
            stdin=run.PIPE,
            wait=False
        ))
    time.sleep(10)

    # mark stuff lost
    pgs = manager.get_pg_stats()
    for pg in pgs:
        if pg['stat_sum']['num_objects_unfound'] > 0:
            # verify that i can list them direct from the osd
            log.info('listing missing/lost in %s state %s', pg['pgid'],
                     pg['state']);
            m = manager.list_pg_unfound(pg['pgid'])
            log.info('%s' % m)
            assert m['num_unfound'] == pg['stat_sum']['num_objects_unfound']

            log.info("reverting unfound in %s", pg['pgid'])
            manager.raw_cluster_cmd('pg', pg['pgid'],
                                    'mark_unfound_lost', 'delete')
        else:
            log.info("no unfound in %s", pg['pgid'])

    # Kick recovery on the surviving OSDs and wait for it to complete.
    manager.raw_cluster_cmd('tell', 'osd.0', 'debug', 'kick_recovery_wq', '5')
    manager.raw_cluster_cmd('tell', 'osd.2', 'debug', 'kick_recovery_wq', '5')
    manager.raw_cluster_cmd('tell', 'osd.3', 'debug', 'kick_recovery_wq', '5')
    manager.flush_pg_stats([0, 2, 3])
    manager.wait_for_recovery()

    if not config.get('parallel_bench', True):
        time.sleep(20)

    # verify result: all objects written while the shards were missing
    # (and then marked lost) must now be gone, so every get fails.
    for f in range(1, 10):
        err = rados(ctx, mon, ['-p', pool, 'get', 'new_%d' % f, '-'])
        assert err
        err = rados(ctx, mon, ['-p', pool, 'get', 'existed_%d' % f, '-'])
        assert err
        err = rados(ctx, mon, ['-p', pool, 'get', 'existing_%d' % f, '-'])
        assert err

    # see if osd.1 can cope
    manager.revive_osd(1)
    manager.wait_till_osd_is_up(1)
    manager.wait_for_clean()
    run.wait(procs)
| StarcoderdataPython |
3372100 | from .ev_charging_env import EVChargingEnv
| StarcoderdataPython |
#!/usr/bin/env python
# Minimal okdataset example: build a distributed dataset over the
# integers 1..29 and sum them with a reduce. Python 2 only (xrange,
# print statement).
from okdataset import ChainableList, Context, Logger
logger = Logger("sum example")
context = Context()
logger.info("Building list")
l = ChainableList([ x for x in xrange(1, 30) ])
logger.info("Building dataset")
# bufferSize=1 forces one element per buffer, maximizing distribution.
ds = context.dataSet(l, label="sum", bufferSize=1)
logger.info("Calling reduce")
print ds.reduce(lambda x, y: x + y)
logger.info("All done!")
| StarcoderdataPython |
1668972 | from rubygems_utils import RubyGemsTestUtils
class RubyGemsTestrubygems_aws_sdk_networkmanager(RubyGemsTestUtils):
    """Smoke tests for the packaged aws-sdk-networkmanager Ruby gem."""

    def test_gem_list_rubygems_aws_sdk_networkmanager(self):
        # The gem must be present in the installed gem list.
        self.gem_is_installed("aws-sdk-networkmanager")

    def test_load_aws_sdk_networkmanager(self):
        # The gem must be require-able without raising an error.
        self.gem_is_loadable("aws-sdk-networkmanager")
| StarcoderdataPython |
5113635 | import numpy
def check_nd(denoiser):
    """Check that *denoiser* accepts images of dimensionality 1 through 4.

    Calls ``denoiser`` on a zero-filled array for each ndim in 1..4.

    Raises:
        AssertionError: if any call fails, chained to the underlying
            exception so the real cause appears in the traceback.
    """
    shape = (17, 7, 13, 9)
    for ndim in range(1, 5):
        image = numpy.zeros(shape[:ndim])
        try:
            denoiser(image)
        except Exception as exc:
            # Re-raise with context instead of the original bare
            # `assert False`, which hid the cause entirely (and is
            # stripped when Python runs with -O).
            raise AssertionError(
                "denoiser failed on a %d-dimensional image" % ndim
            ) from exc
| StarcoderdataPython |
324889 | <filename>tests/playbook/test_num_blocks_error_handling__count.py
import pytest
from ansiblemetrics.playbook.num_blocks_error_handling import NumBlocksErrorHandling
# Fixture playbooks for NumBlocksErrorHandling. Tabs (\t) in the literals
# stand in for YAML indentation and are expanded by the test before parsing.

# A single task with no block/rescue/always sections -> expected count 0.
script_0_1 = '- name: disable the server in haproxy\n\tshell: echo "disable server myapplb/{{ inventory_hostname }}" ' \
'| socat stdio /var/lib/haproxy/stats\n\tdelegate_to: "{{ item }}"\n\tloop: "{{ groups.lbservers }}" '

# A wait_for task (with comments and a `when` clause) but still no
# block-based error handling -> expected count 0.
script_0_2 = '---\n-\n# NOTE (leseb): wait for mon discovery and quorum resolution\n# the admin key is not ' \
'instantaneously created so we have to wait a bit\n- name: "wait for {{ cluster }}.client.admin.keyring ' \
'exists"\n\twait_for:\n\t\tpath: /etc/ceph/{{ cluster }}.client.admin.keyring\n\twhen: cephx '

# Two `block:` tasks: the first has rescue/always (error handling), the
# second has a bare block with neither -> expected count 1.
script_1 = '- name: Attempt and graceful roll back demo\n\tblock:\n\t\t- debug:\n\t\t\t\tmsg: \'I execute ' \
'normally\'\n\t\t- name: i force a failure\n\t\t\tcommand: /bin/false\n\t\t- debug:\n\t\t\t\tmsg: \'I ' \
'never execute, due to the above task failing, :-(\'\n\trescue:\n\t\t- debug:\n\t\t\t\tmsg: \'I caught an ' \
'error\'\n\t\t- name: i force a failure in middle of recovery! >:-)\n\t\t\tcommand: /bin/false\n\t\t- ' \
'debug:\n\t\t\t\tmsg: \'I also never execute :-(\'\n\talways:\n\t\t- debug:\n\t\t\t\tmsg: "This always ' \
'executes"\n\n- name: A task with a block that does not handle errors\n\tblock:\n\t\t- ' \
'debug:\n\t\t\t\tmsg: \'I execute normally\'\n\t\t- name: i force a failure\n\t\t\tcommand: ' \
'/bin/false\n\t\t- debug:\n\t\t\t\tmsg: \'I never execute, due to the above task failing, :-(\' '

# (script, expected metric value) pairs consumed by the parametrized test.
TEST_DATA = [
    (script_0_1, 0),
    (script_0_2, 0),
    (script_1, 1)
]
@pytest.mark.parametrize('script, expected', TEST_DATA)
def test(script, expected):
    """Check NumBlocksErrorHandling.count() against each fixture script."""
    # Tab characters in the fixtures stand in for 2-space YAML indentation.
    expanded = script.expandtabs(2)
    metric = NumBlocksErrorHandling(expanded)
    assert metric.count() == expected
| StarcoderdataPython |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.