content stringlengths 7 1.05M |
|---|
def BuscarCaja(p, q):
    """Binary search over the global list L for the index that holds a 1.

    Halves the inclusive range [p, q] each call and recurses into whichever
    half contains the 1. Assumes exactly one 1 is present in L[p:q+1].
    """
    global L
    middle = (p + q) // 2
    if q - p <= 1:
        # Down to one or two boxes: return whichever holds the 1.
        return p if L[p] == 1 else q
    if 1 in L[middle:q + 1]:
        return BuscarCaja(middle, q)
    return BuscarCaja(p, middle - 1)


L = [0, 0, 0, 0, 1, 0]
print("Indice: " + str(BuscarCaja(0, len(L) - 1)))
L = [0, 1, 0, 0, 0, 0, 0, 0, 0]
print("Indice: " + str(BuscarCaja(0, len(L) - 1)))
|
# 32. Longest Valid Parentheses
# ttungl@gmail.com
# Given a string containing just the characters '(' and ')', find the length of the longest valid (well-formed) parentheses substring.
# For "(()", the longest valid parentheses substring is "()", which has length = 2.
# Another example is ")()())", where the longest valid parentheses substring is "()()", which has length = 4.
class Solution(object):
    """LeetCode 32: Longest Valid Parentheses."""

    def longestValidParentheses(self, s):
        """
        Return the length of the longest well-formed '()' substring of s.

        :type s: str
        :rtype: int

        Uses the classic index stack: the stack holds indices of unmatched
        '(' characters, with a sentinel marking the position just before the
        current candidate run. O(n) time, O(n) space.

        NOTE: the original carried a second, DP-based implementation after
        the first `return` — it was unreachable dead code and has been
        removed.
        """
        # Sentinel: index before the start of the current valid run.
        stack = [-1]
        res = 0
        for i, ch in enumerate(s):
            if ch == '(':
                stack.append(i)
            else:  # ')'
                stack.pop()
                if not stack:
                    # Unmatched ')': it delimits runs; record its position.
                    stack.append(i)
                else:
                    # Valid run spans (stack[-1], i].
                    res = max(res, i - stack[-1])
        return res
|
# NOTE(review): this shadows the builtin TimeoutError (Python 3.3+) for any
# module importing this name; consider renaming in a future release.
class TimeoutError(Exception):
    """Error raised when .result() on a future doesn't return within time."""
class EventLoopNotInitialized(Exception):
    """Raised when an operation requires an event loop that was never initialized."""
class RequestBodyNotBytes(Exception):
    """Raised when a request body is not of type bytes."""
|
#!/usr/bin/env python3
class Solution(object):
    """LeetCode 35: Search Insert Position."""

    def searchInsert(self, nums, target):
        """
        Return the index of target in sorted nums, or the index at which it
        would be inserted to keep nums sorted (leftmost position on ties).

        :type nums: List[int]
        :type target: int
        :rtype: int

        Fixes two defects of the original recursive version: infinite
        recursion on an empty list (len 0 split [] into [] + [] forever) and
        debug print() calls inside the search. Iterative binary search,
        O(log n) time, O(1) space; same results as the original on all
        non-empty inputs (bisect_left semantics).
        """
        lo, hi = 0, len(nums)
        while lo < hi:
            mid = (lo + hi) // 2
            if nums[mid] < target:
                lo = mid + 1
            else:
                hi = mid
        return lo

    def test(self):
        """Smoke-test searchInsert on a few sample arrays, printing results."""
        arr = [1, 2, 3, 4]
        target = 5
        k = self.searchInsert(arr, target)
        print(k)
        arr = [1, 3, 5, 7, 9]
        target = 2
        k = self.searchInsert(arr, target)
        print(k)
        arr = [1, 4, 9, 16, 25, 36]
        target = 10
        k = self.searchInsert(arr, target)
        print(k)
# Run the smoke tests when this module is executed.
solver = Solution()
solver.test()
|
#! /usr/bin/env python3
# -*- coding: utf-8 -*-
#
# binary_heap_implement.py
# python
#
# 🎂"Here's to the crazy ones. The misfits. The rebels.
# The troublemakers. The round pegs in the square holes.
# The ones who see things differently. They're not fond
# of rules. And they have no respect for the status quo.
# You can quote them, disagree with them, glorify or vilify
# them. About the only thing you can't do is ignore them.
# Because they change things. They push the human race forward.
# And while some may see them as the crazy ones, we see genius.
# Because the people who are crazy enough to think they can change
# the world, are the ones who do."
#
# Created by Chyi Yaqing on 02/23/19 12:31.
# Copyright © 2019. Chyi Yaqing.
# All rights reserved.
#
# Distributed under terms of the
# MIT
"""
Binary Heap Data Structure
A Heap is a special Tree-based data structure in which the tree is a
complete binary tree.
Heaps can be of two types:
1) Max-Heap: In a Max-Heap the key present at the root node must be the
greatest among the keys present at all of its children. The same property
must be recursively true for all sub-trees in that Binary Tree.
2) Min-Heap: In a Min-Heap the key present at the root node must be the
minimum among the keys present at all of its children. The same property
must be recursively true for all sub-trees in that Binary Tree.
How is Binary Heap represented?
A Binary Heap is a Complete Binary Tree. A binary heap is typically
represented as an array.
1) The root element will be at Arr[1]
2) Below table shows indexes of other nodes for the ith node,
Arr[(i)//2] Returns the parent node
Arr[2*i] Returns the left child node
Arr[(2*i)+1] Returns the right child node
Operations on Max/Min Heap:
1) getMin()/getMax(): it returns the root element of Min/Max Heap, Time
Complexity of this operation is O(1)
2) extractMin()/extractMax(): Removes the minimum/Maximum element from
MinHeap/MaxHeap. Time Complexity of this Operation is O(Logn) as this
operation needs to maintain the heap property (by calling heapify())
after removing root
3) decreaseKey(): Decreases the value of a key. The time complexity of this
operation is O(Logn). If the decreased key value of a node is greater
than the parent of node, then we don't need to do anything. Otherwise,
we need to traverse up to fix the violated heap property.
4) insert(): Inserting a new key takes O(Logn) time. We add a new key at
the end of the tree. If new key is greater than its parent, then don't
need to do anything. Otherwise, we need to traverse up to fix the
violated heap property.
5) delete(): Deleting a key also takes O(Logn) time. We replace the key to
be deleted with minus infinity by calling decreaseKey(). After
decreaseKey(), the minus-infinity value must reach the root, so we
extractMin() to remove the key.
A Complete binary tree: A complete binary tree is a tree in which each level
has all of its nodes. The exception to this is the bottom level of the tree,
which we fill in from left to right.
"""
class MinHeap:
    """Array-backed binary min-heap.

    The root lives at index 1; index 0 is an unused placeholder so that the
    children of node i sit at 2*i and 2*i+1 and its parent at i//2.
    """

    def __init__(self):
        self.heapList = [0]   # slot 0 unused; simplifies the index arithmetic
        self.currentSize = 0  # number of live items in the heap

    def heapifyUp(self, i):
        """Bubble the item at index i up until its parent is not larger."""
        while i // 2 > 0:
            parent = i // 2
            if self.heapList[i] < self.heapList[parent]:
                self.heapList[parent], self.heapList[i] = (
                    self.heapList[i], self.heapList[parent])
            else:
                # Heap property already holds here, so it holds all the way
                # up; stopping early avoids a pointless walk to the root.
                break
            i = parent

    def heapifyDown(self, i):
        """Sink the item at index i down until no child is smaller."""
        while (i * 2) <= self.currentSize:
            mc = self.minChild(i)
            if self.heapList[i] > self.heapList[mc]:
                self.heapList[i], self.heapList[mc] = (
                    self.heapList[mc], self.heapList[i])
            else:
                # Not larger than the smaller child: the subtree below is
                # already a valid heap, so we can stop.
                break
            i = mc

    def minChild(self, i):
        """Return the index of the smaller child of node i."""
        if i * 2 + 1 > self.currentSize:
            return i * 2  # only a left child exists
        if self.heapList[2 * i] < self.heapList[2 * i + 1]:
            return i * 2
        return i * 2 + 1

    def delete(self):
        """Remove and return the minimum element (the root).

        Raises IndexError with a clear message on an empty heap (the
        original crashed with an opaque list-index error and left
        currentSize negative).
        """
        if self.currentSize == 0:
            raise IndexError("delete from an empty heap")
        retval = self.heapList[1]
        # Move the last item to the root, shrink, and restore the heap.
        self.heapList[1] = self.heapList[self.currentSize]
        self.currentSize -= 1
        self.heapList.pop()
        self.heapifyDown(1)
        return retval

    def insert(self, data):
        """Append data and bubble it up to its correct position. O(log n)."""
        self.currentSize += 1
        self.heapList.append(data)
        self.heapifyUp(self.currentSize)

    def buildHeap(self, alist):
        """Build the heap in place from a list of keys in O(n).

        In a complete binary tree the nodes at indices n//2+1 .. n are
        leaves, so sifting starts at the last non-leaf node and walks back
        to the root.
        """
        i = len(alist) // 2
        self.currentSize = len(alist)
        self.heapList = [0] + alist[:]
        while i > 0:
            self.heapifyDown(i)
            i -= 1

    def sort(self, alist):
        """Heapsort alist; a min-heap variant yields DESCENDING order."""
        self.buildHeap(alist)
        while self.currentSize > 1:
            # Swap the current minimum (root) to the end of the live region.
            self.heapList[self.currentSize], self.heapList[1] = (
                self.heapList[1], self.heapList[self.currentSize])
            self.currentSize -= 1
            self.heapifyDown(1)
        return self.heapList[1:]

    def printHeap(self):
        """Print the heap contents (excluding the unused slot 0)."""
        print("\nPrint Min Heap {}".format(self.heapList[1:]))
class MaxHeap:
    """Array-backed binary max-heap.

    The root lives at index 1; index 0 is an unused placeholder so that the
    children of node i sit at 2*i and 2*i+1 and its parent at i//2.
    """

    def __init__(self):
        self.heapList = [0]   # slot 0 unused; simplifies the index arithmetic
        self.currentSize = 0  # number of live items in the heap

    def heapifyUp(self, i):
        """Bubble the item at index i up until its parent is not smaller."""
        while i // 2 > 0:
            parent = i // 2
            if self.heapList[i] > self.heapList[parent]:
                self.heapList[parent], self.heapList[i] = (
                    self.heapList[i], self.heapList[parent])
            else:
                # Heap property already holds here, so it holds all the way
                # up; stopping early avoids a pointless walk to the root.
                break
            i = parent

    def heapifyDown(self, i):
        """Sink the item at index i down until no child is larger."""
        while (i * 2) <= self.currentSize:
            mc = self.minChild(i)
            if self.heapList[i] < self.heapList[mc]:
                self.heapList[i], self.heapList[mc] = (
                    self.heapList[mc], self.heapList[i])
            else:
                # Not smaller than the larger child: the subtree below is
                # already a valid heap, so we can stop.
                break
            i = mc

    def minChild(self, i):
        """Return the index of the LARGER child of node i.

        NOTE(review): the name is kept for compatibility with MinHeap's
        interface, but in a max-heap this actually selects the larger child.
        """
        if i * 2 + 1 > self.currentSize:
            return i * 2  # only a left child exists
        if self.heapList[2 * i] > self.heapList[2 * i + 1]:
            return i * 2
        return i * 2 + 1

    def delete(self):
        """Remove and return the maximum element (the root).

        Raises IndexError with a clear message on an empty heap (the
        original crashed with an opaque list-index error and left
        currentSize negative).
        """
        if self.currentSize == 0:
            raise IndexError("delete from an empty heap")
        retval = self.heapList[1]
        # Move the last item to the root, shrink, and restore the heap.
        self.heapList[1] = self.heapList[self.currentSize]
        self.currentSize -= 1
        self.heapList.pop()
        self.heapifyDown(1)
        return retval

    def insert(self, data):
        """Append data and bubble it up to its correct position. O(log n)."""
        self.currentSize += 1
        self.heapList.append(data)
        self.heapifyUp(self.currentSize)

    def buildHeap(self, alist):
        """Build the heap in place from a list of keys in O(n).

        In a complete binary tree the nodes at indices n//2+1 .. n are
        leaves, so sifting starts at the last non-leaf node and walks back
        to the root.
        """
        i = len(alist) // 2
        self.currentSize = len(alist)
        self.heapList = [0] + alist[:]
        while i > 0:
            self.heapifyDown(i)
            i -= 1

    def sort(self, alist):
        """Heapsort alist; the max-heap variant yields ASCENDING order."""
        self.buildHeap(alist)
        while self.currentSize > 1:
            # Swap the current maximum (root) to the end of the live region.
            self.heapList[self.currentSize], self.heapList[1] = (
                self.heapList[1], self.heapList[self.currentSize])
            self.currentSize -= 1
            self.heapifyDown(1)
        return self.heapList[1:]

    def printHeap(self):
        """Print the heap contents (excluding the unused slot 0)."""
        print("\nPrint Max Heap : {}".format(self.heapList[1:]))
if __name__ == '__main__':
    # Demo: build a small min-heap, pop the minimum, then heapsort a list.
    heap = MinHeap()
    for value in (40, 39, 30):
        heap.insert(value)
    heap.printHeap()
    heap.delete()
    heap.printHeap()
    data = [9, 7, 8, 5, 6, 3, 4, 1, 2]
    print("\nOriginal List: {}".format(data))
    print("Sorted List is : {}".format(heap.sort(data)))
|
# Operation status strings.
SUCCEED = 'SUCCEED'
FAILED = 'FAILED'

FORCE_RESIZE_IMAGE = True
DEFAULT_AVAILABLE_PRODUCT_FOR_SHOP = 60
DEFAULT_SHIPPING_FEE = 2000
CURRENCY = ' تومان'

# Image sizes (pixels) for models.
PRODUCT_IMAGE_WIDTH = 600
PRODUCT_IMAGE_HEIGHT = 600
PRODUCT_THUMBNAIL_WIDTH = 250
PRODUCT_THUMBNAIL_HEIGHT = 250
CATEGORY_IMAGE_WIDTH = 250
CATEGORY_IMAGE_HEIGHT = 250
PROFILE_IMAGE_WIDTH = 150
PROFILE_IMAGE_HEIGHT = 150
SUPPLIER_IMAGE_WIDTH = 500
SUPPLIER_IMAGE_HEIGHT = 350
SHIPPER_IMAGE_WIDTH = 500
SHIPPER_IMAGE_HEIGHT = 350

# Order-list sizes per role.
DEFAULT_ORDER_LIST_FOR_USER = 20
DEFAULT_ORDER_LIST_FOR_SUPPLIER = 200
DEFAULT_ORDER_LIST_FOR_SHIPPER = 20

# Which contact fields users may edit.
CAN_EDIT_MOBILE = False
CAN_EDIT_EMAIL = False

# Reaction action identifiers.
ACTION_LIKE = 'LIKE'
ACTION_DISLIKE = 'DISLIKE'

# Notification list sizes.
NOTIFICATION_UNSEEN_COUNT = 10
NOTIFICATION_SEEN_COUNT = 10
NOTIFICATION_ALL_COUNT = 20
"""
Copyright 2021 Google LLC
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
https://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
# Template record for one VM's basic inventory fields; 'Source' is fixed to
# 'AWS' for this collector. Presumably copied and filled in per machine by
# the caller — TODO confirm against the collection code.
vm_basic_info = {
'MachineId':'',
'MachineName':'',
'PrimaryIPAddress':'',
'PublicIPAddress':'',
'IpAddressListSemiColonDelimited':'',
'TotalDiskAllocatedGiB':0,
'TotalDiskUsedGiB':0,
'MachineTypeLabel':'',
'AllocatedProcessorCoreCount':0,
'MemoryGiB':0,
'HostingLocation':'',
'OsType':'',
'OsPublisher':'',
'OsName':'',
'OsVersion':'',
'MachineStatus':'',
'ProvisioningState':'',
'CreateDate':'',
'IsPhysical':0,
'Source':'AWS'
}
# Template record for one key/value tag attached to a machine.
vm_tag = {
'MachineId':'',
'Key':'',
'Value':''
}
# Template record for one disk attached to a machine.
vm_disk = {
'MachineId':'',
'DiskLabel':'',
'SizeInGib':'',
'UsedInGib':'',
'StorageTypeLabel':''
}
# Template record for one performance sample of a machine.
vm_perf = {
'MachineId':'',
'TimeStamp':'',
'CpuUtilizationPercentage':'',
'AvailableMemoryBytes':'',
'DiskReadOperationsPerSec':'',
'DiskWriteOperationsPerSec':'',
'NetworkBytesPerSecSent':'',
'NetworkBytesPerSecReceived':''
}
def main(request, response):
    """WPT handler: echo the COOKIE_NAME cookie back inside a JS module.

    CORS headers are added, reflecting the caller's Origin when present.
    """
    headers = [(b"Content-Type", b"text/javascript"),
               (b"Access-Control-Allow-Credentials", b"true")]
    origin = request.headers.get(b"Origin", None)
    if origin:
        headers.append((b"Access-Control-Allow-Origin", origin))
    cookie = request.cookies.first(b"COOKIE_NAME", None)
    value = cookie.value if cookie else b''
    return (200, headers,
            b"export const cookie = '" + value + b"';")
|
#!/usr/bin/env python
# Copyright Contributors to the Open Shading Language project.
# SPDX-License-Identifier: BSD-3-Clause
# https://github.com/AcademySoftwareFoundation/OpenShadingLanguage
# Exercise testshade's spline tests over every combination of interpolation
# qualifiers for the spline value and the knot array: c=constant, u=uniform,
# v=varying. Refactored from ~100 copy-pasted lines into data-driven loops;
# the resulting 'command' string and 'outputs' list are unchanged.

# Float results from float-array knots.
for val in ["c", "u", "v"]:
    for knot in ["v", "u", "c"]:
        t = "spline_%s_float_%s_floatarray" % (val, knot)
        command += testshade("-t 1 -g 64 64 -od uint8 -o Fout %s.tif test_%s"
                             % (t, t))
    for knot in ["v", "u", "c"]:
        outputs.append("spline_%s_float_%s_floatarray.tif" % (val, knot))

# Derivative variants of the float-array tests (value + dx + dy packed).
for val in ["c", "u", "v"]:
    for knot in ["v", "u", "c"]:
        t = "deriv_spline_%s_float_%s_floatarray" % (val, knot)
        command += testshade("--vary_udxdy --vary_vdxdy -t 1 -g 64 64 "
                             "-od uint8 -o ValDxDyOut %s.tif test_%s" % (t, t))
    for knot in ["v", "u", "c"]:
        outputs.append("deriv_spline_%s_float_%s_floatarray.tif" % (val, knot))

# Color results from color-array knots.
for val in ["c", "u", "v"]:
    for knot in ["v", "u", "c"]:
        t = "spline_%s_float_%s_colorarray" % (val, knot)
        command += testshade("-t 1 -g 64 64 -od uint8 -o Cout %s.tif test_%s"
                             % (t, t))
    for knot in ["v", "u", "c"]:
        outputs.append("spline_%s_float_%s_colorarray.tif" % (val, knot))

# Derivative variants of the color-array tests (separate value/Dx/Dy images).
# 'vNoDeriv' covers a varying value that carries no derivatives.
for val in ["c", "u", "v", "vNoDeriv"]:
    for knot in ["v", "u", "c"]:
        t = "deriv_spline_%s_float_%s_colorarray" % (val, knot)
        command += testshade("--vary_udxdy --vary_vdxdy -t 1 -g 64 64 "
                             "-od uint8 -o ValOut %s.tif -o DxOut %sDx.tif "
                             "-o DyOut %sDy.tif test_%s" % (t, t, t, t))
    for knot in ["v", "u", "c"]:
        t = "deriv_spline_%s_float_%s_colorarray" % (val, knot)
        outputs.append(t + ".tif")
        outputs.append(t + "Dx.tif")
        outputs.append(t + "Dy.tif")

# Knot arrays that are varying but carry no derivatives.
for val in ["v", "u", "c"]:
    t = "deriv_spline_%s_float_vNoDeriv_colorarray" % val
    command += testshade("--vary_udxdy --vary_vdxdy -t 1 -g 64 64 "
                         "-od uint8 -o ValOut %s.tif -o DxOut %sDx.tif "
                         "-o DyOut %sDy.tif test_%s" % (t, t, t, t))
    outputs.append(t + ".tif")
    outputs.append(t + "Dx.tif")
    outputs.append(t + "Dy.tif")

# expect a few LSB failures
failthresh = 0.008
failpercent = 3
|
# coding: utf-8
# Greedy: raise the total grade to at least avg*n by buying the cheapest
# extra grade points first.
n, r, avg = [int(tok) for tok in input().split()]
exams = []
for _ in range(n):
    grade, cost = [int(tok) for tok in input().split()]
    # Store as [cost, grade] so sorting orders by essay cost.
    exams.append([cost, grade])
exams.sort(reverse=True)  # most expensive first; pop() takes the cheapest

deficit = avg * n - sum(e[1] for e in exams)
essays = 0
while deficit > 0:
    cost, grade = exams.pop()
    headroom = r - grade  # points this exam can still gain (cap r)
    if deficit <= headroom:
        essays += cost * deficit
        deficit = 0
    else:
        essays += cost * headroom
        deficit -= headroom
print(essays)
|
# elif basic: demonstrate an if/elif/else chain.
a = 10
if a < 1:
    print('benar')
elif a > 1:
    # a is 10, so this branch fires.
    print('salah')
elif a == 1:
    print('input salah')
else:
    # Unreachable for ordinary numbers: one of the branches above matches.
    print('input salah')
|
class Solution:
    """LeetCode 1390-family problem: Create Target Array in the Given Order."""

    def createTargetArray(self, nums, index):
        """Insert each nums[k] at position index[k], building the target list."""
        result = []
        for pos, val in zip(index, nums):
            result.insert(pos, val)
        return result
|
"""
2. Add Two Numbers
Input: (2 -> 4 -> 3) + (5 -> 6 -> 4)
Output: 7 -> 0 -> 8
Explanation: 342 + 465 = 807.
"""
# Definition for singly-linked list.
# class ListNode:
# def __init__(self, x):
# self.val = x
# self.next = None
class Solution:
    """LeetCode 2: Add Two Numbers (digits stored in reverse order)."""

    def addTwoNumbers(self, l1, l2):
        """
        :type l1: ListNode
        :type l2: ListNode
        :rtype: ListNode

        Walk both lists in lockstep, carrying overflow into the next digit.
        """
        total = 0
        dummy = ListNode(0)
        tail = dummy
        while l1 or l2 or total:
            if l1:
                total += l1.val
                l1 = l1.next
            if l2:
                total += l2.val
                l2 = l2.next
            tail.next = ListNode(total % 10)
            tail = tail.next
            total //= 10
        return dummy.next
# Advent of Code style: pilot the submarine with forward/down/up commands,
# where down/up adjust aim and forward moves and dives by aim * units.
with open('input.txt', 'r') as f:
    commands = [line.strip().split(' ') for line in f.readlines()]

horizontal = 0
depth = 0
aim = 0
for cmd in commands:
    if cmd[0] == 'forward':
        units = int(cmd[1])
        horizontal += units
        depth += aim * units
    elif cmd[0] == 'down':
        aim += int(cmd[1])
    elif cmd[0] == 'up':
        aim -= int(cmd[1])
print(horizontal * depth)
class ChildObject:
    """One Sumo child-object search hit: metadata fields plus lazily
    fetched blob/png payloads."""

    def __init__(self, sumo_client, meta_data):
        self.sumo = sumo_client
        self.__blob = None  # fetched on first .blob access
        self.__png = None   # fetched on first .png access
        source = meta_data["_source"]
        self.tag_name = meta_data["fields"]["tag_name"][0]
        self.sumo_id = meta_data["_id"]
        self.name = source["data"]["name"]
        self.iteration_id = source["fmu"]["iteration"]["id"]
        self.relative_path = source["file"]["relative_path"]
        self.meta_data = source
        self.object_type = source["class"]
        fmu = source["fmu"]
        # Realization/aggregation are optional in the fmu metadata block.
        self.realization_id = (
            fmu["realization"]["id"] if "realization" in fmu else None)
        self.aggregation = (
            fmu["aggregation"]["operation"] if "aggregation" in fmu else None)

    @property
    def blob(self):
        """Raw blob payload, fetched from Sumo on first access and cached."""
        if self.__blob is None:
            self.__blob = self.__get_blob()
        return self.__blob

    def __get_blob(self):
        return self.sumo.get(f"/objects('{self.sumo_id}')/blob")

    @property
    def png(self):
        """PNG rendering of the blob, fetched on first access and cached."""
        if self.__png is None:
            self.__png = self.__get_png()
        return self.__png

    def __get_png(self):
        return self.sumo.get(f"/objects('{self.sumo_id}')/blob", encoder="png")
confusables = {
u'\u2460': '1',
u'\u2780': '1',
u'\U0001D7D0': '2',
u'\U0001D7DA': '2',
u'\U0001D7E4': '2',
u'\U0001D7EE': '2',
u'\U0001D7F8': '2',
u'\uA75A': '2',
u'\u01A7': '2',
u'\u03E8': '2',
u'\uA644': '2',
u'\u14BF': '2',
u'\uA6EF': '2',
u'\u2461': '2',
u'\u2781': '2',
u'\u01BB': '2',
u'\U0001F103': '2.',
u'\u2489': '2.',
u'\U0001D206': '3',
u'\U0001D7D1': '3',
u'\U0001D7DB': '3',
u'\U0001D7E5': '3',
u'\U0001D7EF': '3',
u'\U0001D7F9': '3',
u'\U00016F3B': '3',
u'\U000118CA': '3',
u'\uA7AB': '3',
u'\u021C': '3',
u'\u01B7': '3',
u'\uA76A': '3',
u'\u2CCC': '3',
u'\u0417': '3',
u'\u04E0': '3',
u'\u0AE9': '3',
u'\u2462': '3',
u'\u0498': '3',
u'\U0001F104': '3.',
u'\u248A': '3.',
u'\U0001D7D2': '4',
u'\U0001D7DC': '4',
u'\U0001D7E6': '4',
u'\U0001D7F0': '4',
u'\U0001D7FA': '4',
u'\U000118AF': '4',
u'\u13CE': '4',
u'\u2463': '4',
u'\u2783': '4',
u'\u1530': '4',
u'\U0001F105': '4.',
u'\u248B': '4.',
u'\U0001D7D3': '5',
u'\U0001D7DD': '5',
u'\U0001D7E7': '5',
u'\U0001D7F1': '5',
u'\U0001D7FB': '5',
u'\U000118BB': '5',
u'\u01BC': '5',
u'\u2464': '5',
u'\u2784': '5',
u'\U0001F106': '5.',
u'\u248C': '5.',
u'\U0001D7D4': '6',
u'\U0001D7DE': '6',
u'\U0001D7E8': '6',
u'\U0001D7F2': '6',
u'\U0001D7FC': '6',
u'\U000118D5': '6',
u'\u2CD2': '6',
u'\u0431': '6',
u'\u13EE': '6',
u'\u2465': '6',
u'\u2785': '6',
u'\U0001F107': '6.',
u'\u248D': '6.',
u'\U0001D212': '7',
u'\U0001D7D5': '7',
u'\U0001D7DF': '7',
u'\U0001D7E9': '7',
u'\U0001D7F3': '7',
u'\U0001D7FD': '7',
u'\U000104D2': '7',
u'\U000118C6': '7',
u'\u2466': '7',
u'\u2786': '7',
u'\U0001F108': '7.',
u'\u248E': '7.',
u'\U0001E8CB': '8',
u'\U0001D7D6': '8',
u'\U0001D7E0': '8',
u'\U0001D7EA': '8',
u'\U0001D7F4': '8',
u'\U0001D7FE': '8',
u'\U0001031A': '8',
u'\u0B03': '8',
u'\u09EA': '8',
u'\u0A6A': '8',
u'\u0223': '8',
u'\u0222': '8',
u'\u2467': '8',
u'\u2787': '8',
u'\U0001F109': '8.',
u'\u248F': '8.',
u'\U0001D7D7': '9',
u'\U0001D7E1': '9',
u'\U0001D7E4': '9',
u'\U0001D7EB': '9',
u'\U0001D7F5': '9',
u'\U0001D7FF': '9',
u'\U000118CC': '9',
u'\U000118AC': '9',
u'\U000118D6': '9',
u'\u0A67': '9',
u'\u0B68': '9',
u'\u09ED': '9',
u'\u0D6D': '9',
u'\uA76E': '9',
u'\u2CCA': '9',
u'\u0967': '9',
u'\u06F9': '9',
u'\u2468': '9',
u'\u2788': '9',
u'\U0001F10A': '9.',
u'\u2490': '9.',
u'\U0001D41A': 'a',
u'\U0001D44E': 'a',
u'\U0001D482': 'a',
u'\U0001D4B6': 'a',
u'\U0001D4EA': 'a',
u'\U0001D51E': 'a',
u'\U0001D552': 'a',
u'\U0001D586': 'a',
u'\U0001D5BA': 'a',
u'\U0001D5EE': 'a',
u'\U0001D622': 'a',
u'\U0001D656': 'a',
u'\U0001D68A': 'a',
u'\U0001D6C2': 'a',
u'\U0001D6FC': 'a',
u'\U0001D736': 'a',
u'\U0001D770': 'a',
u'\U0001D7AA': 'a',
u'\U0001D400': 'a',
u'\U0001D434': 'a',
u'\U0001D468': 'a',
u'\U0001D49C': 'a',
u'\U0001D4D0': 'a',
u'\U0001D504': 'a',
u'\U0001D538': 'a',
u'\U0001D56C': 'a',
u'\U0001D5A0': 'a',
u'\U0001D5D4': 'a',
u'\U0001D608': 'a',
u'\U0001D63C': 'a',
u'\U0001D670': 'a',
u'\U0001D6A8': 'a',
u'\U0001D6E2': 'a',
u'\U0001D71C': 'a',
u'\U0001D756': 'a',
u'\U0001D790': 'a',
u'\u237A': 'a',
u'\uFF41': 'a',
u'\u0251': 'a',
u'\u03B1': 'a',
u'\u0430': 'a',
u'\u2DF6': 'a',
u'\uFF21': 'a',
u'\u0391': 'a',
u'\u0410': 'a',
u'\u13AA': 'a',
u'\u15C5': 'a',
u'\uA4EE': 'a',
u'\u2376': 'a',
u'\u01CE': 'a',
u'\u0103': 'a',
u'\u01CD': 'a',
u'\u0102': 'a',
u'\u0227': 'a',
u'\u00E5': 'a',
u'\u0226': 'a',
u'\u00C5': 'a',
u'\u1E9A': 'a',
u'\u1EA3': 'a',
u'\uAB7A': 'a',
u'\u1D00': 'a',
u'\uA733': 'aa',
u'\uA732': 'aa',
u'\u00E6': 'ae',
u'\u04D5': 'ae',
u'\u00C6': 'ae',
u'\u04D4': 'ae',
u'\uA735': 'ao',
u'\uA734': 'ao',
u'\U0001F707': 'ar',
u'\uA737': 'au',
u'\uA736': 'au',
u'\uA738': 'av',
u'\uA739': 'av',
u'\uA73A': 'av',
u'\uA73B': 'av',
u'\uA73D': 'ay',
u'\uA73C': 'ay',
u'\U0001D41B': 'b',
u'\U0001D44F': 'b',
u'\U0001D483': 'b',
u'\U0001D4B7': 'b',
u'\U0001D4EB': 'b',
u'\U0001D51F': 'b',
u'\U0001D553': 'b',
u'\U0001D587': 'b',
u'\U0001D5BB': 'b',
u'\U0001D5EF': 'b',
u'\U0001D623': 'b',
u'\U0001D657': 'b',
u'\U0001D68B': 'b',
u'\U0001D401': 'b',
u'\U0001D435': 'b',
u'\U0001D469': 'b',
u'\U0001D4D1': 'b',
u'\U0001D505': 'b',
u'\U0001D539': 'b',
u'\U0001D56D': 'b',
u'\U0001D5A1': 'b',
u'\U0001D5D5': 'b',
u'\U0001D609': 'b',
u'\U0001D63D': 'b',
u'\U0001D671': 'b',
u'\U0001D6A9': 'b',
u'\U0001D6E3': 'b',
u'\U0001D71D': 'b',
u'\U0001D757': 'b',
u'\U0001D791': 'b',
u'\U00010282': 'b',
u'\U000102A1': 'b',
u'\U00010301': 'b',
u'\U0001D6C3': 'b',
u'\U0001D6FD': 'b',
u'\U0001D737': 'b',
u'\U0001D771': 'b',
u'\U0001D7AB': 'b',
u'\u0184': 'b',
u'\u042C': 'b',
u'\u13CF': 'b',
u'\u15AF': 'b',
u'\uFF22': 'b',
u'\u212C': 'b',
u'\uA7B4': 'b',
u'\u0392': 'b',
u'\u0412': 'b',
u'\u13F4': 'b',
u'\u15F7': 'b',
u'\uA4D0': 'b',
u'\u0253': 'b',
u'\u0183': 'b',
u'\u0182': 'b',
u'\u0411': 'b',
u'\u0180': 'b',
u'\u048D': 'b',
u'\u048C': 'b',
u'\u0463': 'b',
u'\u0462': 'b',
u'\u0432': 'b',
u'\u13FC': 'b',
u'\u0299': 'b',
u'\uA7B5': 'b',
u'\u03B2': 'b',
u'\u03D0': 'b',
u'\u13F0': 'b',
u'\u00DF': 'b',
u'\u042B': 'bl',
u'\U0001D41C': 'c',
u'\U0001D450': 'c',
u'\U0001D484': 'c',
u'\U0001D4B8': 'c',
u'\U0001D4EC': 'c',
u'\U0001D520': 'c',
u'\U0001D554': 'c',
u'\U0001D588': 'c',
u'\U0001D5BC': 'c',
u'\U0001D5F0': 'c',
u'\U0001D624': 'c',
u'\U0001D658': 'c',
u'\U0001D68C': 'c',
u'\U0001043D': 'c',
u'\U0001F74C': 'c',
u'\U000118F2': 'c',
u'\U000118E9': 'c',
u'\U0001D402': 'c',
u'\U0001D436': 'c',
u'\U0001D46A': 'c',
u'\U0001D49E': 'c',
u'\U0001D4D2': 'c',
u'\U0001D56E': 'c',
u'\U0001D5A2': 'c',
u'\U0001D5D6': 'c',
u'\U0001D60A': 'c',
u'\U0001D63E': 'c',
u'\U0001D672': 'c',
u'\U000102A2': 'c',
u'\U00010302': 'c',
u'\U00010415': 'c',
u'\U0001051C': 'c',
u'\uFF43': 'c',
u'\u217D': 'c',
u'\u1D04': 'c',
u'\u03F2': 'c',
u'\u2CA5': 'c',
u'\u0441': 'c',
u'\uABAF': 'c',
u'\u2DED': 'c',
u'\uFF23': 'c',
u'\u216D': 'c',
u'\u2102': 'c',
u'\u212D': 'c',
u'\u03F9': 'c',
u'\u2CA4': 'c',
u'\u0421': 'c',
u'\u13DF': 'c',
u'\uA4DA': 'c',
u'\u00A2': 'c',
u'\u023C': 'c',
u'\u20A1': 'c',
u'\u00E7': 'c',
u'\u04AB': 'c',
u'\u00C7': 'c',
u'\u04AA': 'c',
u'\u0187': 'c',
u'\U0001D41D': 'd',
u'\U0001D451': 'd',
u'\U0001D485': 'd',
u'\U0001D4B9': 'd',
u'\U0001D4ED': 'd',
u'\U0001D521': 'd',
u'\U0001D555': 'd',
u'\U0001D589': 'd',
u'\U0001D5BD': 'd',
u'\U0001D5F1': 'd',
u'\U0001D625': 'd',
u'\U0001D659': 'd',
u'\U0001D68D': 'd',
u'\U0001D403': 'd',
u'\U0001D437': 'd',
u'\U0001D46B': 'd',
u'\U0001D49F': 'd',
u'\U0001D4D3': 'd',
u'\U0001D507': 'd',
u'\U0001D53B': 'd',
u'\U0001D56F': 'd',
u'\U0001D5A3': 'd',
u'\U0001D5D7': 'd',
u'\U0001D60B': 'd',
u'\U0001D63F': 'd',
u'\U0001D673': 'd',
u'\u217E': 'd',
u'\u2146': 'd',
u'\u0501': 'd',
u'\u13E7': 'd',
u'\u146F': 'd',
u'\uA4D2': 'd',
u'\u216E': 'd',
u'\u2145': 'd',
u'\u13A0': 'd',
u'\u15DE': 'd',
u'\u15EA': 'd',
u'\uA4D3': 'd',
u'\u0257': 'd',
u'\u0256': 'd',
u'\u018C': 'd',
u'\u0111': 'd',
u'\u0110': 'd',
u'\u00D0': 'd',
u'\u0189': 'd',
u'\u20AB': 'd',
u'\u147B': 'd',
u'\u1487': 'd',
u'\u0257': 'd',
u'\u0256': 'd',
u'\u018C': 'd',
u'\u0111': 'd',
u'\u0110': 'd',
u'\u00D0': 'd',
u'\u0189': 'd',
u'\u20AB': 'd',
u'\u147B': 'd',
u'\u1487': 'd',
u'\uAB70': 'd',
u'\U0001D41E': 'e',
u'\U0001D452': 'e',
u'\U0001D486': 'e',
u'\U0001D4EE': 'e',
u'\U0001D522': 'e',
u'\U0001D556': 'e',
u'\U0001D58A': 'e',
u'\U0001D5BE': 'e',
u'\U0001D5F2': 'e',
u'\U0001D626': 'e',
u'\U0001D65A': 'e',
u'\U0001D68E': 'e',
u'\U0001D404': 'e',
u'\U0001D438': 'e',
u'\U0001D46C': 'e',
u'\U0001D4D4': 'e',
u'\U0001D508': 'e',
u'\U0001D53C': 'e',
u'\U0001D570': 'e',
u'\U0001D5A4': 'e',
u'\U0001D5D8': 'e',
u'\U0001D60C': 'e',
u'\U0001D640': 'e',
u'\U0001D674': 'e',
u'\U0001D6AC': 'e',
u'\U0001D6E6': 'e',
u'\U0001D720': 'e',
u'\U0001D75A': 'e',
u'\U0001D794': 'e',
u'\U000118A6': 'e',
u'\U000118AE': 'e',
u'\U00010286': 'e',
u'\U0001D221': 'e',
u'\u212E': 'e',
u'\uFF45': 'e',
u'\u212F': 'e',
u'\u2147': 'e',
u'\uAB32': 'e',
u'\u0435': 'e',
u'\u04BD': 'e',
u'\u2DF7': 'e',
u'\u22FF': 'e',
u'\uFF25': 'e',
u'\u2130': 'e',
u'\u0395': 'e',
u'\u0415': 'e',
u'\u2D39': 'e',
u'\u13AC': 'e',
u'\uA4F0': 'e',
u'\u011B': 'e',
u'\u011A': 'e',
u'\u0247': 'e',
u'\u0246': 'e',
u'\u04BF': 'e',
u'\uAB7C': 'e',
u'\u1D07': 'e',
u'\u0259': 'e',
u'\u01DD': 'e',
u'\u04D9': 'e',
u'\u2107': 'e',
u'\u0510': 'e',
u'\u13CB': 'e',
u'\U0001D41F': 'f',
u'\U0001D453': 'f',
u'\U0001D487': 'f',
u'\U0001D4BB': 'f',
u'\U0001D4EF': 'f',
u'\U0001D523': 'f',
u'\U0001D557': 'f',
u'\U0001D58B': 'f',
u'\U0001D5BF': 'f',
u'\U0001D5F3': 'f',
u'\U0001D627': 'f',
u'\U0001D65B': 'f',
u'\U0001D68F': 'f',
u'\U0001D213': 'f',
u'\U0001D405': 'f',
u'\U0001D439': 'f',
u'\U0001D46D': 'f',
u'\U0001D4D5': 'f',
u'\U0001D509': 'f',
u'\U0001D53D': 'f',
u'\U0001D571': 'f',
u'\U0001D5A5': 'f',
u'\U0001D5D9': 'f',
u'\U0001D60D': 'f',
u'\U0001D641': 'f',
u'\U0001D675': 'f',
u'\U0001D7CA': 'f',
u'\U000118C2': 'f',
u'\U000118A2': 'f',
u'\U00010287': 'f',
u'\U000102A5': 'f',
u'\U00010525': 'f',
u'\uAB35': 'f',
u'\uA799': 'f',
u'\u017F': 'f',
u'\u1E9D': 'f',
u'\u0584': 'f',
u'\u2131': 'f',
u'\uA798': 'f',
u'\u03DC': 'f',
u'\u15B4': 'f',
u'\uA4DD': 'f',
u'\u0192': 'f',
u'\u0191': 'f',
u'\u1D6E': 'f',
u'\uFB00': 'ff',
u'\uFB03': 'ffi',
u'\uFB04': 'ffl',
u'\uFB01': 'fi',
u'\uFB02': 'fl',
u'\u02A9': 'fn',
u'\U0001D420': 'g',
u'\U0001D454': 'g',
u'\U0001D488': 'g',
u'\U0001D4F0': 'g',
u'\U0001D524': 'g',
u'\U0001D558': 'g',
u'\U0001D58C': 'g',
u'\U0001D5C0': 'g',
u'\U0001D5F4': 'g',
u'\U0001D628': 'g',
u'\U0001D65C': 'g',
u'\U0001D690': 'g',
u'\U0001D406': 'g',
u'\U0001D43A': 'g',
u'\U0001D46E': 'g',
u'\U0001D4A2': 'g',
u'\U0001D4D6': 'g',
u'\U0001D50A': 'g',
u'\U0001D53E': 'g',
u'\U0001D572': 'g',
u'\U0001D5A6': 'g',
u'\U0001D5DA': 'g',
u'\U0001D60E': 'g',
u'\U0001D642': 'g',
u'\U0001D676': 'g',
u'\uFF47': 'g',
u'\u210A': 'g',
u'\u0261': 'g',
u'\u1D83': 'g',
u'\u018D': 'g',
u'\u0581': 'g',
u'\u050C': 'g',
u'\u13C0': 'g',
u'\u13F3': 'g',
u'\uA4D6': 'g',
u'\u1DA2': 'g',
u'\u1D4D': 'g',
u'\u0260': 'g',
u'\u01E7': 'g',
u'\u011F': 'g',
u'\u01E6': 'g',
u'\u011E': 'g',
u'\u01F5': 'g',
u'\u0123': 'g',
u'\u01E5': 'g',
u'\u01E4': 'g',
u'\u0193': 'g',
u'\u050D': 'g',
u'\uAB90': 'g',
u'\u13FB': 'g',
u'\U0001D421': 'h',
u'\U0001D489': 'h',
u'\U0001D4BD': 'h',
u'\U0001D4F1': 'h',
u'\U0001D525': 'h',
u'\U0001D559': 'h',
u'\U0001D58D': 'h',
u'\U0001D5C1': 'h',
u'\U0001D5F5': 'h',
u'\U0001D629': 'h',
u'\U0001D65D': 'h',
u'\U0001D691': 'h',
u'\U0001D407': 'h',
u'\U0001D43B': 'h',
u'\U0001D46F': 'h',
u'\U0001D4D7': 'h',
u'\U0001D573': 'h',
u'\U0001D5A7': 'h',
u'\U0001D5DB': 'h',
u'\U0001D60F': 'h',
u'\U0001D643': 'h',
u'\U0001D677': 'h',
u'\U0001D6AE': 'h',
u'\U0001D6E8': 'h',
u'\U0001D722': 'h',
u'\U0001D75C': 'h',
u'\U0001D796': 'h',
u'\U000102CF': 'h',
u'\U00010199': 'h',
u'\uFF48': 'h',
u'\u210E': 'h',
u'\u04BB': 'h',
u'\u0570': 'h',
u'\u13C2': 'h',
u'\uFF28': 'h',
u'\u210B': 'h',
u'\u210C': 'h',
u'\u210D': 'h',
u'\u0397': 'h',
u'\u2C8E': 'h',
u'\u041D': 'h',
u'\u13BB': 'h',
u'\u157C': 'h',
u'\uA4E7': 'h',
u'\u1D78': 'h',
u'\u1D34': 'h',
u'\u0266': 'h',
u'\uA695': 'h',
u'\u13F2': 'h',
u'\u2C67': 'h',
u'\u04A2': 'h',
u'\u0127': 'h',
u'\u210F': 'h',
u'\u045B': 'h',
u'\u0126': 'h',
u'\u04C9': 'h',
u'\u04C7': 'h',
u'\u043D': 'h',
u'\u029C': 'h',
u'\uAB8B': 'h',
u'\u04A3': 'h',
u'\u04CA': 'h',
u'\u04C8': 'h',
u'\U0001D422': 'i',
u'\U0001D456': 'i',
u'\U0001D48A': 'i',
u'\U0001D4BE': 'i',
u'\U0001D4F2': 'i',
u'\U0001D526': 'i',
u'\U0001D55A': 'i',
u'\U0001D58E': 'i',
u'\U0001D5C2': 'i',
u'\U0001D5F6': 'i',
u'\U0001D62A': 'i',
u'\U0001D65E': 'i',
u'\U0001D692': 'i',
u'\U0001D6A4': 'i',
u'\U0001D6CA': 'i',
u'\U0001D704': 'i',
u'\U0001D73E': 'i',
u'\U0001D778': 'i',
u'\U0001D7B2': 'i',
u'\U000118C3': 'i',
u'\u02DB': 'i',
u'\u2373': 'i',
u'\uFF49': 'i',
u'\u2170': 'i',
u'\u2139': 'i',
u'\u2148': 'i',
u'\u0131': 'i',
u'\u026A': 'i',
u'\u0269': 'i',
u'\u03B9': 'i',
u'\u1FBE': 'i',
u'\u037A': 'i',
u'\u0456': 'i',
u'\uA647': 'i',
u'\u04CF': 'i',
u'\uAB75': 'i',
u'\u13A5': 'i',
u'\u24DB': 'i',
u'\u2378': 'i',
u'\u01D0': 'i',
u'\u01CF': 'i',
u'\u0268': 'i',
u'\u1D7B': 'i',
u'\u1D7C': 'i',
u'\u2171': 'ii',
u'\u2172': 'iii',
u'\u0133': 'ij',
u'\u2173': 'iv',
u'\u2178': 'ix',
u'\U0001D423': 'j',
u'\U0001D457': 'j',
u'\U0001D48B': 'j',
u'\U0001D4BF': 'j',
u'\U0001D4F3': 'j',
u'\U0001D527': 'j',
u'\U0001D55B': 'j',
u'\U0001D58F': 'j',
u'\U0001D5C3': 'j',
u'\U0001D5F7': 'j',
u'\U0001D62B': 'j',
u'\U0001D65F': 'j',
u'\U0001D693': 'j',
u'\U0001D409': 'j',
u'\U0001D43D': 'j',
u'\U0001D471': 'j',
u'\U0001D4A5': 'j',
u'\U0001D4D9': 'j',
u'\U0001D50D': 'j',
u'\U0001D541': 'j',
u'\U0001D575': 'j',
u'\U0001D5A9': 'j',
u'\U0001D5DD': 'j',
u'\U0001D611': 'j',
u'\U0001D645': 'j',
u'\U0001D679': 'j',
u'\U0001D6A5': 'j',
u'\uFF4A': 'j',
u'\u2149': 'j',
u'\u03F3': 'j',
u'\u0458': 'j',
u'\uFF2A': 'j',
u'\uA7B2': 'j',
u'\u037F': 'j',
u'\u0408': 'j',
u'\u13AB': 'j',
u'\u148D': 'j',
u'\uA4D9': 'j',
u'\u0249': 'j',
u'\u0248': 'j',
u'\u1499': 'j',
u'\u0575': 'j',
u'\uAB7B': 'j',
u'\u1D0A': 'j',
u'\U0001D424': 'k',
u'\U0001D458': 'k',
u'\U0001D48C': 'k',
u'\U0001D4C0': 'k',
u'\U0001D4F4': 'k',
u'\U0001D528': 'k',
u'\U0001D55C': 'k',
u'\U0001D590': 'k',
u'\U0001D5C4': 'k',
u'\U0001D5F8': 'k',
u'\U0001D62C': 'k',
u'\U0001D660': 'k',
u'\U0001D694': 'k',
u'\U0001D40A': 'k',
u'\U0001D43E': 'k',
u'\U0001D472': 'k',
u'\U0001D4A6': 'k',
u'\U0001D4DA': 'k',
u'\U0001D50E': 'k',
u'\U0001D542': 'k',
u'\U0001D576': 'k',
u'\U0001D5AA': 'k',
u'\U0001D5DE': 'k',
u'\U0001D612': 'k',
u'\U0001D646': 'k',
u'\U0001D67A': 'k',
u'\U0001D6B1': 'k',
u'\U0001D6EB': 'k',
u'\U0001D725': 'k',
u'\U0001D75F': 'k',
u'\U0001D799': 'k',
u'\U0001D6CB': 'k',
u'\U0001D6DE': 'k',
u'\U0001D705': 'k',
u'\U0001D718': 'k',
u'\U0001D73F': 'k',
u'\U0001D752': 'k',
u'\U0001D779': 'k',
u'\U0001D78C': 'k',
u'\U0001D7B3': 'k',
u'\U0001D7C6': 'k',
u'\u212A': 'k',
u'\uFF2B': 'k',
u'\u039A': 'k',
u'\u2C94': 'k',
u'\u041A': 'k',
u'\u13E6': 'k',
u'\u16D5': 'k',
u'\uA4D7': 'k',
u'\u0199': 'k',
u'\u2C69': 'k',
u'\u049A': 'k',
u'\u20AD': 'k',
u'\uA740': 'k',
u'\u049E': 'k',
u'\u0198': 'k',
u'\u1D0B': 'k',
u'\u0138': 'k',
u'\u03BA': 'k',
u'\u03F0': 'k',
u'\u2C95': 'k',
u'\u043A': 'k',
u'\uABB6': 'k',
u'\u049B': 'k',
u'\u049F': 'k',
u'\U00010320': 'l',
u'\U0001E8C7': 'l',
u'\U0001D7CF': 'l',
u'\U0001D7D9': 'l',
u'\U0001D7E3': 'l',
u'\U0001D7ED': 'l',
u'\U0001D7F7': 'l',
u'\U0001D408': 'l',
u'\U0001D43C': 'l',
u'\U0001D470': 'l',
u'\U0001D4D8': 'l',
u'\U0001D540': 'l',
u'\U0001D574': 'l',
u'\U0001D5A8': 'l',
u'\U0001D5DC': 'l',
u'\U0001D610': 'l',
u'\U0001D644': 'l',
u'\U0001D678': 'l',
u'\U0001D425': 'l',
u'\U0001D459': 'l',
u'\U0001D48D': 'l',
u'\U0001D4C1': 'l',
u'\U0001D4F5': 'l',
u'\U0001D529': 'l',
u'\U0001D55D': 'l',
u'\U0001D591': 'l',
u'\U0001D5C5': 'l',
u'\U0001D5F9': 'l',
u'\U0001D62D': 'l',
u'\U0001D661': 'l',
u'\U0001D695': 'l',
u'\U0001D6B0': 'l',
u'\U0001D6EA': 'l',
u'\U0001D724': 'l',
u'\U0001D75E': 'l',
u'\U0001D798': 'l',
u'\U0001EE00': 'l',
u'\U0001EE80': 'l',
u'\U00016F28': 'l',
u'\U0001028A': 'l',
u'\U00010309': 'l',
u'\U0001D22A': 'l',
u'\U0001D40B': 'l',
u'\U0001D43F': 'l',
u'\U0001D473': 'l',
u'\U0001D4DB': 'l',
u'\U0001D50F': 'l',
u'\U0001D543': 'l',
u'\U0001D577': 'l',
u'\U0001D5AB': 'l',
u'\U0001D5DF': 'l',
u'\U0001D613': 'l',
u'\U0001D647': 'l',
u'\U0001D67B': 'l',
u'\U00016F16': 'l',
u'\U000118A3': 'l',
u'\U000118B2': 'l',
u'\U0001041B': 'l',
u'\U00010526': 'l',
u'\U00010443': 'l',
u'\u05C0': 'l',
u'\u007C': 'l',
u'\u2223': 'l',
u'\u23FD': 'l',
u'\uFFE8': 'l',
u'\u0031': 'l',
u'\u0661': 'l',
u'\u06F1': 'l',
u'\u0049': 'l',
u'\uFF29': 'l',
u'\u2160': 'l',
u'\u2110': 'l',
u'\u2111': 'l',
u'\u0196': 'l',
u'\uFF4C': 'l',
u'\u217C': 'l',
u'\u2113': 'l',
u'\u01C0': 'l',
u'\u0399': 'l',
u'\u2C92': 'l',
u'\u0406': 'l',
u'\u04C0': 'l',
u'\u05D5': 'l',
u'\u05DF': 'l',
u'\u0627': 'l',
u'\uFE8E': 'l',
u'\uFE8D': 'l',
u'\u07CA': 'l',
u'\u2D4F': 'l',
u'\u16C1': 'l',
u'\uA4F2': 'l',
u'\u216C': 'l',
u'\u2112': 'l',
u'\u2CD0': 'l',
u'\u13DE': 'l',
u'\u14AA': 'l',
u'\uA4E1': 'l',
u'\uFD3C': 'l',
u'\uFD3D': 'l',
u'\u0142': 'l',
u'\u0141': 'l',
u'\u026D': 'l',
u'\u0197': 'l',
u'\u019A': 'l',
u'\u026B': 'l',
u'\u0625': 'l',
u'\uFE88': 'l',
u'\uFE87': 'l',
u'\u0673': 'l',
u'\u0140': 'l',
u'\u013F': 'l',
u'\u14B7': 'l',
u'\u0623': 'l',
u'\uFE84': 'l',
u'\uFE83': 'l',
u'\u0672': 'l',
u'\u0675': 'l',
u'\u2CD1': 'l',
u'\uABAE': 'l',
u'\U0001F102': 'l.',
u'\u2488': 'l.',
u'\u01C9': 'lj',
u'\u0132': 'lj',
u'\u01C8': 'lj',
u'\u01C7': 'lj',
u'\u2016': 'll',
u'\u2225': 'll',
u'\u2161': 'll',
u'\u01C1': 'll',
u'\u05F0': 'll',
u'\u2162': 'lll',
u'\u02AA': 'ls',
u'\u20B6': 'lt',
u'\u2163': 'lv',
u'\u2168': 'lx',
u'\u02AB': 'lz',
u'\U0001D40C': 'm',
u'\U0001D440': 'm',
u'\U0001D474': 'm',
u'\U0001D4DC': 'm',
u'\U0001D510': 'm',
u'\U0001D544': 'm',
u'\U0001D578': 'm',
u'\U0001D5AC': 'm',
u'\U0001D5E0': 'm',
u'\U0001D614': 'm',
u'\U0001D648': 'm',
u'\U0001D67C': 'm',
u'\U0001D6B3': 'm',
u'\U0001D6ED': 'm',
u'\U0001D727': 'm',
u'\U0001D761': 'm',
u'\U0001D79B': 'm',
u'\U000102B0': 'm',
u'\U00010311': 'm',
u'\uFF2D': 'm',
u'\u216F': 'm',
u'\u2133': 'm',
u'\u039C': 'm',
u'\u03FA': 'm',
u'\u2C98': 'm',
u'\u041C': 'm',
u'\u13B7': 'm',
u'\u15F0': 'm',
u'\u16D6': 'm',
u'\uA4DF': 'm',
u'\u04CD': 'm',
u'\u2DE8': 'm',
u'\u1DDF': 'm',
u'\u1E43': 'm',
u'\U0001F76B': 'mb',
u'\U0001D427': 'n',
u'\U0001D45B': 'n',
u'\U0001D48F': 'n',
u'\U0001D4C3': 'n',
u'\U0001D4F7': 'n',
u'\U0001D52B': 'n',
u'\U0001D55F': 'n',
u'\U0001D593': 'n',
u'\U0001D5C7': 'n',
u'\U0001D5FB': 'n',
u'\U0001D62F': 'n',
u'\U0001D663': 'n',
u'\U0001D697': 'n',
u'\U0001D40D': 'n',
u'\U0001D441': 'n',
u'\U0001D475': 'n',
u'\U0001D4A9': 'n',
u'\U0001D4DD': 'n',
u'\U0001D511': 'n',
u'\U0001D579': 'n',
u'\U0001D5AD': 'n',
u'\U0001D5E1': 'n',
u'\U0001D615': 'n',
u'\U0001D649': 'n',
u'\U0001D67D': 'n',
u'\U0001D6B4': 'n',
u'\U0001D6EE': 'n',
u'\U0001D728': 'n',
u'\U0001D762': 'n',
u'\U0001D79C': 'n',
u'\U00010513': 'n',
u'\U0001018E': 'n',
u'\U0001D6C8': 'n',
u'\U0001D702': 'n',
u'\U0001D73C': 'n',
u'\U0001D776': 'n',
u'\U0001D7B0': 'n',
u'\U0001044D': 'n',
u'\u0578': 'n',
u'\u057C': 'n',
u'\uFF2E': 'n',
u'\u2115': 'n',
u'\u039D': 'n',
u'\u2C9A': 'n',
u'\uA4E0': 'n',
u'\u0273': 'n',
u'\u019E': 'n',
u'\u03B7': 'n',
u'\u019D': 'n',
u'\u1D70': 'n',
u'\u0146': 'n',
u'\u0272': 'n',
u'\u01CC': 'nj',
u'\u01CB': 'nj',
u'\u01CA': 'nj',
u'\u2116': 'no',
u'\U0001D428': 'o',
u'\U0001D45C': 'o',
u'\U0001D490': 'o',
u'\U0001D4F8': 'o',
u'\U0001D52C': 'o',
u'\U0001D560': 'o',
u'\U0001D594': 'o',
u'\U0001D5C8': 'o',
u'\U0001D5FC': 'o',
u'\U0001D630': 'o',
u'\U0001D664': 'o',
u'\U0001D698': 'o',
u'\U0001D6D0': 'o',
u'\U0001D70A': 'o',
u'\U0001D744': 'o',
u'\U0001D77E': 'o',
u'\U0001D7B8': 'o',
u'\U0001D6D4': 'o',
u'\U0001D70E': 'o',
u'\U0001D748': 'o',
u'\U0001D782': 'o',
u'\U0001D7BC': 'o',
u'\U0001EE24': 'o',
u'\U0001EE64': 'o',
u'\U0001EE84': 'o',
u'\U000104EA': 'o',
u'\U000118C8': 'o',
u'\U000118D7': 'o',
u'\U0001042C': 'o',
u'\U000114D0': 'o',
u'\U000118E0': 'o',
u'\U0001D7CE': 'o',
u'\U0001D7D8': 'o',
u'\U0001D7E2': 'o',
u'\U0001D7EC': 'o',
u'\U0001D7F6': 'o',
u'\U0001D40E': 'o',
u'\U0001D442': 'o',
u'\U0001D476': 'o',
u'\U0001D4AA': 'o',
u'\U0001D4DE': 'o',
u'\U0001D512': 'o',
u'\U0001D546': 'o',
u'\U0001D57A': 'o',
u'\U0001D5AE': 'o',
u'\U0001D5E2': 'o',
u'\U0001D616': 'o',
u'\U0001D64A': 'o',
u'\U0001D67E': 'o',
u'\U0001D6B6': 'o',
u'\U0001D6F0': 'o',
u'\U0001D72A': 'o',
u'\U0001D764': 'o',
u'\U0001D79E': 'o',
u'\U000104C2': 'o',
u'\U000118B5': 'o',
u'\U00010292': 'o',
u'\U000102AB': 'o',
u'\U00010404': 'o',
u'\U00010516': 'o',
u'\U0001D21A': 'o',
u'\U0001F714': 'o',
u'\U0001D6C9': 'o',
u'\U0001D6DD': 'o',
u'\U0001D703': 'o',
u'\U0001D717': 'o',
u'\U0001D73D': 'o',
u'\U0001D751': 'o',
u'\U0001D777': 'o',
u'\U0001D78B': 'o',
u'\U0001D7B1': 'o',
u'\U0001D7C5': 'o',
u'\U0001D6AF': 'o',
u'\U0001D6B9': 'o',
u'\U0001D6E9': 'o',
u'\U0001D6F3': 'o',
u'\U0001D723': 'o',
u'\U0001D72D': 'o',
u'\U0001D75D': 'o',
u'\U0001D767': 'o',
u'\U0001D797': 'o',
u'\U0001D7A1': 'o',
u'\U0001F101': 'o',
u'\U0001F100': 'o',
u'\u0C02': 'o',
u'\u0C82': 'o',
u'\u0D02': 'o',
u'\u0D82': 'o',
u'\u0966': 'o',
u'\u0A66': 'o',
u'\u0AE6': 'o',
u'\u0BE6': 'o',
u'\u0C66': 'o',
u'\u0CE6': 'o',
u'\u0D66': 'o',
u'\u0E50': 'o',
u'\u0ED0': 'o',
u'\u1040': 'o',
u'\u0665': 'o',
u'\u06F5': 'o',
u'\uFF4F': 'o',
u'\u2134': 'o',
u'\u1D0F': 'o',
u'\u1D11': 'o',
u'\uAB3D': 'o',
u'\u03BF': 'o',
u'\u03C3': 'o',
u'\u2C9F': 'o',
u'\u043E': 'o',
u'\u10FF': 'o',
u'\u0585': 'o',
u'\u05E1': 'o',
u'\u0647': 'o',
u'\uFEEB': 'o',
u'\uFEEC': 'o',
u'\uFEEA': 'o',
u'\uFEE9': 'o',
u'\u06BE': 'o',
u'\uFBAC': 'o',
u'\uFBAD': 'o',
u'\uFBAB': 'o',
u'\uFBAA': 'o',
u'\u06C1': 'o',
u'\uFBA8': 'o',
u'\uFBA9': 'o',
u'\uFBA7': 'o',
u'\uFBA6': 'o',
u'\u06D5': 'o',
u'\u0D20': 'o',
u'\u101D': 'o',
u'\u07C0': 'o',
u'\u09E6': 'o',
u'\u0B66': 'o',
u'\u3007': 'o',
u'\uFF2F': 'o',
u'\u039F': 'o',
u'\u2C9E': 'o',
u'\u041E': 'o',
u'\u0555': 'o',
u'\u2D54': 'o',
u'\u12D0': 'o',
u'\u0B20': 'o',
u'\uA4F3': 'o',
u'\u2070': 'o',
u'\u00BA': 'o',
u'\u1D52': 'o',
u'\u01D2': 'o',
u'\u014F': 'o',
u'\u01D1': 'o',
u'\u014E': 'o',
u'\u06FF': 'o',
u'\u00F8': 'o',
u'\uAB3E': 'o',
u'\u00D8': 'o',
u'\u2D41': 'o',
u'\u01FE': 'o',
u'\u0275': 'o',
u'\uA74B': 'o',
u'\u04E9': 'o',
u'\u0473': 'o',
u'\uAB8E': 'o',
u'\uABBB': 'o',
u'\u2296': 'o',
u'\u229D': 'o',
u'\u236C': 'o',
u'\u019F': 'o',
u'\uA74A': 'o',
u'\u03B8': 'o',
u'\u03D1': 'o',
u'\u0398': 'o',
u'\u03F4': 'o',
u'\u04E8': 'o',
u'\u0472': 'o',
u'\u2D31': 'o',
u'\u13BE': 'o',
u'\u13EB': 'o',
u'\uAB74': 'o',
u'\uFCD9': 'o',
u'\u01A1': 'o',
u'\u01A0': 'o',
u'\u13A4': 'o',
u'\U0001F101': 'o.',
u'\U0001F100': 'o.',
u'\u0153': 'oe',
u'\u0152': 'oe',
u'\u0276': 'oe',
u'\u221E': 'oo',
u'\uA74F': 'oo',
u'\uA699': 'oo',
u'\uA74E': 'oo',
u'\uA698': 'oo',
u'\u1010': 'oo',
u'\U0001D429': 'p',
u'\U0001D45D': 'p',
u'\U0001D491': 'p',
u'\U0001D4C5': 'p',
u'\U0001D4F9': 'p',
u'\U0001D52D': 'p',
u'\U0001D561': 'p',
u'\U0001D595': 'p',
u'\U0001D5C9': 'p',
u'\U0001D5FD': 'p',
u'\U0001D631': 'p',
u'\U0001D665': 'p',
u'\U0001D699': 'p',
u'\U0001D6D2': 'p',
u'\U0001D6E0': 'p',
u'\U0001D70C': 'p',
u'\U0001D71A': 'p',
u'\U0001D746': 'p',
u'\U0001D754': 'p',
u'\U0001D780': 'p',
u'\U0001D78E': 'p',
u'\U0001D7BA': 'p',
u'\U0001D7C8': 'p',
u'\U0001D40F': 'p',
u'\U0001D443': 'p',
u'\U0001D477': 'p',
u'\U0001D4AB': 'p',
u'\U0001D4DF': 'p',
u'\U0001D513': 'p',
u'\U0001D57B': 'p',
u'\U0001D5AF': 'p',
u'\U0001D5E3': 'p',
u'\U0001D617': 'p',
u'\U0001D64B': 'p',
u'\U0001D67F': 'p',
u'\U0001D6B8': 'p',
u'\U0001D6F2': 'p',
u'\U0001D72C': 'p',
u'\U0001D766': 'p',
u'\U0001D7A0': 'p',
u'\U00010295': 'p',
u'\u2374': 'p',
u'\uFF50': 'p',
u'\u03C1': 'p',
u'\u03F1': 'p',
u'\u2CA3': 'p',
u'\u0440': 'p',
u'\uFF30': 'p',
u'\u2119': 'p',
u'\u03A1': 'p',
u'\u2CA2': 'p',
u'\u0420': 'p',
u'\u13E2': 'p',
u'\u146D': 'p',
u'\uA4D1': 'p',
u'\u01A5': 'p',
u'\u1D7D': 'p',
u'\u1477': 'p',
u'\u1486': 'p',
u'\u1D29': 'p',
u'\uABB2': 'p',
u'\U0001D42A': 'q',
u'\U0001D45E': 'q',
u'\U0001D492': 'q',
u'\U0001D4C6': 'q',
u'\U0001D4FA': 'q',
u'\U0001D52E': 'q',
u'\U0001D562': 'q',
u'\U0001D596': 'q',
u'\U0001D5CA': 'q',
u'\U0001D5FE': 'q',
u'\U0001D632': 'q',
u'\U0001D666': 'q',
u'\U0001D69A': 'q',
u'\U0001D410': 'q',
u'\U0001D444': 'q',
u'\U0001D478': 'q',
u'\U0001D4AC': 'q',
u'\U0001D4E0': 'q',
u'\U0001D514': 'q',
u'\U0001D57C': 'q',
u'\U0001D5B0': 'q',
u'\U0001D5E4': 'q',
u'\U0001D618': 'q',
u'\U0001D64C': 'q',
u'\U0001D680': 'q',
u'\u051B': 'q',
u'\u0563': 'q',
u'\u0566': 'q',
u'\u211A': 'q',
u'\u2D55': 'q',
u'\u02A0': 'q',
u'\u1D90': 'q',
u'\u024B': 'q',
u'\U0001D42B': 'r',
u'\U0001D45F': 'r',
u'\U0001D493': 'r',
u'\U0001D4C7': 'r',
u'\U0001D4FB': 'r',
u'\U0001D52F': 'r',
u'\U0001D563': 'r',
u'\U0001D597': 'r',
u'\U0001D5CB': 'r',
u'\U0001D5FF': 'r',
u'\U0001D633': 'r',
u'\U0001D667': 'r',
u'\U0001D69B': 'r',
u'\U0001D216': 'r',
u'\U0001D411': 'r',
u'\U0001D445': 'r',
u'\U0001D479': 'r',
u'\U0001D4E1': 'r',
u'\U0001D57D': 'r',
u'\U0001D5B1': 'r',
u'\U0001D5E5': 'r',
u'\U0001D619': 'r',
u'\U0001D64D': 'r',
u'\U0001D681': 'r',
u'\U000104B4': 'r',
u'\uAB47': 'r',
u'\uAB48': 'r',
u'\u1D26': 'r',
u'\u2C85': 'r',
u'\u0433': 'r',
u'\uAB81': 'r',
u'\u211B': 'r',
u'\u211C': 'r',
u'\u211D': 'r',
u'\u01A6': 'r',
u'\u13A1': 'r',
u'\u13D2': 'r',
u'\u1587': 'r',
u'\uA4E3': 'r',
u'\u027D': 'r',
u'\u027C': 'r',
u'\u024D': 'r',
u'\u0493': 'r',
u'\u1D72': 'r',
u'\u0491': 'r',
u'\uAB71': 'r',
u'\u0280': 'r',
u'\uABA2': 'r',
u'\u1D73': 'r',
u'\U000118E3': 'rn',
u'\U0001D426': 'rn',
u'\U0001D45A': 'rn',
u'\U0001D48E': 'rn',
u'\U0001D4C2': 'rn',
u'\U0001D4F6': 'rn',
u'\U0001D52A': 'rn',
u'\U0001D55E': 'rn',
u'\U0001D592': 'rn',
u'\U0001D5C6': 'rn',
u'\U0001D5FA': 'rn',
u'\U0001D62E': 'rn',
u'\U0001D662': 'rn',
u'\U0001D696': 'rn',
u'\U00011700': 'rn',
u'\u217F': 'rn',
u'\u20A5': 'rn',
u'\u0271': 'rn',
u'\u1D6F': 'rn',
u'\U0001D42C': 's',
u'\U0001D460': 's',
u'\U0001D494': 's',
u'\U0001D4C8': 's',
u'\U0001D4FC': 's',
u'\U0001D530': 's',
u'\U0001D564': 's',
u'\U0001D598': 's',
u'\U0001D5CC': 's',
u'\U0001D600': 's',
u'\U0001D634': 's',
u'\U0001D668': 's',
u'\U0001D69C': 's',
u'\U000118C1': 's',
u'\U00010448': 's',
u'\U0001D412': 's',
u'\U0001D446': 's',
u'\U0001D47A': 's',
u'\U0001D4AE': 's',
u'\U0001D4E2': 's',
u'\U0001D516': 's',
u'\U0001D54A': 's',
u'\U0001D57E': 's',
u'\U0001D5B2': 's',
u'\U0001D5E6': 's',
u'\U0001D61A': 's',
u'\U0001D64E': 's',
u'\U0001D682': 's',
u'\U00016F3A': 's',
u'\U00010296': 's',
u'\U00010420': 's',
u'\uFF53': 's',
u'\uA731': 's',
u'\u01BD': 's',
u'\u0455': 's',
u'\uABAA': 's',
u'\uFF33': 's',
u'\u0405': 's',
u'\u054F': 's',
u'\u13D5': 's',
u'\u13DA': 's',
u'\uA4E2': 's',
u'\u0282': 's',
u'\u1D74': 's',
u'\U0001F75C': 'sss',
u'\uFB06': 'st',
u'\U0001D42D': 't',
u'\U0001D461': 't',
u'\U0001D495': 't',
u'\U0001D4C9': 't',
u'\U0001D4FD': 't',
u'\U0001D531': 't',
u'\U0001D565': 't',
u'\U0001D599': 't',
u'\U0001D5CD': 't',
u'\U0001D601': 't',
u'\U0001D635': 't',
u'\U0001D669': 't',
u'\U0001D69D': 't',
u'\U0001F768': 't',
u'\U0001D413': 't',
u'\U0001D447': 't',
u'\U0001D47B': 't',
u'\U0001D4AF': 't',
u'\U0001D4E3': 't',
u'\U0001D517': 't',
u'\U0001D54B': 't',
u'\U0001D57F': 't',
u'\U0001D5B3': 't',
u'\U0001D5E7': 't',
u'\U0001D61B': 't',
u'\U0001D64F': 't',
u'\U0001D683': 't',
u'\U0001D6BB': 't',
u'\U0001D6F5': 't',
u'\U0001D72F': 't',
u'\U0001D769': 't',
u'\U0001D7A3': 't',
u'\U00016F0A': 't',
u'\U000118BC': 't',
u'\U00010297': 't',
u'\U000102B1': 't',
u'\U00010315': 't',
u'\U0001D6D5': 't',
u'\U0001D70F': 't',
u'\U0001D749': 't',
u'\U0001D783': 't',
u'\U0001D7BD': 't',
u'\u22A4': 't',
u'\u27D9': 't',
u'\uFF34': 't',
u'\u03A4': 't',
u'\u2CA6': 't',
u'\u0422': 't',
u'\u13A2': 't',
u'\uA4D4': 't',
u'\u2361': 't',
u'\u023E': 't',
u'\u021A': 't',
u'\u0162': 't',
u'\u01AE': 't',
u'\u04AC': 't',
u'\u20AE': 't',
u'\u0167': 't',
u'\u0166': 't',
u'\u1D75': 't',
u'\U0001D42E': 'u',
u'\U0001D462': 'u',
u'\U0001D496': 'u',
u'\U0001D4CA': 'u',
u'\U0001D4FE': 'u',
u'\U0001D532': 'u',
u'\U0001D566': 'u',
u'\U0001D59A': 'u',
u'\U0001D5CE': 'u',
u'\U0001D602': 'u',
u'\U0001D636': 'u',
u'\U0001D66A': 'u',
u'\U0001D69E': 'u',
u'\U0001D6D6': 'u',
u'\U0001D710': 'u',
u'\U0001D74A': 'u',
u'\U0001D784': 'u',
u'\U0001D7BE': 'u',
u'\U000104F6': 'u',
u'\U000118D8': 'u',
u'\U0001D414': 'u',
u'\U0001D448': 'u',
u'\U0001D47C': 'u',
u'\U0001D4B0': 'u',
u'\U0001D4E4': 'u',
u'\U0001D518': 'u',
u'\U0001D54C': 'u',
u'\U0001D580': 'u',
u'\U0001D5B4': 'u',
u'\U0001D5E8': 'u',
u'\U0001D61C': 'u',
u'\U0001D650': 'u',
u'\U0001D684': 'u',
u'\U000104CE': 'u',
u'\U00016F42': 'u',
u'\U000118B8': 'u',
u'\uA79F': 'u',
u'\u1D1C': 'u',
u'\uAB4E': 'u',
u'\uAB52': 'u',
u'\u028B': 'u',
u'\u03C5': 'u',
u'\u057D': 'u',
u'\u222A': 'u',
u'\u22C3': 'u',
u'\u054D': 'u',
u'\u1200': 'u',
u'\u144C': 'u',
u'\uA4F4': 'u',
u'\u01D4': 'u',
u'\u01D3': 'u',
u'\u1D7E': 'u',
u'\uAB9C': 'u',
u'\u0244': 'u',
u'\u13CC': 'u',
u'\u1458': 'u',
u'\u1467': 'u',
u'\u2127': 'u',
u'\u162E': 'u',
u'\u1634': 'u',
u'\u01B1': 'u',
u'\u1D7F': 'u',
u'\u1D6B': 'ue',
u'\uAB63': 'uo',
u'\U0001D42F': 'v',
u'\U0001D463': 'v',
u'\U0001D497': 'v',
u'\U0001D4CB': 'v',
u'\U0001D4FF': 'v',
u'\U0001D533': 'v',
u'\U0001D567': 'v',
u'\U0001D59B': 'v',
u'\U0001D5CF': 'v',
u'\U0001D603': 'v',
u'\U0001D637': 'v',
u'\U0001D66B': 'v',
u'\U0001D69F': 'v',
u'\U0001D6CE': 'v',
u'\U0001D708': 'v',
u'\U0001D742': 'v',
u'\U0001D77C': 'v',
u'\U0001D7B6': 'v',
u'\U00011706': 'v',
u'\U000118C0': 'v',
u'\U0001D20D': 'v',
u'\U0001D415': 'v',
u'\U0001D449': 'v',
u'\U0001D47D': 'v',
u'\U0001D4B1': 'v',
u'\U0001D4E5': 'v',
u'\U0001D519': 'v',
u'\U0001D54D': 'v',
u'\U0001D581': 'v',
u'\U0001D5B5': 'v',
u'\U0001D5E9': 'v',
u'\U0001D61D': 'v',
u'\U0001D651': 'v',
u'\U0001D685': 'v',
u'\U00016F08': 'v',
u'\U000118A0': 'v',
u'\U0001051D': 'v',
u'\U00010197': 'v',
u'\U0001F708': 'v',
u'\u2228': 'v',
u'\u22C1': 'v',
u'\uFF56': 'v',
u'\u2174': 'v',
u'\u1D20': 'v',
u'\u03BD': 'v',
u'\u0475': 'v',
u'\u05D8': 'v',
u'\uABA9': 'v',
u'\u0667': 'v',
u'\u06F7': 'v',
u'\u2164': 'v',
u'\u0474': 'v',
u'\u2D38': 'v',
u'\u13D9': 'v',
u'\u142F': 'v',
u'\uA6DF': 'v',
u'\uA4E6': 'v',
u'\u143B': 'v',
u'\U0001F76C': 'vb',
u'\u2175': 'vi',
u'\u2176': 'vii',
u'\u2177': 'viii',
u'\u2165': 'vl',
u'\u2166': 'vll',
u'\u2167': 'vlll',
u'\U0001D430': 'w',
u'\U0001D464': 'w',
u'\U0001D498': 'w',
u'\U0001D4CC': 'w',
u'\U0001D500': 'w',
u'\U0001D534': 'w',
u'\U0001D568': 'w',
u'\U0001D59C': 'w',
u'\U0001D5D0': 'w',
u'\U0001D604': 'w',
u'\U0001D638': 'w',
u'\U0001D66C': 'w',
u'\U0001D6A0': 'w',
u'\U0001170A': 'w',
u'\U0001170E': 'w',
u'\U0001170F': 'w',
u'\U000118EF': 'w',
u'\U000118E6': 'w',
u'\U0001D416': 'w',
u'\U0001D44A': 'w',
u'\U0001D47E': 'w',
u'\U0001D4B2': 'w',
u'\U0001D4E6': 'w',
u'\U0001D51A': 'w',
u'\U0001D54E': 'w',
u'\U0001D582': 'w',
u'\U0001D5B6': 'w',
u'\U0001D5EA': 'w',
u'\U0001D61E': 'w',
u'\U0001D652': 'w',
u'\U0001D686': 'w',
u'\U000114C5': 'w',
u'\u026F': 'w',
u'\u1D21': 'w',
u'\u0461': 'w',
u'\u051D': 'w',
u'\u0561': 'w',
u'\uAB83': 'w',
u'\u051C': 'w',
u'\u13B3': 'w',
u'\u13D4': 'w',
u'\uA4EA': 'w',
u'\u047D': 'w',
u'\u20A9': 'w',
u'\uA761': 'w',
u'\U0001D431': 'x',
u'\U0001D465': 'x',
u'\U0001D499': 'x',
u'\U0001D4CD': 'x',
u'\U0001D501': 'x',
u'\U0001D535': 'x',
u'\U0001D569': 'x',
u'\U0001D59D': 'x',
u'\U0001D5D1': 'x',
u'\U0001D605': 'x',
u'\U0001D639': 'x',
u'\U0001D66D': 'x',
u'\U0001D6A1': 'x',
u'\U00010322': 'x',
u'\U000118EC': 'x',
u'\U0001D417': 'x',
u'\U0001D44B': 'x',
u'\U0001D47F': 'x',
u'\U0001D4B3': 'x',
u'\U0001D4E7': 'x',
u'\U0001D51B': 'x',
u'\U0001D54F': 'x',
u'\U0001D583': 'x',
u'\U0001D5B7': 'x',
u'\U0001D5EB': 'x',
u'\U0001D61F': 'x',
u'\U0001D653': 'x',
u'\U0001D687': 'x',
u'\U0001D6BE': 'x',
u'\U0001D6F8': 'x',
u'\U0001D732': 'x',
u'\U0001D76C': 'x',
u'\U0001D7A6': 'x',
u'\U00010290': 'x',
u'\U000102B4': 'x',
u'\U00010317': 'x',
u'\U00010527': 'x',
u'\U00010196': 'x',
u'\u166E': 'x',
u'\u00D7': 'x',
u'\u292B': 'x',
u'\u292C': 'x',
u'\u2A2F': 'x',
u'\uFF58': 'x',
u'\u2179': 'x',
u'\u0445': 'x',
u'\u1541': 'x',
u'\u157D': 'x',
u'\u2DEF': 'x',
u'\u036F': 'x',
u'\u166D': 'x',
u'\u2573': 'x',
u'\uFF38': 'x',
u'\u2169': 'x',
u'\uA7B3': 'x',
u'\u03A7': 'x',
u'\u2CAC': 'x',
u'\u0425': 'x',
u'\u2D5D': 'x',
u'\u16B7': 'x',
u'\uA4EB': 'x',
u'\u2A30': 'x',
u'\u04B2': 'x',
u'\u217A': 'xi',
u'\u217B': 'xii',
u'\u216A': 'xl',
u'\u216B': 'xll',
u'\U0001D432': 'y',
u'\U0001D466': 'y',
u'\U0001D49A': 'y',
u'\U0001D4CE': 'y',
u'\U0001D502': 'y',
u'\U0001D536': 'y',
u'\U0001D56A': 'y',
u'\U0001D59E': 'y',
u'\U0001D5D2': 'y',
u'\U0001D606': 'y',
u'\U0001D63A': 'y',
u'\U0001D66E': 'y',
u'\U0001D6A2': 'y',
u'\U0001D6C4': 'y',
u'\U0001D6FE': 'y',
u'\U0001D738': 'y',
u'\U0001D772': 'y',
u'\U0001D7AC': 'y',
u'\U000118DC': 'y',
u'\U0001D418': 'y',
u'\U0001D44C': 'y',
u'\U0001D480': 'y',
u'\U0001D4B4': 'y',
u'\U0001D4E8': 'y',
u'\U0001D51C': 'y',
u'\U0001D550': 'y',
u'\U0001D584': 'y',
u'\U0001D5B8': 'y',
u'\U0001D5EC': 'y',
u'\U0001D620': 'y',
u'\U0001D654': 'y',
u'\U0001D688': 'y',
u'\U0001D6BC': 'y',
u'\U0001D6F6': 'y',
u'\U0001D730': 'y',
u'\U0001D76A': 'y',
u'\U0001D7A4': 'y',
u'\U00016F43': 'y',
u'\U000118A4': 'y',
u'\U000102B2': 'y',
u'\u0263': 'y',
u'\u1D8C': 'y',
u'\uFF59': 'y',
u'\u028F': 'y',
u'\u1EFF': 'y',
u'\uAB5A': 'y',
u'\u03B3': 'y',
u'\u213D': 'y',
u'\u0443': 'y',
u'\u04AF': 'y',
u'\u10E7': 'y',
u'\uFF39': 'y',
u'\u03A5': 'y',
u'\u03D2': 'y',
u'\u2CA8': 'y',
u'\u0423': 'y',
u'\u04AE': 'y',
u'\u13A9': 'y',
u'\u13BD': 'y',
u'\uA4EC': 'y',
u'\u01B4': 'y',
u'\u024F': 'y',
u'\u04B1': 'y',
u'\u00A5': 'y',
u'\u024E': 'y',
u'\u04B0': 'y',
u'\U0001D433': 'z',
u'\U0001D467': 'z',
u'\U0001D49B': 'z',
u'\U0001D4CF': 'z',
u'\U0001D503': 'z',
u'\U0001D537': 'z',
u'\U0001D56B': 'z',
u'\U0001D59F': 'z',
u'\U0001D5D3': 'z',
u'\U0001D607': 'z',
u'\U0001D63B': 'z',
u'\U0001D66F': 'z',
u'\U0001D6A3': 'z',
u'\U000118C4': 'z',
u'\U000102F5': 'z',
u'\U000118E5': 'z',
u'\U0001D419': 'z',
u'\U0001D44D': 'z',
u'\U0001D481': 'z',
u'\U0001D4B5': 'z',
u'\U0001D4E9': 'z',
u'\U0001D585': 'z',
u'\U0001D5B9': 'z',
u'\U0001D5ED': 'z',
u'\U0001D621': 'z',
u'\U0001D655': 'z',
u'\U0001D689': 'z',
u'\U0001D6AD': 'z',
u'\U0001D6E7': 'z',
u'\U0001D721': 'z',
u'\U0001D75B': 'z',
u'\U0001D795': 'z',
u'\U000118A9': 'z',
u'\u1D22': 'z',
u'\uAB93': 'z',
u'\uFF3A': 'z',
u'\u2124': 'z',
u'\u2128': 'z',
u'\u0396': 'z',
u'\u13C3': 'z',
u'\uA4DC': 'z',
u'\u0290': 'z',
u'\u01B6': 'z',
u'\u01B5': 'z',
u'\u0225': 'z',
u'\u0224': 'z',
u'\u1D76': 'z',
u'\u2010': '-',
u'\u2011': '-',
u'\u2012': '-',
u'\u2013': '-',
u'\uFE58': '-',
u'\u06D4': '-',
u'\u2043': '-',
u'\u02D7': '-',
u'\u2212': '-',
u'\u2796': '-',
u'\u2CBA': '-'
}
def unconfuse(domain):
    """Replace confusable (homoglyph) characters in *domain* with ASCII look-alikes.

    If *domain* is in punycode form (``xn--`` prefix), it is first decoded to
    its Unicode form via the ``idna`` codec so confusable characters become
    visible. Each character is then mapped through the module-level
    ``confusables`` table; characters without an entry pass through unchanged.

    :param domain: domain name, either Unicode or punycode-encoded.
    :return: the domain with confusable characters substituted.
    """
    if domain.startswith('xn--'):
        # Round-trip through the idna codec to obtain the Unicode form.
        domain = domain.encode('idna').decode('idna')
    # dict.get with a per-character default replaces the manual index loop;
    # ''.join avoids quadratic repeated string concatenation.
    return ''.join(confusables.get(ch, ch) for ch in domain)
|
'''
All tests for the Osscie Package
'''
def test_1():
    """Smoke test: a trivially true assertion confirming the suite runs."""
    assert True
|
# built_ins.py
#
# https://www.educative.io/module/lesson/advanced-concepts-in-python/qZ4ZVVDN6kp
# map
"""
The map built-in also takes a function and an iterable and returns an iterator that applies the function to each item in the iterable
"""
def doubler(x):
    """Return *x* multiplied by two (any type supporting ``* 2``)."""
    return x * 2


my_list = [1, 2, 3, 4, 5]

# Unpack the lazy map iterator into print; sep='\n' emits one doubled
# value per line, matching the original per-item loop's output exactly.
print(*map(doubler, my_list), sep='\n')
|
# Experiment configuration for MoA feature selection, expressed as plain
# dict literals (identical value to the dict(...) constructor form).
config = {
    # Project settings
    'project_name': 'MoA_feature_selection',
    'batch_size': 1024,
    'num_workers': 4,
    # Validation
    'n_folds': 5,
    # Preprocessing
    'outlier_removal': True,  # TODO
    # General training
    # Ensemble of models
    'ensemble': [
        {
            # Model
            'type': 'Target',
            # Feature selection
            'n_features': 20,
            'n_cutoff': 20,  # number of instances required, otherwise set prediction to 0
            'model': {
                'model': 'MoaDenseNet',
                'n_hidden_layer': 2,
                'dropout': 0.5,
                'hidden_dim': 32,
                'activation': 'prelu',  # prelu, relu
                'normalization': 'batch',
                # Training
                'batch_size': 512,
                'num_workers': 4,
                'n_epochs': 25,
                'optimizer': 'adam',
                'learning_rate': 0.001,
                'weight_decay': 0.00001,
                'scheduler': 'OneCycleLR',
                'use_smart_init': True,
                'use_smote': False,
                'use_amp': False,
                'verbose': True,
                # Augmentation
                'augmentations': [],
            },
        },
    ],
    # Ensembling
    'ensemble_method': 'mean',  # TODO
    # Postprocessing
    'surety': True,  # TODO
}
|
class Par(object):
    """Parser for a small toy language over (type, value) token pairs.

    Dispatches on statement keywords: ``var`` (variable declaration),
    ``showme`` (output statement) and ``~`` (INOUT statement).  Syntax
    errors are reported by printing messages rather than raising.
    """
    def __init__(self,tokens):
        # tokens: flat list of (token_type, token_value) pairs from a lexer.
        self.tokens=tokens
    def parse(self):
        """Walk the token stream once, dispatching on each statement keyword."""
        self.count = 0
        while self.count < len(self.tokens):
            tokens_type = self.tokens[self.count][0]
            tokens_value = self.tokens[self.count][1]
            #print(tokens_type , tokens_value)
            if tokens_type == 'VARIABLE' and tokens_value == 'var':
                # Validate the declaration, then collect its [name, value] pairs.
                self.par_var_dec(self.tokens[self.count:len(self.tokens)])
                ask = (self.assignmentOpp(self.tokens[self.count:len(self.tokens)]))
            elif tokens_type == 'IDENTIFIRE' and tokens_value == 'showme':
                # NOTE(review): 'ask' is unbound if 'showme' appears before any
                # 'var' statement -- that input would raise UnboundLocalError.
                self.showme(self.tokens[self.count:len(self.tokens)],ask)
            elif tokens_type == 'INOUT' and tokens_value == '~':
                self.add(self.tokens[self.count:len(self.tokens)],ask)
            self.count += 1
    def par_var_dec(self ,token_tip ):
        """Validate a 'var' declaration: repeating name/operator/value/comma groups.

        Token roles repeat with period 4; il/ol/vl/cl hold the index sets
        expected for identifier, operator, value and comma positions.
        Prints a syntax-error message and stops at the first violation.
        """
        token_chk = 0
        for token in range(0,len(token_tip)+1):
            tokens_type = token_tip[token_chk][0]
            tokens_value = token_tip[token_chk][1]
            # Expected-position index lists, rebuilt on every iteration
            # (loop-invariant; could be hoisted out of the for loop).
            il=[]
            for i in range(1,len(token_tip)+1,4):
                il.append(i)
            ol=[]
            for j in range(2,len(token_tip)+1,4):
                ol.append(j)
            vl=[]
            for h in range(3,len(token_tip)+1,4):
                vl.append(h)
            cl=[]
            for k in range(4,len(token_tip)+1,4):
                cl.append(k)
            if tokens_type == 'STATEMENT_END':
                break
            elif token in il:
                # Positions 1, 5, 9, ... must hold a variable name.
                if tokens_type == "IDENTIFIRE":
                    identifire = tokens_value
                else :
                    print("Syntax ERROR : you must give a variable name after variable decliaration")
                    break
            elif token in ol:#2 or token == 4 or token == 6 or token == 8:
                # Positions 2, 6, 10, ... must hold the assignment operator.
                if tokens_type == "OPARETOR":
                    tok =1
                else :
                    print("Syntax ERROR :you miss the assignment operator after VARIABLE name")
                    break
            elif token in vl:#== 3 or token == 5 or token == 7 or token == 9:
                # Positions 3, 7, 11, ... must hold a string or integer value.
                if tokens_type == 'STRING':
                    strg = tokens_value
                elif tokens_type == 'INTEGER':
                    initalization =1
                else :
                    print("Syntax ERROR : IT CAN BE A INTEGER, STRING OR IDEMTIFIRE I.E. variable")
                    break
            elif token in cl:
                # Positions 4, 8, 12, ... separate multiple declarations.
                if tokens_type == 'COMMA':
                    comma = tokens_value
                elif tokens_type == "STATEMENT_END":
                    break
                else :
                    print("Syntax ERROR : in this position you can used comma for multiple decliaration or u can used assignment operator for initalization")
                    break
            token_chk = token_chk + 1
    def showme(self,token_tip,toko):
        """Execute a 'showme' output statement.

        Prints a string literal directly, or looks up an identifier's value
        in *toko* (the [name, value] pairs returned by assignmentOpp).
        """
        if token_tip[1][0] == "INOUT" :
            if token_tip[2][0] == "STRING":
                print(token_tip[2][1])
            elif token_tip[3][0]=="EMPTY":
                print("Statement MISSING : you have to give '.' for debag")
            else:
                print("Syntax ERROR : you have to type string within parentisis or you have to give output operator")
        token_chk = 1
        # Identifier case: print the stored value whose name matches token 2.
        for i in range(0 ,len(toko)):
            tokens_id=toko[i][0]
            tokens_val=toko[i][1]
            if token_tip[1][0] == "INOUT" and token_tip[2][1] == tokens_id and token_tip[3][0] == 'STATEMENT_END':
                print(tokens_val)
    def assignmentOpp(self,token_tip):
        """Collect [name, value] pairs for every '=' token in *token_tip*."""
        toko = []
        token_chk = 1
        # NOTE(review): this loop iterates the still-empty 'toko' list, so its
        # body never executes -- appears to be dead/leftover code.
        for i in range(0 ,len(toko)):
            tokens_id=toko[i][0]
            tokens_val=toko[i][1]
        for token in range(0,len(token_tip)):
            if token_tip[token][1]== '=':
                #token_tip[token-1][1] = token_tip[token+1][1]
                # Pair the token before '=' (name) with the token after (value).
                toko.append([token_tip[token-1][1],token_tip[token +1][1]])
        return (toko)
    def add(self,token_tip,toko):
        """Handle a '~'-prefixed INOUT statement (work in progress).

        NOTE(review): the condition below reads 'i' before any assignment, so
        reaching this branch raises NameError -- needs fixing before use.
        """
        print(toko)
        if token_tip[0][0] == 'INOUT' and token_tip[1][0] == 'IDENTIFIRE' and token_tip[2][1] == "=" and token_tip[3][0] == toko[i][0] and token_tip[i+2][0] == toko[i+6][0] and token_tip[6][0] == "STATEMENT_END":
            for i in range(0,len(toko)):
                if toko[i][0] == token_tip[1][1]:
                    print(i)
        #if token_tip[4][1] == "+":
            #toko[token_tip[1][0]]
        #return (tok)
|
class ProgressBarStyle(Enum, IComparable, IFormattable, IConvertible):
    """
    Specifies the style that a System.Windows.Forms.ProgressBar uses to indicate the progress of an operation.
    enum ProgressBarStyle,values: Blocks (0),Continuous (1),Marquee (2)
    """
    # NOTE(review): auto-generated IronPython interop stub for a .NET enum.
    # The base classes (Enum, IComparable, ...) come from the CLR, and every
    # method body below is a placeholder; the real implementation lives in
    # System.Windows.Forms.
    def __eq__(self, *args):
        """ x.__eq__(y) <==> x==yx.__eq__(y) <==> x==yx.__eq__(y) <==> x==y """
        pass
    def __format__(self, *args):
        """ __format__(formattable: IFormattable,format: str) -> str """
        pass
    def __ge__(self, *args):
        pass
    def __gt__(self, *args):
        pass
    def __init__(self, *args):
        """ x.__init__(...) initializes x; see x.__class__.__doc__ for signaturex.__init__(...) initializes x; see x.__class__.__doc__ for signaturex.__init__(...) initializes x; see x.__class__.__doc__ for signature """
        pass
    def __le__(self, *args):
        pass
    def __lt__(self, *args):
        pass
    def __ne__(self, *args):
        pass
    def __reduce_ex__(self, *args):
        pass
    def __str__(self, *args):
        pass
    # Enum members; assigned their real values by the CLR at import time.
    Blocks = None
    Continuous = None
    Marquee = None
    value__ = None
|
"""
UPPER VS LOWER: Write a Python function that accepts a string and calculates the number of upper case letters and lower case letters.
Sample String : 'Hello Mr. Rogers, how are you this fine Tuesday?'
Expected Output :
No. of Upper case characters : 4
No. of Lower case Characters : 33
HINT: Two string methods that might prove useful: .isupper() and .islower()
"""
def up_low(s):
    """Print the number of upper-case and lower-case characters in *s*.

    Characters that are neither (digits, spaces, punctuation) are ignored.
    The original chained two independent `if`s with a dangling
    `else: continue`; a char cannot be both upper and lower, so `elif`
    states the intent with identical behaviour.
    """
    upper_char = 0
    lower_char = 0
    for letter in s:
        if letter.isupper():
            upper_char += 1
        elif letter.islower():
            lower_char += 1
    print("No. of Upper case characters: {}".format(upper_char))
    print("No. of Lower case characters: {}".format(lower_char))
|
"""
@name: PyHouse/src/Modules/Families/__init__.py
@author: D. Brian Kimmel
@contact: D.BrianKimmel@gmail.com
@copyright: (c) 2013-2015 by D. Brian Kimmel
@note: Created on May 17, 2013
@license: MIT License
@summary:
Various families of lighting systems.
insteon
upb
x-10
zigbee
z-wave
lutron
and others
To add a family named 'NewFamily', do the following:
* Add a package named 'New_Family'.
* Add the family name (Capitalized) to the list VALID_FAMILIES below.
* Add a module named <NewFamily>_device.py
* Add any other modules needed by the Device module.
<Newfamily>_xml
<NewFamily>_data
...
* A module to interface with the controller is recommended.
<NewFamily>_pim
There are a number of lighting 'Family' types handled here.
The Insteon family is now functional (2012).
The UPB family is work in progress
The X-10 family is mostly just a stub at present (2012)
When PyHouse is reading in the configuration for various devices, a call to family.ReadXml() is made to
add any family specific data for that device. The data for the device MUST include a 'DeviceFamily'
attribute that is already initialized with the family name.
Each family consists of four or more major areas:
Lights / Lighting Devices
Controllers - connected to the computer
Scenes - have one or more lights that are controlled together
Buttons - extra buttons with no light directly attached (key-pad-link)
Since these controllers also control things other than lights, there is also a device type defined.
Devices to control include Lights, Thermostat, Irrigation valves Pool Equipment etc.
"""
__version_info__ = (1, 6, 0)
__version__ = '.'.join(map(str, __version_info__))
VALID_FAMILIES = ['Null', 'Insteon', 'UPB', 'X10'] # Keep null first
VALID_DEVICE_TYPES = ['Light', 'Thermostat', 'Irrigation', 'Pool']
# ## END DBK
|
input_str = """
cut -135
deal with increment 38
deal into new stack
deal with increment 29
cut 120
deal with increment 30
deal into new stack
cut -7198
deal into new stack
deal with increment 59
cut -8217
deal with increment 75
cut 4868
deal with increment 29
cut 4871
deal with increment 2
deal into new stack
deal with increment 54
cut 777
deal with increment 40
cut -8611
deal with increment 3
cut -5726
deal with increment 57
deal into new stack
deal with increment 41
deal into new stack
cut -5027
deal with increment 12
cut -5883
deal with increment 45
cut 9989
deal with increment 14
cut 6535
deal with increment 18
cut -5544
deal with increment 29
deal into new stack
deal with increment 64
deal into new stack
deal with increment 41
deal into new stack
deal with increment 6
cut 4752
deal with increment 8
deal into new stack
deal with increment 26
cut -6635
deal with increment 10
deal into new stack
cut -3830
deal with increment 48
deal into new stack
deal with increment 39
cut -4768
deal with increment 65
deal into new stack
cut -5417
deal with increment 15
cut -4647
deal into new stack
cut -3596
deal with increment 17
cut -3771
deal with increment 50
cut 1682
deal into new stack
deal with increment 20
deal into new stack
deal with increment 22
deal into new stack
deal with increment 3
cut 8780
deal with increment 52
cut 7478
deal with increment 9
cut -8313
deal into new stack
cut 742
deal with increment 19
cut 9982
deal into new stack
deal with increment 68
cut 9997
deal with increment 23
cut -240
deal with increment 54
cut -7643
deal into new stack
deal with increment 6
cut -3493
deal with increment 74
deal into new stack
deal with increment 75
deal into new stack
deal with increment 40
cut 596
deal with increment 6
cut -4957
deal into new stack"""
inlist = [f.strip() for f in input_str.split('\n') if f.strip()]
def cut(pile, n, dest):
    """Apply the "cut N" shuffle of *pile* into the same-length list *dest*.

    A positive n moves the top n cards to the bottom; a negative n moves the
    bottom |n| cards to the top.  *dest* must already hold len(pile) slots.
    """
    if n == 0:
        # A zero cut is the identity.  The slice arithmetic below would
        # otherwise do dest[0:0] = pile, *inserting* a whole extra copy and
        # corrupting dest's length.
        dest[:] = pile
    elif n > 0:
        dest[0:-n] = pile[n:]
        dest[-n:] = pile[0:n]
    else:
        dest[0:n] = pile[n:]
        dest[n:] = pile[0:(len(pile)+n)]
    return
def list_eq(l1, l2):
    """Return True when the two lists are element-wise identical.

    On a mismatch, both lists are printed for debugging before returning False.
    """
    match = len(l1) == len(l2) and all(a == b for a, b in zip(l1, l2))
    if not match:
        print("Mismatch:")
        print(l1)
        print(l2)
    return match
def deal_increment(src, n, dest):
    """Apply "deal with increment n": card i of *src* lands at slot (i*n) mod len."""
    size = len(src)
    for pos, card in enumerate(src):
        dest[(pos * n) % size] = card
def deal(src, dest):
    """Apply "deal into new stack": write *src* into *dest* in reverse order."""
    last = len(src) - 1
    for pos, card in enumerate(src):
        dest[last - pos] = card
def parse_input_and_do():
    """Run the whole shuffle sequence from `inlist` on a 10007-card deck and
    print the final position of card 2019 (Advent of Code 2019 day 22, part 1).
    """
    # avoid allocating ram for each transform by just having two lists and switching src and dest each time.
    piles = [list(range(10007)), list(range(10007))]
    for i, item in enumerate(inlist):
        if i % 2 == 0:
            src, dest = piles[0], piles[1]
        else:
            src, dest = piles[1], piles[0]
        print(item)
        if i % 10 == 0:
            print(100*i/len(inlist), "% complete")
        # The last two words distinguish the three instruction kinds.
        instr, num = item.split()[-2:]
        if num == 'stack':
            deal(src, dest)
        elif instr == 'increment':
            deal_increment(src, int(num), dest)
        elif instr == 'cut':
            cut(src, int(num), dest)
        else:
            print("wat:", item)
    # Every iteration writes the freshly shuffled deck into `dest`, so after
    # the loop the result is always in `dest` regardless of parity.  The
    # original parity branch printed `src.index(2019)` for odd-length inputs,
    # i.e. the deck from *before* the final shuffle step.
    print(dest.index(2019))
### tests
def test_cut():
    """Self-check: cutting 2 and -2 on a 4-card deck are symmetric."""
    src = list(range(4))
    dest = list(range(4))
    cut(src, 2, dest)
    assert list_eq(dest, [2, 3, 0, 1])
    cut(src, -2, dest)
    assert list_eq(dest, [2, 3, 0, 1])
def test_deal_increment():
    """Self-check: dealing with increment 3 on 4 cards gives [0, 3, 2, 1]."""
    src = list(range(4))
    dest = list(range(4))
    deal_increment(src, 3, dest)
    assert list_eq(dest, [0, 3, 2, 1])
def do_tests():
    """Run the shuffle self-checks before processing the real input."""
    test_cut()
    test_deal_increment()
    print("tests passed")
if __name__ == '__main__':
    # Validate the primitives first, then solve the puzzle input.
    do_tests()
    parse_input_and_do()
'''Implemente a função fatorial(x), que recebe como parâmetro um
número inteiroe devolve um número inteiro correspondente ao fatorial
de x. Sua solução deve ser implementada utilizando recursão.'''
def fatorial(x):
    """Return x! computed recursively; any value below 2 yields 1."""
    if x > 1:
        return x * fatorial(x - 1)
    return 1
print("Hello! Please answer a few questions:")
user_name = input("What is your name? ")
user_age = input("How old are you? ")
user_city = input("Where do you live? ")
print(f"""\nHello, {user_name.title()}!
Your age is {user_age},
You live in {user_city.title()}.""")
input()
|
"""Baseball statistics calculation functions."""
def batting_average(at_bats, hits):
    """Calculates the batting average to 3 decimal places using number of at bats and hits.

    Returns 0.0 when *at_bats* is zero so the result is always a float
    (the original returned ``round(0, 3)``, which is the int ``0``).
    """
    try:
        return round(hits / at_bats, 3)
    except ZeroDivisionError:
        return 0.0
# Problem: https://docs.google.com/document/d/1tS-7_Z0VNpwO-8lgj6B3NWzIN9bHtQW7Pxqhy8U4HvQ/edit?usp=sharing
# Read one line and print each character paired with its mirror-position
# character from the reversed string, one pair per line.
message = input()
for forward_char, backward_char in zip(message, reversed(message)):
    print(forward_char, backward_char)
|
class ConfigFromJSON(object):
    """Property demo: the setter validates the filename, accepting only 0."""
    def __init__(self, filename):
        # Route through the property so validation runs on construction.
        self.json_file = filename
    @property
    def json_file(self):
        print('set')
        return self.__json_file
    @json_file.setter
    def json_file(self, value):
        print('check')
        # The original used `value is not 0`: identity comparison with an int
        # literal, a CPython implementation detail and a SyntaxWarning since
        # Python 3.8.  Compare by value instead.
        if value != 0:
            raise ValueError("No !")
        else:
            print("OK")
            self.__json_file = value
# Demo: each construction runs the validating setter ('check'/'OK'), and
# reading the property prints 'set' before returning the stored value.
print('instance')
myA = ConfigFromJSON(0)
print('instance')
myA = ConfigFromJSON(0)
print(myA.json_file)
|
# @Time : 2019/8/11 10:45
# @Author : shakespere
# @FileName: Word Break.py
class Solution(object):
    def wordBreak(self, s, wordDict):
        """Return True when *s* can be segmented into words from *wordDict*
        (LeetCode 139), via O(n^2) dynamic programming.

        :type s: str
        :type wordDict: List[str]
        :rtype: bool
        """
        # dp[i] is True when the prefix s[:i] is breakable.  The original
        # built `[False * (len(s)+1)]` -- a ONE-element list (False*n == 0) --
        # so every `dp[i] = True` below raised IndexError on real input.
        dp = [False] * (len(s) + 1)
        dp[0] = True  # the empty prefix is always breakable
        for i in range(1, len(s) + 1):
            for j in range(i):
                if dp[j] and s[j:i] in wordDict:
                    dp[i] = True
                    break  # one witness split suffices for position i
        return dp[-1]
|
# -*- coding: utf-8 -*-
# @Time : 2019-12-20 16:17
# @Author : songzhenxi
# @Email : songzx_2326@163.com
# @File : LeetCode_242_1034.py
# @Software: PyCharm
# 给定两个字符串 s 和 t ,编写一个函数来判断 t 是否是 s 的字母异位词。
#
# 示例 1:
#
# 输入: s = "anagram", t = "nagaram"
# 输出: true
#
#
# 示例 2:
#
# 输入: s = "rat", t = "car"
# 输出: false
#
# 说明:
# 你可以假设字符串只包含小写字母。
#
# 进阶:
# 如果输入字符串包含 unicode 字符怎么办?你能否调整你的解法来应对这种情况?
# Related Topics 排序 哈希表
# leetcode submit region begin(Prohibit modification and deletion)
class Solution(object):
    def isAnagram(self, s, t):
        """LeetCode 242: valid anagram (https://leetcode-cn.com/problems/valid-anagram/).

        Count letters of s up and letters of t down in one 26-slot table;
        the strings are anagrams exactly when every slot returns to zero.
        Assumes lowercase ASCII letters only.

        :type s: str
        :type t: str
        :rtype: bool
        """
        counts = [0] * 26
        base = ord('a')
        for ch in s:
            counts[ord(ch) - base] += 1
        for ch in t:
            counts[ord(ch) - base] -= 1
        # All-zero counts <=> identical multisets of letters.
        return not any(counts)
|
class _GetAttrAccumulator:
    """Records a chain of attribute accesses, indexings and keyword calls so
    the chain can be replayed later against a real target via :meth:`apply`.

    Each dunder returns a *new* accumulator with one more step appended, so
    instances behave as immutable expression templates.  `_text` carries a
    human-readable trail for `__str__`.
    """
    @staticmethod
    def apply(gaa, target):
        # Replay *gaa* against *target*.  Plain values pass through unchanged,
        # except dicts/lists, which are transformed element-wise so nested
        # accumulators inside containers are also resolved.
        if not isinstance(gaa, _GetAttrAccumulator):
            if isinstance(gaa, dict):
                return {
                    _GetAttrAccumulator.apply(k, target): _GetAttrAccumulator.apply(v, target)
                    for k, v in gaa.items()
                }
            if isinstance(gaa, list):
                return [_GetAttrAccumulator.apply(v, target) for v in gaa]
            return gaa
        result = target
        for fn in gaa._gotten:
            result = fn(target, result)
        # The replayed chain may itself yield an accumulator; resolve it too.
        result = _GetAttrAccumulator.apply(result, target)
        return result
    def __init__(self, gotten=None, text=None):
        # gotten: list of (target, current) -> next step callables.
        if gotten is None:
            gotten = []
        self._gotten = gotten
        self._text = "" if text is None else text
    def __getitem__(self, item):
        # Append an indexing step: current[item].
        gotten = [
            *self._gotten,
            lambda t, o: o[item],
        ]
        return _GetAttrAccumulator(gotten, "%s[%s]" % (self._text, item))
    def __getattr__(self, name):
        # Append an attribute step: getattr(current, name).  Only fires for
        # names not found normally, so _gotten/_text stay accessible.
        gotten = [
            *self._gotten,
            lambda t, o: getattr(o, name),
        ]
        return _GetAttrAccumulator(gotten, "%s.%s" % (self._text, name))
    def __call__(self, **kw):
        # Append a keyword-only call step; argument values are themselves
        # resolved against the target at replay time.
        gotten = [
            *self._gotten,
            lambda t, o: o(**{
                k: _GetAttrAccumulator.apply(v, t)
                for k, v in kw.items()
            }),
        ]
        return _GetAttrAccumulator(gotten, "%s(...)" % self._text)
    def __str__(self):
        return "_GetAttrAccumulator<%s>" % self._text
def _recursive_transform(o, fn):
# First, a shallow tranform.
o = fn(o)
# Now a recursive one, if needed.
if isinstance(o, dict):
return {
k: _recursive_transform(v, fn)
for k, v in o.items()
}
elif isinstance(o, list):
return [
_recursive_transform(e, fn)
for e in o
]
else:
return o
|
# Smallest Difference Problem : Two arrays are given and you have to find the
# pair (i , j) such abs(i-j) is the smallest where i belongs to the first array and j belong to the second array respectively
def smallestDifference(arr1: list, arr2: list):
    """Return the pair [a, b] (a from arr1, b from arr2) minimising abs(a - b).

    Sorts both lists in place, then walks them with two pointers in
    O(n log n + m log m).  Returns [] when either list is empty.

    Fixes over the original: the ``snum < fnum`` branch computed a *negative*
    difference (snum - fnum) that always won the comparison; the best pair
    was appended to a list instead of replaced; and the function never
    returned its result (fell off the end with None).
    """
    arr1.sort()
    arr2.sort()
    i = j = 0
    smallest = float("inf")
    best = []
    while i < len(arr1) and j < len(arr2):
        fnum, snum = arr1[i], arr2[j]
        if fnum < snum:
            current = snum - fnum
            i += 1
        elif snum < fnum:
            current = fnum - snum
            j += 1
        else:
            # Exact match: a difference of 0 cannot be beaten.
            return [fnum, snum]
        if current < smallest:
            smallest = current
            best = [fnum, snum]
    return best
if __name__ == '__main__' :
    # Demo: 3 appears in both lists, so the zero-difference pair wins.
    print(smallestDifference([12,3,45,6], [2,4,3,5]))
class CasHelper:
    """Selenium page-interaction helper for CAS login scenarios."""
    def __init__(self, app):
        # app: application fixture exposing the shared WebDriver as `app.wd`.
        self.app = app
    def wrong_password(self, credentials):
        """Drive the login form with *credentials*: enter the login, submit,
        then enter the password and submit again (exercises the failure path).
        """
        wd = self.app.wd
        #self.app.open_home_page()
        wd.find_element_by_css_selector("input.form-control").click()
        wd.find_element_by_css_selector("input.form-control").clear()
        wd.find_element_by_css_selector("input.form-control").send_keys(credentials.login)
        wd.find_element_by_css_selector("button.btn.btn-default").click()
        wd.find_element_by_css_selector("input.form-control").click()
        wd.find_element_by_css_selector("input.form-control").send_keys(credentials.password)
        # NOTE(review): layout-specific xpath — brittle; verify against the
        # current page structure if this test starts failing.
        wd.find_element_by_xpath("//section[@id='section-left']/section[2]/div/div[1]").click()
        wd.find_element_by_css_selector("button.btn.btn-default").click()
"""Nullutil: nullutil for Python."""
# Null Coalesce
# This means:
# lhs ?? rhs
def qq(lhs, rhs):
    """Null-coalescing operator: `lhs ?? rhs` — rhs only when lhs is None."""
    if lhs is None:
        return rhs
    return lhs
# Safty Access
# This means:
# instance?.member
# instance?.member(*params)
def q_(instance, member, params=None):
    """Safe member access: `instance?.member` / `instance?.member(*params)`.

    Returns None when *instance* is None.  Otherwise fetches the member and,
    depending on *params*: returns it unchanged (None), calls it with keyword
    arguments (dict), positional arguments (list/tuple), or a single argument.
    """
    if instance is None:
        return None
    attr = getattr(instance, member)
    if params is None:
        return attr
    if isinstance(params, dict):
        return attr(**params)
    if isinstance(params, (list, tuple)):
        return attr(*params)
    return attr(params)
# This means:
# instance?[index]
def qL7(collection, index):
    """Safe indexing: `collection?[index]` — None when the collection is None."""
    if collection is None:
        return None
    return collection[index]
# Safety Evalate (do Syntax)
# This means:
# params?.let{expression}
# do
# p0 <- params[0]
# p1 <- params[1]
# ...
# return expression(p0, p1, ...)
def q_let(params, expression):
    """Safe evaluation: `params?.let{expression}` (do-notation style).

    If *params* is a dict / list / tuple, every contained value must be
    non-None, otherwise None is returned; the values are then passed to
    *expression* as keyword / positional arguments.  A scalar is passed as a
    single argument, and a None scalar short-circuits to None.
    """
    if isinstance(params, dict):
        if any(value is None for value in params.values()):
            return None
        return expression(**params)
    if isinstance(params, (list, tuple)):
        if any(value is None for value in params):
            return None
        return expression(*params)
    if params is None:
        return None
    return expression(params)
|
# -*- coding: utf-8 -*-
# Authors: Y. Jia <ytjia.zju@gmail.com>
"""
Given a string S and a string T, find the minimum window in S which will contain all the
characters in T in complexity O(n).
https://leetcode.com/problems/minimum-window-substring/description/
"""
class Solution(object):
    def minWindow(self, s, t):
        """Return the shortest substring of *s* containing every character of
        *t* with multiplicity, or "" when no such window exists.  O(n)
        sliding-window algorithm.

        :type s: str
        :type t: str
        :rtype: str
        """
        best = ""
        if not s or not t:
            return best
        # need[c]: how many more copies of c the current window still lacks.
        need = {}
        for ch in t:
            need[ch] = need.get(ch, 0) + 1
        missing = len(t)
        left = right = 0
        while right < len(s):
            entering = s[right]
            if entering in need:
                need[entering] -= 1
                if need[entering] >= 0:
                    missing -= 1
            right += 1
            # Window covers t: record it and shrink from the left while it
            # still covers.
            while missing == 0:
                if len(best) == 0 or right - left < len(best):
                    best = s[left:right]
                leaving = s[left]
                if leaving in need:
                    need[leaving] += 1
                    if need[leaving] > 0:
                        missing += 1
                left += 1
        return best
|
class Articles:
    """
    Article class defines Article objects
    """
    def __init__(self,author,title,description,article_url,image_url,days,hours,minutes):
        # Article metadata — presumably as delivered by a news API; verify
        # field semantics against the caller that constructs these.
        self.author = author
        self.title = title
        self.description = description
        self.article_url = article_url
        self.image_url = image_url
        # Article age split into days/hours/minutes components.
        self.days = days
        self.hours = hours
        self.minutes = minutes
class Source:
    """
    Source class to define news source Objects
    """
    def __init__(self,id,name,description,url,category,language,country):
        # News-source metadata; `id` shadows the builtin but is kept for
        # interface compatibility with existing callers.
        self.id = id
        self.name = name
        self.description = description
        self.url = url
        self.category = category
        self.language = language
        self.country = country
|
class AnyArg(object):
    """Wildcard matcher for mock assertions: compares equal to any value."""
    def __eq__(self, b: object) -> bool:
        """Return True regardless of *b*, so AnyArg() == x for every x.

        The original annotated the parameter with the builtin *function*
        ``any``; ``object`` is the correct "any value" annotation.
        """
        return True
|
""" This file was generated when mcuprog was built """
VERSION = '3.9.1.120'
COMMIT_ID = '84ffb61b46baa4fb20896deb0179d09fe3097b5c'
BUILD_DATE = '2021-08-23 18:16:12 +0700'
|
'''
Created on 2017. 8. 16.
@author: jongyeob
'''
# Size in bytes of the image payload carried in one 0x08D2 telemetry frame.
_tlm0x8D2_image_size = 2048
# struct-style field layout of telemetry packet 0x08D2: two uint32 counters
# ('I') followed by the raw image bytes ('2048B').
tlm0x08D2_struct = [('I','frame_number'),
                    ('I','frame_position'),
                    ('{}B'.format(_tlm0x8D2_image_size),'data')]
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
assert True; assert not False
assert True
assert not False
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Written by Lucas Sinclair and Paul Rougieux.
JRC biomass Project.
Unit D1 Bioeconomy.
"""
# Built-in modules #
# Third party modules #
###############################################################################
class Graphs(object):
    """Container of graph instances: addressable by short_name, iterable,
    sized, and callable (fans the call out to every contained graph)."""
    def __init__(self):
        self.instances = []
    def __getitem__(self, key):
        matching = [graph for graph in self.instances if graph.short_name == key]
        return matching[0]
    def __iter__(self):
        return iter(self.instances)
    def __len__(self):
        return len(self.instances)
    def __call__(self, *args, **kwargs):
        return [graph(*args, **kwargs) for graph in self.instances]
###############################################################################
def load_graphs_from_module(parent, submodule):
    """
    Sorry for the black magic. The result is an object whose attributes
    are all the graphs found in submodule.py initialized with the
    proper instance as only argument.
    """
    # Instantiate every graph class listed in the submodule's __all__,
    # passing `parent` as the sole constructor argument.
    graph_classes = [getattr(submodule, attr) for attr in submodule.__all__]
    graph_instances = [cls(parent) for cls in graph_classes]
    # Register each instance on a fresh container, keyed by its short_name.
    graphs = Graphs()
    for instance in graph_instances:
        setattr(graphs, instance.short_name, instance)
        graphs.instances.append(instance)
    return graphs
#
# PySNMP MIB module SNMP-USM-DH-OBJECTS-MIB (http://snmplabs.com/pysmi)
# ASN.1 source file:///Users/davwang4/Dev/mibs.snmplabs.com/asn1/SNMP-USM-DH-OBJECTS-MIB
# Produced by pysmi-0.3.4 at Mon Apr 29 21:00:30 2019
# On host DAVWANG4-M-1475 platform Darwin version 18.5.0 by user davwang4
# Using Python version 3.7.3 (default, Mar 27 2019, 09:23:15)
#
# --- pysmi-generated symbol imports from prerequisite MIB modules ---
ObjectIdentifier, Integer, OctetString = mibBuilder.importSymbols("ASN1", "ObjectIdentifier", "Integer", "OctetString")
NamedValues, = mibBuilder.importSymbols("ASN1-ENUMERATION", "NamedValues")
ValueRangeConstraint, ConstraintsUnion, ValueSizeConstraint, SingleValueConstraint, ConstraintsIntersection = mibBuilder.importSymbols("ASN1-REFINEMENT", "ValueRangeConstraint", "ConstraintsUnion", "ValueSizeConstraint", "SingleValueConstraint", "ConstraintsIntersection")
SnmpAdminString, = mibBuilder.importSymbols("SNMP-FRAMEWORK-MIB", "SnmpAdminString")
usmUserEntry, = mibBuilder.importSymbols("SNMP-USER-BASED-SM-MIB", "usmUserEntry")
ModuleCompliance, NotificationGroup, ObjectGroup = mibBuilder.importSymbols("SNMPv2-CONF", "ModuleCompliance", "NotificationGroup", "ObjectGroup")
Bits, ObjectIdentity, Counter64, MibIdentifier, ModuleIdentity, Gauge32, IpAddress, iso, MibScalar, MibTable, MibTableRow, MibTableColumn, Unsigned32, experimental, NotificationType, TimeTicks, Integer32, Counter32 = mibBuilder.importSymbols("SNMPv2-SMI", "Bits", "ObjectIdentity", "Counter64", "MibIdentifier", "ModuleIdentity", "Gauge32", "IpAddress", "iso", "MibScalar", "MibTable", "MibTableRow", "MibTableColumn", "Unsigned32", "experimental", "NotificationType", "TimeTicks", "Integer32", "Counter32")
TextualConvention, DisplayString = mibBuilder.importSymbols("SNMPv2-TC", "TextualConvention", "DisplayString")
# --- Module identity and top-level OID layout (experimental arc 1.3.6.1.3.101) ---
snmpUsmDHObjectsMIB = ModuleIdentity((1, 3, 6, 1, 3, 101))
snmpUsmDHObjectsMIB.setRevisions(('2000-03-06 00:00',))
if mibBuilder.loadTexts: snmpUsmDHObjectsMIB.setLastUpdated('200003060000Z')
if mibBuilder.loadTexts: snmpUsmDHObjectsMIB.setOrganization('Excite@Home')
usmDHKeyObjects = MibIdentifier((1, 3, 6, 1, 3, 101, 1))
usmDHKeyConformance = MibIdentifier((1, 3, 6, 1, 3, 101, 2))
# Textual convention carrying a Diffie-Hellman key-change payload.
class DHKeyChange(TextualConvention, OctetString):
    reference = '-- Diffie-Hellman Key-Agreement Standard, PKCS #3; RSA Laboratories, November 1993'
    status = 'current'
# --- Per-user DH key-change objects (augment the USM user table) ---
usmDHPublicObjects = MibIdentifier((1, 3, 6, 1, 3, 101, 1, 1))
usmDHParameters = MibScalar((1, 3, 6, 1, 3, 101, 1, 1, 1), OctetString()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: usmDHParameters.setStatus('current')
usmDHUserKeyTable = MibTable((1, 3, 6, 1, 3, 101, 1, 1, 2), )
if mibBuilder.loadTexts: usmDHUserKeyTable.setStatus('current')
usmDHUserKeyEntry = MibTableRow((1, 3, 6, 1, 3, 101, 1, 1, 2, 1), )
usmUserEntry.registerAugmentions(("SNMP-USM-DH-OBJECTS-MIB", "usmDHUserKeyEntry"))
usmDHUserKeyEntry.setIndexNames(*usmUserEntry.getIndexNames())
if mibBuilder.loadTexts: usmDHUserKeyEntry.setStatus('current')
usmDHUserAuthKeyChange = MibTableColumn((1, 3, 6, 1, 3, 101, 1, 1, 2, 1, 1), DHKeyChange()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: usmDHUserAuthKeyChange.setStatus('current')
usmDHUserOwnAuthKeyChange = MibTableColumn((1, 3, 6, 1, 3, 101, 1, 1, 2, 1, 2), DHKeyChange()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: usmDHUserOwnAuthKeyChange.setStatus('current')
usmDHUserPrivKeyChange = MibTableColumn((1, 3, 6, 1, 3, 101, 1, 1, 2, 1, 3), DHKeyChange()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: usmDHUserPrivKeyChange.setStatus('current')
usmDHUserOwnPrivKeyChange = MibTableColumn((1, 3, 6, 1, 3, 101, 1, 1, 2, 1, 4), DHKeyChange()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: usmDHUserOwnPrivKeyChange.setStatus('current')
# --- Kickstart table: initial DH public values per security name ---
usmDHKickstartGroup = MibIdentifier((1, 3, 6, 1, 3, 101, 1, 2))
usmDHKickstartTable = MibTable((1, 3, 6, 1, 3, 101, 1, 2, 1), )
if mibBuilder.loadTexts: usmDHKickstartTable.setStatus('current')
usmDHKickstartEntry = MibTableRow((1, 3, 6, 1, 3, 101, 1, 2, 1, 1), ).setIndexNames((0, "SNMP-USM-DH-OBJECTS-MIB", "usmDHKickstartIndex"))
if mibBuilder.loadTexts: usmDHKickstartEntry.setStatus('current')
usmDHKickstartIndex = MibTableColumn((1, 3, 6, 1, 3, 101, 1, 2, 1, 1, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 2147483647)))
if mibBuilder.loadTexts: usmDHKickstartIndex.setStatus('current')
usmDHKickstartMyPublic = MibTableColumn((1, 3, 6, 1, 3, 101, 1, 2, 1, 1, 2), OctetString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: usmDHKickstartMyPublic.setStatus('current')
usmDHKickstartMgrPublic = MibTableColumn((1, 3, 6, 1, 3, 101, 1, 2, 1, 1, 3), OctetString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: usmDHKickstartMgrPublic.setStatus('current')
usmDHKickstartSecurityName = MibTableColumn((1, 3, 6, 1, 3, 101, 1, 2, 1, 1, 4), SnmpAdminString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: usmDHKickstartSecurityName.setStatus('current')
# --- Conformance: compliance statement and object groups ---
usmDHKeyMIBCompliances = MibIdentifier((1, 3, 6, 1, 3, 101, 2, 1))
usmDHKeyMIBGroups = MibIdentifier((1, 3, 6, 1, 3, 101, 2, 2))
usmDHKeyMIBCompliance = ModuleCompliance((1, 3, 6, 1, 3, 101, 2, 1, 1)).setObjects(("SNMP-USM-DH-OBJECTS-MIB", "usmDHKeyMIBBasicGroup"), ("SNMP-USM-DH-OBJECTS-MIB", "usmDHKeyParamGroup"), ("SNMP-USM-DH-OBJECTS-MIB", "usmDHKeyKickstartGroup"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
    usmDHKeyMIBCompliance = usmDHKeyMIBCompliance.setStatus('current')
usmDHKeyMIBBasicGroup = ObjectGroup((1, 3, 6, 1, 3, 101, 2, 2, 1)).setObjects(("SNMP-USM-DH-OBJECTS-MIB", "usmDHUserAuthKeyChange"), ("SNMP-USM-DH-OBJECTS-MIB", "usmDHUserOwnAuthKeyChange"), ("SNMP-USM-DH-OBJECTS-MIB", "usmDHUserPrivKeyChange"), ("SNMP-USM-DH-OBJECTS-MIB", "usmDHUserOwnPrivKeyChange"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
    usmDHKeyMIBBasicGroup = usmDHKeyMIBBasicGroup.setStatus('current')
usmDHKeyParamGroup = ObjectGroup((1, 3, 6, 1, 3, 101, 2, 2, 2)).setObjects(("SNMP-USM-DH-OBJECTS-MIB", "usmDHParameters"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
    usmDHKeyParamGroup = usmDHKeyParamGroup.setStatus('current')
usmDHKeyKickstartGroup = ObjectGroup((1, 3, 6, 1, 3, 101, 2, 2, 3)).setObjects(("SNMP-USM-DH-OBJECTS-MIB", "usmDHKickstartMyPublic"), ("SNMP-USM-DH-OBJECTS-MIB", "usmDHKickstartMgrPublic"), ("SNMP-USM-DH-OBJECTS-MIB", "usmDHKickstartSecurityName"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
    usmDHKeyKickstartGroup = usmDHKeyKickstartGroup.setStatus('current')
# Export every defined symbol so other MIB modules can import them.
mibBuilder.exportSymbols("SNMP-USM-DH-OBJECTS-MIB", usmDHUserOwnPrivKeyChange=usmDHUserOwnPrivKeyChange, usmDHKeyMIBCompliance=usmDHKeyMIBCompliance, snmpUsmDHObjectsMIB=snmpUsmDHObjectsMIB, usmDHKickstartEntry=usmDHKickstartEntry, usmDHUserPrivKeyChange=usmDHUserPrivKeyChange, usmDHKeyObjects=usmDHKeyObjects, usmDHKickstartIndex=usmDHKickstartIndex, usmDHKickstartMgrPublic=usmDHKickstartMgrPublic, usmDHKickstartMyPublic=usmDHKickstartMyPublic, PYSNMP_MODULE_ID=snmpUsmDHObjectsMIB, usmDHKickstartTable=usmDHKickstartTable, DHKeyChange=DHKeyChange, usmDHUserKeyTable=usmDHUserKeyTable, usmDHKeyMIBCompliances=usmDHKeyMIBCompliances, usmDHUserOwnAuthKeyChange=usmDHUserOwnAuthKeyChange, usmDHKeyMIBBasicGroup=usmDHKeyMIBBasicGroup, usmDHUserKeyEntry=usmDHUserKeyEntry, usmDHKeyKickstartGroup=usmDHKeyKickstartGroup, usmDHUserAuthKeyChange=usmDHUserAuthKeyChange, usmDHKickstartGroup=usmDHKickstartGroup, usmDHPublicObjects=usmDHPublicObjects, usmDHKeyConformance=usmDHKeyConformance, usmDHKickstartSecurityName=usmDHKickstartSecurityName, usmDHKeyMIBGroups=usmDHKeyMIBGroups, usmDHParameters=usmDHParameters, usmDHKeyParamGroup=usmDHKeyParamGroup)
|
# Collect (name, weight) records from stdin until the user stops, tracking
# the heaviest and lightest weights, then report who holds each extreme.
pessoas = list()   # list of [nome, peso] records
dados = list()     # scratch record, reused and cleared each iteration
pesados = list()   # NOTE(review): never used below
leve = list()      # NOTE(review): never used below
menor = maior = 0
while True:
    nome = str(input('Insira o nome: '))
    peso = float(input('Insira o peso (kg): '))
    # The first record initialises both extremes; later ones update them.
    if len(pessoas) == 0:
        menor = peso
        maior = peso
    elif peso < menor:
        menor = peso
    elif peso > maior:
        maior = peso
    dados.append(nome)
    dados.append(peso)
    pessoas.append(dados[:])  # append a copy, since dados is cleared next
    dados.clear()
    # NOTE(review): [0] raises IndexError if the user just presses Enter.
    opcao = str(input('Deseja continuar [S/N]: ')).strip().upper()[0]
    if opcao == 'N':
        break
print(f'Você cadastrou um total de {len(pessoas)} pessoa(s).')
print(f'O maior peso foi {maior} kg, de ', end='')
for pessoa in pessoas:
    print(f'{pessoa[0]} ' if pessoa[1] == maior else '', end='')
print(f'\nO menor peso foi {menor} kg, de ', end='')
for pessoa in pessoas:
    print(f'{pessoa[0]} ' if pessoa[1] == menor else '', end='')
|
# coding=utf-8
# Author: Jianghan LI
# Question: 072.Edit_Distance
# Complexity: O(N)
# Date: 2017-09-06 8:01 - 8:11, 0 wrong try
class Solution(object):
    def minDistance(self, w1, w2):
        """LeetCode 72: minimum number of single-character insertions,
        deletions and substitutions turning w1 into w2 (edit distance),
        via the classic O(m*n) DP table.

        :type word1: str
        :type word2: str
        :rtype: int
        """
        rows, cols = len(w1) + 1, len(w2) + 1
        # dp[r][c]: edit distance between w1[:r] and w2[:c].
        dp = [[0] * cols for _ in range(rows)]
        for r in range(rows):
            dp[r][0] = r  # delete all r characters
        for c in range(cols):
            dp[0][c] = c  # insert all c characters
        for r in range(1, rows):
            for c in range(1, cols):
                substitution_cost = 0 if w1[r - 1] == w2[c - 1] else 1
                dp[r][c] = min(
                    dp[r - 1][c] + 1,                      # delete
                    dp[r][c - 1] + 1,                      # insert
                    dp[r - 1][c - 1] + substitution_cost,  # substitute/keep
                )
        return dp[-1][-1]
|
# -*- coding: utf-8 -*-
__version__ = '0.1.0b1'
__description__ = """Simple AWS Route 53 DNS Updater.
Dynamically update a Route 53 DNS record to the current public IP of the
computer/network it is executed on. Records will only be modified if the
current value and public IP differ.
"""
|
def get_message_native_type(type):
    """Map a wire-format field type name to its C++ type; None when unknown."""
    native_types = {
        'uint8': 'uint8_t', 'uint16': 'uint16_t',
        'uint32': 'uint32_t', 'uint64': 'uint64_t',
        'int8': 'int8_t', 'int16': 'int16_t',
        'int32': 'int32_t', 'int64': 'int64_t',
        'buffer': 'iota::vector<uint8_t>',
        'list': 'iota::vector<uint64_t>',
        'string': 'iota::string',
    }
    return native_types.get(type)
def get_message_default_initializer(type):
    """Map a wire-format field type name to its C++ default-initializer
    expression; None when the type is unknown."""
    initializers = {
        'uint8': '{}', 'uint16': '{}',
        'uint32': '{}', 'uint64': '{}',
        'int8': '{}', 'int16': '{}',
        'int32': '{}', 'int64': '{}',
        'buffer': 'iota::create_vector<uint8_t>()',
        'list': 'iota::create_vector<uint64_t>()',
        'string': 'iota::create_string()',
    }
    return initializers.get(type)
def generate_builder(file, message):
    """Emit a C++ `<message.name>_builder` class into *file* that serialises
    each message field through an iota::buffer_generator."""
    class_name = f"{message.name}_builder"
    file.write(f"\tclass {class_name} {{\n")
    file.write(f"\tpublic:\n")
    file.write(f"\t\t{class_name}(): buf{{}} {{}}\n")
    # One add_<field> setter per message field, tagged with its wire index.
    for field in message.items:
        name = field.name
        native_type = get_message_native_type(field.type)
        index = field.index
        file.write(f"\t\tvoid add_{name}({native_type} {name}) {{\n")
        file.write(f"\t\t\tbuf.add<{native_type}>({index}, {name});\n")
        file.write(f"\t\t}}\n")
    # serialize()/length() expose the finished wire buffer.
    file.write(f"\t\tuint8_t* serialize() {{\n")
    file.write(f"\t\t\tbuf.serialize();\n")
    file.write(f"\t\t\treturn buf.data();\n")
    file.write(f"\t\t}}\n")
    file.write(f"\t\tsize_t length() {{\n")
    file.write(f"\t\t\treturn buf.length();\n")
    file.write(f"\t\t}}\n")
    file.write(f"\tprivate:\n")
    file.write(f"\t\tiota::buffer_generator buf;\n")
    file.write(f"\t}};\n\n")
def generate_parser(file, message):
    """Emit a C++ `<message.name>_parser` class into *file* that decodes a
    serialised buffer and exposes get_<field>()/has_<field>() accessors."""
    class_name = f"{message.name}_parser"
    file.write(f"\tclass {class_name} {{\n")
    file.write(f"\tpublic:\n")
    file.write(f"\t\t{class_name}(uint8_t* buf, size_t size) {{\n")
    file.write(f"\t\t\tfor(size_t i = 0; i < size;){{\n")
    file.write(f"\t\t\t\tiota::index_type index = buf[i];\n")
    file.write(f"\t\t\t\ti++;\n")
    file.write(f"\t\t\t\tswitch(index){{\n")
    # One switch case per field index: set the presence flag, decode the value.
    for field in message.items:
        file.write(f"\t\t\t\t// {field.type}\n")
        file.write(f"\t\t\t\tcase {field.index}: {{\n")
        file.write(f"\t\t\t\t\tthis->_p_{field.name} = true;\n")
        file.write(f"\t\t\t\t\ti += iota::parse_item<{get_message_native_type(field.type)}>(&buf[i], this->_m_{field.name});\n")
        file.write(f"\t\t\t\t\tbreak;\n")
        file.write(f"\t\t\t\t}}\n")
    file.write(f"\t\t\t\t}}\n")
    file.write(f"\t\t\t}}\n")
    file.write(f"\t\t}}\n")
    # Accessors: get_<name> returns the decoded value, has_<name> its presence.
    for field in message.items:
        name = field.name
        native_type = get_message_native_type(field.type)
        file.write(f"\t\t{native_type}& get_{name}() {{\n")
        file.write(f"\t\t\treturn this->_m_{field.name};\n")
        file.write(f"\t\t}}\n")
        file.write(f"\t\tbool has_{name}() {{\n")
        file.write(f"\t\t\treturn this->_p_{field.name};\n")
        file.write(f"\t\t}}\n")
    file.write(f"\tprivate:\n")
    # Backing storage: _m_<name> value plus _p_<name> presence flag.
    for field in message.items:
        name = field.name
        native_type = get_message_native_type(field.type)
        file.write(f"\t\t{native_type} _m_{name} = {get_message_default_initializer(field.type)}; bool _p_{name} = false;\n")
    file.write(f"\t}};\n\n")
def get_enum_native_type(type):
    """Map an enum's underlying type name to its C++ integer type; None when
    unknown (only fixed-width integer types are valid enum bases)."""
    integer_types = {
        'uint8': 'uint8_t', 'uint16': 'uint16_t',
        'uint32': 'uint32_t', 'uint64': 'uint64_t',
        'int8': 'int8_t', 'int16': 'int16_t',
        'int32': 'int32_t', 'int64': 'int64_t',
    }
    return integer_types.get(type)
def generate_enum(file, enum):
    """Emit a C++ `enum class` declaration for *enum* into *file*."""
    underlying = get_enum_native_type(enum.item_type)
    file.write(f"\tenum class {enum.name} : {underlying} {{\n")
    for entry in enum.items:
        file.write(f"\t\t{entry.name} = {entry.value},\n")
    file.write("\t};\n\n")
def copy_file_contents(file1, file2):
    """Copy every line of *file1* into *file2*, then append a blank separator."""
    file2.writelines(file1)
    file2.write("\n\n")
def generate(subgenerator, output, items, module):
    """Generate one C++ header at *output* containing builders/parsers for
    every message and enum in *items*, wrapped in the (dotted) namespace
    *module*.  *subgenerator* selects which runtime support library to inline.
    """
    file = open(output, 'w')
    file.write("#pragma once\n\n")
    file.write("#include <stdint.h>\n")
    file.write("#include <stddef.h>\n")
    if not subgenerator:
        subgenerator = 'std' # use std as default subgen
    # Inline the support library matching the chosen subgenerator.
    if subgenerator == 'std':
        lib_file = open('lib/cpp/lib-std.hpp', 'r')
        copy_file_contents(lib_file, file)
        lib_file.close()
    elif subgenerator == 'frigg':
        lib_file = open('lib/cpp/lib-frigg.hpp', 'r')
        copy_file_contents(lib_file, file)
        lib_file.close()
    elif subgenerator == 'sigma-kernel':
        lib_file = open('lib/cpp/lib-sigma-kernel.hpp', 'r')
        copy_file_contents(lib_file, file)
        lib_file.close()
    else:
        print(f"cpp: Unknown subgenerator: {subgenerator}")
        exit()
    # The serialisation/deserialisation primitives are always included.
    buffer_builder_file = open('lib/cpp/buffer_builder.hpp', 'r')
    copy_file_contents(buffer_builder_file, file)
    buffer_builder_file.close()
    buffer_parser_file = open('lib/cpp/buffer_parser.hpp', 'r')
    copy_file_contents(buffer_parser_file, file)
    buffer_parser_file.close()
    # Open one nested namespace per dotted module component.
    module_parts = module.split('.')
    for part in module_parts:
        file.write(f"namespace [[gnu::visibility(\"hidden\")]] {part} {{ ")
    file.write("\n")
    for item in items:
        if(item.type == 'message'):
            generate_builder(file, item)
            generate_parser(file, item)
        elif(item.type == 'enum'):
            generate_enum(file, item)
        else:
            print(f"cpp: Unknown item type: {item.type}")
            exit()
    # Close the namespaces opened above.
    for part in module_parts:
        file.write("} ")
    file.close()
    print("Generated C++ header")
'''
@author: l4zyc0d3r

People who are happy make others happy. I am going to finish it slowly but definitely.
'''
class Solution:
    def triangleNumber(self, N: "List[int]") -> int:
        """Count the index triplets in *N* whose values can form a triangle.

        Sorts the values in descending order; for each candidate longest
        side a two-pointer sweep counts the pairs of shorter sides whose
        sum exceeds it. Runs in O(n^2) after the O(n log n) sort.
        """
        N.sort(reverse=True)
        total = 0
        for k in range(len(N)):
            longest = N[k]
            lo, hi = k + 1, len(N) - 1
            while lo < hi:
                if N[hi] + N[lo] <= longest:
                    # Pair too small to beat the longest side: shrink from the right.
                    hi -= 1
                else:
                    # Every index between lo and hi pairs with N[lo] successfully.
                    total += hi - lo
                    lo += 1
        return total
|
# print("小明")
# import random
# for i in range(5):
# print(i)
# for j in 'string':
# print(j)
#
# for m in range(0,5):
# print(m*(random.random()))
#
# def insert_sort(ilist):
# for i in range(len(ilist)):
# print(i)
# for j in range(i):
# print(j)
# # if ilist[i] < ilist[j]:
# # ilist.insert(j, ilist.pop(i))
# # break
# # return ilist
#
# ilist = insert_sort([4,5,6,7,3,2,6,9,8])
# print(ilist)
# Sieve primes below 20 by repeatedly removing multiples of each i
# (keeping i itself). The original chained lazy ``filter`` objects whose
# lambdas late-bound ``i``, so every predicate saw the final value of ``i``
# and the chain never filtered correctly; building a list each pass forces
# eager evaluation with the current ``i``. The trailing bare ``nums``
# expression was a no-op, replaced by an explicit print.
nums = range(2, 20)
for i in nums:
    nums = [x for x in nums if x == i or x % i]
print(nums)  # -> [2, 3, 5, 7, 11, 13, 17, 19]
data = {'id': 1, 'id1': 2, 'id2': 2, 'id3': 2}
# Django's QuerySet.filter takes field lookups as keyword arguments; passing
# the dict positionally raises TypeError, so unpack it with **.
que_result = list(ProjectAccounts.objects.filter(**data).values())
|
"""
Manually selected famous paintings that can be optionally put in a test-set.
The MIT License (MIT)
Originally created at 6/23/20, for Python 3.x
Copyright (c) 2021 Panos Achlioptas (ai.stanford.edu/~optas) & Stanford Geometric Computing Lab
"""
masterpieces_for_test = [
'leonardo-da-vinci_mona-lisa',
'vincent-van-gogh_the-starry-night-1889(1)',
'vincent-van-gogh_the-starry-night-1888-1',
'vincent-van-gogh_the-starry-night-1889-1',
'vincent-van-gogh_the-starry-night-1888-2',
'vincent-van-gogh_the-starry-night-1888',
'johannes-vermeer_the-girl-with-a-pearl-earring',
'robert-silvers_girl-with-the-pearl-earring-2008',
'robert-silvers_guernica-photomosaic-mounted-on-aluminum',
'gustav-klimt_the-kiss-1908(1)',
'leonardo-da-vinci_the-lady-with-the-ermine-cecilia-gallerani-1496',
'vincent-van-gogh_cafe-terrace-on-the-place-du-forum-1888(1)',
'vincent-van-gogh_the-cafe-terrace-on-the-place-du-forum-arles-at-night-1888',
'vincent-van-gogh_cafe-terrace-place-du-forum-arles-1888(1)',
'eugene-delacroix_the-liberty-leading-the-people-1830',
'claude-monet_impression-sunrise',
'james-mcneill-whistler_arrangement-in-grey-and-black-no-1-portrait-of-the-artist-s-mother-1871'] |
m = [("chethan", 20, 30), ("john", 40, 50)]
print(m[1])
file = open("writingintocsvfile.csv", "w")
file.write("Name, Age, Weight")
for n in range(len(m)):
file.write('\n')
for k in range(len(m[1])):
file.write(str(m[n][k]))
file.write(',')
file.close() |
# 014_Conversor_de_temperatura.py
# Converts a temperature from ºC to ºF read from standard input.
print()
celsius = float(input("Temperatura em ºC: "))
fahrenheit = celsius * 9.0 / 5.0 + 32.0
print(f"{celsius}ºC equivale a {fahrenheit:.1f}ºF")
print()
|
def add_numbers(num1, num2):
    """Return the sum of *num1* and *num2*."""
    total = num1 + num2
    return total
def subtract_numbers(num1, num2):
    """Return *num1* minus *num2*."""
    difference = num1 - num2
    return difference
def multiply_numbers(num1, num2):
    """Return the product of *num1* and *num2*."""
    product = num1 * num2
    return product
def divide_numbers(num1, num2):
    """Return *num1* divided by *num2* (true division).

    Raises ZeroDivisionError when *num2* is zero, as the original did.
    """
    quotient = num1 / num2
    return quotient
|
def multiples_of_3_or_5(limit: int) -> int:
    """Computes the sum of all the multiples of 3 or 5 below the given limit,
    using tail recursion.

    :param limit: Limit of the values to sum (exclusive).
    :return: Sum of all the multiples of 3 or 5 below the given limit.
    """
    # NOTE: CPython does not eliminate tail calls, so recursion depth grows
    # linearly with ``limit`` (RecursionError for limits near sys.getrecursionlimit()).
    def step(total, current):
        if current < 1:
            return total
        contribution = current if current % 3 == 0 or current % 5 == 0 else 0
        return step(total + contribution, current - 1)
    return step(0, limit - 1)
# Plug-in metadata consumed by the surrounding benchmark/driver harness:
# ``solution`` is the callable to run, ``name`` labels this variant.
solution = multiples_of_3_or_5
name = 'tail recursion if'
|
"""
Python Lists
Lists are the most versatile of Python's
compound data types. A list contains items
separated by commas and enclosed within square
brackets ([]). To some extent, lists are similar
to arrays in C. One of the differences between
them is that all the items belonging to a list
can be of different data type.
The values stored in a list can be accessed using
the slice operator ([ ] and [:]) with indexes starting
at 0 in the beginning of the list and working their way
to end -1. The plus (+) sign is the list concatenation
operator, and the asterisk (*) is the repetition operator.
For example −
"""
# Demonstrates Python list indexing, slicing, repetition, and concatenation.
# Renamed the variable: the original ``list = [...]`` shadowed the builtin
# ``list`` type for the rest of the module.
sample_list = ['abcd', 786, 2.23, 'john', 70.2]
tinylist = [123, 'john']
print(sample_list)          # prints the complete list
print(sample_list[0])       # prints the first element
print(sample_list[1:3])     # prints elements at indexes 1 and 2
print(sample_list[2:])      # prints elements from index 2 onward
print(tinylist * 2)         # prints tinylist repeated twice
print(sample_list + tinylist)  # prints the concatenation of both lists
"""
This file assembles a toolchain for a Mac M1 host using the Clang Compiler and glibc.
It downloads the necessary headers, executables, and pre-compiled static/shared libraries to
the external subfolder of the Bazel cache (the same place third party deps are downloaded with
http_archive or similar functions in WORKSPACE.bazel). These will be able to be used via our
custom c++ toolchain configuration (see //toolchain/clang_toolchain_config.bzl)
"""
def _download_mac_m1_toolchain(ctx):
    """Implementation of the download_mac_m1_toolchain repository rule.

    Currently a stub: it downloads nothing and materializes an empty
    repository. See the module docstring for the intended behavior.
    """
    # TODO(jmbetancourt)
    pass
# https://bazel.build/rules/repository_rules
# Public repository rule; invoked from WORKSPACE.bazel to materialize the
# toolchain into Bazel's external cache. ``attrs`` stays empty until the
# implementation above is filled in.
download_mac_m1_toolchain = repository_rule(
    implementation = _download_mac_m1_toolchain,
    attrs = {},
    doc = "Downloads clang, and all supporting headers, executables, " +
          "and shared libraries required to build Skia on a Mac M1 host",
)
|
class DynamicOptions:
def alphad(self, value="", **kwargs):
"""Defines the mass matrix multiplier for damping.
APDL Command: ALPHAD
Parameters
----------
value
Mass matrix multiplier for damping.
Notes
-----
This command defines the mass matrix multiplier α used to form the
viscous damping matrix [C] = α[M] where [M] is the mass matrix.
Values of α may also be input as a material property (use the ALPD
label on the MP command). If ALPD is included, the ALPD value is added
to the ALPHAD value as appropriate (see Damping Matrices in the
Mechanical APDL Theory Reference). Damping is not used in the static
(ANTYPE,STATIC) or buckling (ANTYPE,BUCKLE) analyses.
This command is also valid in PREP7.
"""
command = f"ALPHAD,{value}"
return self.run(command, **kwargs)
def betad(self, value="", **kwargs):
"""Defines the stiffness matrix multiplier for damping.
APDL Command: BETAD
Parameters
----------
value
Stiffness matrix multiplier for damping.
Notes
-----
This command defines the stiffness matrix multiplier β used to form the
viscous damping matrix [C] = β [K] where [K] is the stiffness matrix.
Values of : β may also be input as a material property (use the BETD
label on the MP command). If BETD is included, the BETD value is added
to the BETAD value as appropriate (see Damping Matrices in the
Mechanical APDL Theory Reference). Damping is not used in the static
(ANTYPE,STATIC) or buckling (ANTYPE,BUCKLE) analyses.
This command is also valid in PREP7.
"""
command = f"BETAD,{value}"
return self.run(command, **kwargs)
def dmprat(self, ratio="", **kwargs):
"""Sets a constant modal damping ratio.
APDL Command: DMPRAT
Parameters
----------
ratio
Modal damping ratio (for example, 2% is input as 0.02).
Notes
-----
Sets a constant damping ratio for use in the mode-superposition
transient (ANTYPE,TRANS) or harmonic (ANTYPE,HARMIC) analysis and the
spectrum (ANTYPE,SPECTR) analysis.
This command is also valid in PREP7.
"""
command = f"DMPRAT,{ratio}"
return self.run(command, **kwargs)
def dmpstr(self, coeff="", **kwargs):
"""Sets a constant structural damping coefficient.
APDL Command: DMPSTR
Parameters
----------
coeff
Structural damping coefficient.
Notes
-----
Sets a constant structural (or hysteretic) damping coefficient for use
in harmonic (ANTYPE,HARMIC) analyses (FULL, MSUP, and VT) and modal
analyses (ANTYPE,MODAL with MODOPT,UNSYM, DAMP or QRDAMP).
Note that for structures with multiple materials, MP,DMPR can also be
used to specify constant structural material damping on a per material
basis. Note that if both DMPSTR and MP,DMPR are specified, the damping
effects are additive.
Caution:: : DMPSTR adds the damping contribution as gK, whereas MP,DMPR
adds the contribution on a per-material basis as 2gK. For more
information, see Damping Matrices in the Mechanical APDL Theory
Reference.
This command is also valid in PREP7.
"""
command = f"DMPSTR,{coeff}"
return self.run(command, **kwargs)
def frqscl(self, scaling="", **kwargs):
"""Turns on automatic scaling of the entire mass matrix and frequency
APDL Command: FRQSCL
range for modal analyses using the Block Lanczos, PCG Lanczos, or
Supernode mode extraction method.
Parameters
----------
scaling
Off - Do not use automatic scaling of the mass matrix and frequency range.
On - Use automatic scaling of the mass matrix and frequency range.
Notes
-----
Use this command to deactivate or force activation of automatic scaling
of the entire mass matrix and frequency range for modal analyses where
the entire mass matrix is significantly different (i.e., orders of
magnitude difference) than the entire stiffness matrix (for example,
due to the particular unit system being used). Where the mass matrix
is significantly smaller compared to the stiffness matrix, the
eigenvalues will tend to approach very large numbers (>10e12), making
the Block Lanczos, PCG Lanczos, or Supernode mode extraction method
less efficient and more likely to miss modes.
ANSYS uses scaling (if appropriate) by default. However, you can issue
FRQSCL,ON to force the entire mass matrix and frequency range to be
scaled to bring the stiffness and mass matrices closer together in
terms of orders of magnitude, improving efficiency and reducing the
likelihood of missed modes. The resulting eigenvalues are then
automatically scaled back to the original system. If you are using
micro MKS units, where the density is typically very small compared to
the stiffness, you may want to issue FRQSCL,ON to force scaling on.
If the stiffness and mass are on the same scale, FRQSCL,ON has no
effect.
This command is available only for modal analyses using the Block
Lanczos, PCG Lanczos, or Supernode mode extraction method (MODOPT,LANB,
LANPCG, or SNODE).
This command is not valid and has no effect when used in conjunction
with the MSAVE,ON command in a modal analysis with the PCG Lanczos mode
extraction method.
"""
command = f"FRQSCL,{scaling}"
return self.run(command, **kwargs)
def harfrq(self, freqb="", freqe="", logopt="", freqarr="", toler="", **kwargs):
"""Defines the frequency range in a harmonic analysis.
APDL Command: HARFRQ
Parameters
----------
freqb
Frequency (Hz) at the beginning of the FREQB to FREQE range (if
FREQE > FREQB). If FREQE is blank, the solution is done only at
frequency FREQB (the central frequency of octave bands, when LogOpt
= OB1, OB2, OB3, OB6, OB12 or OB24).
freqe
Frequency at end of this range. Solutions are done at an interval
of (FREQE-FREQB) / NSBSTP, ending at FREQE. No solution is done at
the beginning of the frequency range. NSBSTP is input via the
NSUBST command. See the EXPSOL command documentation for expansion
pass solutions.
--
Reserved.
logopt
Logarithm frequency span. Solutions are done at an interval of
(log(FREQE) - log(FREQB)) / (NSBSTP-1), (NSBSTP>1). The central
frequency or beginning frequency is used for NSBSTP = 1. Valid
values are:
OB1 - Octave band.
OB2 - 1/2 octave band.
OB3 - 1/3 octave band.
OB6 - 1/6 octave band.
OB12 - 1/12 octave band.
OB24 - 1/24 octave band.
LOG - General logarithm frequency span.
freqarr
An array containing frequency values (Hz). Combined with the
tolerance argument, Toler, these values are merged with values
calculated based on the specifications from FREQB, FREQE, and
LogOpt, as well NSBSTP on the NSUBST command and Clust on the HROUT
command. Enclose the array name in percent (%) signs (for example,
HARFRQ,,,,,%arrname%). Use ``*DIM`` to define the array.
toler
Tolerance to determine if a user input frequency value in FREQARR
is a duplicate and can be ignored. Two frequency values are
considered duplicates if their difference is less than the
frequency range multiplied by the tolerance. The default value is 1
x 10-5.
Notes
-----
Defines the frequency range for loads in the harmonic analysis
(ANTYPE,HARMIC).
Do not use this command for a harmonic ocean wave analysis (HROCEAN).
When frequencies are user-defined, the array FREQARR must be one-
dimensional and contain positive values. User-defined frequency input
is not supported in the following cases:
in a cyclic symmetry harmonic analysis
when the Variational Technology method is used (Method = VT on the
HROPT command)
This command is also valid in PREP7.
"""
command = f"HARFRQ,{freqb},{freqe},{logopt},{freqarr},{toler}"
return self.run(command, **kwargs)
def hrexp(self, angle="", **kwargs):
"""Specifies the phase angle for the harmonic analysis expansion pass.
APDL Command: HREXP
Parameters
----------
angle
Phase angle (degrees) for expansion pass. If ALL (default), use
both 0.0° (real) and 90.0° (imaginary) phase angles.
Notes
-----
Specifies the phase angle where the expansion pass will be done for a
harmonic mode-superposition expansion pass.
For a specific angle, the following real solution is stored in the
results (``*.rst``) file:
If ANGLE is ALL, both the real and imaginary parts of the solution are
stored in the results file.
For more details about the solution equations, see Harmonic Analyses in
the Mechanical APDL Theory Reference.
This command is ignored if the HROPT command has been issued with
Method = VT or Method = VTRU.
This command is also valid in PREP7.
"""
command = f"HREXP,{angle}"
return self.run(command, **kwargs)
def hrocean(self, type_="", nphase="", **kwargs):
"""Perform the harmonic ocean wave procedure (HOWP).
APDL Command: HROCEAN
Parameters
----------
type\_
Specifies how to include ocean wave information in a harmonic
analysis:
HARMONIC - Performs a harmonic analysis using both real and imaginary load vectors
calculated via the harmonic ocean wave procedure (HOWP).
This behavior is the default. This option performs a
harmonic analysis running at a frequency determined by
the wave period (specified via OCTABLE command input).
STATIC - Performs a static analysis using both real and imaginary load vectors
(calculated via HOWP). This option works by performing a
harmonic analysis running at a frequency of 0.0.
OFF - Deactivates a previously activated HOWP and performs a standard harmonic
analysis.
nphase
Positive number specifying the number of phases to calculate
forces. This value must be at least 8. The default value is 20.
Notes
-----
The HROCEAN command applies ocean wave information (obtained via the
OCDATA and OCTABLE commands) in a harmonic analysis (ANTYPE,HARMIC) as
real and imaginary forces.
You can apply only one ocean load at a time.
The applied frequency in the harmonic (Type = HARMONIC) analysis is
based on the wave period input on the OCTABLE command (and not on
HARFRQ command input, which cannot be used). Phase-shift input on the
OCTABLE command is ignored.
HOWP does not generate a damping matrix. If you require a damping
matrix, you must add it separately.
The command applies to regular wave types only (Airy with one wave
component, Wheeler with one wave component, Stokes, and stream
function). Irregular wave types are not supported. For information
about wave types, see Hydrodynamic Loads in the Mechanical APDL Theory
Reference.
The program calculates the forces on each load component of each
element at NPHASE solutions, spread evenly over one wave cycle. Then,
the minimum and maximum, and the phase between them, are calculated.
The command uses the resulting information to generate the real and
imaginary loads.
HOWP cannot be used with stress stiffening.
HOWP works with the full harmonic analysis method (HROPT,FULL) only.
For more information, see Harmonic Ocean Wave Procedure (HOWP) in the
Mechanical APDL Theory Reference.
This command is also valid in PREP7.
"""
command = f"HROCEAN,{type_},{nphase}"
return self.run(command, **kwargs)
def hropt(self, method="", maxmode="", minmode="", mcout="", damp="", **kwargs):
"""Specifies harmonic analysis options.
APDL Command: HROPT
Parameters
----------
method
Solution method for the harmonic analysis:
AUTO - Automatically select the most efficient method. Either the FULL method or the
Variational Technology method is selected depending on the
model. (default method).
FULL - Full method.
MSUP - Mode-superposition method.
VT - Variational Technology method (based on FULL harmonic algorithm).
VTPA - Variational Technology perfect absorber method (based on FULL harmonic
algorithm).
VTRU - Variational Technology reuse method (based on FULL harmonic algorithm).
maxmode
Largest mode number to be used to calculate the response (for
Method = MSUP only). Defaults to the highest mode calculated in
the preceding modal analysis.
minmode
Smallest mode number to be used (for Method = MSUP only). Defaults
to 1.
mcout
Modal coordinates output key (valid only for the mode superposition
method MSUP):
NO - No output of modal coordinates (default).
YES - Output modal coordinates to the text file jobname.MCF.
damp
Damping mode for frequency-dependent material properties (valid
only for the Variational Technology Method VT).
Hysteretic - Not proportional to the frequency.
Viscous - Proportional to the frequency (default).
Notes
-----
Specifies the method of solution for a harmonic analysis
(ANTYPE,HARMIC). If used in SOLUTION, this command is valid only
within the first load step. See the product restrictions indicated
below.
For cyclic symmetry mode-superposition harmonic solutions, MAXMODE and
MINMODE are ignored.
To include residual vectors in your mode-superposition harmonic
analysis, specify RESVEC,ON.
This command is also valid in PREP7.
Distributed ANSYS Restriction: The VTRU method is not supported.
"""
command = f"HROPT,{method},{maxmode},{minmode},{mcout},{damp}"
return self.run(command, **kwargs)
def hrout(self, reimky="", clust="", mcont="", **kwargs):
"""Specifies the harmonic analysis output options.
APDL Command: HROUT
Parameters
----------
reimky
Real/Imaginary print key:
ON - Print complex displacements as real and imaginary components (default).
OFF - Print complex displacements as amplitude and phase angle (degrees).
clust
Cluster option (for HROPT,MSUP):
OFF - Uniform spacing of frequency solutions (default).
ON - Cluster frequency solutions about natural frequencies.
mcont
Mode contributions key (for HROPT,MSUP):
OFF - No print of mode contributions at each frequency (default).
ON - Print mode contributions at each frequency.
Notes
-----
Specifies the harmonic analysis (ANTYPE,HARMIC) output options. If
used in SOLUTION, this command is valid only within the first load
step. OUTPR,NSOL must be specified to print mode contributions at each
frequency.
This command is ignored if the HROPT command has been issued with
Method = VT, VTPA, or VTRU. Displacements are not available at expanded
frequencies with these solution methods.
For cyclic symmetry mode-superposition harmonic solutions, the cluster
option is not available.
This command is also valid in PREP7.
"""
command = f"HROUT,{reimky},{clust},{mcont}"
return self.run(command, **kwargs)
def lvscale(self, fact="", ldstep="", **kwargs):
"""Scales the load vector for mode-superposition analyses.
APDL Command: LVSCALE
Parameters
----------
fact
Scale factor applied to both the real and imaginary (if they exist)
components of the load vector. Defaults to 0.0.
ldstep
Specifies the load step number from the modal analysis
(MODCONT,ON). It corresponds to the load vector number. Defaults to
1. The maximum value is 240.
Notes
-----
Specifies the scale factor for the load vector that was created in a
modal (ANTYPE,MODAL) analysis. Applies only to the mode-superposition
transient analysis (ANTYPE,TRANS), mode-superposition harmonic analysis
(ANTYPE,HARMIC), random vibration analysis (ANTYPE,SPECTR with
SPOPT,PSD), and multiple point response spectrum analysis
(ANTYPE,SPECTR with SPOPT,MPRS). For PSD and MPRS analyses, LVSCALE is
only applicable for pressure loading.
The LVSCALE command supports tabular boundary conditions (%TABNAME_X%)
for FACT input values only as a function of time in the mode-
superposition transient (ANTYPE,TRANS) or as a function of frequency in
mode-superposition harmonic (ANTYPE,HARMIC).
MPC contact generates constraint equations that can include constant
terms (included on the right-hand side of the system equation). The
LVSCALE command scales the constant terms.
In mode-superposition transient and harmonic analyses, all of the load
vectors need to be scaled in the first load step. Use a zero scale
factor if they are not actually used in this first load step. :
Similarly, in random vibration and multipoint response spectrum
analyses, all of the load vectors need to be scaled in the first
participation factor calculation (PFACT). : Use a zero scale factor if
they are not actually used for the first input table.
This command is also valid in PREP7.
"""
command = f"LVSCALE,{fact},{ldstep}"
return self.run(command, **kwargs)
def mascale(self, massfact="", **kwargs):
"""Activates scaling of the entire system matrix.
APDL Command: MASCALE
Parameters
----------
massfact
Scaling factor (> 0) for the mass matrix. Default = 1.0.
Notes
-----
This command is supported in the first load step of the analysis only.
The following features are not affected by the scaling:
* Ocean loading
* Steady-state rolling SSTATE
The mass-related information (mass, center of mass, and mass
moments of inertia) printed in the mass summary is based on
unscaled mass properties.
"""
return self.run(f"MASCALE,{massfact}", **kwargs)
def mdamp(self, stloc="", v1="", v2="", v3="", v4="", v5="", v6="", **kwargs):
"""Defines the damping ratios as a function of mode.
APDL Command: MDAMP
Parameters
----------
stloc
Starting location in table for entering data. For example, if
STLOC = 1, data input in the V1 field applies to the first constant
in the table. If STLOC = 7, data input in the V1 field applies to
the seventh constant in the table, etc. Defaults to the last
location filled + 1.
v1, v2, v3, . . . , v6
Data assigned to six locations starting with STLOC. If a value is
already in this location, it will be redefined. Blank values for
V2 to V6 leave the corresponding previous value unchanged.
Notes
-----
Defines the damping ratios as a function of mode. Table position
corresponds to mode number. These ratios are added to the DMPRAT
value, if defined. Use STAT command to list current values. Applies
to the mode-superposition harmonic (ANTYPE,HARMIC), the mode-
superposition linear transient dynamic (ANTYPE,TRANS), and the spectrum
(ANTYPE,SPECTR) analyses. Repeat MDAMP command for additional
constants (10000 maximum).
MDAMP can also be defined in a substructure analysis using component
mode synthesis with fixed-interface method (ANTYPE,SUBSTR with
CMSOPT,FIX and SEOPT,,,3). The damping ratios are added to the diagonal
of the reduced damping matrix as explained in Component Mode Synthesis
(CMS).
This command is also valid in PREP7.
"""
command = f"MDAMP,{stloc},{v1},{v2},{v3},{v4},{v5},{v6}"
return self.run(command, **kwargs)
def mdplot(self, function="", dmpname="", scale="", **kwargs):
"""Plots frequency-dependent modal damping coefficients calculated by
APDL Command: MDPLOT
DMPEXT.
Parameters
----------
function
Function to display.
d_coeff - Damping coefficient
s_coeff - Squeeze coefficient
d_ratio - Damping ratio
s_ratio - Squeeze stiffness ratio
dmpname
Array parameter name where damping information is stored. Defaults
to d_damp.
scale
Indicates whether to perform a linear or a double logarithmic plot.
LIN - Perform a linear plot. Default
LOG - Perform a double logarithmic plot.
Notes
-----
See Thin Film Analysis for more information on thin film analyses.
"""
command = f"MDPLOT,{function},{dmpname},{scale}"
return self.run(command, **kwargs)
def midtol(self, key="", tolerb="", resfq="", **kwargs):
"""Sets midstep residual criterion values for structural transient
APDL Command: MIDTOL
analyses.
Parameters
----------
key
Midstep residual criterion activation key.
ON or 1 - Activate midstep residual criterion in a structural transient analysis
(default).
OFF or 0 - Deactivate midstep residual criterion in a structural transient analysis.
STAT - List the current midstep residual criterion setting.
tolerb
Midstep residual tolerance or reference value for bisection.
Defaults to 100 times the TOLER setting of the CNVTOL command.
resfq
Key to use response frequency computation along with midstep
residual criterion for automatic time stepping (AUTOTS,ON).
OFF or 0 - Do not calculate response frequency and do not consider it in the automatic
time stepping (default).
ON or 1 - Calculate response frequency and consider it in the automatic time stepping.
Notes
-----
When TOLERB is input as a tolerance value (TOLERB > 0), the typical
force and/or moment from the regular time step is used in the midstep
residual force and/or moment comparison.
In a structural transient analysis, the suggested tolerance range of
TOLERB (TOLERB > 0) is as follows:
If the structural transient analysis is elastic and linear, and the
load is constant or changes slowly, use a smaller value of TOLERB to
achieve an accurate solution. If the analysis involves large amounts of
energy dissipation, such as elastic-plastic material, TOLERB can be
larger. If the analysis includes contact or rapidly varying loads, a
smaller value of TOLERB should be used if high frequency response is
important; otherwise, a larger value of TOLERB may be used to enable
faster convergence with larger time step sizes.
For more information on how the midstep criterion is used by the
program, see Midstep Residual for Structural Dynamic Analysis in the
Mechanical APDL Theory Reference.
This command is also valid in PREP7.
"""
command = f"MIDTOL,{key},{tolerb},{resfq}"
return self.run(command, **kwargs)
def modcont(self, mlskey="", enforcedkey="", **kwargs):
"""Specify additional modal analysis options.
APDL Command: MODCONT
Parameters
----------
mlskey
Multiple load step key:
OFF - Perform the modal analysis (compute the eigenvalues and the load vector) for
each load step. (default)
ON - Perform the modal analysis (compute the eigenvalues and the load vector) only
for the first load step; form the load vector for each
subsequent load step (without repeating the eigenvalue
calculations) and write all load vectors to the Jobname.MODE
file for downstream mode-superposition analyses.
enforcedkey
Enforced motion key:
OFF - Do not calculate enforced static modes. (default)
ON - Calculate enforced static modes and write them to the Jobname.MODE file.
Notes
-----
Specifies additional modal analysis (ANTYPE,MODAL) options.
Use the LVSCALE command to apply the desired load in a mode-
superposition transient or harmonic analysis.
The maximum number of load vectors that can be used in the downstream
mode-superposition transient or harmonic analysis is: 240.
Generation of multiple loads (MLSkey = ON) is supported by the Block
Lanczos, PCG Lanczos, Supernode, Subspace, Unsymmetric, and QR damped
modal methods.
The enforced motion calculation (EnforcedKey = ON) is supported by the
Block Lanczos and Supernode mode extraction methods.
"""
command = f"MODCONT,{mlskey},{enforcedkey}"
return self.run(command, **kwargs)
def modseloption(
self, dir1="", dir2="", dir3="", dir4="", dir5="", dir6="", **kwargs
):
"""APDL Command: MODSELOPTION
Parameters
----------
dir1, dir2, dir3, dir4, dir5, dir6
Selection of the direction to be expanded.
For ``modeselmethod=effm`` on the MXPAND command, the
directions correspond to the global Cartesian directions,
i.e. 1=X, 2=Y, 3=Z, 4=ROTX, 5=ROTY, and 6=ROTZ. If dir1 = YES,
then any mode in this direction is expanded if its modal
effective mass divided by the total mass (modal effective mass
ratio) is greater than SIGNIF on the MXPAND command. If
dir1=NO, then the specified direction is not considered as a
criterion for expansion. If dir1 is given a numerical decimal
value, modes in that direction are selected (starting from the
ones with the largest modal effective mass ratios to the
smallest) until the sum of their modal effective mass ratio
equals this requested threshold.
For ModeSelMethod = MODC on the MXPAND command, dir1
corresponds to the first input spectrum, dir2 to the second,
etc. (i.e. for multiple spectrum inputs; the actual directions
correspond to their respective SED directions). If dir1=YES,
then any mode in this spectrum is ex- panded if its mode
coefficient divided by the largest mode coefficient is greater
than SIGNIF on the MXPAND command. If dir1=NO, then the
specified direction is not considered as a criterion for
expansion.
Notes
-----
This command is only applicable when a mode selection method is defined
(ModeSelMethod on the MXPAND command). See Using Mode Selection in the
Mechanical APDL Structural Analysis Guide for more details.
If a numerical value is specified for a direction, the significance
threshold (SIGNIF on the MXPAND command) is ignored for the selection
of the modes in this direction.
If a mode is determined to be expanded in any of the 6 directions, it
will be expanded in the .MODE file. : Otherwise, the mode will not be
expanded.
The default behavior is to consider all directions for expansion.
"""
command = f"MODSELOPTION,{dir1},{dir2},{dir3},{dir4},{dir5},{dir6}"
return self.run(command, **kwargs)
def modopt(
self,
method="",
nmode="",
freqb="",
freqe="",
cpxmod="",
nrmkey="",
modtype="",
blocksize="",
freqmod="",
**kwargs,
):
"""Specifies modal analysis options.
APDL Command: MODOPT
Parameters
----------
method
Mode-extraction method to be used for the modal analysis.
LANB - Block Lanczos
LANPCG - PCG Lanczos
SNODE - Supernode modal solver
SUBSP - Subspace algorithm
UNSYM - Unsymmetric matrix
DAMP - Damped system
QRDAMP - Damped system using QR algorithm
VT - Variational Technology
nmode
The number of modes to extract. The value can depend on the value
supplied for Method. NMODE has no default and must be specified. If
Method = LANB, LANPCG, or SNODE, the number of modes that can be
extracted can equal the DOFs in the model after the application of
all boundary conditions.
freqb
The beginning, or lower end, of the frequency range of interest.
freqe
The ending, or upper end, of the frequency range of interest (in
Hz). The default for Method = SNODE is described below. The default
for all other methods is to calculate all modes, regardless of
their maximum frequency.
cpxmod
Complex eigenmode key. (Valid only when Method = QRDAMP or Method =
UNSYM).
AUTO - Determine automatically if the eigensolutions are real or complex and output
them accordingly. This is the default for Method = UNSYM.
Not supported for Method = QRDAMP.
ON or CPLX - Calculate and output complex eigenmode shapes.
OFF or REAL - Do not calculate complex eigenmode shapes. This is required if a mode-
superposition analysis is intended after the modal
analysis for Method = QRDAMP. This is the default for
this method.
nrmkey
Mode shape normalization key:
OFF - Normalize the mode shapes to the mass matrix (default).
ON - Normalize the mode shapes to unity instead of to the mass matrix. If a
subsequent spectrum or mode-superposition analysis is planned,
the mode shapes should be normalized to the mass matrix
(Nrmkey = OFF).
modtype
Type of modes calculated by the eigensolver. Only applicable to the
unsymmetric eigensolver.
Blank - Right eigenmodes. This value is the default.
BOTH - Right and left eigenmodes. The left eigenmodes are written to Jobname.LMODE.
This option must be activated if a mode-superposition
analysis is intended.
blocksize
The block vector size to be used with the Block Lanczos or
Subspace eigensolver (used only when Method = LANB or
SUBSP). BlockSize must be an integer value between 0 and
16. When BlockSize = zero or blank, the code decides the
block size internally (normally, a value of 8 is used for
LANB and a value of 6 is used for SUBSP). Typically,
higher BlockSize values are more efficient under each of
the following conditions:
- When running in out-of-core mode and there is not enough
physical memory to buffer all of the files written by
the Block Lanczos or Subspace eigensolver (and thus, the
time spent doing I/O is considerable).
- Many modes are requested (>100).
- Higher-order solid elements dominate the model.
The memory usage only slightly increases as BlockSize is
increased. It is recommended that you use a value
divisible by 4 (4, 8, 12, or 16).
freqmod
The specified frequency when the solved eigenvalues are no
longer frequencies (for example, the model has the Floquet
periodic boundary condition). In a modal analysis, the
Floquet periodic boundary condition (body load FPBC) is
only valid for the acoustic elements FLUID30, FLUID220,
and FLUID221.
Notes
-----
Specifies modal analysis (ANTYPE,MODAL) options. Additional options
used only for the Supernode (SNODE) eigensolver are specified by the
SNOPTION command. Additional options used only for the Subspace (SUBSP)
eigensolver are specified by the SUBOPT command. If Method = LANPCG,
ANSYS automatically switches to the PCG solver internally for this
modal analysis. You can further control the efficiency of the PCG
solver with the PCGOPT and EQSLV commands.
For models that involve a non-symmetric element stiffness matrix, as in
the case of a contact element with frictional contact, the QRDAMP
eigensolver (MODOPT, QRDAMP) extracts modes in the modal subspace
formed by the eigenmodes from the symmetrized eigenproblem. The QRDAMP
eigensolver symmetrizes the element stiffness matrix on the first pass
of the eigensolution, and in the second pass, eigenmodes are extracted
in the modal subspace of the first eigensolution pass. For such non-
symmetric eigenproblems, you should verify the eigenvalue and eigenmode
results using the non-symmetric matrix eigensolver (MODOPT,UNSYM).
The DAMP and QRDAMP options cannot be followed by a subsequent spectrum
analysis. The UNSYM method supports spectrum analysis when
eigensolutions are real.
This command is also valid in PREP7.
Distributed ANSYS Restriction: The VT extraction method is not
supported in Distributed ANSYS. All other extraction methods are
supported. However, PCG Lanczos, SUBSP, UNSYM, DAMP, and QRDAMP are the
only distributed eigensolvers that will run a fully distributed
solution. The Block Lanczos and Supernode eigensolvers are not
distributed eigensolvers; therefore, you will not see the full
performance improvements with these methods that you would with a fully
distributed solution.
"""
command = f"MODOPT,{method},{nmode},{freqb},{freqe},{cpxmod},{nrmkey},{modtype},{blocksize},,,,{freqmod}"
return self.run(command, **kwargs)
    def mxpand(
        self,
        nmode="",
        freqb="",
        freqe="",
        elcalc="",
        signif="",
        msupkey="",
        modeselmethod="",
        **kwargs,
    ):
        """Specifies the number of modes to expand and write for a modal or
        buckling analysis.

        APDL Command: MXPAND

        Parameters
        ----------
        nmode
            Number of modes or array name (enclosed in percent signs) to expand
            and write. If blank or ALL, expand and write all modes within the
            frequency range specified. If -1, do not expand and do not write
            modes to the results file during the analysis. If an array name is
            input, the array must contain 1 for the expanded modes and zero
            otherwise, where the array index corresponds to the mode number. To
            specify an array containing the individual modes to expand, enclose
            the array name in percent (%) signs (for example,
            MXPAND,%arrname%). Use the ``*DIM`` command to define the array.

        freqb
            Beginning, or lower end, of frequency range of interest. If FREQB
            and FREQE are both blank, expand and write the number of modes
            specified without regard to the frequency range. Defaults to the
            entire range.

        freqe
            Ending, or upper end, of frequency range of interest.

        elcalc
            Element calculation key:

            NO - Do not calculate element results, reaction forces, and energies (default).

            YES - Calculate element results, reaction forces, energies, and the nodal degree of
            freedom solution.

        signif
            Expand only those modes whose significance level exceeds the SIGNIF
            threshold (only applicable when ModeSelMethod is defined).

        msupkey
            Element result superposition key:

            NO - Do not write element results to the mode file Jobname.MODE.

            YES - Write element result to the mode file for use in the expansion pass of a
            subsequent mode-superposition PSD, transient, or harmonic
            analysis (default if Elcalc = YES and the mode shapes are
            normalized to the mass matrix).

        modeselmethod
            Methods for mode selection (not supported for complex
            eigensolvers):

            blank - No mode selection is performed (default).

            MODM - The mode selection is based on the modal effective masses.

            MODC - The mode selection is based on the mode coefficients.

            DDAM - The mode selection is based on DDAM procedure (see Mode Selection Based on DDAM
            Procedure in the Mechanical APDL Structural Analysis Guide
            for more information). This option is applicable only to
            DDAM spectrum analysis.

        Notes
        -----
        Specifies the number of modes to expand and write over a frequency
        range for a modal (ANTYPE,MODAL) or buckling (ANTYPE,BUCKLE) analysis.

        If used in SOLUTION, this command is valid only within the first load
        step.

        There is no limit on the number of expanded modes (NMODE). However,
        there is a limit on the maximum number of modes used via the ``*GET,,MODE``
        command, mode combinations, and the MDAMP command.

        With MSUPkey = YES, the computed element results (Elcalc = YES) are
        written to Jobname.MODE for use in subsequent downstream mode-
        superposition analyses, including harmonic, transient, and PSD
        analyses. This significantly reduces computation time for the
        combination or expansion passes. For limitations, see Option: Number of
        Modes to Expand (MXPAND) in the Mechanical APDL Structural Analysis
        Guide.

        If a mode selection method (ModeSelMethod) is defined, only the
        selected modes will be expanded. See Using Mode Selection in the
        Mechanical APDL Structural Analysis Guide for more details about the
        procedure.

        For array input (NMODE), the array must be dimensioned to be the size
        of the number of modes extracted (NMODE on the MODOPT command). A value
        of 1 in the array indicates the mode is to be expanded, and a value of
        0 indicates not to expand the mode. For the DAMP modal solution, the
        modes are in pairs, so be sure to verify that both modes of a pair have
        the same value. (For example, if modes #3 and #4 are a pair, indices 3
        and 4 in the array should have the same value, 0 or 1.)

        For linear perturbation modal analyses, you must set both Elcalc and
        MSUPkey to YES so that the downstream stress expansion pass can produce
        a solution consistent with the linear or nonlinear base (static or full
        transient) analysis. The prestressed nonlinear element history (saved
        variables) is accessible only in the first and second phases of the
        linear perturbation. The downstream MSUP or PSD analysis can only reuse
        the nonlinear information contained in the Jobname.MODE file that is
        generated in the linear perturbation.

        In a Distributed ANSYS analysis, you must issue MXPAND to specify the
        number of modes to expand when computing the modes and mode shapes. In
        a Distributed ANSYS run, MXPAND cannot be issued in an expansion pass
        (EXPASS).

        This command is also valid in PREP7.
        """
        command = f"MXPAND,{nmode},{freqb},{freqe},{elcalc},{signif},{msupkey},{modeselmethod}"
        return self.run(command, **kwargs)
    def qrdopt(self, reusekey="", symmeth="", cmccoutkey="", **kwargs):
        """Specifies additional QRDAMP modal analysis options.

        APDL Command: QRDOPT

        Parameters
        ----------
        reusekey
            Reuse key for method=QRDAMP specified in MODOPT command.

            ON - Reuse the symmetric eigensolution from the previous
            load steps or from the previous solution.

            OFF - Do not reuse (calculates symmetric eigensolution at
            current load step). This is the default.

        symmeth
            Mode-extraction method to be used for the symmetric eigenvalue
            problem.

            LANB - Block Lanczos (default for shared-memory parallel processing).

            SUBSP - Subspace algorithm (default for distributed-memory
            parallel processing).

        cmccoutkey
            Complex Modal Contribution Coefficients (CMCC) output
            key. See Calculate the Complex Mode Contribution
            Coefficients (CMCC) in the Structural Analysis Guide for
            details and usage.

            ON - Output the CMCC to the text file Jobname.CMCC.

            OFF - Do not output the CMCC. This is the default.

        Notes
        -----
        If the filename.modesym file exists in the working directory
        and ReuseKey = ON, filename.modesym will be reused. If
        filename.modesym does not exist in the working directory, the
        symmetric eigensolution will be calculated.

        When ReuseKey=ON, both the new modal analysis
        (filename.modesym usage) and the preceding modal analysis
        (filename.modesym generation) must be performed using the same
        product version number.

        The mode-extraction method changes depending on the type of
        parallelism involved. For performance reasons, the subspace
        method is used with distributed-memory parallel processing
        (Distributed ANSYS) runs, while the Block Lanczos method is
        used with shared-memory parallel processing runs.
        """
        # The run of three commas deliberately leaves two command fields
        # blank so SymMeth and CMCCoutKey land in their documented positions.
        return self.run(f"QRDOPT,{reusekey},,,{symmeth},{cmccoutkey}", **kwargs)
def rigid(self, dof1="", dof2="", dof3="", dof4="", dof5="", dof6="", **kwargs):
"""Specifies known rigid body modes (if any) of the model.
APDL Command: RIGID
Parameters
----------
dof1, dof2, dof3, . . . , dof6
Up to six global Cartesian directions of the rigid modes.
For a completely free 2-D model, use ALL or UX, UY, ROTZ.
For a completely free 3-D model, use ALL or UX, UY, UZ,
ROTX, ROTY, ROTZ. For a constrained model, use UX, UY,
UZ, ROTX, ROTY, or ROTZ, as appropriate, to specify each
and every unconstrained direction which exists in the
model (not specifying every direction may cause
difficulties in extracting the modes).
Notes
-----
Specifies known rigid body modes (if any) of the model. This
command applies only to a component mode synthesis (CMS)
analysis (see the CMSOPT command). Any rigid body modes
specified must be permitted by the applied displacement
constraints (i.e., do not specify a rigid body mode in a
constrained direction). Reissue the command to redefine the
specification. If used in SOLUTION, this command is valid
only within the first load step.
This command is also valid in PREP7.
Distributed ANSYS Restriction: This command is not supported in
Distributed ANSYS.
"""
command = f"RIGID,{dof1},{dof2},{dof3},{dof4},{dof5},{dof6}"
return self.run(command, **kwargs)
def subopt(self, option="", value1="", **kwargs):
"""Specifies Subspace (SUBSP) eigensolver options.
APDL Command: SUBOPT
Parameters
----------
option
One of the following options:
STRMCK - Controls whether a Sturm sequence check is performed.
Value1: - OFF
Do not perform Sturm sequence check (default). - ON
Perform Sturm sequence check. - MEMORY
Controls the memory allocation strategy for the Subspace eigensolver. - Value1:
AUTO - Use the default memory allocation strategy (default).
INCORE - Force the Subspace eigensolver to allocate in-core memory.
OUTOFCORE - Force the Subspace eigensolver to use scratch files.
Notes
-----
SUBOPT specifies options to be used with the Subspace eigensolver
(MODOPT,SUBSP) during a modal analysis.
"""
command = f"SUBOPT,{option},{value1}"
return self.run(command, **kwargs)
def timint(self, key="", lab="", **kwargs):
"""Turns on transient effects.
APDL Command: TIMINT
Parameters
----------
key
Transient effects key:
OFF - No transient effects (static or steady-state).
ON - Include transient (mass or inertia) effects.
lab
Degree of freedom label:
ALL - Apply this key to all appropriate labels (default).
STRUC - Apply this key to structural DOFs.
THERM - Apply this key to thermal DOFs.
ELECT - Apply this key to electric DOFs.
MAG - Apply this key to magnetic DOFs.
FLUID - Apply this key to fluid DOFs.
DIFFU - Apply this key to concentration of DOFs.
Notes
-----
Indicates whether this load step in a full transient analysis should
use time integration, that is, whether it includes transient effects
(e.g. structural inertia, thermal capacitance) or whether it is a
static (steady-state) load step for the indicated DOFs. Transient
initial conditions are introduced at the load step having Key = ON.
Initial conditions are then determined from the previous two substeps.
Zero initial velocity and acceleration are assumed if no previous
substeps exist. See the Structural Analysis Guide, the Thermal
Analysis Guide, and the Low-Frequency Electromagnetic Analysis Guide
for details.
This command is also valid in PREP7.
"""
command = f"TIMINT,{key},{lab}"
return self.run(command, **kwargs)
    def tintp(
        self,
        gamma="",
        alpha="",
        delta="",
        theta="",
        oslm="",
        tol="",
        avsmooth="",
        alphaf="",
        alpham="",
        **kwargs,
    ):
        """Defines transient integration parameters.

        APDL Command: TINTP

        Parameters
        ----------
        gamma
            Amplitude decay factor for 2nd order transient integration, e.g.,
            structural dynamics (used only if ALPHA, DELTA, ALPHAF, and ALPHAM
            are blank). Defaults to 0.005.

        alpha
            2nd order transient integration parameter (used only if GAMMA is
            blank). Defaults to 0.2525.

        delta
            2nd order transient integration parameter (used only if GAMMA is
            blank). Defaults to 0.5050.

        theta
            1st order transient (e.g., thermal transient) integration
            parameter. Defaults to 1.0.

        oslm
            Specifies the oscillation limit criterion for automatic time
            stepping of 1st order transients (e.g., thermal transients).
            Defaults to 0.5 with a tolerance of TOL.

        tol
            Tolerance applied to OSLM. Defaults to 0.0.

        avsmooth
            Smoothing flag option:

            0 - Include smoothing of the velocity (1st order system)
            or the acceleration (2nd order system) (default).

            1 - Do not include smoothing.

        alphaf
            Interpolation factor in HHT algorithm for force and damping terms
            (used only if GAMMA is blank). Defaults to 0.005.

        alpham
            Interpolation factor in HHT algorithm for inertial term (used only
            if GAMMA is blank). Defaults to 0.0.

        Notes
        -----
        Used to define the transient integration parameters. For more
        information on transient integration parameters, refer to the
        Mechanical APDL Theory Reference.

        For structural transient analyses, you may choose between the
        Newmark and HHT time integration methods (see the TRNOPT
        command). In this case, if GAMMA is input and the integration
        parameters ALPHA, DELTA, ALPHAF, and ALPHAM are left blank,
        the program will calculate the integration
        parameters. Alternatively, you can input these integration
        parameters directly on this command. However, for the
        unconditional stability and second order accuracy of the time
        integration, these parameters should satisfy a specific
        relationship, as described in Description of Structural and
        Other Second Order Systems of the Mechanical APDL Theory
        Reference.

        In a transient piezoelectric analysis, required input for this
        command is ALPHA = 0.25, DELTA = 0.5, and THETA = 0.5. For a
        coupled electromagnetic-circuit transient analysis, use THETA
        = 1.0, the default value, to specify the backward Euler
        method.

        This command is also valid in PREP7.
        """
        # The run of commas after {tol} intentionally leaves two unused
        # command fields blank before AVSMOOTH.
        command = f"TINTP,{gamma},{alpha},{delta},{theta},{oslm},{tol},,,{avsmooth},{alphaf},{alpham}"
        return self.run(command, **kwargs)
    def trnopt(
        self,
        method="",
        maxmode="",
        minmode="",
        mcfwrite="",
        tintopt="",
        vaout="",
        dmpsfreq="",
        engcalc="",
        mckey="",
        **kwargs,
    ):
        """Specifies transient analysis options.

        APDL Command: TRNOPT

        Parameters
        ----------
        method
            Solution method for the transient analysis:

            FULL - Full method (default).

            MSUP - Mode-superposition method.

            VT - Variational Technology method. (Removed by V18.2)

        maxmode
            Largest mode number to be used to calculate the response
            (for Method = MSUP). Defaults to the highest mode
            calculated in the preceding modal analysis.

        minmode
            Smallest mode number to be used (for Method = MSUP).
            Defaults to 1.

        mcfwrite
            Modal coordinates output key to the .mcf file (valid only
            for the mode-superposition method):

            NO - No output of modal coordinates (default).

            YES - Output modal coordinates to the text file Jobname.MCF.

        tintopt
            Time integration method for the transient analysis:

            NMK or 0 - Newmark algorithm (default).

            HHT or 1 - HHT algorithm (valid only for the full transient method).

        vaout
            Velocities and accelerations output key (valid only for
            mode- superposition transient analysis):

            NO - No output of velocities and accelerations (default).

            YES - Write velocities and accelerations on the reduced displacement file
            Jobname.RDSP.

        dmpsfreq
            Average excitation frequency (Hz) for the calculation of
            equivalent viscous damping from structural damping input
            (DMPSTR and MP,DMPS). See Damping for more
            details. Defaults to zero. If an excitation frequency is
            not specified, structural damping is ignored. If tabular
            excitation frequency data is provided in a full transient
            analysis (DMPSFreqTab on DMPSTR), it supersedes this
            value.

        engcalc
            Additional element energies calculation key:

            NO - Do not calculate additional element energies
            (default).

            YES - Calculate damping energy and work done by external
            loads.

        mckey
            Modal coordinates output key to the .rdsp file (valid only
            for the mode-superposition method):

            AUTO - Writing depends on the modal analysis settings of
            the MXPAND command (default).

            YES - Always write the modal coordinates to the file
            Jobname.rdsp. A subsequent expansion pass (EXPASS) is not
            supported.

        Notes
        -----
        Specifies transient analysis (ANTYPE,TRANS) options. If used
        in SOLUTION, this command is valid only within the first load
        step. Use the TINTP command to set transient integration
        parameters.

        To include residual vectors in your mode-superposition
        transient analysis (Method = MSUP), specify RESVEC,ON.

        Method = MSUP is not available for ocean loading.

        By default in a mode-superposition transient analysis,
        reaction force and other force output contains only static
        contributions. If you want to postprocess the velocities,
        accelerations, and derived results (Lab = TOTAL, DAMP, or
        INERT on the FORCE command), set VAout = YES to activate
        velocity and acceleration output.

        The calculation of additional energies (EngCalc = YES) is
        valid only for the full solution method (Method = FULL). The
        Jobname.ESAV file is always saved in this case. The numerical
        integration for damping energy and work are consistent only if
        solution data are written to the database for every substep
        (OUTRES,ALL,ALL, OUTRES,ESOL,ALL, or OUTRES,VENG, ALL). For
        more information, see Damping Energy and Work Done by External
        Loads in the Mechanical APDL Theory Reference.

        This command is also valid in PREP7.
        """
        # The doubled comma after {maxmode} skips one unused command field so
        # MINMODE and the later arguments land in their documented positions.
        command = f"TRNOPT,{method},{maxmode},,{minmode},{mcfwrite},{tintopt},{vaout},{dmpsfreq},{engcalc},{mckey}"
        return self.run(command, **kwargs)
|
class DefinitionGroups(object, IEnumerable[DefinitionGroup], IEnumerable, IDisposable):
    """A specialized set of definition groups that allows creation of new groups.

    NOTE(review): this looks like an auto-generated IronPython stub of a
    .NET API type; every method body below is a placeholder (``pass``) and
    the real behavior lives in managed code.
    """
    def Contains(self, definitionGroup):
        """
        Contains(self: DefinitionGroups,definitionGroup: DefinitionGroup) -> bool

        Tests for the existence of a definition group within the collection.

        definitionGroup: The definition group to look for.
        Returns: True if the definition group was found,false otherwise.
        """
        pass
    def Create(self, name):
        """
        Create(self: DefinitionGroups,name: str) -> DefinitionGroup

        Create a new parameter definition group using the name provided.

        name: The name of the group to be created.
        Returns: If successful a reference to the new parameter group is returned,otherwise null.
        """
        pass
    def Dispose(self):
        """ Dispose(self: DefinitionGroups) """
        pass
    def GetEnumerator(self):
        """
        GetEnumerator(self: DefinitionGroups) -> IEnumerator[DefinitionGroup]

        Retrieves an enumerator to the collection.
        Returns: The enumerator.
        """
        pass
    # The dunder methods below surface the .NET interface members through
    # Python protocols (membership test, context manager, iteration,
    # indexing).
    def __contains__(self, *args):
        """ __contains__[DefinitionGroup](enumerable: IEnumerable[DefinitionGroup],value: DefinitionGroup) -> bool """
        pass
    def __enter__(self, *args):
        """ __enter__(self: IDisposable) -> object """
        pass
    def __exit__(self, *args):
        """ __exit__(self: IDisposable,exc_type: object,exc_value: object,exc_back: object) """
        pass
    def __getitem__(self, *args):
        """ x.__getitem__(y) <==> x[y] """
        pass
    def __init__(self, *args):
        """ x.__init__(...) initializes x; see x.__class__.__doc__ for signature """
        pass
    def __iter__(self, *args):
        """ __iter__(self: IEnumerable) -> object """
        pass
    def __repr__(self, *args):
        """ __repr__(self: object) -> str """
        pass
    # Stub properties: the getter/setter/deleter lambdas are placeholders,
    # not the real implementation.
    IsEmpty = property(lambda self: object(), lambda self, v: None, lambda self: None)
    """Identifies if the definition groups collection is empty.
    Get: IsEmpty(self: DefinitionGroups) -> bool
    """
    Size = property(lambda self: object(), lambda self, v: None, lambda self: None)
    """The number of definition groups in the collection.
    Get: Size(self: DefinitionGroups) -> int
    """
|
def solution():
    """Read the day-11 puzzle input and print both answers."""
    # Use a context manager so the input file is closed even if parsing or
    # either part raises (the original leaked the file handle).
    with open(r'inputs\day11.in') as f:
        data = f.readlines()
    print('Part 1 result: ' + str(part1(data)))
    print('Part 2 result: ' + str(part2(data)))
def part1(data):
# 2d array of ints that represents the octopus' current value
octomap = []
for line in data:
octomap.append([int(x) for x in line.strip()])
adj = {
# up one row all 3 spots
(-1, 1),
(-1, 0),
(-1, -1),
# same row, left and right spots
(0, -1),
(0, 1),
# down 1 row, all 3 spots
(1, -1),
(1, 0),
(1, 1)
}
# track the number of flashes
flash_count = 0
rows = len(octomap)
cols = len(octomap[0])
# 100 steps for p1
for step in range(100):
# get the new octomap by adding 1 to each value
octomap = [[x + 1 for x in row] for row in octomap]
# our stack is a list which consists of a tuple of the row and column of each value that is greater than 9, a.k.a the spots a flash should occur at
stack = [(row, col) for row in range(rows) for col in range(cols) if octomap[row][col] > 9]
# while we have stuff in the stack
while stack:
# pop off the top value into our row and column variables
row, col = stack.pop()
# increment our flash count
flash_count += 1
# loop through the neighbors of the popped value
for dx, dy in adj:
if 0 <= row + dx < rows and 0 <= col + dy < cols:
# if its a valid neighbor, add 1 to it, and if it is now a 10, add it to the stack
octomap[row + dx][col + dy] += 1
if octomap[row + dx][col + dy] == 10:
stack.append((row + dx, col + dy))
# at the end of each step, set all values greater than 9 to 0, because they flashed that step
octomap = [[0 if x > 9 else x for x in row] for row in octomap]
return flash_count
def part2(data):
    """Return the first step on which every octopus flashes simultaneously.

    ``data`` is the raw puzzle input: one line of digits per grid row.
    Generalized over the original: the "all flashed" test compares against
    ``rows * cols`` instead of a hard-coded 100, so any grid size works
    (the puzzle's 10x10 input behaves identically).
    """
    # 2d array of ints of the octopus' current value
    octomap = [[int(x) for x in line.strip()] for line in data]
    # Offsets of the eight neighbouring cells (orthogonal + diagonal).
    adj = {
        (-1, 1), (-1, 0), (-1, -1),
        (0, -1), (0, 1),
        (1, -1), (1, 0), (1, 1),
    }
    rows = len(octomap)
    cols = len(octomap[0])
    total = rows * cols  # a synchronized step flashes every octopus exactly once
    step = 0
    while True:
        step += 1
        # Every octopus gains one unit of energy.
        octomap = [[x + 1 for x in row] for row in octomap]
        # Worklist of cells that must flash this step.
        stack = [(row, col) for row in range(rows) for col in range(cols) if octomap[row][col] > 9]
        step_flashes = 0
        while stack:
            row, col = stack.pop()
            step_flashes += 1
            for dx, dy in adj:
                if 0 <= row + dx < rows and 0 <= col + dy < cols:
                    octomap[row + dx][col + dy] += 1
                    # Exactly-10 means the neighbour just crossed the
                    # threshold now, so it flashes once and only once.
                    if octomap[row + dx][col + dy] == 10:
                        stack.append((row + dx, col + dy))
        # Flashed cells reset to 0 for the next step.
        octomap = [[0 if x > 9 else x for x in row] for row in octomap]
        if step_flashes == total:
            return step
solution() |
{
"variables": {
"boost_lib": "<!(node -p \"process.env.BOOST_LIB || '../../deps/boost/stage/lib'\")",
"boost_dir": "<!(node -p \"process.env.BOOST_DIR || 'boost'\")",
"conditions": [
["target_arch=='x64'", {
"arch": "x64",
}],
["target_arch=='ia32'", {
"arch": "x32",
}],
],
},
"target_defaults": {
"libraries": [
"Shlwapi.lib",
"Version.lib",
"<(boost_lib)/libboost_atomic-vc141-mt-s-<(arch)-1_67.lib",
"<(boost_lib)/libboost_atomic-vc141-mt-sgd-<(arch)-1_67.lib",
"<(boost_lib)/libboost_chrono-vc141-mt-s-<(arch)-1_67.lib",
"<(boost_lib)/libboost_chrono-vc141-mt-sgd-<(arch)-1_67.lib",
"<(boost_lib)/libboost_context-vc141-mt-s-<(arch)-1_67.lib",
"<(boost_lib)/libboost_context-vc141-mt-sgd-<(arch)-1_67.lib",
"<(boost_lib)/libboost_coroutine-vc141-mt-s-<(arch)-1_67.lib",
"<(boost_lib)/libboost_coroutine-vc141-mt-sgd-<(arch)-1_67.lib",
"<(boost_lib)/libboost_date_time-vc141-mt-s-<(arch)-1_67.lib",
"<(boost_lib)/libboost_date_time-vc141-mt-sgd-<(arch)-1_67.lib",
"<(boost_lib)/libboost_filesystem-vc141-mt-s-<(arch)-1_67.lib",
"<(boost_lib)/libboost_filesystem-vc141-mt-sgd-<(arch)-1_67.lib",
"<(boost_lib)/libboost_locale-vc141-mt-s-<(arch)-1_67.lib",
"<(boost_lib)/libboost_locale-vc141-mt-sgd-<(arch)-1_67.lib",
"<(boost_lib)/libboost_log-vc141-mt-s-<(arch)-1_67.lib",
"<(boost_lib)/libboost_log-vc141-mt-sgd-<(arch)-1_67.lib",
"<(boost_lib)/libboost_log_setup-vc141-mt-s-<(arch)-1_67.lib",
"<(boost_lib)/libboost_log_setup-vc141-mt-sgd-<(arch)-1_67.lib",
"<(boost_lib)/libboost_regex-vc141-mt-s-<(arch)-1_67.lib",
"<(boost_lib)/libboost_regex-vc141-mt-sgd-<(arch)-1_67.lib",
"<(boost_lib)/libboost_system-vc141-mt-s-<(arch)-1_67.lib",
"<(boost_lib)/libboost_system-vc141-mt-sgd-<(arch)-1_67.lib",
"<(boost_lib)/libboost_thread-vc141-mt-s-<(arch)-1_67.lib",
"<(boost_lib)/libboost_thread-vc141-mt-sgd-<(arch)-1_67.lib"
],
},
"targets": [
# build shared
{
"target_name": "shared",
"type": "static_library",
"dependencies": [
"./usvfs_deps.gyp:fmt",
"./usvfs_deps.gyp:spdlog"
],
"defines": [
"_WIN64",
"SPDLOG_NO_NAME",
"SPDLOG_NO_REGISTRY_MUTEX",
"NOMINMAX",
"_WINDOWS",
"NDEBUG",
"BOOST_CONFIG_SUPPRESS_OUTDATED_MESSAGE"
],
"include_dirs": [
"usvfs/src/shared",
"usvfs/include",
"<(boost_dir)",
"usvfs/fmt",
"usvfs/spdlog/include/spdlog"
],
"sources": [
"usvfs/src/shared/addrtools.cpp",
"usvfs/src/shared/debug_monitor.cpp",
"usvfs/src/shared/directory_tree.cpp",
"usvfs/src/shared/exceptionex.cpp",
"usvfs/src/shared/loghelpers.cpp",
"usvfs/src/shared/ntdll_declarations.cpp",
"usvfs/src/shared/scopeguard.cpp",
"usvfs/src/shared/shmlogger.cpp",
"usvfs/src/shared/stringcast_win.cpp",
"usvfs/src/shared/stringutils.cpp",
"usvfs/src/shared/test_helpers.cpp",
"usvfs/src/shared/unicodestring.cpp",
"usvfs/src/shared/wildcard.cpp",
"usvfs/src/shared/winapi.cpp",
"usvfs/src/shared/windows_error.cpp"
],
"configurations": {
"Release": {
"msvs_settings": {
"VCCLCompilerTool": {
"ExceptionHandling": 1,
"RuntimeTypeInfo": "true"
}
},
"msvs_configuration_attributes": {
"CharacterSet": 1
},
"msbuild_toolset": "v141",
"msvs_windows_target_platform_version": "10.0.16299.0"
}
}
},
# build thooklib
{
"target_name": "thooklib",
"type": "static_library",
"dependencies": [
"./usvfs_deps.gyp:asmjit",
"shared",
"./usvfs_deps.gyp:spdlog"
],
"defines": [
"_WIN64",
"ASMJIT_STATIC",
"SPDLOG_NO_NAME",
"SPDLOG_NO_REGISTRY_MUTEX",
"NOMINMAX",
"_WINDOWS",
"NDEBUG",
"BOOST_CONFIG_SUPPRESS_OUTDATED_MESSAGE"
],
"include_dirs": [
"usvfs/src/thooklib",
"usvfs/src/shared",
"usvfs/src/tinjectlib",
"usvfs/src/usvfs_helper",
"usvfs/asmjit/src/asmjit",
"usvfs/udis86",
"usvfs/include",
"<(boost_dir)",
"usvfs/fmt",
"usvfs/spdlog/include/spdlog"
],
"sources": [
"usvfs/src/thooklib/hooklib.cpp",
"usvfs/src/thooklib/ttrampolinepool.cpp",
"usvfs/src/thooklib/udis86wrapper.cpp",
"usvfs/src/thooklib/utility.cpp"
],
"configurations": {
"Release": {
"msvs_settings": {
"VCCLCompilerTool": {
"ExceptionHandling": 1,
"RuntimeTypeInfo": "true"
}
},
"msvs_configuration_attributes": {
"CharacterSet": 1
},
"msbuild_toolset": "v141",
"msvs_windows_target_platform_version": "10.0.16299.0"
}
}
},
# build tinjectlib
{
"target_name": "tinjectlib",
"type": "static_library",
"dependencies": [
"./usvfs_deps.gyp:asmjit",
"shared"
],
"defines": [
"_WIN64",
"ASMJIT_STATIC",
"SPDLOG_NO_NAME",
"SPDLOG_NO_REGISTRY_MUTEX",
"NOMINMAX",
"_WINDOWS",
"NDEBUG",
"BOOST_CONFIG_SUPPRESS_OUTDATED_MESSAGE"
],
"include_dirs": [
"usvfs/src/tinjectlib",
"usvfs/src/shared",
"usvfs/src/thooklib",
"usvfs/src/usvfs_helper",
"usvfs/asmjit/src/asmjit",
"usvfs/udis86",
"usvfs/include",
"<(boost_dir)",
"usvfs/fmt",
"usvfs/spdlog/include/spdlog"
],
"sources": [
"usvfs/src/tinjectlib/injectlib.cpp"
],
"configurations": {
"Release": {
"msvs_settings": {
"VCCLCompilerTool": {
"ExceptionHandling": 1,
"RuntimeTypeInfo": "true"
}
},
"msvs_configuration_attributes": {
"CharacterSet": 1
},
"msbuild_toolset": "v141",
"msvs_windows_target_platform_version": "10.0.16299.0"
}
}
},
# usvfs_helper
{
"target_name": "usvfs_helper",
"type": "static_library",
"dependencies": [
"shared",
"tinjectlib"
],
"defines": [
"BUILDING_USVFS_DLL",
"_WIN64",
"ASMJIT_STATIC",
"SPDLOG_NO_NAME",
"SPDLOG_NO_REGISTRY_MUTEX",
"NOMINMAX",
"_WINDOWS",
"NDEBUG",
"BOOST_CONFIG_SUPPRESS_OUTDATED_MESSAGE"
],
"include_dirs": [
"usvfs/src/usvfs_helper",
"usvfs/src/shared",
"usvfs/src/thooklib",
"usvfs/src/tinjectlib",
"usvfs/asmjit/src/asmjit",
"usvfs/udis86",
"usvfs/include",
"<(boost_dir)",
"usvfs/fmt",
"usvfs/spdlog/include/spdlog"
],
"sources": [
"usvfs/src/usvfs_helper/inject.cpp"
],
"configurations": {
"Release": {
"msvs_settings": {
"VCCLCompilerTool": {
"ExceptionHandling": 1,
"RuntimeTypeInfo": "true"
}
},
"msvs_configuration_attributes": {
"CharacterSet": 1
},
"msbuild_toolset": "v141",
"msvs_windows_target_platform_version": "10.0.16299.0"
}
}
},
# usvfs
{
"target_name": "usvfs",
"type": "shared_library",
"dependencies": [
"./usvfs_deps.gyp:asmjit",
"./usvfs_deps.gyp:fmt",
"shared",
"./usvfs_deps.gyp:spdlog",
"thooklib",
"tinjectlib",
"./usvfs_deps.gyp:udis86",
"usvfs_helper"
],
"defines": [
"BUILDING_USVFS_DLL",
"ASMJIT_STATIC",
"SPDLOG_NO_NAME",
"SPDLOG_NO_REGISTRY_MUTEX",
"NOMINMAX",
"_WINDOWS",
"NDEBUG",
"BOOST_CONFIG_SUPPRESS_OUTDATED_MESSAGE"
],
"include_dirs": [
"usvfs/include",
"usvfs/src/usvfs_dll",
"usvfs/src/shared",
"usvfs/src/thooklib",
"usvfs/src/tinjectlib",
"usvfs/src/usvfs_helper",
"usvfs/asmjit/src/asmjit",
"usvfs/udis86",
"<(boost_dir)",
"usvfs/fmt",
"usvfs/spdlog/include/spdlog"
],
"sources": [
"usvfs/src/usvfs_dll/hookcallcontext.cpp",
"usvfs/src/usvfs_dll/hookcontext.cpp",
"usvfs/src/usvfs_dll/hookmanager.cpp",
"usvfs/src/usvfs_dll/hooks/kernel32.cpp",
"usvfs/src/usvfs_dll/hooks/ntdll.cpp",
"usvfs/src/usvfs_dll/redirectiontree.cpp",
"usvfs/src/usvfs_dll/semaphore.cpp",
"usvfs/src/usvfs_dll/stringcast_boost.cpp",
"usvfs/src/usvfs_dll/usvfs.cpp"
],
"configurations": {
"Release": {
"msvs_settings": {
"VCCLCompilerTool": {
"ExceptionHandling": 1,
"RuntimeTypeInfo": "true"
}
},
"msvs_configuration_attributes": {
"CharacterSet": 1
},
"msbuild_toolset": "v141",
"msvs_windows_target_platform_version": "10.0.16299.0"
}
}
},
# usvfs_proxy
{
"target_name": "usvfs_proxy",
"type": "executable",
"dependencies": [
"./usvfs_deps.gyp:asmjit",
"shared",
"tinjectlib",
"usvfs",
"usvfs_helper"
],
"defines": [
"_WIN64",
"ASMJIT_STATIC",
"SPDLOG_NO_NAME",
"SPDLOG_NO_REGISTRY_MUTEX",
"NOMINMAX",
"_WINDOWS",
"NDEBUG",
"BOOST_CONFIG_SUPPRESS_OUTDATED_MESSAGE"
],
"include_dirs": [
"usvfs/src/shared",
"usvfs/src/thooklib",
"usvfs/src/tinjectlib",
"usvfs/src/usvfs_helper",
"usvfs/asmjit/src/asmjit",
"usvfs/udis86",
"usvfs/include",
"<(boost_dir)",
"usvfs/fmt",
"usvfs/spdlog/include/spdlog"
],
"sources": [
"usvfs/src/usvfs_proxy/main.cpp"
],
"configurations": {
"Release": {
"msvs_settings": {
"VCCLCompilerTool": {
"ExceptionHandling": 1,
"RuntimeTypeInfo": "true"
}
},
"msvs_configuration_attributes": {
"CharacterSet": 1
},
"msbuild_toolset": "v141",
"msvs_windows_target_platform_version": "10.0.16299.0"
}
}
},
]
}
|
#
# @lc app=leetcode.cn id=94 lang=python3
#
# [94] 二叉树的中序遍历
#
# https://leetcode-cn.com/problems/binary-tree-inorder-traversal/description/
#
# algorithms
# Medium (71.81%)
# Likes: 544
# Dislikes: 0
# Total Accepted: 180.4K
# Total Submissions: 251.1K
# Testcase Example: '[1,null,2,3]'
#
# 给定一个二叉树,返回它的中序 遍历。
#
# 示例:
#
# 输入: [1,null,2,3]
# 1
# \
# 2
# /
# 3
#
# 输出: [1,3,2]
#
# 进阶: 递归算法很简单,你可以通过迭代算法完成吗?
#
#
# @lc code=start
# Definition for a binary tree node.
# class TreeNode:
# def __init__(self, x):
# self.val = x
# self.left = None
# self.right = None
class Solution:
    def inorderTraversal(self, root: "TreeNode") -> "List[int]":
        """Return the inorder traversal of a binary tree (Morris-style).

        O(n) time, O(1) extra space.  NOTE: this variant is destructive —
        left links are cut as the tree is consumed, so the input tree is
        not reusable afterwards.
        """
        if not root:
            # Bug fix: the empty tree's inorder traversal is [], not None
            # (the original returned `root`, i.e. None).
            return []
        res = []
        while root:
            if root.left:
                # Find the inorder predecessor (rightmost node of the left
                # subtree) and thread it back to the current node...
                pred = root.left
                while pred.right:
                    pred = pred.right
                pred.right = root
                # ...then detach the left subtree so the thread is followed
                # exactly once and no cycle is ever revisited.
                node = root
                root = root.left
                node.left = None
            else:
                res.append(root.val)
                root = root.right
        return res
# @lc code=end
# def inorderTraversal(self, root: TreeNode) -> List[int]:
# res = []
# stack = []
# node = root
# while stack or node:
# if node:
# stack.append(node)
# node = node.left
# else:
# tmp = stack.pop()
# res.append(tmp.val)
# node = tmp.right
# return res
## Morris traversal (threads the tree back to its inorder successor; O(1) extra space)
# def inorderTraversal(self, root: TreeNode) -> List[int]:
# res = []
# pre = None
# while root:
# if root.left:
# pre = root.left
# while pre.right:
# pre = pre.right
# pre.right = root
# tmp = root
# root = root.left
# tmp.left = None
# else:
# res.append(root.val)
# root = root.right
# return res
## recursion
# def inorderTraversal(self, root: TreeNode) -> List[int]:
# res = []
# def helper(root):
# if not root:
# return
# helper(root.left)
# res.append(root.val)
# helper(root.right)
# helper(root)
# return res
|
# 10. 矩形覆盖
# 我们可以用2*1的小矩形横着或者竖着去覆盖更大的矩形。请问用n个2*1的小矩形无重叠地覆盖一个2*n的大矩形,总共有多少种方法?
# 直接递归 复杂度过高 内存溢出
# -*- coding:utf-8 -*-
class Solution:
    def rectCover(self, number):
        """Count tilings of a 2 x `number` rectangle with 2x1 dominoes.

        Follows the Fibonacci-style recurrence f(n) = f(n-1) + f(n-2) with
        f(1) = 1 and f(2) = 2, computed iteratively in O(n) time and O(1)
        space instead of the original O(2**n) plain recursion (which the
        surrounding comments note blows up for large inputs).
        Inputs <= 2 are returned unchanged, matching the original behavior.
        """
        # write code here
        if number <= 2:
            return number
        prev, curr = 1, 2  # f(n-2), f(n-1)
        for _ in range(3, number + 1):
            prev, curr = curr, prev + curr
        return curr
# 尾递归
# -*- coding:utf-8 -*-
class Solution:
    def rectCover(self, number):
        """Count tilings of a 2 x `number` rectangle with 2x1 dominoes.

        Delegates to an accumulator-style ("tail") recursion seeded with the
        answers for sizes 1 and 2.
        """
        return self.tailRectCover(number, 1, 2)

    def tailRectCover(self, number, a, b):
        """Accumulator recursion: `a`/`b` hold the results for the next two
        sizes; each call shifts the window down by one.
        """
        if number >= 3:
            return self.tailRectCover(number - 1, b, a + b)
        if number == 2:
            return b
        if number == 1:
            return a
        # Sizes below 1 admit no tiling.
        return 0
# 使用额外空间
# -*- coding:utf-8 -*-
class Solution:
    def rectCover(self, number):
        """Count tilings of a 2 x `number` rectangle with 2x1 dominoes.

        Iterative dynamic programming: walk the Fibonacci-style recurrence
        f(n) = f(n-1) + f(n-2) upward from f(1)=1, f(2)=2.
        """
        # write code here
        if number <= 2:
            return number
        first, second = 1, 2
        # range(3, number + 1) has number - 2 iterations.
        for _ in range(number - 2):
            first, second = second, first + second
        return second
|
'''
- DESAFIO 043
- Desenvolva uma lógica que leia o peso e a altura de uma pessoa, calcule seu IMC e mostre o seu status
de acordo com a tabela abaixo:
- Abaixo de 18.5: Abaixo do peso
- Entre 18.5 e 25: Peso ideal
- 25 até 30: Sobrepeso
- 30 até 40: Obesidade
- Acima de 40: Obesidade mórbida
'''
peso = float(input('informe o peso do indivíduo em quilogramas(Kg): '))
altura = float(input('Informe a altura do indivíduo em metros(m): '))
imc = peso / altura**2
# Pick the classification from the table in the header (checked in
# ascending order, so the first matching upper bound wins).
if imc < 18.5:
    classificacao = 'ABAIXO DO PESO'
elif imc <= 25:
    classificacao = 'PESO IDEAL'
elif imc <= 30:
    classificacao = 'SOBREPESO'
elif imc <= 40:
    classificacao = 'OBESIDADE'
else:
    classificacao = 'OBESIDADE MÓRBIDA'
print('De acordo com os valores informados o IMC é igual a {:.2f}, ou seja, é classificado como '.format(imc), end='')
print(classificacao)
|
class Solution:
    def solve(self, nums):
        """Count the elements x of nums (duplicates included) with x + 1 also in nums.

        The original called nums.count(i) inside the loop, making it O(n^2);
        a single Counter pass gives the same totals in O(n).
        """
        from collections import Counter
        freq = Counter(nums)
        return sum(count for value, count in freq.items() if value + 1 in freq)
|
# -*- coding: utf-8 -*-
"""
Created on 14 Apr 2020 15:08:45
@author: jiahuei
"""
|
class Movie:
    """Read-only wrapper around a movie title."""

    def __init__(self, name):
        # name-mangled, so outside code reaches it only through the property
        self.__title = name

    @property
    def name(self):
        """The movie's name."""
        return self.__title

    def __str__(self) -> str:
        return self.name
class Ticket:
    """A cinema ticket pairing a movie with a screening location.

    Both arguments are optional; missing ones fall back to Movie('Avengers')
    and 'PVR Cinemas'. Defaults are built inside __init__ so a fresh Movie is
    created per ticket rather than shared via the signature.
    """
    def __init__(self, movie=None, location=None):
        if movie is None:
            movie = Movie('Avengers')
        # idiom fix: was `location == None`; None checks use identity (PEP 8)
        if location is None:
            location = 'PVR Cinemas'
        self.__movie = movie
        self.__location = location

    def __str__(self) -> str:
        return f"movie = {self.__movie}, location = {self.__location}"

    @property
    def movie(self):
        """The movie this ticket is for (read-only)."""
        return self.__movie

    @property
    def location(self):
        """Where the movie is shown (read-only)."""
        return self.__location
# Demo: ticket built from defaults, then one with an explicit Movie.
t1 = Ticket()
print(t1)
m1 = Movie('Justice League')
t2 = Ticket(movie=m1)
print(t2)
|
"""Experiments Module
========================
This module merges all the modules under the concept of an experiment.
"""
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Wed Oct 16 09:42:38 2019
@author: Giles
"""
# x = 5
# y = 12
#
# z = x + y
#
# print(z)
# 1x = 3
# new_variable = 9
# z = x - y
# a = 2.5
# b = 3.14159
# c = b * a**2
# #
# radius = 2.5
# pi = 3.14159
# area_of_circle = pi * radius**2
# phrase_1 = 'The cat sat on the mat!'
# phrase_2 = 'And so did the dog!'
# phrase_3 = phrase_1 + ' ' + phrase_2
# print(phrase_3)
# int() raises ValueError if the reply is not a whole number
user_input = int(input('How many apples do you have?\n >>> '))
# help()
# while = 5:
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
def _closest(lst, number):
"""Find the closest number in a list.
Arguments:
lst: List to look into.
number: Desired number.
"""
return lst[min(range(len(lst)), key=lambda i: abs(lst[i] - number))]
|
# -*- coding:utf-8 -*-
#https://leetcode.com/problems/substring-with-concatenation-of-all-words/description/
class Solution(object):
    def findSubstring(self, s, words):
        """Return all start indices where s contains a concatenation of every
        word in words exactly once (all words have equal length).

        Sliding-window scan: for each alignment offset the window [begin, end)
        grows one word at a time and shrinks from the left whenever a word is
        over-represented, so each character is visited O(1) times per offset.
        :type s: str
        :type words: List[str]
        :rtype: List[int]
        """
        ret = []
        if not words:
            return ret
        len_s, len_words, len_word = len(s), len(words), len(words[0])
        # required frequency of each target word
        dict_word = {}
        for word in words:
            dict_word[word] = dict_word.get(word, 0) + 1
        # try every alignment offset 0 .. len_word-1
        for i in range(len_word):
            begin, end = i, i
            dict_curr = {}  # counts of target words currently inside [begin, end)
            while end + len_word <= len_s:
                curr = s[end : end + len_word]
                end += len_word
                if curr not in dict_word:
                    # not a target word: no valid window can span it; restart after it
                    begin = end
                    dict_curr = {}
                else:
                    dict_curr[curr] = dict_curr.get(curr, 0) + 1
                    # shrink from the left until `curr` is no longer over-represented
                    while dict_curr[curr] > dict_word[curr]:
                        dict_curr[s[begin : begin + len_word]] -= 1
                        begin += len_word
                    # window holds exactly len_words target words -> match
                    if begin + len_words * len_word == end:
                        ret.append(begin)
        return ret
|
# Runtime settings for the Discord-channel-to-RSS bot.
channel = "test-channel"  # Discord channel to mirror into the feed
feedtitle = "Discord Channel RSS"
feedlink = "github.com/gmemstr/webbot"
feeddescription = "Discord Channel RSS"
flaskdebug = True  # NOTE(review): debug mode should be off in production
flaskport = 5000
class Solution:
    def triangleNumber(self, nums):
        """Count index triples that can form a triangle.

        Sort descending and treat nums[longest] as the candidate longest
        side; two pointers sweep the remainder, and whenever the smallest
        pair at (lo, hi) already beats the longest side, every hi' in
        (lo, hi] works too, contributing hi - lo triangles at once.
        :type nums: List[int]
        :rtype: int
        """
        nums.sort(reverse=True)
        total, size = 0, len(nums)
        for longest in range(size - 2):
            lo, hi = longest + 1, size - 1
            while lo < hi:
                if nums[lo] + nums[hi] > nums[longest]:
                    total += hi - lo
                    lo += 1
                else:
                    hi -= 1
        return total
"""
Problem #1: Add Two Numbers
Leetcode URL: https://leetcode.com/problems/add-two-numbers/
Description on LeetCode:
"
You are given two non-empty linked lists representing two non-negative
integers. The digits are stored in reverse order and each of their nodes
contain a single digit. Add the two numbers and return it as a linked list.
You may assume the two numbers do not contain any leading zero,
except the number 0 itself.
Example:
Input: (2 -> 4 -> 3) + (5 -> 6 -> 4)
Output: 7 -> 0 -> 8
Explanation: 342 + 465 = 807.""
"""
"""
1. Restate the problem:
Oh okay, so the digits in each linked list need to be first reversed,
so that I know the value they really represent?
Then if I heard you correctly, the process is I have to add those two decimal
values (that just means a number in base ten), and then return that in a linked
list the same way as the numbers which we were given? Meaning that the number
(the digit value, not the whole value) is at the end the list, then preceded by
the digit that was to the RIGHT of it in the number, will be to the LEFT of it
in the linked list nodes?
2. Ask clarifying questions/State Assumptions
Ok this looks very interesting! Let me think for a second here... where would
we used this? This looks like it has some application in encoding and decoding
messages...
Can we assume this function needs to be able to handle linked lists of any
size, in terms of the number of nodes?
I'll assume so, since these are linked lists there's no restriction on the
size of the data we're receiving, save we run out of memory.
Are the two linked lists the same size as each other?
I'm assuming no, because we're decoding the values, and we should be able to
add any two decimal values regardless of their length.
And for right now, we're just concerned about summing 2 linked-lists right now?
I assume so - I know it says it right there in the problem, and that's what
I'll focus on for right now - but I'm totally down to improving it later to
handle more numbers (time permitting).
And what about the data type of the linked lists? I don't seem to recall a
built-in Python data structure for this, am I being provided with a Linked List
class here?
No? Okay, then I can just write up my own classes fairly simply.
It almost reminds me of the blockchain, you know? Linked lists and the
blockchain aren't exactly the same data structure - but maybe this function
could help us in same situation as Ethereum has, whenever they have to
verify a transaction. First we read in data from everyone's block, which in our
case is everyone's 2 or 3 sequence of linked list nodes, then we add up the
values stored in those pieces, then send off a new message encoded in a linked
list, before continuing with the rest of our process. I think it's really cool,
there's so many benefits to distributed systems in terms of scalability,
resistance to network failure, so on and so forth.
4a. Think out loud - Brainstorm solutions, explain rationale
Ok so in terms of our solution, when I look at the problem and I see the
example input you provided, there seems to be a really clear brute force
solution for this:
0. Write LinkedList and Node classes
1. "Decode" the first list - meaning we read in the number it represents
2. "Decode" the second list
3. Add the two values
4. "Encode" the sum value into a new linked list - convert it into what it
would look like in reverse, in terms of the nodes
5. return a new linked list
Everything here looks great so far - before coding it up we should probably
take a closer look at the decode() and encode() first, right?
decode() => it takes in a LinkedList as input right? Oh no, even better if we
just made it a property of the LinkedList class!
the steps of this method
we need to traverse the nodes
we can take the value from each node, and add it to a sum variable
(this can just keep incrementing, and that way even though it's out of order
we'll end up with the true decimal value at the end)
Oh, but we also have to make sure to be multiplying these by the power of ten,
based on which power of ten we're at in the problem!
Yeah - and that's fairly simple though, because the first value in the list
is in the ones place, followed by tens
so to speak, we can just keep another variable called exponent, and on each
iterative step through the list we can make sure to raise 10 by that value,
take that value and multiply it by the value in the Node, and THAT's what we
can increment sum by!
Here's the pseudocode then:
LinkedList.decode():
sum starts at 0
exponent starts at 1
data = grab next item from node, starting from the head
sum += 10**exponent * data
return the sum at the end
encode(sum):
going from left to right, grab digits from the sum
prepend them to the linkedlist (aka append them before the head node)
return the linkedlist at the end
Did that make sense so far? ok then, let's jump into the code!
"""
class Node(object):
    """One node of a singly linked list; here each node holds a single digit."""
    def __init__(self, data):
        self.data = data
        self.next = None # pointer to the next Node in the list
class LinkedList(object):
    """Singly linked list of decimal digits, ones digit stored at the head.

    Tracks `head`, `tail` and `size`; `decode` interprets the node sequence
    as a base-10 number stored least-significant digit first (so [2, 4, 3]
    means 342, as in the problem statement).
    """

    def __init__(self, items=None):
        self.size = 0
        self.head = self.tail = None
        # build initial nodes in order, if provided
        if items is not None:
            for item in items:
                self.append(item)

    def __str__(self):
        """Return the node data in order, e.g. "2 -> 4 -> 3" ("" when empty).

        BUG FIX: the loop guard was `node is not Node` -- an identity test
        against the *class* Node, which is always true, so the loop ran off
        the end of the list and crashed on None.data. Joining also removes
        the stray double space the old tail formatting produced.
        """
        parts = []
        node = self.head
        while node is not None:
            parts.append(str(node.data))
            node = node.next
        return " -> ".join(parts)

    def append(self, item):
        """Add a new item at the end of the list."""
        node = Node(item)
        if self.head is None:
            self.head = node
        else:
            self.tail.next = node
        self.tail = node
        self.size += 1

    def prepend(self, item):
        """Add a new item at the front of the list.

        BUG FIX: prepending to an empty list previously left `tail` as None,
        breaking any later append/__str__ bookkeeping.
        """
        node = Node(item)
        node.next = self.head
        self.head = node
        if self.tail is None:
            self.tail = node
        self.size += 1

    def decode(self):
        """Return the decimal value encoded by the list (head = ones digit).

        BUG FIX: `exponent` used to start at 1 and was never incremented, so
        every digit was multiplied by 10 and place values were lost; now it
        starts at 0 and advances per node, so [2, 4, 3] decodes to 342.
        """
        total = 0
        exponent = 0
        node = self.head
        while node is not None:
            total += node.data * (10 ** exponent)
            exponent += 1
            node = node.next
        return total

    def delete(self, item):
        """Delete a node with the given item, or raise ValueError if not found.

        Intentionally unimplemented; not needed for this problem.
        """
        pass
def encode(value):
    """Return the LinkedList representation of a non-negative decimal integer.

    The ones digit becomes the head node (matching LinkedList.decode), so
    encode(807) yields 7 -> 0 -> 8. value == 0 produces a single 0 node.

    BUG FIXES vs. the original:
    - `decimal %= 10` never shrank the number, so the first loop spun forever
      for any value not already < 10 (and left the second loop dead, since
      `decimal` was 0 by the time it ran);
    - `ll.prepend(Node(next_digit))` wrapped a Node inside another Node,
      because prepend() already constructs the Node;
    - the place-value arithmetic was off by one (10 ** places overshoots the
      leading digit).
    Peeling digits from the low end with % 10 // 10 and appending avoids all
    of that in one pass.
    """
    ll = LinkedList()
    if value == 0:
        ll.append(0)
        return ll
    remaining = value
    while remaining > 0:
        ll.append(remaining % 10)  # lowest digit first => head holds the ones place
        remaining //= 10
    return ll
"""
To the interviewer:
If this looks like a lot, don't worry my friend - I agree with you!
I acknowledge if this concerns you, and will be sure to test this at the end,
so we can see if it actually works or not.
"""
def combine_linked_lists(list1, list2):
    """Return a new LinkedList encoding the sum of the two lists' values.

    Each list is decoded to its integer value, the values are added, and the
    sum is re-encoded in the same reversed-digit representation.
    """
    return encode(list1.decode() + list2.decode())
"""
To the interviewer:
Whew, that was a lot of work - and all for just 4 lines of code at the end!
Ok, now I test this with the input you provided, and of course I'm sure we
can find some ways to improve it...
"""
if __name__ == "__main__":
    # Smoke test for the whole pipeline with the example from the prompt.
    # Input: (2 -> 4 -> 3) + (5 -> 6 -> 4)
    # By the way, is it ok to input values like this using lists?
    # Sorry for not clarifying earlier!
    list1 = LinkedList([2, 4, 3])
    list2 = LinkedList([5, 6, 4])
    # Output: 7 -> 0 -> 8
    list3 = combine_linked_lists(list1, list2)
    print(list3)
"""
To the interviewer:
Discuss tradeoffs/Suggest improvements
Oh that's weird! I am not sure why it's not working - I'm sure it's just a
small runtime error somewhere, such as I didn't implement the __str__ magic
function correctly - and if I was doing this for real I'm sure I could look up
the fix in the Python docs reasonably fast.
Other than that, I like how our solution turned out!
In terms of Big O complexity for combine_linked_lists:
decode operations - O(n1 + n2), where n1 and n2 are the number of digits in the
first and second linked lists being decoded, respectively
computing sum - O(1), so we don't really need to worry about that asymptotically
encode - O(m) - scales linearly to the number of digits in the sum
Hmm, I'm not sure how I could improve the runtime - but in terms of the actual
engineering, I think we could definitely improve this using another data
structure!
I think perhaps if we used a queue, it might help us in terms of pulling apart
the digits from the lists - instead of figuring out all the math, we could just
enqueue and dequeue. Again, the runtime complexity wouldn't change, it'd still
be in linear time since we have to go through all the digits, but in terms of
simplicity of code and the actual computations we have to process, we might be
able to shave off some time in the real world - and you know these blockchain
apps need to be high performance if they're going to compete!
That's all I got for now. What do you think?
"""
|
#: A dict contains permission's number as string and its corresponding meanings.
PERM_MAP = {
"1": "download and preview",
"2": "upload",
"3": "upload, download, preview, remove and move",
"4": "upload, download, preview, remove, move and change acls of others",
"5": "preview"
}
#: A set contains all available operation types.
OPERATION_TYPE = {
"SESSION_START",
"DOWNLOAD",
"UPLOAD",
"SHARE",
"MOVE",
"DELETE",
"RESTORE",
"PREVIEW",
"PURGE",
"PWD_ATTACK",
"VIRUS_INFECTED"
}
|
#!/usr/bin/env python
# -*- encoding: utf-8 -*-
# vim: set et sw=4 ts=4 sts=4 ff=unix fenc=utf8:
# Author: https://github.com/LFreedomDev
# Created on 2018-08-31
# pretask schema (documentation only: this bare dict literal is evaluated and
# immediately discarded at import time; it just records the expected record shape)
{
    'pretask': {
        'taskid':str,
        'project': str,
        'url': str,
    }
}
class PreTaskDB(object):
    """
    database for pretask

    Abstract interface: concrete storage backends override all three methods.
    """
    def insert(self,pretask):
        # store one pretask record (see schema above)
        raise NotImplementedError
    def select(self,project):
        # fetch pretasks belonging to `project`
        raise NotImplementedError
    def delete(self,taskid):
        # remove the pretask identified by `taskid`
        raise NotImplementedError
|
def _reduced_string(s):
if not s:
return "Empty String"
letters = list()
counts = list()
last = ""
counts_index = -1
for ch in s:
if last != ch:
letters.append(ch)
counts.append(1)
counts_index += 1
last = ch
else:
counts[counts_index] += 1
ans = list()
for i, val in enumerate(counts):
ans.append(val % 2 * letters[i])
ans = "".join(ans)
if not ans:
ans = "Empty String"
changed = s != ans
return ans, changed
def reduced_string(s):
    """Repeatedly delete adjacent equal pairs; return the remainder or "Empty String".

    A single stack pass reaches the same fixed point as the original's
    repeated full-string passes (pair cancellation is confluent) in O(n)
    instead of O(n^2). It also fixes two defects of the original driver:
    reduced_string("") crashed unpacking the helper's return value, and a
    result equal to the sentinel text was fed back into another pass.
    """
    stack = []
    for ch in s:
        if stack and stack[-1] == ch:
            stack.pop()  # ch cancels the previous identical letter
        else:
            stack.append(ch)
    return "".join(stack) or "Empty String"
def test_solution1():
    """aaa -> a, cc -> (gone), ddd -> d, so "aaabccddd" reduces to "abd"."""
    s = "aaabccddd"
    ans = "abd"
    assert ans == reduced_string(s)
def test_solution2():
    """"baab": aa cancels, leaving "bb", which also cancels -> sentinel."""
    s = "baab"
    ans = "Empty String"
    assert ans == reduced_string(s)
def test_solution3():
    """Every letter appears as an adjacent pair, so the whole string cancels."""
    s = "aabbccdd"
    ans = "Empty String"
    assert ans == reduced_string(s)
|
# Config file for user settings
# Change anything as desired
''' Navigation Settings '''
PAN_DIST = 15  # pan step per key press
PAN_DIST_LARGE = 60  # pan step when shift is held
ZOOM_FACTOR = 1.35  # multiplicative zoom per zoom in/out step
'''Canvas Settings'''
# Colors are 4-tuples -- presumably (R, G, B, alpha) in [0, 1]; confirm in the renderer
SUB_COLOR = (1, 0, 0, 1)
PATH_COLOR = (0, 0, 0, 0.7)
GRID_COLOR = (0.4, 0, 0, 0.3)
TAG_COLOR = (0, 0, 1, 0.8)
ELEMENT_COLOR = (0, .5, 0, 1.0)
MAGNIFY = 40 #How many pixels correspond to 1 unit in shm
PATH_RES = 0.15 #Path resolution
SUB_SIZE = 8
TAG_SIZE = 5
PATH_LEN = -1 #neg. number indicates unbounded
GRID_LINES = 50
GRID_SPACE = 1. #in meters
SMOOTHING = True
PERIOD = .01 # Period of smooth updates, smaller <=> faster smoothing
''' Key bindings '''
# You can bind the control key with the format 'ctrl [key]'
# The same applies to the shift key: 'shift [key]'
# NOTE: Do not use uppercase letters, use shift + letter
# To include both control and shift, ctrl comes first: 'ctrl shift [key]'
bindings_on = True

# One literal keeps every binding visible at a glance (same keys and values
# as before; see the format notes in the comments above).
key_binds = {
    "quit": "shift q",
    "bindings toggle": "shift b",
    "help": "h",
    "follow sub": "shift f",
    "follow position only": "f",
    "reset path": "r",
    "zoom in": "i",
    "zoom out": "o",
    "pan left": "left",
    "pan left large": "shift left",
    "pan right": "right",
    "pan right large": "shift right",
    "pan up": "up",
    "pan up large": "shift up",
    "pan down": "down",
    "pan down large": "shift down",
    "center view": "c",
    "rotate cw": "ctrl left",
    "rotate ccw": "ctrl right",
}
|
class Solution(object):
    def combinationSum2(self, candidates, target):
        """Return all unique combinations of candidates summing to target.

        Classic backtracking over the sorted list: each candidate is used at
        most once, and equal values at the same depth are skipped so no
        combination is produced twice.
        :type candidates: List[int]
        :type target: int
        :rtype: List[List[int]]
        """
        candidates.sort()
        total = len(candidates)
        chosen = []
        found = []

        def backtrack(start, remaining):
            if remaining == 0:
                found.append(chosen[:])
            for j in range(start, total):
                if candidates[j] > remaining:
                    break  # sorted, so nothing further can fit
                if j > start and candidates[j] == candidates[j - 1]:
                    continue  # duplicate value at this depth -> already explored
                chosen.append(candidates[j])
                backtrack(j + 1, remaining - candidates[j])
                chosen.pop()

        backtrack(0, target)
        return found
'''
Python函数测试代码
'''
def testFunc(width, height):
return width * height
# Distinguish running this file directly from being imported by another module
if __name__ == "__main__":
    print("执行main方法")
else:
    print(__name__)
# The line below is NOT guarded by the __main__ check, so it would also run
# whenever this module is imported by someone else
# print("5 * 5 =",testFunc(5, 5))
|
# Walrus operator (:=) demo.
# It evaluates the right-hand expression, binds the result to the variable,
# and lets the bound value be tested in the same expression -- removing the
# need for a separate temporary variable.
foo = {'a': 10, 'b': 0}
# conventional form: bind first, then test
a_num = foo.get('a', 0)
if a_num:
    print('a > 0')
# walrus form: bind and test in one expression
if a := foo.get('a', 0):
    print('a > 0')
|
def find_peak(arr, begin=0, end=None, l=None):
    '''Return the index of a peak element (one not smaller than its
    neighbours) in O(log n) via divide and conquer. arr must be non-empty.

    BUG FIXES vs. the original:
    - arr[mid-1] with mid == 0 silently wrapped to arr[-1] (Python negative
      indexing), comparing against the LAST element instead of a boundary;
    - that wrap made inputs like [1, 10] recurse forever (the interval never
      shrank because int(-0.5) == 0);
    - inputs with equal neighbours (plateaus) could fall through every branch
      and return None.
    Explicit boundary checks with >= peak semantics fix all three.
    '''
    # initialize values on first iteration
    if end is None or l is None:
        l = len(arr)
        end = l - 1
    mid = (begin + end) // 2
    # a boundary counts as "smaller", so edges can be peaks
    left_ok = mid == 0 or arr[mid - 1] <= arr[mid]
    right_ok = mid == l - 1 or arr[mid + 1] <= arr[mid]
    if left_ok and right_ok:
        return mid
    if not left_ok:
        # left neighbour is strictly greater -> a peak must exist on the left
        return find_peak(arr, begin, mid - 1, l)
    # otherwise the right neighbour is strictly greater -> peak on the right
    return find_peak(arr, mid + 1, end, l)
## TEST ##
# Ad-hoc smoke test; for this ascending array the peak is 18 at index 9.
arr = [8, 9, 10, 11, 11, 12, 13, 14, 15, 18]
print(arr)
result = find_peak(arr)
print("Peak Index:", result)
print("Peak:", arr[result])
|
# Zero-based indices for the three ECG channels
ECG_CHAN_1 = 0
ECG_CHAN_2 = 1
ECG_CHAN_3 = 2
# Per-channel payload sizes for the 5-lead and 12-lead configurations
# (presumably bytes -- confirm against the framing/readout code)
LEAD_05_CHAN_1_DATA_SIZE = 9
LEAD_12_CHAN_1_DATA_SIZE = 6
LEAD_12_CHAN_2_DATA_SIZE = 9
LEAD_12_CHAN_3_DATA_SIZE = 9
TI_ADS1293_CONFIG_REG = 0x00 #/* Main Configuration */
TI_ADS1293_FLEX_CH1_CN_REG = 0x01 #/* Flex Routing Swich Control for Channel 1 */
TI_ADS1293_FLEX_CH2_CN_REG = 0x02 #/* Flex Routing Swich Control for Channel 2 */
TI_ADS1293_FLEX_CH3_CN_REG = 0x03 #/* Flex Routing Swich Control for Channel 3 */
TI_ADS1293_FLEX_PACE_CN_REG = 0x04 #/* Flex Routing Swich Control for Pace Channel */
TI_ADS1293_FLEX_VBAT_CN_REG = 0x05 #/* Flex Routing Swich Control for Battery Monitoriing */
TI_ADS1293_LOD_CN_REG = 0x06 #/* Lead Off Detect Control */
TI_ADS1293_LOD_EN_REG = 0x07 #/* Lead Off Detect Enable */
TI_ADS1293_LOD_CURRENT_REG = 0x08 #/* Lead Off Detect Current */
TI_ADS1293_LOD_AC_CN_REG = 0x09 #/* AC Lead Off Detect Current */
TI_ADS1293_CMDET_EN_REG = 0x0A #/* Common Mode Detect Enable */
TI_ADS1293_CMDET_CN_REG = 0x0B #/* Commond Mode Detect Control */
TI_ADS1293_RLD_CN_REG = 0x0C #/* Right Leg Drive Control */
TI_ADS1293_WILSON_EN1_REG = 0x0D #/* Wilson Reference Input one Selection */
TI_ADS1293_WILSON_EN2_REG = 0x0E #/* Wilson Reference Input two Selection */
TI_ADS1293_WILSON_EN3_REG = 0x0F #/* Wilson Reference Input three Selection */
TI_ADS1293_WILSON_CN_REG = 0x10 #/* Wilson Reference Input Control */
TI_ADS1293_REF_CN_REG = 0x11 #/* Internal Reference Voltage Control */
TI_ADS1293_OSC_CN_REG = 0x12 #/* Clock Source and Output Clock Control */
TI_ADS1293_AFE_RES_REG = 0x13 #/* Analog Front-End Frequency and Resolution */
TI_ADS1293_AFE_SHDN_CN_REG = 0x14 #/* Analog Front-End Shutdown Control */
TI_ADS1293_AFE_FAULT_CN_REG = 0x15 #/* Analog Front-End Fault Detection Control */
TI_ADS1293_AFE_DITHER_EN_REG = 0x16 #/* Enable Dithering in Signma-Delta */
TI_ADS1293_AFE_PACE_CN_REG = 0x17 #/* Analog Pace Channel Output Routing Control */
TI_ADS1293_ERROR_LOD_REG = 0x18 #/* Lead Off Detect Error Status */
TI_ADS1293_ERROR_STATUS_REG = 0x19 #/* Other Error Status */
TI_ADS1293_ERROR_RANGE1_REG = 0x1A #/* Channel 1 Amplifier Out of Range Status */
TI_ADS1293_ERROR_RANGE2_REG = 0x1B #/* Channel 1 Amplifier Out of Range Status */
TI_ADS1293_ERROR_RANGE3_REG = 0x1C #/* Channel 1 Amplifier Out of Range Status */
TI_ADS1293_ERROR_SYNC_REG = 0x1D #/* Synchronization Error */
TI_ADS1293_R2_RATE_REG = 0x21 #/* R2 Decimation Rate */
TI_ADS1293_R3_RATE1_REG = 0x22 #/* R3 Decimation Rate for Channel 1 */
TI_ADS1293_R3_RATE2_REG = 0x23 #/* R3 Decimation Rate for Channel 2 */
TI_ADS1293_R3_RATE3_REG = 0x24 #/* R3 Decimation Rate for Channel 3 */
TI_ADS1293_P_DRATE_REG = 0x25 #/* 2x Pace Data Rate */
TI_ADS1293_DIS_EFILTER_REG = 0x26 #/* ECG Filter Disable */
TI_ADS1293_DRDYB_SRC_REG = 0x27 #/* Data Ready Pin Source */
TI_ADS1293_SYNCOUTB_SRC_REG = 0x28 #/* Sync Out Pin Source */
TI_ADS1293_MASK_DRDYB_REG = 0x29 #/* Optional Mask Control for DRDYB Output */
TI_ADS1293_MASK_ERR_REG = 0x2A #/* Mask Error on ALARMB Pin */
TI_ADS1293_ALARM_FILTER_REG = 0x2E #/* Digital Filter for Analog Alarm Signals */
TI_ADS1293_CH_CNFG_REG = 0x2F #/* Configure Channel for Loop Read Back Mode */
TI_ADS1293_DATA_STATUS_REG = 0x30 #/* ECG and Pace Data Ready Status */
TI_ADS1293_DATA_CH1_PACE_H_REG = 0x31 #/* Channel1 Pace Data High [15:8] */
TI_ADS1293_DATA_CH1_PACE_L_REG = 0x32 #/* Channel1 Pace Data Low [7:0] */
TI_ADS1293_DATA_CH2_PACE_H_REG = 0x33 #/* Channel2 Pace Data High [15:8] */
TI_ADS1293_DATA_CH2_PACE_L_REG = 0x34 #/* Channel2 Pace Data Low [7:0] */
TI_ADS1293_DATA_CH3_PACE_H_REG = 0x35 #/* Channel3 Pace Data High [15:8] */
TI_ADS1293_DATA_CH3_PACE_L_REG = 0x36 #/* Channel3 Pace Data Low [7:0] */
TI_ADS1293_DATA_CH1_ECG_H_REG = 0x37 #/* Channel1 ECG Data High [23:16] */
TI_ADS1293_DATA_CH1_ECG_M_REG = 0x38 #/* Channel1 ECG Data Medium [15:8] */
TI_ADS1293_DATA_CH1_ECG_L_REG = 0x39 #/* Channel1 ECG Data Low [7:0] */
TI_ADS1293_DATA_CH2_ECG_H_REG = 0x3A #/* Channel2 ECG Data High [23:16] */
TI_ADS1293_DATA_CH2_ECG_M_REG = 0x3B #/* Channel2 ECG Data Medium [15:8] */
TI_ADS1293_DATA_CH2_ECG_L_REG = 0x3C #/* Channel2 ECG Data Low [7:0] */
TI_ADS1293_DATA_CH3_ECG_H_REG = 0x3D #/* Channel3 ECG Data High [23:16] */
TI_ADS1293_DATA_CH3_ECG_M_REG = 0x3E #/* Channel3 ECG Data Medium [15:8] */
TI_ADS1293_DATA_CH3_ECG_L_REG = 0x3F #/* Channel3 ECG Data Low [7:0] */
TI_ADS1293_REVID_REG = 0x40 #/* Revision ID */
TI_ADS1293_DATA_LOOP_REG = 0x50 #/* Loop Read Back Address */
#Useful definitions
ADS1293_READ_BIT = 0x80
ADS1293_WRITE_BIT = 0x7F |
# field_subject on islandora 8 is configured to map to the following vocabs: Corporate, Family, Geographic Location,
# Person, Subject
class MemberOf:
    """Exposes the Drupal field name and value used for the 'member of' relation."""
    def __init__(self):
        # logical name -> Drupal machine field name
        self.drupal_fieldnames = {'memberof': 'field_member_of'}
        # fixed value used for the member-of field
        self.memberof = 'Repository Item'
    def get_memberof(self):
        """Return the member-of value ('Repository Item')."""
        return self.memberof
    def get_memberoffieldname(self):
        """Return the Drupal machine name of the member-of field."""
        return self.drupal_fieldnames['memberof']
# -*- coding: UTF-8 -*-
def read(file):
    """
    Read the content of the file and return it as a list of lines
    (the raw text split on "\n").
    Args:
        file: the name of the file with the path
    """
    with open(file, 'r') as handle:
        content = handle.read()
    return content.split("\n")
def write(file, text):
    """
    Save the given text to a file, overwriting any existing content.
    Args:
        file: the name of the file with the path
        text: the text to be written in the file
    """
    with open(file, 'w') as handle:
        handle.write(text)
|
# Copyright 2016-present, Facebook, Inc.
# All rights reserved.
#
# This source code is licensed under the BSD-style license found in the
# LICENSE file in the root directory of this source tree. An additional grant
# of patent rights can be found in the PATENTS file in the same directory.
"""
Map of paths to build mode overrides.
Format is: {cell: {path: {'<mode>': <options>}}
"""
build_mode_overrides = {}
|
# Symmetric difference of two lists, preserving order: elements only in `a`
# first (in a's order), then elements only in `b` (in b's order).
a = [1, 2, 3, 4, 5]
b = [4, 5, 6, 7, 8]
# hoist membership into sets: O(1) lookups instead of O(n) `in list` scans
set_a, set_b = set(a), set(b)
diff = [item for item in a if item not in set_b]
diff.extend(item for item in b if item not in set_a)
print(diff)
#
# PySNMP MIB module ZYXEL-CPU-PROTECTION-MIB (http://snmplabs.com/pysmi)
# ASN.1 source file:///Users/davwang4/Dev/mibs.snmplabs.com/asn1/ZYXEL-CPU-PROTECTION-MIB
# Produced by pysmi-0.3.4 at Wed May 1 15:49:18 2019
# On host DAVWANG4-M-1475 platform Darwin version 18.5.0 by user davwang4
# Using Python version 3.7.3 (default, Mar 27 2019, 09:23:15)
#
OctetString, ObjectIdentifier, Integer = mibBuilder.importSymbols("ASN1", "OctetString", "ObjectIdentifier", "Integer")
NamedValues, = mibBuilder.importSymbols("ASN1-ENUMERATION", "NamedValues")
ConstraintsUnion, SingleValueConstraint, ValueSizeConstraint, ConstraintsIntersection, ValueRangeConstraint = mibBuilder.importSymbols("ASN1-REFINEMENT", "ConstraintsUnion", "SingleValueConstraint", "ValueSizeConstraint", "ConstraintsIntersection", "ValueRangeConstraint")
NotificationGroup, ModuleCompliance = mibBuilder.importSymbols("SNMPv2-CONF", "NotificationGroup", "ModuleCompliance")
NotificationType, Integer32, Bits, ObjectIdentity, Gauge32, IpAddress, ModuleIdentity, MibScalar, MibTable, MibTableRow, MibTableColumn, TimeTicks, iso, Counter32, Unsigned32, Counter64, MibIdentifier = mibBuilder.importSymbols("SNMPv2-SMI", "NotificationType", "Integer32", "Bits", "ObjectIdentity", "Gauge32", "IpAddress", "ModuleIdentity", "MibScalar", "MibTable", "MibTableRow", "MibTableColumn", "TimeTicks", "iso", "Counter32", "Unsigned32", "Counter64", "MibIdentifier")
TextualConvention, DisplayString = mibBuilder.importSymbols("SNMPv2-TC", "TextualConvention", "DisplayString")
esMgmt, = mibBuilder.importSymbols("ZYXEL-ES-SMI", "esMgmt")
zyxelCpuProtection = ModuleIdentity((1, 3, 6, 1, 4, 1, 890, 1, 15, 3, 16))
if mibBuilder.loadTexts: zyxelCpuProtection.setLastUpdated('201207010000Z')
if mibBuilder.loadTexts: zyxelCpuProtection.setOrganization('Enterprise Solution ZyXEL')
if mibBuilder.loadTexts: zyxelCpuProtection.setContactInfo('')
if mibBuilder.loadTexts: zyxelCpuProtection.setDescription('The subtree for cpu protection')
zyxelCpuProtectionSetup = MibIdentifier((1, 3, 6, 1, 4, 1, 890, 1, 15, 3, 16, 1))
zyxelCpuProtectionTable = MibTable((1, 3, 6, 1, 4, 1, 890, 1, 15, 3, 16, 1, 1), )
if mibBuilder.loadTexts: zyxelCpuProtectionTable.setStatus('current')
if mibBuilder.loadTexts: zyxelCpuProtectionTable.setDescription('The table contains CPU protection configuration.')
zyxelCpuProtectionEntry = MibTableRow((1, 3, 6, 1, 4, 1, 890, 1, 15, 3, 16, 1, 1, 1), ).setIndexNames((0, "ZYXEL-CPU-PROTECTION-MIB", "zyCpuProtectionPort"), (0, "ZYXEL-CPU-PROTECTION-MIB", "zyCpuProtectionReasonType"))
if mibBuilder.loadTexts: zyxelCpuProtectionEntry.setStatus('current')
if mibBuilder.loadTexts: zyxelCpuProtectionEntry.setDescription('An entry contains CPU protection configuration.')
zyCpuProtectionPort = MibTableColumn((1, 3, 6, 1, 4, 1, 890, 1, 15, 3, 16, 1, 1, 1, 1), Integer32())
if mibBuilder.loadTexts: zyCpuProtectionPort.setStatus('current')
if mibBuilder.loadTexts: zyCpuProtectionPort.setDescription('This field displays the port number.')
zyCpuProtectionReasonType = MibTableColumn((1, 3, 6, 1, 4, 1, 890, 1, 15, 3, 16, 1, 1, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("arp", 1), ("bpdu", 2), ("igmp", 3))))
if mibBuilder.loadTexts: zyCpuProtectionReasonType.setStatus('current')
if mibBuilder.loadTexts: zyCpuProtectionReasonType.setDescription('This field displays which type of control packets on the specified port.')
zyCpuProtectionRateLimit = MibTableColumn((1, 3, 6, 1, 4, 1, 890, 1, 15, 3, 16, 1, 1, 1, 3), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 256))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: zyCpuProtectionRateLimit.setStatus('current')
if mibBuilder.loadTexts: zyCpuProtectionRateLimit.setDescription('Enter a number from 0 to 256 to specified how many control packets this port can receive or transmit per second. 0 means no rate limit.')
mibBuilder.exportSymbols("ZYXEL-CPU-PROTECTION-MIB", zyxelCpuProtectionEntry=zyxelCpuProtectionEntry, zyCpuProtectionReasonType=zyCpuProtectionReasonType, zyxelCpuProtectionSetup=zyxelCpuProtectionSetup, zyxelCpuProtectionTable=zyxelCpuProtectionTable, zyCpuProtectionPort=zyCpuProtectionPort, zyxelCpuProtection=zyxelCpuProtection, zyCpuProtectionRateLimit=zyCpuProtectionRateLimit, PYSNMP_MODULE_ID=zyxelCpuProtection)
|
# print_couple(A, arr)
#   A   -- the underlying set, e.g. A = [1,2,3,4,5,6,7,8,9,10]
#   arr -- the relation matrix, made of zeros and ones
# Prints the pairs of the relation and returns them as a list.
# For example, with the set A = [3, 5, 7] and
# arr = [
#   [1,0,1]
#   [0,1,0]
#   [0,1,1]
# ]
# the function prints the relation
# { (3, 3), (3, 7), (5, 5), (7, 5), (7, 7) }
def print_couple(A, arr):
    # collect the pair (A[i], A[j]) for every 1 in the matrix
    data = []
    for i in range(len(arr)):
        for j in range(len(arr[i])):
            if arr[i][j] == 1:
                data.append( (A[i], A[j]) )
    if data != []:
        print('{\t', end = '')
        k = 0
        # wrap the output: k resets after the 7th pair on a line
        for i in range(len(data) - 1):
            k += 1
            if k == 7:
                k = 1
                print(end = '\n\t')
            print(data[i], end = ', ')
        # last pair printed separately so there is no trailing comma
        print(data[len(data) - 1], end = '')
        print(' }')
    else:
        print('{ }')
    return data
|
# GENERATED VERSION FILE
# TIME: Tue Oct 6 17:46:50 2020
__version__ = "0.3.0+0e6315d"
short_version = "0.3.0"
|
class RequestLog:
    """Mutable record of a single request/response exchange.

    All fields start as None and are filled in by whichever code performs
    the request; the field names (request_method, status_code, ...) suggest
    HTTP semantics -- confirm with the calling code.
    """
    def __init__(self):
        self.url = None                   # target URL of the request
        self.request_headers = None
        self.request_body = None
        self.request_method = None
        self.request_timestamp = None
        self.status_code = None
        self.response_headers = None
        self.response_body = None
        self.response_timestamp = None
        self.error = None                 # failure information, if any
|
#is_hot = True
#is_cold = False
#if is_hot:
#print("it's a hot day wear a vest")
#elif is_cold:
#print("its cold")
#else:
#print("die"1111111111111)
#good_credit = True
#price = 1000000
#if good_credit:
#print("you need to put down 10%")
#print("payment is " + "$" + str(0.1*price))
#else:
#print("you need to put down 20%")
#print("payment is " + "$" + str(0.2*price))
#good_credit = True
#price = 1000000
#if good_credit:
#down_payment = 0.1 * price
#else:
#down_payment = 0.2 * price
#print(f"the down payment: ${down_payment}")
# temperature = 30
# if temperature > 30:
# print ("it is hot")
# elif temperature < 20:
# print("it is kinda cold")
# else:
# print("aight i am not sure what temperature it is")
# Validate that the entered name is between 3 and 20 characters long
name = input()
name_character = len(name)
if name_character < 3:
    print("your name character is too short")
elif name_character > 20:
    print("your name character is too long")
else:
    print("your name character are perfect")
def media_digitos(n):
    """Return the arithmetic mean of the decimal digits of n."""
    digits = [int(ch) for ch in str(n)]
    return sum(digits) / len(digits)
# Read an integer from stdin and print the average of its digits.
valor = int(input("digite o valor: "))
print(f"a media dos valores e: {media_digitos(valor)}")
# Package metadata for the `asent` distribution.
__title__ = "asent"
__version__ = "0.3.0" # the ONLY source of version ID
__download_url__ = "https://github.com/kennethenevoldsen/asent"
__documentation__ = "https://kennethenevoldsen.github.io/asent"
|
# Split N values between two groups so that the larger group total is as
# small as possible, by trying every subset via a bitmask.
N = int(input())
T = [int(input()) for _ in range(N)]
total = sum(T)
ans = 1e10
for mask in range(1 << N):
    # Total of the values this mask assigns to the first group.
    picked = sum(T[j] for j in range(N) if mask & (1 << j))
    ans = min(ans, max(picked, total - picked))
print(ans)
|
# Compute x! mod y, reducing after every multiplication so the running
# product never grows past y.
x = int(input())
y = int(input())
fact = 1
for i in range(1, x + 1):
    fact = (fact % y) * (i % y) % y
print(fact)
class DLinkedNode():
    """Node of a doubly linked list holding one cached key/value pair."""
    def __init__(self, key: int = None, value: int = None, prev: 'DLinkedNode' = None, next: 'DLinkedNode' = None):
        # The key is kept on the node so the owning cache can delete its
        # dict entry when this node is evicted from the list tail.
        self.key = key
        self.value = value
        self.prev = prev
        self.next = next
class DLinkedList():
    """Doubly linked list with sentinel head/tail nodes.

    Nodes added via add_node sit right after ``head``; the node before
    ``tail`` is the oldest insertion. All operations run in O(1).
    """
    def __init__(self):
        # Sentinel nodes: real entries are always linked between them,
        # so insertion/removal never needs None checks.
        self.head = DLinkedNode()
        self.tail = DLinkedNode()
        self.head.next = self.tail
        self.tail.prev = self.head
    def pop_tail(self) -> 'DLinkedNode':
        """Unlink and return the node just before the tail sentinel."""
        res = self.tail.prev
        self.remove_node(res)
        return res
    def move_to_head(self, node: 'DLinkedNode') -> None:
        """Re-position an already-linked node right after the head sentinel."""
        self.remove_node(node)
        self.add_node(node)
    def remove_node(self, node: 'DLinkedNode') -> None:
        """Unlink *node* from the list; node's own pointers are left untouched."""
        prev = node.prev
        new = node.next
        prev.next = new
        new.prev = prev
    def add_node(self, node: 'DLinkedNode') -> None:
        """Link *node* in right after the head sentinel."""
        node.prev = self.head
        node.next = self.head.next
        self.head.next.prev = node
        self.head.next = node
class LRUCache():
    """Least-recently-used cache with O(1) get and put.

    A dict gives O(1) key lookup; a doubly linked list keeps entries in
    recency order so the oldest entry can be evicted in O(1).
    """
    def __init__(self, capacity: int):
        self.size = 0
        self.capacity = capacity
        self.cache = {}  # key -> DLinkedNode
        self.list = DLinkedList()

    def get(self, key: int) -> int:
        """Return the value stored for key, or -1 if absent; refreshes recency."""
        node = self.cache.get(key)
        if node is None:
            return -1
        self.list.move_to_head(node)
        return node.value

    def put(self, key: int, value: int) -> None:
        """Insert or update key; evicts the least-recent entry when over capacity."""
        node = self.cache.get(key)
        if node is not None:
            # Existing entry: overwrite the value and refresh recency.
            node.value = value
            self.list.move_to_head(node)
            return
        # New entry: link it at the head and index it in the dict.
        node = DLinkedNode(key=key, value=value)
        self.cache[key] = node
        self.list.add_node(node)
        self.size += 1
        if self.size > self.capacity:
            # Over capacity: drop the least-recently-used node.
            evicted = self.list.pop_tail()
            del self.cache[evicted.key]
            self.size -= 1

# Your LRUCache object will be instantiated and called as such:
# obj = LRUCache(capacity)
# param_1 = obj.get(key)
# obj.put(key,value)
|
class TokenStream:
    """Sequential reader over a token list with lookahead and push-back."""

    def __init__(self, tokens):
        self.tokens = tokens

    def lookahead(self, index):
        """Return the token *index* positions ahead without consuming it."""
        return self.tokens[index]

    def peek(self):
        """Return the next token without consuming it."""
        return self.lookahead(0)

    def advance(self):
        """Consume and return the next token."""
        return self.tokens.pop(0)

    def defer(self, token):
        """Push *token* back so it becomes the next token returned."""
        self.tokens.insert(0, token)
|
# Definitions for Machine.Status
MACHINE_STATUS_BROKEN = 0
MACHINE_STATUS_ALIVE = 1

# The "prebaked" downage categories
PREBAKED_DOWNAGE_CATEGORY_TEXTS = [
    'Software Problem',
    'Hardware Problem',
    'Machine Problem',
]

# TODO: remove this and actually query locations after basic demo
# x should be within [0-3] and y should be within [0-8]
# Machine ids '1'..'10' mapped to their stub (x, y) grid coordinates.
STUB_LOCATION_DICT = {
    str(machine_id): {'x': x, 'y': y}
    for machine_id, (x, y) in enumerate(
        [(1, 2), (0, 3), (0, 4), (0, 5), (1, 6),
         (3, 6), (3, 5), (3, 4), (3, 3), (3, 2)],
        start=1,
    )
}
|
# CODE ARENA PLAYER
# Problem Link : https://www.hackerearth.com/practice/algorithms/searching/ternary-search/practice-problems/algorithm/small-factorials/
# Precompute 0!..104! once so each of the t queries is answered in O(1).
f = [1] * 105
for i in range(2, 105):
    f[i] = f[i - 1] * i

t = int(input())
for _ in range(t):
    n = int(input())
    print(f[n])
|
"""
Programacion orientad a objetos!
- conceptos que lo soportan!
1 - astraccion de datos: proceso mediante el cual somos capaces de escoger la implementacion
de un objeto que contenga o modele las propiedades en el problema.
2 - encapsulamiento: ocultar la representacion y el estado de las propiedades de un objeto! solamente
se puede modificar a traves de las operaciones definidas en una clase
3 - herencia:
4 - polimorfismo
CLASE: conjunto de objetos que comparten una estructura, comportamiento y semantica comun.
OBJETO: es una instancia o valor correspondiente a una clase! representacion astrabta de un concepto
"""
#persona = "jhon", "angela"
#print(persona)
#--------------------------------
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.