index int64 0 1,000k | blob_id stringlengths 40 40 | code stringlengths 7 10.4M |
|---|---|---|
21,700 | c7975f7f7bf86f8a96237e9cab4e2b148b177f42 | def create_user(first_name: str, last_name: str, age: int = 42, **kwargs):
return {
"name": first_name,
"surname": last_name,
"age": age,
"extra": kwargs
}
print(create_user("John", "Doe"))
print(create_user("Bill", "Gates", age=65))
print(create_user("Marie", "Curie", age=66, occupation="physicist", won_nobel=True))
|
21,701 | 43ddd0a7c866eb0e6dd2b5704a19fe2654ab7bc5 | #-*- coding:utf-8 -*-
#!/usr/bin/env python
# NOTE(review): Python 2 Fabric 1.x deployment script (print statement,
# fabric star imports). The shebang on line 2 is ineffective: a shebang
# must be the very first line of the file.
import time
import json
from fabric.api import *
from fabric.colors import *
env.use_ssh_config = True
def uploadFiles(s, d):
    # Upload local file `s` to remote path `d`; on failure, ask the
    # operator whether to continue, aborting the whole run otherwise.
    with settings(warn_only=True):
        result = put(s, d)
        if result.failed and not confirm("put tar file failed, Continue[Y/N]"):
            abort("aborting file put: %s-----%s" % (s, d))
        else:
            print green("Successfully put " + s + " to dir " + d)
def setFirewall():
    # WARNING: flushes ALL iptables rules on the remote host.
    run('sudo iptables -F')
def uploadPackage():
    # Zip the current directory, push it to the remote home dir and
    # unpack it into ~/ss-node (replacing any previous copy).
    local('zip -r ./ss-node.zip .')
    with settings(warn_only=True):
        run('rm ~/ss-node.zip')
    uploadFiles('./ss-node.zip', '~/')
    local('rm ss-node.zip')
    run('rm -rf ~/ss-node')
    # NOTE(review): the trailing `cd` has no lasting effect — each run()
    # starts a fresh shell.
    run('unzip ~/ss-node.zip -d ss-node && cd ~/ss-node')
def deploy():
    # Full deployment: upload sources, build the Docker image, then
    # flush the firewall.
    uploadPackage()
    run('cd ~/ss-node && docker build -t ss-node .')
    setFirewall()
def setup_server():
    # One-time provisioning for a CentOS 6 host: EPEL repo, Docker, unzip.
    run('rpm -iUvh http://dl.fedoraproject.org/pub/epel/6/x86_64/epel-release-6-8.noarch.rpm \
&& yum update -y \
&& yum -y install docker-io \
&& service docker start \
&& chkconfig docker on \
&& sudo yum install unzip -y \
')
|
21,702 | f5b491310ece7ab98d62f1297dfeb5d1f5f6caae | import math
def KeyPairs(string):
    """Return a 16-bit binary string built from the two characters of *string*.

    Each character is rendered as ``'0' + bin(ord(c))[2:]`` and left-padded
    with zeros to (at least) 8 bits; the two octets are concatenated.
    """
    def to_octet(ch):
        # Keep the leading '0' of the '0b...' prefix, drop the 'b', pad to 8.
        raw = bin(ord(ch))
        return (raw[0:1] + raw[2:]).zfill(8)
    return to_octet(string[0:1]) + to_octet(string[1:])
def ConvertToTable(BinaryString):
    """Return a dict mapping row indices 0-3 to lists of single-bit strings.

    NOTE(review): every row holds the *first four* characters of
    ``BinaryString`` because the column index restarts at 0 for each row,
    so all four rows are identical. That looks like an off-by-design bug,
    but the behavior is preserved here — TODO confirm intent.
    """
    table = {}
    for row in range(4):
        # Slicing (not indexing) yields '' past the end, like the original.
        table[row] = [BinaryString[col:col + 1] for col in range(4)]
    return table
|
21,703 | be0aef74fa1c34e8fc1bf537303041092e125b9b | def build_model(cluserting_model, data, labels):
model = clustering_model(data)
print("homo\tcompl\tv-meas\tARI\tAMI\tsilhouette')
print(50 * "-")
print('%.3f\t%.3f\t%.3f\t%.3f\t%.3f\t%.3f\t'
%(metrics.homogeneity_score(labels, model.labels_),
%(metrics.completeness_score(labels,model.labels_),
%(metrics.v_measure_score(labels,model.labels_),
%(metrics.adjusted_rand_score(labels,model.labels_),
%(metrics.adjusted_mutual_info_score(labels,labels_),
%(metrics.silhouette(data,model.labels_)))
def k_means(data,n_clusters=3,max_iter=1000):
    # Fit a KMeans model on `data` and return it.
    # NOTE(review): assumes `from sklearn.cluster import KMeans` at module
    # level; the import is not visible in this portion of the file.
    model = KMeans(n_clusters=n_clusters, max_iter=max_iter).fit(data)
    return model
|
21,704 | f3c0e4b781856b133a0ea806b19661d6a262fa3a | def list_sort(list_to_sort):
for i in range(len(list_to_sort)):
min_id = i
for j in range(i+1,len(list_to_sort)):
if list_to_sort[min_id] < list_to_sort[j]:
min_id = j
list_to_sort[i], list_to_sort[min_id] = list_to_sort[min_id], list_to_sort[i]
if __name__ == "__main__":
    # Read `n` numbers from stdin and print them sorted ascending.
    list_raw=[]
    i = 0
    n = input("Nhap so luong: ")
    n = int(n)
    while i < n:
        temp = input("Nhap n[{0}]: ".format(i))
        # BUG FIX: convert to int; the original appended the raw input
        # strings, so values sorted lexicographically ("10" < "9").
        list_raw.append(int(temp))
        i=i+1
    list_sort(list_raw)
    print(list_raw)
|
21,705 | 771c9512047a3062a57afe2779ac81f75a4c4354 | from django.conf.urls import patterns, include, url
from django.contrib.auth.decorators import login_required as auth
from django.contrib import admin
admin.autodiscover()
from links.views import LinkListView
from links.views import aboutus, participate, detailnews, sharenews, thanksharing, voteup, votedown
from links.views import UserProfileDetailView
from links.views import UserProfileEditView
# URL routes for the links app.
# NOTE(review): `patterns()` and string view references were removed in
# Django 1.10 — this urlconf targets Django <= 1.9.
urlpatterns = patterns('',
    url(r'^admin/', include(admin.site.urls)),
    url(r'^$', LinkListView.as_view(), name='home'),
    url(r"^aboutus/$", aboutus, name="aboutus"),
    url(r"^participate/$", participate, name="participate"),
    url(r"^detailnews/(.+)/$", detailnews, name="detailnews"),
    url(r"^sharenews/(.+)/$", sharenews, name="sharenews"),
    url(r"^thanksharing/(.+)/$", thanksharing, name="thanksharing"),
    url(r"^vote/up/(.+)/$", voteup, name="voteup"),
    url(r"^vote/down/(.+)/$", votedown, name="votedown"),
    url(r"^login/$", "django.contrib.auth.views.login",
        {"template_name": "login.html"}, name="login"),
    url(r"^logout/$", "django.contrib.auth.views.logout_then_login",
        name="logout"),
    url(r"^accounts/", include("registration.backends.simple.urls")),
    url(r"^users/(?P<slug>\w+)/$", UserProfileDetailView.as_view(),
        name="profile"),
    # NOTE(review): pattern lacks a leading '^' — it matches any URL
    # ending in "edit_profile/"; presumably should be r"^edit_profile/$".
    url(r"edit_profile/$", auth(UserProfileEditView.as_view()),
        name="edit_profile")
)
|
21,706 | ac07c58a3c1be86c8d9c92c2600b0215e5c66d07 | from sd1.dogs import listBreeds
# Entry point: list the available dog breeds via the sd1 package.
if __name__ == "__main__":
    listBreeds() |
21,707 | f1d4b74f79f7ace3e31d75fc2889277fc198db66 | from flask import Flask
from flask_restplus import Api, Resource, fields
from route_tienda import Tienda, TiendaById
from route_productos import Productos, ProductosById, CantidadProductos
# Flask-RESTPlus app wiring the store ("tienda") and product ("productos")
# resources onto their /api/V01/ endpoints.
app = Flask(__name__)
api = Api(app)
api.add_resource(Tienda,"/api/V01/tienda")
api.add_resource(Productos,"/api/V01/productos")
api.add_resource(TiendaById,"/api/V01/tienda/<int:id>")
api.add_resource(ProductosById,"/api/V01/productos/<int:sku>")
api.add_resource(CantidadProductos,"/api/V01/productos/cantidad/<int:sku>")
if __name__ == "__main__":
    # NOTE(review): debug=True must not ship to production.
    app.run(debug = True) |
21,708 | 456fe3b42211ac4f2ff9d91206083b4614bde09c | #!/usr/bin/env python3
import binascii
import os
import time
import struct
import unittest
import bootloader_utils
from bootloader_utils import MessageType, ResponseCode
def open_serial_port():
    """Open the bootloader serial port named by ``$SYNTH_SERIAL_PORT``.

    :returns: a ``bootloader_utils.BootloaderSerial`` instance.
    :raises KeyError: if the environment variable is not set.
    """
    port_name = os.environ.get('SYNTH_SERIAL_PORT')
    if port_name is None:
        raise KeyError('Environment variable SYNTH_SERIAL_PORT not set.')
    return bootloader_utils.BootloaderSerial(port_name)
class BootloaderTestCase(unittest.TestCase):
    """Base class for bootloader serial tests.

    BUG FIX: the original class defined ``__init__`` twice; the second
    definition silently replaced the first, so ``self.blser = None`` was
    never executed and the attribute did not exist before ``setUp`` ran.
    The two bodies are merged into a single ``__init__`` here.
    """
    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        # Serial handle; populated by setUp() before each test.
        self.blser = None
        # Compare bytes/bytearray with a hex-dump failure message.
        self.addTypeEqualityFunc(bytes, self.assertBytesEqual)
        self.addTypeEqualityFunc(bytearray, self.assertBytesEqual)
    def setUp(self):
        self.blser = open_serial_port()
    def tearDown(self):
        self.blser.close()
    def assertBytesEqual(self, a, b, msg=None):
        """Fail with a hex rendering of both values when ``a != b``."""
        if a == b:
            return
        standardMsg = '{} != {}'.format(a.hex(), b.hex())
        self.fail(self._formatMessage(msg, standardMsg))
class TestSerialErrors(BootloaderTestCase):
    """Framing-level errors: CRC, COBS, packet length and inter-byte timeout."""
    def test_packet_with_invalid_crc(self):
        # Valid COBS framing but a deliberately wrong CRC word.
        send = bytes.fromhex('01 ff ff ff ff')
        send = bootloader_utils.cobs_encode(send) + b'\0'
        self.blser.write(send)
        self.blser.assert_response(ResponseCode.PACKET_CRC_FAIL)
    def test_packet_with_invalid_cobs(self):
        # Raw bytes with a 0x00 frame terminator but no COBS encoding.
        send = bytes.fromhex('01 ff ff ff ff 00')
        self.blser.write(send)
        self.blser.assert_response(ResponseCode.COBS_DECODE_FAIL)
    def test_packet_timeout(self):
        # An unterminated partial frame should trip the receive timeout.
        send = bytes.fromhex('05 ff ff ff ff')
        self.blser.write(send)
        # No data should be received before the 50 ms timeout.
        with self.assertRaises(TimeoutError):
            self.blser.read_packet(timeout=0.040)
        # Write some more bytes to reset timeout
        self.blser.write(send)
        # No data should be received before the 50 ms timeout.
        with self.assertRaises(TimeoutError):
            self.blser.read_packet(timeout=0.040)
        # Should receive timeout error packet immediately after 50 ms has
        # elapsed.
        self.blser.assert_response(ResponseCode.TIMEOUT)
    def test_packet_too_short(self):
        # Every frame shorter than the minimum must be rejected.
        MIN_PACKET_LEN = 5
        for packet_len in range(1, MIN_PACKET_LEN):
            send = (b'\01' * packet_len) + b'\x00'
            self.blser.write(send)
            self.blser.assert_response(ResponseCode.PACKET_TOO_SHORT)
    def test_packet_too_long(self):
        # One byte over the maximum payload size must be rejected.
        MAX_DATA_LENGTH = 2048
        packet_data = b'\xcc' * (MAX_DATA_LENGTH + 1)
        self.blser.write_packet(packet_data)
        self.blser.assert_response(ResponseCode.PACKET_TOO_LONG)
class TestMessageErrors(BootloaderTestCase):
    """Message-level validation above the framing layer."""
    def test_invalid_message_type(self):
        # 0xFF is not a defined MessageType.
        send = b'\xFF'
        self.blser.write_packet(send)
        self.blser.assert_response(ResponseCode.INVALID_TYPE)
class TestDeviceInfoMessage(BootloaderTestCase):
    """REQUEST_DEVICE_INFO request/response handling."""
    def test_device_info_extra_bytes_in_request(self):
        # The request takes no payload; one extra byte is an error.
        request = bytes([MessageType.REQUEST_DEVICE_INFO, 0xAB])
        self.blser.write_packet(request)
        self.blser.assert_response(ResponseCode.DATA_TOO_LONG)
    def test_valid_request(self):
        # This will raise an exception if device info is not received
        # successfully.
        self.blser.get_device_info()
class TestEraseFlash(BootloaderTestCase):
    """ERASE message validation: payload length, address range, alignment."""
    def test_message_too_short(self):
        # Requires 9 bytes, but only 8 are sent.
        packet_data = bytes.fromhex('01 00 00 10 00 00 00 18')
        self.blser.write_packet(packet_data)
        self.blser.assert_response(ResponseCode.DATA_TOO_SHORT)
    def test_message_too_long(self):
        # Requires 9 bytes, but 10 are sent.
        packet_data = bytes.fromhex('01 00 00 10 00 00 00 18 00 FF')
        self.blser.write_packet(packet_data)
        self.blser.assert_response(ResponseCode.DATA_TOO_LONG)
    def test_start_address_too_low(self):
        # NOTE(review): app flash appears to span [0x1000, 0x15800) based
        # on the accepted/rejected addresses in this file — confirm.
        self.blser.send_erase(
            start=0x800, end=0x2000, response=ResponseCode.ADDR_OUT_OF_RANGE)
    def test_end_address_too_high(self):
        self.blser.send_erase(
            start=0x1800, end=0x16000, response=ResponseCode.ADDR_OUT_OF_RANGE)
    def test_start_address_bad_alignment(self):
        self.blser.send_erase(
            start=0x2400, end=0x15800, response=ResponseCode.BAD_ALIGNMENT)
    def test_end_address_bad_alignment(self):
        self.blser.send_erase(
            start=0x1800, end=0x12400, response=ResponseCode.BAD_ALIGNMENT)
class TestVerify(BootloaderTestCase):
    """VERIFY message validation: payload length, address range, alignment."""
    def test_message_too_short(self):
        # Requires 13 bytes, but only 12 are sent.
        packet_data = bytes([MessageType.VERIFY] + [1] * 11)
        self.blser.write_packet(packet_data)
        self.blser.assert_response(ResponseCode.DATA_TOO_SHORT)
    def test_message_too_long(self):
        # Requires 13 bytes, but 14 are sent.
        packet_data = bytes([MessageType.VERIFY] + [1] * 13)
        self.blser.write_packet(packet_data)
        self.blser.assert_response(ResponseCode.DATA_TOO_LONG)
    def test_end_address_too_high(self):
        self.blser.send_verify(
            start=0x1800, end=0x1000002, crc=0x0,
            response=ResponseCode.ADDR_OUT_OF_RANGE)
    def test_start_address_bad_alignment(self):
        self.blser.send_verify(
            start=0x1801, end=0x1000000, crc=0x0,
            response=ResponseCode.BAD_ALIGNMENT)
    def test_end_address_bad_alignment(self):
        self.blser.send_verify(
            start=0x1800, end=0x0FFFFFF, crc=0x0,
            response=ResponseCode.BAD_ALIGNMENT)
    def test_start_address_too_high(self):
        # start > end is reported as out-of-range.
        self.blser.send_verify(
            start=0x1800, end=0x1000, crc=0x0,
            response=ResponseCode.ADDR_OUT_OF_RANGE)
class TestEraseVerify(BootloaderTestCase):
    """End-to-end: erase flash, then verify the known erased-state CRC."""
    def test_erase_verify_one_page(self):
        # Erase a section of flash so that it has a known CRC
        self.blser.send_erase(start=0x1000, end=0x1800)
        # Calculate the expected CRC for erased flash
        data = b'\xFF\xFF\xFF\x00' * 0x400
        expected_crc = binascii.crc32(data)
        # Send the verify command
        self.blser.send_verify(
            start=0x1000, end=0x1800, crc=expected_crc)
    def test_erase_verify_all_app_flash(self):
        # Erase a section of flash so that it has a known CRC
        self.blser.send_erase(start=0x1000, end=0x15800)
        # Calculate the expected CRC for erased flash
        data = b'\xFF\xFF\xFF\x00' * (0x14800 >> 1)
        expected_crc = binascii.crc32(data)
        # Send the verify command
        self.blser.send_verify(start=0x1000, end=0x15800, crc=expected_crc)
class TestWriteDoubleWord(BootloaderTestCase):
    """WRITE_DWORD message validation plus an erase/write/verify pass."""
    def test_message_too_short(self):
        # Requires 13 bytes, but only 12 are sent.
        packet_data = bytes([MessageType.WRITE_DWORD] + [1] * 11)
        self.blser.write_packet(packet_data)
        self.blser.assert_response(ResponseCode.DATA_TOO_SHORT)
    def test_message_too_long(self):
        # Requires 13 bytes, but 14 are sent.
        packet_data = bytes([MessageType.WRITE_DWORD] + [1] * 13)
        self.blser.write_packet(packet_data)
        self.blser.assert_response(ResponseCode.DATA_TOO_LONG)
    def test_start_address_too_high(self):
        self.blser.send_write_dword(
            start=0x15800, data=bytes.fromhex('ABCDEF00CAFECA00'),
            response=ResponseCode.ADDR_OUT_OF_RANGE)
    def test_start_address_bad_alignment(self):
        self.blser.send_write_dword(
            start=0x1570A, data=bytes.fromhex('ABCDEF00CAFECA00'),
            response=ResponseCode.BAD_ALIGNMENT)
    def test_start_address_too_low(self):
        self.blser.send_write_dword(
            start=0x0F0C, data=bytes.fromhex('ABCDEF00CAFECA00'),
            response=ResponseCode.ADDR_OUT_OF_RANGE)
    def test_erase_write_double_word_verify(self):
        # Erase a page of flash
        self.blser.send_erase(start=0x2000, end=0x2800)
        # Write a double-word to flash.
        data = bytes.fromhex('ABCDEF00CAFECA00')
        self.blser.send_write_dword(start=0x2000, data=data)
        # Calculate the expected CRC for newly written flash.
        expected_crc = binascii.crc32(data)
        # Send the verify command
        self.blser.send_verify(start=0x2000, end=0x2004, crc=expected_crc)
class TestWriteRow(BootloaderTestCase):
    """WRITE_ROW message validation plus an erase/write/verify pass."""
    def test_message_too_short(self):
        # Requires 517 bytes, but only 516 are sent.
        packet_data = bytes([MessageType.WRITE_ROW] + [1] * 515)
        self.blser.write_packet(packet_data)
        self.blser.assert_response(ResponseCode.DATA_TOO_SHORT)
    def test_message_too_long(self):
        # Requires 517 bytes, but 518 are sent.
        packet_data = bytes([MessageType.WRITE_ROW] + [1] * 517)
        self.blser.write_packet(packet_data)
        self.blser.assert_response(ResponseCode.DATA_TOO_LONG)
    def test_start_address_too_high(self):
        data = b'\x00' * 512
        self.blser.send_write_row(
            start=0x15800, data=data, response=ResponseCode.ADDR_OUT_OF_RANGE)
    def test_start_address_bad_alignment(self):
        data = b'\x00' * 512
        self.blser.send_write_row(
            start=0x01080, data=data, response=ResponseCode.BAD_ALIGNMENT)
    def test_start_address_too_low(self):
        data = b'\x00' * 512
        self.blser.send_write_row(
            start=0x00F00, data=data, response=ResponseCode.ADDR_OUT_OF_RANGE)
    def test_erase_write_row_verify(self):
        # Erase a section of flash
        self.blser.send_erase(start=0x4000, end=0x4800)
        # Write a row to flash
        data = b'\xEF\xCD\xAB\x00' * 128
        self.blser.send_write_row(start=0x4000, data=data)
        # Calculate the expected CRC for newly written flash.
        expected_crc = binascii.crc32(data)
        # Send the verify command
        self.blser.send_verify(
            start=0x4000, end=0x4100, crc=expected_crc)
|
21,709 | 13da5c9ea35fbc10958d4eea0576ca7a700be991 | import sys
import re
def compute_simple(line):
    """Evaluate *line* strictly left-to-right (no operator precedence).

    *line* looks like ``"1 + 2 * 3"``; only ``+`` and ``*`` occur.
    """
    tokens = line.split(' ')
    acc = int(tokens[0])
    idx = 1
    while idx < len(tokens) - 1:
        operator_sym = tokens[idx]
        operand = int(tokens[idx + 1])
        acc = acc + operand if operator_sym == '+' else acc * operand
        idx += 2
    return acc
def compute_complex(line):
    """Evaluate *line* with ``+`` binding tighter than ``*`` (AoC 2020/18b)."""
    addition_re = re.compile(r'(.*?)(\d+)\s\+\s(\d+)(.*)')
    match = addition_re.match(line)
    while match:
        # Collapse the left-most addition, then rescan the rewritten line.
        folded = int(match.group(2)) + int(match.group(3))
        line = '{}{}{}'.format(match.group(1), folded, match.group(4))
        match = addition_re.match(line)
    return compute_simple(line)
def both_parts(filename, part):
    """Sum the evaluated expressions in *filename*.

    Bracketed sub-expressions are collapsed innermost-first via regex
    substitution; the resulting flat expression is evaluated with part 'a'
    (left-to-right) or part 'b' ('+' before '*') semantics.
    """
    bracket_re = re.compile(r'(.*?)\(([^()]+)\)(.*)')
    sum_ = 0
    with open(filename, 'r') as f:
        for line in f:
            line = line.rstrip()
            # Repeatedly evaluate the left-most innermost bracket pair.
            while m := bracket_re.match(line):
                if part == 'a':
                    v = compute_simple(m.group(2))
                else:
                    v = compute_complex(m.group(2))
                line = m.group(1) + '{}'.format(v) + m.group(3)
            if part == 'a':
                sum_ += compute_simple(line)
            else:
                sum_ += compute_complex(line)
    return sum_
def part_a(filename):
    # Advent of Code 2020 day 18, part one.
    return(both_parts(filename, 'a'))
def part_b(filename):
    # Advent of Code 2020 day 18, part two.
    return(both_parts(filename, 'b'))
def entry():
    # CLI: pass 'a' and/or 'b' on argv to run the corresponding part(s).
    if 'a' in sys.argv:
        print(part_a('data/day18.txt'))
    if 'b' in sys.argv:
        print(part_b('data/day18.txt'))
if __name__ == "__main__":
    entry()
|
21,710 | b7236670f039e5b53ca61814a26d53bdc7fc4d3a | #
# See the documentation for more details on how this works
#
# The idea here is you provide a simulation object that overrides specific
# pieces of WPILib, and modifies motors/sensors accordingly depending on the
# state of the simulation. An example of this would be measuring a motor
# moving for a set period of time, and then changing a limit switch to turn
# on after that period of time. This can help you do more complex simulations
# of your robot code without too much extra effort.
#
from pyfrc.physics import motor_cfgs, tankmodel
from pyfrc.physics.units import units
class PhysicsEngine(object):
    """pyfrc physics engine simulating the drivetrain and (roughly) the elevator."""
    def __init__(self, physics_controller):
        """
        :param physics_controller: `pyfrc.physics.core.PhysicsInterface` object
                                   to communicate simulation effects to
        """
        self.physics_controller = physics_controller
        self.position = 0
        # Change these parameters to fit your robot!
        bumper_width = 3.25 * units.inch
        # fmt: off
        self.drivetrain = tankmodel.TankModel.theory(
            motor_cfgs.MOTOR_CFG_CIM,           # motor configuration
            110 * units.lbs,                    # robot mass
            10.71,                              # drivetrain gear ratio
            3,                                  # motors per side
            23 * units.inch,                    # robot wheelbase
            28 * units.inch + bumper_width * 2, # robot width
            32 * units.inch + bumper_width * 2, # robot length
            6 * units.inch,                     # wheel diameter
        )
        # fmt: on
    def update_sim(self, hal_data, now, tm_diff):
        """
        Called when the simulation parameters for the program need to be
        updated.
        :param hal_data: dict mirroring the robot hardware state
        :param now: The current time as a float
        :param tm_diff: The amount of time that has passed since the last
                        time that this function was called
        """
        try:
            _ = hal_data["CAN"][3]
        except (KeyError, IndexError):
            # talon must not be initialized yet
            return
        # Simulate the drivetrain
        l_motor = hal_data["CAN"][1]["value"]
        r_motor = hal_data["CAN"][4]["value"]
        x, y, angle = self.drivetrain.get_distance(l_motor, r_motor, tm_diff)
        self.physics_controller.distance_drive(x, y, angle)
        # Sorta simulate the elevator
        # NOTE(review): 4096 * 4 suggests a 4096-count quadrature encoder;
        # confirm against the real elevator gearing.
        talon_data = hal_data["CAN"][7]
        speed = int(4096 * 4 * talon_data["value"] * tm_diff)
        talon_data["quad_position"] += speed
        talon_data["quad_velocity"] = speed
|
21,711 | 8da957c4d66a171fefa650046898cd9d6a6fda88 | # coding=utf-8
# Scratch script demonstrating Python basics (imports, types, tuples,
# lists, dicts, sets). Comments translated to English.
import myorg
from myorg import a
print(1+1)
print("xxx".__add__(str(1)))
print(__name__)
a=1
b=2
print(dir(myorg.a))
print(__builtins__)
#print(dir())
#print(__package__)
#print(__path__)
#print(__file__)
# Types and object identity
c=2
d=2
print(type(c))
print(id(c),id(d))
# Swapping two variables
e=1
f=2
e,f=f,e
print(e,f)
# Tuple unpacking
# NOTE(review): this rebinds the name `id`, shadowing the builtin id()
# for the rest of the script.
f=(1,"xxx",44)
(id,name,age)=f
print(id,name,age)
print(type(f))
# Lists
g=[1,2,3,4]
g[3]=2 # replace the value at index 3
print(g)
print(g[0:2])
# Dict (map)
h={"name":"xxx","age":12}
print(type(h))
print(h)
print(name)
# Sets: elements cannot be modified, only added; sets are unordered.
# Note: {} creates an empty dict — use set() for an empty set.
j={1,2,3,4}
j.add(8)
print(j)
# Deduplicating a list via set()
h=[1,3,5,3,2]
print(set(h)) |
21,712 | 5c6e9a6ac8bbe5441fccb752a4e9983d83108d31 | distancia = float(input("Distancia total percorrida"))
cg = float(input("combustivel gasto"))
# Fuel efficiency: distance travelled divided by fuel consumed (km/l),
# rounded to three decimal places.
texto = ("km/l")
# NOTE(review): no guard against cg == 0 (ZeroDivisionError).
total = distancia / cg
print(round(total, 3))
print(texto) |
21,713 | 4e85b35ac869b7757fa6063234f20510768f5373 | """
Producer/consumer smoke test for the shared INBOUND_QUEUE helper.
"""
from time import sleep
from helpers.logger import LOGGER
from helpers.queues import INBOUND_QUEUE
def test_inbound_put():
    """Push 600 items onto the inbound queue, one every 0.5 s."""
    count = 0
    while count < 600:
        count += 1
        INBOUND_QUEUE.put(f'Another item - {count}')
        sleep(.5)
def test_inbound_get():
    """Drain 600 items from the inbound queue, logging each one.

    Blocks on get() until an item is available.
    """
    count = 0
    while count < 600:
        count += 1
        next_item = INBOUND_QUEUE.get()
        LOGGER.info(f'Next Item: {next_item}')
|
21,714 | a5b507d413a6737bb568ee0c2b5740074d996fb2 | # Copyright 2016 Google Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Check if all public modules are included in our docs."""
from __future__ import print_function
import os
import sys
import warnings
from sphinx.ext.intersphinx import fetch_inventory
# Repository layout: this script lives one level below the repo root.
BASE_DIR = os.path.abspath(
    os.path.join(os.path.dirname(__file__), '..'))
DOCS_DIR = os.path.join(BASE_DIR, 'docs')
# Sphinx-built inventory of documented objects, relative to DOCS_DIR.
OBJECT_INVENTORY_RELPATH = os.path.join('_build', 'html', 'objects.inv')
# Module filename prefixes considered private/non-public.
IGNORED_PREFIXES = ('test_', '_')
# Public modules deliberately excluded from the documentation check.
IGNORED_MODULES = frozenset([
    'gcloud.__init__',
    'gcloud.bigquery.__init__',
    'gcloud.bigtable.__init__',
    'gcloud.datastore.__init__',
    'gcloud.dns.__init__',
    'gcloud.iterator',
    'gcloud.logging.__init__',
    'gcloud.logging.handlers.__init__',
    'gcloud.logging.handlers.transports.__init__',
    'gcloud.monitoring.__init__',
    'gcloud.pubsub.__init__',
    'gcloud.resource_manager.__init__',
    'gcloud.storage.__init__',
    'gcloud.streaming.__init__',
    'gcloud.streaming.buffered_stream',
    'gcloud.streaming.exceptions',
    'gcloud.streaming.http_wrapper',
    'gcloud.streaming.stream_slice',
    'gcloud.streaming.transfer',
    'gcloud.streaming.util',
    'gcloud.translate.__init__',
])
class SphinxApp(object):
    """Mock app to interact with Sphinx helpers (fetch_inventory)."""
    warn = warnings.warn
    srcdir = DOCS_DIR
def is_valid_module(filename):
    """Determine whether *filename* names a public Python module.

    Assumes *filename* is just the end of a path (i.e. does not contain
    ``os.path.sep``).

    :type filename: string
    :param filename: The name of a file.

    :rtype: bool
    :returns: Flag indicating if the filename is valid.
    """
    if not filename.endswith('.py'):
        return False
    if filename == '__init__.py':
        return True
    # str.startswith accepts a tuple of prefixes.
    return not filename.startswith(IGNORED_PREFIXES)
def get_public_modules(path, base_package=None):
    """Get list of all public modules relative to a path.

    :type path: string
    :param path: The path containing the python modules.

    :type base_package: string
    :param base_package: (Optional) A package to prepend in
                         front of the path.

    :rtype: list
    :returns: List of all modules found.
    """
    result = []
    for subdir, _, files in os.walk(path):
        # Skip folders that start with _.
        if any([part.startswith('_')
                for part in subdir.split(os.path.sep)]):
            continue
        # NOTE(review): str.split misbehaves if `path` occurs twice in
        # `subdir`; os.path.relpath would be more robust — confirm.
        _, rel_dir = subdir.split(path)
        rel_dir = rel_dir.lstrip(os.path.sep)
        for filename in files:
            if is_valid_module(filename):
                mod_name, _ = os.path.splitext(filename)
                rel_path = os.path.join(rel_dir, mod_name)
                if base_package is not None:
                    rel_path = os.path.join(base_package, rel_path)
                # Turn into a Python module rather than a file path.
                result.append(rel_path.replace(os.path.sep, '.'))
    return result
def main():
    """Main script to verify modules included.

    Exits non-zero if Sphinx documents an unexpected module, or if a
    public module (not in IGNORED_MODULES) is undocumented.
    """
    # fetch_inventory reads the locally built objects.inv; the URI is
    # unused for a local file, hence the empty string.
    mock_uri = ''
    inventory = fetch_inventory(SphinxApp, mock_uri,
                                OBJECT_INVENTORY_RELPATH)
    sphinx_mods = set(inventory['py:module'].keys())
    library_dir = os.path.join(BASE_DIR, 'gcloud')
    public_mods = get_public_modules(library_dir,
                                     base_package='gcloud')
    public_mods = set(public_mods)
    # Sphinx referencing a module we don't consider public is an error.
    if not sphinx_mods <= public_mods:
        unexpected_mods = sphinx_mods - public_mods
        message = ['Unexpected error. There were modules referenced by '
                   'Sphinx that are not among the public modules.']
        message.extend(['- %s' % (mod,) for mod in unexpected_mods])
        print('\n'.join(message), file=sys.stderr)
        sys.exit(1)
    undocumented_mods = public_mods - sphinx_mods
    # Remove ignored modules.
    undocumented_mods -= IGNORED_MODULES
    if undocumented_mods:
        message_parts = ['Found undocumented public modules:']
        message_parts.extend(['- ' + mod_name
                              for mod_name in sorted(undocumented_mods)])
        print('\n'.join(message_parts), file=sys.stderr)
        sys.exit(1)
if __name__ == '__main__':
    main()
|
21,715 | d1c08d6a6a2e5b6b3441d0bdf4b9c28b61f0ef80 | """Setup API for Peewee ORM models."""
import random
import string
from pathlib import Path
from muffin import ResponseText
from muffin_rest import API
from muffin_rest.peewee import PWRESTHandler
from .models import Pet
from .schemas import PetSchema
api = API(version="0.0.0", title="PetStore API", description="Example Petstore API")
@api.authorization
async def authorization(request):
    """Setup authorization for the whole API.

    Can be redefined for an endpoint.

    ---
    # OpenAPI Authorization Specs
    Auth:
      type: http
      scheme: bearer
      description: Use any value
    """
    # Decode tokens, load/check users and etc
    # ...
    # in the example we just ensure that the authorization header exists
    return request.headers.get("authorization", "")
@api.route("/token")
async def token(request) -> ResponseText:
    """A simple endpoint to get the current API token.

    By default authorization is only available for class-based endpoints,
    so this endpoint supports anonymous access.

    If you would like to use API authorization for the simple endpoints, you have to
    call it explicitly:

        res = await api.auth(request)
        if not res:
            ...

    ---
    # OpenAPI Specs
    # Mark the method as anonymous
    get:
      security: []
    """
    # Random 42-char token; S311 suppressed since this is demo-only.
    return ResponseText(
        "".join(random.choices(string.ascii_uppercase + string.digits, k=42))  # noqa: S311
    )
@api.route
class Pets(PWRESTHandler):
    """Everything about your Pets."""
    class Meta:
        """Tune the endpoint."""
        # ORM Model
        model = Pet
        # Schema for serialization (it can be created automatically)
        Schema = PetSchema
        # Pagination
        limit = 10
        # Available sort params
        sorting = ("id", {"default": "desc"}), "name"
        # Available filters
        filters = "status", "category"
    @PWRESTHandler.route("/pet/{id}/uploadImage", methods="post")
    async def upload_image(self, request, *, resource: Pet):
        """Uploads an image.

        ---
        requestBody:
          required: true
          content:
            multipart/form-data:
              schema:
                type: object
                properties:
                  file:
                    type: string
                    format: binary
        """
        # Save the uploaded file next to this module and store its name.
        formdata = await request.form(upload_to=Path(__file__).parent)
        resource.image = formdata["file"].name
        resource.save()
        return resource.image
|
21,716 | 058394ad050443be2172ce0b2c04b178913c536a | #!/usr/bin/env python
# Python 2 script (xrange, print statement): empirically measures the bias
# of gbrand.badShuffle by tallying where each index lands over C trials.
# A fair shuffle would print values near 1.0 in every cell.
import gbrand
def main():
    C = 1000000
    N = 5
    grid = []
    for i in xrange( N ):
        grid.append( [ 0 ] * N )
    for i in xrange( C ):
        # NOTE(review): `list` shadows the builtin of the same name.
        list = gbrand.badShuffle( N )
        for j in xrange( N ):
            grid[j][list[j]] += 1
    for i in xrange( len( grid ) ):
        for j in xrange( len( grid[i] ) ):
            # Normalize counts to an expected value of 1.0 and truncate
            # to two decimal places.
            grid[i][j] *= float( N ) / C
            grid[i][j] = int( grid[i][j] * 100 ) / 100.0
        print "%d: %r" % (i, grid[i] )
main()
|
21,717 | a496187b545a7156f2ae20a61ac4341b07b0f698 | class GridIsFullException(Exception):
    """Raised when an operation needs a free cell but the grid is full."""
    pass
|
21,718 | 237d53ff6f287da34894f4c9283e758f2e27d562 | import matplotlib.pyplot as plt
import pandas as pd
import numpy as np
# Quasi-live monitor for LAr environment data: re-reads the HDF5 log each
# pass and redraws pressure, flow and temperature plots.
for i in range(0, 1000):
    df = pd.read_hdf("LAr_Env.h5", key='data')
    time = np.array(df['Time'], dtype=float)
    # Seconds -> minutes for the x axis.
    time = time/60
    plt.figure(0)
    press = np.array(df['Pressure (arb)'], dtype=float)
    plt.plot(time, press, color='blue')
    plt.xlabel("Time (min)")
    plt.ylabel("Pressure (arb)")
    plt.figure(2)
    flow = np.array(df['Flow (L)'], dtype=float)
    # NOTE(review): converts integrated flow to liquid volume; the 784 and
    # 1000 factors are unexplained calibration constants — confirm.
    flow = 1000*(flow/784)
    print("Liquid Volume: ", flow[-1])
    plt.plot(time, flow)
    plt.figure(3)
    temp = np.array(df['Temperature (K)'], dtype=float)
    plt.ylabel("Temperature (K)")
    plt.xlabel("Time (min)")
    plt.plot(time, df['Temperature (K)'], color='blue')
    #plt.pause(30)
    # NOTE(review): a blocking show() inside the loop stalls each pass
    # until the windows are closed; plt.pause() was likely intended.
    plt.show()
'''
plt.figure(2)
print(df.columns)
press = np.array(df['Pressure (arb)'])
print(press[-40:])
plt.plot(df['Pressure (arb)'])
'''
plt.show()
|
21,719 | 4c08763dbdcfe46971af66b8bfec474be6c0d27c | import torch
from utils.config import *
from utils.data_utils import *
from utils.evaluate_utils import *
from torch.utils.data import SequentialSampler,DataLoader
import logging
import random
from models.model import transformer_model
from transformers import AutoTokenizer
from utils.io_utils import write_results,convert_to_slide_format
from tqdm import tqdm
def dev(model, dev_dataloader):
    """Run *model* over *dev_dataloader* and collect predictions.

    :returns: tuple (all_pred_labels, all_gold_labels, all_label_lens)
              with padded positions removed via fix_padding.
    """
    all_pred_labels, all_gold_labels, all_label_lens= [], [],[]
    pbar = tqdm(enumerate(dev_dataloader), total=len(dev_dataloader), ncols=80)
    for i,batch in pbar:
        sents_tokens, sents_len, tokens_start, labels, sents_other_feats = \
            batch['sents_tokens'].cuda(), batch['sents_len'].cuda(), batch['sents_tokens_start'].cuda(), batch['sents_label'].cuda(), batch[
            'sents_other_feats'].cuda()
        # Telling the model not to compute or store gradients, saving memory and speeding up validation
        with torch.no_grad():
            output = model(sents_tokens, tokens_start, sents_other_feats, sents_len, labels)
            _, pred_labels = output
        pred_labels = pred_labels.detach().cpu().numpy()
        labels = labels.detach().cpu().numpy()
        pred_labels, labels = fix_padding(pred_labels, labels, sents_len)
        all_pred_labels.extend(pred_labels)
        all_gold_labels.extend(labels)
        all_label_lens.extend(sents_len.detach().cpu().numpy())
    return all_pred_labels, all_gold_labels,all_label_lens
if __name__=="__main__":
    #Logger Setting
    logging.basicConfig(format='%(asctime)s - %(levelname)s - %(name)s - %(message)s',
                        datefmt='%m/%d/%Y %H:%M:%S',
                        level=logging.INFO)
    logger = logging.getLogger(__name__)
    fileHandler = logging.FileHandler(os.path.join(args.log_file))
    logger.addHandler(fileHandler)
    logger.info(args)
    # CUDA setup
    device = torch.device("cuda" if torch.cuda.is_available() and not args.no_cuda else "cpu")
    n_gpu = torch.cuda.device_count()
    #Set Random Seed
    random.seed(args.seed)
    np.random.seed(args.seed)
    torch.manual_seed(args.seed)
    if n_gpu > 0:
        torch.cuda.manual_seed_all(args.seed)
    torch.backends.cudnn.deterministic = True
    torch.backends.cudnn.benchmark = False
    #Loading data
    # Ensemble evaluation: average the per-model predictions from the
    # XLNet and XLM-R checkpoints, then score against the gold labels.
    all_pred_labels, gold_labels, label_lens=[],[],None
    if args.bert_test_model_list is not None:
        tokenizer = AutoTokenizer.from_pretrained('xlnet-large-cased')
        dev_data, dev_words_id, dev_words = read_data(args.dev_file, tokenizer)
        dev_dataset = Data(dev_data, tokenizer)
        dev_dataloader = DataLoader(dev_dataset, sampler=SequentialSampler(dev_dataset),
                                    collate_fn=dev_dataset.collate_fn, batch_size=args.batch_size, num_workers=8)
        logger.info('Data Loaded')
        for i, model_name in enumerate(args.bert_test_model_list):
            model=transformer_model('xlnet-large-cased', device, args.dropout_prob).to(device)
            model.load_state_dict(torch.load(os.path.join(args.model_save_dir, model_name)))
            model.eval()
            pred_labels, gold_labels, label_lens= dev(model,dev_dataloader)
            all_pred_labels.append(pred_labels)
    if args.xlm_test_model_list is not None:
        tokenizer = AutoTokenizer.from_pretrained('xlm-roberta-large')
        dev_data, dev_words_id, dev_words = read_data(args.dev_file, tokenizer)
        dev_dataset = Data(dev_data, tokenizer)
        dev_dataloader = DataLoader(dev_dataset, sampler=SequentialSampler(dev_dataset),
                                    collate_fn=dev_dataset.collate_fn, batch_size=args.batch_size, num_workers=8)
        logger.info('Data Loaded')
        for i, model_name in enumerate(args.xlm_test_model_list):
            model=transformer_model('xlm-roberta-large', device, args.dropout_prob).to(device)
            model.load_state_dict(torch.load(os.path.join(args.model_save_dir, model_name)))
            model.eval()
            pred_labels, gold_labels, label_lens= dev(model,dev_dataloader)
            all_pred_labels.append(pred_labels)
    # Average the ensembled predictions and compute the match metrics.
    mean_pred_label=np.mean(all_pred_labels,axis=0)
    mean_pred_label, _ = fix_padding(mean_pred_label,mean_pred_label,label_lens)
    score_m = match_m(mean_pred_label, gold_labels)
    score_avg = np.mean(list(score_m.values()))
    logger.info("Evaluation Accuracy: ")
    logger.info("1:{:0.4f} 5:{:0.4f} 10:{:0.4f}".format(score_m[1], score_m[5], score_m[10]))
    logger.info("Average: {:0.4f}".format(score_avg))
|
21,720 | f4b3a08821509c2c5145e9856eb12e3c759a1053 | # Method overriding in inheritance:-
# When a derived class defines a method with the same name as one in its
# base class, the derived-class method replaces (overrides) the base-class
# one on derived instances. This is method *overriding* (the original
# comment mistakenly called it "overloading").
# Example:-
class A:
    def Data(self):
        print('class A member called')
class B(A):
    def Data(self):
        print('class B member called')
obj=B()
obj.Data()
print('\n\n\n')
#solution:- use super() to invoke the base-class method as well
class A:
    def Data(self):
        print('class A member called')
class B(A):
    def Data(self):
        super(B,self).Data()
        print('class B member called')
obj=B()
obj.Data() |
21,721 | 282feb0a5ce3658f1c4f8202104a37ca02d2883c | def run():
LIMITE=1000
contador=0
potencia_2= 2**contador
while(potencia_2<LIMITE):
print('2 elevado a la '+ str(contador) + ' es '+str(potencia_2))
contador=contador+1
potencia_2=2**contador
if __name__== '__main__':
run() |
21,722 | 203fc5555b14758e15cd1f0ecb4f0ee18c28a189 | # Generated by Django 2.1.1 on 2018-10-04 17:52
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('index', '0003_addr_handler'),
]
operations = [
migrations.AddField(
model_name='addr_handler',
name='basic_tel',
field=models.CharField(default=None, max_length=20),
preserve_default=False,
),
]
|
21,723 | 89d2ee7dbdcd8869762c287572c5c79f04d069b2 | #!/usr/bin/env python
import roslib; roslib.load_manifest('grasp_pipeline')
import sys
import rospy
from moveit_commander import RobotCommander, PlanningSceneInterface, roscpp_initialize, roscpp_shutdown
from geometry_msgs.msg import PoseStamped
from gazebo_msgs.msg import ModelStates
from grasp_pipeline.srv import *
import roslib.packages as rp
pkg_path = rp.get_pkg_dir('grasp_pipeline')
#To do: change this file to a ros server to load different objects.
class ManageSceneInMoveit:
    """ROS node offering services to populate and clean the MoveIt planning
    scene: a table collision box plus the target object's mesh."""

    def __init__(self):
        rospy.init_node('manage_moveit_scene_node')
        # True when running against the Gazebo simulation instead of the real robot.
        self.use_sim = rospy.get_param('~use_sim', False)
        self.set_table_box_properties()
        # The table box is inserted only once; this flag prevents duplicates
        # when create_moveit_scene is called repeatedly.
        self.table_box_exist = False

    def set_table_box_properties(self):
        """Define the pose and dimensions of the table collision box.

        Simulation uses the full-size table model; the real setup uses a thin
        slab placed just above the table surface.
        """
        if self.use_sim:
            self.box_len_x = 0.9125
            self.box_len_y = 0.61
            self.box_len_z = 0.59
            self.box_pose = PoseStamped()
            self.box_pose.header.frame_id = 'world'
            self.box_pose.pose.position.x = 0.
            self.box_pose.pose.position.y = -0.8
            # Box center sits at half the box height above the ground plane.
            self.box_pose.pose.position.z = 0.295
            self.box_pose.pose.orientation.x = 0.
            self.box_pose.pose.orientation.y = 0.
            self.box_pose.pose.orientation.z = 0.
            self.box_pose.pose.orientation.w = 1.
        else:
            self.box_pose = PoseStamped()
            self.box_pose.header.frame_id = 'world'
            # Change table collision box model position for the new robot arm pose
            self.box_pose.pose.position.x = 0.
            self.box_pose.pose.position.y = -0.88
            # Thin slab placed on top of the 0.59 m high table.
            self.box_pose.pose.position.z = 0.59 + 0.015
            self.box_pose.pose.orientation.x = 0.
            self.box_pose.pose.orientation.y = 0.
            self.box_pose.pose.orientation.z = 0.
            self.box_pose.pose.orientation.w = 1.
            self.box_len_x = 1.4
            self.box_len_y = 1.4
            self.box_len_z = 0.015

    def handle_create_moveit_scene(self, req):
        """Service callback: add the table box (once) and the object mesh.

        :param req: ManageMoveitScene request carrying object_mesh_path and
            object_pose_world.
        :return: ManageMoveitSceneResponse with success=True.
        """
        scene = PlanningSceneInterface()
        rospy.sleep(0.5)
        # Add a box for the table.
        if not self.table_box_exist:
            scene.add_box('table_box', self.box_pose,
                          (self.box_len_x, self.box_len_y, self.box_len_z))
            self.table_box_exist = True
        # Fixed: Python-2-only `print req.object_mesh_path` replaced with the
        # call form, which behaves identically on Python 2 and 3 for one argument.
        print(req.object_mesh_path)
        scene.add_mesh('obj_mesh', req.object_pose_world, req.object_mesh_path)
        rospy.sleep(0.5)
        response = ManageMoveitSceneResponse()
        response.success = True
        return response

    def create_moveit_scene_server(self):
        """Advertise the create_moveit_scene service."""
        rospy.Service('create_moveit_scene', ManageMoveitScene, self.handle_create_moveit_scene)
        rospy.loginfo('Service create_scene:')
        rospy.loginfo('Ready to create the moveit_scene.')

    def handle_clean_moveit_scene(self, req):
        """Service callback: remove the object mesh (the table box is kept)."""
        scene = PlanningSceneInterface()
        rospy.sleep(1)
        # clean the scene
        #scene.remove_world_object('table_box')
        scene.remove_world_object('obj_mesh')
        response = ManageMoveitSceneResponse()
        response.success = True
        return response

    def clean_moveit_scene_server(self):
        """Advertise the clean_moveit_scene service."""
        rospy.Service('clean_moveit_scene', ManageMoveitScene, self.handle_clean_moveit_scene)
        rospy.loginfo('Service clean_scene:')
        rospy.loginfo('Ready to clean the moveit_scene.')
if __name__=='__main__':
    # Start both scene-management services and keep the node alive.
    ms = ManageSceneInMoveit()
    ms.create_moveit_scene_server()
    ms.clean_moveit_scene_server()
    rospy.spin()
|
21,724 | 555a84b29de8053c96f8346ad5f374ad49bcf9b7 | __author__ = 'Andy'
#
import webapp2
from google.appengine.api import users
import jinja2
import os
import logging
template_dir = os.path.join(os.path.dirname(__file__), '../templates') #path to template_dir, __file__(dir of cur file)
#set up jinja_env w/ template dir
jinja_env = jinja2.Environment(loader=jinja2.FileSystemLoader(template_dir), autoescape=True)
class Handler(webapp2.RequestHandler):
    """Base request handler providing jinja2 template-rendering helpers."""
    def write(self, *a, **kw):
        # Write raw output to the HTTP response.
        self.response.out.write(*a, **kw)
    def render_str(self, template, **params):
        # Render the named template with the given parameters; return the string.
        t = jinja_env.get_template(template)
        return t.render(params)
    def render(self, template, **kw):
        # Render the template and write it straight to the response.
        self.write(self.render_str(template, **kw))
class MainHandler(Handler):
    # Serves the map page at the site root.
    def get(self):
        self.render("map.html")
class AboutHandler(Handler):
    # Serves the static "about" page.
    def get(self):
        self.render("about.html")
class testingHandler(Handler):
    # Experiments with Google Accounts sign-in: builds an HTML greeting that
    # links to sign-out for logged-in users and to sign-in otherwise.
    def get(self):
        user = users.get_current_user()
        if user:
            greeting = ('Welcome, %s! (<a href="%s">sign out</a>)' %
                        (user.nickname(), users.create_logout_url('/')))
        else:
            greeting = ('<a href="%s">Sign in or register</a>.' %
                        users.create_login_url('/'))
self.response.out.write("<html><body>%s</body></html>" % greeting) |
21,725 | d7c7457052ebeec7398a486ea588982681dfcc24 | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
@author: hanabillings
"""
"""
Calculates GC content of FASTA formatted sequence data.
Returns the ID of the sequence and its GC content
"""
import re
def getGCcontent(fileLocation=None):
    """Report the FASTA record with the highest GC content.

    Reads Rosalind-style FASTA data (">Rosalind_xxxx" headers), computes the
    GC percentage of each sequence, prints the winning ID and its GC%, and
    returns them.

    :param fileLocation: path to the data file; when None, prompts on stdin
        (preserves the original interactive behaviour).
    :return: (sequence_id, gc_percent) tuple, or None when no records found.
    """
    if fileLocation is None:
        # /Users/hanabillings/Desktop/rosalind.txt on my computer lol
        fileLocation = input("please input your data file location")
    # Close the file deterministically (the original leaked the handle).
    with open(fileLocation) as data_file:
        text = data_file.read().replace('\n', '')
    ids = re.findall(r'(Rosalind[_]\d+)', text)
    # A sequence is the run of word characters after each ID, i.e. everything
    # up to the '>' of the next FASTA header.
    sequences = re.findall(r'Rosalind[_]\d+(\w+)', text)
    GCcontentDict = {}
    # BUG FIX: the original indexed the ID list with a counter that was never
    # incremented, so every sequence was attributed to the first record.
    for seq_id, sequence in zip(ids, sequences):
        gc_count = sum(1 for nu in sequence if nu in 'CG')
        GCcontentDict[seq_id] = (gc_count / len(sequence)) * 100
    if not GCcontentDict:
        # No records: nothing to report (original crashed on max() here).
        return None
    maxGC = max(GCcontentDict, key=GCcontentDict.get)
    print(maxGC, "%.6f" % GCcontentDict[maxGC], "%")
    return maxGC, GCcontentDict[maxGC]
getGCcontent()
|
21,726 | 5ede08b600af0196a5ea147576bec39373e1bc6c | from pprint import pprint
def parse_ranges(tekst):
    """Yield the integers described by a comma-separated ranges string.

    Supported pieces: "N" (single value), "A-B" (inclusive range) and
    "N->anything" (only N is used; the arrow target is ignored).
    """
    for piece in tekst.split(","):
        if "->" in piece:
            # Arrow entries contribute only their left-hand number.
            start, _, _ = piece.partition("->")
            yield int(start)
        elif "-" in piece:
            bounds = piece.split("-")
            yield from range(int(bounds[0]), int(bounds[1]) + 1)
        else:
            yield int(piece)
if __name__ == "__main__":
    # Demo: show the expanded integer lists for a few sample range strings.
    pprint(list(parse_ranges('1-2,4-4,8-10')))
    pprint(list(parse_ranges('0-0, 4-8, 20-20, 43-45')))
    pprint(list(parse_ranges('0,4-8,20,43-45')))
pprint(list(parse_ranges('0, 4-8, 20->exit, 43-45'))) |
21,727 | 1cc51355cb50f632ae010776439528356128b337 | """Validation exceptions and managers."""
import pytz
import traceback
import contextlib
import datetime
import iso639
import widetime
def make_validator(validator=None):
    """Return *validator* when truthy, otherwise a fresh ValidationManager."""
    if validator:
        return validator
    return ValidationManager()
def valid_color(color):
    """Validate a 6-character hex RGB color (case/whitespace tolerant).

    :return: True when `color` normalizes to six characters that parse as
        three hexadecimal byte pairs, False otherwise.
    """
    color = str(color).lower().strip()
    if len(color) != 6:
        return False
    # Check each of the three two-character channel pairs parses as hex.
    for pair in (color[i:i + 2] for i in range(0, 5, 2)):
        try:
            int(pair, 16)
        # Fixed: Python-2-only "except ValueError, e" replaced with the form
        # that is valid on Python 2.6+ and 3; the bound exception was unused.
        except ValueError:
            return False
    return True
def valid_url(url):
    """Very loose URL check: accept anything beginning with 'http'."""
    return url[:4] == 'http'
def valid_tz(tz):
    # Unlike the boolean valid_* helpers this returns the pytz timezone object
    # and lets pytz raise for unknown names (callers rely on the exception).
    return pytz.timezone(tz)
def valid_language(lang):
    # Returns the iso639 language entry; behaviour for unknown codes depends
    # on the iso639 package (return value vs exception) -- verify with callers.
    return iso639.get_language(lang)
def valid_int(value, vmin=None, vmax=None, empty=False):
    """Validate that `value` parses as an int within optional bounds.

    :param vmin: inclusive lower bound, or None for unbounded.
    :param vmax: inclusive upper bound, or None for unbounded.
    :param empty: when True, the empty string is accepted as valid.
    """
    # Allow empty string if empty=True
    if value == '' and empty:
        return True
    try:
        value = int(value)
    # Fixed: Python-2-only except syntax; the bound exception was unused.
    except ValueError:
        return False
    if vmin is not None and value < vmin:
        return False
    if vmax is not None and value > vmax:
        return False
    return True
def valid_float(value, vmin=None, vmax=None, empty=False):
    """Validate that `value` parses as a float within optional bounds.

    :param vmin: inclusive lower bound, or None for unbounded.
    :param vmax: inclusive upper bound, or None for unbounded.
    :param empty: when True, the empty string is accepted as valid.
    """
    # Allow empty string if empty=True
    if value == '' and empty:
        return True
    try:
        value = float(value)
    # Fixed: Python-2-only except syntax; the bound exception was unused.
    except ValueError:
        return False
    if vmin is not None and value < vmin:
        return False
    if vmax is not None and value > vmax:
        return False
    return True
def valid_in(value, within):
    """Membership check: True when `value` is contained in `within`."""
    found = value in within
    return found
def valid_bool(value, empty=False):
    """Validate that `value` is int-parseable as a 0/1 boolean flag.

    :param empty: when True, the empty string is accepted as valid.
    """
    if value == '' and empty:
        return True
    try:
        value = int(value)
    # Fixed: Python-2-only except syntax; the bound exception was unused.
    except ValueError:
        return False
    return value in [0, 1]
def valid_date(value, empty=False):
    """Validate a YYYYMMDD date string.

    :param empty: when True, falsy values (e.g. '') are accepted as valid.
    """
    if not value and empty:
        return True
    if len(value) != 8:
        return False
    try:
        datetime.datetime.strptime(value, '%Y%m%d')
    # Fixed: Python-2-only except syntax; the bound exception was unused.
    except ValueError:
        return False
    return True
def valid_widetime(value):
    """Validate an HH:MM:SS "wide time" string (hours may exceed 23)."""
    if len(value.split(':')) != 3:
        return False
    # Total length must be 7 or 8 characters (H:MM:SS padding rules).
    if not (7 <= len(value) <= 8):
        return False
    try:
        widetime.WideTime.from_string(value)
    except ValueError:
        return False
    return True
def valid_point(point):
    """Validate a (lon, lat) pair: lon in [-180, 180], lat in [-90, 90]."""
    if len(point) != 2:
        return False
    lon, lat = point
    return (-180 <= lon <= 180) and (-90 <= lat <= 90)
##### Validation Exceptions #####
class ValidationException(Exception):
    """Base class for validation exceptions; records the validated source."""
    def __init__(self, message, source=None):
        super(ValidationException, self).__init__(message)
        # The object/context that was being validated when raised, if known.
        self.source = source
class ValidationError(ValidationException):
    """Base validation error."""
    pass
class ValidationWarning(ValidationException):
    """Validation warning."""
    pass
class ValidationInfo(ValidationException):
    """Validation info."""
    pass
##### Validation Managers #####
class ValidationManager(object):
    """Collects validation exceptions and tracks the object being validated.

    Used as a context manager: ``with manager(source): ...``. The base class
    only clears the bound source on exit; see ValidationReport for a variant
    that records and suppresses exceptions.
    """
    def __init__(self):
        # List of exceptions generated
        self.exceptions = []
        # Source of ValidationError
        self.source = None
    def __call__(self, source=None):
        # Bind the current validation source and return self for `with`.
        self.source = source
        return self
    def __enter__(self):
        return self
    def __exit__(self, etype, value, traceback):
        # Clears the source; returns None so any exception propagates.
        self.source = None
        return
    def report(self):
        # NOTE: Python 2 print statements and `e.message` (removed in Py3).
        print "Validation report:"
        for e in self.exceptions:
            print "%s: %s"%(e.source, e.message)
class ValidationReport(ValidationManager):
    """Manager that records ValidationException/AssertionError raised inside
    its context instead of letting them propagate."""
    def __exit__(self, etype, value, traceback):
        # Unset the source from this context.
        s = self.source
        self.source = None
        if not etype:
            return
        # Assertion failures are promoted to validation errors.
        if issubclass(etype, AssertionError):
            etype = ValidationError
        if issubclass(etype, ValidationException):
            # Record and suppress (returning True stops propagation).
            # Any other exception type falls through and propagates normally.
            self.exceptions.append(etype(value, source=s))
            return True
|
21,728 | 95f403c445cfaf01c3ef4458e0b52d5261db2aaa | class Set:
def __init__(self, n):
self.__parent = [0 for i in range(n+1)]
self.__rank = [0 for i in range(n+1)]
def MakeSet(self ,i):
self.__parent[i] = i
self.__rank[i] = 0
def Find(self, i):
if i != self.__parent[i]:
self.__parent[i] = self.Find(self.__parent[i])
return self.__parent[i]
    def Union(self, i, j):
        # Union by rank. Returns (new_root, root_of_i, root_of_j) so the
        # caller can merge per-root bookkeeping (e.g. table sizes below).
        i_id = self.Find(i)
        j_id = self.Find(j)
        if i_id == j_id:
            # Already in the same set; nothing merged.
            return i_id, i_id, j_id
        if self.__rank[i_id] > self.__rank[j_id]:
            # Attach the shallower tree (j) under the deeper root (i).
            self.__parent[j_id] = i_id
            return i_id, i_id, j_id
        else:
            self.__parent[i_id] = j_id
            # Equal ranks: the merged tree grows one level deeper.
            if self.__rank[i_id] == self.__rank[j_id]:
                self.__rank[j_id] += 1
            return j_id, i_id, j_id #temporary! (translated from Russian)
# Merge-tables problem: n tables with given sizes, m merge operations;
# after each merge track the maximum table size (printed at the end).
n, m = map(int, input().split(' '))
t = list(map(int, input().split(' ')))
# tables_size[0] holds the running maximum; [1..n] are per-root sizes.
tables_size = [max(t)]
tables_size.extend(t)
# NOTE(review): `path` is never used below -- candidate for removal.
path = []
s = Set(n)
for i in range(1,n + 1):
    s.MakeSet(i)
for i in range(m):
    # Merge the set containing `src` into the set containing `dst`.
    dst, src = map(int, input().split(' '))
    root, i_id, j_id = s.Union(dst, src)
    if i_id != j_id:
        # Accumulate sizes on the surviving root and update the maximum.
        tables_size[root] = tables_size[i_id] + tables_size[j_id]
        tables_size[0] = tables_size[root] if tables_size[root] > tables_size[0] else tables_size[0]
print(tables_size[0])
|
21,729 | bcd106d60ecc79dc564334a45b3c3627ad12c51d | # ------------------------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License (MIT). See LICENSE in the repo root for license information.
# ------------------------------------------------------------------------------------------
import logging
import os
from pathlib import Path
from typing import Any, Dict, Optional, Union
import torch
from torch.optim.optimizer import Optimizer
from torch.optim.rmsprop import RMSprop
from InnerEye.Azure.azure_util import RUN_CONTEXT
from InnerEye.ML.common import ModelExecutionMode
from InnerEye.ML.config import ModelArchitectureConfig, PaddingMode, SegmentationModelBase, \
basic_size_shrinkage
from InnerEye.ML.deep_learning_config import OptimizerType
from InnerEye.ML.model_config_base import ModelConfigBase
from InnerEye.ML.model_training_steps import get_scalar_model_inputs_and_labels
from InnerEye.ML.models.architectures.base_model import BaseModel, CropSizeConstraints
from InnerEye.ML.models.architectures.complex import ComplexModel
from InnerEye.ML.models.architectures.unet_2d import UNet2D
from InnerEye.ML.models.architectures.unet_3d import UNet3D
from InnerEye.ML.models.layers.basic import BasicLayer
from InnerEye.ML.models.parallel.data_parallel import DataParallelModel
from InnerEye.ML.scalar_config import ScalarModelBase
from InnerEye.ML.sequence_config import SequenceModelBase
from InnerEye.ML.utils.device_aware_module import DeviceAwareModule
from InnerEye.ML.utils.metrics_constants import LoggingColumns
from InnerEye.ML.utils.ml_util import RandomStateSnapshot
from InnerEye.ML.utils.temperature_scaling import ModelWithTemperature
from InnerEye.ML.visualizers.model_summary import ModelSummary
class ModelAndInfo:
    """
    This class contains the model and optional associated information, as well as methods to create
    models and optimizers, move these to GPU and load state from checkpoints. Attributes are:
      config: the model configuration information
      model: the model created based on the config
      optimizer: the optimizer created based on the config and associated with the model
      checkpoint_path: the path to load a checkpoint from, can be None
      mean_teacher_model: the mean teacher model, if and as specified by the config
      is_model_adjusted: whether model adjustments (which cannot be done twice) have been applied to model
      is_mean_teacher_model_adjusted: whether model adjustments (which cannot be done twice)
      have been applied to the mean teacher model
      checkpoint_epoch: the training epoch this model was created, if loaded from disk
      model_execution_mode: mode this model will be run in
    """
    # Keys under which the individual state dicts are stored in a checkpoint file.
    MODEL_STATE_DICT_KEY = 'state_dict'
    OPTIMIZER_STATE_DICT_KEY = 'opt_dict'
    MEAN_TEACHER_STATE_DICT_KEY = 'mean_teacher_state_dict'
    EPOCH_KEY = 'epoch'

    def __init__(self,
                 config: ModelConfigBase,
                 model_execution_mode: ModelExecutionMode,
                 checkpoint_path: Optional[Path] = None):
        """
        :param config: the model configuration information
        :param model_execution_mode: mode this model will be run in
        :param checkpoint_path: the path to load a checkpoint from, can be None
        """
        self.config = config
        self.checkpoint_path = checkpoint_path
        self.model_execution_mode = model_execution_mode
        self._model = None
        self._mean_teacher_model = None
        self._optimizer = None
        self.checkpoint_epoch = None
        self.is_model_adjusted = False
        self.is_mean_teacher_model_adjusted = False

    @property
    def model(self) -> DeviceAwareModule:
        if not self._model:
            raise ValueError("Model has not been created.")
        return self._model

    @property
    def optimizer(self) -> Optimizer:
        if not self._optimizer:
            raise ValueError("Optimizer has not been created.")
        return self._optimizer

    @property
    def mean_teacher_model(self) -> Optional[DeviceAwareModule]:
        # Only enforce existence when the config actually asks for a mean teacher.
        if not self._mean_teacher_model and self.config.compute_mean_teacher_model:
            raise ValueError("Mean teacher model has not been created.")
        return self._mean_teacher_model

    @staticmethod
    def read_checkpoint(path_to_checkpoint: Path, use_gpu: bool) -> Dict[str, Any]:
        """
        Reads a checkpoint file into a dictionary.
        """
        # For model debugging, allow loading a GPU trained model onto the CPU. This will clearly only work
        # if the model is small.
        map_location = None if use_gpu else 'cpu'
        checkpoint = torch.load(str(path_to_checkpoint), map_location=map_location)
        return checkpoint

    @classmethod
    def _load_checkpoint(cls, model: DeviceAwareModule, checkpoint_path: Path,
                         key_in_state_dict: str, use_gpu: bool) -> Optional[int]:
        """
        Loads a checkpoint of a model, may be the model or the mean teacher model. Assumes the model
        has already been created, and the checkpoint exists. This does not set checkpoint epoch.
        This method should not be called externally. Use instead try_load_checkpoint_for_model
        or try_load_checkpoint_for_mean_teacher_model
        :param model: model to load weights
        :param checkpoint_path: Path to checkpoint
        :param key_in_state_dict: the key for the model weights in the checkpoint state dict
        :param use_gpu: whether the checkpoint may be mapped onto the GPU
        :return: checkpoint epoch from the state dict, or None if the key was missing
        """
        logging.info(f"Loading checkpoint {checkpoint_path}")
        checkpoint = ModelAndInfo.read_checkpoint(checkpoint_path, use_gpu)
        try:
            state_dict = checkpoint[key_in_state_dict]
        except KeyError:
            logging.error(f"Key {key_in_state_dict} not found in checkpoint")
            # Fixed: previously returned False here despite the declared int
            # return type; callers then stored False as the checkpoint epoch
            # while reporting success.
            return None
        if isinstance(model, torch.nn.DataParallel):
            result = model.module.load_state_dict(state_dict, strict=False)
        else:
            result = model.load_state_dict(state_dict, strict=False)
        if result.missing_keys:
            logging.warning(f"Missing keys in model checkpoint: {result.missing_keys}")
        if result.unexpected_keys:
            logging.warning(f"Unexpected keys in model checkpoint: {result.unexpected_keys}")
        return checkpoint[ModelAndInfo.EPOCH_KEY]

    @classmethod
    def _adjust_for_gpus(cls, model: DeviceAwareModule, config: ModelConfigBase,
                         model_execution_mode: ModelExecutionMode) -> DeviceAwareModule:
        """
        Updates a torch model so that input mini-batches are parallelized across the batch dimension to utilise
        multiple gpus. If model parallel is set to True and execution is in test mode, then model is partitioned to
        perform full volume inference.
        This assumes the model has been created, that the optimizer has not yet been created, and the the model has not
        been adjusted twice. This method should not be called externally. Use instead adjust_model_for_gpus
        or adjust_mean_teacher_model_for_gpus
        :returns Adjusted model
        """
        if config.use_gpu:
            model = model.cuda()
            logging.info("Adjusting the model to use mixed precision training.")
            # If model parallel is set to True, then partition the network across all available gpus.
            if config.use_model_parallel:
                devices = config.get_cuda_devices()
                assert devices is not None  # for mypy
                model.partition_model(devices=devices)  # type: ignore
        else:
            logging.info("Making no adjustments to the model because no GPU was found.")
        # Update model related config attributes (After Model Parallel Activated)
        config.adjust_after_mixed_precision_and_parallel(model)
        # DataParallel enables running the model with multiple gpus by splitting samples across GPUs
        # If the model is used in training mode, data parallel is activated by default.
        # Similarly, if model parallel is not activated, data parallel is used as a backup option
        use_data_parallel = (model_execution_mode == ModelExecutionMode.TRAIN) or (not config.use_model_parallel)
        if config.use_gpu and use_data_parallel:
            logging.info("Adjusting the model to use DataParallel")
            # Move all layers to the default GPU before activating data parallel.
            # This needs to happen even though we put the model to the GPU at the beginning of the method,
            # but we may have spread it across multiple GPUs later.
            model = model.cuda()
            model = DataParallelModel(model, device_ids=config.get_cuda_devices())
        return model

    def create_model(self) -> None:
        """
        Creates a model (with temperature scaling) according to the config given.
        """
        self._model = create_model_with_temperature_scaling(self.config)

    def try_load_checkpoint_for_model(self) -> bool:
        """
        Loads a checkpoint of a model. The provided model checkpoint must match the stored model.
        :return True if checkpoint exists and was loaded, False otherwise.
        """
        if self._model is None:
            raise ValueError("Model must be created before a checkpoint can be loaded.")
        if not self.checkpoint_path:
            raise ValueError("No checkpoint provided")
        if not self.checkpoint_path.is_file():
            logging.warning(f'No checkpoint found at {self.checkpoint_path} current working dir {os.getcwd()}')
            return False
        epoch = ModelAndInfo._load_checkpoint(model=self._model,
                                              checkpoint_path=self.checkpoint_path,
                                              key_in_state_dict=ModelAndInfo.MODEL_STATE_DICT_KEY,
                                              use_gpu=self.config.use_gpu)
        if epoch is None:
            # The checkpoint file existed but did not contain the model weights.
            return False
        logging.info(f"Loaded model from checkpoint (epoch: {epoch})")
        self.checkpoint_epoch = epoch
        return True

    def adjust_model_for_gpus(self) -> None:
        """
        Updates the torch model so that input mini-batches are parallelized across the batch dimension to utilise
        multiple gpus. If model parallel is set to True and execution is in test mode, then model is partitioned to
        perform full volume inference.
        """
        if self._model is None:
            raise ValueError("Model must be created before it can be adjusted.")
        # Adjusting twice causes an error.
        if self.is_model_adjusted:
            logging.debug("model_and_info.is_model_adjusted is already True")
        if self._optimizer:
            raise ValueError("Create an optimizer only after creating and adjusting the model.")
        self._model = ModelAndInfo._adjust_for_gpus(model=self._model,
                                                    config=self.config,
                                                    model_execution_mode=self.model_execution_mode)
        self.is_model_adjusted = True
        logging.debug("model_and_info.is_model_adjusted set to True")

    def create_summary_and_adjust_model_for_gpus(self) -> None:
        """
        Generates the model summary, which is required for model partitioning across GPUs, and then moves the model to
        GPU with data parallel/model parallel by calling adjust_model_for_gpus.
        """
        if self._model is None:
            raise ValueError("Model must be created before it can be adjusted.")
        if self.config.is_segmentation_model:
            summary_for_segmentation_models(self.config, self._model)
        # Prepare for mixed precision training and data parallelization (no-op if already done).
        # This relies on the information generated in the model summary.
        self.adjust_model_for_gpus()

    def try_create_model_and_load_from_checkpoint(self) -> bool:
        """
        Creates a model as per the config, and loads the parameters from the given checkpoint path.
        Also updates the checkpoint_epoch.
        :return True if checkpoint exists and was loaded, False otherwise.
        """
        self.create_model()
        if self.checkpoint_path:
            # Load the stored model. If there is no checkpoint present, return immediately.
            return self.try_load_checkpoint_for_model()
        return True

    def try_create_model_load_from_checkpoint_and_adjust(self) -> bool:
        """
        Creates a model as per the config, and loads the parameters from the given checkpoint path.
        The model is then adjusted for data parallelism and mixed precision.
        Also updates the checkpoint_epoch.
        :return True if checkpoint exists and was loaded, False otherwise.
        """
        success = self.try_create_model_and_load_from_checkpoint()
        self.create_summary_and_adjust_model_for_gpus()
        return success

    def create_mean_teacher_model(self) -> None:
        """
        Creates a model (with temperature scaling) according to the config given.
        """
        self._mean_teacher_model = create_model_with_temperature_scaling(self.config)

    def try_load_checkpoint_for_mean_teacher_model(self) -> bool:
        """
        Loads a checkpoint of a model. The provided model checkpoint must match the stored model.
        :return True if checkpoint exists and was loaded, False otherwise.
        """
        if self._mean_teacher_model is None:
            raise ValueError("Mean teacher model must be created before a checkpoint can be loaded.")
        if not self.checkpoint_path:
            raise ValueError("No checkpoint provided")
        if not self.checkpoint_path.is_file():
            logging.warning(f'No checkpoint found at {self.checkpoint_path} current working dir {os.getcwd()}')
            return False
        epoch = ModelAndInfo._load_checkpoint(model=self._mean_teacher_model,
                                              checkpoint_path=self.checkpoint_path,
                                              key_in_state_dict=ModelAndInfo.MEAN_TEACHER_STATE_DICT_KEY,
                                              use_gpu=self.config.use_gpu)
        if epoch is None:
            # The checkpoint file existed but did not contain the mean teacher weights.
            return False
        logging.info(f"Loaded mean teacher model from checkpoint (epoch: {epoch})")
        self.checkpoint_epoch = epoch
        return True

    def adjust_mean_teacher_model_for_gpus(self) -> None:
        """
        Updates the torch model so that input mini-batches are parallelized across the batch dimension to utilise
        multiple gpus. If model parallel is set to True and execution is in test mode, then model is partitioned to
        perform full volume inference.
        """
        if self._mean_teacher_model is None:
            raise ValueError("Mean teacher model must be created before it can be adjusted.")
        # Adjusting twice causes an error.
        if self.is_mean_teacher_model_adjusted:
            logging.debug("model_and_info.is_mean_teacher_model_adjusted is already True")
        self._mean_teacher_model = ModelAndInfo._adjust_for_gpus(model=self._mean_teacher_model,
                                                                 config=self.config,
                                                                 model_execution_mode=self.model_execution_mode)
        self.is_mean_teacher_model_adjusted = True
        logging.debug("model_and_info.is_mean_teacher_model_adjusted set to True")

    def create_summary_and_adjust_mean_teacher_model_for_gpus(self) -> None:
        """
        Generates the model summary, which is required for model partitioning across GPUs, and then moves the model to
        GPU with data parallel/model parallel by calling adjust_model_for_gpus.
        """
        if self._mean_teacher_model is None:
            raise ValueError("Mean teacher model must be created before it can be adjusted.")
        if self.config.is_segmentation_model:
            summary_for_segmentation_models(self.config, self._mean_teacher_model)
        # Prepare for mixed precision training and data parallelization (no-op if already done).
        # This relies on the information generated in the model summary.
        self.adjust_mean_teacher_model_for_gpus()

    def try_create_mean_teacher_model_and_load_from_checkpoint(self) -> bool:
        """
        Creates a model as per the config, and loads the parameters from the given checkpoint path.
        Also updates the checkpoint_epoch.
        :return True if checkpoint exists and was loaded, False otherwise.
        """
        self.create_mean_teacher_model()
        if self.checkpoint_path:
            # Load the stored model. If there is no checkpoint present, return immediately.
            return self.try_load_checkpoint_for_mean_teacher_model()
        return True

    def try_create_mean_teacher_model_load_from_checkpoint_and_adjust(self) -> bool:
        """
        Creates a model as per the config, and loads the parameters from the given checkpoint path.
        The model is then adjusted for data parallelism and mixed precision.
        Also updates the checkpoint_epoch.
        :return True if checkpoint exists and was loaded, False otherwise.
        """
        success = self.try_create_mean_teacher_model_and_load_from_checkpoint()
        self.create_summary_and_adjust_mean_teacher_model_for_gpus()
        return success

    def create_optimizer(self) -> None:
        """
        Creates a torch optimizer for the given model, and stores it as an instance variable in the current object.
        """
        # Make sure model is created before we create optimizer
        if self._model is None:
            raise ValueError("Model checkpoint must be created before optimizer checkpoint can be loaded.")
        # Select optimizer type
        if self.config.optimizer_type in [OptimizerType.Adam, OptimizerType.AMSGrad]:
            self._optimizer = torch.optim.Adam(self._model.parameters(), self.config.l_rate,
                                               self.config.adam_betas, self.config.opt_eps, self.config.weight_decay,
                                               amsgrad=self.config.optimizer_type == OptimizerType.AMSGrad)
        elif self.config.optimizer_type == OptimizerType.SGD:
            self._optimizer = torch.optim.SGD(self._model.parameters(), self.config.l_rate, self.config.momentum,
                                              weight_decay=self.config.weight_decay)
        elif self.config.optimizer_type == OptimizerType.RMSprop:
            self._optimizer = RMSprop(self._model.parameters(), self.config.l_rate, self.config.rms_alpha,
                                      self.config.opt_eps,
                                      self.config.weight_decay, self.config.momentum)
        else:
            raise NotImplementedError(f"Optimizer type {self.config.optimizer_type.value} is not implemented")

    def try_load_checkpoint_for_optimizer(self) -> bool:
        """
        Loads a checkpoint of an optimizer.
        :return True if the checkpoint exists and optimizer state loaded, False otherwise
        """
        if self._optimizer is None:
            raise ValueError("Optimizer must be created before optimizer checkpoint can be loaded.")
        if not self.checkpoint_path:
            logging.warning("No checkpoint path provided.")
            return False
        if not self.checkpoint_path.is_file():
            logging.warning(f'No checkpoint found at {self.checkpoint_path} current working dir {os.getcwd()}')
            return False
        logging.info(f"Loading checkpoint {self.checkpoint_path}")
        checkpoint = ModelAndInfo.read_checkpoint(self.checkpoint_path, self.config.use_gpu)
        try:
            state_dict = checkpoint[ModelAndInfo.OPTIMIZER_STATE_DICT_KEY]
        except KeyError:
            logging.error(f"Key {ModelAndInfo.OPTIMIZER_STATE_DICT_KEY} not found in checkpoint")
            return False
        self._optimizer.load_state_dict(state_dict)
        logging.info(f"Loaded optimizer from checkpoint (epoch: {checkpoint[ModelAndInfo.EPOCH_KEY]})")
        self.checkpoint_epoch = checkpoint[ModelAndInfo.EPOCH_KEY]
        return True

    def try_create_optimizer_and_load_from_checkpoint(self) -> bool:
        """
        Creates an optimizer and loads its state from a checkpoint.
        :return True if the checkpoint exists and optimizer state loaded, False otherwise
        """
        self.create_optimizer()
        if self.checkpoint_path:
            return self.try_load_checkpoint_for_optimizer()
        return True

    def save_checkpoint(self, epoch: int) -> Path:
        """
        Saves a checkpoint of the current model and optimizer_type parameters in the specified folder
        and uploads it to the output blob storage of the current run context.
        The checkpoint's name for epoch 123 would be 123_checkpoint.pth.tar.
        :param epoch: The last epoch used to train the model.
        :return: The full path of the checkpoint file.
        """
        # Logging is disabled around torch.save to suppress noisy library output.
        logging.getLogger().disabled = True
        model_state_dict = self.model.module.state_dict() \
            if isinstance(self.model, torch.nn.DataParallel) else self.model.state_dict()
        checkpoint_file_path = self.config.get_path_to_checkpoint(epoch)
        checkpoint_file_path.parent.mkdir(exist_ok=True, parents=True)
        info_to_store = {
            ModelAndInfo.EPOCH_KEY: epoch,
            ModelAndInfo.MODEL_STATE_DICT_KEY: model_state_dict,
            ModelAndInfo.OPTIMIZER_STATE_DICT_KEY: self.optimizer.state_dict()
        }
        if self.config.compute_mean_teacher_model:
            assert self.mean_teacher_model is not None  # for mypy, getter has this built in
            mean_teacher_model_state_dict = self.mean_teacher_model.module.state_dict() \
                if isinstance(self.mean_teacher_model, torch.nn.DataParallel) \
                else self.mean_teacher_model.state_dict()
            info_to_store[ModelAndInfo.MEAN_TEACHER_STATE_DICT_KEY] = mean_teacher_model_state_dict
        torch.save(info_to_store, checkpoint_file_path)
        logging.getLogger().disabled = False
        logging.info(f"Saved model checkpoint for epoch {epoch} to {checkpoint_file_path}")
        return checkpoint_file_path
def init_weights(m: Union[torch.nn.Conv3d, torch.nn.BatchNorm3d]) -> None:
    """
    Initialize the weights of a Pytorch module.
    :param m: A PyTorch module. Only Conv3d and BatchNorm3d are initialized;
        any other module type is left untouched.
    """
    # (The redundant function-local `import torch` was removed; torch is
    # already imported at module level.)
    if isinstance(m, torch.nn.Conv3d):
        # Small-variance Gaussian init for convolution kernels.
        torch.nn.init.normal_(m.weight, 0, 0.01)
    elif isinstance(m, torch.nn.BatchNorm3d):
        # Identity-like affine transform: scale 1, shift 0.
        torch.nn.init.constant_(m.weight, 1)
        torch.nn.init.constant_(m.bias, 0)
# noinspection PyTypeChecker
def build_net(args: SegmentationModelBase) -> BaseModel:
    """
    Build network architectures
    :param args: Network configuration arguments
    """
    # Channel counts for every layer: image channels in, class count out.
    full_channels_list = [args.number_of_image_channels, *args.feature_channels, args.number_of_classes]
    initial_fcn = [BasicLayer] * 2
    residual_blocks = [[BasicLayer, BasicLayer]] * 3
    basic_network_definition = initial_fcn + residual_blocks # type: ignore
    # no dilation for the initial FCN and then a constant 1 neighbourhood dilation for the rest residual blocks
    # NOTE(review): this builds 2 + 2 * 5 = 12 dilation entries for 2 + 3
    # network elements -- verify that ComplexModel expects this shape.
    basic_dilations = [1] * len(initial_fcn) + [2, 2] * len(basic_network_definition)
    # Crop size must be at least 29 because all architectures (apart from UNets) shrink the input image by 28
    crop_size_constraints = CropSizeConstraints(minimum_size=basic_size_shrinkage + 1)
    run_weight_initialization = True
    network: BaseModel
    if args.architecture == ModelArchitectureConfig.Basic:
        network_definition = basic_network_definition
        network = ComplexModel(args, full_channels_list,
                               basic_dilations, network_definition, crop_size_constraints) # type: ignore
    elif args.architecture == ModelArchitectureConfig.UNet3D:
        network = UNet3D(input_image_channels=args.number_of_image_channels,
                         initial_feature_channels=args.feature_channels[0],
                         num_classes=args.number_of_classes,
                         kernel_size=args.kernel_size)
        # UNets handle their own initialization; skip the Gaussian init below.
        run_weight_initialization = False
    elif args.architecture == ModelArchitectureConfig.UNet2D:
        network = UNet2D(input_image_channels=args.number_of_image_channels,
                         initial_feature_channels=args.feature_channels[0],
                         num_classes=args.number_of_classes,
                         padding_mode=PaddingMode.Edge)
        run_weight_initialization = False
    else:
        raise ValueError(f"Unknown model architecture {args.architecture}")
    # Fail early if the configured crop sizes violate the architecture's constraints.
    network.validate_crop_size(args.crop_size, "Training crop size")
    network.validate_crop_size(args.test_crop_size, "Test crop size") # type: ignore
    # Initialize network weights
    if run_weight_initialization:
        network.apply(init_weights) # type: ignore
    return network
def summary_for_segmentation_models(config: ModelConfigBase, model: DeviceAwareModule) -> None:
    """
    Generates a human readable summary of the present segmentation model, writes it to logging.info, and
    stores the ModelSummary object inside the argument `model`.

    :param config: The configuration for the model.
    :param model: The instantiated Pytorch model.
    """
    assert isinstance(model, BaseModel)
    size = config.crop_size
    # A scalar crop size is shorthand for a cube with that side length.
    if isinstance(size, int):
        size = (size,) * 3
    try:
        model.generate_model_summary(size, log_summaries_to_files=config.log_summaries_to_files)
    except AttributeError as e:
        logging.warning(f"summary_for_segmentation_models failed with exception {e}")
def generate_and_print_model_summary(config: ModelConfigBase, model: DeviceAwareModule) -> None:
    """
    Writes a human readable summary of the present model to logging.info, and logs the number of trainable
    parameters to AzureML.
    :param config: The configuration for the model.
    :param model: The instantiated Pytorch model.
    """
    # Summary generation feeds data through the model, which advances RNG state; snapshot and
    # restore it so the summary has no effect on subsequent (reproducible) training.
    random_state = RandomStateSnapshot.snapshot_random_state()
    # There appears to be a bug in apex, where previous use (in training for example) causes problems
    # when another model is later built on the CPU (for example, before loading from a checkpoint)
    # https://github.com/NVIDIA/apex/issues/694
    # Hence, move the model to the GPU before doing model summary.
    if config.use_gpu:
        model = model.cuda()
    if isinstance(config, ScalarModelBase):
        # To generate the model summary, read the first item of the dataset. Then use the model's own
        # get_model_input function to convert the dataset item to input tensors, and feed them through the model.
        train_dataset = config.get_torch_dataset_for_inference(ModelExecutionMode.TRAIN)
        train_item_0 = next(iter(train_dataset.as_data_loader(shuffle=False, batch_size=1, num_dataload_workers=0)))
        model_inputs = get_scalar_model_inputs_and_labels(config, model, train_item_0).model_inputs
        # The model inputs may already be converted to float16, assuming that we would do mixed precision.
        # However, the model is not yet converted to float16 when this function is called, hence convert back to float32
        summary = ModelSummary(model)
        summary.generate_summary(input_tensors=model_inputs, log_summaries_to_files=config.log_summaries_to_files)
    elif config.is_segmentation_model:
        # Segmentation models store the summary on the model itself.
        summary_for_segmentation_models(config, model)
        assert model.summarizer
        summary = model.summarizer  # type: ignore
    else:
        raise ValueError("Don't know how to generate a summary for this type of model?")
    RUN_CONTEXT.log(LoggingColumns.NumTrainableParameters, summary.n_trainable_params)
    random_state.restore_random_state()
def create_model_with_temperature_scaling(config: ModelConfigBase) -> Any:
    """
    Create a model with temperature scaling by wrapping the result of config.create_model with ModelWithTemperature,
    if temperature scaling config has been provided, otherwise return the result of config.create_model
    """
    base_model = config.create_model()
    # Only sequence models carry a temperature scaling configuration; wrap when one is set.
    if isinstance(config, SequenceModelBase) and config.temperature_scaling_config:
        return ModelWithTemperature(base_model, config.temperature_scaling_config)
    return base_model
|
21,730 | 22e592053e6fcacca896b1da0fe22bb7cf98904b | import os
import tensorflow as tf
import io
from object_detection.utils import dataset_util
from PIL import Image
from time import time
#cwd = os.getcwd()
# Build a small TF1 graph, reused for every image, that decodes a JPEG,
# resizes it to a fixed height x width, and re-encodes it as JPEG.
sess = tf.Session()
# Placeholder for the raw JPEG bytes, fed per-image at run time.
encoded_jpg_ph = tf.placeholder(tf.string, shape=[])
# Target output size of the resize layer.
height = 256
width = 256
# resizing the image here
decoded_image = tf.image.decode_jpeg(encoded_jpg_ph)
decoded_image_resized = tf.image.resize_images(decoded_image, [height, width]) # this returns float32
decoded_image_resized_uint = tf.cast(decoded_image_resized, tf.uint8)
resize_image = tf.image.encode_jpeg(decoded_image_resized_uint) # expects uint8
# initialize all graph variables (the graph above has none, but this is harmless)
sess.run(tf.global_variables_initializer())
def create_cat_tf_example(label, label_text, img_path, img_name):
    """Creates a tf.Example proto for one image, resized via the module-level TF graph.

    Args:
        label: integer class id stored in image/object/class/label.
        label_text: class name stored in image/object/class/text (skipped if falsy).
        img_path: directory containing the image (must end with a path separator).
        img_name: file name of the JPEG image.
    Returns:
        example: The created tf.Example.
    """
    with tf.gfile.FastGFile(img_path + img_name, 'rb') as fid:
        encoded_image = fid.read()
    # Run the resize graph on the raw bytes (uses module globals sess/resize_image/encoded_jpg_ph).
    encoded_image_data = sess.run(resize_image, {encoded_jpg_ph: encoded_image}) # I think this may not be the right way of doing this
    b_filename = str.encode(img_name)
    image_format = b'jpg'
    # NOTE(review): the bounding box is a fixed 10-pixel inset from the image border,
    # not a real annotation -- presumably a whole-image placeholder box. Confirm intent.
    xmins = [10.0 / width]
    xmaxs = [(width - 10) / width]
    ymins = [10.0 / height]
    ymaxs = [(height - 10.0) / height]
    # classes_text = [str.encode(label_text)]
    classes_text = []
    if label_text:
        classes_text.append(label_text.encode('utf8'))
    classes = []
    # if label == 1:
    classes.append(int(label))
    # print(classes_text, classes, b_filename)
    tf_example = tf.train.Example(features=tf.train.Features(feature={
        'image/height': dataset_util.int64_feature(height),
        'image/width': dataset_util.int64_feature(width),
        'image/filename': dataset_util.bytes_feature(b_filename),
        'image/source_id': dataset_util.bytes_feature(b_filename),
        'image/encoded': dataset_util.bytes_feature(encoded_image_data),
        # 'image/encoded': dataset_util.bytes_feature(encoded_jpg),
        'image/format': dataset_util.bytes_feature(image_format),
        'image/object/bbox/xmin': dataset_util.float_list_feature(xmins),
        'image/object/bbox/xmax': dataset_util.float_list_feature(xmaxs),
        'image/object/bbox/ymin': dataset_util.float_list_feature(ymins),
        'image/object/bbox/ymax': dataset_util.float_list_feature(ymaxs),
        'image/object/class/text': dataset_util.bytes_list_feature(classes_text),
        'image/object/class/label': dataset_util.int64_list_feature(classes),
    }))
    return tf_example
if __name__ == '__main__':
    # For each split ("train"/"eval") walk the per-class image folders and write
    # one TFRecord file containing a tf.Example per image.
    start_time = time()
    each_batch_time = time()
    # collect the dirs
    mode_list = ["train", "eval"]
    for mode in mode_list:
        cwd = "C:/Users/VIPLAB/Desktop/dog_vs_cat_detection/dataset/self_divide/" + mode + "/"
        # classes = ["cat", "dog"]
        classes = ["dog", "cat"]
        writer = tf.python_io.TFRecordWriter("C:/Users/VIPLAB/Desktop/dog_vs_cat_detection/dataset/TFRECORD/" + mode + ".tfrecords")
        for index, name in enumerate(classes):
            class_path = cwd + name + "/"
            for img_count, img_name in enumerate(os.listdir(class_path)):
                # Progress report every 100 images, with the time spent on the last batch.
                if (img_count % 100 == 0):
                    output_str = mode + " step -- " + str(img_count)
                    print(output_str, " compute 100 image _ batch time = ", time() - each_batch_time)
                    print("id =", int(index+1),name, img_name, class_path)
                    each_batch_time = time()
                # sess.close()
                # # reset session otherwise it will run slowly
                # tf.reset_default_graph()
                # sess = tf.Session()
                # img_path = class_path + img_name
                # Labels are 1-based (index + 1) to leave 0 free as background.
                each_record = create_cat_tf_example(label = index + 1 , label_text = name, img_path = class_path, img_name = img_name)
                # if(name == "dog"):
                #     each_record = create_cat_tf_example(label = 1, label_text = name, img_path = class_path, img_name = img_name)
                # else:
                #     each_record = create_cat_tf_example(label = None, label_text = None, img_path = class_path, img_name = img_name)
                writer.write(each_record.SerializeToString())  # serialize the Example to a byte string
        writer.close()
        print(mode , "is finished.")
    sess.close()
print("cost time =", time() - start_time) |
21,731 | 2530111f35244bbc5ef29d200439aacd50c7a4db | """
Videos module: fetches a list of playable streams (assets) or categories (channels)
"""
# main imports
import sys
import os
import xbmc
import xbmcgui
import xbmcplugin
import re
import urllib
import MpTVConnector
class _Info:
def __init__( self, *args, **kwargs ):
self.__dict__.update( kwargs )
class Main:
# base paths
BASE_PATH = os.getcwd().replace( ";", "" )
_handle = int(sys.argv[ 1 ])
def __init__( self ):
print "[tvserver] handle=" + str(self._handle)
self._path = sys.argv[ 0 ]
if self._handle == -1:
print "[tvserver] Invalid handle."
self._is_playing = False
self._get_xbmc_version()
self._get_settings()
try:
server_host = str(self.settings["mptvserver"])
server_port = int(self.settings["mptvport"])
except:
print "[tvserver] Error retrieving host and port settings. Using default 'localhost:9596'"
server_host = "localhost"
server_port = 9596
# init the connection
self._conn = MpTVConnector.MpTVConnector()
self._conn.connect(server_host, server_port)
showshift = False
if sys.argv[ 2 ]:
chanUrl = sys.argv[ 2 ]
print "[tvserver] chanUrl: " + chanUrl
if chanUrl.startswith("?G"):
# this is a group
print "[tvserver] get_channel: " + chanUrl[ 3: ]
ok = self.get_channel(chanUrl[ 3: ])
showshift = True
elif chanUrl.startswith("?C"):
print "[tvserver] choose_channel: " + chanUrl[ 3: ]
ok = self.choose_channel(chanUrl[ 3: ])
showshift = True
elif chanUrl.startswith("?K"):
ok = self.stop_timeshift()
showshift = True
elif chanUrl.startswith("?S"):
xbmc.output("[tvserver] Choose_show: " + chanUrl[ 3: ])
print "[tvserver] choose_show: " + chanUrl[ 3: ]
ok = self.choose_show(showUrl[ 3: ])
elif chanUrl.startswith("?D"):
print "[tvserver] delete show: " + chanUrl[ 3: ]
ok = self.delete_show(chanUrl[ 3: ])
elif chanUrl.startswith("?R"):
xbmc.output("[tvserver] Container.Refresh")
xbmc.executebuiltin("Container.Refresh")
ok = True
elif chanUrl.startswith("?PS"):
# play shows
xbmc.output("[tvserver] TVServer play shows")
ok = self.get_shows()
elif chanUrl.startswith("?PL"):
# play live TV
print "[tvserver] play live tv"
ok = self.get_groups()
showshift = True
else:
print "Unknown path : ", chanUrl
else:
ok = self.main_menu()
if ok:
if showshift:
curShift = self._conn.isTimeshifting();
if curShift[0] == "True":
try:
curProgram = curShift[3] + " - " + curShift[4] + " [" + curShift[5] + "]"
listitem = xbmcgui.ListItem("Current Timeshift: " + curProgram, iconImage = "DefaultFolder.png")
# give the item a stop timeshift context menu
listitem.addContextMenuItems([("Stop Timeshifting", "XBMC.RunPlugin(%s?K)" % (self._path))])
#print "[tvserver] before addDirectoryItem handle=" + str(self._handle)
ok = xbmcplugin.addDirectoryItem( handle=self._handle, url=curShift[1], listitem=listitem, isFolder=False)
# also add a separate stop timeshift entry
listitem = xbmcgui.ListItem("Stop Timeshift: " + curProgram, iconImage = "DefaultFolder.png")
print "url=%s?K" % (self._path)
ok = xbmcplugin.addDirectoryItem( handle=self._handle, url="%s?K" % (self._path), listitem=listitem, isFolder=False)
except:
print "[tvserver] error getting curShift"
# send notification we're finished, successfully or unsuccessfully
xbmcplugin.endOfDirectory( handle=self._handle, succeeded=ok, cacheToDisc=False )
    def _get_settings( self ):
        # Load the plugin settings into self.settings. The getSetting API gained a
        # mandatory handle argument around XBMC 10.x (revision >= 30000), so the
        # call form depends on the detected XBMC version.
        self.settings = {}
        if self.VERSION_XBMC >= 30000:
            # Starting from XBMC PRE 10.x, getSetting has 2 arguments
            print "[tvserver] handle=" + str(self._handle)
            self.settings["mptvserver"] = xbmcplugin.getSetting(self._handle, "mptvserver")
            self.settings["mptvport"] = xbmcplugin.getSetting(self._handle, "mptvport")
            self.settings["mprecplayback"] = xbmcplugin.getSetting(self._handle, "mprecplayback")
            self.settings["mpsharename"] = xbmcplugin.getSetting(self._handle, "mpsharename")
            self.settings["extsort"] = ( xbmcplugin.getSetting(self._handle, "extsort") == "true" )
        else:
            self.settings["mptvserver"   ] = xbmcplugin.getSetting("mptvserver")
            self.settings["mptvport"     ] = xbmcplugin.getSetting("mptvport")
            self.settings["mprecplayback"] = xbmcplugin.getSetting("mprecplayback")
            self.settings["mpsharename"  ] = xbmcplugin.getSetting("mpsharename")
            self.settings["extsort"      ] = ( xbmcplugin.getSetting("extsort") == "true" )
def _get_xbmc_version( self ):
try:
buildVersion = xbmc.getInfoLabel("System.BuildVersion")
if buildVersion.startswith('PRE-10.') or buildVersion.startswith('10.') or buildVersion.startswith('9.') or buildVersion.startswith('UNKNOWN'):
# XBMC with revision info rXXXXX
xbmc.output ("[tvserver] XBMC BuildVersion: " + buildVersion)
try:
rev_re = re.compile(' r(\d+)')
self.VERSION_XBMC = int(rev_re.search(buildVersion).group(1))
except:
if buildVersion.startswith('PRE-10.') or buildVersion.startswith('10.'):
self.VERSION_XBMC = 30000
else:
self.VERSION_XBMC = 0
elif buildVersion.startswith('0.9.'):
# Boxee betas
xbmc.output ("[tvserver] BOXEE BuildVersion: " + buildVersion)
rev_re = re.compile('0\.9\.\d+\.(\d+)')
self.VERSION_BOXEE = int (rev_re.search(buildVersion).group(1))
self.VERSION_XBMC = 0
xbmc.output ("[tvserver] initial version of BOXEE: %d" % (VERSION_BOXEE,))
else: # Plex ?
xbmc.output ("[tvserver] XBMC BuildVersion: " + buildVersion)
self.VERSION_XBMC = 0
except:
xbmc.output ("[tvserver] init except: %s" % (sys.exc_info()[0],))
self.VERSION_XBMC = 0
#return VERSION_XBMC
def get_groups ( self ):
try:
groups = self._conn.getGroups()
icon = "DefaultFolder.png"
for group in groups:
listitem = xbmcgui.ListItem(group, iconImage = icon)
url = '%s?G?%s' % (self._path, group)
ok = xbmcplugin.addDirectoryItem( handle=self._handle, url=url, listitem=listitem, isFolder=True, totalItems=len(groups) )
if ( not ok ): raise
except:
print sys.exc_info()[ 1 ]
ok = False
return ok
    def main_menu ( self ):
        # Top-level menu: two folder entries, "Live TV" (?PL) and
        # "Recorded Shows" (?PS). Returns False if building the menu failed.
        try:
            listitem = xbmcgui.ListItem("Live TV", iconImage = "DefaultFolder.png")
            ok = xbmcplugin.addDirectoryItem( handle=self._handle, url="%s?PL" % (self._path), listitem=listitem, isFolder=True)
            listitem = xbmcgui.ListItem("Recorded Shows", iconImage = "DefaultFolder.png")
            ok = xbmcplugin.addDirectoryItem( handle=self._handle, url="%s?PS" % (self._path), listitem=listitem, isFolder=True)
        except:
            print sys.exc_info()[ 1 ]
            ok = False
        return ok
def get_channel ( self, group ):
try:
channels = self._conn.getChannels(group)
icon = "DefaultFolder.png"
for channel in channels:
channelStr = channel[1]
if channel[2] != "":
channelStr += " - " + channel[2]
if channel[3] != "":
channelStr += " [" + channel[3] + "]"
listitem = xbmcgui.ListItem(channelStr, iconImage = icon)
url = '%s?C?%s' % (self._path, channel[0])
#print channel[1] + " " + url
ok = xbmcplugin.addDirectoryItem( handle=self._handle, url=url, listitem=listitem, isFolder=False, totalItems=len(channels) )
if ( not ok ): raise
except:
print "[tvserver]" + sys.exc_info()[ 1 ]
ok = False
return ok
    def choose_channel ( self, chanId ):
        # Start timeshifting the given channel on the server and play the
        # returned stream URL. Returns True if playback was started.
        ok = True
        print "[tvserver] choose_channel: " + chanId + " before try"
        try:
            # Stop whatever is currently playing before switching channels.
            xbmc.Player().stop()
            print "[tvserver] xbmc.Player().stop() succesful"
            #if self._conn.isTimeshifting()[0] == "True":
            #    self_.conn.stopTimeshift()
            print "[tvserver] Timeshifting channel : " + chanId
            result = self._conn.timeshiftChannel(chanId)
            print "[tvserver] self._conn.startTimeshift returned: " + result
            # results is rtsp://MACHINE/stream, we want to replace machine with IP
            #result = re.sub("rtsp://([^\/]+)/", "rtsp://" + SERVER_HOST + "/", result)
            # disabled - server now resolves for us.
            #print "TVServer after replace: " + result
            if result.startswith("[ERROR]"):
                # handle the error
                xbmcgui.Dialog().ok("Error", "Error timeshifting channel\n" + result)
            else:
                print "[tvserver] just before play: " + result
                xbmc.Player().play(result)
                print "[tvserver] after play: " + result
        except:
            print "[tvserver] choose_channel: " + chanId + " failed"
            ok = False
        # Remember whether playback was started for this plugin invocation.
        self._is_playing = ok
        return ok
def stop_timeshift (self ):
try:
result = self._conn.stopTimeshift()
xbmc.executebuiltin("Container.Refresh")
except:
return False
return False
    def get_shows ( self ):
        # List all recorded shows. Depending on the "mprecplayback" setting the
        # item URL is built from (0) an SMB share, (1) the server-local file path,
        # or (2) the RTSP stream URL reported by the server. Returns True on success.
        extended_sort = False
        ok = True
        try:
            shows = self._conn.getShows()
            icon = "DefaultFolder.png"
            server_host = str(self.settings["mptvserver"])
            sharepath = str(self.settings["mpsharename"])
            hostIp = str(server_host)
            extended_sort = self.settings["extsort"]
            recording_playback = int(self.settings["mprecplayback"])
            baseurl = xbmc.translatePath("smb://" + hostIp + "/" + sharepath + "/")
            # check for show list where the only entry is blank
            # and if so, empty the list
            if len(shows) == 1:
                if len(shows[0][0]) == 0:
                    shows=()
            for show in shows:
                # Server record layout: id, title, description, genre, watched
                # flag, start, end, filename, channel [, duration, stream URL].
                recID = show[0]
                title = show[1]
                # limit the length of descr as it's used in the URL
                descr = show[2]#[0:1023]
                genre = show[3]
                seen = show[4]
                start = show[5]
                end = show[6]
                fname = show[7]
                chan = show[8]
                # later versions provide more info
                if len(show)>=10:
                    runtime = str(int(show[9]) / 60)
                    streamURL = show[10]
                else:
                    runtime = ""
                    streamURL = ""
                xbmc.output("[tvserver] Show: " + recID + "|" + title + "|" + descr + "|" + fname + "|" + streamURL + "|" + chan)
                # clean up descr for adding to title, as it may have embedded newlines
                descrShort=descr.replace('\n',' ')
                if len(descrShort)>128:
                    descrShort=descrShort[0:128]
                if recording_playback==0:
                    # smb share playback
                    # construct the smb:// URL from the
                    # original filename and the configured
                    # sharename
                    # TODO: make this a configurable format
                    # like the XBMCMythTV scripts do
                    try:
                        basename = str(fname).rsplit('\\',1)[1]
                    except:
                        basename = ""
                    smbName = baseurl + basename
                    try:
                        size = os.path.getsize(smbName)
                    except:
                        size = 0
                elif recording_playback==1:
                    try:
                        size = os.path.getsize(fname)
                    except:
                        size = 0
                    smbName = fname
                elif recording_playback==2:
                    # use the RTSP URL
                    if streamURL != "":
                        smbName=streamURL
                    size = 0
                # MP doesn't give us a channel name,
                # so we fudge it from the filename
                try:
                    chan=basename.split(' - ',3)[1]
                except:
                    chan=""
                studio = chan
                # we use the filename (less the extension) as the title
                if len(chan)>0:
                    showStr = chan + " - " + title + " [" + start.split(" ")[0] +"] "#basename.rsplit('.',1)[0] + ' - ' + descrShort
                else:
                    showStr = title + " [" + start.split(" ")[0] +"] "#basename.rsplit('.',1)[0] + ' - ' + descrShort
                listitem = xbmcgui.ListItem(showStr, iconImage = icon)
                # get date "31:01:2008" from start "1/31/2008 3:25:01 PM"
                # (falls back to a "-"-separated date; empty string if both fail)
                try:
                    startdate=start.split(" ")[0].split("/")
                    if len(startdate[0]) == 1:
                        startdate[0] = "0" + startdate[0]
                    date = startdate[1]+":"+startdate[0]+":"+startdate[2]
                except:
                    try:
                        startdate=start.split(" ")[0].split("-")
                        if len(startdate[1]) == 1:
                            startdate[1] = "0" + startdate[1]
                        date = startdate[0]+":"+startdate[1]+":"+startdate[2]
                    except:
                        date = ""
                # add "Delete <name of show>" to context menu
                # get "Delete" string from locale index 117
                if len(title) > 24:
                    deleteLabel = xbmc.getLocalizedString(117) + " " + title[0:23] + "..."
                else:
                    deleteLabel = xbmc.getLocalizedString(117) + " " + title
                listitem.addContextMenuItems([
                    (deleteLabel, "XBMC.RunPlugin(%s?D&%s&%s)"%(sys.argv[0],recID,title))
                    ])
                # we'll need "url" this when we start calling choose_show()
                # but for now we go straight to the player by using the smb name
                # TODO: encode only the recID, and have choose_show() get the info itself
                #url = '%s?S?%s&%s&%s&%s' % (self._path, urllib.quote(smbName),urllib.quote(title),urllib.quote(genre),urllib.quote(descr))
                #ok = xbmcplugin.addDirectoryItem( handle=int( self._handle ), url=url, listitem=listitem, isFolder=False, totalItems=len(shows) )
                # NOTE(review): "Year" assumes the date parse above succeeded; if both
                # parses failed, startdate holds the previous iteration's value (or is
                # unbound on the first show) -- verify against real server data.
                listitem.setInfo( type="Video", infoLabels={
                                  "Title": showStr, #title,
                                  "Tvshowtitle": title,
                                  "Genre": genre,
                                  "Plot": descr,
                                  #"Plotoutline": descr,
                                  "Size": size,
                                  "Studio": studio,
                                  "Date": date,
                                  "Year": int(startdate[2]),
                                  "Playcount": int(seen),
                                  "Count": 1,
                                  } )
                if runtime != "":
                    listitem.setInfo( type="Video", infoLabels={ "Duration": runtime } )
                ok = xbmcplugin.addDirectoryItem( handle=self._handle, url=smbName, listitem=listitem, isFolder=False, totalItems=len(shows) )
                if ( not ok ): raise
        except:
            print sys.exc_info()[ 1 ]
            ok = False
        # Offer the extra sort orders only when the "extsort" setting is enabled.
        if ( ok and extended_sort ):
            xbmcplugin.addSortMethod( handle=int( sys.argv[ 1 ] ), sortMethod=xbmcplugin.SORT_METHOD_DATE )
            xbmcplugin.addSortMethod( handle=int( sys.argv[ 1 ] ), sortMethod=xbmcplugin.SORT_METHOD_LABEL )
            xbmcplugin.addSortMethod( handle=int( sys.argv[ 1 ] ), sortMethod=xbmcplugin.SORT_METHOD_VIDEO_TITLE )
            xbmcplugin.addSortMethod( handle=int( sys.argv[ 1 ] ), sortMethod=xbmcplugin.SORT_METHOD_SIZE )
            xbmcplugin.addSortMethod( handle=int( sys.argv[ 1 ] ), sortMethod=xbmcplugin.SORT_METHOD_STUDIO )
            xbmcplugin.addSortMethod( handle=int( sys.argv[ 1 ] ), sortMethod=xbmcplugin.SORT_METHOD_GENRE )
            xbmcplugin.addSortMethod( handle=int( sys.argv[ 1 ] ), sortMethod=xbmcplugin.SORT_METHOD_PROGRAM_COUNT )
            xbmcplugin.addSortMethod( handle=int( sys.argv[ 1 ] ), sortMethod=xbmcplugin.SORT_METHOD_EPISODE )
            xbmcplugin.addSortMethod( handle=int( sys.argv[ 1 ] ), sortMethod=xbmcplugin.SORT_METHOD_FILE )
        return ok
    def delete_show ( self, args ):
        # we get the file ID and title and request its deletion
        # args is "recID&title" (URL-quoted); asks for confirmation first.
        # Note: still in beta
        ok = True
        try:
            args=args.split("&",2)
            recID=urllib.unquote(args[0])
            title=urllib.unquote(args[1])
            print "[tvserver] delete_show: " + title + " (" + recID + ")"
            if xbmcgui.Dialog().yesno("Delete", "Delete "+title+"?"):
                result = self._conn.deleteShow(recID)
                if result.startswith("[ERROR]"):
                    # handle the error, or maybe we can just "raise"
                    xbmcgui.Dialog().ok("Error", "Error deleting " + title + "\n" + result)
                    ok = False
                else:
                    # we probably need to refresh the list, somehow
                    # or remove the listitem, or navigate back
                    xbmc.executebuiltin("Container.Refresh")
                    ok = True
        except:
            print "[tvserver] delete failed"
            xbmcgui.Dialog().ok("Error", "An error ocurred deleting the selected show")
            ok = False
            # Re-raise so the failure surfaces in the XBMC log as well.
            raise
        return ok
    def choose_show ( self, args ):
        # NOTE: ***** not used yet *****
        # we get the filename and add to the playlist
        # args is "smbName&title&genre&descr" (URL-quoted).
        ok = True
        try:
            args=args.split("&",4)
            smbName=urllib.unquote(args[0])
            print "[tvserver] choose_show: Playing show: " + smbName
            title=urllib.unquote(args[1])
            genre=urllib.unquote(args[2])
            descr=urllib.unquote(args[3])
            print "[tvserver] playing " + smbName
            thumbnail = xbmc.getInfoImage( "ListItem.Thumb" )
            filesize = os.path.getsize(smbName)
            playlist = xbmc.PlayList( xbmc.PLAYLIST_VIDEO )
            # clear any possible entries
            playlist.clear()
            # set the default icon
            icon = "DefaultVideo.png"
            # only need to add label, icon and thumbnail, setInfo() and addSortMethod() takes care of label2
            listitem = xbmcgui.ListItem( title, iconImage=icon, thumbnailImage=thumbnail )
            # set the key information
            listitem.setInfo( "video", {
                "Title": title,
                "Tvshowtitle": title,
                "Genre": genre,
                "Plotoutline": descr,
                "Size": filesize,
                } )
            # add item to our playlist
            playlist.add( smbName, listitem )
            player = xbmc.Player()
            player.play( playlist )
        except:
            print "[tvserver] playing failed"
            xbmcgui.Dialog().ok("Error", "An error ocurred playing the selected show")
            ok = False
            # Re-raise so the failure surfaces in the XBMC log as well.
            raise
        # set some sort methods
        if ( ok ):
            xbmcplugin.addSortMethod( handle=self._handle, sortMethod=xbmcplugin.SORT_METHOD_DATE )
            xbmcplugin.addSortMethod( handle=self._handle, sortMethod=xbmcplugin.SORT_METHOD_LABEL )
            xbmcplugin.addSortMethod( handle=self._handle, sortMethod=xbmcplugin.SORT_METHOD_VIDEO_TITLE )
            xbmcplugin.addSortMethod( handle=self._handle, sortMethod=xbmcplugin.SORT_METHOD_SIZE )
            xbmcplugin.addSortMethod( handle=self._handle, sortMethod=xbmcplugin.SORT_METHOD_STUDIO )
            xbmcplugin.addSortMethod( handle=self._handle, sortMethod=xbmcplugin.SORT_METHOD_PROGRAM_COUNT )
            xbmcplugin.addSortMethod( handle=self._handle, sortMethod=xbmcplugin.SORT_METHOD_EPISODE )
            xbmcplugin.addSortMethod( handle=self._handle, sortMethod=xbmcplugin.SORT_METHOD_FILE )
        # saw this in the MythTV plug-in
        xbmcplugin.endOfDirectory( handle=self._handle, succeeded=ok )
        return ok
def stop_playing (self ):
# not used yet
try:
result = self._conn.stopPlaying()
except:
return False
return False
|
21,732 | 24afc1718036392002dc0851372105f65045656a | # -*- encoding:utf-8 -*-
class AdminException(Exception):
    """Base exception for administration-related errors in this package."""
21,733 | 210a384c84113169509d183ecbdcecba36beb517 | #import RPi.GPIO as GPIO
import time
from threading import Timer
class Unlock:
    """Drives an electric lock through a relay on a Raspberry Pi GPIO pin.

    The actual GPIO calls are commented out so the class can also run on
    machines without RPi.GPIO; the lock state is then tracked in software only
    via the ``isLocked`` flag.
    """

    def __init__(self):
        self.relay = 3  # GPIO pin (board numbering) wired to the relay
        self.isLocked = True  # software mirror of the physical lock state
        # GPIO.setmode(GPIO.BOARD)
        # GPIO.setwarnings(False)
        # GPIO.setup(self.relay, GPIO.OUT)

    def unlock(self):
        """Open the lock; no-op if it is already unlocked."""
        print("[INFO] unlock")
        if self.isLocked:
            # GPIO.output(self.relay, GPIO.LOW)
            self.isLocked = False

    def lock(self):
        """Close the lock; no-op if it is already locked."""
        print("[INFO] lock")
        if not self.isLocked:
            # GPIO.output(self.relay, GPIO.HIGH)
            self.isLocked = True
21,734 | 02f43ad8d89e80e54f5deb398a705f017868ce2f | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
#
import os
import time
import argparse
import logging
import math
import random
import numpy as np
import mxnet as mx
from core.model import get_model
from core.dataset import NCFTrainData

head = '%(asctime)-15s %(message)s'
# BUGFIX: logging.basicConfig only takes effect on its first call. The original
# script called it twice (level=DEBUG first, then level=INFO with this format),
# so the intended INFO level and format were silently ignored. Configure once.
logging.basicConfig(level=logging.INFO, format=head)

parser = argparse.ArgumentParser(description="Run model optimizer.",
                                 formatter_class=argparse.ArgumentDefaultsHelpFormatter)
parser.add_argument('--path', nargs='?', default='./data/',
                    help='Input data path.')
parser.add_argument('--dataset', nargs='?', default='ml-20m',
                    help='The dataset name.')
parser.add_argument('--model-prefix', type=str, default='./model/ml-20m/neumf')
parser.add_argument('--epoch', type=int, default=7, help='parameters epoch')
# BUGFIX: help string typo "mdoel type" -> "model type".
parser.add_argument('--model-type', type=str, default='neumf', choices=['neumf', 'gmf', 'mlp'],
                    help="model type")
parser.add_argument('--layers', default='[256, 256, 128, 64]',
                    help="list of number hiddens of fc layers in mlp model.")
parser.add_argument('--factor-size-gmf', type=int, default=64,
                    help="outdim of gmf embedding layers.")
parser.add_argument('--num-hidden', type=int, default=1,
                    help="num-hidden of neumf fc layer")

# arg parser
args = parser.parse_args()
logging.info(args)

model_prefix = args.model_prefix
model_type = args.model_type
model_layers = eval(args.layers)  # NOTE: eval of a CLI string -- only run with trusted input
factor_size_gmf = args.factor_size_gmf
factor_size_mlp = int(model_layers[0]/2)
num_hidden = args.num_hidden

train_dataset = NCFTrainData((args.path + args.dataset + '/train-ratings.csv'), nb_neg=4)
# Build the optimized symbol (opt=True expects the fused embedding weights below).
net = get_model(model_type, factor_size_mlp, factor_size_gmf,
                model_layers, num_hidden, train_dataset.nb_users, train_dataset.nb_items, opt=True)

raw_params, _ = mx.model.load_params(model_prefix, args.epoch)

# Fuse the first MLP fully-connected layer into the embedding tables:
# fc_0's weight is split into the half that multiplies the user embedding and
# the half that multiplies the item embedding; each embedding table is then
# pre-multiplied by its half (fc_0's bias is folded into the user side only,
# so it is applied exactly once after the two halves are summed).
fc_0_weight_split = mx.nd.split(raw_params['fc_0_weight'], axis=1, num_outputs=2)
fc_0_left = fc_0_weight_split[0]
fc_0_right = fc_0_weight_split[1]

user_weight_fusion = mx.nd.FullyConnected(data = raw_params['mlp_user_weight'], weight=fc_0_left, bias=raw_params['fc_0_bias'], no_bias=False, num_hidden=model_layers[0])
item_weight_fusion = mx.nd.FullyConnected(data = raw_params['mlp_item_weight'], weight=fc_0_right, no_bias=True, num_hidden=model_layers[0])

# Replace the original (now fused-away) parameters with the fused tables.
opt_params = raw_params
del opt_params['mlp_user_weight']
del opt_params['mlp_item_weight']
del opt_params['fc_0_bias']
opt_params['fused_mlp_user_weight'] = user_weight_fusion
opt_params['fused_mlp_item_weight'] = item_weight_fusion

mx.model.save_checkpoint(model_prefix + '-opt', args.epoch, net, opt_params, {})
|
21,735 | 79fd4a51bec57eb8e039ce95de0dfd5bf1121b42 | ######################## Base imports #################################
from dataiku.code_env_resources import clear_all_env_vars
from dataiku.code_env_resources import set_env_path
######################## Download FLAIR Models ###########################
# Clear all environment variables defined by a previously run script
clear_all_env_vars()
# Set Flair cache directory
set_env_path("FLAIR_CACHE_ROOT", "flair")
from flair.models import SequenceTagger
# Download pretrained model: automatically managed by Flair,
# does not download anything if model is already in FLAIR_CACHE_ROOT
SequenceTagger.load('flair/ner-english-fast@3d3d35790f78a00ef319939b9004209d1d05f788')
# Add any other models you want to download, check https://huggingface.co/flair for examples
# E.g. SequenceTagger.load('flair/ner-french')
# Make sure to modify the model used in recipe.py if you want to use a different model
|
21,736 | e77560311753d623e6b4545ad9c06fe89ec4d50c | """
Exercício 19
Nome: Praticando: Listas/For (Loop)
Objetivo: Praticar a criação e a leitura básica de listas com números e textos.
Dificuldade: Intermediário
1 - Crie uma lista com alguns números inseridos manualmente (não há necessidade do usuário inserí-los).
2 - Crie um for que varra cada elemento da lista e exiba-o no console.
3 - Declare um novo for que exiba apenas os números maiores que 3.
4 - Declare um outro for que exiba apenas os números pares.
5 - Exiba na tela a soma de todos os elementos da lista sem a utilização de funções extras.
6 - Exiba a soma de todos os elementos utilizando a função sum().
7 - Crie uma nova lista a partir de números digitados pelo usuário, faça com que o usuário insira 10 números, porém,
utilize o 'for' para isso, em vez de declarar 10 vezes o input de entrada de informação, declare apenas UMA vez e
faça com que ele seja executado 10 vezes.
A cada atualização da lista, exiba a quantidade de elementos que ela possui.
8 - Exiba apenas os três primeiros elementos da lista no console.
9 - Exiba apenas os três últimos elementos da lista no console.
10 - Declare uma nova lista vazia chamada 'nomes' e armazene 3 nomes digitados pelo usuário, ordene esses
nomes por ordem alfabética e exiba-os na tela, um de cada vez.
"""
# Item 1 -- fixed sample list used by items 2-6.
print("\n1 - Crie uma lista.")
lista = [1, 5, 2, 7, 9, 3, 58]
print(lista)
a = 0
lista2 = []
# Item 2
print("\n2 - Crie um for que varra cada elemento da lista e exiba-o no console.")
for i in range(len(lista)):
    print(f"lista[{i}] = {lista[i]}")
print("\n")
# Item 3
print("3 - Declare um novo for que exiba apenas os números maiores que 3.")
for i in range(len(lista)):
    if lista[i] > 3:
        print(f"lista[{i}] = {lista[i]}")
print("\n")
# Item 4
print("4 - Declare um outro for que exiba apenas os números pares.")
for i in range(len(lista)):
    if lista[i] % 2 == 0:
        print(f"lista[{i}] = {lista[i]}")
print("\n")
# Item 5 -- manual accumulation into 'a' (no sum()).
print("5 - Exiba na tela a soma de todos os elementos da lista sem a utilização de funções extras.")
for i in range(len(lista)):
    a += lista[i]
print(f"\n{a}")
# Item 6 -- the break makes the loop run once, so sum() is printed a single time.
print("\n6 - Exiba a soma de todos os elementos utilizando a função sum().\n")
for i in range(len(lista)):
    print(sum(lista))
    break
# Item 7
print("\nDigite 10 números e a cada número novo na lista, exiba a quantidade de elementos que ela possui.")
def usuario(msg):
    # Prompt with msg and return the typed value converted to int.
    num = int(input(msg))
    return num
for i in range (10):
    ent = usuario("Digite um número: ")
    lista2.append(ent)
    print(lista2)
b = []
# Item 8
print("\n8 - Exiba apenas os três primeiros elementos da lista no console.")
print(f"\nOs três primeiros valores da lista são: {lista2[0:3]}")
# Item 9 -- indices 7:10 are the last three only because exactly 10 values were read above.
print("\n9 - Exiba apenas os três últimos elementos da lista no console.")
print(f"\nOs três últimos valores da lista são: {lista2[7:10]}")
# Item 10
print("\n10 - Digite 3 nomes , ordene esses nomes por ordem alfabética e exiba-os na tela, um de cada vez.")
e = []
def nome(msg):
    # Prompt with msg and return the typed name unchanged.
    nomes = input(msg)
    return nomes
for i in range(3):
    en = nome(f"Digite o {i+1}° nome: ").title()
    e.append(en)
e.sort()
print("\n")
print(*e, sep='\n')
21,737 | fc93070a29b8573bb4a16ce0ff8ecabc7802d907 | from script import Screen
class gender_struct:
def __init__(self, gender):
is_female = gender == "female"
self.he_she = 'she' if is_female else 'he'
self.him_her = 'her' if is_female else 'him'
self.his_her = 'her' if is_female else 'his'
# This is where the intro story will be told
def intro_story(player):
    """Render the opening story on screen, with pronouns matching player.gender."""
    # assumes player has .name and .gender attributes -- TODO confirm against caller
    pronouns = gender_struct(player.gender)
    intro = f"\tThere was smoke coming from over the hill...... That was strange to {player.name} because it wasn't time " \
            f"for dinner and it was too hot to just have a fire going. {player.name}'s mind started to race thinking maybe the " \
            f"Bandits have come back? No, no {pronouns.he_she} gave them a good beating last time. It was too far for {pronouns.him_her} " \
            f"to run due to the large deer on {pronouns.his_her} back. {pronouns.he_she.capitalize()} reassured {pronouns.him_her}self that other " \
            f"people in the village would keep {pronouns.his_her} village safe. But when {player.name} finally made it over the " \
            f"hill {pronouns.he_she} didn't believe {pronouns.his_her} eyes........."
    Screen.display(intro)
|
21,738 | e00222a21aa969616e1d01cbed1d72526268e4be | from textblob import TextBlob
from vaderSentiment.vaderSentiment import SentimentIntensityAnalyzer
import flask
from flask import request, jsonify
import mysql.connector
import time
from flask_cors import CORS
# Connect to the MySQL database used by the API below.
# NOTE(review): credentials are hard-coded — consider environment variables.
try:
    conexion_db = mysql.connector.connect(
        host="localhost", user="alex", passwd="Hola1234.", database="ia2proyectos")
    print("--- Conexión a DB exitosa ---")
except mysql.connector.Error:
    # Only catch connector failures; the original bare `except` also swallowed
    # typos, KeyboardInterrupt, etc.
    print("Error en la Conexión a la DB")
# English noun -> Spanish public-work name. clasificarPedido() matches the
# translated request's nouns against these keys and stores the Spanish value.
listaObras = {"asphalted": "asfaltado",
              "asphalt": "asfaltado",
              "lighting": "alumbrado",
              "paved": "empedrado",
              "hospital": "hospital",
              "post": "posta",
              "potholes": "baches",
              "bridge": "puente",
              "school": "colegio",
              "Park": "parque",
              "sewerage": "alcantarillado",
              "Water": "agua",
              "distributor": "distribuidor",
              "traffic light": "semaforo",
              "court": "cancha",
              "soccer": "futbol",
              "highway": "carretera",
              "avenue": "avenida",
              "terminal": "terminal",
              "train": "tren",
              "irrigation": "riego"
              }
def analizador(texto):
    """Translate Spanish *texto* to English and return VADER polarity scores
    (dict with 'pos', 'neu', 'neg', 'compound')."""
    english = TextBlob(texto).translate(to='en')
    return SentimentIntensityAnalyzer().polarity_scores(english)
def insertar_calificacion(comentario, proyectoid):
    """Score *comentario* with the sentiment analyzer and persist one row in
    `estado` for the given project. Returns the score dict."""
    resultados = analizador(comentario)
    fila = (resultados['pos'], resultados['neu'], resultados['neg'],
            time.strftime('%Y-%m-%d %H:%M:%S'), proyectoid)
    cursor = conexion_db.cursor(buffered=True)
    cursor.execute(
        "INSERT INTO estado (positivo, neutral, negativo, fecha, proyectos_id) VALUES (%s, %s, %s, %s, %s)",
        fila)
    conexion_db.commit()
    cursor.close()
    return resultados
def obtener_resumenCalif(proyectoid):
    """Build a summary of every sentiment score stored for one project.

    Returns {"proyectoid", "proyectonombre", "datos": [{"fecha",
    "calificaciones": {"pos", "neg", "neu"}}, ...]}.
    """
    cursor = conexion_db.cursor(buffered=True)
    # Security fix: proyectoid arrives from an HTTP request; the original built
    # the SQL with str() concatenation and .format(), an injection vector.
    cursor.execute(
        "SELECT positivo, negativo, neutral, fecha FROM estado WHERE proyectos_id=%s",
        (proyectoid,))
    resultados = cursor.fetchall()
    cursor.execute(
        "SELECT nombre FROM proyectos WHERE proyectos_id=%s", (proyectoid,))
    nombre = cursor.fetchone()[0]
    jsonificado = {"proyectoid": proyectoid,
                   "proyectonombre": nombre, "datos": []}
    for dato in resultados:
        jsonificado['datos'].append({"fecha": dato[3].strftime(
            '%Y-%m-%d %H:%M:%S'), "calificaciones": {"pos": dato[0], "neg": dato[1], "neu": dato[2]}})
    cursor.close()
    return jsonificado
def clasificarPedido(pedido, localidad):
    """Translate a work request to English, extract its nouns, and store every
    noun that matches a known public work."""
    traducido = TextBlob(pedido).translate(to='en')
    print(traducido)
    sustantivos = [palabra for palabra, etiqueta in traducido.tags
                   if etiqueta == 'NN']
    for palabra in sustantivos:
        if listaObras.get(palabra) is not None:
            anadirPedido(palabra, localidad)
def anadirPedido(pedido, localidad):
    """Insert one recognized request (English key translated back to the
    Spanish work name) into the `pedidos` table."""
    fila = (listaObras.get(pedido), localidad,
            time.strftime('%Y-%m-%d %H:%M:%S'))
    cursor = conexion_db.cursor(buffered=True)
    cursor.execute(
        "INSERT INTO pedidos (pedido, localidad, fecha) VALUES (%s, %s, %s)",
        fila)
    conexion_db.commit()
    cursor.close()
def getPedido():
    """Return monthly counts of requested works grouped by request type.

    Shape: {"pedidos": [{"pedido", "localidad", "mes", "total"}, ...]}.
    """
    cursor = conexion_db.cursor(buffered=True)
    consultaPedidos = "SELECT pedido,localidad,fecha, COUNT(pedido) AS total FROM pedidos GROUP BY Month(fecha), pedido"
    cursor.execute(consultaPedidos)
    resultados = cursor.fetchall()
    respuesta = {"pedidos": []}
    for dato in resultados:
        respuesta['pedidos'].append({"pedido": dato[0], "localidad": dato[1], "mes": int(
            dato[2].strftime('%m')), "total": dato[3]})
    cursor.close()  # the original leaked this buffered cursor
    return respuesta
# Flask app in debug mode, with CORS allowed on every /api/* endpoint.
app = flask.Flask(__name__)
cors = CORS(app, resources=r'/api/*')
app.config["DEBUG"] = True
@ app.route('/api/calificacion', methods=['POST'])
def api_calificacion():
    """POST {comentario, proyectoid} -> score the comment and store it.

    Returns the VADER score dict as JSON, or an error string when a field
    is missing.
    """
    body = request.get_json()
    # Bug fix: the original indexed body['comentario'] BEFORE the membership
    # check (KeyError -> HTTP 500) and combined the checks with bitwise `&`.
    if ('comentario' in body) and ('proyectoid' in body):
        textoAnalizado = insertar_calificacion(body['comentario'],
                                               body['proyectoid'])
    else:
        return "Error: parámetro 'comentario' o 'proyectoid' no provisto. Por favor especificalos."
    return jsonify(textoAnalizado)
@ app.route('/api/calificacion/resumen', methods=['POST'])
def api_calResumen():
    """POST {proyectoid} -> JSON summary of that project's stored scores."""
    body = request.get_json()
    # Bug fix: validate before indexing; the original read body['proyectoid']
    # first and raised KeyError when the field was absent.
    if 'proyectoid' in body:
        resultados = obtener_resumenCalif(body['proyectoid'])
    else:
        return "Error: parámetro 'proyectoid' no provisto. Por favor especifica un id de proyecto."
    return jsonify(resultados)
@ app.route('/api/pedidos', methods=['POST'])
def api_pedidos():
    """POST {pedido, localidad} -> classify the request and store matches."""
    body = request.get_json()
    # Robustness: also require 'localidad'; the original indexed it unchecked
    # and could return HTTP 500.
    if 'pedido' in body and 'localidad' in body:
        clasificarPedido(body['pedido'], body['localidad'])
    else:
        return "Error: parámetro 'pedido' no provisto. Por favor especifica un pedido de obra."
    return jsonify('Realizado Exitosamente')
@ app.route('/api/pedidos/resumen', methods=['GET'])
def api_getPedidos():
    """GET -> JSON monthly summary of all stored work requests."""
    return jsonify(getPedido())
app.run()
|
21,739 | 5336f56ecf64c48d762cbdb164b22847759ff99f | import os.path
import pandas as pd
from .config.cache import CACHE_FILE
from .exceptions import DataNotScrappedError
class Singleton(type):
    """Metaclass that caches exactly one instance per class.

    The first call to the class constructs and memoizes the instance; every
    later call returns that same object.
    """
    _instances = {}

    def __call__(cls, *args, **kwargs):
        try:
            return cls._instances[cls]
        except KeyError:
            instance = super(Singleton, cls).__call__(*args, **kwargs)
            cls._instances[cls] = instance
            return instance
class Cacher(metaclass=Singleton):
    """Singleton that persists scraped games to CACHE_FILE (CSV) and reads
    them back, optionally filtered by name."""

    def ___cache_data(self, games):
        # Write (category, name) rows to the cache CSV.
        # NOTE(review): the triple leading underscore looks like a typo
        # (double already triggers name mangling) — kept for compatibility.
        df = pd.DataFrame(games, columns=['category', 'name'])
        df.to_csv(CACHE_FILE)

    async def save(self, res: dict):
        """Persist res['games'] (iterable of (category, name)) to the cache.

        :param res: scrape result dict containing a 'games' key
        :return: None
        """
        self.___cache_data(res['games'])

    def __get_data(self, filters):
        """Read the cache and return rows as a list of dicts.

        Raises DataNotScrappedError when the cache file does not exist yet.
        NOTE(review): despite the `dict` annotation on get(), *filters* is
        compared as a scalar against the 'name' column — confirm intent.

        :param filters: falsy for all rows, else an exact 'name' match value
        :return: list of row dicts
        """
        if not os.path.exists(CACHE_FILE):
            raise DataNotScrappedError()
        df = pd.read_csv(CACHE_FILE)
        if not filters:
            return list(df.T.to_dict().values())
        filtered_df = df[df['name'] == filters][['category', 'name']]
        return list(filtered_df.T.to_dict().values())

    async def get(self, filters: dict = None):
        """Get cached rows, optionally filtered (see __get_data)."""
        res = self.__get_data(filters)
        return res
|
21,740 | d9de1ea582e065026daf8a812bcf9054ced24d96 | class Solution(object):
def largestPerimeter(self, A):
A.sort()
ans = 0
for i in xrange(len(A) - 2):
if A[i] + A[i+1] > A[i+2]:
ans = A[i] + A[i+1] + A[i+2]
return ans |
21,741 | 759720d9525f4e8f4709726dd254434271809007 | from datetime import timedelta
from airflow import DAG
from airflow.operators.dummy_operator import DummyOperator
from airflow.contrib.operators.kubernetes_pod_operator import KubernetesPodOperator
from airflow.utils.dates import days_ago
# Arguments Airflow merges into every task of this DAG unless overridden.
default_args = {
    'owner': 'orix.auyeung',
    'depends_on_past': False,
    'email': ['orix.auyeung@hkmci.com'],
    'email_on_failure': False,
    'email_on_retry': False,
    'retries': 20,
    'retry_delay': timedelta(minutes=3),
    'concurrency': 100,
    # Remaining options kept commented for reference:
    # 'queue': 'bash_queue',
    # 'pool': 'backfill',
    # 'priority_weight': 10,
    # 'end_date': datetime(2016, 1, 1),
    # 'wait_for_downstream': False,
    # 'dag': dag,
    # 'sla': timedelta(hours=2),
    # 'execution_timeout': timedelta(seconds=300),
    # 'on_failure_callback': some_function,
    # 'on_success_callback': some_other_function,
    # 'on_retry_callback': another_function,
    # 'sla_miss_callback': yet_another_function,
    # 'trigger_rule': 'all_success'
}
# Keyword arguments applied to every KubernetesPodOperator in this file.
common_pod_args = {
    "is_delete_operator_pod": True,  # remove the pod once the task finishes
    "image_pull_policy": "Always",
    "get_logs": False,
}
# Scheduling constraints shared by crawler pods: they must land on the
# 'crawler-pool' GKE node pool, and the hostname anti-affinity keeps two
# crawler pods from sharing a node.
crawler_pod_args = {
    "affinity": {
        'nodeAffinity': {
            'requiredDuringSchedulingIgnoredDuringExecution': {
                'nodeSelectorTerms': [{
                    'matchExpressions': [{
                        'key': 'cloud.google.com/gke-nodepool',
                        'operator': 'In',
                        'values': ['crawler-pool']
                    }]
                }]
            }
        },
        'podAntiAffinity': {
            'requiredDuringSchedulingIgnoredDuringExecution': [{
                'labelSelector': {
                    'matchExpressions': [{
                        'key': 'pod-type',
                        'operator': 'In',
                        'values': ['crawler-pod']
                    }]
                },
                'topologyKey': 'kubernetes.io/hostname'
            }]
        }
    },
    "labels": {
        'pod-type': 'crawler-pod',
        'redis-client': 'true'
    },
}
# Externally triggered DAG (schedule_interval=None): the trigger conf is
# expected to carry 'runId' and 'minioObject'.
dag = DAG(
    'domain_upload_pipeline',
    default_args=default_args,
    description='Minio-triggered domain analytics pipeline',
    schedule_interval=None,
    start_date=days_ago(2),
    tags=['analytics', 'external trigger'],
)
# First task: read the uploaded Minio object and queue its domains in Redis.
domain_queuer = KubernetesPodOperator(
    namespace='airflow',
    image="gcr.io/smartone-gcp-1/domain-queuer:latest",
    env_vars={
        "RUN_ID": "{{ dag_run.conf['runId'] }}",
        "FILENAME": "{{ dag_run.conf['minioObject'] }}"
    },
    labels={"redis-client": "true"},
    name="domain-queuer",
    task_id="domain-queuer",
    dag=dag,
    **common_pod_args,
)
# def landingpage_worker(worker_id):
# return KubernetesPodOperator(namespace='airflow',
# image="gcr.io/smartone-gcp-1/domain-landingpage-parser:latest",
# name="domain-landingpage-worker-{}".format(worker_id),
# task_id="domain-landingpage-worker-{}".format(worker_id),
# dag=dag,
# resources={
# "request_memory": "512Mi",
# "request_cpu": "100m",
# },
# **common_pod_args,
# **crawler_pod_args,)
#
#
# def webshrinker_worker(worker_id):
# return KubernetesPodOperator(namespace='airflow',
# image="gcr.io/smartone-gcp-1/domain-webshrinker-worker:latest",
# name="domain-webshrinker-worker-{}".format(worker_id),
# task_id="domain-webshrinker-worker-{}".format(worker_id),
# dag=dag,
# resources={
# "request_memory": "512Mi",
# "request_cpu": "100m",
# },
# **common_pod_args,
# **crawler_pod_args,)
def googlesearch_worker(worker_id):
    """Build one googlesearch crawler pod task, uniquely named by worker_id."""
    task_name = "domain-googlesearch-worker-{}".format(worker_id)
    return KubernetesPodOperator(
        namespace='airflow',
        image="gcr.io/smartone-gcp-1/domain-googlesearch-parser:latest",
        name=task_name,
        task_id=task_name,
        dag=dag,
        resources={
            "request_memory": "512Mi",
            "request_cpu": "100m",
        },
        **common_pod_args,
        **crawler_pod_args,
    )
dag.doc_md = __doc__
domain_queuer.doc_md = """\
#### Task Documentation
You can document your task using the attributes `doc_md` (markdown),
`doc` (plain text), `doc_rst`, `doc_json`, `doc_yaml` which gets
rendered in the UI's Task Instance Details page.

"""
# Structural no-op anchors for the DAG graph.
start = DummyOperator(task_id="start", dag=dag)
end = DummyOperator(task_id="end", dag=dag)
domain_googlesearch = DummyOperator(task_id="domain-googlesearch", dag=dag)
# domain_landingpage = DummyOperator(task_id="domain-landingpage", dag=dag)
# domain_webshrinker = DummyOperator(task_id="domain-webshrinker", dag=dag)
# Fan-out: 40 parallel crawler pods draining the Redis queue.
domain_googlesearch_workers = [googlesearch_worker(x) for x in range(40)]
# domain_landingpage_workers = [landingpage_worker(x) for x in range(3)]
# domain_webshrinker_workers = [webshrinker_worker(x) for x in range(3)]
# Fan-in: single reporter pod aggregating crawler output for this run.
domain_googlesearch_reporter = KubernetesPodOperator(namespace='airflow',
                                                     image="gcr.io/smartone-gcp-1/reporter:latest",
                                                     labels={"redis-client": "true"},
                                                     name="domain-googlesearch-reporter",
                                                     task_id="domain-googlesearch-reporter",
                                                     dag=dag,
                                                     env_vars={
                                                         "RUN_ID": "{{ dag_run.conf['runId'] }}",
                                                         "RUN_FLOW": "googlesearch",
                                                         "OUTPUT_VOLUME_PATH": ".",
                                                     },
                                                     resources={
                                                         "request_memory": "1024Mi",
                                                         "request_cpu": "1000m",
                                                         "request_ephemeral_storage": "20Gi",
                                                     },
                                                     **common_pod_args,)
# domain_landingpage_reporter = KubernetesPodOperator(namespace='airflow',
# image="gcr.io/smartone-gcp-1/reporter:latest",
# labels={"redis-client": "true"},
# name="domain-landingpage-reporter",
# task_id="domain-landingpage-reporter",
# dag=dag,
# env_vars={
# "RUN_ID": "{{ dag_run.conf['runId'] }}",
# "RUN_FLOW": "landingpage",
# "OUTPUT_VOLUME_PATH": ".",
# },
# resources={
# "request_memory": "1024Mi",
# "request_cpu": "1000m",
# "request_ephemeral_storage": "20Gi",
# },
# **common_pod_args,)
# domain_webshrinker_reporter = KubernetesPodOperator(namespace='airflow',
# image="gcr.io/smartone-gcp-1/reporter:latest",
# labels={"redis-client": "true"},
# name="domain-webshrinker-reporter",
# task_id="domain-webshrinker-reporter",
# dag=dag,
# env_vars={
# "RUN_ID": "{{ dag_run.conf['runId'] }}",
# "RUN_FLOW": "webshrinker",
# "OUTPUT_VOLUME_PATH": ".",
# },
# resources={
# "request_memory": "1024Mi",
# "request_cpu": "1000m",
# "request_ephemeral_storage": "20Gi",
# },
# **common_pod_args,)
# Graph: start -> queuer -> googlesearch anchor -> 40 workers -> reporter -> end.
# (landingpage / webshrinker branches are currently disabled.)
start.set_downstream(domain_queuer)
# domain_queuer.set_downstream([domain_googlesearch, domain_landingpage, domain_webshrinker])
domain_queuer.set_downstream([domain_googlesearch])
domain_googlesearch.set_downstream(domain_googlesearch_workers)
# domain_landingpage.set_downstream(domain_landingpage_workers)
# domain_webshrinker.set_downstream(domain_webshrinker_workers)
domain_googlesearch_reporter.set_upstream(domain_googlesearch_workers)
# domain_landingpage_reporter.set_upstream(domain_landingpage_workers)
# domain_webshrinker_reporter.set_upstream(domain_webshrinker_workers)
# end.set_upstream([domain_googlesearch_reporter, domain_landingpage_reporter, domain_webshrinker_reporter])
end.set_upstream([domain_googlesearch_reporter])
|
21,742 | f2271c5e4b2d44ae333a175ab56efc85b5b23847 | #!/usr/bin/env python
# coding: utf-8
# In[ ]:
import matplotlib
# Force matplotlib to not use any Xwindows backend.
matplotlib.use('Agg')
import matplotlib.pyplot as plt
from keras.models import Sequential, model_from_json, load_model
from keras.layers.core import Dense, Dropout, Flatten, Activation, SpatialDropout2D, Reshape, Lambda
from keras.layers.normalization import BatchNormalization
from keras.layers.advanced_activations import ELU, PReLU, LeakyReLU
from keras.layers.convolutional import Conv1D
from keras.layers.convolutional import Convolution1D
from keras import layers
from keras.layers import LSTM ,Embedding ,SimpleRNN,CuDNNLSTM
from keras.optimizers import SGD
from keras.callbacks import ModelCheckpoint
from scipy.io import wavfile
import pdb
import scipy.io
# import librosa
import os
from os.path import join as ojoin
# os.environ["CUDA_VISIBLE_DEVICES"]="1"
import time
import numpy as np
import numpy.matlib
import argparse
import random
# import theano
# import theano.tensor as T
import tensorflow as tf
from keras.callbacks import TensorBoard
import keras.backend.tensorflow_backend as KTF
from keras.backend.tensorflow_backend import set_session
# Configure TensorFlow's GPU memory use and report whether a GPU was found.
config = tf.ConfigProto()
config.gpu_options.per_process_gpu_memory_fraction = 0.99  # allow up to 99% of GPU memory
set_session(tf.Session(config=config))
if tf.test.gpu_device_name():
    print('GPU found')
else:
    print("No GPU found")
##定義讀檔路徑function
def get_filepaths(directory):
    """Recursively collect every .wav file path under *directory*.

    Walks the whole tree with os.walk and returns the full paths sorted
    lexicographically.
    """
    wav_paths = []
    for root, _dirs, filenames in os.walk(directory):
        wav_paths.extend(
            os.path.join(root, fname)
            for fname in filenames
            if fname.endswith('.wav')
        )
    wav_paths.sort()
    return wav_paths
# Parallel lists: noisy mixtures and their clean reference recordings.
mixed_file=get_filepaths('mixed_all_snr/')
cleaned_file=get_filepaths('clean')
# Hold out 33% of the file pairs for validation.
from sklearn.model_selection import train_test_split
X_train, X_test, y_train, y_test = train_test_split(mixed_file, cleaned_file, test_size=0.33, random_state=42)
Train_Noisy_lists=X_train
Train_Clean_paths= y_train
Test_Noisy_lists = X_test
Test_Clean_paths = y_test
Num_testdata=len(Test_Noisy_lists)
Num_traindata=len(Train_Noisy_lists)
def train_data_generator(noisy_list, clean_path):
    """Endlessly yield (noisy, clean) training pairs shaped (1, samples, 1).

    noisy_list / clean_path are parallel lists of wav paths. Stereo files are
    downmixed to mono by averaging the channels; the clean target is scaled
    from int16 range to [-1, 1) via /2**15 while the noisy input is left raw,
    matching the original behaviour.

    Fix: stereo *clean* files are now downmixed too, consistent with
    val_data_generator — the original only handled stereo on the noisy side
    here and would fail on the reshape for a stereo clean file.
    """
    index = 0
    while True:
        rate, noisy = wavfile.read(noisy_list[index])
        noisy = noisy.astype('float32')
        if len(noisy.shape) == 2:
            noisy = (noisy[:, 0] + noisy[:, 1]) / 2
        noisy = np.reshape(noisy, (1, np.shape(noisy)[0], 1))
        rate, clean = wavfile.read(clean_path[index])
        clean = clean.astype('float32')
        if len(clean.shape) == 2:
            clean = (clean[:, 0] + clean[:, 1]) / 2
        clean = clean / 2**15
        clean = np.reshape(clean, (1, np.shape(clean)[0], 1))
        index += 1
        if index == len(noisy_list):
            index = 0
        yield noisy, clean
def val_data_generator(noisy_list, clean_path):
    """Endlessly yield (noisy, clean) validation pairs shaped (1, samples, 1).

    Stereo files are downmixed to mono by averaging channels; the clean
    target is scaled from int16 range to [-1, 1) via /2**15, the noisy input
    is left unscaled.
    """
    idx = 0
    while True:
        _rate, noisy = wavfile.read(noisy_list[idx])
        noisy = noisy.astype('float32')
        if noisy.ndim == 2:
            noisy = (noisy[:, 0] + noisy[:, 1]) / 2
        noisy = noisy.reshape((1, noisy.shape[0], 1))
        _rate, clean = wavfile.read(clean_path[idx])
        clean = clean.astype('float32')
        if clean.ndim == 2:
            clean = (clean[:, 0] + clean[:, 1]) / 2
        clean = clean / 2**15
        clean = clean.reshape((1, clean.shape[0], 1))
        idx = (idx + 1) % len(noisy_list)
        yield noisy, clean
## Build the model: two stacked CuDNN LSTMs over raw waveform samples
## (batch, samples, 1), with a tanh dense layer producing one sample out.
start_time = time.time()
model = Sequential()
model.add(CuDNNLSTM(32,return_sequences=True,input_shape=(None,1)))
model.add(CuDNNLSTM(32,return_sequences=True)) # returns the full sequence of 32-dim vectors
model.add(Dense(1,activation='tanh'))
model.summary()
## Training
epoch=5
batch_size=1
model.compile(loss='mse', optimizer='adam')
with open('{}.json'.format('firsttry'),'w') as f: # save the model architecture
    f.write(model.to_json())
checkpointer = ModelCheckpoint(filepath='{}.hdf5'.format('firsttry'), verbose=1, save_best_only=True, mode='min')
print ('training...')
g1 = train_data_generator(Train_Noisy_lists, Train_Clean_paths)
print()
g2 = val_data_generator(Test_Noisy_lists, Test_Clean_paths)
tbCallBack = TensorBoard(log_dir='./logs', # log directory
                 histogram_freq=0, # how often (in epochs) to compute histograms; 0 disables
                 # batch_size=32, # how much data to use per histogram computation
                 write_graph=True, # store the network graph
                 write_grads=True, # visualize gradient histograms
                 write_images=True, # visualize parameters as images
                 embeddings_freq=0,
                 embeddings_layer_names=None,
                 embeddings_metadata=None)
# NOTE(review): samples_per_epoch / nb_val_samples / max_q_size / nb_worker are
# Keras-1 argument names, and checkpointer/tbCallBack are built but never
# passed via callbacks= — confirm the intended Keras version and wiring.
hist=model.fit_generator(g1,
                    samples_per_epoch=Num_traindata,
                    epochs=epoch,
                    verbose=1,
                    validation_data=g2,
                    nb_val_samples=Num_testdata,
                    max_q_size=1,
                    nb_worker=16,
                    use_multiprocessing=True
                    )
## Plot the learning curve
# # plotting the learning curve
TrainERR=hist.history['loss']
ValidERR=hist.history['val_loss']
print ('@%f, Minimun error:%f, at iteration: %i' % (hist.history['val_loss'][epoch-1], np.min(np.asarray(ValidERR)),np.argmin(np.asarray(ValidERR))+1))
# print 'drawing the training process...'
plt.figure(4)
plt.plot(range(1,epoch+1),TrainERR,'b',label='TrainERR')
plt.plot(range(1,epoch+1),ValidERR,'r',label='ValidERR')
plt.xlim([1,epoch])
plt.legend()
plt.xlabel('epoch')
plt.ylabel('error')
plt.grid(True)
plt.show()
plt.savefig('Learning_curve_{}.png'.format('FCN_firsttry'), dpi=150)
end_time = time.time()
print ('The code for this file ran for %.2fm' % ((end_time - start_time) / 60.))
21,743 | dc794eb05e10d80c0da2da5c9852f2c749a1df72 | import serial
from time import sleep as wait
from struct import pack, unpack
from collections import namedtuple
# Single-byte opcodes of the serial protocol spoken by the robot firmware.
handshake = 0x77
end_flag = 0x33
drive_flag = 0x45     # both wheels
left_flag = 0x36      # left wheel only
right_flag = 0x35     # right wheel only
lights_flag = 0x30
ir_read_flag = 0x27   # request an IR distance reading
ir_pos_flag = 0x28    # aim the IR sensor servo
class Robot:
    """Serial driver for a two-wheeled robot.

    Every command is one opcode byte followed by a little-endian signed short
    whose magnitude is mapped from a -100..100 percentage (see map_short).
    NOTE(review): self.port.write(chr(...)) passes a str — Python-2 pyserial
    style; Python 3 requires bytes. Confirm the target interpreter.
    """
    def __init__(self, file, baud):
        # Open the port with DTR deasserted, send the handshake byte, then
        # block until the firmware echoes it back.
        self.location = file
        self.port = serial.Serial()
        self.port.baudrate = baud
        self.port.port = file
        self.port.timeout = 1
        self.port.dtr = 0
        self.port.open()
        wait(3)
        self.port.write(chr(handshake))
        wait(1)
        while self.port.read() != chr(0x77):
            print("Waiting for handshake")
    def shutdown(self):
        """Stop the motors and close the serial port."""
        self.halt()
        self.port.close()
    def map_short(self, num): #where num is a num 0 - 100
        """Map a -100..100 percentage onto a signed short, clamped to ±32767."""
        temp = (num * 32767)/100
        if temp > 32767:
            return 32767
        elif temp < -32767:
            return -32767
        return int(temp)
    def pack_short(self,num):
        # Little-endian signed short ('h'), as the firmware expects.
        return pack("h", int(num))
    def send_cmd(self,flag, data):
        """Send one opcode byte plus the percentage *data* packed as a short."""
        self.port.write(chr(flag))
        self.port.write(self.pack_short(self.map_short(data)))
    def lights_on(self):
        self.send_cmd(lights_flag, 0x01)
    def lights_off(self):
        self.send_cmd(lights_flag, 0x00)
    def halt(self):
        """Stop both wheels."""
        self.send_cmd(drive_flag, 0)
    def turn(self, speed, seconds=None):
        """Spin in place (wheels in opposite directions); optionally for
        *seconds*, then halt."""
        self.send_cmd(left_flag, -speed)
        self.send_cmd(right_flag, speed)
        if seconds != None:
            wait(seconds)
            self.halt()
            return
    def drive_forward(self, speed, adjust=None, seconds=None):
        """Drive forward; *adjust* trims only the right wheel (clamped to
        0..100); optional *seconds* then halt."""
        if adjust == None:
            self.send_cmd(drive_flag, speed)
        else:
            self.send_cmd(left_flag, speed)
            adjusted = speed+adjust
            if adjusted > 100:
                self.send_cmd(right_flag, 100)
            elif adjusted < 0:
                self.send_cmd(right_flag, 0)
            else:
                self.send_cmd(right_flag, speed+adjust)
        if seconds == None:
            return
        wait(seconds)
        self.halt()
    def drive_backward(self, speed, adjust=None, seconds=None):
        """Drive backward; *adjust* trims only the right wheel.

        NOTE(review): the right wheel gets -speed+adjust while forward uses
        speed+adjust — confirm the intended sign of *adjust* when reversing.
        """
        if adjust == None:
            self.send_cmd(drive_flag, -speed)
        else:
            self.send_cmd(left_flag, -speed)
            adjusted = speed+adjust
            if adjusted > 100:
                self.send_cmd(right_flag, -100)
            elif adjusted < 0:
                self.send_cmd(right_flag, -0)
            else:
                self.send_cmd(right_flag, -speed+adjust)
        if seconds == None:
            return
        wait(seconds)
        self.halt()
    def drive_right_wheel(self, speed):
        # NOTE(review): speed is negated here (and in drive_left_wheel) —
        # presumably the firmware's wheel polarity; confirm against hardware.
        self.send_cmd(right_flag, -speed)
    def drive_left_wheel(self, speed):
        self.send_cmd(left_flag, -speed)
    def get_ir_distance(self):
        """Request an IR reading and return it as a big-endian unsigned short."""
        self.send_cmd(ir_read_flag, 1)
        data = self.port.read(2)
        dist = unpack(">H", data)
        return dist[0]
    def set_ir_position(self, angle):
        """Aim the IR sensor servo at *angle* (interpreted by the firmware)."""
        self.send_cmd(ir_pos_flag, angle)
|
21,744 | 7146e9142997b062c52895c421d66c8b9b8294c9 | import timeit as timer
# Function argument decorator
def wrapper(func, *args, **kwargs):
    """Bind *args/**kwargs to *func*, returning a zero-argument thunk
    suitable for timeit."""
    def call_with_bound_args():
        return func(*args, **kwargs)
    return call_with_bound_args
# Measure the time of a func, then print it
def timer_function(func_args, name="Current function", niter = 30):
    """Time the zero-arg callable *func_args* over *niter* iterations with
    timeit and print the elapsed seconds."""
    t = timer.timeit(func_args, number=niter)
    print(f"{t} seconds ({name} , n_iter = {niter})")
def timer_wrap(func, args, niter):
    """Convenience: bind *args* to *func* and time it under its own __name__.

    The original bound timer_function's None return to an unused local; the
    assignment is dropped.
    """
    timer_function(wrapper(func, *args), name=func.__name__, niter=niter)
|
21,745 | 1b50d5fd3a58d09a16eb728cee6b4d9d305b69d0 | import subprocess
from getpass import getpass
def run_ls():
    """Run `ls -la`, printing the raw stdout bytes and then the decoded text."""
    cmd = subprocess.run(["ls", "-la"], stdout=subprocess.PIPE)
    print(cmd.stdout)
    print(cmd.stdout.decode())
def run_ls_as_sudo():
    """Run `ls -la` through `sudo -S` and print its stdout.

    May block or fail without a password on stdin / a cached sudo ticket.
    """
    cmd = subprocess.run(
        "sudo -S ls -la".split(), stdout=subprocess.PIPE, encoding="ascii"
    )
    print(cmd.stdout)
#print(output)
if __name__ == '__main__':
    #run_ls()
    run_ls_as_sudo()
    # Left over from experimenting with feeding the sudo password via stdin:
    #cmd = subprocess.run(
    #    ls, stdout=subprocess.PIPE, input=getpass("password: "), encoding="ascii",
    #)
21,746 | 614c4157f947e294c7b1fd451b11517866aff7e8 | """
Solution to Prime Factors task on Exercism
https://exercism.org/tracks/python/exercises/prime-factors
"""
def is_prime(value: int) -> bool:
    """Return True when *value* is prime.

    1 is reported non-prime; values <= 0 raise ValueError (note: the 1-check
    runs first, so is_prime(1) returns False rather than raising).
    """
    if value == 1:
        return False
    if value <= 0:
        raise ValueError("Value must be greater than zero")
    # Trial division up to the integer square root.
    return all(value % candidate for candidate in range(2, int(value**0.5) + 1))
def factors(value: int) -> list:
    """Return the prime factorization of *value* in ascending order, with
    multiplicity (e.g. 12 -> [2, 2, 3]); [] for value <= 1.

    Performance: plain trial division. Any divisor found this way is
    necessarily prime, so the original per-candidate is_prime() screening
    (O(n*sqrt(n)) overall) is unnecessary; this stops at sqrt(value).
    """
    prime_factors: list = []
    divisor = 2
    while divisor * divisor <= value:
        while value % divisor == 0:
            value //= divisor
            prime_factors.append(divisor)
        divisor += 1
    if value > 1:
        # Whatever remains above sqrt(original) is itself prime.
        prime_factors.append(value)
    return prime_factors
|
21,747 | 57ef1d9a74ee0e0e8ae4a5057d4496970779b388 | import numpy as np
import pandas as pd
import os
from sklearn.model_selection import train_test_split
from keras.utils import to_categorical
DATA_PATH = "sentiment_analysis.xlsx"
def read_file(DATA_PATH):
    '''
    Load the labelled sentiment dataset from an Excel file.
    DATA_PATH : path of the .xlsx file with 'text' and 'score' columns.
    Returns:
        y : (n, 1) array of target scores.
        bangla_text : (n, 1) array of comment texts.
    '''
    df = pd.read_excel(DATA_PATH)
    y = df[['score']].values
    bangla_text = df[['text']].values
    return y, bangla_text
def char_to_utf(bangla_text):
    '''
    Encode each comment as a fixed-length sequence of Unicode code points.
    bangla_text: iterable of rows, each row an iterable of strings.
    Returns:
        utf_all: numpy array of shape (n, 100); sequences longer than 100
        are truncated, shorter ones are zero-padded on the right.
    '''
    encoded_rows = []
    for row in bangla_text:
        for sentence in row:
            codes = np.asarray([ord(ch) for ch in sentence])
            if codes.shape[0] > 100:
                codes = codes[:100]
            else:
                codes = np.pad(codes, pad_width=(0, 100 - codes.shape[0]),
                               mode='constant')
            encoded_rows.append(codes)
    return np.asarray(encoded_rows)
def maximum_length(utf_all):
    '''
    utf_all: iterable of numpy arrays.
    Returns: the largest first-dimension length among them (0 when empty).
    '''
    longest = 0
    for sequence in utf_all:
        longest = max(longest, sequence.shape[0])
    return longest
def padding(utf_all, max_len):
    '''
    Truncate or right-zero-pad every sequence in utf_all to exactly max_len.

    Bug fix: the original computed each padded row but never stored it back,
    so it returned the input completely unchanged; the padded rows are now
    collected and returned as a numpy array of shape (n, max_len).
    Side effect: saves the (now padded) array to "utf_all.npy", as before.
    '''
    padded_rows = []
    for utf in utf_all:
        if utf.shape[0] > max_len:
            utf = utf[:max_len]
        else:
            utf = np.pad(utf, pad_width=(0, max_len - utf.shape[0]),
                         mode='constant')
        padded_rows.append(utf)
    padded = np.asarray(padded_rows)
    np.save("utf_all.npy", padded)
    return padded
def train_test_set():
    '''
    Build the padded feature matrix and one-hot targets from DATA_PATH, then
    return the 80/20 split: x_train, x_test, y_train, y_test.
    '''
    y, bangla_text = read_file(DATA_PATH)
    utf_all = char_to_utf(bangla_text)
    print(utf_all.shape)
    x = padding(utf_all, 100)
    y = to_categorical(np.asarray(y))
    x_train, x_test, y_train, y_test = train_test_split(
        x, y, test_size=0.2, random_state=42)
    return x_train, x_test, y_train, y_test
if __name__ == '__main__':
    # Smoke run: build the split so shape problems surface immediately.
    x_train, x_test, y_train, y_test=train_test_set()
|
21,748 | 8d8223162a36d931ed61888fcbcb6598a9f53e14 | # -*- coding: utf-8 -*-
# Floating-menu item descriptors (Dutch UI labels). Each entry carries the
# DOM menu id, the label, the target route, an optional confirmation message,
# and behaviour flags (e.g. id_required, confirm_before_delete).
fmi_edit = {"menu_id": "edit_menu_item", "menu_text": "Pas aan", "route": "edit", "flags": ["id_required"]}
fmi_delete = {"menu_id": "delete_menu_item", "menu_text": "Verwijder", "route": "delete",
              "message" : "Zeker dat u dit item wil verwijderen", "flags": ["id_required", "confirm_before_delete"]}
fmi_copy = {"menu_id": "copy_menu_item", "menu_text": "Kopieer van", "route": "add", "flags": ["id_required"]}
fmi_add = {"menu_id": "add_menu_item", "menu_text": "Voeg toe", "route": "add", "flags": []}
fmi_view = {"menu_id": "view_menu_item", "menu_text": "Details", "route": "view", "flags": ["id_required"]}
fmi_change_pwd = {"menu_id": "change_pwd_menu_item", "menu_text": "Verander paswoord", "route": "change_pwd","flags": ["id_required"]}
fmi_update_rfid = {"menu_id": "update_rfid_menu_item", "menu_text": "Nieuwe code", "route": "new_rfid","flags": ["bootbox_single"]}
fmi_delete_rfid = {"menu_id": "delete_rfid_item", "menu_text": "Verwijder code", "route": "delete_rfid",
                   "message" : "Zeker dat u deze rfid code wil verwijderen?", "flags": ["confirm_before_delete"]}
fmi_delete_time_ran = {"menu_id": "delete_time_ran_menu_item", "menu_text": "verwijder gelopen tijd", "route": "delete_time_ran",
                       "message" : "Zeker dat u deze tijd wil verwijderen?","flags": ["confirm_before_delete"]}
# Menu composition per entity type:
default_menu_config = [
    fmi_edit,
    fmi_copy,
    fmi_add,
    fmi_view,
    fmi_delete
]
user_menu_config = [
    fmi_edit,
    fmi_change_pwd
]
admin_menu_config = [
    fmi_edit,
    fmi_copy,
    fmi_add,
    fmi_view,
    fmi_delete,
    fmi_change_pwd
]
offence_menu_config = [
    fmi_delete
]
register_runner_menu_config = [
    fmi_update_rfid,
    fmi_delete_rfid,
    fmi_delete_time_ran
]
21,749 | 33c79c962224d647b7fe829e737e739737e34082 | import rl.tools.dds as dds
import ctypes
from data.enums import CMAP
from multiprocessing import Pool
import numpy as np
def convert_card(n):
    """Map a 0-51 card index to (rank symbol from CMAP, suit index 0-3)."""
    rank, suit = CMAP[n % 13], n // 13
    return rank, suit
def convert_hand(hand):
    """Encode 13 card indices as one PBN hand string: four dot-separated
    suit groups, each suit's ranks in descending order."""
    suits = [[], [], [], []]
    for card_index in hand:
        rank, suit = convert_card(card_index)
        suits[suit] += rank
    parts = []
    for suit_cards in suits:
        suit_cards.reverse()
        parts.append("".join(suit_cards))
    return ".".join(parts)
def hands_to_pbn(hands):
    """Encode 52 card indices (south, west, north, east quarters of *hands*)
    as a PBN deal string, returned as utf-8 bytes."""
    south, west, north, east = (
        convert_hand(hands[i * 13:(i + 1) * 13]) for i in range(4)
    )
    deal = "N:{north} {east} {south} {west}".format(
        north=north, east=east, south=south, west=west)
    return bytes(deal, 'utf8')
"""
North South East West
NT 0 0 0 0
S 0 0 0 0
H 0 0 0 0
D 0 0 0 0
C 0 0 0 0
"""
def convert_table(myTable):
    """Reorder a dds resTable into rows [NT, S, H, D, C] by columns
    [North, South, East, West] (see the layout sketch above)."""
    raw = list(map(list, myTable.contents.resTable))
    # dds row order -> our North, South, East, West columns.
    player_order = (0, 2, 1, 3)
    # Row 0 is notrump (dds strain index 4); rows 1-4 are the four suits.
    strain_order = (4, 0, 1, 2, 3)
    return [[raw[player][strain] for player in player_order]
            for strain in strain_order]
def solve_pbn(pbn):
    """Run double-dummy analysis on a PBN deal (bytes) and return the
    reordered results table."""
    deal = dds.ddTableDealPBN()
    deal.cards = pbn
    table = dds.ddTableResults()
    table_ptr = ctypes.pointer(table)
    dds.CalcDDtablePBN(deal, table_ptr)
    return convert_table(table_ptr)
def solve_deal(hands):
    """Convert 52 raw card indices to PBN and solve the deal."""
    return solve_pbn(hands_to_pbn(hands))
class Solver(object):
    """Stateful wrapper around the dds double-dummy library for Monte-Carlo
    scoring of a North/South pair against random splits of the unseen cards."""
    def __init__(self):
        # NOTE(review): dds.InitStart argument meanings (threads/memory?) —
        # confirm against the dds library docs.
        dds.InitStart(2, 4)
        self.tableDealPBN = dds.ddTableDealPBN()
        self.table = dds.ddTableResults()
        self.myTable = ctypes.pointer(self.table)
    def score1(self, trump, north, south, we):
        """Trick cost of declaring *trump* vs the best strain, for one deal
        where *we* (26 cards) is split between West and East as given.
        Uses column 0 of the converted table for every strain."""
        hands = np.concatenate((north, we[:13], south, we[13:]))
        s = self.solve(hands)
        return max([s[i][0] for i in range(5)]) - s[trump][0]
    def score2(self, north, south, we):
        """Element-wise trick cost of every (strain, seat) cell relative to
        the single best cell of the table."""
        hands = np.concatenate((north, we[:13], south, we[13:]))
        s = self.solve(hands)
        return np.max(s) - s
    def mean_score2(self, n, north, south, we):
        """Average score2 over n random permutations of the unseen cards."""
        res = [self.score2(north, south, np.random.permutation(we)) for _ in range(n)]
        return sum(res)/n
    def mean_score(self, n, trump, north, south, we):
        """Average score1 over n random permutations of the unseen cards."""
        res = [self.score1(trump, north, south, np.random.permutation(we)) for _ in range(n)]
        return sum(res)/n
    def solve(self, hands):
        """Solve one full deal (52 card indices) and return the converted
        5x4 results table. Fresh dds structs are allocated per call."""
        hands = [int(k) for k in hands]
        self.tableDealPBN = dds.ddTableDealPBN()
        self.table = dds.ddTableResults()
        self.myTable = ctypes.pointer(self.table)
        self.tableDealPBN.cards = hands_to_pbn(hands)
        res = dds.CalcDDtablePBN(self.tableDealPBN, self.myTable)
        return convert_table(self.myTable)
|
21,750 | 5c82437b51c89dc43d9af0a56039e311822c6723 | #imports
import sqlite3 as sqlite
import pandas as pd
import csv
import json
class dbdump:
    """Collects table metadata from an SQLite database (work in progress)."""
    def __init__ (self, path):
        self.con = sqlite.connect(path)
        self.metadata = {}  # reserved for per-table metadata; never populated yet
    def __close__ (self):
        # NOTE(review): __close__ is not a Python protocol method — nothing
        # calls it implicitly; confirm callers invoke it by name.
        self.con.close()
    def createdump(self):
        """Iterate all tables and read their column names (incomplete)."""
        cursor = self.con.cursor()
        cursor.execute("SELECT name FROM sqlite_master WHERE type = 'table';")
        tables = cursor.fetchall()
        for table in tables:
            table_name = table[0]
            cursor.execute("SELECT * FROM %s" % table_name)
            data = [description[0] for description in cursor.description]
            # NOTE(review): `{data}` builds a set containing a list and raises
            # TypeError (list is unhashable) at runtime; the intended structure
            # — and any write to self.metadata — is still missing.
            data_dict = [{data}]
|
21,751 | bcc84d47e52c54a561468b44c9fd0de144ac1d88 | import numpy
import matplotlib.pyplot as plt
class MyOVBox(OVBox):
    """OpenViBE Python scripting box: forwards the signal downstream while
    accumulating samples into three module-level time-window buffers
    (0-20s, 20-40s, 40-55s) that uninitialize() plots as histograms.

    NOTE(review): OVBox / OVSignalHeader / OVSignalBuffer / OVSignalEnd are
    injected into the namespace by the OpenViBE runtime, not imported here.
    """
    def __init__(self):
        OVBox.__init__(self)
        self.signalHeader = None  # filled when the header chunk arrives
    def process(self):
        # Buffers are module-level globals so uninitialize() can reach them.
        global numpyBuffer1
        global numpyBuffer2
        global numpyBuffer3
        global numpyBuffer1_list
        #numpyBuffer1 = []
        #numpyBuffer2 = []
        #numpyBuffer3 = []
        for chunkIndex in range( len(self.input[0]) ):
            if(type(self.input[0][chunkIndex]) == OVSignalHeader):
                # First chunk: forward a 1-channel 'Mean' header and reset the
                # accumulation buffers to the incoming chunk dimensions.
                self.signalHeader = self.input[0].pop()
                outputHeader = OVSignalHeader(
                self.signalHeader.startTime,
                self.signalHeader.endTime,
                [1, self.signalHeader.dimensionSizes[1]],
                ['Mean']+self.signalHeader.dimensionSizes[1]*[''],
                self.signalHeader.samplingRate)
                self.output[0].append(outputHeader)
                numpyBuffer1 = numpy.zeros(tuple(self.signalHeader.dimensionSizes))
                numpyBuffer2 = numpy.zeros(tuple(self.signalHeader.dimensionSizes))
                numpyBuffer3 = numpy.zeros(tuple(self.signalHeader.dimensionSizes))
                numpyBuffer1_list = []
            elif(type(self.input[0][chunkIndex]) == OVSignalBuffer):
                # Data chunk: route samples into a buffer by wall-clock window
                # (per the prints: 1=baseline, 2=relaxed, 3=attentive).
                chunk = self.input[0].pop()
                numpyBuffer = numpy.array(chunk).reshape(tuple(self.signalHeader.dimensionSizes))
                #numpyBuffer = numpyBuffer.mean(axis=0)
                if((chunk.startTime > 0) &(chunk.endTime <= 20)):
                    numpyBuffer1 = numpy.hstack((numpyBuffer1, numpyBuffer))
                    print('1')
                if((chunk.startTime > 20) &(chunk.endTime <= 40)):
                    numpyBuffer2 = numpy.hstack((numpyBuffer2, numpyBuffer))
                    print('2')
                if((chunk.startTime > 40) &(chunk.endTime <= 55)):
                    numpyBuffer3 = numpy.hstack((numpyBuffer3, numpyBuffer))
                    print('3')
                if((chunk.startTime > 56)):
                    # End of the protocol: report window means and offset
                    # buffers 2/3 — presumably to separate the histogram
                    # distributions in uninitialize(); confirm intent.
                    print('baseline: ', numpy.mean(numpyBuffer1))
                    print('relaxed: ',numpy.mean(numpyBuffer2))
                    print('attentive: ', numpy.mean(numpyBuffer3))
                    numpyBuffer2 = numpyBuffer2 + 0.5
                    numpyBuffer3 = numpyBuffer3 + 1
                chunk = OVSignalBuffer(chunk.startTime, chunk.endTime, numpyBuffer.tolist())
                self.output[0].append(chunk)
            elif(type(self.input[0][chunkIndex]) == OVSignalEnd):
                # Forward the end-of-stream marker unchanged.
                self.output[0].append(self.input[0].pop())
    def uninitialize(self):
        # Plot the first channel of each window's buffer as a density histogram.
        plt.hist([numpyBuffer1[0,:], numpyBuffer2[0,:], numpyBuffer3[0,:]], density=True)
        #numpy.save('C:\\Users\\Chris\\Documents\\3_Uni\\1_MSc Neuroengineering\\Semester 4\\NISE\\NISE\\Python\\buffer.npy', numpyBuffer1)
        plt.show()
box = MyOVBox()
|
21,752 | 5c6e4a2a5a9566ea61c072566bd7482a76432cb2 |
from eval.Answer import Answer
from eval.Question import Question
from eval.USP import USP
|
21,753 | 39622fa1392850bf761baf32558e80c5dc1f40e5 | from Node import *
from Debugger import *
# Module-level debugger used throughout the BST implementation below.
debug = Debugger()
debug.enable()
class BinarySearchTree(object):
    """A Binary Search Tree Implementation:

    Attributes:
        name: A string representing the BST's name.
        root: The root Node, or None while the tree is empty.
        size: Number of nodes currently stored.
    """
    def __init__(self, name):
        """Create an empty tree; the root node is created on first insert."""
        debug.printMsg("We Initiated a BST with no root node")
        self.name = name
        self.root = None
        self.size = 0
    def length(self):
        """Return the number of nodes in the BST.

        BUG FIX: previously returned ``self.length`` — the bound method
        object itself — instead of the stored node count.
        """
        return self.size
    def __contains__(self, key):
        """Overload the *in* operator. credit@interactivepython.org

        BUG FIX: recursiveLookup always returns a ``(result, err)`` tuple,
        which is truthy even on a miss, so ``in`` used to report True for
        any key once the tree was non-empty.  Also guards the empty tree,
        which previously crashed dereferencing ``None``.
        """
        if self.root is None:
            return False
        (result, err) = self.recursiveLookup(key, self.root)
        return not err
    def __len__(self):
        """Return the number of nodes (supports len(tree))."""
        return self.size
    def __iter__(self):
        """Delegate iteration to the root node's iterator."""
        return self.root.__iter__()
    def __setitem__(self, k, v):
        """Allow ``tree[k] = v`` as an alias for insert."""
        self.insert(k, v)
    def insert(self, key, data):
        """Insert data under key using an O(log n) average-case descent."""
        debug.printMsg('Insert for "' + key + '" With data: ' + str(data))
        # if there is no root node
        if not self.root:
            debug.printMsg("No root was found, create one")
            self.root = Node(key, data)
        else:
            debug.printMsg("Root was found, starting recursive insert")
            self.recursiveInsert(key, data, self.root)
        # increment the size of the BST
        debug.printMsg("Incrementing size of BST")
        self.size = self.size + 1
    def recursiveInsert(self, key, data, curr):
        """Descend from curr and attach a new Node at the first free slot.

        Keys greater than curr.key go right; everything else (including
        duplicates) goes left.
        """
        debug.printMsg("Entered recursiveInsert")
        debug.printMsg("checking whether we go right or left")
        if key > curr.key:
            debug.printMsg("we go right")
            debug.printMsg("checking if we have available space")
            if curr.hasRightChild():
                debug.printMsg("nope, calling recursiveInsert again")
                self.recursiveInsert(key, data, curr.right)
            else:
                debug.printMsg("yep, we'll insert it here")
                curr.right = Node(key, data, curr)
        else:
            debug.printMsg("we go left")
            if curr.hasLeftChild():
                debug.printMsg("checking if we have available space")
                self.recursiveInsert(key, data, curr.left)
            else:
                debug.printMsg("yep, we'll insert it here")
                curr.left = Node(key, data, curr)
    def lookup(self, key):
        """Return the Node stored under key, or None when absent or tree empty."""
        debug.printMsg("Call made to Lookup")
        debug.printMsg("checking if we have a BST")
        if self.root:
            debug.printMsg("Calling Recursive Lookup")
            (result, err) = self.recursiveLookup(key, self.root)
            if err:
                debug.printMsg("Oops, we couldn't find anything")
                return None
            else:
                debug.printMsg("we found: ")
                return result
        else:
            debug.printMsg("Oops, the BST seems to not exist")
            return None
    def recursiveLookup(self, key, curr):
        """Binary-search descent; returns (node, None) on a hit, (None, True) on a miss."""
        debug.printMsg("Entered recursiveLookup")
        debug.printMsg('Checking base condition: ' + key + ' = ' + curr.key)
        if key == curr.key:
            debug.printMsg("Success, found")
            return (curr, None)
        elif key > curr.key:
            debug.printMsg("Nope, now checking if we should go right")
            debug.printMsg("yep")
            debug.printMsg("Check if we still have room to search")
            if curr.hasRightChild():
                debug.printMsg("Moving further right")
                return self.recursiveLookup(key, curr.right)
            else:
                debug.printMsg("Nope, ran out of search path. bummer")
                return (None, True)
        else:
            debug.printMsg("Nope, we're going left")
            debug.printMsg("Check if we still have room to search")
            if curr.hasLeftChild():
                debug.printMsg("Moving further left")
                return self.recursiveLookup(key, curr.left)
            else:
                debug.printMsg("Shit balls, we ran out of search path")
                return (None, True)
|
21,754 | 2d7e394611d2fd2bff113235a0bd17df057a809e | from typing import List
class Solution:
    def largestAltitude(self, gain: List[int]) -> int:
        """Return the highest altitude reached while applying the gains in order.

        Tracks the running altitude directly instead of materialising the
        whole prefix-sum list; the start altitude (0) is always a candidate.
        """
        altitude = 0
        highest = 0
        for step in gain:
            altitude += step
            if altitude > highest:
                highest = altitude
        return highest
# Read n gains from stdin and print the peak altitude.
# BUG FIX: the original shadowed the builtin ``list`` and then called
# ``largestAltitude(1, list)`` — an undefined module-level name (NameError).
n = int(input())
gains = []
for _ in range(n):
    gains.append(int(input()))
print(Solution().largestAltitude(gains))
21,755 | 451d42a6b347de622429587860c09c09a6d0f483 | from flask import Flask
import pymysql
app = Flask(__name__)
# NOTE(review): database host and credentials are hard-coded and committed —
# move them into environment variables or a config file before deploying.
conn = pymysql.connect(
    host= 'vcu-eras-application.cqdbzahrxvgz.us-east-1.rds.amazonaws.com',
    port= 3306,
    user= 'admin',
    password ='vcueras1!',
    db = 'VCU_ERAS',
)
@app.route('/')
def hello_world():
    """Health-check route: plain greeting at the site root."""
    greeting = 'Hello World!'
    return greeting
@app.route('/surgeries', methods=['GET'])
def get_info():
    """Return the SurgeryInfo text for the reconstructive breast surgery row.

    BUG FIX: the cursor was never closed (leaked on every request); it is
    now released in a finally block.  An empty result set now yields a 404
    instead of an unhandled IndexError (HTTP 500).
    """
    cur = conn.cursor()
    try:
        # NOTE(review): 'Brest' looks like a typo, but it must match the row
        # actually stored in the Surgeries table — confirm before changing.
        cur.execute("SELECT SurgeryInfo FROM Surgeries WHERE SurgeryName = 'Reconstructive Brest Surgery' ")
        details = cur.fetchall()
    finally:
        cur.close()
    if not details:
        return ('Not found', 404)
    print(details[0][0])
    return(details[0][0])
# Run the Flask development server when executed directly (not under WSGI).
if __name__ == '__main__':
    app.run()
|
21,756 | 01f1abc1f8517f54c0bfd74d9912c88a671c9b26 | file=open("18.txt", "r")
a=[[int(val) for val in line.split()] for line in file.readlines()]
file.close()
b=a[-1]
c=size=a.__len__()-1
for i in range(size-1, -1, -1):
for j in range(0, c, 1):
if (a[i][j]+b[j]>a[i][j]+b[j+1]):
b[j]=a[i][j]+b[j]
else:
b[j]=a[i][j]+b[j+1]
c-=1
print(b[0]) |
21,757 | 587a683ae681d19de5fda19576a3e986beca45d3 | from keras.models import Sequential, Model
from keras.layers.core import Dense, Dropout, Activation, Flatten
#from keras.layers.convolutional import Conv2D, MaxPooling2D, AveragePooling2D
#from keras.layers.advanced_activations import LeakyReLU, PReLU
from keras.utils import np_utils, generic_utils, to_categorical
from sklearn.utils import shuffle
import keras
import sys
#from keras import regularizers
#from keras.regularizers import l2
import numpy as np
from keras import backend as K
#K.set_image_dim_ordering('th')
from keras.layers import Input
import os
from PIL import Image
from skimage.transform import resize
from keras.applications.vgg16 import VGG16
from keras.layers import GlobalAveragePooling2D
from keras import optimizers
# --- Hyper-parameters -------------------------------------------------------
nb_classes = 10
img_channels = 3
# NOTE(review): img_rows/img_cols are declared 112 but images below are
# resized to 224x224 and these two names are never used — confirm intent.
img_rows = 112
img_cols = 112
# Training directory: argv[1] if given, otherwise the default subset path.
if len(sys.argv)>1:
    train = sys.argv[1]
else:
    train = 'sub_imagenet/train'
# --- Data loading: one sub-folder per class, folder index = class label -----
img_folders = os.listdir(train)
data = []
labels = []
for i in range(len(img_folders)):
    images = os.listdir(train+"/"+img_folders[i])
    for img in images:
        image = Image.open(train+"/"+img_folders[i]+"/"+img)
        image = np.array(image, dtype='uint8')
        data.append(resize(image, (224,224,3)))
        labels.append(i)
X_train = np.array(data)
Y_train = np.array(labels)
Y_train = to_categorical(Y_train, nb_classes)
print('X_train shape:', X_train.shape)
print(X_train.shape[0], 'train samples')
print('Y_train shape:', Y_train.shape)
X_train,Y_train = shuffle(X_train,Y_train)
# --- Model: ImageNet VGG16 base, first 9 layers frozen, new dense head ------
image_input = Input(shape=(224,224,3))
model = VGG16(include_top=False, weights='imagenet', input_tensor=image_input, pooling=None,classes=nb_classes)
for layer in model.layers[:9]:
    layer.trainable = False
output = model.output
output = Flatten()(output)
output = Dense(1024, activation="relu")(output)
predictions = Dense(10, activation="softmax")(output)
model_final = Model(input = model.input, output = predictions)
#keras.utils.multi_gpu_model(model, gpus=2, cpu_merge=False, cpu_relocation=False)
#opt = keras.optimizers.Adamax(lr=0.001, beta_1=0.9, beta_2=0.99, decay=1e-6)# best one
opt = optimizers.SGD(lr=0.0001,momentum=0.9)
model_final.compile(loss='categorical_crossentropy',
            optimizer=opt,
            metrics=['accuracy'])
def train():
    """Fine-tune the VGG16 head on X_train/Y_train and save the model.

    BUG FIX: the fallback branch called the undefined name ``model_save``;
    it now calls ``model_final.save``.  Also, the save path is argv[2], so
    require len(sys.argv) > 2 (the old ``> 1`` check could raise IndexError
    when only the training dir was supplied).
    """
    model_final.fit(X_train, Y_train,
          batch_size=32,
          epochs=20,
          shuffle=True)
    # argv[1] is the training dir; argv[2] (if present) is the output path.
    if len(sys.argv) > 2:
        model_final.save(sys.argv[2])
    else:
        model_final.save('model.h5')
train()
|
21,758 | fda50928f8fcd9ff258380b12c41035607dc025f | # coding: utf-8
import matplotlib
matplotlib.use("Agg")
from ecell4 import *
util.decorator.SEAMLESS_RATELAW_SUPPORT = True
import numpy as np
import pandas as pd
import itertools as itr
import sys
import seaborn as sns
import matplotlib.pylab as plt
from ribo6 import makedir
## Linear-chart inputs/outputs.
csvdir = "results/ribo6/csv/liner"
# NOTE(review): "Streptmycin" (sic) is used to build file names — it must
# match the CSV files on disk, so the spelling is left untouched.
drugNames = ["Streptmycin", "Chloramphenicol"]
imgdir = "results/ribo6/images"
makedir(imgdir)
## Normal model: plot single/double growth rates normalised to dose 0.
for drugName in drugNames:
    normal = pd.read_csv("{}/{}_0.csv".format(csvdir, drugName))
    Lambda_0 = normal["single"][0]
    plt.plot(np.linspace(0, 100, 101), list(map(lambda x: x / Lambda_0, list(normal["single"]))), label="single")
    plt.plot(np.linspace(0, 100, 101), list(map(lambda x: x / Lambda_0, list(normal["double"]))), label="double")
    plt.legend()
    plt.xlabel("Dose", fontsize=20)
    plt.ylabel("Growth Rate", fontsize=20)
    plt.tight_layout()
    plt.savefig("{}/{}_normal.png".format(imgdir, drugName), dpi=300)
    plt.close()
## Changed model: same plot from the *_1.csv files.
# BUG FIX: this loop never called plt.close(), so the second drug's figure
# accumulated the first drug's curves (the first loop above does close).
for drugName in drugNames:
    change = pd.read_csv("{}/{}_1.csv".format(csvdir, drugName))
    Lambda_0 = change["single"][0]
    plt.plot(np.linspace(0, 100, 101), list(map(lambda x: x / Lambda_0, list(change["single"]))), label="single")
    plt.plot(np.linspace(0, 100, 101), list(map(lambda x: x / Lambda_0, list(change["double"]))), label="double")
    plt.legend()
    plt.xlabel("Dose", fontsize=20)
    plt.ylabel("Growth Rate", fontsize=20)
    plt.tight_layout()
    plt.savefig("{}/{}_change.png".format(imgdir, drugName), dpi=300)
    plt.close()
21,759 | 01b94dec10b4cffa519d6dbae4d808482bd2a354 | '项目失败,推到重来'
life is short , i use python
|
21,760 | 39e6656a9f6c5ee5b929aca3d969470006816c96 | #########################################
# groupe MPCI 4
# Elive DIPOKO
# Ali GOUMANE
# Marie PHILIBERT
# Stalin SIVASANGAR
#https://github.com/uvsq21916121/projet_incendie.git
#########################################
#importation des bibliothèques
import tkinter as tk
import random as rd
# MODEL CONSTANTS
HAUTEUR = 600
LARGEUR = 600
color_font = "white"
# Possible parcel terrains: water (blue), vegetation (green), sand (yellow).
type_de_terrain = ["blue", "green" , "yellow"]
# Each parcel is 1/100th of the canvas in each direction.
largeur_parcelle = LARGEUR //100
hauteur_parcelle = HAUTEUR // 100
DUREE_FEU = 1
#DUREE_CENDRE = 500
# 100x100 state grid; cells hold 2 (water), 1 (vegetation) or 0 (sand),
# as assigned in choix_du_terrain below.
etat= 100*[0]
for i in range(0,100):
    etat[i] = 100*[0]
# FUNCTION DEFINITIONS
def choix_du_terrain():
    """Fill the canvas with randomly chosen terrain parcels and record their states."""
    # Colour -> numeric state code stored in the global ``etat`` grid.
    state_of = {"blue": 2, "green": 1, "yellow": 0}
    for col in range(100):
        for row in range(100):
            couleur = rd.choice(type_de_terrain)
            canvas.create_rectangle(
                (col * largeur_parcelle, row * hauteur_parcelle),
                ((col + 1) * largeur_parcelle, (row + 1) * hauteur_parcelle),
                fill=couleur)
            etat[col][row] = state_of[couleur]
    print(etat[0][0])
    return
# MAIN PROGRAM: window, canvas for the grid, and a start button wired to
# choix_du_terrain.
racine = tk.Tk()
canvas = tk.Canvas (racine ,width =LARGEUR, height = HAUTEUR , bg = color_font)
bouton = tk.Button(racine , text ="Démarrer" ,command = choix_du_terrain, font = ("helvetica",30))
bouton.grid (column = 1,row = 6)
canvas.grid(column = 1 ,row = 1 ,rowspan = 5)
racine.mainloop()
21,761 | 5769fa347a1fa606d39b07a5f58040c0812d0c71 | #coding: utf-8
# Admin manager
from oi.projects.models import Project,Spec,Bid,PromotedProject,ProjectACL,Release
from django.contrib import admin
from django.utils.translation import ugettext as _
class SpecInline(admin.TabularInline):
    """Edit a project's Specs inline on the Project admin page."""
    model = Spec
    extra = 0
class ACLInline(admin.TabularInline):
    """Edit a project's ACL entries inline on the Project admin page."""
    model = ProjectACL
    extra = 0
class ProjectAdmin(admin.ModelAdmin):
    """Admin options for Project: list layout, search, inlines and bulk actions."""
    list_display=('__unicode__','parent','master','public')
    list_per_page = 200
    list_editable = ('public',)
    date_hierarchy = 'created'
    actions = ['compute_descendants']
    search_fields = ['title']
    # list_filter = ('is_staff', 'company')
    raw_id_fields = ('parent',)
    radio_fields = {'state':admin.HORIZONTAL}
    inlines = [SpecInline,ACLInline]
    def compute_descendants(self, request, prjqueryset):
        """Bulk action: re-save every descendant task of the selected projects.

        Re-saving presumably refreshes denormalised data via the model's
        save() hooks — confirm against the Project model.
        """
        for prj in prjqueryset:
            for task in prj.descendants.all():
                task.save()
        self.message_user(request, "OK")
    compute_descendants.short_description = _("Recompute all descendants")
# Register the models with the default admin site.
admin.site.register(Project, ProjectAdmin)
admin.site.register(Spec)
admin.site.register(Bid)
admin.site.register(PromotedProject)
admin.site.register(Release)
21,762 | 80af258777f49a28d9251f6f3a0ef9c09dcb4537 |
import sys
import logging
from logging import handlers
import os, os.path
import cherrypy
from cherrypy import _cplogging
from cherrypy.lib import httputil
#Docker conatiner didn't like daemonizing the app
#from cherrypy.process.plugins import Daemonizer
class Server(object):
    """CherryPy server wrapper: loads config, wires the SQLAlchemy plugin, runs the engine."""
    def __init__(self, options):
        #configure app paths
        self.base_dir = os.path.normpath(os.path.abspath(options.basedir))
        self.conf_path = os.path.join(self.base_dir, "conf")
        log_dir = os.path.join(self.base_dir, "logs")
        if not os.path.exists(log_dir):
            os.mkdir(log_dir)
        # app configurations: server.cfg first, then db.cfg; later updates win
        cherrypy.config.update(os.path.join(self.conf_path, "server.cfg"))
        cherrypy.config.update(os.path.join(self.conf_path, "db.cfg"))
        cherrypy.config.update({'error_page.default': self.on_error})
        engine = cherrypy.engine
        # Make the app's base dir importable for the lib.* imports below.
        sys.path.insert(0, self.base_dir)
        #import services
        #from webapp.messages import RestMessages
        #events = RestMessages()
        # configure sqlalchemy DB plugin; DSN assembled from the [Database] config section
        from lib.data.saplugin import SAEnginePlugin
        SAEnginePlugin(cherrypy.engine, cherrypy.config['Database']['dbdriver'] + "://" + cherrypy.config['Database']['dbuser'] + ":" + cherrypy.config['Database']['dbpassword'] + "@" + cherrypy.config['Database']['dbhost'] + "/" + cherrypy.config['Database']['dbname']).subscribe()
        from lib.data.satool import SATool
        cherrypy.tools.db = SATool()
        # Default dispatcher/headers/proxy/db settings for mounted apps.
        conf = {
            '/' : {
                'request.dispatch': cherrypy.dispatch.MethodDispatcher(),
                'tools.response_headers.on': True,
                #Will probably make these JSON
                'tools.response_headers.headers': [('Content-Type', 'application/json')],
                'tools.proxy.on': True,
                'tools.db.on': True
            }
        }
        #app = cherrypy.tree.mount(events, '/events',conf)
    def run(self):
        """Subscribe signal handlers, start the engine, and block until shutdown."""
        #Starting and running the cherrypy engine and daemonizing
        engine = cherrypy.engine
        #d = Daemonizer(engine)
        #d.subscribe()
        if hasattr(engine, "signal_handler"):
            engine.signal_handler.subscribe()
        if hasattr(engine, "console_control_handler"):
            engine.console_control_handler.subscribe()
        engine.start()
        engine.block()
    def on_error(self, status, message, traceback, version):
        """Default error page handler: return the raw traceback text."""
        #Least of my concerns at the moment
        return traceback
if __name__ == '__main__':
    from optparse import OptionParser
    def parse_commandline():
        """Parse -b/--base-dir, defaulting to the current working directory."""
        curdir = os.path.normpath(os.path.abspath(os.path.curdir))
        parser = OptionParser()
        parser.add_option("-b","--base-dir", dest="basedir",help="Base directory in which the server "\
            "is launched (default: %s)" % curdir)
        parser.set_defaults(basedir=curdir)
        (options, args) = parser.parse_args()
        return options
    Server(parse_commandline()).run()
|
21,763 | 6dcea788a6f7b5fd5fb53196b04ebcb10d601d8a |
class TestCaseObject:
    """A test case plus the mutator that produced it and its precomputed edits."""
    def __init__(self, testcase, mutatorid):
        # BUG FIX: was ``_init_`` (single underscores) so it never ran as a
        # constructor; also ``mutationlist`` was a class attribute shared by
        # every instance — it is now per-instance.
        self.testcase = testcase
        self.mutatorid = mutatorid
        self.synthesis = None
        self.mutationlist = []  # list of precomputed edits
    def set_synthesis_target(self, syn):
        """Record the synthesis target (BUG FIX: ``self`` parameter was missing)."""
        self.synthesis = syn
    def append_mutator(self, mutator_obj):
        """Append one precomputed edit (BUG FIX: ``self`` parameter was missing)."""
        self.mutationlist.append(mutator_obj)
class MutatorObject:
    """One mutable code location inside a test case."""
    def __init__(self, ctype, code, line):
        # BUG FIXES: ``_init_`` -> ``__init__`` and the ``cytype`` typo
        # (undefined name) -> ``ctype``.
        self.ctype = ctype  # kind of code (limited syntax classification)
        self.code = code    # the source text itself
        self.line = line    # line number of ``code``
class MutatorOperatorSet:
    """Collection of MutatorOperatorObjects used for parsing."""
    def __init__(self):
        # BUG FIX: was ``_init_``; the shared class-level list is also gone,
        # so instances no longer alias one another's operators.
        self.mutator_operator_list = []
    def import_list(self, mutator_operator_list):
        """Replace the whole operator list (BUG FIX: ``self`` was missing)."""
        self.mutator_operator_list = mutator_operator_list
    def append_mutator_operators(self, mutator_operator):
        """Add a single operator (BUG FIX: ``self`` was missing)."""
        self.mutator_operator_list.append(mutator_operator)
class MutatorOperatorObject:
    """An operator literal (e.g. '<=', '<', '>', '>=') plus the regexes that match it."""
    def __init__(self, mutator_operator, mutator_re):
        # BUG FIXES: ``def_init(`` was a SyntaxError (missing space, wrong
        # dunder, no ``self``) and ``mutator_operators`` an undefined name.
        self.mutator_operator = mutator_operator
        self.mutator_re = mutator_re
|
21,764 | c5798d780b5db751469a5b8cc08a120ab67f2946 | import time
import math
import xlsxwriter
import openpyxl
from selenium import webdriver
from selenium.common.exceptions import NoSuchElementException, StaleElementReferenceException, WebDriverException
from selenium.webdriver.common.keys import Keys
# Workbook that collects one column of scraped results per input sequence.
workbook = xlsxwriter.Workbook('tgtcat.xlsx')
worksheet = workbook.add_worksheet()
row=0
col=0
# Candidate motif sequences submitted (each prepended to MPS below).
inputstr = ["tgtcggcggttcgctttttcttttttgtcgg","tgtcggtacaaaatggcacagcatttgtcgg", "tgtcggaagtcaaataacaaatctttgtcgg", "tgtcggcgatgcaattattgtcttttgtcgg", "tgtctctgtctc", "tgtcaagtgtcaa", "tgtcaaatgtcaa", "tgtcaactgtcaa", "tgtcaattgtcaa", "tgtcattgtcat", "tgtcagattgtcag", "tgtcacttgtcac", "tgtcacgtgtcac", "tgtctaatgtcta", "tgtctttgtctt", "tgtctgtttgtctg", "tgtcgctggttgctgtcgc","tgtcgagtgtcga", "tgtcgttgtcgt", "tgtccattgtcca", "tgtccttatagtccatcacaacggttcatgtcct", "tgtccttcttttctgcttctttgtcct", "tgtccttgtcct"];
# Fixed downstream sequence appended to every motif before submission.
MPS = "TCACTATATATAGGAAGTTCATTTCATTTGGAATGGACACGTGTTGTCATTTCTCAACAATTACCAACAACAACAAACAACAAACAACATTATACAATTACTATTTACAATTACATCTAGATAAACAATGGCTTCCTCC"
for string in inputstr:
    # Column header: the raw input sequence.
    worksheet.write(row, col, string)
    row+=1
    # Submit motif+MPS to the ConSite single-sequence analysis form.
    # NOTE(review): a new PhantomJS driver is created per sequence and
    # driver.quit() is never called — browser processes leak.
    driver = webdriver.PhantomJS()
    driver.get('http://consite.genereg.net/cgi-bin/consite?rm=t_input_single')
    driver.find_element_by_name("seq1").send_keys(string + MPS)
    elem = driver.find_element_by_name("submit").click()
    elem = driver.find_element_by_name(".submit").click()
    elem = driver.find_element_by_name("Image4").click()
    # Scrape the first column of the results table into this sequence's column.
    rows = driver.find_elements_by_xpath("/html/body/table/tbody/tr/td[2]/div/table/tbody/tr/td/table/tbody/tr/td[1]")
    for a in rows:
        worksheet.write(row, col, a.text)
        row+=1
    # Reset to the top of the next column for the next sequence.
    row=0
    col+=1
workbook.close()
|
21,765 | 8cf25fef1b991814c1102e11a79fd17cf6ba4e5c | #!/usr/bin/env python
class Solution(object):
    """Kth-largest element, two ways: a size-k min-heap and quickselect."""
    def __init__(self):
        # Backing array for the min-heap used by findKthLargest
        # (heap logic is 1-indexed; the list is 0-indexed underneath).
        self.nums_t = []
    def min_heap(self, i, l):
        """Sift 1-indexed node ``i`` down within the first ``l`` heap slots."""
        L = i * 2
        R = i * 2 + 1
        large = i
        if L <= l and self.nums_t[L-1] < self.nums_t[i-1]:
            large = L
        if R <= l and self.nums_t[R-1] < self.nums_t[large-1]:
            large = R
        if large != i:
            # Swap with the smaller child and keep sifting from there.
            tp = self.nums_t[large-1]
            self.nums_t[large-1] = self.nums_t[i-1]
            self.nums_t[i-1] = tp
            self.min_heap(large, l)
    def build_min_heap(self):
        """Heapify self.nums_t bottom-up (standard build-heap)."""
        l = len(self.nums_t)
        # BUG FIX: use floor division — ``l/2`` is a float under Python 3
        # and range() would raise TypeError; ``//`` works on both 2 and 3.
        for i in range(l // 2, 0, -1):
            self.min_heap(i, l)
    def findKthLargest(self, nums, k):
        """Return the kth largest element using a min-heap of the k largest seen."""
        self.nums_t = nums[0:k]
        self.build_min_heap()
        nums_leave = nums[k:]
        LL = len(nums_leave)
        for i in range(0, LL):
            # Anything larger than the heap minimum displaces it.
            if nums_leave[i] > self.nums_t[0]:
                self.nums_t[0] = nums_leave[i]
                self.min_heap(1, len(self.nums_t))
        return self.nums_t[0]
    def findKthLargest1(self, nums, k):
        """Return the kth largest element via quickselect (first element as pivot)."""
        L = len(nums)
        if L == 1:
            return nums[0]
        left = []
        right = []
        for i in range(1, L):
            if nums[i] < nums[0]:
                left.append(nums[i])
            else:
                right.append(nums[i])
        new_L = len(right)
        if new_L >= k:
            # The answer is among the elements >= pivot.
            return self.findKthLargest1(right, k)
        elif new_L == k - 1:
            return nums[0]
        else:
            # Skip the right partition and the pivot itself.
            return self.findKthLargest1(left, k - new_L - 1)
# NOTE: Python 2 syntax below (print statements); run this demo with python2.
if __name__ == '__main__':
    nums = [4,1,3,2,16,9,10,14,8,7]
    l = 5
    s = Solution()
    ans = s.findKthLargest1(nums, l)
    print "ans : ", ans
    # for i in range(0,l):
    #     print s.nums_t[i]
21,766 | 84604e20659d33ecbcd8e2455b0b936aa4dedee2 | # 该文件用来管理所有会使用到的Flask扩展
from flask_bcrypt import Bcrypt
from flask_pagedown import PageDown
from flask_login import LoginManager
from flask_openid import OpenID
from flask_principal import Principal, Permission, RoleNeed
from flask_celery import Celery
from flask_mail import Mail
from flask_assets import Environment, Bundle
from flask_admin import Admin
from flask_ckeditor import CKEditor
# Bcrypt instance (password hashing).
bcrypt = Bcrypt()
# PageDown instance (Markdown editor widget).
pagedown = PageDown()
# flask-celery-helper instance (background task support).
flask_celery = Celery()
openid = OpenID()
# flask-login instance (session/user management).
login_manager = LoginManager()
# Flask-Mail instance (outgoing email).
mail = Mail()
# flask-assets instance plus the CSS/JS bundles it serves minified.
assets_env = Environment()
main_css = Bundle(
    'css/bootstrap.css',
    'css/bootstrap-theme.css',
    filters='cssmin',
    output='assets/css/common.css')
main_js = Bundle(
    'js/bootstrap.js',
    filters='jsmin',
    output='assets/js/common.js')
# flask-admin instance (admin backend).
flask_admin = Admin()
# flask-login configuration.
# Endpoint users are redirected to when login is required.
login_manager.login_view = "main.login"
# Force re-login if the session identifier changes (stolen-cookie defence).
login_manager.session_protection = "strong"
# Message flashed when an anonymous user hits a protected page.
# BUG FIX: corrected the user-facing typo "Pless login" -> "Please login".
login_manager.login_message = "Please login to access this page."
# Flash category used for the message above.
login_manager.login_message_category = "info"
ckeditor = CKEditor()
@login_manager.user_loader
def load_user(user_id):
    """Return the User for ``user_id`` when a session is restored.

    Called by flask-login after login_user() and on each session load.
    Returning None makes flask-login drop the stale user_id from the
    session; otherwise the id stays associated with the session.
    """
    from .models import User
    return User.query.filter_by(id=user_id).first()
# Principal instance (identity/permission management).
principals = Principal()
# The application's three role-based permissions.
admin_permission = Permission(RoleNeed('admin'))
poster_permission = Permission(RoleNeed('poster'))
default_permission = Permission(RoleNeed('default'))
|
21,767 | 0705400f6d33f3ba48f23b9f5e30342e34d0dae7 | from airflow import DAG
from airflow.operators.bash_operator import BashOperator
from airflow.operators.python_operator import PythonOperator, BranchPythonOperator
from airflow.models import Variable, BaseOperator
from datetime import datetime
import json
import random
# Push an XCom value for the downstream consumer task.
def producer(*args, **kwargs):
    """Publish the string 'World' under key 'key' via XCom."""
    task_instance = kwargs['ti']
    task_instance.xcom_push('key', 'World')
    print('Send half text')
# Pull the XCom value pushed by task "Second".
def consumer(*args, **kwargs):
    """Read the XCom value produced by task 'Second' and print a greeting."""
    payload = kwargs['ti'].xcom_pull(task_ids='Second', key='key')
    print('Hello, {}'.format(payload))
# Branching: choose between tasks at random.  A richer example would pass the
# chosen id via XCom (producer sends one of ['Second', 'Fourth'] and this
# callable forwards it), but random choice keeps the demo self-contained.
def branch(**context):
    """Randomly pick which downstream task id the BranchPythonOperator follows."""
    candidates = ['Second', 'Fourth']
    return random.choice(candidates)
def create_dag(_dag_name: str, _default_args: dict) -> DAG:
    """Dynamically build a run-once DAG.

    :param _dag_name: DAG id
    :param _default_args: default arguments for the DAG
    :return: the new DAG
    """
    return DAG(_dag_name, default_args=_default_args, schedule_interval='@once')
def create_task(
        _dag: DAG,
        _task_id: str,
        _trigger_rule: str,
        _operator_type: str,
        _command: str,
        _params: dict = None
) -> BashOperator:
    """Build a task of the requested operator type.

    :param _dag: owning DAG
    :param _task_id: task name
    :param _trigger_rule: trigger rule for the task
    :param _operator_type: 'bash', 'python' or 'branch' (case-insensitive)
    :param _command: bash command, or the name of a module-level callable
    :param _params: variables for Jinja templating (bash only)
    :return: the created operator, or None for an unknown type
    """
    kind = _operator_type.lower()
    common = dict(task_id=_task_id, trigger_rule=_trigger_rule, dag=_dag)
    if kind == 'bash':
        return BashOperator(bash_command=_command, params=_params, **common)
    if kind == 'python':
        # The callable is resolved by name from this module's globals.
        return PythonOperator(python_callable=globals()[_command],
                              provide_context=True, **common)
    if kind == 'branch':
        return BranchPythonOperator(python_callable=globals()[_command], **common)
    return None
def read_meta_file(_meta_file_path: str) -> dict:
    """Load and deserialize the JSON metadata file at the given path.

    :param _meta_file_path: path to the metadata file
    :return: the deserialized JSON object
    """
    with open(_meta_file_path, 'r') as handle:
        return json.load(handle)
def parse_dag_meta_json(_data: dict) -> tuple:
    """Split DAG metadata into its four parts.

    :param _data: deserialized metadata
    :return: (dag_name, default_args, tasks, dependencies); missing keys -> None
    """
    keys = ('dag_name', 'default_args', 'tasks', 'dependencies')
    return tuple(_data.get(k) for k in keys)
# Airflow Variable holding the path to the DAG metadata file.
meta_file_path = Variable.get("meta_path2")
# Read the metadata file.
data = read_meta_file(meta_file_path)
# Unpack the metadata tuple.
dag_name, default_args, tasks, dependencies = parse_dag_meta_json(data)
# Build the DAG's default configuration (start_date stored as DDMMYYYY).
default_args = {
    "owner": default_args["owner"],
    "start_date": datetime.strptime(default_args["start_date"], "%d%m%Y")
}
# Create the DAG for the simple, linear wiring example.
dag = create_dag(dag_name, default_args)
# Keep created tasks in a dict so they can be linked up by name.
task1: dict = {}
for task in tasks:
    task1[task['task_id']] = create_task(dag, task['task_id'], task['trigger_rule'], task['operator_type'],
                                         task['command'])
# Wire the tasks together, looking each endpoint up by name.
for dep in dependencies:
    task1[dep['task_id_from']].set_downstream(task1[dep['task_id_to']])
|
21,768 | 6b36a574ceb07bb3e888da134a2def3ce8c41c4d | #百度翻译的appid与secretKey
appid = 'your appid'
secretKey = 'your secretkey'
#发送与接收设置
#其中rss源为rsshub的router
#格式list[ [rsshub源1,[接收者1,接收者2,接收者3],True(翻译与否)] , [rss源2,[接收者2,接收者4,接收者5]] ]
#例子[ ['twitter/user/ichika_mo',[123456,456789],True],
# ['twitter/user/shirakamifubuki',[123456,456789],False] ]
QQ_Group = []
QQ_Private = []
#CQHttp的控制
access_token = 'your access_token'
secret = 'your secret'
api_root = '酷Qhttp运行的地址' #http
|
21,769 | b9fef26853954a0c2e8da6ca694c7aec6105f24b | import sq.interfaces.http
try:
from .endpoints import urlpatterns
except ImportError:
urlpatterns = None
class WSGIApplication(sq.interfaces.http.WSGIApplication):
    """WSGI entry point wiring the optional urlpatterns (None if endpoints.py is absent)."""
    def __init__(self, logger=None, is_operational=None):
        super(WSGIApplication, self).__init__(urlpatterns=urlpatterns,
            logger=logger, is_operational=is_operational)
# pylint: skip-file
# !!! SG MANAGED FILE -- DO NOT EDIT !!!
|
21,770 | bcc5fed0c35defb3ab38211138eb87602e75e47a | from visual_mpc.envs.robot_envs import GripperInterface
import intera_interface
import time
class SawyerDefaultGripper(GripperInterface):
    """GripperInterface backed by the stock Sawyer (intera) right-arm electric gripper."""
    def __init__(self):
        self._gripper_control = intera_interface.Gripper("right")
        # Calibrate first, then start at minimum speed and fully open so the
        # first commanded grasp is safe.
        self._gripper_control.calibrate()
        self._gripper_control.set_velocity(self._gripper_control.MIN_VELOCITY)
        self._gripper_control.open()
    def get_gripper_state(self, integrate_force=False):
        # returns gripper joint angle, force reading (none if no force)
        return self._gripper_control.get_position(), self._gripper_control.get_force()
    def set_gripper(self, position, wait=False):
        """Command the gripper to ``position``; optionally block until motion stops."""
        self._gripper_control.set_position(position)
        # just busy wait since the gripper is pretty fast
        while wait and self._gripper_control.is_moving():
            time.sleep(0.1)
    @property
    def GRIPPER_CLOSE(self):
        # Fully-closed joint position as reported by the driver.
        return self._gripper_control.MIN_POSITION
    @property
    def GRIPPER_OPEN(self):
        # Fully-open joint position as reported by the driver.
        return self._gripper_control.MAX_POSITION
    def set_gripper_speed(self, new_speed):
        """Set gripper velocity after range-checking against the driver's limits."""
        assert self._gripper_control.MIN_VELOCITY <= new_speed <= self._gripper_control.MAX_VELOCITY
        self._gripper_control.set_velocity(new_speed)
|
21,771 | fa1afd0d496370fd9e1ae396d8a1cd8c3c7fe77c | from django.forms import ModelForm
from .models import Blog
class BlogForm(ModelForm):
    """ModelForm exposing only the user-editable Blog fields."""
    class Meta:
        model = Blog
        fields = ['title', 'description']
        #fields = '__all__' # use if you want every field from the model in the form
        #exclude = ['title'] # use all model fields except the ones listed here
21,772 | 6070c62bd6c28fd0c52ad77396baeb3a71c73558 | import time
def inputProcess(file):
    """Read a word list (one word per line) and build the adjacency map.

    Returns a dict mapping each word to the list of dictionary words that
    differ from it by exactly one letter.
    """
    with open(file, "r") as handle:
        words = {line.strip() for line in handle}
    return {word: randomnizeWord(word, words) for word in words}
def isLinked(word1, word2):
    """Return True when the two words differ by exactly one letter.

    GENERALIZED: the original hard-coded ``range(4)`` (four-letter words
    only); this version works for any equal length and returns False for
    unequal lengths instead of raising IndexError.  Behaviour for the
    original four-letter inputs is unchanged.
    """
    if word1 == word2 or len(word1) != len(word2):
        return False
    diffs = sum(1 for a, b in zip(word1, word2) if a != b)
    return diffs == 1
def randomnizeWord(word, setWord):
    """Return every dictionary word reachable from *word* by one letter substitution.

    Candidates are generated position by position, A-Z, so the result order
    is deterministic.
    """
    neighbours = []
    for position in range(len(word)):
        for letter in "ABCDEFGHIJKLMNOPQRSTUVWXYZ":
            candidate = word[:position] + letter + word[position + 1:]
            if candidate != word and candidate in setWord:
                neighbours.append(candidate)
    return neighbours
def transform(begin, end, dictWords):
    """Find a word ladder from *begin* to *end* via BFS; None when impossible."""
    return transformWordBFS(begin, end, dictWords, [])
def transformWordBFS(begin, end, dictWords, visited):
    """Breadth-first search for a one-letter-at-a-time ladder from begin to end.

    Returns the path as a list of words, or None when an endpoint is missing
    from the dictionary or no ladder is found.  ``visited`` (a list) is still
    appended to, so the caller-visible contract is unchanged.

    PERF FIX: membership tests now use a set and the queue is a deque,
    replacing the former O(n) ``word not in visited`` scans and O(n)
    ``aQueue[1:]`` copies with O(1) operations.

    NOTE(review): pruning with ``similarity(word, end)`` is a heuristic — it
    discards words sharing no letter position with the target and can
    therefore miss valid ladders; kept as-is to preserve behaviour.
    """
    from collections import deque

    begin = begin.upper()
    end = end.upper()
    if begin not in dictWords:
        print("The starting word is not in the dictionary")
        return None
    if end not in dictWords:
        print("The ending word is not in the dictionary")
        return None
    visited.append(begin)
    seen = set(visited)  # O(1) membership mirror of ``visited``
    queue = deque([begin])
    parent = {}
    while queue:
        current = queue.popleft()
        for word in dictWords[current]:
            if word not in seen and similarity(word, end):
                seen.add(word)
                visited.append(word)
                queue.append(word)
                parent[word] = current
                if word == end:
                    return findingRoute(parent, begin, end)
    return None
def findingRoute(parent, start, end):
    """Walk the BFS parent links back from *end* and return the start-to-end path."""
    path = [end]
    while path[-1] != start:
        path.append(parent[path[-1]])
    return path[::-1]
def similarity(word1, word2):
    """Return True when the words share at least one letter in the same position.

    GENERALIZED: the original hard-coded ``range(4)``; zip covers any common
    length (identical results for the original four-letter inputs).
    """
    return any(a == b for a, b in zip(word1, word2))
if __name__ == '__main__':
    # Build the one-letter-apart adjacency map from the word file.
    dictWords = inputProcess("fourletterwords.txt")
    # print(dictWords["LOWS"])
    # print(dictWords["COAT"])
    # # print(dictWords["CHAT"])
    # print(dictWords["CHAP"])
    # # print(dictWords["CHIP"])
    # print(dictWords)
    # Time a single boat -> ship ladder search.
    begin = time.time()
    print(transform("boat","ship",dictWords))
    print(time.time()-begin)
21,773 | c5a5f47999cc4b73882dd2ccbc81890f7ad3d468 | import requests
import argparse
import urllib3
#disable ssl warning
urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)
#如果报错ssl.SSLError: [SSL: UNSUPPORTED_PROTOCOL] unsupported protocol (_ssl.c:852)
#修改/etc/ssl/openssl.cnf,将TLSv1.2改为TLSv1.0
#参考链接:https://stackoverflow.com/questions/59408646/ssl-sslerror-ssl-unsupported-protocol-unsupported-protocol-ssl-c852-in-d
ENDPOINT = '/ui/vropspluginui/rest/services/uploadova'
def banner():
    """Print the tool's ASCII-art banner (CVE-2021-21972 checker)."""
    print(
        """
 _______ ________ ___ ___ ___ __ ___ __ ___ ______ ___
 / ____\\ \\ / / ____| |__ \\ / _ \\__ \\/_ | |__ \\/_ |/ _ \\____ |__ \\
| | \\ \\ / /| |__ ______ ) | | | | ) || |______ ) || | (_) | / / ) |
| | \\ \\/ / | __|______/ /| | | |/ / | |______/ / | |\\__, | / / / /
| |____ \\ / | |____ / /_| |_| / /_ | | / /_ | | / / / / / /_
 \\_____| \\/ |______| |____|\\___/____||_| |____||_| /_/ /_/ |____|
 Author: ybdt
 """
    );
def check(ip):
    """Probe a single host for CVE-2021-21972.

    A GET to the uploadova endpoint answering 405 (method not allowed)
    indicates the vulnerable plugin is present.  Returns True when the host
    looks vulnerable, False otherwise (including the handled network errors).
    """
    url = ip if "https://" in ip else "https://" + ip
    try:
        r = requests.get(url + ENDPOINT, verify=False, timeout=10)
    except requests.exceptions.ConnectTimeout:
        print('[-] ' + ip + ' ConnectTimeout')
        return False
    except requests.exceptions.ReadTimeout:
        print('[-] ' + ip + ' ReadTimeout')
        return False
    except requests.exceptions.ConnectionError:
        print('[-] ' + ip + ' ConnectionError')
        return False
    if r.status_code == 405:
        print('[+] ' + ip + ' vulnerable to CVE-2021-21972!')
        return True
    print('[-] ' + ip + ' not vulnerable to CVE-2021-21972. Response code: ' + str(r.status_code) + '.')
    return False
def main():
    """Read targets (one per line), check each, and record vulnerable hosts."""
    banner()
    parser = argparse.ArgumentParser()
    parser.add_argument("-i", "--inputfile", help="The file contains ips, one per line", required=True)
    args = parser.parse_args()
    with open(args.inputfile, "r") as f_r, open("vulnerable.txt", "w") as f_w:
        for line in f_r:
            line = line.strip("\n").strip("\r\n")
            if check(line):
                f_w.write(line + "\n")


# BUG FIX: guard the entry point — the original called main() unconditionally,
# so merely importing this module launched the scanner.
if __name__ == "__main__":
    main()
21,774 | e50bbe5d506ac6bf6ab9aef3b5de530de468688f | # -- coding: utf-8 --
from selenium import webdriver
import time
import math
from selenium.webdriver.support import expected_conditions
def calc(x):
    """Return log(|12*sin(x)|) as a string, coercing x to int first."""
    angle = int(x)
    return str(math.log(abs(math.sin(angle) * 12)))
try:
    # BUG FIX: the original referenced ``wait`` and ``By`` without ever
    # defining/importing them (the WebDriverWait line was commented out and
    # used the wrong variable name), and ``By.Id`` should be ``By.ID``.
    from selenium.webdriver.common.by import By
    from selenium.webdriver.support.ui import WebDriverWait

    link = "http://suninjuly.github.io/explicit_wait2.html"
    browser = webdriver.Chrome()
    # Explicitly wait up to 12s for the price to change to $100, then submit.
    wait = WebDriverWait(browser, 12)
    browser.get(link)
    wait.until(expected_conditions.text_to_be_present_in_element((By.ID, 'price'), '$100'))
    button = browser.find_element_by_css_selector("button.btn.btn-primary")
    button.click()
    time.sleep(2)
    alert = browser.switch_to.alert
    alert.accept()
finally:
    # Always close the browser, even on failure.
    browser.quit()
21,775 | 9257ee4d586645241340b92650f3b01bc601b1b4 | # ToDo: Create a top level container for different measurement programs at the probe station
|
21,776 | ccf8144f1f489dbb7f91fc861252d2c5096ed061 | import requests
import json
# url = "http://localhost:5000/watchlist/toggle_add"
url = "http://13.229.107.243:5001/watchlist/toggle_add"
data = {
"user_id": "Bob",
"product": "TSLA"
}
response = requests.post(url, data = json.dumps(data))
print(response.text) |
21,777 | b728ed8056d5df78830e87857b9cced8e06b410b | from unittest import TestCase
try:
from unittest.mock import Mock, call
except ImportError:
from mock import Mock, call
from argparse import ArgumentParser
from munch import munchify
from cloud_cleaner.config import CloudCleanerConfig
from cloud_cleaner.resources.fip import Fip
# Test fixture: fake floating-IP records shaped like openstacksdk results
# (munchify gives dicts attribute access). Ids '1'-'3' are attached/ACTIVE,
# ids '20'-'22' are detached/DOWN.
FLOATING_IPS = [
    # THESE ARE ATTACHED, AND SHOULDN'T BE DELETED, BY DEFAULT
    munchify({
        'attached': True,
        'fixed_ip_address': '191.168.1.1',
        'floating_ip_address': '10.0.0.1',
        'id': '1',
        'status': 'ACTIVE'
    }),
    munchify({
        'attached': True,
        'fixed_ip_address': '172.16.0.2',
        'floating_ip_address': '10.0.0.2',
        'id': '2',
        'status': 'ACTIVE'
    }),
    munchify({
        'attached': True,
        'fixed_ip_address': '192.168.10.22',
        'floating_ip_address': '8.9.10.3',
        'id': '3',
        'status': 'ACTIVE'
    }),
    # THESE ARE NOT ATTACHED AND SHOULD BE DELETED, BY DEFAULT
    munchify({
        'attached': False,
        'fixed_ip_address': '192.168.1.20',
        'floating_ip_address': '10.0.0.20',
        'id': '20',
        'status': 'DOWN'
    }),
    munchify({
        'attached': False,
        'fixed_ip_address': '172.16.0.21',
        'floating_ip_address': '10.0.0.21',
        'id': '21',
        'status': 'DOWN'
    }),
    munchify({
        'attached': False,
        'fixed_ip_address': '192.168.10.22',
        'floating_ip_address': '8.9.10.22',
        'id': '22',
        'status': 'DOWN'
    })
]
class TestFip(TestCase):
    """Unit tests for the Fip cleaner resource using a mocked cloud connection."""

    def __test_with_calls(self, args, calls):
        """Run the full register/parse/process/clean cycle with CLI `args`
        and assert that exactly the floating-ip ids in `calls` get deleted,
        in that order."""
        conn = Mock()
        conn.list_floating_ips = Mock(return_value=FLOATING_IPS)
        conn.delete_floating_ip = Mock()
        # Wrap each expected id in a mock `call` for call_args_list comparison.
        calls = [call(i) for i in calls]
        config = CloudCleanerConfig(args=args)
        config.get_conn = Mock(return_value=conn)
        fip = Fip()
        fip.register(config)
        config.parse_args()
        fip.process()
        fip.clean()
        self.assertEqual(conn.delete_floating_ip.call_args_list, calls)
    def test_resource_type(self): # pylint: disable=no-self-use
        """Registering a Fip adds its type_name as a CLI subparser."""
        parser = ArgumentParser()
        config = CloudCleanerConfig(parser=parser, args=["--os-auth-url",
                                                         "http://no.com",
                                                         "fip"])
        config.add_subparser = Mock()
        fip = Fip()
        fip.register(config)
        config.add_subparser.assert_called_once_with(Fip.type_name)
    def test_resource_handled_from_args(self):
        """process() before register() raises; after registration the parsed
        resource matches Fip.type_name."""
        parser = ArgumentParser()
        # NOTE(review): "httpp://no.com" looks like a typo but the URL is never
        # dereferenced in this test — confirm before changing the fixture.
        config = CloudCleanerConfig(parser=parser, args=["--os-auth-url",
                                                         "httpp://no.com",
                                                         Fip.type_name])
        fip = Fip()
        # If config hasn't yet been registered, then there will be an error
        # from within this method, as intended
        with self.assertRaises(AttributeError):
            fip.process()
        fip.register(config)
        config.parse_args()
        self.assertEqual(Fip.type_name, config.get_resource())
    def test_default_filters_active(self):
        """By default only the detached (DOWN) floating ips are deleted."""
        args = ["--os-auth-url", "http://no.com", "fip"]
        calls = ['20', '21', '22']
        self.__test_with_calls(args, calls)
    def test_force_delete_attached(self):
        """--with-attached deletes every floating ip, attached or not."""
        args = ["--os-auth-url", "http://no.com", "fip", "--with-attached"]
        calls = ['1', '2', '3', '20', '21', '22']
        self.__test_with_calls(args, calls)
    def test_delete_by_subnet(self):
        """--floating-subnet keeps deletions to ips whose floating address
        is inside the given CIDR."""
        args = ['--os-auth-url', 'http://no.com', 'fip', '--floating-subnet',
                '10.0.0.0/8']
        calls = ['20', '21']
        self.__test_with_calls(args, calls)
    def test_delete_by_static_subnet(self):
        """--static-subnet filters on the fixed (internal) address instead."""
        args = ['--os-auth-url', 'http://no.com', 'fip', '--static-subnet',
                '192.168.0.0/16']
        calls = ['20', '22']
        self.__test_with_calls(args, calls)
|
21,778 | 8f7224fcc7642a795e0534914243d7aead634153 | """
File with commands and supported databases by dumpyme
"""
# Dict with types and commands to create dumps
# {0} - database name
# {1} - folder with dump
dump_commands = {
"mongodb": "mongodump --db {0} --out {1}",
}
"""
POSTGRESQL : pg_dump -d database -U user/owner > file.sql
variações:
--table=nome_da_tabela
--table=nome_da_tabela --table=nome_da_outra_tabela
--exclude=nome_da_tabela
--exclude=nome_da_tabela --exclude=nome_da_outra_tabela
"""
# const with list of supported database types
SUPPORTED = dump_commands.keys()
|
21,779 | 0a4a242012ed9ba91e6caec48cc36c95b2143c1f | # -*- coding: utf-8 -*-
# Form implementation generated from reading ui file 'C:\Users\zhou\Desktop\Client\Shangke.ui'
#
# Created by: PyQt5 UI code generator 5.11.3
#
# WARNING! All changes made in this file will be lost!
from PyQt5 import QtCore, QtGui, QtWidgets
class Ui_Shangke(object):
    """pyuic5-generated UI layout for the 'Shangke' (in-class attendance)
    main window. Generated code — regenerate from the .ui file rather than
    editing widget geometry by hand."""

    def setupUi(self, Shangke):
        """Create and position all widgets on the given QMainWindow."""
        Shangke.setObjectName("Shangke")
        Shangke.resize(923, 568)
        self.centralWidget = QtWidgets.QWidget(Shangke)
        self.centralWidget.setObjectName("centralWidget")
        # Camera/image display area.
        self.Img_lable = QtWidgets.QLabel(self.centralWidget)
        self.Img_lable.setGeometry(QtCore.QRect(249, 40, 371, 271))
        self.Img_lable.setObjectName("Img_lable")
        self.label = QtWidgets.QLabel(self.centralWidget)
        self.label.setGeometry(QtCore.QRect(33, 50, 61, 20))
        self.label.setObjectName("label")
        self.label_2 = QtWidgets.QLabel(self.centralWidget)
        self.label_2.setGeometry(QtCore.QRect(33, 150, 61, 20))
        self.label_2.setObjectName("label_2")
        self.label_3 = QtWidgets.QLabel(self.centralWidget)
        self.label_3.setGeometry(QtCore.QRect(33, 200, 61, 20))
        self.label_3.setObjectName("label_3")
        self.label_5 = QtWidgets.QLabel(self.centralWidget)
        self.label_5.setGeometry(QtCore.QRect(33, 100, 61, 20))
        self.label_5.setObjectName("label_5")
        self.label_6 = QtWidgets.QLabel(self.centralWidget)
        self.label_6.setGeometry(QtCore.QRect(33, 250, 61, 21))
        self.label_6.setObjectName("label_6")
        # Read-only fields showing the current class session info.
        self.lineEdit = QtWidgets.QLineEdit(self.centralWidget)
        self.lineEdit.setGeometry(QtCore.QRect(100, 50, 113, 20))
        self.lineEdit.setReadOnly(True)
        self.lineEdit.setObjectName("lineEdit")
        self.lineEdit_2 = QtWidgets.QLineEdit(self.centralWidget)
        self.lineEdit_2.setGeometry(QtCore.QRect(100, 100, 113, 20))
        self.lineEdit_2.setReadOnly(True)
        self.lineEdit_2.setObjectName("lineEdit_2")
        self.lineEdit_3 = QtWidgets.QLineEdit(self.centralWidget)
        self.lineEdit_3.setGeometry(QtCore.QRect(100, 150, 113, 20))
        self.lineEdit_3.setReadOnly(True)
        self.lineEdit_3.setObjectName("lineEdit_3")
        self.lineEdit_4 = QtWidgets.QLineEdit(self.centralWidget)
        self.lineEdit_4.setGeometry(QtCore.QRect(100, 200, 113, 20))
        self.lineEdit_4.setReadOnly(True)
        self.lineEdit_4.setObjectName("lineEdit_4")
        self.lineEdit_6 = QtWidgets.QLineEdit(self.centralWidget)
        self.lineEdit_6.setGeometry(QtCore.QRect(100, 250, 113, 20))
        self.lineEdit_6.setReadOnly(True)
        self.lineEdit_6.setObjectName("lineEdit_6")
        # Action buttons (left column and bottom-right).
        self.Add_new_class_button = QtWidgets.QPushButton(self.centralWidget)
        self.Add_new_class_button.setGeometry(QtCore.QRect(20, 320, 191, 51))
        self.Add_new_class_button.setObjectName("Add_new_class_button")
        self.Output_CWA_excel_Now_button = QtWidgets.QPushButton(self.centralWidget)
        self.Output_CWA_excel_Now_button.setGeometry(QtCore.QRect(20, 390, 191, 51))
        self.Output_CWA_excel_Now_button.setObjectName("Output_CWA_excel_Now_button")
        self.Exit_button = QtWidgets.QPushButton(self.centralWidget)
        self.Exit_button.setGeometry(QtCore.QRect(20, 460, 191, 51))
        self.Exit_button.setObjectName("Exit_button")
        self.Off_class_button = QtWidgets.QPushButton(self.centralWidget)
        self.Off_class_button.setGeometry(QtCore.QRect(690, 460, 191, 51))
        self.Off_class_button.setObjectName("Off_class_button")
        self.textEdit = QtWidgets.QTextEdit(self.centralWidget)
        self.textEdit.setGeometry(QtCore.QRect(240, 350, 421, 161))
        self.textEdit.setObjectName("textEdit")
        self.label_4 = QtWidgets.QLabel(self.centralWidget)
        self.label_4.setGeometry(QtCore.QRect(240, 320, 131, 31))
        font = QtGui.QFont()
        font.setPointSize(15)
        font.setBold(True)
        font.setWeight(75)
        self.label_4.setFont(font)
        self.label_4.setObjectName("label_4")
        self.Start_CWA_button = QtWidgets.QPushButton(self.centralWidget)
        self.Start_CWA_button.setGeometry(QtCore.QRect(690, 380, 191, 51))
        self.Start_CWA_button.setObjectName("Start_CWA_button")
        self.rec_label_7 = QtWidgets.QLabel(self.centralWidget)
        self.rec_label_7.setGeometry(QtCore.QRect(660, 20, 211, 161))
        self.rec_label_7.setObjectName("rec_label_7")
        self.stu_pic_label_7 = QtWidgets.QLabel(self.centralWidget)
        self.stu_pic_label_7.setGeometry(QtCore.QRect(710, 200, 121, 151))
        self.stu_pic_label_7.setObjectName("stu_pic_label_7")
        Shangke.setCentralWidget(self.centralWidget)
        self.retranslateUi(Shangke)
        QtCore.QMetaObject.connectSlotsByName(Shangke)

    def retranslateUi(self, Shangke):
        """Assign all translatable display strings (labels are in Chinese)."""
        _translate = QtCore.QCoreApplication.translate
        Shangke.setWindowTitle(_translate("Shangke", "Shangke"))
        self.Img_lable.setText(_translate("Shangke", "Img"))
        self.label.setText(_translate("Shangke", "科 目:"))
        self.label_2.setText(_translate("Shangke", "节 次:"))
        self.label_3.setText(_translate("Shangke", "教师工号:"))
        self.label_5.setText(_translate("Shangke", "班 级:"))
        self.label_6.setText(_translate("Shangke", "上课日期:"))
        self.Add_new_class_button.setText(_translate("Shangke", "新增上课信息"))
        self.Output_CWA_excel_Now_button.setText(_translate("Shangke", "打印当前节次考勤报表"))
        self.Exit_button.setText(_translate("Shangke", "退出"))
        self.Off_class_button.setText(_translate("Shangke", "下课"))
        self.label_4.setText(_translate("Shangke", "签到情况:"))
        self.Start_CWA_button.setText(_translate("Shangke", "开始考勤"))
        self.rec_label_7.setText(_translate("Shangke", "TextLabel"))
        self.stu_pic_label_7.setText(_translate("Shangke", "TextLabel"))
if __name__ == "__main__":
    # Manual preview of the generated UI: build the window and run the Qt loop.
    import sys
    app = QtWidgets.QApplication(sys.argv)
    Shangke = QtWidgets.QMainWindow()
    ui = Ui_Shangke()
    ui.setupUi(Shangke)
    Shangke.show()
    sys.exit(app.exec_())
|
21,780 | aa1a39d8ce6fcb7eb877731fa0dbf1baae491151 | #Python STD package
import time
from pytz import timezone
from datetime import datetime
import random
from pprint import pprint as print
import os
import logging
from logging.handlers import RotatingFileHandler
import sys
#Installed packages
import pytz
import requests
# strftime patterns shared by the module.
fmt_date = '%Y-%m-%d'
fmt_hour = '%H:%M:%S'
fmt_hour_round = '%H.%M'      # dot separator, e.g. for filenames
# Timezone objects: UTC plus the local (Montreal) zone.
utc = pytz.utc
eastern = timezone('America/Montreal')
def get_log_formatter():
# https://tutorialedge.net/python/python-logging-best-practices/
log_formatter = logging.Formatter('%(asctime)s %(levelname)-8s [%(filename)s:%(funcName)s:%(lineno)d] %(message)s')
logFile = './log.log'
my_handler = RotatingFileHandler(
logFile, mode='a', maxBytes=5*1024*1024,
backupCount=2, encoding=None, delay=0)
my_handler.setFormatter(log_formatter)
my_handler.setLevel(logging.INFO)
app_log = logging.getLogger('root')
app_log.setLevel(logging.INFO)
app_log.addHandler(my_handler)
return app_log
if __name__ == '__main__':
    # Import-only module: nothing runs when executed directly.
    pass
21,781 | a06ff55feead3e13366397a56b4b9a74af7b0e9d | def aplicar_funcion_str():
    # Read the text to transform and the name of the str method to apply.
    n_caracteres = input('Texto: ')
    fun = input('Función a aplicar: ')
    # Looks up any str method by the user-supplied name (e.g. 'upper',
    # 'title') and calls it on the text; an unknown name raises AttributeError.
    return getattr(str, fun)(n_caracteres)
if __name__ == '__main__':
    # Interactive demo: prompt for text + method and print the result.
    print(aplicar_funcion_str())
|
21,782 | a3b5fdc1fb7e8b7a83b07acdd36f9e75b031570f | #创建一个单例
'''
需求
原来每创建一个对象,会指向一个内存空间。
而单例不管创建多少个对象,都会指向同一块内存空间
只让__init__方法初始化一次对象(也就是以后一直就是初始化的那个对象)
'''
class Dog(object):
    """Singleton dog: every construction returns the one shared instance,
    and only the very first construction's name is kept."""

    # Class-level cache of the single instance plus a flag marking whether
    # __init__ has already run once.
    __instance = None
    __init_flag = False

    def __new__(cls, name):
        # `name` is unused here; it is accepted only so that constructing
        # with an argument does not raise a TypeError.
        if cls.__instance is None:
            cls.__instance = object.__new__(cls)
        # Hand back the one shared instance in every case.
        return cls.__instance

    def __init__(self, name):
        # Initialise only once; later constructions keep the original name.
        if not Dog.__init_flag:
            self.name = name
            Dog.__init_flag = True
# Demo: both constructions print the same id and the first dog's name.
a = Dog("旺财")
print (id(a))
print (a.name)
# Second construction reuses the existing instance; the new name is ignored.
b = Dog("哮天犬")
print (id(b))
print (b.name)
'''
结果:两个对象的name都是旺财
2019567346240
旺财
2019567346240
旺财
'''
|
21,783 | f596accdea1771aa06db16b10799cd2f8c8104e6 | from typing import List, Optional, Deque
from cuckoo_filter import CuckooFilterBit
from collections import deque
from read import Read
from copy import deepcopy
import sys
class CuckooBitTree:
    """Binary tree of cuckoo filters over dataset kmers: leaves hold one
    dataset each, internal nodes hold the union of their subtree's kmers."""

    def __init__(self, theta, k, num_buckets, fp_size, bucket_size, max_iter):
        """
        Wrapper around the Node structure of tree for inserting, querying
        :param theta: Parameter to determine strictness of querying
        :param k: Size of kmer
        :param num_buckets: Parameter for CuckooFilter
        :param fp_size: Parameter for CuckooFilter
        :param bucket_size: Parameter for CuckooFilter
        :param max_iter: Parameter for CuckooFilter
        """
        self.root: Optional[Node] = None
        self.theta: float = theta
        self.k: int = k
        self.num_buckets = num_buckets
        self.fp_size = fp_size
        self.bucket_size = bucket_size
        self.max_iter = max_iter
        # Running byte-count of the whole tree; grows as nodes are added.
        # (sic) method name kept as-is for compatibility.
        self.aggregate_size = self.get_insternal_size()
    def insert(self, dataset: List[Read]) -> bool:
        """
        Creates a new node from this read and adds it into the tree
        :param dataset: the dataset reads
        :return: None
        """
        node_to_insert = Node(self.k, self.num_buckets, self.fp_size, self.bucket_size, self.max_iter)
        node_to_insert.populate_dataset_info(dataset)
        self.aggregate_size += node_to_insert.get_size()
        # Empty tree: the new leaf becomes the root.
        if self.root is None:
            self.root = node_to_insert
            return True
        parent = None
        current = self.root
        # Walk down, choosing the more similar child at each 2-child node.
        while current:
            if current.num_children() == 0:
                """
                current is a leaf representing a dataset, so
                create a new parent that contains node_to_insert
                and current as children
                """
                new_parent = Node(self.k, self.num_buckets, self.fp_size, self.bucket_size, self.max_iter)
                self.aggregate_size += new_parent.get_size()
                new_parent.parent = parent
                # Kmers from existing and new leaf
                new_parent.filter = deepcopy(current.filter)
                new_parent.insert_kmers_from_dataset(dataset)
                # Set appropriate parent/child pointers
                current.parent = new_parent
                node_to_insert.parent = new_parent
                new_parent.children.append(current)
                new_parent.children.append(node_to_insert)
                # Special case where root is a leaf
                if parent is None:
                    # current is root -> new_parent is now root
                    self.root = new_parent
                    return True
                # Set new_parent as child of old parent
                idx = parent.children.index(current)
                parent.children[idx] = new_parent
                return True
            elif current.num_children() == 1:
                # insert kmers
                current.insert_kmers_from_dataset(dataset)
                # we found an empty slot to insert into
                current.children.append(node_to_insert)
                return True
            elif current.num_children() == 2:
                # insert kmers
                current.insert_kmers_from_dataset(dataset)
                # select "best" child
                score_0 = current.children[0].score(dataset)
                score_1 = current.children[1].score(dataset)
                best_child = 0 if score_0 < score_1 else 1
                # recur
                parent = current
                current = current.children[best_child]
        raise Exception("Did not insert successfully!")
    def query(self, query: str) -> List[str]:
        """
        Perform a DFS of the tree and collects reads that
        pass similarity test.
        :param query: The query string to be broken into kmers
        :return: The list of read_ids that "match"
        """
        # NOTE: a deque with popleft makes this a BFS in practice.
        nodes_to_explore: Deque[Node] = deque()
        nodes_to_explore.append(self.root)
        out: List[str] = []
        while nodes_to_explore:
            current = nodes_to_explore.popleft()
            total_kmers_found = 0
            total_kmers = 0
            for kmer in kmers_in_string(query, self.k):
                if current.filter.contains(kmer):
                    total_kmers_found += 1
                total_kmers += 1
            # Descend only if at least theta of the query kmers are present.
            if total_kmers_found >= self.theta * total_kmers:
                for child in current.children:
                    nodes_to_explore.append(child)
                if current.num_children() == 0:
                    out.append(current.dataset_id)
        return out
    def contains(self, query):
        """
        A wrapper for backward comptibility with other data structure implementations
        """
        return self.query(query)
    def get_insternal_size(self):
        """
        Returns the total number of bytes occupied by the filter object
        """
        # Shallow sizes of the scalar configuration fields only.
        return (
            sys.getsizeof(self.theta) +
            sys.getsizeof(self.num_buckets) +
            sys.getsizeof(self.k) +
            sys.getsizeof(self.fp_size) +
            sys.getsizeof(self.max_iter) +
            sys.getsizeof(self.bucket_size)
        )
class Node:
    def __init__(self, k, num_buckets, fp_size, bucket_size, max_iter):
        """
        Represents a single node of Cuckoo Tree.
        """
        self.children: List[Node] = []
        self.parent: Optional[Node] = None
        # Per-node cuckoo filter holding every kmer seen in this subtree.
        self.filter = CuckooFilterBit(num_buckets, fp_size, bucket_size, max_iter)
        # Set only on leaves (one dataset per leaf).
        self.dataset_id: Optional[str] = None
        self.k = k
    def populate_dataset_info(self, dataset: List[Read]) -> None:
        """Mark this node as the leaf for `dataset` and absorb its kmers."""
        self.dataset_id = dataset[0].filename
        self.insert_kmers_from_dataset(dataset)
    def insert_kmers_from_dataset(self, dataset: List[Read]) -> None:
        """Insert every kmer of every read into this node's filter (deduped)."""
        for read in dataset:
            for kmer in read.kmers(self.k):
                self.filter.insert_no_duplicates(kmer)
    def num_children(self) -> int:
        """Number of direct children (0 = leaf, at most 2)."""
        return len(self.children)
    def score(self, dataset: List[Read]) -> int:
        """
        "Hamming distance" score where lower is better
        :param dataset: The dataset to compare against
        :return:
        """
        kmers_in_common = 0
        for read in dataset:
            for kmer in read.kmers(self.k):
                if self.filter.contains(kmer):
                    kmers_in_common += 1
        return self.filter.num_items_in_filter - kmers_in_common
    def get_size(self):
        """
        Returns the total number of bytes occupied by the filter object
        """
        # Shallow sizes of the node fields plus the filter's own accounting.
        return (
            sys.getsizeof(self.children) +
            sys.getsizeof(self.parent) +
            sys.getsizeof(self.dataset_id) +
            sys.getsizeof(self.k) +
            self.filter.get_size()
        )
def kmers_in_string(string: str, k):
    """Yield every length-k substring (kmer) of `string`, left to right.

    Yields nothing when k exceeds len(string).
    """
    last_start = len(string) - k + 1
    for start in range(last_start):
        yield string[start:start + k]
|
21,784 | a5c158adce8f1f3e119a7788c42c45f68b493ae1 | import os
import re
import subprocess
import time
# Poll `adb shell getprop dev.bootcomplete` once per second until the device
# reports boot completion, for at most wait_count+1 attempts.
# Returns True when booted, False on timeout. (Python 2 syntax.)
def isBoot(wait_count=60):
    print 'check the device boot status'
    time.sleep(1)
    waitcount = 0
    while waitcount <= wait_count:
        # The prop is the literal string '1' once Android finishes booting.
        if os.popen('adb shell getprop dev.bootcomplete').read().strip() == '1':
            print 'boot complete,start skip google security'
            return True
        else:
            print 'device offline,wait for boot'
            time.sleep(1)
            waitcount += 1
    return False
# Kick off `adb install` of the bundled APK in the background (Popen does
# not wait for the install to finish). (Python 2 syntax.)
def installApk():
    print 'start install apk for device'
    time.sleep(1)
    subprocess.Popen('adb install ./runner/flash/lib/JIO.apk', shell=True)
# Watch logcat (polled every 5s, up to `timeout` seconds) for the Google Play
# ConsentDialog activity; when it appears, tap its button ten times to
# dismiss it. (Python 2 syntax; log-message typos are left as-is — they are
# runtime strings.)
def checkLog(timeout=60):
    print 'start ckeck logcat for vending window'
    time.sleep(1)
    timeinit = 0
    while timeinit <= timeout:
        if re.search('{com\.android\.vending/com\.google\.android\.finsky\.verifier\.impl\.ConsentDialog}*', os.popen('adb shell logcat -d |grep activity').read()):
            print 'find the google vending window,start click'
            time.sleep(1)
            # Tap the dialog's confirm button at a fixed screen coordinate.
            for i in range(10):
                os.system('adb shell input tap 1176 593')
                time.sleep(1)
            print 'skip succuess'
            break
        print 'not find the window,waiting....'
        timeinit += 5
        time.sleep(5)
# Entry point: wait for boot, then install the APK and dismiss the Google
# security dialog; bail out with a message if the device never boots.
def startSkip():
    if isBoot():
        installApk()
        checkLog()
    else:
        print 'skip failed, no devices boot'
|
21,785 | 227931c5e88a707904f6e4aa11fa305b56f5b600 |
# Minimal singly-linked-list node for the reference-semantics demo below.
# (Python 2 syntax.)
class Node(object):
    def __init__(self, data=None, next_node=None):
        self.data = data
        self.next = next_node
    def print_node(self):
        # Print this node's payload.
        print "data = " + str(self.data)
def main():
    # Build a 3-node list 1 -> 2 -> 3 and show that `head` and `node_1`
    # reference the SAME object: mutating one is visible through the other.
    node_1 = Node(1)
    node_2 = Node(2)
    node_3 = Node(3)
    node_1.next = node_2
    node_2.next = node_3
    head = node_1
    print "original data"
    node_1.print_node()
    head.print_node()
    # Mutate through node_1; head prints the changed value too.
    node_1.data = 0
    print"changed data"
    node_1.print_node()
    head.print_node()
main()
|
21,786 | d120e9713e65d4693fbff8f993ade4e5c521ac17 | import numpy as np
import matplotlib.pyplot as plt
from random import shuffle
# INITIALIZING GLOBAL VARIABLES
# NOTE(review): `global` at module level is a no-op; the declarations below
# only document intent — the bindings that matter are the two assignments.
global layers
global ERROR
global number_of_neurons_by_layer
# layers: list of per-layer dicts (weights, activations, deltas) built by init().
layers = []
# ERROR: history of squared errors appended during training.
ERROR = []
def logistic(x):
    """Standard sigmoid activation: 1 / (1 + e^(-x)). Works elementwise."""
    neg_exp = np.exp(-x)
    return 1 / (1 + neg_exp)
def dlogistic(x):
    """Derivative of the logistic sigmoid: s(x) * (1 - s(x)).

    Evaluates the sigmoid once and reuses it — the original called
    logistic(x) twice, doing the exp() work twice per call.
    """
    s = 1 / (1 + np.exp(-x))
    return s * (1 - s)
def init(layers_size):
    """Build the global `layers` structure for a fresh network whose layer
    widths are given by `layers_size` (input first, output last)."""
    global number_of_neurons_by_layer
    number_of_neurons_by_layer = layers_size
    """
    "number_of_neurons_by_layer" is a list that counts the number of neurons
    from the input to the output layer
    "y" is the concatenation of "1" and "v", the first layer has only this parameter;
    "v" is the flow vector;
    "weigths" are the concatenation of a column of biases followed by columns of weigths;
    "biases" are the first column of "weigths";
    "delta" is the lower-case delta from class notes (derivative of squared error with
    respect to "v");
    "Delta_w" is the upper-case Delta from class notes (the step for updating the
    weigths: it need one more parameter, the length-step of gradient descent method,
    the parameter <eta> from class notes);
    "error" is the error vector between the desired and the obtained outputs, it is
    stored only at the last layer;
    "error2" is the summation of squared errors calculated at the vector "error", it
    is also stored only at the last layer;
    """
    layers.clear()
    for i in range(len(number_of_neurons_by_layer)):
        if i == 0:
            # Input layer holds only y = [1; v] (bias slot plus the inputs).
            y = np.ones((number_of_neurons_by_layer[i]+1, 1))
            d = {"y": y}
            layers.append(d)
            continue
        # Random-normal weights; column 0 doubles as the bias column.
        w = np.random.normal(0, 1, (
            number_of_neurons_by_layer[i],
            number_of_neurons_by_layer[i-1]+1
        ))
        b = w[:, 0]
        y = np.ones((number_of_neurons_by_layer[i]+1, 1))
        # v is a VIEW into y (rows 1..end), so writing v updates y in place.
        v = y[1:, :]
        delta = np.zeros(v.shape)
        Delta_w = np.zeros(w.shape)
        d = {"weigths": w, "biases": b, "y": y,
             "v": v, "delta": delta, "Delta_w": Delta_w}
        layers.append(d)
    # Error bookkeeping lives only on the output layer.
    layers[-1]["error"] = 0*layers[-1]["y"][1:, :].copy()
    layers[-1]["error2"] = 0
    # END: INITIALIZING THE LAYERS
def flow(input_):
    """
    it makes the flow of a given input through the network,
    all data are stored in the layers "y" and "v"
    """
    global number_of_neurons_by_layer
    if len(input_) != number_of_neurons_by_layer[0]:
        raise IndexError(
            f"\033[91mInput length is incorrect. It must be {number_of_neurons_by_layer[0]}.\033[m")
    # Load the input below the bias slot of layer 0's y vector.
    layers[0]["y"][1:] = np.array(input_).flatten().reshape(len(input_), 1)
    # Forward pass: writing v[:] also updates y (v is a view into y).
    for i_lay in range(1, len(layers)):
        layers[i_lay]["v"][:] = logistic(
            layers[i_lay]["weigths"] @ layers[i_lay-1]["y"]
        )
def error(input_, output):
    """
    it computes the error vector between desired and obtained output,
    stored at the last layer
    """
    global number_of_neurons_by_layer
    if len(output) != number_of_neurons_by_layer[-1]:
        raise IndexError(
            f"\033[91mDesired output length is incorrect. It must be {number_of_neurons_by_layer[-1]}.\033[m")
    output = np.array(output).reshape(len(output), 1)
    # Run the forward pass, then store (desired - obtained) on the output layer.
    flow(input_)
    layers[-1]["error"] = output - layers[-1]["v"]
def error2(input_, output):
    """
    it computes the sum of quadratic error of a given input,
    stored at the last layer
    """
    error(input_, output)
    # e.T @ e is a 1x1 matrix holding the sum of squared errors.
    layers[-1]["error2"] = layers[-1]["error"].T @ layers[-1]["error"]
def backpropagate(eta, momentum):
    """
    it computes "delta" and "Delta_w"
    """
    # Walk from the output layer back to layer 1 (layer 0 has no weights).
    for i_lay in range(len(layers)-1, 0, -1):
        lay = layers[i_lay]
        if i_lay == len(layers)-1:
            # Output layer: delta comes straight from the stored error.
            lay["delta"] = lay["error"] * dlogistic(lay["v"])
        else:
            # Hidden layer: propagate through the next layer's weights,
            # dropping the bias column ([:, 1:]).
            lay["delta"] = (layers[i_lay+1]["weigths"][:, 1:].T @ layers[i_lay+1]
                            ["delta"]) * dlogistic(lay["v"])
        # Gradient step plus momentum on the previous step.
        lay["Delta_w"] = eta * lay["delta"] @ layers[i_lay - 1]["y"].T +\
            momentum * lay["Delta_w"]
def updateweigths():
    """Apply the pending gradient step, w <- w + Delta_w, on every trainable
    layer (all layers except the input layer)."""
    for lay in layers[1:]:
        lay["weigths"] += lay["Delta_w"]
def getweigths():
    """Return the weight matrices of every trainable layer, ordered from the
    first hidden layer to the output layer."""
    return [lay["weigths"] for lay in layers[1:]]
def get_Delta_weigths():
    """Return the pending Delta_w step of every trainable layer, ordered from
    the first hidden layer to the output layer."""
    return [lay["Delta_w"] for lay in layers[1:]]
def setweigths(ls):
    """Install `ls` as the layer weights; entries must be numpy arrays of
    matching shapes, ordered from first hidden layer to output layer."""
    for offset, lay in enumerate(layers[1:]):
        lay["weigths"] = ls[offset]
def set_Delta_weigths(ls):
    """Install `ls` as the layers' pending Delta_w steps; entries must be
    numpy arrays of matching shapes, ordered hidden-to-output."""
    for offset, lay in enumerate(layers[1:]):
        lay["Delta_w"] = ls[offset]
def train_cyclic(inputs, outputs, eta=0.55, maxit=1000, momentum=0.1, plot=False):
    """
    it performs the cyclic mode of training
    (weights are updated after EVERY pattern; patterns shuffled each cycle)
    """
    global ERROR
    ERROR.clear()
    min_error = 100
    ins_outs = list(zip(inputs, outputs))
    counter = 0
    while counter <= maxit:
        counter += 1
        shuffle(ins_outs)
        for pair in ins_outs:
            i, o = pair
            error2(i, o)
            ERROR.append(layers[-1]["error2"].item())
            # NOTE(review): bare except silences any failure while tracking
            # the best weights (e.g. NameError before the first improvement).
            try:
                if ERROR[-1] < min_error:
                    min_error = ERROR[-1]
                    optimal_w = getweigths()
                    min_error_counter = counter
                    print(
                        f"Minimum error found = {min_error}, at counter = {min_error_counter}", end="\r")
            except:
                pass
            backpropagate(eta, momentum)
            updateweigths()
    # Restore the best weights seen during training.
    setweigths(optimal_w)
    print(f"\vMinimum error reached at the {min_error_counter}st cycle")
    if plot:
        plt.plot(np.arange(len(ERROR)), ERROR, "b*-")
        plt.xlabel("Number of cycles")
        plt.ylabel("Sum of quadratic errors")
        plt.title("CYCLIC MODE\nERROR vs CYCLES")
        plt.grid()
        plt.show()
def train_batch(inputs, outputs, eta=0.55, maxit=1000, momentum=0.1, plot=False):
    """
    it performs the batch mode of training
    (per-pattern Delta_w steps are accumulated and applied once per cycle)
    """
    global ERROR
    ERROR.clear()
    min_error = 100
    ins_outs = list(zip(inputs, outputs))
    counter = 0
    while counter <= maxit:
        counter += 1
        shuffle(ins_outs)
        Dws = []
        errors = []
        for pair in ins_outs:
            i, o = pair
            error2(i, o)
            errors.append(layers[-1]["error2"].item())
            # Save weights, backpropagate to get this pattern's Delta_w,
            # record it, then restore the weights (no per-pattern update).
            ws = getweigths()
            backpropagate(eta, momentum)
            Dws.append(get_Delta_weigths())
            setweigths(ws)
        ERROR.append(sum(errors))
        # NOTE(review): bare except silences any failure while tracking the
        # best weights (e.g. NameError before the first improvement).
        try:
            if ERROR[-1] < min_error:
                min_error = ERROR[-1]
                optimal_w = getweigths()
                min_error_counter = counter
                print(
                    f"Minimum error found = {min_error}, at counter = {min_error_counter}", end="\r")
        except:
            pass
        # Sum the per-pattern steps layer by layer and apply once.
        Delta_w = []
        for ws in range(len(Dws[0])):
            Delta_w.append(
                sum(
                    [Dws[pattern][ws] for pattern in range(len(ins_outs))]
                )
            )
        set_Delta_weigths(Delta_w)
        updateweigths()
    # Restore the best weights seen during training.
    setweigths(optimal_w)
    print(f"\vMinimum error reached at the {min_error_counter}st cycle")
    if plot:
        plt.plot(np.arange(len(ERROR)), ERROR, "b*-")
        plt.xlabel("Number of cycles")
        plt.ylabel("Sum of quadratic errors")
        plt.title("BATCH MODE:\nERROR vs CYCLES")
        plt.grid()
        plt.show()
def test(inputs, outputs, plot=False, tolerance=1e-4):
    """Evaluate the trained network on (inputs, outputs) pairs, counting a
    success when the squared error of a pattern is below `tolerance`.

    Returns (errors, mean): the per-pattern squared errors and their mean.
    """
    print("-"*10, "BEGINNING OF THE TEST", "-"*10)
    ins_outs = list(zip(inputs, outputs))
    output_obtained = []
    errors = []
    successes, failures = 0, 0
    for io in ins_outs:
        i, o = io
        error2(i, o)
        errors.append(layers[-1]["error2"].item())
        output_obtained.append(layers[-1]["v"].flatten().tolist())
        if errors[-1] < tolerance:
            successes += 1
        else:
            failures += 1
    if plot:
        plt.plot(np.arange(len(errors)), errors, "b*-")
        plt.xlabel("pattern to test")
        plt.ylabel("Sum of quadratic errors")
        plt.title("ERROR vs PATTERN")
        plt.grid()
        plt.show()
    mean = sum(errors)/len(errors)
    # ANSI color codes: blue successes, red failures, magenta mean.
    print(f"TEST SAYS: Mean square error is {mean}.")
    print("TEST STATISTICS: \ttolerance is", tolerance)
    print("\033[94mSuccesses:\t{:8d},\t {:.2f}%".format(
        successes, 100*successes/len(ins_outs)))
    print("\033[91mFailures:\t{:8d},\t {:.2f}%\033[m".format(
        failures, 100*failures/len(ins_outs)))
    print("\033[95mThe mean of quadratic error is {:.6g}.\033[m".format(mean))
    print("Quadratic errors:\n", errors, "\n")
    print("Outputs:\n", output_obtained, "\n")
    print("-"*10, "END OF THE TEST", "-"*10)
    return errors, mean
def save(filename):
    """Dump the current network weights to `filename` as an importable Python
    module defining `weigths` (a list of numpy arrays).

    Uses a context manager so the file is closed even if a write fails
    (the original paired open()/close() with no error handling).
    """
    W = getweigths()
    with open(filename, "w") as file:
        file.write("import numpy as np\n\n")
        file.write("weigths = [\n")
        for w in W:
            # repr() of a numpy array lacks the module prefix; prepend "np."
            # so the generated file evaluates back to arrays.
            file.write("np.")
            file.write(w.__repr__()+",\n")
        file.write("]\n\n")
|
21,787 | 738a63bd37055b13caebd6e21b74228018586093 | # -*- coding: utf-8 -*-
"""
Created on Wed Jun 17 12:12:14 2015
@author: Meredith
"""
|
21,788 | 27ad5db6d2358edfda5f7ff81155978f023dc6d6 | import os
from flask import Flask
from flask_sqlalchemy import SQLAlchemy
from flask_migrate import Migrate
app = Flask(__name__)
# SECURITY NOTE(review): hard-coded secret key; move to an environment
# variable before deploying.
app.config['SECRET_KEY'] = 'aisehi'
# SQLite database file lives next to this module.
basedir = os.path.abspath(os.path.dirname(__file__))
app.config['SQLALCHEMY_DATABASE_URI'] = 'sqlite:///'+os.path.join(basedir,'data.sqlite')
app.config['SQLALCHEMY_TRACK_MODIFICATIONS'] = False
db = SQLAlchemy(app)
Migrate(app,db)
# Imported here (after `db` exists) to avoid a circular import with the views.
from my_project.pokemons.views import pokemon_blueprint
from my_project.trainers.views import trainer_blueprint
from my_project.matches.views import match_blueprint
app.register_blueprint(pokemon_blueprint,url_prefix='/pokemons')
app.register_blueprint(trainer_blueprint,url_prefix='/trainers')
app.register_blueprint(match_blueprint,url_prefix='/matches')
|
21,789 | 31ec22a2b8ddcb5a630718f25f36119b2b908f7f | #-*- coding: euc-kr -*-
import maya.cmds as cmds
def addCustomAttribute(main_=None):
    """Ensure each target node carries a keyable 'Add__Ctr__' enum attribute.

    Targets are the current selection when `main_` is None, otherwise the
    nodes matched by `main_`. Missing attributes are created, made keyable,
    then locked; existing ones are simply made keyable again.
    """
    targets = cmds.ls(sl=1) if main_ is None else cmds.ls(main_)
    attr_suffix = '.Add__Ctr__'
    for node in targets:
        full_attr = node + attr_suffix
        if cmds.objExists(full_attr):
            # Attribute already present: re-expose it in the channel box.
            cmds.setAttr('%s.Add__Ctr__' % node, e=1, keyable=1)
        else:
            # Create a divider-style enum attribute, show it, then lock it.
            cmds.addAttr(node, ln='Add__Ctr__', nn="Add__Ctr__", at="enum", en="___________:")
            cmds.setAttr(full_attr, e=1, keyable=1)
            cmds.setAttr(full_attr, lock=1)
def makeOwnGRP(lonelyNode, style='Pos'):
    """Wrap `lonelyNode` in a new, co-located empty group and return its name.

    Args:
        lonelyNode: node name (or single-element list) to re-parent.
        style: suffix of the new group, 'Pos' or 'GRP'.

    Raises:
        ValueError: for any other `style` — the original fell through to a
        NameError on the undefined `grp` variable in that case.
    """
    if type(lonelyNode) == list:
        lonelyNode = lonelyNode[0]
    cmds.select(cl=True)
    if style == 'Pos':
        grp = cmds.group(em=True, n=lonelyNode+'_Pos')
    elif style == 'GRP':
        grp = cmds.group(em=True, n=lonelyNode+'_GRP')
    else:
        raise ValueError("style must be 'Pos' or 'GRP', got %r" % (style,))
    # Snap the group onto the node via a throwaway parent constraint,
    # then parent the node under its new group.
    cmds.select(lonelyNode, r=1)
    cmds.select(grp, add=1)
    cmds.delete(cmds.parentConstraint(mo=0, sr="none", st="none"))
    cmds.parent(lonelyNode, grp)
    return grp
def getRoot(nodType, sel=None):
    """Walk up the DAG path of the (first) selected/`sel` node and return the
    first ancestor-path entry whose node type matches `nodType`.

    NOTE(review): returns None implicitly when no entry matches, and
    `sel[i+1]` can raise IndexError on the last path element — confirm
    callers always pass paths where a match exists before the end.
    """
    if sel is None:
        cmds.select(sl=True, r=True)
    else:
        cmds.select(sel, r=True)
    # Long (full) DAG name split on '|' gives the ancestor chain.
    sel = cmds.ls(sl=True, l=True)[0].split('|')
    cmds.select(cl=True)
    for i, j in enumerate(sel):
        if cmds.nodeType(sel[i+1]) == str(nodType):
            root = sel[i+1]
            return root
def controllerColor(ctrl, color):
    """Set a controller's draw-override color.

    Args:
        ctrl: controller name (or single-element list).
        color: one of 'yellow', 'red', 'default', 'pink', 'white', 'blue';
            any other value falls back to the 'default' color index.
    """
    if type(ctrl) == list:
        ctrl = ctrl[0]
    colorSet = {'yellow':17, 'red':13, 'default':5, 'pink':9, 'white':16, 'blue':6}
    # BUG FIX: the original used colorSet.get(color, 'darkblue'), which for an
    # unknown color handed the literal string 'darkblue' (not a color index)
    # to setAttr. Fall back to the numeric 'default' entry instead.
    color = colorSet.get(color, colorSet['default'])
    cmds.setAttr("{0}.overrideEnabled".format(ctrl), True)
    cmds.setAttr("{0}.overrideColor".format(ctrl), color)
def makeController(selec, parent=False, shape='circle', addName=None, scale=1, newName=None, pointConst=False, normalPlane='yz'):
    """Create a NURBS controller (plus _Pos/_Const group hierarchy) for each
    node in `selec` (or the current selection when None).

    Args:
        selec: node(s) to build controllers for; None uses the selection.
        parent: when True, parent each source node under its new controller.
        shape: 'circle', 'star', 'cube' or 'cross'.
        addName / newName: suffix or full replacement for the derived name.
        scale: uniform CV scale applied to the curve.
        newName: overrides the derived name entirely.
        pointConst: align by position only instead of full parent alignment.
        normalPlane: 'yz', 'xz' or 'xy' — plane the circle/star faces.

    Returns:
        List of created controller transform names.
    """
    ctrlList = []
    if selec is None:
        selec = cmds.ls(sl=True)
    if type(selec) != list:
        selec = selec.split()
    for j in range(len(selec)):
        #if name is None:
        # Derive the controller name from the joint name, dropping the
        # '_joint' / '_end' suffixes.
        if '_joint' in selec[j]:
            name = selec[j].replace('_joint', '')
            if '_end' in name:
                name = name.replace('_end', '')
        else:
            name = selec[j]
        if addName != None:
            name = name + addName
        if newName != None:
            name = newName
        # Map the plane keyword to a circle normal vector.
        if normalPlane == 'yz':
            normalPlane = (1, 0, 0)
        elif normalPlane == 'xz':
            normalPlane = (0, 1, 0)
        elif normalPlane == 'xy':
            normalPlane = (0, 0, 1)
        if shape == 'circle':
            ctrl = cmds.circle(nr=normalPlane, c=(0, 0, 0), r=2, n=name+'_ctrl')
        elif shape=='star':
            # Star = circle with every other CV pushed outward.
            ctrl = cmds.circle(nr=normalPlane, c=(0, 0, 0), r=2, n=name+'_ctrl')
            cmds.select(cl=True)
            for i in [1, 3, 5, 7]:
                cmds.select(ctrl[0]+'.cv['+str(i)+']', add=True)
            cmds.scale(2, 2, 2, r=True, ocp=True)
        elif shape=='cube':
            ctrl = cmds.curve(d=1, p=[(0.5, 0.5, 0.5), (0.5, 0.5, -0.5), (-0.5, 0.5, -0.5), (-0.5, 0.5, 0.5),
                                      (0.5, 0.5, 0.5), (0.5, -0.5, 0.5), (0.5, -0.5, -0.5), (0.5, 0.5, -0.5),
                                      (-0.5, 0.5, -0.5), (-0.5, -0.5, -0.5), (0.5, -0.5, -0.5), (0.5, -0.5, 0.5),
                                      (-0.5, -0.5, 0.5), (-0.5, 0.5, 0.5), (-0.5, 0.5, -0.5), (-0.5, -0.5, -0.5), (-0.5, -0.5, 0.5)],
                              k=[0,1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16], n=name+'_ctrl')
        elif shape=='cross':
            ctrl = cmds.curve(d=1, p=[(-0.0959835, 0, -0.751175), (-0.0959835, 0, -0.0987656), (-0.751175, 0, -0.0987656), (-0.751175, 0, -0.336638),
                                      (-1.001567, 0, 0), (-0.751175, 0, 0.336638), (-0.751175, 0, 0.0987656), (-0.0959835, 0, 0.0987656), (-0.0959835, 0, 0.751175),
                                      (-0.336638, 0, 0.751175), (0, 0, 1.001567), (0.336638, 0, 0.751175), (0.0959835, 0, 0.751175), (0.0959835, 0, 0.0987656),
                                      (0.751175, 0, 0.0987656), (0.751175, 0, 0.336638), (1.001567, 0, 0), (0.751175, 0, -0.336638), (0.751175, 0, -0.0987656),
                                      (0.0959835, 0, -0.0987656), (0.0959835, 0, -0.751175), (0.336638, 0, -0.751175), (0, 0, -1.001567), (-0.336638, 0, -0.751175), (-0.0959835, 0, -0.751175)],
                              k=[0,1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24], n=name+'_ctrl')
        if scale != 1:
            # Scale every CV around its own pivot; linear curves (cube/cross)
            # have one more CV than their span count.
            cmds.select(cl=True)
            if type(ctrl) != list: ctrl = ctrl.split()
            spans = cmds.getAttr(ctrl[0]+'.spans')
            if shape=='cube' or shape=='cross': spans += 1
            for i in range(spans):
                cmds.select(ctrl[0]+'.cv['+str(i)+']', add=True)
            cmds.scale(scale, scale, scale, r=True, ocp=True)
        cmds.select(cl=True)
        # Hierarchy: name_Pos > name_Const > controller.
        posGrp = cmds.group(ctrl, n=name+'_Pos')
        cmds.group(ctrl, n=name+'_Const')
        if pointConst is True:
            cmds.delete(cmds.pointConstraint(selec[j], posGrp))
        else:
            cmds.delete(cmds.parentConstraint(selec[j], posGrp))
        if parent is True:
            cmds.parent(selec[j], ctrl[0])
        ctrlList.append(ctrl[0])
    return ctrlList
def aligner(driven, driver, Const=False):
    """Snap `driven` onto `driver` using a throwaway constraint.

    `Const` selects the constraint type: 'parent', 'orient' or 'point'.
    Any other value only performs the selection and moves nothing.
    """
    cmds.select(driver, r=1)
    cmds.select(driven, add=1)
    if Const == 'parent':
        temp_const = cmds.parentConstraint(mo=False, sr="none", st="none")
        cmds.delete(temp_const)
    elif Const == 'orient':
        temp_const = cmds.orientConstraint(mo=False)
        cmds.delete(temp_const)
    elif Const == 'point':
        temp_const = cmds.pointConstraint(mo=False)
        cmds.delete(temp_const)
def limbsSetUp(initialSelec):
    """Build a complete IK/FK limb rig (arm or leg) from one root joint.

    initialSelec -- a joint named like '<part>_<side>_joint' (e.g.
    'shoulder_L_joint' or 'thigh_L_joint') with a two-deep child chain
    (e.g. elbow -> wrist).  Pre-built roll joint chains named
    '<part>_<side>_roll_1_skin_joint' / '<part>_<side>_roll_end_joint'
    must already exist, and helpers makeController / makeOwnGRP /
    getRoot / addCustomAttribute / controllerColor / aligner must be
    in scope (defined elsewhere in this file).

    Creates duplicated IK and FK joint chains, spline-IK ribbon bend
    controllers over the roll chains, an elbow/knee lock control, a pole
    vector, twist-extraction joints, IK stretch, an IK/FK switcher and
    (when finger joints exist under the wrist root) finger controls, then
    files everything into the scene's group hierarchy.
    """
    #++ functions ++#
    def makeIKRibbon(selectedName):
        """Build a 4-span spline-IK 'ribbon' over the roll joint chain of
        *selectedName* (joint name without the '_joint' suffix); returns
        the list of bend controllers created along the spline curve."""
        rollNamingFirst = '_roll_1_skin_joint'
        rollNamingEnd = '_roll_end_joint'
        selecRollFirst = selectedName+rollNamingFirst
        selecRollEnd = selectedName+rollNamingEnd
        if cmds.objExists('spik_GRP') is False:
            spikGRP = cmds.group(empty=True, w=True, n='spik_GRP')
        else:
            spikGRP = cmds.ls('spik_GRP')[0]
        #!!!!!!!!!
        # remap segment names to the limb-side prefix used for the group
        # naming: shoulder/elbow -> arm, thigh/knee -> leg
        sideName = selectedName
        if selectedName.split('_')[0] == 'shoulder':
            sideName = sideName.replace('shoulder', 'arm')
        elif selectedName.split('_')[0] == 'elbow':
            sideName = sideName.replace('elbow', 'arm')
        elif selectedName.split('_')[0] == 'thigh':
            sideName = sideName.replace('thigh', 'leg')
        elif selectedName.split('_')[0] == 'knee':
            sideName = sideName.replace('knee', 'leg')
        sideName = sideName.replace('_joint', '')
        #!!!!!!!!!
        spikHandle = cmds.ikHandle(sj=selecRollFirst, ee=selecRollEnd, sol='ikSplineSolver', pcv=False, ns=4, n=selectedName + '_spik_handle')
        spikCRV = selectedName+'_spik_curve'
        # ikSplineSolver auto-creates its curve as 'curve1'; rename it so
        # later string-built references ('<name>_spik_curve...') resolve
        cmds.rename('curve1', spikCRV)
        curvePoint = cmds.pointOnCurve(spikCRV, p=True)
        jointNum = 1
        spikJNTList = []
        # place a control joint on 5 of the 7 CVs (skip 1 and 5), oriented
        # to the roll chain; these drive the curve via a skinCluster below
        for i in range(7):
            spikJntPos = cmds.pointPosition(spikCRV+'.cv['+str(i)+']')
            cmds.select(cl=True)
            if i != 1 and i != 5:
                spikJNT = cmds.joint(p=spikJntPos, n=selectedName+'_spik_'+str(jointNum)+'_joint', rad=2)
                cmds.hide(spikJNT)
                if i == 6:
                    aligner(spikJNT, selecRollEnd, Const='orient')
                else:
                    aligner(spikJNT, selecRollFirst, Const='orient')
                cmds.makeIdentity(spikJNT, apply=True, t=0, r=1, s=0, n=1, pn=1)
                spikJNTList.append(spikJNT)
                jointNum += 1
        cmds.select(spikJNTList, r=True)
        cmds.select(spikCRV, add=True)
        cmds.skinCluster(tsb=True, dr=4.0)
        spikCtrlList = makeController(spikJNTList, parent=True, )
        # curve-length / rest-length ratio drives scaleX of every roll
        # joint, so the chain stretches with the ribbon
        crvInfoNode = cmds.createNode('curveInfo', n=spikCRV+'_crvINFO')
        cmds.connectAttr(spikCRV+'.worldSpace[0]', crvInfoNode+'.inputCurve')
        mdNode1 = cmds.createNode('multiplyDivide', n=spikCRV+'_MD1')
        cmds.setAttr(mdNode1+'.operation', 2)
        mdNode2 = cmds.createNode('multiplyDivide', n=spikCRV+'_MD2')
        cmds.setAttr(mdNode2+'.operation', 2)
        crvLength = cmds.getAttr(crvInfoNode+'.arcLength')
        cmds.setAttr(mdNode2+'.input2X', crvLength)
        cmds.connectAttr(crvInfoNode+'.arcLength', mdNode1+'.input1X')
        cmds.connectAttr(mdNode1+'.outputX', mdNode2+'.input1X')
        rollList = cmds.listRelatives(selecRollFirst, c=True, ad=True, type='joint')
        list.reverse(rollList)
        rollList.insert(0, selecRollFirst)
        for i in range(len(rollList)):
            cmds.connectAttr(mdNode2+'.outputX', rollList[i]+'.scaleX')
        # the three middle controllers follow the two end controllers with
        # blended point-constraint weights 0.75/0.25, 0.50/0.50, 0.25/0.75
        val1 = 0.75
        val2 = 0.25
        for i in range(1,4):
            cmds.select(spikCtrlList[0], r=True)
            cmds.select(spikCtrlList[-1], add=True)
            ctrlConst = cmds.listRelatives(spikCtrlList[i], parent=True)[0]
            cmds.select(ctrlConst, add=True)
            cmds.pointConstraint(mo=False, weight=1)
            cmds.setAttr(ctrlConst+'_pointConstraint1.'+spikCtrlList[0]+'W0', val1)
            cmds.setAttr(ctrlConst+'_pointConstraint1.'+spikCtrlList[-1]+'W1', val2)
            val1 -= 0.25
            val2 += 0.25
        # aim-style rotation rig: an SC-IK chain between the ribbon ends
        # orients the group holding the middle controllers
        rotJNT1 = cmds.duplicate(rollList[0], po=True, n=selectedName+'_rot_1_ik_joint')
        rotJNT2 = cmds.duplicate(rollList[-1], po=True, n=selectedName+'_rot_2_ik_joint')
        cmds.parent(rotJNT1, w=True)
        cmds.parent(rotJNT2, rotJNT1)
        rotSCik = cmds.ikHandle(sj=rotJNT1[0], ee=rotJNT2[0], sol='ikSCsolver', n=selectedName + '_rot_SCik_handle')
        cmds.parent(selectedName + '_rot_SCik_handle', spikCtrlList[-1])
        rotGRP = cmds.group(empty=True, n=selectedName+'_rot_GRP', w=True)
        aligner(rotGRP, spikCtrlList[0], Const='parent')
        for i in range(1,4):
            spikCtrlPos = cmds.listRelatives(cmds.listRelatives(spikCtrlList[i], ap=True, p=True), p=True)
            cmds.parent(spikCtrlPos, rotGRP)
        cmds.select(rotJNT1, r=True)
        cmds.select(rotGRP, add=True)
        cmds.orientConstraint(mo=True)
        cmds.parent(rotJNT1, spikCtrlList[0])
        cmds.hide(rotJNT1)
        cmds.hide(rotSCik)
        cmds.hide(spikHandle[0])
        #!!!!!!!!!
        if cmds.objExists(sideName+'_spik_GRP') is False:
            sidespikGRP = cmds.group(empty=True, w=True, n=sideName+'_spik_GRP')
        else:
            sidespikGRP = cmds.ls(sideName+'_spik_GRP')[0]
        #!!!!!!!!!
        selectedspikGRP = cmds.group(empty=True, w=True, n=selectedName+'_spik_GRP')
        cmds.parent(spikHandle[0], spikCRV, selectedspikGRP)
        cmds.parent(selectedspikGRP, sidespikGRP)
        return spikCtrlList
    #++ MAIN ++#
    print(initialSelec)
    # side token ('L'/'R') comes from the joint's '<part>_<side>_joint' name
    side = initialSelec.split('_')[1]
    selecFirst = initialSelec
    selecSecond = (cmds.listRelatives(selecFirst, c=True))[0]
    selecThird = (cmds.listRelatives(selecSecond, c=True))[0]
    #* make IK set
    dup = cmds.duplicate(initialSelec, renameChildren=True, n=initialSelec.replace('_joint', '_IK_joint'))
    cmds.rename(dup[1], dup[1].replace('_joint1', '_IK_joint'))
    cmds.rename(dup[2], dup[2].replace('_joint1', '_IK_joint'))
    #* make FK set
    dup = cmds.duplicate(initialSelec, renameChildren=True, n=initialSelec.replace('_joint', '_FK_joint'))
    cmds.rename(dup[1], dup[1].replace('_joint1', '_FK_joint'))
    cmds.rename(dup[2], dup[2].replace('_joint1', '_FK_joint'))
    selectedIKFirst = initialSelec.replace('_joint', '_IK_joint')
    selectedIKMiddle = (cmds.listRelatives(selectedIKFirst, c=True))[0]
    selectedIKLast = (cmds.listRelatives(selectedIKMiddle, c=True))[0]
    selectedFKFirst = initialSelec.replace('_joint', '_FK_joint')
    selectedFKMiddle = (cmds.listRelatives(selectedFKFirst, c=True))[0]
    selectedFKLast = (cmds.listRelatives(selectedFKMiddle, c=True))[0]
    # one ribbon per limb segment (upper and lower)
    selectedName1 = selecFirst.replace('_joint', '')
    ribbonCtrls1 = makeIKRibbon(selectedName1)
    selectedName2 = selecSecond.replace('_joint', '')
    ribbonCtrls2 = makeIKRibbon(selectedName2)
    selectedName3 = selecThird.replace('_joint', '')
    #++ get Side GRP name ++#
    sideGRPName = initialSelec
    if initialSelec.split('_')[0] == 'shoulder':
        sideGRPName = sideGRPName.replace('shoulder', 'arm')
    elif initialSelec.split('_')[0] == 'thigh':
        sideGRPName = sideGRPName.replace('thigh', 'leg')
    sideGRPName = sideGRPName.replace('_joint', '')
    #++ initial classify ++#
    rollShoulderGRP = makeOwnGRP(selectedName1+'_roll_1_skin_joint', style='GRP')
    rollElbowGRP = makeOwnGRP(selectedName2+'_roll_1_skin_joint', style='GRP')
    skinJointGRP = cmds.group(empty=True, p='joint_GRP', n=sideGRPName+'_joint_GRP')
    # cmds.parent(rollShoulderGRP, rollElbowGRP, skinJointGRP)
    if cmds.objExists('global_GRP') is False:
        globalGRP = cmds.group(empty=True, w=True, n='global_GRP')
    else:
        globalGRP = cmds.ls('global_GRP')[0]
    if cmds.objExists('Extras') is False:
        extras = cmds.group(empty=True, w=True, n='Extras')
    else:
        extras = cmds.ls('Extras')[0]
    #!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
    # only re-parent the shared groups the first time a limb is built
    cmds.select(cl=True)
    cmds.select(globalGRP, hi=True)
    check = cmds.ls(sl=True)
    if ('joint_GRP' in check) is False:
        cmds.parent('joint_GRP', globalGRP)
        cmds.parent('spik_GRP', extras)
    #!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
    #++ func ELbow Lock ++#
    # the elbow/knee 'lock' control carries the adjacent ribbon end
    # controllers of both segments
    upCtrlPosLast = selecFirst.replace('_joint', '_spik_5_Pos')
    lowCtrlPosFirst = selecSecond.replace('_joint', '_spik_1_Pos')
    lockGRP = cmds.group(empty=True, n=selectedName2+'_lock_ctrl_GRP')
    aligner(lockGRP, lowCtrlPosFirst, Const='parent')
    lockCtrl = makeController(lockGRP, shape='star')
    cmds.parent(upCtrlPosLast, lowCtrlPosFirst, lockGRP)
    # cmds.delete(lockGRP)
    addCustomAttribute(lockCtrl)
    cmds.addAttr(lockCtrl, ln='Sub_Controller_Visibility', nn="Sub Controller Visibility", at="enum", en="Off:On")
    cmds.setAttr(lockCtrl[0]+'.Sub_Controller_Visibility', e=1, keyable=1)
    cmds.connectAttr(lockCtrl[0]+'.Sub_Controller_Visibility', ribbonCtrls1[-1]+'.visibility')
    cmds.connectAttr(lockCtrl[0]+'.Sub_Controller_Visibility', ribbonCtrls2[0]+'.visibility')
    # cmds.parentConstraint(lockCtrl[0], lockGRP, mo=True)
    #++ Follow Codes Belongs Main ++#
    # both IK and FK chains constrain the same targets; the duplicated
    # constraints' weights are cross-faded by the IK/FK switch further down
    lockCtrlPos = getRoot(nodType='transform', sel=lockCtrl)
    cmds.parentConstraint(selectedIKMiddle, lockCtrlPos, mo=True)
    cmds.parentConstraint(selectedIKFirst, getRoot(nodType='transform', sel=ribbonCtrls1[0]))
    cmds.parentConstraint(selectedIKLast, getRoot(nodType='transform', sel=ribbonCtrls2[-1]))
    #:: Main classify ::#
    sideIKGRP = cmds.group(empty=True, w=True, n=sideGRPName+'_IK_GRP')
    ribbonCtrlPos1 = getRoot(nodType='transform', sel=ribbonCtrls1[0])
    ribbonCtrlPos2 = getRoot(nodType='transform', sel=ribbonCtrls1[1])
    ribbonCtrlPos4 = getRoot(nodType='transform', sel=ribbonCtrls2[-2])
    ribbonCtrlPos5 = getRoot(nodType='transform', sel=ribbonCtrls2[-1])
    sideBendctrlGRP = cmds.group(empty=True, w=True, n=sideGRPName+'_bend_ctrl_GRP')
    cmds.parent(ribbonCtrlPos2, ribbonCtrlPos4, sideBendctrlGRP)
    #++ Make IK controller ++#
    wristIKsubCtrl = makeController(selectedIKLast, shape='star', addName='_sub', scale=0.8)
    wristIKmainCtrl = makeController(selectedIKLast, shape='star', addName='_main', scale=1.1)
    cmds.parent(getRoot(sel=wristIKsubCtrl, nodType='transform'), wristIKmainCtrl)
    shoulderIKCtrl = makeController(selectedIKFirst, shape='star')
    rpIKHandle = cmds.ikHandle(sj=selectedIKFirst, ee=selectedIKLast, sol='ikRPsolver', n=selectedName1+'_IK_handle')
    cmds.parent(rpIKHandle[0], wristIKsubCtrl)
    cmds.hide(rpIKHandle)
    controllerColor(shoulderIKCtrl, 'red')
    controllerColor(wristIKmainCtrl, 'red')
    controllerColor(wristIKsubCtrl, 'red')
    cmds.parent(getRoot(sel=wristIKmainCtrl, nodType='transform'), sideIKGRP)
    cmds.parent(getRoot(sel=shoulderIKCtrl, nodType='transform'), sideIKGRP)
    #++ make FK Controllers ++#
    shoulderFKctrl = makeController(selectedFKFirst, scale=1.2)
    controllerColor(shoulderFKctrl, 'yellow')
    shoulderFKctrlPos = getRoot(sel=shoulderFKctrl, nodType='transform')
    addCustomAttribute(shoulderFKctrl)
    cmds.addAttr(shoulderFKctrl, ln='stretch', nn='stretch', at='float', minValue=0, defaultValue=1)
    cmds.setAttr(shoulderFKctrl[0]+'.stretch', e=1, keyable=1)
    elbowFKctrl = makeController(selectedFKMiddle, scale=1.2)
    controllerColor(elbowFKctrl, 'yellow')
    elbowFKctrlPos = getRoot(sel=elbowFKctrl, nodType='transform')
    addCustomAttribute(elbowFKctrl)
    cmds.addAttr(elbowFKctrl, ln='stretch', nn='stretch', at='float', minValue=0, defaultValue=1)
    cmds.setAttr(elbowFKctrl[0]+'.stretch', e=1, keyable=1)
    wristFKctrl = makeController(selectedFKLast, scale=1.2)
    controllerColor(wristFKctrl, 'yellow')
    wristFKctrlPos = getRoot(sel=wristFKctrl, nodType='transform')
    sideFKGRP = cmds.group(empty=True, w=True, n=sideGRPName+'_FK_GRP')
    cmds.parent(shoulderFKctrlPos, elbowFKctrlPos, wristFKctrlPos, sideFKGRP)
    #++ make fk connections ++#
    cmds.parentConstraint(selectedFKMiddle, lockCtrlPos, mo=True)
    cmds.parentConstraint(selectedFKLast, ribbonCtrlPos5, mo=True)
    cmds.parentConstraint(selectedFKFirst, ribbonCtrlPos1, mo=True)
    cmds.parentConstraint(selectedIKMiddle, rollElbowGRP, mo=True)
    cmds.parentConstraint(selectedFKMiddle, rollElbowGRP, mo=True)
    cmds.parentConstraint(shoulderFKctrl, selectedFKFirst, mo=True)
    cmds.orientConstraint(elbowFKctrl, selectedFKMiddle)
    cmds.pointConstraint(selectedFKLast, cmds.listRelatives(wristFKctrl, p=True)[0], mo=True)
    cmds.pointConstraint(selectedFKMiddle, cmds.listRelatives(elbowFKctrl, p=True)[0])
    #* fk shoulder controller drives elbow controller GRP by orient Constraining
    cmds.orientConstraint(shoulderFKctrl, cmds.listRelatives(elbowFKctrl, p=True)[0], mo=True)
    cmds.orientConstraint(elbowFKctrl, cmds.listRelatives(wristFKctrl, p=True)[0], mo=True)
    #* fk shoulder and elbow stretch connections
    cmds.connectAttr(shoulderFKctrl[0]+'.stretch', selectedFKFirst+'.scaleX')
    cmds.connectAttr(elbowFKctrl[0]+'.stretch', selectedFKMiddle+'.scaleX')
    #++ hands and finger setUp ++#
    wristRootJNT = selectedName3+'_root_joint'
    wristRootGRP = makeOwnGRP(wristRootJNT, style='GRP')
    cmds.parent(wristRootGRP, 'joint_GRP')
    cmds.pointConstraint(selectedIKLast, wristRootGRP, mo=True)
    cmds.pointConstraint(selectedFKLast, wristRootGRP, mo=True)
    #++ Set Orientation Buffer Locators ++#
    # locators ride the IK and FK wrist controls; the wrist root group is
    # orient-constrained to both, blended by the IK/FK switch below
    ikWristLoc = cmds.spaceLocator(n=selectedName3+'_ik_loc')
    aligner(ikWristLoc, wristIKsubCtrl, Const='parent')
    cmds.parent(ikWristLoc, wristIKsubCtrl)
    fkWristLoc = cmds.spaceLocator(n=selectedName3+'_fk_loc')
    aligner(fkWristLoc, wristFKctrl, Const='parent')
    cmds.parent(fkWristLoc, wristFKctrl)
    cmds.orientConstraint(ikWristLoc, fkWristLoc, wristRootGRP, mo=True)
    #++ pole Vector Controller ++#
    # a throw-away polygon through the three IK joints gives a surface
    # normal used to place/orient the pole vector control
    polVecCtrl = makeController(selectedIKMiddle, addName='_poleVector', shape='cube', scale=1.1)
    controllerColor(polVecCtrl, 'pink')
    poleVecPoly = cmds.polyCreateFacet(p=[cmds.xform(selectedIKFirst, q=True, t=True, ws=True), cmds.xform(selectedIKMiddle, q=True, t=True, ws=True),
        cmds.xform(selectedIKLast, q=True, t=True, ws=True)], ch=True, tx=1, n='poleVecPoly')
    cmds.select(cl=True)
    poleVtx = '{0}.vtx[1]'.format(poleVecPoly[0])
    poleVecPos = getRoot(sel=polVecCtrl, nodType='transform')
    cmds.normalConstraint(poleVtx, poleVecPos)
    cmds.delete(poleVecPoly)
    cmds.poleVectorConstraint(polVecCtrl, rpIKHandle[0])
    cmds.parent(poleVecPos, sideIKGRP)
    def fingerSetUp(side, sidectrlGRP):
        """Build FK and IK finger controls for the five fingers found as
        'finger_*<side>*' joints, and parent them under *sidectrlGRP*.
        Relies on wristRootJNT from the enclosing scope and assumes the
        usual 5-finger naming/count conventions -- the hard-coded list
        indices (0, 4, 8, 13, 18...) encode that layout."""
        #++ make FK finger Controllers ++#
        fingerFKJNTLists = cmds.ls('finger_*'+side+'*_FK_joint', type='joint')
        fingerFKJNTLists = list(filter(lambda x: 'end' not in x, fingerFKJNTLists)) #* or -> [f for f in fingerFKJNTLists if '_end_' not in f]
        fingerAllCtrlLists = makeController(selec=fingerFKJNTLists, scale=0.3)
        for i in fingerAllCtrlLists:
            controllerColor(i, 'yellow')
        for i in range(len(fingerFKJNTLists)):
            cmds.parentConstraint(fingerAllCtrlLists[i], fingerFKJNTLists[i], mo=True)
        #* separate finger Controller Lists by 3 then make child the later(-1) to the former(-2)
        fingerCtrlLists = list(filter(lambda x: 'root' not in x and '0' not in x, fingerAllCtrlLists))
        fingerCtrlLists = [fingerCtrlLists[f:f+3] for f in range(0, len(fingerCtrlLists), 3)]
        cmds.parent(getRoot(sel=fingerCtrlLists[0][-1], nodType='transform'), fingerCtrlLists[0][-2])
        cmds.parent(getRoot(sel=fingerCtrlLists[0][-2], nodType='transform'), fingerCtrlLists[0][-3])
        cmds.parent(getRoot(sel=fingerCtrlLists[1][-1], nodType='transform'), fingerCtrlLists[1][-2])
        cmds.parent(getRoot(sel=fingerCtrlLists[1][-2], nodType='transform'), fingerCtrlLists[1][-3])
        cmds.parent(getRoot(sel=fingerCtrlLists[2][-1], nodType='transform'), fingerCtrlLists[2][-2])
        cmds.parent(getRoot(sel=fingerCtrlLists[2][-2], nodType='transform'), fingerCtrlLists[2][-3])
        cmds.parent(getRoot(sel=fingerCtrlLists[3][-1], nodType='transform'), fingerCtrlLists[3][-2])
        cmds.parent(getRoot(sel=fingerCtrlLists[3][-2], nodType='transform'), fingerCtrlLists[3][-3])
        cmds.parent(getRoot(sel=fingerCtrlLists[4][-1], nodType='transform'), fingerCtrlLists[4][-2])
        # cmds.parent(getRoot(sel=fingerCtrlLists[4][-2], nodType='transform'), fingerCtrlLists[4][-3])
        # NOTE(review): on Python 3 filter() returns an iterator object that
        # is never equal to 0, so this condition is always True -- confirm
        # whether a truthiness/len check was intended
        if filter(lambda x: '0' in x, fingerAllCtrlLists) is not 0:
            #fingerAllCtrlLists = [fingerAllCtrlLists[f:f+4] for f in range(0, len(fingerAllCtrlLists), 4)]
            cmds.parent(getRoot(sel=fingerCtrlLists[0][-3], nodType='transform'), fingerAllCtrlLists[0])
            cmds.parent(getRoot(sel=fingerCtrlLists[1][-3], nodType='transform'), fingerAllCtrlLists[4])
            cmds.parent(getRoot(sel=fingerCtrlLists[2][-3], nodType='transform'), fingerAllCtrlLists[8])
            cmds.parent(getRoot(sel=fingerCtrlLists[3][-3], nodType='transform'), fingerAllCtrlLists[13])
            cmds.parent(getRoot(sel=fingerCtrlLists[4][-2], nodType='transform'), fingerAllCtrlLists[18])
        # NOTE(review): same always-True filter pattern as above
        if filter(lambda x: 'root' in x, fingerAllCtrlLists) is not 0:
            cmds.parent(getRoot(sel=fingerAllCtrlLists[8], nodType='transform'), fingerAllCtrlLists[12])
            cmds.parent(getRoot(sel=fingerAllCtrlLists[13], nodType='transform'), fingerAllCtrlLists[17])
            cmds.parent(getRoot(sel=fingerAllCtrlLists[12], nodType='transform'), fingerAllCtrlLists[17])
        fingerFKctrlGRP = cmds.group(empty=True, w=True, n='finger_FK_ctrl_GRP')
        aligner(fingerFKctrlGRP, wristRootJNT, Const='parent')
        if cmds.objExists('finger_'+side+'_ctrl_GRP') is False:
            fingerCtrlGRP = cmds.group(empty=True, w=True, n='finger_'+side+'_ctrl_GRP')
            aligner(fingerCtrlGRP, wristRootJNT, Const='parent')
        else:
            fingerCtrlGRP = cmds.ls('finger_'+side+'_ctrl_GRP')[0]
        cmds.parent(getRoot(sel=fingerCtrlLists[0][0], nodType='transform'), fingerFKctrlGRP)
        cmds.parent(getRoot(sel=fingerCtrlLists[1][0], nodType='transform'), fingerFKctrlGRP)
        cmds.parent(getRoot(sel=fingerCtrlLists[2][0], nodType='transform'), fingerFKctrlGRP)
        # cmds.parent(getRoot(sel=fingerCtrlLists[3][0], nodType='transform'), fingerFKctrlGRP)
        cmds.parent(getRoot(sel=fingerCtrlLists[4][0], nodType='transform'), fingerFKctrlGRP)
        cmds.parent(fingerFKctrlGRP, fingerCtrlGRP)
        cmds.parentConstraint(wristRootJNT, fingerCtrlGRP, mo=True)
        #++ make IK finger Controllers ++#
        fingerIKJNTLists = cmds.ls('finger_*'+side+'*_IK_joint', type='joint')
        fingerIKJNTLists = [f for f in fingerIKJNTLists if '_0_' not in f]
        fingerIKJNTLists = [fingerIKJNTLists[f:f+4] for f in range(0, len(fingerIKJNTLists), 4)]
        fingerIKhandle1 = cmds.ikHandle(sj=fingerIKJNTLists[0][0], ee=fingerIKJNTLists[0][-1], solver='ikSCsolver', n=fingerIKJNTLists[0][0].split('_')[1]+side+'_IK_handle')
        fingerIKhandle2 = cmds.ikHandle(sj=fingerIKJNTLists[1][0], ee=fingerIKJNTLists[1][-1], solver='ikSCsolver', n=fingerIKJNTLists[1][0].split('_')[1]+side+'_IK_handle')
        fingerIKhandle3 = cmds.ikHandle(sj=fingerIKJNTLists[2][0], ee=fingerIKJNTLists[2][-1], solver='ikSCsolver', n=fingerIKJNTLists[2][0].split('_')[1]+side+'_IK_handle')
        fingerIKhandle4 = cmds.ikHandle(sj=fingerIKJNTLists[3][0], ee=fingerIKJNTLists[3][-1], solver='ikSCsolver', n=fingerIKJNTLists[3][0].split('_')[1]+side+'_IK_handle')
        fingerIKhandle5 = cmds.ikHandle(sj=fingerIKJNTLists[4][0], ee=fingerIKJNTLists[4][-1], solver='ikSCsolver', n=fingerIKJNTLists[4][0].split('_')[1]+side+'_IK_handle')
        cmds.hide(fingerIKhandle1[0])
        cmds.hide(fingerIKhandle2[0])
        cmds.hide(fingerIKhandle3[0])
        cmds.hide(fingerIKhandle4[0])
        cmds.hide(fingerIKhandle5[0])
        # IK joint rotations feed the FK control offset groups, so IK
        # posing carries over to the FK controllers
        for i in range(0, 5):
            for j in range(len(fingerIKJNTLists[i])-1):
                cmds.connectAttr(fingerIKJNTLists[i][j]+'.rotate', cmds.listRelatives(fingerCtrlLists[i][j], p=True)[0]+'.rotate')
        fingerIKctrl1 = makeController(fingerIKJNTLists[0][-1], shape='cube', scale=0.6)
        controllerColor(fingerIKctrl1, 'red')
        fingerIKctrl2 = makeController(fingerIKJNTLists[1][-1], shape='cube', scale=0.6)
        controllerColor(fingerIKctrl2, 'red')
        fingerIKctrl3 = makeController(fingerIKJNTLists[2][-1], shape='cube', scale=0.6)
        controllerColor(fingerIKctrl3, 'red')
        fingerIKctrl4 = makeController(fingerIKJNTLists[3][-1], shape='cube', scale=0.6)
        controllerColor(fingerIKctrl4, 'red')
        fingerIKctrl5 = makeController(fingerIKJNTLists[4][-1], shape='cube', scale=0.6)
        controllerColor(fingerIKctrl5, 'red')
        cmds.parent(fingerIKhandle1[0], fingerIKctrl1)
        cmds.parent(fingerIKhandle2[0], fingerIKctrl2)
        cmds.parent(fingerIKhandle3[0], fingerIKctrl3)
        cmds.parent(fingerIKhandle4[0], fingerIKctrl4)
        cmds.parent(fingerIKhandle5[0], fingerIKctrl5)
        fingerIKctrlGRP = cmds.group(empty=True, w=True, n='finger_IK_ctrl_GRP')
        aligner(fingerIKctrlGRP, wristRootJNT, Const='parent')
        cmds.parent(getRoot(sel=fingerIKctrl1[0], nodType='transform'), fingerIKctrlGRP)
        cmds.parent(getRoot(sel=fingerIKctrl2[0], nodType='transform'), fingerIKctrlGRP)
        cmds.parent(getRoot(sel=fingerIKctrl3[0], nodType='transform'), fingerIKctrlGRP)
        cmds.parent(getRoot(sel=fingerIKctrl4[0], nodType='transform'), fingerIKctrlGRP)
        cmds.parent(getRoot(sel=fingerIKctrl5[0], nodType='transform'), fingerIKctrlGRP)
        cmds.parent(fingerIKctrlGRP, fingerCtrlGRP)
        cmds.parent(fingerCtrlGRP, sidectrlGRP)
    #++ make Twist SetUp ++#
    # upper-segment twist: a zero-pole-vector RP-IK chain isolates the
    # twist rotation, which drives the spline handle's .twist attribute
    twistJNT = cmds.duplicate(selecFirst, parentOnly=True, n=selectedName1+'_twist_joint')
    twistEndJNT = cmds.duplicate(selecSecond, parentOnly=True, n=selectedName1+'_twist_end_joint')
    twistRotJNT = cmds.duplicate(selecFirst, parentOnly=True, n=selectedName1+'_twist_rot_joint')
    cmds.parent(twistJNT, w=True)
    cmds.parent(twistEndJNT, twistJNT)
    upTwistJNTGRP = makeOwnGRP(twistJNT, style='GRP')
    upTwistikHandle = cmds.ikHandle(sj=twistJNT[0], ee=twistEndJNT[0], solver='ikRPsolver', n=twistJNT[0].replace('_joint', '_ik_handle'))
    upTwistikHandleGRP = makeOwnGRP(upTwistikHandle[0], style='GRP')
    cmds.setAttr(upTwistikHandle[0]+'.poleVectorX', 0)
    cmds.setAttr(upTwistikHandle[0]+'.poleVectorY', 0)
    cmds.setAttr(upTwistikHandle[0]+'.poleVectorZ', 0)
    cmds.pointConstraint(selectedIKFirst, upTwistJNTGRP, mo=True)
    cmds.pointConstraint(selectedFKFirst, upTwistJNTGRP, mo=True)
    cmds.pointConstraint(selectedIKMiddle, upTwistikHandleGRP, mo=True)
    cmds.pointConstraint(selectedFKMiddle, upTwistikHandleGRP, mo=True)
    cmds.parent(twistRotJNT, w=True)
    upTwistRotGRP = makeOwnGRP(twistRotJNT, style='GRP')
    cmds.parentConstraint(selectedIKFirst, upTwistRotGRP, mo=True)
    cmds.parentConstraint(selectedFKFirst, upTwistRotGRP, mo=True)
    cmds.orientConstraint(twistJNT, twistRotJNT, mo=True)
    uptwistMD = cmds.createNode('multiplyDivide', n=twistJNT[0]+'_MD')
    # mirror the twist direction per side
    if side == 'R':
        cmds.setAttr(uptwistMD+'.input2X', 1)
    else:
        cmds.setAttr(uptwistMD+'.input2X', -1)
    cmds.connectAttr(twistRotJNT[0]+'.rotateX', uptwistMD+'.input1X')
    cmds.connectAttr(uptwistMD+'.outputX', selectedName1+'_spik_handle.twist')
    #* Make Wrist Twist SetUp from here
    wristTwistJNT = cmds.duplicate(wristRootJNT, po=True, n=wristRootJNT.replace('_root_joint', '_twist_root_joint'))
    wristTwistRotJNT = cmds.duplicate(wristRootJNT, po=True, n=wristRootJNT.replace('_root_joint', '_rot_joint'))
    # NOTE(review): wristRootJNT ends in '_root_joint', so the
    # .replace('_end_joint', ...) in the n= flag below is a no-op and the
    # duplicate is requested with the original name -- Maya will uniquify
    # it; confirm the intended name was '<part>_twist_end_joint'
    wristTwistEndJNT = cmds.duplicate(wristRootJNT.replace('_root_joint', '_end_joint'), po=True, n=wristRootJNT.replace('_end_joint', '_twist_end_joint'))
    cmds.parent(wristTwistJNT, w=True)
    cmds.parent(wristTwistEndJNT, wristTwistJNT)
    lowTwistikHandle = cmds.ikHandle(sj=wristTwistJNT[0], ee=wristTwistEndJNT[0], solver='ikRPsolver', n=wristTwistJNT[0].replace('_root_joint', '_ik_handle'))
    cmds.setAttr(lowTwistikHandle[0]+'.poleVectorX', 0)
    cmds.setAttr(lowTwistikHandle[0]+'.poleVectorY', 0)
    cmds.setAttr(lowTwistikHandle[0]+'.poleVectorZ', 0)
    lowTwistikHandleGRP = makeOwnGRP(lowTwistikHandle[0], style='GRP')
    cmds.parentConstraint(wristIKsubCtrl, lowTwistikHandleGRP, mo=True)
    cmds.parentConstraint(wristFKctrl, lowTwistikHandleGRP, mo=True)
    wristTwistJNTGRP = makeOwnGRP(wristTwistJNT, style='GRP')
    wristTwistRotJNTGRP = makeOwnGRP(wristTwistRotJNT, style='GRP')
    cmds.pointConstraint(selectedIKLast, wristTwistJNTGRP, mo=True)
    cmds.pointConstraint(selectedFKLast, wristTwistJNTGRP, mo=True)
    cmds.parentConstraint(wristRootJNT, wristTwistRotJNTGRP, mo=True)
    cmds.orientConstraint(wristTwistJNT, wristTwistRotJNT, mo=True)
    lowtwistMD = cmds.createNode('multiplyDivide', n=wristTwistJNT[0]+'_MD')
    cmds.setAttr(lowtwistMD+'.input2X', -1)
    cmds.connectAttr(wristTwistRotJNT[0]+'.rotateX', lowtwistMD+'.input1X')
    cmds.connectAttr(lowtwistMD+'.outputX', selectedName2+'_spik_handle.twist')
    #* classify hierarchy and make lowArm fk controlls whole lowArmtwist System to prevent double transform
    upJNTsGRP = cmds.group(empty=True, w=True, n=selectedName1+'_joint_GRP')
    lowJNTsGRP = cmds.group(empty=True, w=True, n=selectedName2+'_joint_GRP')
    aligner(lowJNTsGRP, selectedFKMiddle, Const='parent')
    cmds.parent(upTwistJNTGRP, upJNTsGRP)
    cmds.parent(upTwistRotGRP, upJNTsGRP)
    cmds.parent(rollShoulderGRP, upJNTsGRP)
    cmds.parent(rollElbowGRP, lowJNTsGRP)
    cmds.parent(wristTwistJNTGRP, lowJNTsGRP)
    cmds.parent(wristRootGRP, lowJNTsGRP)
    cmds.parent(wristTwistRotJNTGRP, lowJNTsGRP)
    cmds.parent(upJNTsGRP, skinJointGRP)
    cmds.parent(lowJNTsGRP, skinJointGRP)
    cmds.parentConstraint(selectedIKMiddle, lowJNTsGRP, mo=True)
    cmds.parentConstraint(selectedFKMiddle, lowJNTsGRP, mo=True)
    #++ make ik fk Switcher ++#
    # one float attr (0=IK, 1=FK) plus a reverse node cross-fades every
    # dual IK/FK constraint weight and the controller-group visibilities;
    # the W0/W1 attribute names rely on Maya's default '<target>W<index>'
    # constraint-weight naming and the '_<type>Constraint1' node names
    ikfkSwitcher = makeController(selectedIKLast, shape='cube', addName='_FK_switch', scale=1.1)
    addCustomAttribute(ikfkSwitcher)
    cmds.addAttr(ikfkSwitcher, ln='IKFK', nn="IKFK", at="float", maxValue=1, minValue=0)
    cmds.setAttr(ikfkSwitcher[0]+'.IKFK', e=1, keyable=1)
    controllerColor(ikfkSwitcher, 'white')
    IKFKswitchREV = cmds.createNode('reverse', n='IKFKswitchREV')
    cmds.connectAttr('{0}.IKFK'.format(ikfkSwitcher[0]), '{0}.inputX'.format(IKFKswitchREV))
    #* ik - fk visibility
    cmds.connectAttr('{0}.outputX'.format(IKFKswitchREV), '{0}.visibility'.format(sideIKGRP))
    cmds.connectAttr('{0}.outputX'.format(IKFKswitchREV), '{0}.visibility'.format(selectedIKFirst))
    cmds.connectAttr('{0}.IKFK'.format(ikfkSwitcher[0]), '{0}.visibility'.format(sideFKGRP))
    cmds.connectAttr('{0}.IKFK'.format(ikfkSwitcher[0]), '{0}.visibility'.format(selectedFKFirst))
    #* ik connections
    cmds.connectAttr(IKFKswitchREV+'.outputX', ribbonCtrlPos1+'_parentConstraint1.'+selectedIKFirst+'W0')
    cmds.connectAttr(IKFKswitchREV+'.outputX', lockCtrlPos+'_parentConstraint1.'+selectedIKMiddle+'W0')
    cmds.connectAttr(IKFKswitchREV+'.outputX', ribbonCtrlPos5+'_parentConstraint1.'+selectedIKLast+'W0')
    cmds.connectAttr(IKFKswitchREV+'.outputX', rollElbowGRP+'_parentConstraint1.'+selectedIKMiddle+'W0')
    cmds.connectAttr(IKFKswitchREV+'.outputX', wristRootGRP+'_orientConstraint1.'+ikWristLoc[0]+'W0')
    cmds.connectAttr(IKFKswitchREV+'.outputX', wristRootGRP+'_pointConstraint1.'+selectedIKLast+'W0')
    cmds.connectAttr(IKFKswitchREV+'.outputX', lowJNTsGRP+'_parentConstraint1.'+selectedIKMiddle+'W0')
    cmds.connectAttr(IKFKswitchREV+'.outputX', wristTwistJNTGRP+'_pointConstraint1.'+selectedIKLast+'W0')
    cmds.connectAttr(IKFKswitchREV+'.outputX', lowTwistikHandleGRP+'_parentConstraint1.'+wristIKsubCtrl[0]+'W0')
    cmds.connectAttr(IKFKswitchREV+'.outputX', upTwistJNTGRP+'_pointConstraint1.'+selectedIKFirst+'W0')
    cmds.connectAttr(IKFKswitchREV+'.outputX', upTwistikHandleGRP+'_pointConstraint1.'+selectedIKMiddle+'W0')
    cmds.connectAttr(IKFKswitchREV+'.outputX', upTwistRotGRP+'_parentConstraint1.'+selectedIKFirst+'W0')
    #* fk connections
    cmds.connectAttr(ikfkSwitcher[0]+'.IKFK', rollElbowGRP+'_parentConstraint1.'+selectedFKMiddle+'W1')
    cmds.connectAttr(ikfkSwitcher[0]+'.IKFK', ribbonCtrlPos1+'_parentConstraint1.'+selectedFKFirst+'W1')
    cmds.connectAttr(ikfkSwitcher[0]+'.IKFK', lockCtrlPos+'_parentConstraint1.'+selectedFKMiddle+'W1')
    cmds.connectAttr(ikfkSwitcher[0]+'.IKFK', ribbonCtrlPos5+'_parentConstraint1.'+selectedFKLast+'W1')
    cmds.connectAttr(ikfkSwitcher[0]+'.IKFK', wristRootGRP+'_orientConstraint1.'+fkWristLoc[0]+'W1')
    cmds.connectAttr(ikfkSwitcher[0]+'.IKFK', wristRootGRP+'_pointConstraint1.'+selectedFKLast+'W1')
    cmds.connectAttr(ikfkSwitcher[0]+'.IKFK', lowJNTsGRP+'_parentConstraint1.'+selectedFKMiddle+'W1')
    cmds.connectAttr(ikfkSwitcher[0]+'.IKFK', wristTwistJNTGRP+'_pointConstraint1.'+selectedFKLast+'W1')
    cmds.connectAttr(ikfkSwitcher[0]+'.IKFK', lowTwistikHandleGRP+'_parentConstraint1.'+wristFKctrl[0]+'W1')
    cmds.connectAttr(ikfkSwitcher[0]+'.IKFK', upTwistJNTGRP+'_pointConstraint1.'+selectedFKFirst+'W1')
    cmds.connectAttr(ikfkSwitcher[0]+'.IKFK', upTwistikHandleGRP+'_pointConstraint1.'+selectedFKMiddle+'W1')
    cmds.connectAttr(ikfkSwitcher[0]+'.IKFK', upTwistRotGRP+'_parentConstraint1.'+selectedFKFirst+'W1')
    #++ IK stretch SetUp ++#
    # rest length comes from a temporary curve skinned through the three
    # IK joints; distance-vs-rest-length ratio drives scaleX, clamped by a
    # condition node so the limb never shrinks below rest length
    lenCRV = cmds.curve(d=1,
        p=[cmds.xform(selectedIKFirst, query=True, t=True, ws=True), cmds.xform(selectedIKMiddle, query=True, t=True, ws=True), cmds.xform(selectedIKLast, query=True, t=True, ws=True)],
        k=(0, 1, 2), n=selectedName1+'_arcLength_curve')
    cmds.select(clear=True)
    cmds.select(selectedIKFirst, r=True)
    cmds.select(selectedIKMiddle, selectedIKLast, add=True)
    cmds.select(lenCRV, add=True)
    arcLenSkinCluster = cmds.skinCluster(tsb=True, dr=4.0)
    cmds.skinPercent(arcLenSkinCluster[0], lenCRV+'.cv[0]', transformValue=[(selectedIKFirst, 1)])
    cmds.skinPercent(arcLenSkinCluster[0], lenCRV+'.cv[1]', transformValue=[(selectedIKMiddle, 1)])
    cmds.skinPercent(arcLenSkinCluster[0], lenCRV+'.cv[2]', transformValue=[(selectedIKLast, 1)])
    arcLen = cmds.arclen(lenCRV)
    distanceNode = cmds.createNode('distanceBetween', n=selectedIKFirst+'_DIST')
    cmds.connectAttr(shoulderIKCtrl[0]+'.worldMatrix', distanceNode+'.inMatrix1')
    cmds.connectAttr(wristIKsubCtrl[0]+'.worldMatrix', distanceNode+'.inMatrix2')
    distanceMD = cmds.createNode('multiplyDivide', n='stretchIK_Dist_MD')
    cmds.setAttr(distanceMD+'.operation', 2)
    cmds.connectAttr(distanceNode+'.distance', distanceMD+'.input1X')
    distanceCOND = cmds.createNode('condition', n='stretchIK_Dist_COND')
    cmds.setAttr(distanceCOND+'.operation', 2)
    cmds.connectAttr(distanceMD+'.outputX', distanceCOND+'.firstTerm')
    cmds.connectAttr(distanceMD+'.outputX', distanceCOND+'.colorIfTrueR')
    cmds.setAttr(distanceCOND+'.secondTerm', arcLen)
    cmds.setAttr(distanceCOND+'.colorIfFalseR', arcLen)
    distanceMD2 = cmds.createNode('multiplyDivide', n='stretchIK_Dist_MD2')
    cmds.setAttr(distanceMD2+'.operation', 2)
    cmds.setAttr(distanceMD2+'.input2X', arcLen)
    cmds.delete(lenCRV)
    #* Add ik stretch OnOff Attribute to ik wrist Controller
    addCustomAttribute(wristIKmainCtrl)
    cmds.addAttr(wristIKmainCtrl[0], ln='Stretch_On_Off', nn="Stretch_On_Off", at="float", maxValue=1, minValue=0, defaultValue=0)
    cmds.addAttr(wristIKmainCtrl[0], ln='Up_Stretch', nn="Up_Stretch", at="float", minValue=0, defaultValue=1)
    cmds.addAttr(wristIKmainCtrl[0], ln='Low_Stretch', nn="Low_Stretch", at="float", minValue=0, defaultValue=1)
    cmds.setAttr(wristIKmainCtrl[0]+'.Stretch_On_Off', e=1, keyable=1)
    cmds.setAttr(wristIKmainCtrl[0]+'.Up_Stretch', e=1, keyable=1)
    cmds.setAttr(wristIKmainCtrl[0]+'.Low_Stretch', e=1, keyable=1)
    stretchBLND = cmds.createNode('blendColors', n='stretchIK_BLND')
    cmds.connectAttr(distanceCOND+'.outColorR', stretchBLND+'.color1R')
    cmds.setAttr(stretchBLND+'.color2R', arcLen)
    cmds.connectAttr(stretchBLND+'.outputR', distanceMD2+'.input1X')
    cmds.connectAttr(wristIKmainCtrl[0]+'.Stretch_On_Off', stretchBLND+'.blender')
    stretchUpMD = cmds.createNode('multiplyDivide', n='stretch_IK_Up_MD')
    stretchLowMD = cmds.createNode('multiplyDivide', n='stretch_IK_Low_MD')
    cmds.setAttr(stretchUpMD+'.operation', 1)
    cmds.setAttr(stretchLowMD+'.operation', 1)
    cmds.connectAttr(wristIKmainCtrl[0]+'.Up_Stretch', stretchUpMD+'.input1X')
    cmds.connectAttr(wristIKmainCtrl[0]+'.Low_Stretch', stretchLowMD+'.input1X')
    cmds.connectAttr(distanceMD2+'.outputX', stretchUpMD+'.input2X')
    cmds.connectAttr(distanceMD2+'.outputX', stretchLowMD+'.input2X')
    #* Now Connects to joint's scaleX
    cmds.connectAttr(stretchUpMD+'.outputX', selectedIKFirst+'.scaleX')
    cmds.connectAttr(stretchLowMD+'.outputX', selectedIKMiddle+'.scaleX')
    #* Add poleVector Twist Attribute
    addCustomAttribute(wristIKmainCtrl)
    cmds.addAttr(wristIKmainCtrl[0], ln='Twist', nn="Twist", at="float", defaultValue=0)
    cmds.setAttr(wristIKmainCtrl[0]+'.Twist', e=1, keyable=1)
    cmds.connectAttr(wristIKmainCtrl[0]+'.Twist', rpIKHandle[0]+'.twist')
    #++ later classify ++#
    if cmds.objExists('ctrl_GRP') is False:
        ctrlGRP = cmds.group(empty=True, w=True, n='ctrl_GRP')
        cmds.parent(ctrlGRP, globalGRP)
    else:
        ctrlGRP = cmds.ls('ctrl_GRP')[0]
    bendRootGRP = cmds.group(empty=True, w=True, n=sideGRPName+'_bend_root_GRP')
    sidectrlGRP = cmds.group(empty=True, w=True, n=sideGRPName+'_ctrl_GRP')
    sideTwistIKhandleGRP = cmds.group(empty=True, w=True, n=sideGRPName+'_twist_ik_handle_GRP')
    if cmds.objExists('twist_ik_handle_GRP') is False:
        twistIKhandleGRP = cmds.group(empty=True, w=True, n='twist_ik_handle_GRP')
        cmds.parent(twistIKhandleGRP, extras)
    else:
        twistIKhandleGRP = cmds.ls('twist_ik_handle_GRP')[0]
    # NOTE(review): the block below is an exact duplicate of the
    # existence check just above (second pass is always the else branch);
    # looks like a copy/paste leftover -- confirm before removing
    if cmds.objExists('twist_ik_handle_GRP') is False:
        twistIKhandleGRP = cmds.group(empty=True, w=True, n='twist_ik_handle_GRP')
        cmds.parent(twistIKhandleGRP, extras)
    else:
        twistIKhandleGRP = cmds.ls('twist_ik_handle_GRP')[0]
    cmds.parent(sideGRPName+'_spik_GRP', 'spik_GRP')
    cmds.parent(lockGRP, lockCtrl)
    cmds.parent(ribbonCtrlPos5, ribbonCtrlPos1, bendRootGRP)
    cmds.parent(bendRootGRP, sideBendctrlGRP)
    cmds.parent(sideIKGRP, sideFKGRP, getRoot(nodType='transform', sel=ikfkSwitcher), sidectrlGRP)
    cmds.parent(sideBendctrlGRP, sidectrlGRP)
    cmds.parent(lockCtrlPos, sidectrlGRP)
    cmds.parent(sidectrlGRP, ctrlGRP)
    cmds.parent(upTwistikHandleGRP, sideTwistIKhandleGRP)
    cmds.parent(lowTwistikHandleGRP, sideTwistIKhandleGRP)
    cmds.parent(sideTwistIKhandleGRP, twistIKhandleGRP)
    #++ Do finger SetUp if finger exists ++#
    for i in cmds.listRelatives(wristRootJNT, children=True):
        if 'finger' == i.split('_')[0]:
            print('fingerGRP Found')
            fingerSetUp(side, sidectrlGRP)
def doLimbsSetUp():
    """Run the limb rig build on every joint currently selected in the
    scene, then hide the 'Extras' helper group."""
    for rootJoint in cmds.ls(sl=True):
        limbsSetUp(rootJoint)
    #++ hide Extras ++#
    cmds.hide('Extras')
def doSpineSetUp():
    """Build the torso rig in the current scene.

    Creates spline-IK handles for the spine and chest joint chains, the
    waist/hip/pelvis/chest controllers, hooks up twist attributes, groups
    everything, and finally forces all constraints to shortest-path
    interpolation.  Operates purely by node name ('waist_*', 'spine_*',
    'chest_*', 'hip_*'), so the expected skeleton must already exist.
    """
    #++ Spine SetUp ++#
    waistJNTs = cmds.ls('waist_*')
    cmds.hide(waistJNTs)
    # Swap the first two entries so the 'mid' joint sits at index 1.
    waistMidJNT = waistJNTs.pop(0)
    waistJNTs.insert(1, waistMidJNT)
    spineJNTs = cmds.ls('spine_*')
    # Skip the IK duplicates of the chest chain; move the root to the front.
    chestJNTs = [f for f in cmds.ls('chest_*') if not '_IK_' in f]
    chestRootJNT = chestJNTs.pop(1)
    chestJNTs.insert(0, chestRootJNT)
    hipJNTs = cmds.ls('hip_*')[::-1]
    # Spline IK along the spine; the curve is skinned to the waist joints so
    # the waist controllers drive the spine shape.
    spineIKhandle = cmds.ikHandle(sj=spineJNTs[0], ee=spineJNTs[-1], sol='ikSplineSolver', pcv=False, ns=4, n='spine_spik_handle')
    spineIKCRV = cmds.rename(spineIKhandle[-1], 'spine_spik_curve')
    cmds.skinCluster(waistJNTs, spineIKCRV, toSelectedBones=True, dropoffRate=4.0)
    # Controllers (all built on the xz plane, point-constrained to joints).
    waistMidctrl = makeController(selec=waistJNTs[1], pointConst=True, normalPlane='xz')
    waistRootctrl = makeController(selec=waistJNTs[0], pointConst=True, normalPlane='xz')
    chestLowctrl = makeController(selec=waistJNTs[2], newName='chest_Low', pointConst=True, normalPlane='xz')
    hipctrl = makeController(selec=waistJNTs[0], shape='cube', newName='hip', scale=7, pointConst=True, normalPlane='xz')
    pelvisctrl = makeController(selec=waistJNTs[0], newName='pelvis', scale=3, pointConst=True, normalPlane='xz')
    chestUpctrl = makeController(selec=chestJNTs[1], newName='chest_Up', pointConst=True, normalPlane='xz')
    waistIKctrl = makeController(selec=waistJNTs[1], shape='cross', addName='_IK', pointConst=True, scale=6, parent=True, normalPlane='xz')
    waistIKctrlGRP = getRoot(sel=waistIKctrl, nodType='transform')
    controllerColor(waistIKctrl, color='red')
    controllerColor(pelvisctrl, color='blue')
    controllerColor(hipctrl, color='yellow')
    controllerColor(waistRootctrl, color='default')
    controllerColor(waistMidctrl, color='default')
    controllerColor(chestLowctrl, color='default')
    controllerColor(chestUpctrl, color='default')
    # Parent the controller hierarchy: IK ctrl follows root+top waist joints.
    cmds.parentConstraint(waistJNTs[0], waistJNTs[2], waistIKctrlGRP, mo=True)
    cmds.parent(waistIKctrlGRP, waistMidctrl)
    cmds.parent(getRoot(sel=chestLowctrl, nodType='transform'), waistMidctrl)
    cmds.parent(getRoot(sel=waistMidctrl, nodType='transform'), waistRootctrl)
    cmds.parent(waistJNTs[2], chestLowctrl)
    cmds.parent(getRoot(sel=chestUpctrl, nodType='transform'), chestLowctrl)
    # Drive the waist root joint through an intermediate group so the
    # controller's translate/rotate connect directly (no constraint).
    waistRootJNT_GRP = cmds.group(empty=True, w=True, n=waistJNTs[0]+'_GRP')
    waistRootJNT_Pos = makeOwnGRP(waistRootJNT_GRP, style='Pos')
    aligner(waistRootJNT_Pos, waistJNTs[0], Const='point')
    cmds.connectAttr(waistRootctrl[0]+'.translate', waistRootJNT_GRP+'.translate')
    cmds.connectAttr(waistRootctrl[0]+'.rotate', waistRootJNT_GRP+'.rotate')
    cmds.parent(waistJNTs[0], waistRootJNT_GRP)
    cmds.parent(waistRootJNT_Pos, hipctrl)
    cmds.parentConstraint(hipctrl, hipJNTs[0], mo=True)
    #* Add hip rotation offset Attribute to hip controller
    addCustomAttribute(hipctrl)
    cmds.addAttr(hipctrl, ln='Rotation_OffSet', nn="Rotation_OffSet", at="float", defaultValue=0)
    cmds.setAttr(hipctrl[0]+'.Rotation_OffSet', e=1, keyable=1)
    # hip rotation * Rotation_OffSet feeds the waist root joint, so the
    # offset attribute scales how much the waist follows the hip.
    hipRotOffset_MD = cmds.createNode('multiplyDivide', n='hip_Rot_Offset_MD')
    cmds.connectAttr(hipctrl[0]+'.rotateX', hipRotOffset_MD+'.input1X')
    cmds.connectAttr(hipctrl[0]+'.rotateY', hipRotOffset_MD+'.input1Y')
    cmds.connectAttr(hipctrl[0]+'.rotateZ', hipRotOffset_MD+'.input1Z')
    cmds.connectAttr(hipctrl[0]+'.Rotation_OffSet', hipRotOffset_MD+'.input2X')
    cmds.connectAttr(hipctrl[0]+'.Rotation_OffSet', hipRotOffset_MD+'.input2Y')
    cmds.connectAttr(hipctrl[0]+'.Rotation_OffSet', hipRotOffset_MD+'.input2Z')
    cmds.connectAttr(hipRotOffset_MD+'.outputX', waistJNTs[0]+'.rotateX')
    cmds.connectAttr(hipRotOffset_MD+'.outputY', waistJNTs[0]+'.rotateY')
    cmds.connectAttr(hipRotOffset_MD+'.outputZ', waistJNTs[0]+'.rotateZ')
    #* chest SetUp
    chestIKhandle = cmds.ikHandle(sj=chestJNTs[0], ee=chestJNTs[-1], solver='ikSplineSolver', pcv=False, ns=4, n='chest_spik_handle')
    chestIKCRV = cmds.rename(chestIKhandle[-1], 'chest_spik_curve')
    chestIKJNTs = cmds.ls('chest_*_IK_joint')
    chestIKJNTs.reverse()
    cmds.skinCluster(chestIKJNTs, chestIKCRV, toSelectedBones=True, dropoffRate=4.0)
    #* more classify
    cmds.parent(getRoot(sel=waistRootctrl, nodType='transform'), pelvisctrl)
    cmds.parent(getRoot(sel=hipctrl, nodType='transform'), pelvisctrl)
    bodySpikGRP = cmds.group(empty=True, w=True, n='body_spik_GRP')
    cmds.parent(chestIKhandle[0], chestIKCRV, bodySpikGRP)
    cmds.parent(spineIKhandle[0], spineIKCRV, bodySpikGRP)
    # rotateOrder 1 == yzx on all torso controllers.
    cmds.setAttr(waistMidctrl[0]+'.rotateOrder', 1)
    cmds.setAttr(waistRootctrl[0]+'.rotateOrder', 1)
    cmds.setAttr(chestLowctrl[0]+'.rotateOrder', 1)
    cmds.setAttr(hipctrl[0]+'.rotateOrder', 1)
    cmds.setAttr(pelvisctrl[0]+'.rotateOrder', 1)
    cmds.setAttr(chestUpctrl[0]+'.rotateOrder', 1)
    cmds.setAttr(waistIKctrl[0]+'.rotateOrder', 1)
    cmds.parentConstraint(chestLowctrl, cmds.listRelatives(chestRootJNT, parent=True)[0], mo=True)
    cmds.parent(bodySpikGRP, 'Extras')
    #* waist Twist SetUp
    # Sum of (each controller's rotateY * its Twist_Offset) plus the hip's
    # rotateY drives the spine spline-IK twist.
    waistTwistPMA = cmds.createNode('plusMinusAverage', n='waistTwist_PMA')
    waistTwistMD = cmds.createNode('multiplyDivide', n='waistTwist_MD')
    hipTwistMD = cmds.createNode('multiplyDivide', n='hipTwist_MD')
    cmds.addAttr(waistRootctrl, ln='Twist_Offset', nn="Twist_Offset", at="float", minValue=0, defaultValue=1)
    cmds.setAttr(waistRootctrl[0]+'.Twist_Offset', e=1, keyable=1)
    cmds.addAttr(waistMidctrl, ln='Twist_Offset', nn="Twist_Offset", at="float", minValue=0, defaultValue=1)
    cmds.setAttr(waistMidctrl[0]+'.Twist_Offset', e=1, keyable=1)
    cmds.addAttr(chestLowctrl, ln='Twist_Offset', nn="Twist_Offset", at="float", minValue=0, defaultValue=1)
    cmds.setAttr(chestLowctrl[0]+'.Twist_Offset', e=1, keyable=1)
    cmds.connectAttr(waistRootctrl[0]+'.rotateY', waistTwistMD+'.input1X')
    cmds.connectAttr(waistMidctrl[0]+'.rotateY', waistTwistMD+'.input1Y')
    cmds.connectAttr(chestLowctrl[0]+'.rotateY', waistTwistMD+'.input1Z')
    cmds.connectAttr(hipctrl[0]+'.rotateY', hipTwistMD+'.input1X')
    cmds.connectAttr(waistRootctrl[0]+'.Twist_Offset', waistTwistMD+'.input2X')
    cmds.connectAttr(waistMidctrl[0]+'.Twist_Offset', waistTwistMD+'.input2Y')
    cmds.connectAttr(chestLowctrl[0]+'.Twist_Offset', waistTwistMD+'.input2Z')
    cmds.connectAttr(waistTwistMD+'.outputX', waistTwistPMA+'.input2D[0].input2Dx')
    cmds.connectAttr(waistTwistMD+'.outputY', waistTwistPMA+'.input2D[1].input2Dx')
    cmds.connectAttr(waistTwistMD+'.outputZ', waistTwistPMA+'.input2D[2].input2Dx')
    cmds.connectAttr(hipTwistMD+'.outputX', waistTwistPMA+'.input2D[3].input2Dx')
    cmds.connectAttr(waistTwistPMA+'.output2Dx', spineIKhandle[0]+'.twist')
    #* reverse Twist SetUp
    #++ set all constraints .interpType to 2
    # interpType 2 == 'shortest' — avoids orientation flips on blends.
    allOrientList = cmds.ls('*orientConstraint1')
    allParentList = cmds.ls('*parentConstraint1')
    for o in allOrientList:
        cmds.setAttr('{0}.interpType'.format(o), 2)
    for p in allParentList:
        cmds.setAttr('{0}.interpType'.format(p), 2)
    #++ progressBar ++#
|
21,790 | e534dc7a58e216a8bfbfd86330623bbf1f99ce68 | import threading
from wsgiref.simple_server import make_server
from leap.soledad.common.couch import CouchServerState
from leap.soledad.server import SoledadApp
from leap.soledad.server.gzip_middleware import GzipMiddleware
from leap.soledad.server.auth import SoledadTokenAuthMiddleware
from util import log
class SoledadServerThread(threading.Thread):
    """Run a WSGI server on a background thread.

    The wrapped server must expose ``serve_forever``, ``shutdown`` and
    ``server_port`` (the ``wsgiref`` servers do).
    """

    def __init__(self, server):
        super(SoledadServerThread, self).__init__()
        self._server = server

    def run(self):
        # Blocks until stop() is called from another thread.
        self._server.serve_forever()

    def stop(self):
        self._server.shutdown()

    @property
    def port(self):
        """TCP port the wrapped server is bound to."""
        return self._server.server_port
def make_soledad_server_thread(couch_port):
    """Build a SoledadServerThread serving a token-authenticated,
    gzip-compressed Soledad app backed by the local couch instance.

    The server binds to an ephemeral port (port 0); read the actual port
    back through the thread's ``port`` property.
    """
    couch_url = 'http://127.0.0.1:%d' % couch_port
    state = CouchServerState(couch_url, 'shared', 'tokens')
    app = SoledadApp(state)
    app = SoledadTokenAuthMiddleware(app)
    app = GzipMiddleware(app)
    return SoledadServerThread(make_server('', 0, app))
def get_soledad_server(couchdb_port):
    """Start a Soledad server thread on an ephemeral port and return it."""
    log("Starting soledad server... ", line_break=False)
    thread = make_soledad_server_thread(couchdb_port)
    thread.start()
    log("soledad server started on port %d." % thread.port)
    return thread
|
21,791 | 7e5f5ff5a28efe59f4ac8b7c4354a542c7931635 | import pygame
from pygame.locals import *
from pygame.color import *
import os, sys
sys.path.insert(0, os.path.join(os.getcwd(), "pymunk-4.0.0"))
import pymunk
from pymunk import Vec2d
import math
from sounds import *
####################################
# Common sharing helper functions
####################################
def rotateImage(p0, p1, image):
    """Return *image* oriented to face from p0 towards p1 (screen coords,
    y grows downwards); the sprite is mirrored when aiming rightwards."""
    x0, y0 = p0
    x1, y1 = p1
    if x1 < x0:
        # Target lies to the left: rotate only.
        angle = math.atan((y1 - y0) / (x0 - x1))
    elif x1 > x0:
        # Target lies to the right: mirror horizontally, then rotate.
        angle = -math.atan((y0 - y1) / (x0 - x1))
        image = pygame.transform.flip(image, True, False)
    else:
        # Vertical aim: +/- 90 degrees, or no rotation when p0 == p1.
        if y1 < y0:
            angle = -1 / 2 * math.pi
        elif y1 > y0:
            angle = 1 / 2 * math.pi
        else:
            angle = 0
    return pygame.transform.rotate(image, radian2degree(angle))
def radian2degree(angle):
    """Convert *angle* from radians to degrees."""
    half_turn_degrees = 180
    return angle / math.pi * half_turn_degrees
def getDistance(x1, y1, x0, y0):
    """Euclidean distance between the points (x1, y1) and (x0, y0)."""
    dx = x1 - x0
    dy = y1 - y0
    return (dx ** 2 + dy ** 2) ** 0.5
def getPygameCoords(coordinate, data):
    """Map a pymunk world position (y up) to integer pygame screen
    coordinates (y down, origin top-left)."""
    screen_x = int(coordinate.x)
    screen_y = int(data.height - coordinate.y)
    return screen_x, screen_y
def drawTemp(screen, x, y, image):
    """Blit *image* onto *screen* centered on the point (x, y)."""
    top_left = (x - image.get_width() / 2, y - image.get_height() / 2)
    screen.blit(image, top_left)
def getImpulse(distance, data):
    """Scale the base launch vector by the sling pull *distance*, capping
    the pull at data.launchRadius; returns a Vec2d along +x."""
    capped = min(distance, data.launchRadius)
    base = Vec2d(100, 0)  # purely horizontal base direction
    impulse = capped * base
    impulse[0] = round(impulse[0], 10)
    return impulse
def getLaunchAngle(data):
    """Angle (radians, rounded to 10 places) of the sling pull from the
    drag start (x0, y0) to the current point (x1, y1); screen y grows down."""
    x0, y0 = data.x0, data.y0
    x1, y1 = data.x1, data.y1
    if x1 < x0:
        angle = math.atan((y1 - y0) / (x0 - x1))
    elif x1 > x0:
        angle = math.pi - math.atan((y0 - y1) / (x0 - x1))
    elif y1 < y0:
        angle = -1 / 2 * math.pi
    elif y1 > y0:
        angle = 1 / 2 * math.pi
    else:
        angle = 0
    return round(angle, 10)
def existObstacles(data, obj1, obj2):
    """True when any brick or stone blocks the straight segment between the
    two objects (each may carry a pymunk body or plain x/y attributes)."""
    return (existRectObstacles(data, obj1, obj2)
            or existCircleObstacles(data, obj1, obj2))
def existRectObstacles(data, obj1, obj2):
    """Return the first truthy segment/brick intersection between obj1 and
    obj2, else False.

    Each endpoint object either carries a physics ``body`` (birds, pigs,
    stones) or plain ``x``/``y`` attributes (bombs, wormholes).
    """
    # Narrowed from a bare `except:` — only the missing-attribute case is
    # expected here; anything else should surface.
    try:
        p1 = obj1.body.position
    except AttributeError:
        p1 = (obj1.x, obj1.y)
    try:
        p2 = obj2.body.position
    except AttributeError:
        p2 = (obj2.x, obj2.y)
    for brick in data.bricks:
        hit = rectBetweenPoints(p1, p2, brick.getBounds())
        if hit:
            return hit
    return False
def existCircleObstacles(data, obj1, obj2):
    """Return True when any stone's circle intersects the segment between
    obj1 and obj2, else False (endpoint conventions as in
    existRectObstacles)."""
    # Narrowed from a bare `except:` — only the missing-attribute case is
    # expected here; anything else should surface.
    try:
        p1 = obj1.body.position
    except AttributeError:
        p1 = (obj1.x, obj1.y)
    try:
        p2 = obj2.body.position
    except AttributeError:
        p2 = (obj2.x, obj2.y)
    for target in data.objects:
        # Deliberate exact type check kept: subclasses are not stones.
        if type(target) == Stone:
            cx, cy = target.body.position
            hit = circleBetweenPoints(p1, p2, (cx, cy, target.radius))
            if hit:
                return hit
    return False
def rectBetweenPoints(p1, p2, rect):
    """True when the segment p1-p2 crosses either diagonal of *rect*
    (left, top, right, bot) — a cheap segment/AABB overlap test."""
    left, top, right, bot = rect
    segment = (p1, p2)
    falling_diag = ((left, top), (right, bot))
    rising_diag = ((left, bot), (right, top))
    return (linesIntersect(segment, falling_diag)
            or linesIntersect(segment, rising_diag))
def circleBetweenPoints(p1, p2, circle):
    """True when the line through p1 and p2 passes within the circle's
    radius of its center; *circle* is the tuple (cx, cy, radius)."""
    cx, cy, radius = circle
    gap = solvePerpendicular((cx, cy), (p1, p2))
    return radius >= gap
def solvePerpendicular(point, line):
    """Length of the perpendicular dropped from *point* onto *line*.

    Builds the perpendicular through the point, intersects it with the
    line's function, and measures the distance to that foot point.  A
    horizontal line gets a tiny artificial slope so that the perpendicular
    slope -1/m stays finite.
    """
    x, y = point
    slope, intercept = lineToFunction(line)
    if slope == 0:
        slope = 10 ** (-10)
    perp_slope = -1 / slope
    perp_intercept = y - perp_slope * x
    foot_x = (perp_intercept - intercept) / (slope - perp_slope)
    foot_y = perp_slope * foot_x + perp_intercept
    return getDistance(x, y, foot_x, foot_y)
def linesIntersect(line1, line2):
    """True when the two segments' supporting lines cross at an x inside
    both segments' x-ranges; parallel lines never count as intersecting.

    Note: only the x coordinate is range-checked, which is how the rest of
    the collision helpers rely on this test.
    """
    slope1, intercept1 = lineToFunction(line1)
    slope2, intercept2 = lineToFunction(line2)
    if slope1 == slope2:
        return False
    x = (intercept2 - intercept1) / (slope1 - slope2)
    (ax, _), (bx, _) = line1
    (cx, _), (dx, _) = line2
    in_first = min(ax, bx) <= x <= max(ax, bx)
    in_second = min(cx, dx) <= x <= max(cx, dx)
    return in_first and in_second
def lineToFunction(line):
    """Return (slope, intercept) of the line through the two points in
    *line*; raises ZeroDivisionError for vertical lines (callers avoid
    passing them)."""
    (x1, y1), (x2, y2) = line
    slope = (y1 - y2) / (x1 - x2)
    intercept = y1 - slope * x1
    return slope, intercept
####################################
# Game Class Definition
####################################
class Bird(object):
    """Base projectile: a dynamic pymunk circle launched from the sling.

    Subclasses assign ``self.image`` and ``self.cost``; ``self.name`` is
    set externally (e.g. "bird<N>") and used to look up the recorded
    flight path in ``data.paths`` when drawing.
    """
    def __init__(self, x, y, impulse, angle, data):
        # this __init__ function is inspired by:
        # http://www.pymunk.org/en/latest/pymunk.html#pymunk.Body
        # http://www.pymunk.org/en/latest/pymunk.html#pymunk.Circle
        # http://github.com/estevaofon/angry-birds-python/blob/master
        # /src/characters.py
        self.mass = 5
        self.radius = 16
        self.moment = pymunk.moment_for_circle(self.mass, 0, self.radius, (0,0))
        self.body = pymunk.Body(self.mass, self.moment)
        # (x, y) arrives in pygame screen coords; flip y into pymunk space.
        self.body.position = x, data.height - y
        self.angle = angle
        # Rotate the sling impulse to the launch angle and fire immediately.
        self.impulse = impulse.rotated(self.angle)
        self.body.apply_impulse(self.impulse)
        self.shape = pymunk.Circle(self.body, self.radius, (0, 0))
        self.shape.elasticity = 0.95
        self.currImpulse = self.impulse  # running impulse, updated by rebound()
        # Vanish-animation state machine (advanced by draw()).
        self.disappearing = False
        self.disappeared = False
        self.disappearTime = 100
        data.space.add(self.body, self.shape)
        # One-shot sound latches.
        self.fallPlayed = False
        self.flyPlayed = False
    def rebound(self, data):
        """Hand-rolled bounce against every brick: flips the running impulse
        component-wise (damped by data.coeff) depending on which side or
        corner of the brick's bounds the bird is touching."""
        x, y = self.body.position
        r = self.radius
        for brick in data.bricks:
            left, top, right, bot = brick.getBounds()
            # collide at 8 different regions:
            # collide at left or right
            if (left - r <= x <= left and bot <= y <= top) or \
               (right <= x <= right + r and bot <= y <= top):
                self.currImpulse = Vec2d(-data.coeff * self.currImpulse[0],
                                         data.coeff * self.currImpulse[1])
                self.body.apply_impulse(self.currImpulse)
            # collide at top or bottom
            if (left <= x <= right and top <= y <= top + r) or \
               (left <= x <= right and bot - r <= y <= bot):
                self.currImpulse = Vec2d(data.coeff * self.currImpulse[0],
                                         -data.coeff * self.currImpulse[1])
                self.body.apply_impulse(self.currImpulse)
            # collide at top-left corner
            if x <= left and y >= top:
                distance = getDistance(x, y, left, top)
                if distance <= r:
                    self.currImpulse = Vec2d(-data.coeff * self.currImpulse[0],
                                             -data.coeff * self.currImpulse[1])
                    self.body.apply_impulse(self.currImpulse)
            # collide at top-right corner
            if x >= right and y >= top:
                distance = getDistance(x, y, right, top)
                if distance <= r:
                    self.currImpulse = Vec2d(-data.coeff * self.currImpulse[0],
                                             -data.coeff * self.currImpulse[1])
                    self.body.apply_impulse(self.currImpulse)
            # collide at left-bot corner
            if x <= left and y <= bot:
                distance = getDistance(x, y, left, bot)
                if distance <= r:
                    self.currImpulse = Vec2d(-data.coeff * self.currImpulse[0],
                                             -data.coeff * self.currImpulse[1])
                    self.body.apply_impulse(self.currImpulse)
            # collide at right-bot corner
            if x >= right and y <= bot:
                distance = getDistance(x, y, right, bot)
                if distance <= r:
                    self.currImpulse = Vec2d(-data.coeff * self.currImpulse[0],
                                             -data.coeff * self.currImpulse[1])
                    self.body.apply_impulse(self.currImpulse)
    def draw(self, screen, data):
        """Draw the bird: the three-phase vanish animation once disappearing,
        otherwise the sprite rotated to face along its recorded path (or the
        current drag points while the path is still too short)."""
        phase1, phase2, phase3, step = 100, 70, 40, 20
        screenCoord = getPygameCoords(self.body.position, data)
        x = screenCoord[0] - self.image.get_width()/2
        y = screenCoord[1] - self.image.get_height()/2
        if self.disappearing:
            if self.disappearTime > 0:
                self.disappearTime -= step
                if phase2 <= self.disappearTime < phase1:
                    screen.blit(data.phase1, (x, y))
                if phase3 <= self.disappearTime < phase2:
                    screen.blit(data.phase2, (x, y))
                if 0 <= self.disappearTime < phase3:
                    screen.blit(data.phase3, (x, y))
            if self.disappearTime <= 0:
                self.disappeared = True
        else:
            if self.name in data.paths:
                path = data.paths[self.name]
                if len(path) > 1:
                    # Orient along the last two recorded path points; mirror
                    # the sprite when flying rightwards.
                    x1, y1 = path[-1]
                    x2, y2 = path[-2]
                    if x2 < x1:
                        angle = math.atan((y2-y1)/(x2-x1))
                        image = self.image
                    elif x1 < x2:
                        angle = math.atan((y2-y1)/(x2-x1))
                        image = pygame.transform.flip(self.image, True, False)
                    else:
                        if y2 < y1: angle = -1/2 * math.pi
                        elif y2 > y1: angle = 1/2 * math.pi
                        else: angle = 0
                        image = self.image
                    angle = radian2degree(angle)
                    image = pygame.transform.rotate(image, angle)
                    screen.blit(image, (x, y))
                else:
                    # Path not long enough yet: orient from the drag points.
                    p0 = (data.x0, data.y0)
                    p1 = (data.x1, data.y1)
                    image = rotateImage(p0, p1, self.image)
                    screen.blit(image, (x, y))
    def sounds(self):
        """Play the launch sound once, and the fall sound once on vanish."""
        if self.flyPlayed == False:
            playSounds("sounds/fly.ogg")
            self.flyPlayed = True
        if self.disappearing:
            if self.fallPlayed == False:
                playSounds("sounds/fall.ogg")
                self.fallPlayed = True
class RedBird(Bird):
    """The basic bird: no special ability, cheapest real projectile."""

    def __init__(self, x, y, impulse, angle, data, image):
        super().__init__(x, y, impulse, angle, data)
        self.cost = 2
        self.image = image
class YellowBird(Bird):
    """Bird that can boost once mid-flight."""

    def __init__(self, x, y, impulse, angle, data, image):
        super().__init__(x, y, impulse, angle, data)
        self.image = image
        self.cost = 3
        self.initImpulse = impulse  # unrotated sling impulse, reused by boost
        self.accelerated = False

    def accelerate(self, accAngle):
        """Apply a one-time speed boost along the mirrored *accAngle*."""
        if self.accelerated:
            return
        boost_ratio = 1.5
        self.impulse *= boost_ratio
        self.accAngle = math.pi - accAngle
        self.body.apply_impulse(self.initImpulse.rotated(self.accAngle))
        self.accelerated = True
class BlueBird(Bird):
    """Bird that can split once into three: itself plus two angled copies."""
    def __init__(self, x, y, impulse, angle, data, image):
        super().__init__(x, y, impulse, angle, data)
        self.initImpulse = impulse  # unrotated impulse, reused for the copies
        self.image = image
        self.tripled = False
        self.cost = 5
    def scatterShot(self, angle, data):
        """Spawn two BlueBirdCopy clones fanned +/- pi/18 around the current
        launch *angle* at the bird's position; may fire only once."""
        if self.tripled == False:
            units = 18
            x, y = self.body.position
            upperAngle = math.pi - (angle + math.pi/units)
            lowerAngle = math.pi - (angle - math.pi/units)
            upperBird = BlueBirdCopy(x, y, self.initImpulse, upperAngle, data,
                                     self.image)
            # Name the copies after the parent so path bookkeeping stays unique.
            birdID = "bird" + str(data.birdID) + "upperCopy"
            upperBird.name = birdID
            data.objects.append(upperBird)
            lowerBird = BlueBirdCopy(x, y, self.initImpulse, lowerAngle, data,
                                     self.image)
            birdID = "bird" + str(data.birdID) + "lowerCopy"
            lowerBird.name = birdID
            data.objects.append(lowerBird)
            self.tripled = True
class BlueBirdCopy(BlueBird):
    """Silent, free clone spawned by BlueBird.scatterShot; never splits."""

    def __init__(self, x, y, impulse, angle, data, image):
        super().__init__(x, y, impulse, angle, data, image)
        # Bird.__init__ flips y from screen into world space, but scatterShot
        # already passes a world-space position — undo that flip here.
        self.body.position.y = data.height - self.body.position.y
        self.tripled = True  # a copy must not scatter again
        self.cost = 0

    def sounds(self):
        """Copies stay silent; only the original bird plays its sounds."""
        pass
class GreenBird(Bird):
    """Boomerang bird: can turn back towards the launch point once."""

    def __init__(self, x, y, impulse, angle, data, image):
        super().__init__(x, y, impulse, angle, data)
        self.image = image
        self.cost = 8
        self.callBacked = False

    def callBack(self, data):
        """One-time impulse pulling the bird back towards the sling."""
        if self.callBacked:
            return
        cap = 20
        x, y = self.body.position
        dx = data.launchCx - x
        dy = data.launchCy - y
        # Pull strength grows with distance to the sling, capped at `cap`.
        strength = getDistance(data.launchCx, data.launchCy, x, y)
        if strength > cap:
            strength = cap
        self.currImpulse = Vec2d(dx, dy) * strength - self.currImpulse
        self.body.apply_impulse(self.currImpulse)
        self.callBacked = True
class WhiteBird(Bird):
    """Bomb bird: can explode once, shoving every visible pig away, then
    vanishes through a four-phase animation."""
    def __init__(self, x, y, impulse, angle, data, image):
        super().__init__(x, y, impulse, angle, data)
        self.image = image
        self.cost = 15
        self.exploded = False
    def explode(self, data):
        """Push every pig within explosionRadius (and not hidden behind an
        obstacle) away from the bird, then start disappearing."""
        self.explosionRadius = 300
        x0, y0 = self.body.position
        units = 50  # divisor scaling the per-unit push vector
        if self.exploded == False:
            for target in data.objects:
                if isinstance(target, Pig):
                    x1, y1 = target.body.position
                    distance = getDistance(x0, y0, x1, y1)
                    if (distance <= self.explosionRadius) and \
                       (not existObstacles(data, self, target)):
                        # Push strength falls off linearly with distance.
                        coeff = (self.explosionRadius - distance)
                        impulseUnit = Vec2d((x1 - x0)/units, (y1 - y0)/units)
                        impulse = impulseUnit * coeff
                        target.body.apply_impulse(impulse)
            playSounds("sounds/tnt.ogg")
            self.exploded = True
            self.disappearing = True
    def draw(self, screen, data):
        """Like Bird.draw, but the vanish animation has an extra leading
        phase0 frame (the explosion flash)."""
        phase0, phase1, phase2, phase3, step = 100, 80, 60, 40, 20
        screenCoord = getPygameCoords(self.body.position, data)
        x = screenCoord[0] - self.image.get_width()/2
        y = screenCoord[1] - self.image.get_height()/2
        if self.disappearing:
            if self.disappearTime > 0:
                self.disappearTime -= step
                if phase1 <= self.disappearTime < phase0:
                    screen.blit(data.phase0, (x, y))
                if phase2 <= self.disappearTime < phase1:
                    screen.blit(data.phase1, (x, y))
                if phase3 <= self.disappearTime < phase2:
                    screen.blit(data.phase2, (x, y))
                if 0 <= self.disappearTime < phase3:
                    screen.blit(data.phase3, (x, y))
            if self.disappearTime <= 0:
                self.disappeared = True
        else:
            super().draw(screen, data)
class Pig(object):
    """Target: a dynamic pymunk circle worth ``pts`` points when destroyed."""

    def __init__(self, x, y, radius, image, data):
        self.image = image
        self.mass = 1
        self.radius = radius
        self.moment = pymunk.moment_for_circle(self.mass, 0, self.radius, (0,0))
        self.body = pymunk.Body(self.mass, self.moment)
        self.body.position = x, y
        self.shape = pymunk.Circle(self.body, self.radius, (0, 0))
        self.shape.elasticity = 0.95
        self.pts = 15
        # Vanish-animation state machine (advanced by draw()).
        self.disappearing = False
        self.disappeared = False
        self.disappearTime = 100
        data.space.add(self.body, self.shape)
        self.fallPlayed = False  # one-shot sound latch

    def draw(self, screen, data):
        """Blit the pig, or its three-phase vanish animation once it is
        disappearing; sets ``disappeared`` when the animation finishes."""
        phase1, phase2, phase3, step = 100, 70, 40, 20
        cx, cy = getPygameCoords(self.body.position, data)
        x = cx - self.image.get_width() / 2
        y = cy - self.image.get_height() / 2
        if not self.disappearing:
            screen.blit(self.image, (x, y))
            return
        if self.disappearTime > 0:
            self.disappearTime -= step
            if phase2 <= self.disappearTime < phase1:
                screen.blit(data.phase1, (x, y))
            if phase3 <= self.disappearTime < phase2:
                screen.blit(data.phase2, (x, y))
            if 0 <= self.disappearTime < phase3:
                screen.blit(data.phase3, (x, y))
        if self.disappearTime <= 0:
            self.disappeared = True

    def sounds(self):
        """Play the fall sound exactly once, when the pig starts vanishing."""
        if self.disappearing and not self.fallPlayed:
            playSounds("sounds/fall.ogg")
            self.fallPlayed = True
class Stone(object):
    """Heavy rolling hazard: crushes the first pig it touches, then both
    vanish through the standard three-phase animation."""
    def __init__(self, x, y, image, data):
        self.image = image
        self.mass = 50
        self.radius = 18
        self.moment = pymunk.moment_for_circle(self.mass, 0, self.radius, (0,0))
        self.body = pymunk.Body(self.mass, self.moment)
        self.body.position = x, y
        self.shape = pymunk.Circle(self.body, self.radius, (0, 0))
        self.shape.elasticity = 0.95
        self.crushed = False
        # Vanish-animation state machine (advanced by draw()).
        self.disappearing = False
        self.disappeared = False
        self.disappearTime = 100
        data.space.add(self.body, self.shape)
        self.fallPlayed = False  # one-shot sound latch
    def crush(self, data):
        """Mark the first overlapping pig (and this stone) as disappearing;
        runs at most once per stone."""
        if not self.crushed:
            x0, y0 = self.body.position
            for target in data.objects:
                if isinstance(target, Pig):
                    x1, y1 = target.body.position
                    distance = getDistance(x0, y0, x1, y1)
                    threshold = self.radius + target.radius
                    if distance <= threshold:
                        target.disappearing = True
                        self.crushed = True
                        self.disappearing = True
                        return
    def draw(self, screen, data):
        """Blit the stone, or its three-phase vanish animation once it is
        disappearing; sets ``disappeared`` when the animation finishes."""
        phase1, phase2, phase3, step = 100, 70, 40, 20
        screenCoord = getPygameCoords(self.body.position, data)
        x = screenCoord[0] - self.image.get_width()/2
        y = screenCoord[1] - self.image.get_height()/2
        if self.disappearing:
            if self.disappearTime > 0:
                self.disappearTime -= step
                if phase2 <= self.disappearTime < phase1:
                    screen.blit(data.phase1, (x, y))
                if phase3 <= self.disappearTime < phase2:
                    screen.blit(data.phase2, (x, y))
                if 0 <= self.disappearTime < phase3:
                    screen.blit(data.phase3, (x, y))
            if self.disappearTime <= 0:
                self.disappeared = True
        else:
            screen.blit(self.image, (x, y))
    def sounds(self):
        """Play the fall sound exactly once, when the stone starts vanishing."""
        if self.disappearing:
            if self.fallPlayed == False:
                playSounds("sounds/fall.ogg")
                self.fallPlayed = True
class Brick(object):
    """Static horizontal obstacle: a pymunk segment of thickness ``height``
    centered on (x, y) and spanning ``width`` units."""

    def __init__(self, x, y, width, height, image, data):
        self.image = image
        self.body = pymunk.Body()  # no mass/moment arguments: a static body
        self.body.position = (x, y)
        # Endpoint offsets relative to the body center.
        self.left = -width // 2
        self.right = width // 2
        self.height = height
        self.brick = pymunk.Segment(self.body, (self.left, 0),
                                    (self.right, 0), self.height)
        data.space.add(self.brick)

    def getBounds(self):
        """Axis-aligned bounds in world space as (left, top, right, bot)."""
        cx, cy = self.body.position
        return (cx + self.left, cy + self.height,
                cx + self.right, cy - self.height)

    def draw(self, screen, data):
        """Blit the brick sprite centered on the body position."""
        sx, sy = getPygameCoords(self.body.position, data)
        screen.blit(self.image, (sx - self.image.get_width() / 2,
                                 sy - self.image.get_height() / 2))
class vBrick(Brick):
    """Vertical variant of Brick: a segment of thickness ``width`` spanning
    ``height`` units, centered on (x, y)."""

    def __init__(self, x, y, width, height, image, data):
        self.image = image
        self.body = pymunk.Body()  # static body, as in Brick
        self.body.position = (x, y)
        # Endpoint offsets relative to the body center.
        self.top = height // 2
        self.bot = -height // 2
        self.width = width
        self.brick = pymunk.Segment(self.body, (0, self.top), (0, self.bot),
                                    self.width)
        data.space.add(self.brick)

    def getBounds(self):
        """Axis-aligned bounds in world space as (left, top, right, bot)."""
        cx, cy = self.body.position
        return (cx - self.width, cy + self.top,
                cx + self.width, cy + self.bot)
class Seesaw(object):
    """Tilting plank: a dynamic segment pinned at its center, with a slide
    joint limiting how far its left side can swing."""
    # this classed is created by following the pymunk official tutorial:
    # http://www.pymunk.org/en/latest/pymunk.html#pymunk.Segment
    # http://www.pymunk.org/en/latest/tutorials/SlideAndPinJoint.html
    def __init__(self, x, y, width, height, image, data):
        self.mass = 10
        self.moment = 100000
        self.fence = 20
        self.width = width
        self.height = height
        self.limit = 25     # max length of the slide joint
        self.distance = 100  # anchor offset to the left of center
        self.centerBody = pymunk.Body()
        self.centerBody.position = (x, y)
        self.limitBody = pymunk.Body()
        self.limitBody.position = (x - self.distance, y)
        self.mainBody = pymunk.Body(self.mass, self.moment)
        self.mainBody.position = (x, y)
        self.seesaw = pymunk.Segment(self.mainBody, (-self.width//2, 0),
                                     (self.width//2, 0), self.height)
        # NOTE(review): the two attributes below are rebound from the anchor
        # Bodies created above to the joints that use them, so the original
        # Body references are dropped after construction — confirm intended.
        self.centerBody = pymunk.PinJoint(self.mainBody, self.centerBody,
                                          (0, 0), (0, 0))
        self.limitBody = pymunk.SlideJoint(self.mainBody, self.limitBody,
                                           (-self.distance, 0), (0, 0), 0, self.limit)
        data.space.add(self.seesaw, self.mainBody,
                       self.centerBody, self.limitBody)
        self.image = image
    def draw(self, screen, data):
        """Blit the plank sprite rotated to the segment body's current angle,
        centered on the body position."""
        angle = radian2degree(self.seesaw.body.angle)
        image = pygame.transform.rotate(self.image, angle)
        x, y = getPygameCoords(self.seesaw.body.position, data)
        x -= image.get_width()//2
        y -= image.get_height()//2
        screen.blit(image, (x, y))
class Wormhole(object):
    """Teleporter: when the active bird touches this hole it is moved to the
    linked partner hole.  May also patrol horizontally and/or vertically."""
    def __init__(self, x, y, image, data, hScope=0, vScope=0):
        self.x = x
        self.y = y
        self.dx = 5
        self.dy = 5
        self.image = image
        self.radius = 30
        # Patrol range on each axis; 0 disables movement along that axis.
        self.hScope = hScope
        self.vScope = vScope
        self.leftBound = self.x - hScope
        self.rightBound = self.x + hScope
        self.lowerBound = self.y - vScope
        self.upperBound = self.y + vScope
        # True while a just-teleported bird is still overlapping this hole,
        # which suppresses an immediate bounce-back teleport.
        self.ejecting = False
        self.soundsPlayed = False
    def move(self):
        """Patrol between the bounds, bouncing at the edges."""
        if self.hScope != 0:
            if self.x <= self.leftBound\
                    or self.x >= self.rightBound:
                self.dx *= -1
            self.x += self.dx
        if self.vScope != 0:
            if self.y <= self.lowerBound\
                    or self.y >= self.upperBound:
                self.dy *= -1
            self.y += self.dy
    def link(self, other):
        """Pair this hole with *other*; each becomes the other's exit port."""
        self.port = other
        other.port = self
    def detect(self, data):
        """Teleport the currently active bird when it touches this hole;
        clear the exit latch once the bird has moved clear again."""
        x0, y0 = self.x, self.y
        for target in data.objects:
            if isinstance(target, Bird):
                if target.name == "bird" + str(data.birdID):
                    x1, y1 = target.body.position
                    distance = getDistance(x0, y0, x1, y1)
                    threshold = self.radius + target.radius
                    if not self.ejecting:
                        if distance <= threshold:
                            # Latch the partner so it doesn't teleport back.
                            self.port.ejecting = True
                            self.transfer(target, data)
                            self.soundsPlayed = False
                    else:
                        if distance > threshold:
                            self.ejecting = False
    def transfer(self, target, data):
        """Move *target* to the linked hole's position, playing the sound
        once per teleport."""
        exit = self.port
        target.body.position = exit.x, exit.y
        if self.soundsPlayed == False:
            playSounds("sounds/wormhole.ogg")
            self.soundsPlayed = True
    def draw(self, screen, data):
        """Blit the sprite centered on (x, y) in screen coordinates."""
        x, y = self.x, data.height - self.y
        x -= self.image.get_width()//2
        y -= self.image.get_height()//2
        screen.blit(self.image, (x, y))
class Blackhole(object):
    """Hazard that, while active, removes every object within
    ``affectRadius``; toggles on/off on a timer and may patrol."""

    def __init__(self, x, y, affectRadius, image, hScope=0, vScope=0):
        self.x = x
        self.y = y
        self.dx = 5
        self.dy = 5
        self.radius = 20
        self.affectRadius = affectRadius
        self.image = image
        self.activated = True
        self.timeCountDown = 100
        # Patrol range on each axis; 0 disables movement along that axis.
        self.hScope = hScope
        self.vScope = vScope
        self.leftBound = self.x - hScope
        self.rightBound = self.x + hScope
        self.lowerBound = self.y - vScope
        self.upperBound = self.y + vScope

    def attract(self, data):
        """Queue every object within affectRadius for removal."""
        if not self.activated:
            return
        for target in data.objects:
            tx, ty = target.body.position
            if getDistance(self.x, self.y, tx, ty) <= self.affectRadius:
                data.objectsToRemove.append(target)

    def activationCountDown(self):
        """Tick the timer; flip active/inactive and reset when it runs out."""
        step = 2
        reset = 100
        if self.timeCountDown != 0:
            self.timeCountDown -= step
        if self.timeCountDown <= 0:
            self.timeCountDown = reset
            self.activated = not self.activated

    def move(self):
        """Patrol between the bounds while active, bouncing at the edges."""
        if not self.activated:
            return
        if self.hScope != 0:
            if not (self.leftBound < self.x < self.rightBound):
                self.dx = -self.dx
            self.x += self.dx
        if self.vScope != 0:
            if not (self.lowerBound < self.y < self.upperBound):
                self.dy = -self.dy
            self.y += self.dy

    def draw(self, screen, data):
        """Blit the sprite centered on (x, y), only while active."""
        if self.activated:
            px = self.x - self.image.get_width() / 2
            py = data.height - self.y - self.image.get_height() / 2
            screen.blit(self.image, (px, py))
class Bomb(object):
    """Static TNT crate: arms when any object touches it, then shoves every
    visible object away and vanishes through a four-phase animation."""
    def __init__(self, x, y, affectRadius, image):
        self.x = x
        self.y = y
        self.radius = 25
        self.explosionRadius = affectRadius
        self.disappearing = False
        self.triggered = False
        self.disappeared = False
        self.image = image
        self.disappearTime = 100
        self.explodePlayed = False  # one-shot sound latch
    def trigger(self, data):
        """Arm the bomb when any object overlaps it."""
        if not self.triggered:
            for target in data.objects:
                x, y = target.body.position
                distance = getDistance(self.x, self.y, x, y)
                if distance <= self.radius + target.radius:
                    self.triggered = True
    def explode(self, data):
        """Once triggered: push every object within explosionRadius (and not
        hidden behind an obstacle) away, then start disappearing."""
        x0, y0 = self.x, self.y
        units = 60  # divisor scaling the per-unit push vector
        if self.triggered:
            if self.explodePlayed == False:
                playSounds("sounds/tnt.ogg")
                self.explodePlayed = True
            for target in data.objects:
                x1, y1 = target.body.position
                distance = getDistance(x0, y0, x1, y1)
                if (distance <= self.explosionRadius) and \
                   (not existObstacles(data, self, target)):
                    # Push strength falls off linearly with distance.
                    coeff = (self.explosionRadius - distance)
                    impulseUnit = Vec2d((x1 - x0)/units, (y1 - y0)/units)
                    impulse = impulseUnit * coeff
                    target.body.apply_impulse(impulse)
            self.disappearing = True
    def draw(self, screen, data):
        """Blit the crate, or its four-phase vanish animation once it is
        disappearing; sets ``disappeared`` when the animation finishes."""
        phase0, phase1, phase2, phase3, step = 100, 80, 60, 40, 20
        x = self.x - self.image.get_width()/2
        y = data.height - self.y - self.image.get_height()/2
        if self.disappearing:
            if self.disappearTime > 0:
                self.disappearTime -= step
                if phase1 <= self.disappearTime < phase0:
                    screen.blit(data.phase0, (x, y))
                if phase2 <= self.disappearTime < phase1:
                    screen.blit(data.phase1, (x, y))
                if phase3 <= self.disappearTime < phase2:
                    screen.blit(data.phase2, (x, y))
                if 0 <= self.disappearTime < phase3:
                    screen.blit(data.phase3, (x, y))
            if self.disappearTime <= 0:
                self.disappeared = True
        else:
            screen.blit(self.image, (x, y))
|
21,792 | 0714f2b5b0ceab7d42879bb861bd5be10b95618e | # -*- coding: utf-8 -*-
"""
Copyright 2016 cocoatomo
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
from setuptools import setup, find_packages
from setuptools.command.test import test as TestCommand
import os, sys
version = '0.5.0'

# Read the long description from README.rst.  A context manager (instead of
# a bare open().read()) guarantees the file handle is closed promptly; the
# previous '\n'.join([...]) of a single element produced the same string.
with open(os.path.join('.', 'README.rst')) as readme:
    long_description = readme.read()

classifiers = [
    'Development Status :: 4 - Beta',
    'License :: OSI Approved :: Apache Software License',
    'Programming Language :: Python',
    'Programming Language :: Python :: 2.7',
    'Programming Language :: Python :: 3.5',
    'Programming Language :: Python :: 3.6',
    'Topic :: Documentation',
]

setup(
    name='pygments-dmdl',
    version=version,
    description='DMDL lexer and highlighter for Pygments',
    long_description=long_description,
    classifiers=classifiers,
    keywords=[],
    author='cocoatomo',
    author_email='cocoatomo77@gmail.com',
    url='https://github.com/cocoatomo/pygments-dmdl',
    license='Apache License (2.0)',
    namespace_packages=['dmdl'],
    packages=find_packages('.'),
    package_dir={'': '.'},
    include_package_data=True,
    install_requires=[],
    # Register the DMDL lexer with Pygments through its plugin entry point.
    entry_points="""
    [pygments.lexers]
    dmdl = dmdl.lexer:DmdlLexer
    """,
    zip_safe=False,
    setup_requires=['pytest-runner'],
    tests_require=['pytest'],
    cmdclass={},
)
|
21,793 | 6a7254c7a99f7bdb1f3be078772146c9442e0371 | from django.conf.urls import url
from . import views
# URL routing table for this app's views.
# NOTE(review): the non-root patterns carry no trailing '$' anchor, so e.g.
# r'^about/' also matches 'about/<anything>'; confirm whether that prefix
# matching is intended before tightening the regexes.
urlpatterns = [
    url(r'^$', views.index, name='index'),
    url(r'^about/', views.about, name='about'),
    url(r'^code/', views.code, name='code'),
    url(r'^model_description/', views.model_description, name='model_description'),
    url(r'^dashboard/', views.dashboard, name='dashboard'),
]
|
21,794 | 48cc6aa19a6313e783143b47e63518027c98d9ae | raio = float(input())
pi = 3.14159
volume = (4/3 * pi * raio**(3))
print('VOLUME = %.3f' % volume)
|
21,795 | dd6e4a17aff314a300f3107c797b0474ff4b71cf | import tkinter as tk
from tkinter import ttk
from tkinter import messagebox
from Clases.calendarioExamenes import CalendarioExamenes
from tkcalendar import Calendar, DateEntry
import datetime
from datetime import date,timedelta
from Clases.jsonConverter import jsonConverter
import showCalendars as sW
import threading
from tkinter.ttk import Progressbar
class SubWindow(tk.Toplevel):
    """Dialog that collects six exam dates for a group and generates the
    Excel exam calendars.

    Layout: entries 0-2 are "Parcial 1..3", entry 3 is "ordinario" and
    entries 4-5 are "extraordinario 1/2".  Clicking any entry opens a
    tkcalendar date picker; the "Generar Excel" button runs the
    generation in a background thread with an indeterminate progressbar.
    """

    def __init__(self, master, *argv):
        # argv[0] is expected to be a (calendario, grupo) pair forwarded
        # by the opening window -- TODO confirm against the caller.
        super().__init__(master)
        self._second_window = None  # at most one results window at a time
        self.grupo = argv[0][1]
        self.title("Generar Excel del {}".format(self.grupo))
        # Let the 3x3 widget grid stretch with the window.
        for i in range(3):
            self.columnconfigure(i, weight=1)
            self.rowconfigure(i, weight=1)
        self.calendario = argv[0][0]
        # Six date-entry widgets, indexed as described in the class docstring.
        self.txts = ["" for x in range(6)]
        # Change what happens when you click the X button
        # This is done so changes also reflect in the main window class
        self.protocol('WM_DELETE_WINDOW', master.close)
        self.lbl1 = ttk.Label(self, text="Parcial 1")
        self.lbl1.grid(column=0, row=0,sticky="nsew", padx=10, pady=5)
        self.lbl2 = ttk.Label(self, text="Parcial 2")
        self.lbl2.grid(column=0, row=1,sticky="nsew", padx=10, pady=5)
        self.lbl3 = ttk.Label(self, text="Parcial 3")
        self.lbl3.grid(column=0, row=2,sticky="nsew", padx=10, pady=5)
        self.txts[0] = ttk.Entry(self, text="")
        self.txts[0].grid(column = 1, row = 0,columnspan=2)
        self.txts[1] = ttk.Entry(self, text="")
        self.txts[1].grid(column = 1, row = 1,columnspan=2)
        self.txts[2] = ttk.Entry(self, text="")
        self.txts[2].grid(column = 1, row = 2,columnspan=2)
        self.lbl4 = ttk.Label(self, text="Fecha ordinario")
        self.lbl4.grid(column=3, row=0,sticky=tk.W, padx=5)
        self.lbl5 = ttk.Label(self, text="Fecha extraordinario 1")
        self.lbl5.grid(column=3, row=1, padx=5)
        self.lbl6 = ttk.Label(self, text="Fecha extraordinario 2")
        self.lbl6.grid(column=3, row=2, padx=5)
        self.txts[3] = ttk.Entry(self, text="")
        self.txts[3].grid(column = 4, row = 0,columnspan=2, padx=10, pady=5)
        self.txts[4] = ttk.Entry(self, text="")
        self.txts[4].grid(column = 4, row = 1,columnspan=2, padx=10, pady=5)
        self.txts[5] = ttk.Entry(self, text="")
        self.txts[5].grid(column = 4, row = 2,columnspan=2, padx=10, pady=5)
        self.btn = ttk.Button(self, text="Generar Excel", command= lambda : self.generateExcel())
        self.btn.grid(column= 4, row=3,sticky="nsew", columnspan=2, padx=5, pady=5, ipadx=5, ipady=5)
        # Progressbar is created now but only gridded while generating.
        self.progress = ttk.Progressbar(self, orient=tk.HORIZONTAL,length=100, mode='indeterminate')
        # Default the calendar popup to today's date.
        now = datetime.datetime.now()
        self.dia =now.day
        self.mes =now.month
        self.anio =now.year
        # Any left-click on a date entry opens the calendar picker.
        for i in range(6):
            self.txts[i].bind("<Button-1>", self.tkinterCalendar)

    def tkinterCalendar(self, event):
        """Open a calendar popup for the clicked entry (``event.widget``).

        The user can either confirm a weekday date or save the entry as
        pending ("Pediente").
        """
        # respuesta = tk.messagebox.askquestion(parent=self, message="¿Cuenta con la fecha especifica? \n\n Conteste NO para guardar como PENDIENTE", title="Agregar fecha",icon = 'info')
        # if respuesta == 'yes':
        def print_sel():
            # Accept only weekdays (Mon=0..Fri=4); reject weekend picks.
            f = (cal.selection_get()).weekday()
            if f < 5:
                # Entry is kept disabled for typing; enable only to rewrite it.
                event.widget.config(state="normal")
                self.updateDate(cal.selection_get())
                event.widget.delete(0, tk.END)
                event.widget.insert(0, cal.selection_get())
                event.widget.config(state="disabled")
                top.destroy()
            else:
                tk.messagebox.showerror(parent=top, message="Por favor elija un dia valido", title="Seleccionar fecha")

        def print_pendiente():
            # Mark the entry as pending instead of a concrete date.
            # NOTE(review): the literal 'Pediente' (sic) is also what
            # generateExcel() compares against -- fix both together or not at all.
            event.widget.config(state="normal")
            self.updateDate(cal.selection_get())
            event.widget.delete(0, tk.END)
            event.widget.insert(0, 'Pediente')
            event.widget.config(state="disabled")
            top.destroy()

        top = tk.Toplevel(self)
        cal = Calendar(top,
                       font="Arial 14", selectmode='day',
                       cursor="hand1", year=self.anio, month=self.mes, day=self.dia)
        cal.pack(fill="both", expand=True)
        ttk.Button(top, text="ok", command=print_sel).pack(expand=True, fill=tk.BOTH, ipadx=10, ipady=10, pady=2)
        ttk.Button(top, text="Guardar como pendiente", command= print_pendiente).pack(expand=True, fill=tk.BOTH, ipadx=10, ipady=10, pady=2)
        # else:

    def updateDate(self, date):
        """Remember the last picked date so the next popup opens on it."""
        self.dia = date.day
        self.mes = date.month
        self.anio = date.year

    def generateExcel(self):
        """Build the exam calendars from the six entries on a worker thread."""
        def real_traitement():
            # Show and start the indeterminate progressbar while working.
            self.progress.grid(row=4,column=0,columnspan=6, sticky="nsew")
            self.progress.start()
            # NOTE(review): local name `json` shadows the stdlib module name
            # (not imported here, but confusing); it is a jsonConverter instance.
            json = jsonConverter()
            fechas = json.getFechas(self)
            calendariosExamenes = []
            fechaInicio= []
            fechaFinal = []
            listaMateriasNoAplicadas = []
            listaMateriasNoAplicadas2 = []
            for i in range(6):
                fecha = self.txts[i].get()
                # Empty or 'Pediente' (sic) entries become 'Pendiente' rows.
                if fecha == None or fecha == '' or fecha == 'Pediente':
                    fechaInicio.append('Pendiente')
                    fechaFinal.append('Pendiente')
                    calendariosExamenes.append(
                        'Pendiente'
                    )
                else:
                    array = json.getCalendario(self, self.grupo)
                    fecha = fecha.split('-')  # ISO 'YYYY-MM-DD' -> [Y, M, D]
                    objCalendarioExamenes = CalendarioExamenes(array, self.grupo, fechas, fecha)
                    # Indices 0-3 (parciales + ordinario) use the regular
                    # builder; 4-5 the extraordinario builder.
                    # NOTE(review): 'crearCalendarioExmanes' looks misspelled
                    # but must match the method name in CalendarioExamenes.
                    if i < 4:
                        examenes = objCalendarioExamenes.crearCalendarioExmanes()
                    else:
                        examenes = objCalendarioExamenes.crearCalendarioExamenesExtraordinarios()
                    fechaInicio.append(objCalendarioExamenes.fechaInicio)
                    fechaFinal.append(fecha)
                    calendariosExamenes.append(
                        examenes
                    )
                    # NOTE(review): the same list is appended to both result
                    # lists -- both receive references to the same object.
                    listaMateriasNoAplicadas.append(
                        objCalendarioExamenes.listaMateriasNoAplicadas
                    )
                    listaMateriasNoAplicadas2.append(
                        objCalendarioExamenes.listaMateriasNoAplicadas
                    )
            array = json.getCalendario(self, self.grupo)
            self.new_window(calendariosExamenes, array, fechaInicio, self.grupo, fechaFinal, listaMateriasNoAplicadas, listaMateriasNoAplicadas2)
            self.progress.stop()
            self.progress.grid_forget()
        threading.Thread(target=real_traitement).start()

    def new_window(self, *argv):
        # This prevents multiple clicks opening multiple windows
        if self._second_window is not None:
            return
        self._second_window = sW.SubWindow(self, argv)

    def close(self):
        # Destroy the 2nd window and reset the value to None
        if self._second_window is not None:
            self._second_window.destroy()
            self._second_window = None
|
21,796 | 6c0a01b772c299eb6773c67b16c2d654bcc2e218 | import connexion
import six
from openapi_server.models.cargo_response import CargoResponse # noqa: E501
from openapi_server.models.door_response import DoorResponse # noqa: E501
from openapi_server.models.humidity_response import HumidityResponse # noqa: E501
from openapi_server.models.inline_object19 import InlineObject19 # noqa: E501
from openapi_server.models.inline_object20 import InlineObject20 # noqa: E501
from openapi_server.models.inline_object21 import InlineObject21 # noqa: E501
from openapi_server.models.inline_object22 import InlineObject22 # noqa: E501
from openapi_server.models.inline_object23 import InlineObject23 # noqa: E501
from openapi_server.models.inline_object24 import InlineObject24 # noqa: E501
from openapi_server.models.inline_response2008 import InlineResponse2008 # noqa: E501
from openapi_server.models.sensor_history_response import SensorHistoryResponse # noqa: E501
from openapi_server.models.temperature_response import TemperatureResponse # noqa: E501
from openapi_server import util
def get_sensors(access_token, group_param):  # noqa: E501
    """/sensors/list

    Return sensor objects in the Samsara Cloud and information about
    them. # noqa: E501

    :param access_token: Samsara API access token.
    :type access_token: str
    :param group_param:
    :type group_param: dict | bytes

    :rtype: InlineResponse2008
    """
    # Deserialize the JSON request body into the generated model, if any.
    if connexion.request.is_json:
        body = connexion.request.get_json()
        group_param = InlineObject23.from_dict(body)  # noqa: E501
    return 'do some magic!'
def get_sensors_cargo(access_token, sensor_param):  # noqa: E501
    """/sensors/cargo

    Return cargo monitor status (empty / full) for the requested
    sensors. # noqa: E501

    :param access_token: Samsara API access token.
    :type access_token: str
    :param sensor_param:
    :type sensor_param: dict | bytes

    :rtype: CargoResponse
    """
    # Deserialize the JSON request body into the generated model, if any.
    if connexion.request.is_json:
        body = connexion.request.get_json()
        sensor_param = InlineObject19.from_dict(body)  # noqa: E501
    return 'do some magic!'
def get_sensors_door(access_token, sensor_param):  # noqa: E501
    """/sensors/door

    Return door monitor status (closed / open) for the requested
    sensors. # noqa: E501

    :param access_token: Samsara API access token.
    :type access_token: str
    :param sensor_param:
    :type sensor_param: dict | bytes

    :rtype: DoorResponse
    """
    # Deserialize the JSON request body into the generated model, if any.
    if connexion.request.is_json:
        body = connexion.request.get_json()
        sensor_param = InlineObject20.from_dict(body)  # noqa: E501
    return 'do some magic!'
def get_sensors_history(access_token, history_param):  # noqa: E501
    """/sensors/history

    Return historical data for the specified sensors over the requested
    time range and resolution. # noqa: E501

    :param access_token: Samsara API access token.
    :type access_token: str
    :param history_param:
    :type history_param: dict | bytes

    :rtype: SensorHistoryResponse
    """
    # Deserialize the JSON request body into the generated model, if any.
    if connexion.request.is_json:
        body = connexion.request.get_json()
        history_param = InlineObject21.from_dict(body)  # noqa: E501
    return 'do some magic!'
def get_sensors_humidity(access_token, sensor_param):  # noqa: E501
    """/sensors/humidity

    Return the current relative humidity for the requested
    sensors. # noqa: E501

    :param access_token: Samsara API access token.
    :type access_token: str
    :param sensor_param:
    :type sensor_param: dict | bytes

    :rtype: HumidityResponse
    """
    # Deserialize the JSON request body into the generated model, if any.
    if connexion.request.is_json:
        body = connexion.request.get_json()
        sensor_param = InlineObject22.from_dict(body)  # noqa: E501
    return 'do some magic!'
def get_sensors_temperature(access_token, sensor_param):  # noqa: E501
    """/sensors/temperature

    Return the current ambient temperature (and probe temperature if
    applicable) for the requested sensors. # noqa: E501

    :param access_token: Samsara API access token.
    :type access_token: str
    :param sensor_param:
    :type sensor_param: dict | bytes

    :rtype: TemperatureResponse
    """
    # Deserialize the JSON request body into the generated model, if any.
    if connexion.request.is_json:
        body = connexion.request.get_json()
        sensor_param = InlineObject24.from_dict(body)  # noqa: E501
    return 'do some magic!'
|
21,797 | 2b613df5caed52b08c2cffb6f6675fefe407bcec | from flask import Flask
# WSGI application object; Flask uses __name__ to locate resources.
app = Flask(__name__)
@app.route("/")
def hello():
return "Hello World"
@app.route("/Lwin")
def Lwin():
return "Hello LwinMeMeKhaing"
|
21,798 | 160772027862595cc905f032732cd5835ae232e0 | import os
from pathlib import Path
import pandas as pd
import time
import experiment_config_gpflow as config
import experiment_funcs_gpflow as ef
import warnings

# NOTE(review): filters GPy warnings although the experiment modules are
# gpflow-based -- confirm the module name is still intended.
warnings.filterwarnings(action='ignore', module='GPy')

# NOTE(review): `records`, `experiments`, `triggerAt`, `tstart` and
# `count_experiments` are never used below -- candidates for removal.
records = pd.DataFrame()
# Data for acquiring MP
experiments = []
triggerAt = 1
tstart = time.time()
count_experiments = 0.0

# For every configured data folder: train each configured kernel on every
# '*train*' file and persist the results, skipping runs whose final
# predictions file already exists unless FORCE_RERUN_IF_EXISTS is set.
for folder in config.DATA_FOLDERS:
    location = Path(config.DATA_ROOT).joinpath(folder)
    trainFiles = list(filter(lambda name: 'train' in name, os.listdir(location))) ##
    for file in trainFiles:
        data = ef.read_experiment_data(folder, location, file)
        idx = ef.get_idx(file)  # run index derived from the file name
        print(folder, file)
        # Now the experiment (different kernels) start
        for kName in list(config.EXPERIMENTS.keys()):
            print(kName)
            # Per-kernel settings fall back to the *_DEFAULT config values.
            inputs = ef.ExperimentInput(kName, data, config.EXPERIMENTS[kName],
                                        config.LIKELIHOOD_EXPERIMENT.get(kName, config.LIKELIHOOD_DEFAULT),
                                        config.MODEL_EXPERIMENT.get(kName, config.MODEL_DEFAULT),
                                        config.PRIORS_EXPERIMENT.get(kName, config.PRIOR_DEFAULT),
                                        config.SCORE.get(kName, config.SCORE_DEFAULT),
                                        config.INDUCING_EXPERIMENT.get(kName, config.INDUCING_DEFAULT),
                                        config.N_RESTARTS.get(kName, config.N_RESTARTS_DEF))
            if config.FORCE_RERUN_IF_EXISTS:
                ef.train_and_save(inputs)
            else:
                result_path = Path(config.RESULT_ROOT).joinpath(folder).joinpath(str(idx))
                model_file = result_path.joinpath('{0} {1}.predictions.pbz2'.format(kName, config.CHECKPOINTS[-1]))
                # Check so we dont rerun the same model if it is already saved from a previous run
                if model_file.exists() is False:
                    ef.train_and_save(inputs)
                else:
                    print("{0} already exists for {1}-{2}".format(kName, folder, idx))
|
21,799 | 7e0f83a84c6c963899ef2092339e299c20c6fd0a | from django import forms
from .models import Wishlist
from products.models import Product
from django.forms import TextInput, Textarea
class WishlistForm(forms.ModelForm):
    """ModelForm for creating/editing a Wishlist (product selection + name)."""

    class Meta:
        model = Wishlist
        fields = (
            "products",
            "name",
        )

    def __init__(self, *args, **kwargs):
        """Attach placeholder text and a styling class to every field.

        Fixes two defects in the original:
        - the placeholder dict used the key 'product', which never matched
          the 'products' field and raised KeyError on instantiation;
        - the required/optional branches computed the identical value
          (f'{placeholders[field]}' vs placeholders[field]), so the
          conditional was dead code and is removed.
        """
        super().__init__(*args, **kwargs)
        # Keys must match the form field names declared in Meta.fields.
        placeholders = {
            'products': 'Product...',
            'name': 'name',
        }
        for field in self.fields:
            self.fields[field].widget.attrs['placeholder'] = placeholders[field]
            self.fields[field].widget.attrs['class'] = 'border-black rounded-0'
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.