hexsha string | size int64 | ext string | lang string | max_stars_repo_path string | max_stars_repo_name string | max_stars_repo_head_hexsha string | max_stars_repo_licenses list | max_stars_count int64 | max_stars_repo_stars_event_min_datetime string | max_stars_repo_stars_event_max_datetime string | max_issues_repo_path string | max_issues_repo_name string | max_issues_repo_head_hexsha string | max_issues_repo_licenses list | max_issues_count int64 | max_issues_repo_issues_event_min_datetime string | max_issues_repo_issues_event_max_datetime string | max_forks_repo_path string | max_forks_repo_name string | max_forks_repo_head_hexsha string | max_forks_repo_licenses list | max_forks_count int64 | max_forks_repo_forks_event_min_datetime string | max_forks_repo_forks_event_max_datetime string | content string | avg_line_length float64 | max_line_length int64 | alphanum_fraction float64 | qsc_code_num_words_quality_signal int64 | qsc_code_num_chars_quality_signal float64 | qsc_code_mean_word_length_quality_signal float64 | qsc_code_frac_words_unique_quality_signal float64 | qsc_code_frac_chars_top_2grams_quality_signal float64 | qsc_code_frac_chars_top_3grams_quality_signal float64 | qsc_code_frac_chars_top_4grams_quality_signal float64 | qsc_code_frac_chars_dupe_5grams_quality_signal float64 | qsc_code_frac_chars_dupe_6grams_quality_signal float64 | qsc_code_frac_chars_dupe_7grams_quality_signal float64 | qsc_code_frac_chars_dupe_8grams_quality_signal float64 | qsc_code_frac_chars_dupe_9grams_quality_signal float64 | qsc_code_frac_chars_dupe_10grams_quality_signal float64 | qsc_code_frac_chars_replacement_symbols_quality_signal float64 | qsc_code_frac_chars_digital_quality_signal float64 | qsc_code_frac_chars_whitespace_quality_signal float64 | qsc_code_size_file_byte_quality_signal float64 | qsc_code_num_lines_quality_signal float64 | qsc_code_num_chars_line_max_quality_signal float64 | qsc_code_num_chars_line_mean_quality_signal float64 | 
qsc_code_frac_chars_alphabet_quality_signal float64 | qsc_code_frac_chars_comments_quality_signal float64 | qsc_code_cate_xml_start_quality_signal float64 | qsc_code_frac_lines_dupe_lines_quality_signal float64 | qsc_code_cate_autogen_quality_signal float64 | qsc_code_frac_lines_long_string_quality_signal float64 | qsc_code_frac_chars_string_length_quality_signal float64 | qsc_code_frac_chars_long_word_length_quality_signal float64 | qsc_code_frac_lines_string_concat_quality_signal float64 | qsc_code_cate_encoded_data_quality_signal float64 | qsc_code_frac_chars_hex_words_quality_signal float64 | qsc_code_frac_lines_prompt_comments_quality_signal float64 | qsc_code_frac_lines_assert_quality_signal float64 | qsc_codepython_cate_ast_quality_signal float64 | qsc_codepython_frac_lines_func_ratio_quality_signal float64 | qsc_codepython_cate_var_zero_quality_signal bool | qsc_codepython_frac_lines_pass_quality_signal float64 | qsc_codepython_frac_lines_import_quality_signal float64 | qsc_codepython_frac_lines_simplefunc_quality_signal float64 | qsc_codepython_score_lines_no_logic_quality_signal float64 | qsc_codepython_frac_lines_print_quality_signal float64 | qsc_code_num_words int64 | qsc_code_num_chars int64 | qsc_code_mean_word_length int64 | qsc_code_frac_words_unique null | qsc_code_frac_chars_top_2grams int64 | qsc_code_frac_chars_top_3grams int64 | qsc_code_frac_chars_top_4grams int64 | qsc_code_frac_chars_dupe_5grams int64 | qsc_code_frac_chars_dupe_6grams int64 | qsc_code_frac_chars_dupe_7grams int64 | qsc_code_frac_chars_dupe_8grams int64 | qsc_code_frac_chars_dupe_9grams int64 | qsc_code_frac_chars_dupe_10grams int64 | qsc_code_frac_chars_replacement_symbols int64 | qsc_code_frac_chars_digital int64 | qsc_code_frac_chars_whitespace int64 | qsc_code_size_file_byte int64 | qsc_code_num_lines int64 | qsc_code_num_chars_line_max int64 | qsc_code_num_chars_line_mean int64 | qsc_code_frac_chars_alphabet int64 | qsc_code_frac_chars_comments int64 | 
qsc_code_cate_xml_start int64 | qsc_code_frac_lines_dupe_lines int64 | qsc_code_cate_autogen int64 | qsc_code_frac_lines_long_string int64 | qsc_code_frac_chars_string_length int64 | qsc_code_frac_chars_long_word_length int64 | qsc_code_frac_lines_string_concat null | qsc_code_cate_encoded_data int64 | qsc_code_frac_chars_hex_words int64 | qsc_code_frac_lines_prompt_comments int64 | qsc_code_frac_lines_assert int64 | qsc_codepython_cate_ast int64 | qsc_codepython_frac_lines_func_ratio int64 | qsc_codepython_cate_var_zero int64 | qsc_codepython_frac_lines_pass int64 | qsc_codepython_frac_lines_import int64 | qsc_codepython_frac_lines_simplefunc int64 | qsc_codepython_score_lines_no_logic int64 | qsc_codepython_frac_lines_print int64 | effective string | hits int64 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
6add2881036ce2ba1dd465cd39edc0303dd8be19 | 242 | py | Python | twitterauth/models.py | clips-tk/gradefisl | 86ddcb61fa221de250a6a5515e227cf4a804dbb6 | [
"MIT"
] | 3 | 2015-11-11T19:13:58.000Z | 2016-05-14T10:38:38.000Z | twitterauth/models.py | clips-tk/gradefisl | 86ddcb61fa221de250a6a5515e227cf4a804dbb6 | [
"MIT"
] | null | null | null | twitterauth/models.py | clips-tk/gradefisl | 86ddcb61fa221de250a6a5515e227cf4a804dbb6 | [
"MIT"
] | null | null | null | from django.db import models
from django.contrib.auth.models import User
class Profile(models.Model):
user = models.ForeignKey(User)
oauth_token = models.CharField(max_length=200)
oauth_secret = models.CharField(max_length=200)
| 26.888889 | 51 | 0.772727 | 34 | 242 | 5.382353 | 0.558824 | 0.10929 | 0.196721 | 0.262295 | 0.295082 | 0 | 0 | 0 | 0 | 0 | 0 | 0.028708 | 0.136364 | 242 | 8 | 52 | 30.25 | 0.84689 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.333333 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 0 | 5 |
0a82c075c9137939ccdead5f5e4090bfad298445 | 37 | py | Python | kidb/__main__.py | x-yzt/kidb | 5f0eec208accf4adb90b2ba1082d63921bc0e9ef | [
"MIT"
] | null | null | null | kidb/__main__.py | x-yzt/kidb | 5f0eec208accf4adb90b2ba1082d63921bc0e9ef | [
"MIT"
] | null | null | null | kidb/__main__.py | x-yzt/kidb | 5f0eec208accf4adb90b2ba1082d63921bc0e9ef | [
"MIT"
] | null | null | null | from kidb.app import app
app.run()
| 7.4 | 24 | 0.702703 | 7 | 37 | 3.714286 | 0.714286 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.189189 | 37 | 4 | 25 | 9.25 | 0.866667 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 0.5 | 0 | 0.5 | 0 | 1 | 1 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | 5 |
0a9a492a1489574c6b7c989155dbf031e5487230 | 24,454 | py | Python | tests/components/zha/test_light.py | tbarbette/core | 8e58c3aa7bc8d2c2b09b6bd329daa1c092d52d3c | [
"Apache-2.0"
] | 6 | 2017-08-02T19:26:39.000Z | 2020-03-14T22:47:41.000Z | tests/components/zha/test_light.py | tbarbette/core | 8e58c3aa7bc8d2c2b09b6bd329daa1c092d52d3c | [
"Apache-2.0"
] | 58 | 2020-08-03T07:33:02.000Z | 2022-03-31T06:02:05.000Z | tests/components/zha/test_light.py | tbarbette/core | 8e58c3aa7bc8d2c2b09b6bd329daa1c092d52d3c | [
"Apache-2.0"
] | 14 | 2018-08-19T16:28:26.000Z | 2021-09-02T18:26:53.000Z | """Test zha light."""
from datetime import timedelta
from unittest.mock import AsyncMock, MagicMock, call, patch, sentinel
import pytest
import zigpy.profiles.zha as zha
import zigpy.types
import zigpy.zcl.clusters.general as general
import zigpy.zcl.clusters.lighting as lighting
import zigpy.zcl.foundation as zcl_f
from homeassistant.components.light import DOMAIN, FLASH_LONG, FLASH_SHORT
from homeassistant.components.zha.core.group import GroupMember
from homeassistant.components.zha.light import FLASH_EFFECTS
from homeassistant.const import STATE_OFF, STATE_ON, STATE_UNAVAILABLE
import homeassistant.util.dt as dt_util
from .common import (
async_enable_traffic,
async_find_group_entity_id,
async_test_rejoin,
find_entity_id,
get_zha_gateway,
send_attributes_report,
)
from tests.common import async_fire_time_changed
ON = 1
OFF = 0
IEEE_GROUPABLE_DEVICE = "01:2d:6f:00:0a:90:69:e8"
IEEE_GROUPABLE_DEVICE2 = "02:2d:6f:00:0a:90:69:e9"
IEEE_GROUPABLE_DEVICE3 = "03:2d:6f:00:0a:90:69:e7"
LIGHT_ON_OFF = {
1: {
"device_type": zha.DeviceType.ON_OFF_LIGHT,
"in_clusters": [
general.Basic.cluster_id,
general.Identify.cluster_id,
general.OnOff.cluster_id,
],
"out_clusters": [general.Ota.cluster_id],
}
}
LIGHT_LEVEL = {
1: {
"device_type": zha.DeviceType.DIMMABLE_LIGHT,
"in_clusters": [
general.Basic.cluster_id,
general.LevelControl.cluster_id,
general.OnOff.cluster_id,
],
"out_clusters": [general.Ota.cluster_id],
}
}
LIGHT_COLOR = {
1: {
"device_type": zha.DeviceType.COLOR_DIMMABLE_LIGHT,
"in_clusters": [
general.Basic.cluster_id,
general.Identify.cluster_id,
general.LevelControl.cluster_id,
general.OnOff.cluster_id,
lighting.Color.cluster_id,
],
"out_clusters": [general.Ota.cluster_id],
}
}
@pytest.fixture
async def coordinator(hass, zigpy_device_mock, zha_device_joined):
"""Test zha light platform."""
zigpy_device = zigpy_device_mock(
{
1: {
"in_clusters": [general.Groups.cluster_id],
"out_clusters": [],
"device_type": zha.DeviceType.COLOR_DIMMABLE_LIGHT,
}
},
ieee="00:15:8d:00:02:32:4f:32",
nwk=0x0000,
node_descriptor=b"\xf8\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff",
)
zha_device = await zha_device_joined(zigpy_device)
zha_device.available = True
return zha_device
@pytest.fixture
async def device_light_1(hass, zigpy_device_mock, zha_device_joined):
"""Test zha light platform."""
zigpy_device = zigpy_device_mock(
{
1: {
"in_clusters": [
general.OnOff.cluster_id,
general.LevelControl.cluster_id,
lighting.Color.cluster_id,
general.Groups.cluster_id,
general.Identify.cluster_id,
],
"out_clusters": [],
"device_type": zha.DeviceType.COLOR_DIMMABLE_LIGHT,
}
},
ieee=IEEE_GROUPABLE_DEVICE,
nwk=0xB79D,
)
zha_device = await zha_device_joined(zigpy_device)
zha_device.available = True
return zha_device
@pytest.fixture
async def device_light_2(hass, zigpy_device_mock, zha_device_joined):
"""Test zha light platform."""
zigpy_device = zigpy_device_mock(
{
1: {
"in_clusters": [
general.OnOff.cluster_id,
general.LevelControl.cluster_id,
lighting.Color.cluster_id,
general.Groups.cluster_id,
general.Identify.cluster_id,
],
"out_clusters": [],
"device_type": zha.DeviceType.COLOR_DIMMABLE_LIGHT,
}
},
ieee=IEEE_GROUPABLE_DEVICE2,
nwk=0xC79E,
)
zha_device = await zha_device_joined(zigpy_device)
zha_device.available = True
return zha_device
@pytest.fixture
async def device_light_3(hass, zigpy_device_mock, zha_device_joined):
"""Test zha light platform."""
zigpy_device = zigpy_device_mock(
{
1: {
"in_clusters": [
general.OnOff.cluster_id,
general.LevelControl.cluster_id,
lighting.Color.cluster_id,
general.Groups.cluster_id,
general.Identify.cluster_id,
],
"out_clusters": [],
"device_type": zha.DeviceType.COLOR_DIMMABLE_LIGHT,
}
},
ieee=IEEE_GROUPABLE_DEVICE3,
nwk=0xB89F,
)
zha_device = await zha_device_joined(zigpy_device)
zha_device.available = True
return zha_device
@patch("zigpy.zcl.clusters.general.OnOff.read_attributes", new=MagicMock())
async def test_light_refresh(hass, zigpy_device_mock, zha_device_joined_restored):
"""Test zha light platform refresh."""
# create zigpy devices
zigpy_device = zigpy_device_mock(LIGHT_ON_OFF)
zha_device = await zha_device_joined_restored(zigpy_device)
on_off_cluster = zigpy_device.endpoints[1].on_off
entity_id = await find_entity_id(DOMAIN, zha_device, hass)
# allow traffic to flow through the gateway and device
await async_enable_traffic(hass, [zha_device])
on_off_cluster.read_attributes.reset_mock()
# not enough time passed
async_fire_time_changed(hass, dt_util.utcnow() + timedelta(minutes=20))
await hass.async_block_till_done()
assert on_off_cluster.read_attributes.call_count == 0
assert on_off_cluster.read_attributes.await_count == 0
assert hass.states.get(entity_id).state == STATE_OFF
# 1 interval - 1 call
on_off_cluster.read_attributes.return_value = [{"on_off": 1}, {}]
async_fire_time_changed(hass, dt_util.utcnow() + timedelta(minutes=80))
await hass.async_block_till_done()
assert on_off_cluster.read_attributes.call_count == 1
assert on_off_cluster.read_attributes.await_count == 1
assert hass.states.get(entity_id).state == STATE_ON
# 2 intervals - 2 calls
on_off_cluster.read_attributes.return_value = [{"on_off": 0}, {}]
async_fire_time_changed(hass, dt_util.utcnow() + timedelta(minutes=80))
await hass.async_block_till_done()
assert on_off_cluster.read_attributes.call_count == 2
assert on_off_cluster.read_attributes.await_count == 2
assert hass.states.get(entity_id).state == STATE_OFF
@patch(
"zigpy.zcl.clusters.lighting.Color.request",
new=AsyncMock(return_value=[sentinel.data, zcl_f.Status.SUCCESS]),
)
@patch(
"zigpy.zcl.clusters.general.Identify.request",
new=AsyncMock(return_value=[sentinel.data, zcl_f.Status.SUCCESS]),
)
@patch(
"zigpy.zcl.clusters.general.LevelControl.request",
new=AsyncMock(return_value=[sentinel.data, zcl_f.Status.SUCCESS]),
)
@patch(
"zigpy.zcl.clusters.general.OnOff.request",
new=AsyncMock(return_value=[sentinel.data, zcl_f.Status.SUCCESS]),
)
@pytest.mark.parametrize(
"device, reporting",
[(LIGHT_ON_OFF, (1, 0, 0)), (LIGHT_LEVEL, (1, 1, 0)), (LIGHT_COLOR, (1, 1, 3))],
)
async def test_light(
hass, zigpy_device_mock, zha_device_joined_restored, device, reporting
):
"""Test zha light platform."""
# create zigpy devices
zigpy_device = zigpy_device_mock(device)
zha_device = await zha_device_joined_restored(zigpy_device)
entity_id = await find_entity_id(DOMAIN, zha_device, hass)
assert entity_id is not None
cluster_on_off = zigpy_device.endpoints[1].on_off
cluster_level = getattr(zigpy_device.endpoints[1], "level", None)
cluster_color = getattr(zigpy_device.endpoints[1], "light_color", None)
cluster_identify = getattr(zigpy_device.endpoints[1], "identify", None)
assert hass.states.get(entity_id).state == STATE_OFF
await async_enable_traffic(hass, [zha_device], enabled=False)
# test that the lights were created and that they are unavailable
assert hass.states.get(entity_id).state == STATE_UNAVAILABLE
# allow traffic to flow through the gateway and device
await async_enable_traffic(hass, [zha_device])
# test that the lights were created and are off
assert hass.states.get(entity_id).state == STATE_OFF
# test turning the lights on and off from the light
await async_test_on_off_from_light(hass, cluster_on_off, entity_id)
# test turning the lights on and off from the HA
await async_test_on_off_from_hass(hass, cluster_on_off, entity_id)
# test short flashing the lights from the HA
if cluster_identify:
await async_test_flash_from_hass(hass, cluster_identify, entity_id, FLASH_SHORT)
# test turning the lights on and off from the HA
if cluster_level:
await async_test_level_on_off_from_hass(
hass, cluster_on_off, cluster_level, entity_id
)
# test getting a brightness change from the network
await async_test_on_from_light(hass, cluster_on_off, entity_id)
await async_test_dimmer_from_light(
hass, cluster_level, entity_id, 150, STATE_ON
)
# test rejoin
await async_test_off_from_hass(hass, cluster_on_off, entity_id)
clusters = [cluster_on_off]
if cluster_level:
clusters.append(cluster_level)
if cluster_color:
clusters.append(cluster_color)
await async_test_rejoin(hass, zigpy_device, clusters, reporting)
# test long flashing the lights from the HA
if cluster_identify:
await async_test_flash_from_hass(hass, cluster_identify, entity_id, FLASH_LONG)
async def async_test_on_off_from_light(hass, cluster, entity_id):
"""Test on off functionality from the light."""
# turn on at light
await send_attributes_report(hass, cluster, {1: 0, 0: 1, 2: 3})
await hass.async_block_till_done()
assert hass.states.get(entity_id).state == STATE_ON
# turn off at light
await send_attributes_report(hass, cluster, {1: 1, 0: 0, 2: 3})
await hass.async_block_till_done()
assert hass.states.get(entity_id).state == STATE_OFF
async def async_test_on_from_light(hass, cluster, entity_id):
"""Test on off functionality from the light."""
# turn on at light
await send_attributes_report(hass, cluster, {1: -1, 0: 1, 2: 2})
await hass.async_block_till_done()
assert hass.states.get(entity_id).state == STATE_ON
async def async_test_on_off_from_hass(hass, cluster, entity_id):
"""Test on off functionality from hass."""
# turn on via UI
cluster.request.reset_mock()
await hass.services.async_call(
DOMAIN, "turn_on", {"entity_id": entity_id}, blocking=True
)
assert cluster.request.call_count == 1
assert cluster.request.await_count == 1
assert cluster.request.call_args == call(
False, ON, (), expect_reply=True, manufacturer=None, tries=1, tsn=None
)
await async_test_off_from_hass(hass, cluster, entity_id)
async def async_test_off_from_hass(hass, cluster, entity_id):
"""Test turning off the light from Home Assistant."""
# turn off via UI
cluster.request.reset_mock()
await hass.services.async_call(
DOMAIN, "turn_off", {"entity_id": entity_id}, blocking=True
)
assert cluster.request.call_count == 1
assert cluster.request.await_count == 1
assert cluster.request.call_args == call(
False, OFF, (), expect_reply=True, manufacturer=None, tries=1, tsn=None
)
async def async_test_level_on_off_from_hass(
hass, on_off_cluster, level_cluster, entity_id
):
"""Test on off functionality from hass."""
on_off_cluster.request.reset_mock()
level_cluster.request.reset_mock()
# turn on via UI
await hass.services.async_call(
DOMAIN, "turn_on", {"entity_id": entity_id}, blocking=True
)
assert on_off_cluster.request.call_count == 1
assert on_off_cluster.request.await_count == 1
assert level_cluster.request.call_count == 0
assert level_cluster.request.await_count == 0
assert on_off_cluster.request.call_args == call(
False, ON, (), expect_reply=True, manufacturer=None, tries=1, tsn=None
)
on_off_cluster.request.reset_mock()
level_cluster.request.reset_mock()
await hass.services.async_call(
DOMAIN, "turn_on", {"entity_id": entity_id, "transition": 10}, blocking=True
)
assert on_off_cluster.request.call_count == 1
assert on_off_cluster.request.await_count == 1
assert level_cluster.request.call_count == 1
assert level_cluster.request.await_count == 1
assert on_off_cluster.request.call_args == call(
False, ON, (), expect_reply=True, manufacturer=None, tries=1, tsn=None
)
assert level_cluster.request.call_args == call(
False,
4,
(zigpy.types.uint8_t, zigpy.types.uint16_t),
254,
100.0,
expect_reply=True,
manufacturer=None,
tries=1,
tsn=None,
)
on_off_cluster.request.reset_mock()
level_cluster.request.reset_mock()
await hass.services.async_call(
DOMAIN, "turn_on", {"entity_id": entity_id, "brightness": 10}, blocking=True
)
assert on_off_cluster.request.call_count == 1
assert on_off_cluster.request.await_count == 1
assert level_cluster.request.call_count == 1
assert level_cluster.request.await_count == 1
assert on_off_cluster.request.call_args == call(
False, ON, (), expect_reply=True, manufacturer=None, tries=1, tsn=None
)
assert level_cluster.request.call_args == call(
False,
4,
(zigpy.types.uint8_t, zigpy.types.uint16_t),
10,
1,
expect_reply=True,
manufacturer=None,
tries=1,
tsn=None,
)
on_off_cluster.request.reset_mock()
level_cluster.request.reset_mock()
await async_test_off_from_hass(hass, on_off_cluster, entity_id)
async def async_test_dimmer_from_light(hass, cluster, entity_id, level, expected_state):
"""Test dimmer functionality from the light."""
await send_attributes_report(
hass, cluster, {1: level + 10, 0: level, 2: level - 10 or 22}
)
await hass.async_block_till_done()
assert hass.states.get(entity_id).state == expected_state
# hass uses None for brightness of 0 in state attributes
if level == 0:
level = None
assert hass.states.get(entity_id).attributes.get("brightness") == level
async def async_test_flash_from_hass(hass, cluster, entity_id, flash):
"""Test flash functionality from hass."""
# turn on via UI
cluster.request.reset_mock()
await hass.services.async_call(
DOMAIN, "turn_on", {"entity_id": entity_id, "flash": flash}, blocking=True
)
assert cluster.request.call_count == 1
assert cluster.request.await_count == 1
assert cluster.request.call_args == call(
False,
64,
(zigpy.types.uint8_t, zigpy.types.uint8_t),
FLASH_EFFECTS[flash],
0,
expect_reply=True,
manufacturer=None,
tries=1,
tsn=None,
)
@patch(
"zigpy.zcl.clusters.lighting.Color.request",
new=AsyncMock(return_value=[sentinel.data, zcl_f.Status.SUCCESS]),
)
@patch(
"zigpy.zcl.clusters.general.Identify.request",
new=AsyncMock(return_value=[sentinel.data, zcl_f.Status.SUCCESS]),
)
@patch(
"zigpy.zcl.clusters.general.LevelControl.request",
new=AsyncMock(return_value=[sentinel.data, zcl_f.Status.SUCCESS]),
)
@patch(
"zigpy.zcl.clusters.general.OnOff.request",
new=AsyncMock(return_value=[sentinel.data, zcl_f.Status.SUCCESS]),
)
async def test_zha_group_light_entity(
hass, device_light_1, device_light_2, device_light_3, coordinator
):
"""Test the light entity for a ZHA group."""
zha_gateway = get_zha_gateway(hass)
assert zha_gateway is not None
zha_gateway.coordinator_zha_device = coordinator
coordinator._zha_gateway = zha_gateway
device_light_1._zha_gateway = zha_gateway
device_light_2._zha_gateway = zha_gateway
member_ieee_addresses = [device_light_1.ieee, device_light_2.ieee]
members = [GroupMember(device_light_1.ieee, 1), GroupMember(device_light_2.ieee, 1)]
assert coordinator.is_coordinator
# test creating a group with 2 members
zha_group = await zha_gateway.async_create_zigpy_group("Test Group", members)
await hass.async_block_till_done()
assert zha_group is not None
assert len(zha_group.members) == 2
for member in zha_group.members:
assert member.device.ieee in member_ieee_addresses
assert member.group == zha_group
assert member.endpoint is not None
device_1_entity_id = await find_entity_id(DOMAIN, device_light_1, hass)
device_2_entity_id = await find_entity_id(DOMAIN, device_light_2, hass)
device_3_entity_id = await find_entity_id(DOMAIN, device_light_3, hass)
assert (
device_1_entity_id != device_2_entity_id
and device_1_entity_id != device_3_entity_id
)
assert device_2_entity_id != device_3_entity_id
group_entity_id = async_find_group_entity_id(hass, DOMAIN, zha_group)
assert hass.states.get(group_entity_id) is not None
assert device_1_entity_id in zha_group.member_entity_ids
assert device_2_entity_id in zha_group.member_entity_ids
assert device_3_entity_id not in zha_group.member_entity_ids
group_cluster_on_off = zha_group.endpoint[general.OnOff.cluster_id]
group_cluster_level = zha_group.endpoint[general.LevelControl.cluster_id]
group_cluster_identify = zha_group.endpoint[general.Identify.cluster_id]
dev1_cluster_on_off = device_light_1.device.endpoints[1].on_off
dev2_cluster_on_off = device_light_2.device.endpoints[1].on_off
dev3_cluster_on_off = device_light_3.device.endpoints[1].on_off
dev1_cluster_level = device_light_1.device.endpoints[1].level
await async_enable_traffic(
hass, [device_light_1, device_light_2, device_light_3], enabled=False
)
await hass.async_block_till_done()
# test that the lights were created and that they are unavailable
assert hass.states.get(group_entity_id).state == STATE_UNAVAILABLE
# allow traffic to flow through the gateway and device
await async_enable_traffic(hass, [device_light_1, device_light_2, device_light_3])
await hass.async_block_till_done()
# test that the lights were created and are off
assert hass.states.get(group_entity_id).state == STATE_OFF
# test turning the lights on and off from the HA
await async_test_on_off_from_hass(hass, group_cluster_on_off, group_entity_id)
# test short flashing the lights from the HA
await async_test_flash_from_hass(
hass, group_cluster_identify, group_entity_id, FLASH_SHORT
)
# test turning the lights on and off from the light
await async_test_on_off_from_light(hass, dev1_cluster_on_off, group_entity_id)
# test turning the lights on and off from the HA
await async_test_level_on_off_from_hass(
hass, group_cluster_on_off, group_cluster_level, group_entity_id
)
# test getting a brightness change from the network
await async_test_on_from_light(hass, dev1_cluster_on_off, group_entity_id)
await async_test_dimmer_from_light(
hass, dev1_cluster_level, group_entity_id, 150, STATE_ON
)
# test long flashing the lights from the HA
await async_test_flash_from_hass(
hass, group_cluster_identify, group_entity_id, FLASH_LONG
)
assert len(zha_group.members) == 2
# test some of the group logic to make sure we key off states correctly
await send_attributes_report(hass, dev1_cluster_on_off, {0: 1})
await send_attributes_report(hass, dev2_cluster_on_off, {0: 1})
await hass.async_block_till_done()
# test that group light is on
assert hass.states.get(device_1_entity_id).state == STATE_ON
assert hass.states.get(device_2_entity_id).state == STATE_ON
assert hass.states.get(group_entity_id).state == STATE_ON
await send_attributes_report(hass, dev1_cluster_on_off, {0: 0})
await hass.async_block_till_done()
# test that group light is still on
assert hass.states.get(device_1_entity_id).state == STATE_OFF
assert hass.states.get(device_2_entity_id).state == STATE_ON
assert hass.states.get(group_entity_id).state == STATE_ON
await send_attributes_report(hass, dev2_cluster_on_off, {0: 0})
await hass.async_block_till_done()
# test that group light is now off
assert hass.states.get(device_1_entity_id).state == STATE_OFF
assert hass.states.get(device_2_entity_id).state == STATE_OFF
assert hass.states.get(group_entity_id).state == STATE_OFF
await send_attributes_report(hass, dev1_cluster_on_off, {0: 1})
await hass.async_block_till_done()
# test that group light is now back on
assert hass.states.get(device_1_entity_id).state == STATE_ON
assert hass.states.get(device_2_entity_id).state == STATE_OFF
assert hass.states.get(group_entity_id).state == STATE_ON
# turn it off to test a new member add being tracked
await send_attributes_report(hass, dev1_cluster_on_off, {0: 0})
await hass.async_block_till_done()
assert hass.states.get(device_1_entity_id).state == STATE_OFF
assert hass.states.get(device_2_entity_id).state == STATE_OFF
assert hass.states.get(group_entity_id).state == STATE_OFF
# add a new member and test that his state is also tracked
await zha_group.async_add_members([GroupMember(device_light_3.ieee, 1)])
await send_attributes_report(hass, dev3_cluster_on_off, {0: 1})
await hass.async_block_till_done()
assert device_3_entity_id in zha_group.member_entity_ids
assert len(zha_group.members) == 3
assert hass.states.get(device_1_entity_id).state == STATE_OFF
assert hass.states.get(device_2_entity_id).state == STATE_OFF
assert hass.states.get(device_3_entity_id).state == STATE_ON
assert hass.states.get(group_entity_id).state == STATE_ON
# make the group have only 1 member and now there should be no entity
await zha_group.async_remove_members(
[GroupMember(device_light_2.ieee, 1), GroupMember(device_light_3.ieee, 1)]
)
assert len(zha_group.members) == 1
assert hass.states.get(group_entity_id) is None
assert device_2_entity_id not in zha_group.member_entity_ids
assert device_3_entity_id not in zha_group.member_entity_ids
# make sure the entity registry entry is still there
assert zha_gateway.ha_entity_registry.async_get(group_entity_id) is not None
# add a member back and ensure that the group entity was created again
await zha_group.async_add_members([GroupMember(device_light_3.ieee, 1)])
await send_attributes_report(hass, dev3_cluster_on_off, {0: 1})
await hass.async_block_till_done()
assert len(zha_group.members) == 2
assert hass.states.get(group_entity_id).state == STATE_ON
# add a 3rd member and ensure we still have an entity and we track the new one
await send_attributes_report(hass, dev1_cluster_on_off, {0: 0})
await send_attributes_report(hass, dev3_cluster_on_off, {0: 0})
await hass.async_block_till_done()
assert hass.states.get(group_entity_id).state == STATE_OFF
# this will test that _reprobe_group is used correctly
await zha_group.async_add_members(
[GroupMember(device_light_2.ieee, 1), GroupMember(coordinator.ieee, 1)]
)
await send_attributes_report(hass, dev2_cluster_on_off, {0: 1})
await hass.async_block_till_done()
assert len(zha_group.members) == 4
assert hass.states.get(group_entity_id).state == STATE_ON
await zha_group.async_remove_members([GroupMember(coordinator.ieee, 1)])
await hass.async_block_till_done()
assert hass.states.get(group_entity_id).state == STATE_ON
assert len(zha_group.members) == 3
# remove the group and ensure that there is no entity and that the entity registry is cleaned up
assert zha_gateway.ha_entity_registry.async_get(group_entity_id) is not None
await zha_gateway.async_remove_zigpy_group(zha_group.group_id)
assert hass.states.get(group_entity_id) is None
assert zha_gateway.ha_entity_registry.async_get(group_entity_id) is None
| 36.883861 | 100 | 0.707778 | 3,488 | 24,454 | 4.649656 | 0.073968 | 0.051794 | 0.038476 | 0.04569 | 0.803428 | 0.771797 | 0.736527 | 0.713158 | 0.674991 | 0.614996 | 0 | 0.016339 | 0.201603 | 24,454 | 662 | 101 | 36.939577 | 0.814331 | 0.081786 | 0 | 0.525151 | 0 | 0.002012 | 0.044254 | 0.024438 | 0 | 0 | 0.001098 | 0 | 0.193159 | 1 | 0 | false | 0 | 0.030181 | 0 | 0.038229 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 5 |
0a9dec3cf466f8c0cb88772f0f59b53adf535e4c | 47 | py | Python | radynpy/cdf/__init__.py | Goobley/radynpy | acf685f6ad17be63065fc468e40293b6cf063081 | [
"MIT"
] | 7 | 2019-01-27T20:41:38.000Z | 2020-02-18T16:27:26.000Z | radynpy/cdf/__init__.py | grahamkerr/radynpy | 63e06c63476b4cc74568da443f71c12412b83bac | [
"MIT"
] | 3 | 2020-02-25T18:51:20.000Z | 2020-03-19T13:02:14.000Z | radynpy/cdf/__init__.py | grahamkerr/radynpy | 63e06c63476b4cc74568da443f71c12412b83bac | [
"MIT"
] | 1 | 2020-02-18T00:20:16.000Z | 2020-02-18T00:20:16.000Z | from .CdfLoader import RadynData, LazyRadynData | 47 | 47 | 0.87234 | 5 | 47 | 8.2 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.085106 | 47 | 1 | 47 | 47 | 0.953488 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 1 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | 5 |
0aa1232f98c60100537cd3c2645a99661b2031d6 | 57 | py | Python | twitter_crawler/main.py | diffunity/kpmg-corona-blue | 93c063933981009af8d661b9b91dda5e2ebf68ab | [
"MIT"
] | 3 | 2021-10-05T07:47:03.000Z | 2021-10-05T10:32:40.000Z | twitter_crawler/main.py | diffunity/kpmg-corona-blue | 93c063933981009af8d661b9b91dda5e2ebf68ab | [
"MIT"
] | 6 | 2021-02-07T14:56:57.000Z | 2021-02-20T05:07:35.000Z | twitter_crawler/main.py | diffunity/kpmg-corona-blue | 93c063933981009af8d661b9b91dda5e2ebf68ab | [
"MIT"
] | 1 | 2021-02-20T05:59:27.000Z | 2021-02-20T05:59:27.000Z | #TODO: 5분 간격으로 크롤링하여 새로 올라운 게시물이 있으면 request를 보내는 main 함수 | 57 | 57 | 0.77193 | 12 | 57 | 3.666667 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.021739 | 0.192982 | 57 | 1 | 57 | 57 | 0.934783 | 0.982456 | 0 | null | 0 | null | 0 | 0 | null | 0 | 0 | 1 | null | 1 | null | true | 0 | 0 | null | null | null | 1 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 1 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 5 |
0aaa08abbbca50db9d83fc77ebe2d36ce0ff060d | 283 | py | Python | toontown/parties/DistributedPartyWinterCogActivityAI.py | TheFamiliarScoot/open-toontown | 678313033174ea7d08e5c2823bd7b473701ff547 | [
"BSD-3-Clause"
] | 99 | 2019-11-02T22:25:00.000Z | 2022-02-03T03:48:00.000Z | toontown/parties/DistributedPartyWinterCogActivityAI.py | TheFamiliarScoot/open-toontown | 678313033174ea7d08e5c2823bd7b473701ff547 | [
"BSD-3-Clause"
] | 42 | 2019-11-03T05:31:08.000Z | 2022-03-16T22:50:32.000Z | toontown/parties/DistributedPartyWinterCogActivityAI.py | TheFamiliarScoot/open-toontown | 678313033174ea7d08e5c2823bd7b473701ff547 | [
"BSD-3-Clause"
] | 57 | 2019-11-03T07:47:37.000Z | 2022-03-22T00:41:49.000Z | from direct.directnotify import DirectNotifyGlobal
from direct.distributed.DistributedObjectAI import DistributedObjectAI
class DistributedPartyWinterCogActivityAI(DistributedObjectAI):
notify = DirectNotifyGlobal.directNotify.newCategory('DistributedPartyWinterCogActivityAI')
| 47.166667 | 95 | 0.890459 | 19 | 283 | 13.263158 | 0.578947 | 0.079365 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.063604 | 283 | 5 | 96 | 56.6 | 0.950943 | 0 | 0 | 0 | 0 | 0 | 0.123675 | 0.123675 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.5 | 0 | 1 | 0 | 1 | 0 | 1 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 0 | 5 |
0ab11e366b3a9f2a62a55acbf826430ab1cb9b72 | 409,327 | py | Python | MayaTools/General/Scripts/ART_autoRigger.py | alexwidener/ArtToolsOSX_UnrealEngine | bb9971098576af937542ffbc841f37017df63391 | [
"Xnet",
"X11"
] | null | null | null | MayaTools/General/Scripts/ART_autoRigger.py | alexwidener/ArtToolsOSX_UnrealEngine | bb9971098576af937542ffbc841f37017df63391 | [
"Xnet",
"X11"
] | null | null | null | MayaTools/General/Scripts/ART_autoRigger.py | alexwidener/ArtToolsOSX_UnrealEngine | bb9971098576af937542ffbc841f37017df63391 | [
"Xnet",
"X11"
] | null | null | null | #Author: Jeremy Ernst
#Date: 4.9.13
import maya.cmds as cmds
import maya.mel as mel
import os, ast
class AutoRigger():
    def __init__(self, handCtrlSpace, progressBar):
        """Build the complete character rig from the joint mover setup.

        Runs the whole auto-rig build in order: core/spine, legs (per the
        cached leg style), arms, fingers, neck/head, custom joints, then
        cleans up the hierarchy, deletes the joint mover, and sets default
        rotate orders. Progress is reported through the given progressBar.

        :param handCtrlSpace: hand control space setting; stored on the
            instance (not read directly in this method — presumably used by
            the arm/finger builds; TODO confirm against those methods).
        :param progressBar: name of an existing Maya progressBar UI control
            used to report build progress (0-100).
        """
        self.handCtrlSpace = handCtrlSpace
        #get access to our maya tools
        toolsPath = os.path.join(cmds.internalVar(usd = True), "mayaTools.txt")
        if os.path.exists(toolsPath):
            f = open(toolsPath, 'r')
            self.mayaToolsDir = f.readline()
            f.close()
        #create a progress window to track the progress of the rig build
        self.progress = 0
        cmds.progressBar(progressBar, edit = True, progress=self.progress, status='Creating Spine Rig')
        #build the core of the rig
        import Modules.ART_Core
        coreNodes = Modules.ART_Core.RigCore()
        #BODY CONTROL
        self.buildHips()
        """
        #create the rig settings node
        "Rig_Settings" = cmds.group(empty = True, name = "Rig_Settings")
        cmds.setAttr("Rig_Settings" + ".tx", lock = True, keyable = False)
        cmds.setAttr("Rig_Settings" + ".ty", lock = True, keyable = False)
        cmds.setAttr("Rig_Settings" + ".tz", lock = True, keyable = False)
        cmds.setAttr("Rig_Settings" + ".rx", lock = True, keyable = False)
        cmds.setAttr("Rig_Settings" + ".ry", lock = True, keyable = False)
        cmds.setAttr("Rig_Settings" + ".rz", lock = True, keyable = False)
        cmds.setAttr("Rig_Settings" + ".sx", lock = True, keyable = False)
        cmds.setAttr("Rig_Settings" + ".sy", lock = True, keyable = False)
        cmds.setAttr("Rig_Settings" + ".sz", lock = True, keyable = False)
        cmds.setAttr("Rig_Settings" + ".v", lock = True, keyable = False)
        #build the spine rigs
        self.createDriverSkeleton()
        self.buildCoreComponents()
        """
        #to be replaced by modules
        fkControls = self.buildFKSpine()
        ikControls = self.buildIKSpine(fkControls)
        #build the leg rigs
        #first determine the leg style
        legStyle = cmds.getAttr("SkeletonSettings_Cache.legStyle")
        if legStyle == "Standard Biped":
            cmds.progressBar(progressBar, edit = True, progress = 20, status='Creating Leg Rigs')
            self.buildFKLegs()
            self.buildIKLegs()
            self.finishLegs()
            cmds.progressBar(progressBar, edit = True, progress = 30, status='Creating Toe Rigs')
            self.buildToes()
            cmds.progressBar(progressBar, edit = True, progress = 40, status='Creating Auto Hips and Spine')
            self.buildAutoHips()
            self.autoSpine()
        if legStyle == "Hind Leg":
            #NOTE(review): hind-leg rig build is not implemented here
            pass
        #build the arms
        cmds.progressBar(progressBar, edit = True, progress = 50, status='Creating Arm Rigs')
        spineBones = self.getSpineJoints()
        lastSpine = "driver_" + spineBones[-1]
        print lastSpine
        #build left and right arms parented to the last driver spine joint
        #NOTE(review): see Modules.ART_Arm.Arm for the meaning of the positional args
        import Modules.ART_Arm
        reload(Modules.ART_Arm)
        Modules.ART_Arm.Arm(True, "", None, "l", lastSpine, 6, True)
        Modules.ART_Arm.Arm(True, "", None, "r", lastSpine, 13, True)
        """
        self.buildFKArms()
        self.buildIkArms()
        """
        cmds.progressBar(progressBar, edit = True, progress = 60, status='Creating Finger Rigs')
        self.buildFingers()
        #build the neck and head rig
        cmds.progressBar(progressBar, edit = True, progress = 70, status='Creating Neck and Head Rigs')
        self.buildNeckAndHead()
        #rig extra joints
        cmds.progressBar(progressBar, edit = True, progress = 80, status='Creating Rigs for Custom Joints')
        createdControls = self.rigLeafJoints()
        createdJiggleNodes = self.rigJiggleJoints()
        createdChainNodes = self.rigCustomJointChains()
        #clean up the hierarchy
        cmds.progressBar(progressBar, edit = True, progress = 90, status='Cleaning up Scene')
        bodyGrp = cmds.group(empty = True, name = "body_grp")
        for obj in ["spine_splineIK_curve", "splineIK_spine_01_splineIK", "body_anim_space_switcher_follow"]:
            if cmds.objExists(obj):
                cmds.parent(obj, bodyGrp)
        if cmds.objExists("autoHips_sys_grp"):
            cmds.parent("autoHips_sys_grp", "body_anim")
        #"ctrl_rig" group is expected to exist already at this point
        rigGrp = "ctrl_rig"
        cmds.parent([bodyGrp, "leg_sys_grp"], rigGrp)
        """
        rigGrp = cmds.group(empty = True, name = "ctrl_rig")
        cmds.parent([bodyGrp, "leg_sys_grp", "Rig_Settings"], rigGrp)
        cmds.parent(rigGrp, "offset_anim")
        """
        #Arms
        """
        if cmds.objExists("arm_rig_master_grp_l"):
            cmds.setAttr("Rig_Settings.lArmMode", 1)
            if cmds.objExists("lowerarm_l_roll_grp"):
                cmds.parent("lowerarm_l_roll_grp", "arm_rig_master_grp_l")
            cmds.parent("arm_rig_master_grp_l", "ctrl_rig")
        if cmds.objExists("arm_rig_master_grp_r"):
            cmds.setAttr("Rig_Settings.rArmMode", 1)
            if cmds.objExists("lowerarm_r_roll_grp"):
                cmds.parent("lowerarm_r_roll_grp", "arm_rig_master_grp_r")
            cmds.parent("arm_rig_master_grp_r", "ctrl_rig")
        if cmds.objExists("arm_rig_master_grp_r") and cmds.objExists("arm_rig_master_grp_l"):
            armSysGrp = cmds.group(empty = True, name = "arm_sys_grp")
            cmds.parent(armSysGrp, "ctrl_rig")
            cmds.parent(["arm_rig_master_grp_r", "arm_rig_master_grp_l", "ik_wrist_l_anim_space_switcher_follow", "ik_wrist_r_anim_space_switcher_follow"], armSysGrp)
            #arm twists
            if cmds.objExists("upperarm_twist_grp_l"):
                cmds.parent("upperarm_twist_grp_l", armSysGrp)
            if cmds.objExists("upperarm_twist_grp_r"):
                cmds.parent("upperarm_twist_grp_r", armSysGrp)
        """
        if cmds.objExists("neck_01_fk_anim_grp"):
            cmds.parent("neck_01_fk_anim_grp", "ctrl_rig")
        #Fingers
        if cmds.objExists("finger_sys_grp_l"):
            cmds.parent("finger_sys_grp_l", "ctrl_rig")
        if cmds.objExists("finger_sys_grp_r"):
            cmds.parent("finger_sys_grp_r", "ctrl_rig")
        #Custom Joints (leaf, jiggle, chain)
        if len(createdControls) > 0:
            for each in createdControls:
                cmds.parent(each, "ctrl_rig")
        if len(createdJiggleNodes) > 0:
            for each in createdJiggleNodes:
                cmds.parent(each, "ctrl_rig")
        if len(createdChainNodes) > 0:
            for each in createdChainNodes:
                cmds.parent(each, "ctrl_rig")
        cmds.parent("head_sys_grp", "ctrl_rig")
        #finish grouping everything under 1 character grp
        #best-effort parenting: tolerate nodes already parented or locked
        if cmds.objExists("Proxy_Geo_Skin_Grp"):
            try:
                cmds.parent("Proxy_Geo_Skin_Grp", "rig_grp")
            except:
                pass
        if cmds.objExists("dynHairChain"):
            try:
                cmds.parent("dynHairChain", "rig_grp")
            except:
                pass
        #add world spaces to each space switch control
        self.addSpaces()
        #Hide all joints
        joints = cmds.ls(type = 'joint')
        for joint in joints:
            if cmds.getAttr(joint + ".v", settable = True):
                cmds.setAttr(joint + ".v", 0)
        cmds.progressBar(progressBar, edit = True, progress = 100, status='Cleaning up Scene')
        #delete the joint mover
        #everything under the mover is locked, so unlock all transforms first
        cmds.select("root_mover_grp", r = True, hi = True)
        cmds.select("Skeleton_Settings", add = True)
        nodes = cmds.ls(sl = True, transforms = True)
        cmds.select(clear = True)
        for node in nodes:
            cmds.lockNode(node, lock = False)
        cmds.lockNode("JointMover", lock = False)
        cmds.delete("JointMover")
        #find and delete junk nodes/clean scene
        for obj in ["invis_legs_Rig_Settings", "invis_legs_Rig_Settings1", "invis_legs_spine_splineIK_curve", "invis_legs_spine_splineIK_curve1","invis_legs_master_anim_space_switcher_follow", "invis_legs_master_anim_space_switcher_follow1"]:
            try:
                cmds.select("*" + obj + "*")
                selection = cmds.ls(sl = True)
                for each in selection:
                    if cmds.objExists(each):
                        cmds.delete(each)
            except:
                pass
        #delete any remaining top-level nodes left over from the invisible legs
        cmds.select(all = True)
        selection = cmds.ls(sl = True)
        for each in selection:
            if each.find("invis_") == 0:
                try:
                    parent = cmds.listRelatives(each, parent = True)
                    if parent == None:
                        cmds.delete(each)
                except:
                    pass
        #set default rotate Orders
        self.setDefaultRotateOrders()
        #end progress window
        cmds.select(clear = True)
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
def setDefaultRotateOrders(self):
cmds.setAttr("body_anim.rotateOrder", 5)
cmds.setAttr("hip_anim.rotateOrder", 5)
if cmds.objExists("mid_ik_anim"):
cmds.setAttr("mid_ik_anim.rotateOrder", 5)
cmds.setAttr("chest_ik_anim.rotateOrder", 5)
cmds.setAttr("head_fk_anim.rotateOrder", 5)
for control in ["neck_01_fk_anim", "neck_02_fk_anim", "neck_03_fk_anim"]:
if cmds.objExists(control):
cmds.setAttr(control + ".rotateOrder", 5)
for control in ["spine_01_anim", "spine_02_anim", "spine_03_anim", "spine_04_anim", "spine_05_anim"]:
if cmds.objExists(control):
cmds.setAttr(control + ".rotateOrder", 5)
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
def addSpaces(self):
cmds.select("*_space_switcher_follow")
nodes = cmds.ls(sl = True)
spaceSwitchers = []
for node in nodes:
if node.find("invis") != 0:
spaceSwitchers.append(node)
for node in spaceSwitchers:
#create a 'world' locator to constrain to
worldLoc = cmds.spaceLocator(name = node + "_world_pos")[0]
cmds.setAttr(worldLoc + ".v", 0)
#position world loc to be in same place as node
constraint = cmds.parentConstraint(node, worldLoc)[0]
cmds.delete(constraint)
#add the constraint between worldLoc and node
if node == "spine_01_space_switcher_follow":
constraint = cmds.orientConstraint(worldLoc, node)[0]
else:
constraint = cmds.parentConstraint(worldLoc, node)[0]
#add the attr to the space switcher node for that space
spaceSwitchNode = node.rpartition("_follow")[0]
cmds.select(spaceSwitchNode)
cmds.addAttr(ln = "space_world", minValue = 0, maxValue = 1, dv = 0, keyable = True)
#connect that attr to the constraint
cmds.connectAttr(spaceSwitchNode + ".space_world", constraint + "." + worldLoc + "W0")
#parent worldLoc under the offset_anim
if worldLoc.find("master_anim") == 0:
cmds.parent(worldLoc, "rig_grp")
else:
cmds.parent(worldLoc, "offset_anim")
#SETUP SPECIAL CASES
for node in spaceSwitchers:
if node == "chest_ik_anim_space_switcher_follow":
#create a locator named world aligned
spaceLoc = cmds.spaceLocator(name = "chest_ik_world_aligned")[0]
cmds.setAttr(spaceLoc + ".v",0)
#constrain it to the chest ik anim
constraint = cmds.pointConstraint("chest_ik_anim", spaceLoc)[0]
cmds.delete(constraint)
#duplicate the locator
worldOrientLoc = cmds.duplicate(spaceLoc, name = "chest_ik_world_orient")[0]
#orient constrain the space loc to the world orient loc
cmds.orientConstraint(worldOrientLoc, spaceLoc, mo = True)
#parent the space loc under the hip anim
cmds.parent(spaceLoc, "body_anim")
#parent the worldOrientLoc under the master anim
cmds.parent(worldOrientLoc, "master_anim")
#add attr to the space switcher node
spaceSwitchNode = node.rpartition("_follow")[0]
cmds.select(spaceSwitchNode)
cmds.addAttr(ln = "space_chest_ik_world_aligned", minValue = 0, maxValue = 1, dv = 0, keyable = True)
#add constraint to the new object on the follow node
constraint = cmds.parentConstraint(spaceLoc, node, mo = True)[0]
#hook up connections
targets = cmds.parentConstraint(constraint, q = True, targetList = True)
weight = 0
for i in range(len(targets)):
if targets[i].find(spaceLoc) != -1:
weight = i
cmds.connectAttr(spaceSwitchNode + ".space_chest_ik_world_aligned", constraint + "." + spaceLoc + "W" + str(weight))
if node == "ik_wrist_l_anim_space_switcher_follow":
spaceList = ["body_anim", "head_fk_anim"]
if cmds.objExists("chest_ik_anim"):
spaceList.append("chest_ik_anim")
for spaceObj in spaceList:
spaceSwitchNode = node.rpartition("_follow")[0]
#add attr to the space switcher node
cmds.select(spaceSwitchNode)
cmds.addAttr(ln = "space_" + spaceObj, minValue = 0, maxValue = 1, dv = 0, keyable = True)
#add constraint to the new object on the follow node
constraint = cmds.parentConstraint(spaceObj, node, mo = True)[0]
#hook up connections
targets = cmds.parentConstraint(constraint, q = True, targetList = True)
weight = 0
for i in range(len(targets)):
if targets[i].find(spaceObj) != -1:
weight = i
cmds.connectAttr(spaceSwitchNode + ".space_" + spaceObj, constraint + "." + spaceObj + "W" + str(weight))
if node == "ik_wrist_r_anim_space_switcher_follow":
spaceList = ["body_anim", "head_fk_anim"]
if cmds.objExists("chest_ik_anim"):
spaceList.append("chest_ik_anim")
for spaceObj in spaceList:
spaceSwitchNode = node.rpartition("_follow")[0]
#add attr to the space switcher node
cmds.select(spaceSwitchNode)
cmds.addAttr(ln = "space_" + spaceObj, minValue = 0, maxValue = 1, dv = 0, keyable = True)
#add constraint to the new object on the follow node
constraint = cmds.parentConstraint(spaceObj, node, mo = True)[0]
#hook up connections
targets = cmds.parentConstraint(constraint, q = True, targetList = True)
weight = 0
for i in range(len(targets)):
if targets[i].find(spaceObj) != -1:
weight = i
cmds.connectAttr(spaceSwitchNode + ".space_" + spaceObj, constraint + "." + spaceObj + "W" + str(weight))
if node == "ik_elbow_l_anim_space_switcher_follow":
spaceList = ["body_anim"]
if cmds.objExists("chest_ik_anim"):
spaceList.append("chest_ik_anim")
for spaceObj in spaceList:
spaceSwitchNode = node.rpartition("_follow")[0]
#add attr to the space switcher node
cmds.select(spaceSwitchNode)
cmds.addAttr(ln = "space_" + spaceObj, minValue = 0, maxValue = 1, dv = 0, keyable = True)
#add constraint to the new object on the follow node
constraint = cmds.parentConstraint(spaceObj, node, mo = True)[0]
#hook up connections
targets = cmds.parentConstraint(constraint, q = True, targetList = True)
weight = 0
for i in range(len(targets)):
if targets[i].find(spaceObj) != -1:
weight = i
cmds.connectAttr(spaceSwitchNode + ".space_" + spaceObj, constraint + "." + spaceObj + "W" + str(weight))
if node == "ik_elbow_r_anim_space_switcher_follow":
spaceList = ["body_anim"]
if cmds.objExists("chest_ik_anim"):
spaceList.append("chest_ik_anim")
for spaceObj in spaceList:
spaceSwitchNode = node.rpartition("_follow")[0]
#add attr to the space switcher node
cmds.select(spaceSwitchNode)
cmds.addAttr(ln = "space_" + spaceObj, minValue = 0, maxValue = 1, dv = 0, keyable = True)
#add constraint to the new object on the follow node
constraint = cmds.parentConstraint(spaceObj, node, mo = True)[0]
#hook up connections
targets = cmds.parentConstraint(constraint, q = True, targetList = True)
weight = 0
for i in range(int(len(targets))):
if targets[i].find(spaceObj) != -1:
weight = i
cmds.connectAttr(spaceSwitchNode + ".space_" + spaceObj, constraint + "." + spaceObj + "W" + str(weight))
if node == "ik_foot_anim_l_space_switcher_follow":
for spaceObj in ["body_anim"]:
spaceSwitchNode = node.rpartition("_follow")[0]
#add attr to the space switcher node
cmds.select(spaceSwitchNode)
cmds.addAttr(ln = "space_" + spaceObj, minValue = 0, maxValue = 1, dv = 0, keyable = True)
#add constraint to the new object on the follow node
constraint = cmds.parentConstraint(spaceObj, node, mo = True)[0]
#hook up connections
targets = cmds.parentConstraint(constraint, q = True, targetList = True)
weight = 0
for i in range(int(len(targets))):
if targets[i].find(spaceObj) != -1:
weight = i
cmds.connectAttr(spaceSwitchNode + ".space_" + spaceObj, constraint + "." + spaceObj + "W" + str(weight))
if node == "ik_foot_anim_r_space_switcher_follow":
for spaceObj in ["body_anim"]:
spaceSwitchNode = node.rpartition("_follow")[0]
#add attr to the space switcher node
cmds.select(spaceSwitchNode)
cmds.addAttr(ln = "space_" + spaceObj, minValue = 0, maxValue = 1, dv = 0, keyable = True)
#add constraint to the new object on the follow node
constraint = cmds.parentConstraint(spaceObj, node, mo = True)[0]
#hook up connections
targets = cmds.parentConstraint(constraint, q = True, targetList = True)
weight = 0
for i in range(int(len(targets))):
if targets[i].find(spaceObj) != -1:
weight = i
cmds.connectAttr(spaceSwitchNode + ".space_" + spaceObj, constraint + "." + spaceObj + "W" + str(weight))
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
def buildCoreComponents(self):
#builds the master, the root, the hips/body
#BODY CONTROL
self.buildHips()
#MASTER CONTROL
masterControl = self.createControl("circle", 150, "master_anim")
constraint = cmds.pointConstraint("root", masterControl)[0]
cmds.delete(constraint)
cmds.makeIdentity(masterControl, apply = True)
cmds.setAttr(masterControl + ".overrideEnabled", 1)
cmds.setAttr(masterControl + ".overrideColor", 18)
spaceSwitchFollow = cmds.group(empty = True, name = masterControl + "_space_switcher_follow")
constraint = cmds.parentConstraint("root", spaceSwitchFollow)[0]
cmds.delete(constraint)
spaceSwitcher = cmds.group(empty = True, name = masterControl + "_space_switcher")
constraint = cmds.parentConstraint("root", spaceSwitcher)[0]
cmds.delete(constraint)
cmds.parent(spaceSwitcher, spaceSwitchFollow)
cmds.parent(masterControl, spaceSwitcher)
cmds.makeIdentity(masterControl, apply = True)
#OFFSET CONTROL
offsetControl = self.createControl("square", 140, "offset_anim")
constraint = cmds.pointConstraint("root", offsetControl)[0]
cmds.delete(constraint)
cmds.parent(offsetControl, masterControl)
cmds.makeIdentity(offsetControl, apply = True)
cmds.setAttr(offsetControl + ".overrideEnabled", 1)
cmds.setAttr(offsetControl + ".overrideColor", 17)
#ROOT ANIM
rootControl = self.createControl("sphere", 10, "root_anim")
constraint = cmds.parentConstraint("driver_root", rootControl)[0]
cmds.delete(constraint)
cmds.parent(rootControl, masterControl)
cmds.makeIdentity(rootControl, apply = True)
cmds.parentConstraint(rootControl, "driver_root")
cmds.setAttr(rootControl + ".overrideEnabled", 1)
cmds.setAttr(rootControl + ".overrideColor", 30)
for attr in [".sx", ".sy", ".sz", ".v"]:
cmds.setAttr(masterControl + attr, lock = True, keyable = False)
cmds.setAttr(offsetControl + attr, lock = True, keyable = False)
cmds.setAttr(rootControl + attr, lock = True, keyable = False)
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
def buildHips(self):
#create the grp and position and orient it correctly
bodyGrp = cmds.group(empty = True, name = "body_anim_grp")
bodyCtrl = self.createControl("square", 100, "body_anim")
constraint = cmds.parentConstraint("pelvis", bodyGrp)[0]
cmds.delete(constraint)
#world alignment
for attr in [".rx", ".ry", ".rz"]:
print cmds.getAttr(bodyGrp + attr)
if cmds.getAttr(bodyGrp + attr) < 45:
if cmds.getAttr(bodyGrp + attr) > 0:
cmds.setAttr(bodyGrp + attr, 0)
if cmds.getAttr(bodyGrp + attr) >= 80:
if cmds.getAttr(bodyGrp + attr) < 90:
cmds.setAttr(bodyGrp + attr, 90)
if cmds.getAttr(bodyGrp + attr) > 90:
if cmds.getAttr(bodyGrp + attr) < 100:
cmds.setAttr(bodyGrp + attr, 90)
if cmds.getAttr(bodyGrp + attr) <= -80:
if cmds.getAttr(bodyGrp + attr) > -90:
cmds.setAttr(bodyGrp + attr, -90)
if cmds.getAttr(bodyGrp + attr) > -90:
if cmds.getAttr(bodyGrp + attr) < -100:
cmds.setAttr(bodyGrp + attr, -90)
for attr in [".rx", ".ry", ".rz"]:
print cmds.getAttr(bodyGrp + attr)
#create space switcher
spaceSwitcherFollow = cmds.duplicate(bodyGrp, name = "body_anim_space_switcher_follow")[0]
spaceSwitcher = cmds.duplicate(bodyGrp, name = "body_anim_space_switcher")[0]
cmds.parent(spaceSwitcher, spaceSwitcherFollow)
cmds.parent(bodyGrp, spaceSwitcher)
#create temp duplicate and orient control to joint
tempDupe = cmds.duplicate(bodyCtrl)[0]
constraint = cmds.parentConstraint("pelvis", bodyCtrl)[0]
cmds.delete(constraint)
#parent control to grp
cmds.parent(bodyCtrl, bodyGrp)
constraint = cmds.orientConstraint(tempDupe, bodyCtrl)[0]
cmds.delete(constraint)
cmds.makeIdentity(bodyCtrl, t = 1, r = 1, s = 1, apply = True)
#clean up body control creation
cmds.delete(tempDupe)
#set control color
cmds.setAttr(bodyCtrl + ".overrideEnabled", 1)
cmds.setAttr(bodyCtrl + ".overrideColor", 17)
#lock attrs
for attr in [".sx", ".sy", ".sz", ".v"]:
cmds.setAttr(bodyCtrl + attr, lock = True, keyable = False)
#build pelvis
self.buildPelvisControl()
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
def buildPelvisControl(self):
#create the grp and position and orient it correctly
hipGrp = cmds.group(empty = True, name = "hip_anim_grp")
hipCtrl = self.createControl("circle", 60, "hip_anim")
constraint = cmds.parentConstraint("pelvis", hipGrp)[0]
cmds.delete(constraint)
#create temp duplicate and orient control to joint
tempDupe = cmds.duplicate(hipCtrl)[0]
constraint = cmds.parentConstraint("pelvis", hipCtrl)[0]
cmds.delete(constraint)
#parent control to grp
cmds.parent(hipCtrl, hipGrp)
constraint = cmds.orientConstraint(tempDupe, hipCtrl)[0]
cmds.delete(constraint)
cmds.makeIdentity(hipCtrl, t = 1, r = 1, s = 1, apply = True)
#parent the grp to the body anim
cmds.parent(hipGrp, "body_anim")
#clean up body control creation
cmds.delete(tempDupe)
#set control color
cmds.setAttr(hipCtrl + ".overrideEnabled", 1)
cmds.setAttr(hipCtrl + ".overrideColor", 18)
#lock attrs
for attr in [".sx", ".sy", ".sz", ".v"]:
cmds.setAttr(hipCtrl + attr, lock = True, keyable = False)
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
def buildFKSpine(self):
#find the number of spine bones from the skeleton settings
spineJoints = self.getSpineJoints()
fkControls = []
parent = None
for joint in spineJoints:
if joint == "spine_01":
#add space switcher node to base of spine
spaceSwitcherFollow = cmds.group(empty = True, name = joint + "_space_switcher_follow")
constraint = cmds.parentConstraint(joint, spaceSwitcherFollow)[0]
cmds.delete(constraint)
spaceSwitcher = cmds.duplicate(spaceSwitcherFollow, name = joint + "_space_switcher")[0]
cmds.parent(spaceSwitcher, spaceSwitcherFollow)
#create an empty group in the same space as the joint
group = cmds.group(empty = True, name = joint + "_anim_grp")
constraint = cmds.parentConstraint(joint, group)[0]
cmds.delete(constraint)
#create an additional layer of group that has zeroed attrs
offsetGroup = cmds.group(empty = True, name = joint + "_anim_offset_grp")
constraint = cmds.parentConstraint(joint, offsetGroup)[0]
cmds.delete(constraint)
cmds.parent(offsetGroup, group)
#create a control object in the same space as the joint
control = self.createControl("circle", 45, joint + "_anim")
tempDupe = cmds.duplicate(control)[0]
constraint = cmds.parentConstraint(joint, control)[0]
cmds.delete(constraint)
fkControls.append(control)
#parent the control object to the group
cmds.parent(control, offsetGroup)
constraint = cmds.orientConstraint(tempDupe, control, skip = ["x", "z"])[0]
cmds.delete(constraint)
cmds.makeIdentity(control, t = 1, r = 1, s = 1, apply = True)
#setup hierarchy
if parent != None:
cmds.parent(group, parent, absolute = True)
else:
cmds.parent(group, spaceSwitcher)
cmds.parent(spaceSwitcherFollow, "body_anim")
#set the parent to be the current spine control
parent = control
#clean up
cmds.delete(tempDupe)
for attr in [".sx", ".sy", ".sz", ".v"]:
cmds.setAttr(control + attr, lock = True, keyable = False)
#set the control's color
cmds.setAttr(control + ".overrideEnabled", 1)
cmds.setAttr(control + ".overrideColor", 18)
#create length attr on spine controls. need to find up axis for control first
upAxis = self.getUpAxis(control)
cmds.aliasAttr("length", control + ".translate" + upAxis)
return fkControls
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
def buildIKSpine(self, fkControls):
numSpineBones = cmds.getAttr("Skeleton_Settings.numSpineBones")
if numSpineBones > 2:
#duplicate the spine joints we'll need for the spline IK
spineJoints = self.getSpineJoints()
parent = None
rigJoints = []
for joint in spineJoints:
spineBone = cmds.duplicate(joint, parentOnly = True, name = "splineIK_" + joint)[0]
if parent != None:
cmds.parent(spineBone, parent)
else:
cmds.parent(spineBone, world = True)
parent = spineBone
rigJoints.append(str(spineBone))
for joint in rigJoints:
twistJoint = cmds.duplicate(joint, name = "twist_" + joint, parentOnly = True)[0]
cmds.parent(twistJoint, joint)
#create the spline IK
ikNodes = cmds.ikHandle(sj = str(rigJoints[0]), ee = str(rigJoints[len(rigJoints) - 1]), sol = "ikSplineSolver", createCurve = True, simplifyCurve = True, parentCurve = False, name = str(rigJoints[0]) + "_splineIK")
ikHandle = ikNodes[0]
ikCurve = ikNodes[2]
ikCurve = cmds.rename(ikCurve, "spine_splineIK_curve")
cmds.setAttr(ikCurve + ".inheritsTransform", 0)
cmds.setAttr(ikHandle + ".v", 0)
cmds.setAttr(ikCurve + ".v", 0)
#create the three joints to skin the curve to
botJoint = cmds.duplicate(rigJoints[0], name = "spine_splineIK_bottom_joint", parentOnly = True)[0]
topJoint = cmds.duplicate(rigJoints[len(rigJoints) - 1], name = "spine_splineIK_top_joint", parentOnly = True)[0]
midJoint = cmds.duplicate(topJoint, name = "spine_splineIK_mid_joint", parentOnly = True)[0]
cmds.parent([botJoint, topJoint,midJoint], world = True)
constraint = cmds.pointConstraint([botJoint, topJoint], midJoint)[0]
cmds.delete(constraint)
#skin the joints to the curve
cmds.select([botJoint, topJoint, midJoint])
skin = cmds.skinCluster( [botJoint, topJoint, midJoint], ikCurve, toSelectedBones = True )[0]
#skin weight the curve
curveShape = cmds.listRelatives(ikCurve, shapes = True)[0]
numSpans = cmds.getAttr(curveShape + ".spans")
degree = cmds.getAttr(curveShape + ".degree")
numCVs = numSpans + degree
#this should always be the case, but just to be safe
if numCVs == 4:
cmds.skinPercent(skin, ikCurve + ".cv[0]", transformValue = [(botJoint, 1.0)])
cmds.skinPercent(skin, ikCurve + ".cv[1]", transformValue = [(botJoint, 0.5), (midJoint, 0.5)])
cmds.skinPercent(skin, ikCurve + ".cv[2]", transformValue = [(midJoint, 0.5), (topJoint, 0.5)])
cmds.skinPercent(skin, ikCurve + ".cv[3]", transformValue = [(topJoint, 1.0)])
#create the controls
#TOP CTRL
topCtrl = self.createControl("circle", 50, "chest_ik_anim")
#set the control's color
cmds.setAttr(topCtrl + ".overrideEnabled", 1)
cmds.setAttr(topCtrl + ".overrideColor", 17)
#position the control
constraint = cmds.pointConstraint(topJoint, topCtrl)[0]
cmds.delete(constraint)
#create the control grp
topCtrlGrp = cmds.group(empty = True, name = topCtrl + "_grp")
constraint = cmds.parentConstraint(topJoint, topCtrlGrp)[0]
cmds.delete(constraint)
#create the top control driver group
topCtrlDriver = cmds.duplicate(topCtrlGrp, name = "chest_ik_anim_driver_grp")
#create the space switcher group
spaceSwitcherFollow = cmds.group(empty = True, name = topCtrl + "_space_switcher_follow")
constraint = cmds.parentConstraint(topCtrlGrp, spaceSwitcherFollow)[0]
cmds.delete(constraint)
spaceSwitcher = cmds.duplicate(spaceSwitcherFollow, parentOnly = True, name = topCtrl + "_space_switcher")[0]
#parent objects
cmds.parent(spaceSwitcher, spaceSwitcherFollow)
cmds.parent(topCtrlGrp, spaceSwitcher)
cmds.parent(topCtrlDriver, topCtrlGrp)
cmds.parent(topCtrl, topCtrlDriver)
cmds.makeIdentity(topCtrl, t = 1, r = 1, s = 1, apply = True)
cmds.parent(topJoint, topCtrl)
#MID CTRL
midCtrl = self.createControl("circle", 45, "mid_ik_anim")
#set the control's color
cmds.setAttr(midCtrl + ".overrideEnabled", 1)
cmds.setAttr(midCtrl + ".overrideColor", 18)
#position the control
constraint = cmds.pointConstraint(midJoint, midCtrl)[0]
cmds.delete(constraint)
#create the control grp
midCtrlGrp = cmds.group(empty = True, name = midCtrl + "_grp")
constraint = cmds.parentConstraint(midJoint, midCtrlGrp)[0]
cmds.delete(constraint)
#mid control driver grp
midCtrlDriver = cmds.duplicate(midCtrlGrp, name = "mid_ik_anim_driver_grp")
midCtrlTranslateDriver = cmds.duplicate(midCtrlGrp, name = "mid_ik_anim_translate_driver_grp")
#parent objects
cmds.parent(midCtrl, midCtrlDriver)
cmds.parent(midCtrlDriver, midCtrlTranslateDriver)
cmds.parent(midCtrlTranslateDriver, midCtrlGrp)
cmds.makeIdentity(midCtrl, t = 1, r = 1, s = 1, apply = True)
cmds.parent(midJoint, midCtrl)
cmds.parent(botJoint, "hip_anim")
#ADDING STRETCH
#add the attr to the top ctrl
cmds.select(topCtrl)
cmds.addAttr(longName='stretch', defaultValue=0, minValue=0, maxValue=1, keyable = True)
cmds.addAttr(longName='squash', defaultValue=0, minValue=0, maxValue=1, keyable = True)
#create the curveInfo node#find
cmds.select(ikCurve)
curveInfoNode = cmds.arclen(cmds.ls(sl = True), ch = True )
originalLength = cmds.getAttr(curveInfoNode + ".arcLength")
#create the multiply/divide node that will get the scale factor
divideNode = cmds.shadingNode("multiplyDivide", asUtility = True)
divideNode_Inverse = cmds.shadingNode("multiplyDivide", asUtility = True)
cmds.setAttr(divideNode + ".operation", 2)
cmds.setAttr(divideNode + ".input2X", originalLength)
cmds.setAttr(divideNode_Inverse + ".operation", 2)
cmds.setAttr(divideNode_Inverse + ".input1X", originalLength)
#create the blendcolors node
blenderNode = cmds.shadingNode("blendColors", asUtility = True)
cmds.setAttr(blenderNode + ".color2R", 1)
blenderNode_Inverse = cmds.shadingNode("blendColors", asUtility = True)
cmds.setAttr(blenderNode_Inverse + ".color2R", 1)
#connect attrs
cmds.connectAttr(curveInfoNode + ".arcLength", divideNode + ".input1X")
cmds.connectAttr(curveInfoNode + ".arcLength", divideNode_Inverse + ".input2X")
cmds.connectAttr(divideNode + ".outputX", blenderNode + ".color1R")
cmds.connectAttr(divideNode_Inverse + ".outputX", blenderNode_Inverse + ".color1R")
cmds.connectAttr(topCtrl + ".stretch", blenderNode + ".blender")
cmds.connectAttr(topCtrl + ".squash", blenderNode_Inverse + ".blender")
upAxis = self.getUpAxis(topCtrl)
if upAxis == "X":
axisB = "Y"
axisC = "Z"
if upAxis == "Y":
axisB = "X"
axisC = "Z"
if upAxis == "Z":
axisB = "X"
axisC = "Y"
for i in range(len(rigJoints) - 2):
children = cmds.listRelatives(rigJoints[i], children = True)
for child in children:
if child.find("twist") != -1:
twistJoint = child
cmds.connectAttr(blenderNode_Inverse + ".outputR", twistJoint + ".scale" + axisB)
cmds.connectAttr(blenderNode_Inverse + ".outputR", twistJoint + ".scale" + axisC)
cmds.connectAttr(blenderNode + ".outputR", rigJoints[0] + ".scale" + upAxis)
#add twist amount attrs and setup
cmds.select(topCtrl)
cmds.addAttr(longName='twist_amount', defaultValue=1, minValue=0, keyable = True)
#find number of spine joints and divide 1 by numSpineJoints
num = len(spineJoints)
val = 1.0/float(num)
twistamount = val
locGrp = cmds.group(empty = True, name = "spineIK_twist_grp")
cmds.parent(locGrp, "body_anim")
for i in range(int(num - 1)):
#create a locator that will be orient constrained between the body and chest
locator = cmds.spaceLocator(name = spineJoints[i] + "_twistLoc")[0]
group = cmds.group(empty = True, name = spineJoints[i] + "_twistLocGrp")
constraint = cmds.parentConstraint(spineJoints[i], locator)[0]
cmds.delete(constraint)
constraint = cmds.parentConstraint(spineJoints[i], group)[0]
cmds.delete(constraint)
cmds.parent(locator, group)
cmds.parent(group, locGrp)
cmds.setAttr(locator + ".v", 0, lock = True)
#duplicate the locator and parent it under the group. This will be the locator that takes the rotation x twist amount and gives us the final value
orientLoc = cmds.duplicate(locator, name = spineJoints[i] + "_orientLoc")[0]
#create constraints between body/chest
constraint = cmds.orientConstraint(["body_anim", topCtrl], locator)[0]
#set weights on constraint
firstValue = 1 - twistamount
secondValue = 1 - firstValue
cmds.setAttr(constraint + ".body_animW0", firstValue)
cmds.setAttr(constraint + "." + topCtrl + "W1", secondValue)
#factor in twist amount
twistMultNode = cmds.shadingNode("multiplyDivide", asUtility = True, name = spineJoints[i] + "_twist_amount")
#expose the twistAmount on the control as an attr
cmds.connectAttr(topCtrl + ".twist_amount", twistMultNode + ".input2X")
cmds.connectAttr(topCtrl + ".twist_amount", twistMultNode + ".input2Y")
cmds.connectAttr(topCtrl + ".twist_amount", twistMultNode + ".input2Z")
cmds.connectAttr(locator + ".rotate", twistMultNode + ".input1")
cmds.connectAttr(twistMultNode + ".output", orientLoc + ".rotate")
#constrain the spine joint to the orientLoc
if upAxis == "X":
skipped = ["y", "z"]
if upAxis == "Y":
skipped = ["x", "z"]
if upAxis == "Z":
skipped = ["x", "y"]
cmds.orientConstraint(orientLoc, "twist_splineIK_" + spineJoints[i], skip = skipped)
twistamount = twistamount + val
#parent the components to the body anim
cmds.parent(midCtrlGrp, "body_anim")
cmds.parent(midCtrl, world = True)
cmds.parent(midJoint, world = True)
for attr in [".rx", ".ry", ".rz"]:
cmds.setAttr(midCtrlGrp + attr, 0)
cmds.parent(midCtrl, midCtrlDriver)
cmds.makeIdentity(midCtrl, t = 1, r = 1, s = 0, apply = True)
cmds.parent(midJoint, midCtrl)
cmds.parent(spaceSwitcherFollow, "body_anim")
cmds.parent(rigJoints[0], "body_anim")
#world alignment
cmds.parent(topCtrl, world = True)
cmds.parent(topJoint, world = True)
for attr in [".rx", ".ry", ".rz"]:
if cmds.getAttr(spaceSwitcherFollow + attr) < 45:
if cmds.getAttr(spaceSwitcherFollow + attr) > 0:
cmds.setAttr(spaceSwitcherFollow + attr, 0)
if cmds.getAttr(spaceSwitcherFollow + attr) >= 80:
if cmds.getAttr(spaceSwitcherFollow + attr) < 90:
cmds.setAttr(spaceSwitcherFollow + attr, 90)
if cmds.getAttr(spaceSwitcherFollow + attr) > 90:
if cmds.getAttr(spaceSwitcherFollow + attr) < 100:
cmds.setAttr(spaceSwitcherFollow + attr, 90)
if cmds.getAttr(spaceSwitcherFollow + attr) <= -80:
if cmds.getAttr(spaceSwitcherFollow + attr) > -90:
cmds.setAttr(spaceSwitcherFollow + attr, -90)
if cmds.getAttr(spaceSwitcherFollow + attr) > -90:
if cmds.getAttr(spaceSwitcherFollow + attr) < -100:
cmds.setAttr(spaceSwitcherFollow + attr, -90)
cmds.parent(topCtrl, topCtrlDriver)
cmds.makeIdentity(topCtrl, t = 1, r = 1, s = 0, apply = True)
cmds.parent(topJoint, topCtrl)
#hookup spine driver joints
driverJoints = []
for joint in rigJoints:
driverJoint = joint.partition("splineIK_")[2]
driverJoint = "driver_" + driverJoint
driverJoints.append(driverJoint)
#hookup spine to driver
self.hookupSpine(rigJoints, fkControls)
#control driver joints
children = cmds.listRelatives(rigJoints[len(rigJoints) -1], children = True)
for child in children:
if child.find("twist") != -1:
twistJoint = child
topSpineJointConstraint = cmds.pointConstraint(topJoint, twistJoint, mo = True)[0]
topSpineBone = twistJoint.partition("twist_")[2]
cmds.pointConstraint(topSpineBone, twistJoint)[0]
#connect attr on top spine joint constraint
target = cmds.pointConstraint(topSpineJointConstraint, q = True, weightAliasList = True)[0]
cmds.connectAttr(topCtrl + ".stretch", topSpineJointConstraint + "." + target)
#create stretch meter attr
cmds.select(topCtrl)
cmds.addAttr(longName='stretchFactor',keyable = True)
cmds.connectAttr(divideNode + ".outputX", topCtrl + ".stretchFactor")
cmds.setAttr(topCtrl + ".stretchFactor", lock = True)
cmds.select(midCtrl)
cmds.addAttr(longName='stretchFactor',keyable = True)
cmds.connectAttr(topCtrl + ".stretchFactor", midCtrl + ".stretchFactor")
cmds.setAttr(midCtrl + ".stretchFactor", lock = True)
#lock and hide attrs that should not be keyable
for control in [topCtrl, midCtrl]:
for attr in [".sx", ".sy", ".sz", ".v"]:
cmds.setAttr(control + attr, keyable = False, lock = True)
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
def buildFKLegs(self):
    """Build the FK leg rigs for both sides.

    For each side ("l"/"r") this duplicates the driver thigh/calf/foot
    (and ball, when a driver ball joint exists) into an fk_leg_* chain,
    creates an FK control per joint (thigh, calf, foot, optional ball),
    positions each control at its joint's pivot, parents the control
    groups into an FK hierarchy under the thigh orient group, orient
    constrains each FK joint to its control, locks the non-animatable
    channels, and colors the thigh control (6 = blue for left,
    13 = red for right).

    Relies on scene nodes created by earlier build steps:
    driver_thigh/calf/foot/ball_<side> and body_anim.
    """
    # Channels that should never be animated on the FK leg controls.
    lockedAttrs = (".tx", ".ty", ".tz", ".sx", ".sy", ".sz", ".v")

    for side in ["l", "r"]:
        ball = False

        # -- create the FK joint chain from the driver skeleton --
        fkThighJoint = cmds.duplicate("driver_thigh_" + side, name = "fk_leg_thigh_" + side, parentOnly = True)[0]
        fkCalfJoint = cmds.duplicate("driver_calf_" + side, name = "fk_leg_calf_" + side, parentOnly = True)[0]
        fkFootJoint = cmds.duplicate("driver_foot_" + side, name = "fk_leg_foot_" + side, parentOnly = True)[0]
        if cmds.objExists("driver_ball_" + side):
            ball = True
            fkBallJoint = cmds.duplicate("driver_ball_" + side, name = "fk_leg_ball_" + side, parentOnly = True)[0]

        # un-parent the duplicates, then rebuild them as a clean chain
        for joint in [fkThighJoint, fkCalfJoint, fkFootJoint]:
            cmds.parent(joint, world = True)
        if ball:
            cmds.parent(fkBallJoint, fkFootJoint)
        cmds.parent(fkFootJoint, fkCalfJoint)
        cmds.parent(fkCalfJoint, fkThighJoint)
        # freeze rotations so the chain has clean joint orients
        cmds.makeIdentity(fkThighJoint, t = 0, r = 1, s = 0, apply = True)

        # -- THIGH control --
        fkThighCtrl = self.createControl("circle", 30, "fk_thigh_" + side + "_anim")
        cmds.setAttr(fkThighCtrl + ".ry", -90)
        cmds.makeIdentity(fkThighCtrl, r = 1, apply = True)
        fkThighCtrlGrp = cmds.group(empty = True, name = "fk_thigh_" + side + "_anim_grp")
        # snap group and control to the thigh joint via throwaway constraints
        constraint = cmds.parentConstraint(fkThighJoint, fkThighCtrlGrp)[0]
        cmds.delete(constraint)
        constraint = cmds.parentConstraint(fkThighJoint, fkThighCtrl)[0]
        cmds.delete(constraint)
        # orient group is driven by a world node under body_anim so the
        # thigh control can keep a world-ish orientation
        fkThighOrientGrp = cmds.duplicate(fkThighCtrlGrp, parentOnly = True, name = "fk_thigh_" + side + "_orient_grp")[0]
        fkThighWorldNode = cmds.duplicate(fkThighOrientGrp, parentOnly = True, name = "fk_thigh_" + side + "_world")[0]
        cmds.orientConstraint(fkThighWorldNode, fkThighOrientGrp)
        cmds.parent(fkThighWorldNode, "body_anim")
        cmds.parent(fkThighCtrl, fkThighCtrlGrp)
        cmds.parent(fkThighCtrlGrp, fkThighOrientGrp)

        # slide the control shape halfway down the thigh so it sits
        # between the hip and the knee, while the pivot stays at the hip
        thighPos = cmds.xform("driver_thigh_" + side, q = True, ws = True, t = True)
        kneePos = cmds.xform("driver_calf_" + side, q = True, ws = True, t = True)
        dist = (thighPos[2] - kneePos[2]) / 2
        upAxis = self.getUpAxis(fkThighCtrl)
        if side == "l":
            cmds.setAttr(fkThighCtrl + ".translate" + upAxis, dist * -1)
        else:
            cmds.setAttr(fkThighCtrl + ".translate" + upAxis, dist)
        piv = cmds.xform(fkThighJoint, q = True, ws = True, rotatePivot = True)
        cmds.xform(fkThighCtrl, ws = True, piv = (piv[0], piv[1], piv[2]))
        for attr in lockedAttrs:
            cmds.setAttr(fkThighCtrl + attr, lock = True, keyable = False)

        # -- CALF control --
        fkCalfCtrl = self.createControl("semiCircle", 5, "fk_calf_" + side + "_anim")
        cmds.makeIdentity(fkCalfCtrl, s = 1, apply = True)
        cmds.setAttr(fkCalfCtrl + ".sx", .5)
        cmds.setAttr(fkCalfCtrl + ".sy", .75)
        cmds.setAttr(fkCalfCtrl + ".rx", 180)
        cmds.setAttr(fkCalfCtrl + ".ry", -90)
        # freeze only scale here; rotation is frozen after parenting below
        cmds.makeIdentity(fkCalfCtrl, s = 1, apply = True)
        fkCalfCtrlGrp = cmds.group(empty = True, name = "fk_calf_" + side + "_anim_grp")
        constraint = cmds.parentConstraint(fkCalfJoint, fkCalfCtrlGrp)[0]
        cmds.delete(constraint)
        # control itself is only point-snapped, keeping its shaped orientation
        constraint = cmds.pointConstraint(fkCalfJoint, fkCalfCtrl)[0]
        cmds.delete(constraint)
        cmds.parent(fkCalfCtrl, fkCalfCtrlGrp)
        piv = cmds.xform(fkCalfJoint, q = True, ws = True, rotatePivot = True)
        cmds.xform(fkCalfCtrl, ws = True, piv = (piv[0], piv[1], piv[2]))
        cmds.parent(fkCalfCtrlGrp, fkThighCtrl)
        cmds.makeIdentity(fkCalfCtrl, r = 1, apply = True)
        for attr in lockedAttrs:
            cmds.setAttr(fkCalfCtrl + attr, lock = True, keyable = False)

        # -- FOOT control --
        fkFootCtrl = self.createControl("circle", 17, "fk_foot_" + side + "_anim")
        cmds.setAttr(fkFootCtrl + ".ry", -90)
        cmds.makeIdentity(fkFootCtrl, r = 1, apply = True)
        fkFootCtrlGrp = cmds.group(empty = True, name = "fk_foot_" + side + "_anim_grp")
        constraint = cmds.parentConstraint(fkFootJoint, fkFootCtrlGrp)[0]
        cmds.delete(constraint)
        constraint = cmds.parentConstraint(fkFootJoint, fkFootCtrl)[0]
        cmds.delete(constraint)
        cmds.parent(fkFootCtrl, fkFootCtrlGrp)
        piv = cmds.xform(fkFootJoint, q = True, ws = True, rotatePivot = True)
        cmds.xform(fkFootCtrl, ws = True, piv = (piv[0], piv[1], piv[2]))
        cmds.parent(fkFootCtrlGrp, fkCalfCtrl)
        for attr in lockedAttrs:
            cmds.setAttr(fkFootCtrl + attr, lock = True, keyable = False)

        # -- BALL control (only when the rig has a ball joint) --
        if ball:
            fkBallCtrl = self.createControl("arrowOnBall", 2, "fk_ball_" + side + "_anim")
            # mirror the arrow shape across sides
            if side == "l":
                cmds.setAttr(fkBallCtrl + ".rx", -90)
                cmds.makeIdentity(fkBallCtrl, t = 1, r = 1, s = 1, apply = True)
            else:
                cmds.setAttr(fkBallCtrl + ".rx", 90)
                cmds.makeIdentity(fkBallCtrl, t = 1, r = 1, s = 1, apply = True)
            fkBallCtrlGrp = cmds.group(empty = True, name = "fk_ball_" + side + "_anim_grp")
            constraint = cmds.parentConstraint(fkBallJoint, fkBallCtrlGrp)[0]
            cmds.delete(constraint)
            constraint = cmds.parentConstraint(fkBallJoint, fkBallCtrl)[0]
            cmds.delete(constraint)
            cmds.parent(fkBallCtrl, fkBallCtrlGrp)
            piv = cmds.xform(fkBallJoint, q = True, ws = True, rotatePivot = True)
            cmds.xform(fkBallCtrl, ws = True, piv = (piv[0], piv[1], piv[2]))
            cmds.parent(fkBallCtrlGrp, fkFootCtrl)
            for attr in lockedAttrs:
                cmds.setAttr(fkBallCtrl + attr, lock = True, keyable = False)

        # -- hook the FK joints up to follow their controls --
        cmds.orientConstraint(fkThighCtrl, fkThighJoint)
        cmds.orientConstraint(fkCalfCtrl, fkCalfJoint)
        cmds.orientConstraint(fkFootCtrl, fkFootJoint)
        if ball:
            cmds.orientConstraint(fkBallCtrl, fkBallJoint)

        # -- color the controls (6 = blue/left, 13 = red/right) --
        if side == "l":
            color = 6
        else:
            color = 13
        # NOTE(review): only the thigh control is colored here; the calf,
        # foot, and ball controls keep their default color. Confirm this
        # is intentional before extending the loop to the other controls.
        cmds.setAttr(fkThighCtrl + ".overrideEnabled", 1)
        cmds.setAttr(fkThighCtrl + ".overrideColor", color)
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
def buildIKLegs(self):
#need to create the leg joints for each side based on the driver thigh, calf, and foot
for side in ["l", "r"]:
#create joints
ikThighJoint = cmds.duplicate("driver_thigh_" + side, name = "ik_leg_thigh_" + side, parentOnly = True)[0]
ikCalfJoint = cmds.duplicate("driver_calf_" + side, name = "ik_leg_calf_" + side, parentOnly = True)[0]
ikFootJoint = cmds.duplicate("driver_foot_" + side, name = "ik_leg_foot_" + side, parentOnly = True)[0]
cmds.setAttr(ikThighJoint + ".v", 0)
for joint in [ikThighJoint, ikCalfJoint, ikFootJoint]:
cmds.parent(joint, world = True)
cmds.parent(ikFootJoint, ikCalfJoint)
cmds.parent(ikCalfJoint, ikThighJoint)
cmds.makeIdentity(ikThighJoint, t = 0, r = 1, s = 0, apply = True)
#create the 2 joint chain for the no flip setup
cmds.select(clear = True)
beginJoint = cmds.joint(name = "noflip_begin_joint_" + side)
cmds.select(clear = True)
endJoint = cmds.joint(name = "noflip_end_joint_" + side)
cmds.select(clear = True)
cmds.setAttr(beginJoint + ".v", 0)
beginPos = cmds.xform(ikThighJoint, q = True, ws = True, t = True)
cmds.xform(beginJoint, ws = True, t = (beginPos[0], 0, beginPos[2]))
endPos = cmds.xform(ikFootJoint, q = True, ws = True, t = True)
cmds.xform(endJoint, ws = True, relative = True, t = (endPos[0], 0, endPos[2]))
cmds.parent(endJoint, beginJoint)
cmds.makeIdentity(beginJoint, t = 0, r = 1, s = 0, apply = True)
#set preferred angle
cmds.setAttr(beginJoint + ".preferredAngleX", -90)
#apply a RP IK solver to the 2 bone chain
ikNodes = cmds.ikHandle(name = "noflip_chain_ikHandle_" + side, solver = "ikRPsolver", sj = beginJoint, ee = endJoint)
for node in ikNodes:
cmds.setAttr(node + ".v", 0)
#create a locator(target loc) and group it
targetLoc = cmds.spaceLocator(name = "noflip_target_loc_" + side)[0]
targetGrp = cmds.group(empty = True, name = "noflip_target_loc_grp_" + side)
cmds.setAttr(targetLoc + ".v", 0)
constraint = cmds.pointConstraint(beginJoint, targetGrp)[0]
cmds.delete(constraint)
cmds.parent(targetLoc, targetGrp)
constraint = cmds.pointConstraint(endJoint, targetLoc)
cmds.delete(constraint)
cmds.parent(ikNodes[0], targetLoc)
#create the foot control
footCtrl = self.createControl("foot", 1, ("ik_foot_anim_" + side))
footCtrlGrp = cmds.group(empty = True, name = "ik_foot_anim_grp_" + side)
constraint = cmds.pointConstraint(ikFootJoint, footCtrlGrp)[0]
cmds.delete(constraint)
#position the foot control
footCtrlPos = cmds.xform("ball_mover_" + side + "_grp", q = True, ws = True, t = True)
cmds.xform(footCtrl, ws = True, t = (footCtrlPos[0], 0, 0))
constraint = cmds.pointConstraint("ball_mover_" + side + "_grp", footCtrl)[0]
cmds.delete(constraint)
cmds.makeIdentity(footCtrl, t=1, r=1, s=1, apply = True)
if side == "r":
cmds.setAttr(footCtrl + ".sx", -1)
cmds.makeIdentity(footCtrl, t=1, r=1, s=1, apply = True)
cmds.xform(footCtrl, ws = True, piv = [endPos[0], endPos[1], endPos[2]])
footCtrlSpaceSwitcherFollow = cmds.duplicate(footCtrlGrp, po = True, name = "ik_foot_anim_" + side + "_space_switcher_follow")[0]
footCtrlSpaceSwitcher = cmds.duplicate(footCtrlGrp, po = True, name = "ik_foot_anim_" + side + "_space_switcher")[0]
cmds.parent(footCtrlSpaceSwitcher, footCtrlSpaceSwitcherFollow)
cmds.parent(footCtrlGrp, footCtrlSpaceSwitcher)
cmds.parent(footCtrl, footCtrlGrp)
cmds.makeIdentity(footCtrl, t=1, r=1, s=1, apply = True)
#create the noflip pole vector loc
scale = self.getScaleFactor()
noflipVectorLoc = cmds.spaceLocator(name = "noflip_pv_loc_" + side)[0]
noflipVectorGrp = cmds.group(name = "noflip_pv_loc_grp_" + side, empty = True)
constraint = cmds.pointConstraint([beginJoint, endJoint], noflipVectorLoc)[0]
cmds.delete(constraint)
constraint = cmds.pointConstraint(targetLoc, noflipVectorGrp)[0]
cmds.delete(constraint)
cmds.setAttr(noflipVectorLoc + ".v", 0)
noflipVectorLocPos = cmds.xform(footCtrl + "_end_loc", q = True, ws = True, t = True)
if side == "l":
cmds.setAttr(noflipVectorLoc + ".ty", noflipVectorLocPos[1])
else:
cmds.setAttr(noflipVectorLoc + ".ty", noflipVectorLocPos[1] * -1)
cmds.makeIdentity(noflipVectorLoc, t = 1, r = 1, s = 1, apply = True)
cmds.parent(noflipVectorLoc, noflipVectorGrp)
if side == "l":
cmds.setAttr(noflipVectorLoc + ".ty", (200 * scale))
else:
cmds.setAttr(noflipVectorLoc + ".ty", (-200 * scale))
cmds.makeIdentity(noflipVectorLoc, t = 1, r = 1, s = 1, apply = True)
cmds.parentConstraint(endJoint, noflipVectorGrp)
#duplicate the targetGrp to create our aim vector locator
aimGrp = cmds.duplicate(targetGrp, name = "noflip_aim_grp_" + side, parentOnly = True)[0]
aimSoftGrp = cmds.duplicate(targetGrp, name = "noflip_aim_soft_grp_" + side, parentOnly = True)[0]
aimLoc = cmds.duplicate(targetLoc, name = "noflip_aim_loc_" + side, parentOnly = True)[0]
cmds.parent(aimSoftGrp, aimGrp)
cmds.parent(aimLoc, aimSoftGrp)
cmds.setAttr(aimGrp + ".v", 0)
if side == "r":
cmds.setAttr(aimGrp + ".ry", 90)
else:
cmds.setAttr(aimGrp + ".ry", -90)
#connectAttrs of targetLoc and aimLoc
cmds.connectAttr(targetLoc + ".tx", aimLoc + ".tx")
cmds.connectAttr(targetLoc + ".tz", aimLoc + ".tz")
#pole vector constraint between aimLoc and ikNodes[0] (2bone chain ik handle)
cmds.poleVectorConstraint(aimLoc, ikNodes[0])
if side == "l":
cmds.setAttr(ikNodes[0] + ".twist", 180)
twistAmt = cmds.getAttr(beginJoint + ".rz")
cmds.setAttr(ikNodes[0] + ".twist", twistAmt * -1)
#create RP IK on the actual IK leg chain
#set preferred angle first
cmds.setAttr(ikThighJoint + ".preferredAngleZ", 90)
cmds.setAttr(ikCalfJoint + ".preferredAngleZ", 90)
ikNodesLeg = cmds.ikHandle(name = "foot_ikHandle_" + side, solver = "ikRPsolver", sj = ikThighJoint, ee = ikFootJoint)
footIK = ikNodesLeg[0]
cmds.setAttr(footIK + ".v", 0)
cmds.parent(footIK, targetLoc)
#create pole vector constraint between knee loc and full ik leg rp ik handle
cmds.poleVectorConstraint(noflipVectorLoc, footIK)
#set limits on the aimLoc in Z space
minTz = cmds.getAttr(aimLoc + ".tz")
maxTz = cmds.xform(aimGrp, q = True, ws = True, t = True)[0]
if side == "l":
maxTz = maxTz * -1
cmds.transformLimits(aimLoc, etz = (True, True), tz = (minTz, maxTz))
#create the twist control
kneeCtrl = self.createControl("arrow", 2, ("ik_knee_anim_" + side))
constraint = cmds.pointConstraint(ikCalfJoint, kneeCtrl)[0]
cmds.delete(constraint)
kneeCtrlGrp = cmds.group(name = "ik_knee_anim_grp_" + side, empty = True)
constraint = cmds.parentConstraint(ikCalfJoint, kneeCtrlGrp)[0]
cmds.delete(constraint)
cmds.parent(kneeCtrl, kneeCtrlGrp)
cmds.makeIdentity(kneeCtrl, t = 1, r = 1, s = 1, apply = True)
upAxis = self.getUpAxis(kneeCtrl)
cmds.pointConstraint(ikCalfJoint, kneeCtrlGrp, mo = True)
cmds.setAttr(kneeCtrl + ".overrideEnabled", 1)
cmds.setAttr(kneeCtrl + ".overrideDisplayType", 2)
#Create foot rig
#create joints for ball and toe in IK leg skeleton
cmds.select(clear = True)
ikBallJoint = cmds.joint(name = "ik_leg_ball_" + side)
cmds.select(clear = True)
ikToeJoint = cmds.joint(name = "ik_leg_toe_" + side)
cmds.select(clear = True)
#position joints
constraint = cmds.parentConstraint("ball_" + side + "_lra", ikBallJoint)[0]
cmds.delete(constraint)
constraint = cmds.pointConstraint("jointmover_toe_" + side + "_end", ikToeJoint)[0]
cmds.delete(constraint)
constraint = cmds.orientConstraint(ikBallJoint, ikToeJoint)[0]
cmds.delete(constraint)
#parent joints into IK leg hierarchy
cmds.parent(ikToeJoint, ikBallJoint)
cmds.parent(ikBallJoint, ikFootJoint)
cmds.makeIdentity(ikBallJoint, r = 1, apply = True)
#create SC IK for ankle to ball and ball to toe
ballIKNodes = cmds.ikHandle(name = "ikHandle_ball_" + side, solver = "ikSCsolver", sj = ikFootJoint, ee = ikBallJoint)
toeIKNodes = cmds.ikHandle(name = "ikHandle_toe_" + side, solver = "ikSCsolver", sj = ikBallJoint, ee = ikToeJoint)
cmds.setAttr(ballIKNodes[0] + ".v", 0)
cmds.setAttr(toeIKNodes[0] + ".v", 0)
#create the locators we need for all of the pivot points
toeTipPivot = cmds.spaceLocator(name = "ik_foot_toe_tip_pivot_" + side)[0]
insidePivot = cmds.spaceLocator(name = "ik_foot_inside_pivot_" + side)[0]
outsidePivot = cmds.spaceLocator(name = "ik_foot_outside_pivot_" + side)[0]
heelPivot = cmds.spaceLocator(name = "ik_foot_heel_pivot_" + side)[0]
toePivot = cmds.spaceLocator(name = "ik_foot_toe_pivot_" + side)[0]
ballPivot = cmds.spaceLocator(name = "ik_foot_ball_pivot_" + side)[0]
masterBallPivot = cmds.spaceLocator(name = "master_foot_ball_pivot_" + side)[0]
#create the controls
heelControl = self.createControl("arrowOnBall", 1.5, "heel_ctrl_" + side)
toeWiggleControl = self.createControl("arrowOnBall", 2, "toe_wiggle_ctrl_" + side)
toeControl = self.createControl("arrowOnBall", 1.5, "toe_tip_ctrl_" + side)
if side == "l":
cmds.setAttr(toeControl + ".rx", -90)
cmds.setAttr(toeControl + ".rz", -90)
cmds.makeIdentity(toeControl, t = 1, r = 1, s = 1, apply = True)
else:
cmds.setAttr(toeControl + ".rx", 90)
cmds.setAttr(toeControl + ".rz", -90)
cmds.makeIdentity(toeControl, t = 1, r = 1, s = 1, apply = True)
if side == "l":
cmds.setAttr(toeWiggleControl + ".rx", -90)
cmds.makeIdentity(toeWiggleControl, t = 1, r = 1, s = 1, apply = True)
else:
cmds.setAttr(toeWiggleControl + ".rx", 90)
cmds.makeIdentity(toeWiggleControl, t = 1, r = 1, s = 1, apply = True)
cmds.setAttr(heelControl + ".rx", -90)
cmds.makeIdentity(heelControl, t = 1, r = 1, s = 1, apply = True)
#position and orient controls
constraint = cmds.parentConstraint("jointmover_" + side + "_heel_loc", heelControl)[0]
cmds.delete(constraint)
constraint = cmds.parentConstraint("ball_" + side + "_lra", toeWiggleControl)[0]
cmds.delete(constraint)
constraint = cmds.pointConstraint("jointmover_toe_" + side + "_end", toeControl)[0]
cmds.delete(constraint)
constraint = cmds.orientConstraint(toeWiggleControl, toeControl)[0]
cmds.delete(constraint)
#position the pivots
constraint = cmds.pointConstraint(heelControl, heelPivot)[0]
cmds.delete(constraint)
constraint = cmds.orientConstraint(heelControl, heelPivot)[0]
cmds.delete(constraint)
constraint = cmds.parentConstraint(toeWiggleControl, ballPivot)[0]
cmds.delete(constraint)
constraint = cmds.parentConstraint(toeControl, toeTipPivot)[0]
cmds.delete(constraint)
constraint = cmds.parentConstraint(toeControl, toePivot)[0]
cmds.delete(constraint)
constraint = cmds.parentConstraint("inside_pivot_" + side + "_mover", insidePivot)[0]
cmds.delete(constraint)
constraint = cmds.parentConstraint("outside_pivot_" + side + "_mover", outsidePivot)[0]
cmds.delete(constraint)
constraint = cmds.parentConstraint(ballPivot, masterBallPivot)[0]
cmds.delete(constraint)
#create groups for each pivot and parent the pivot to the corresponding group
for piv in [heelPivot, ballPivot, toeTipPivot, toePivot, insidePivot, outsidePivot, masterBallPivot]:
pivGrp = cmds.group(empty = True, name = piv + "_grp")
constraint = cmds.parentConstraint(piv, pivGrp)[0]
cmds.delete(constraint)
cmds.parent(piv, pivGrp)
shape = cmds.listRelatives(piv, shapes = True)[0]
cmds.setAttr(shape + ".v", 0)
#create groups for each control and parent the control to the corresponding group
for ctrl in [heelControl, toeWiggleControl, toeControl]:
grp = cmds.group(empty = True, name = ctrl + "_grp")
constraint = cmds.parentConstraint(ctrl, grp)[0]
cmds.delete(constraint)
cmds.parent(ctrl, grp)
if side == "r":
if ctrl == heelControl:
cmds.setAttr(grp + ".rx", (cmds.getAttr(grp + ".rx")) *1)
cmds.setAttr(grp + ".ry", (cmds.getAttr(grp + ".ry")) *1)
#setup pivot hierarchy
cmds.parent(toeWiggleControl + "_grp", toePivot)
cmds.parent(ballPivot + "_grp", toePivot)
cmds.parent(toePivot + "_grp", heelPivot)
cmds.parent(heelPivot + "_grp", outsidePivot)
cmds.parent(outsidePivot + "_grp", insidePivot)
cmds.parent(insidePivot + "_grp", toeTipPivot)
#setup foot roll
cmds.setAttr(heelControl + ".rz", 0)
cmds.setAttr(heelPivot + ".rz", 0)
cmds.setAttr(toePivot + ".rz", 0)
cmds.setAttr(ballPivot + ".rz", 0)
cmds.setDrivenKeyframe([heelPivot + ".rz", toePivot + ".rz", ballPivot + ".rz"], cd = heelControl + ".rz", itt = "linear", ott = "linear")
if side == "l":
cmds.setAttr(heelControl + ".rz", -90)
cmds.setAttr(heelPivot + ".rz", 0)
cmds.setAttr(toePivot + ".rz", 0)
cmds.setAttr(ballPivot + ".rz", -90)
cmds.setDrivenKeyframe([heelPivot + ".rz", toePivot + ".rz", ballPivot + ".rz"], cd = heelControl + ".rz", itt = "linear", ott = "linear")
cmds.setAttr(heelControl + ".rz", 90)
cmds.setAttr(heelPivot + ".rz", 90)
cmds.setAttr(toePivot + ".rz", 0)
cmds.setAttr(ballPivot + ".rz", 0)
cmds.setDrivenKeyframe([heelPivot + ".rz", toePivot + ".rz", ballPivot + ".rz"], cd = heelControl + ".rz", itt = "linear", ott = "linear")
cmds.setAttr(heelControl + ".rz", 0)
cmds.setAttr(heelPivot + ".rz", 0)
cmds.setAttr(toePivot + ".rz", 0)
cmds.setAttr(ballPivot + ".rz", 0)
if side == "r":
cmds.setAttr(heelControl + ".rz", -90)
cmds.setAttr(heelPivot + ".rz", 0)
cmds.setAttr(toePivot + ".rz", 0)
cmds.setAttr(ballPivot + ".rz", -90)
cmds.setDrivenKeyframe([heelPivot + ".rz", toePivot + ".rz", ballPivot + ".rz"], cd = heelControl + ".rz", itt = "linear", ott = "linear")
cmds.setAttr(heelControl + ".rz", 90)
cmds.setAttr(heelPivot + ".rz", 90)
cmds.setAttr(toePivot + ".rz", 0)
cmds.setAttr(ballPivot + ".rz", 0)
cmds.setDrivenKeyframe([heelPivot + ".rz", toePivot + ".rz", ballPivot + ".rz"], cd = heelControl + ".rz", itt = "linear", ott = "linear")
cmds.setAttr(heelControl + ".rz", 0)
cmds.setAttr(heelPivot + ".rz", 0)
cmds.setAttr(toePivot + ".rz", 0)
cmds.setAttr(ballPivot + ".rz", 0)
#setup heel rotate X and Y
if side == "l":
cmds.connectAttr(heelControl + ".rx", ballPivot + ".ry")
if side == "r":
heelControlRXMultNode = cmds.shadingNode("multiplyDivide", asUtility = True, name = heelControl + "_RX_MultNode")
cmds.connectAttr(heelControl + ".rx", heelControlRXMultNode + ".input1X")
cmds.setAttr(heelControlRXMultNode + ".input2X", -1)
cmds.connectAttr(heelControlRXMultNode + ".outputX", ballPivot + ".ry")
if side == "l":
heelControlRYMultNode = cmds.shadingNode("multiplyDivide", asUtility = True, name = heelControl + "_RY_MultNode")
cmds.connectAttr(heelControl + ".ry", heelControlRYMultNode + ".input1X")
cmds.setAttr(heelControlRYMultNode + ".input2X", -1)
cmds.connectAttr(heelControlRYMultNode + ".outputX", ballPivot + ".rx")
else:
cmds.connectAttr(heelControl + ".ry", ballPivot + ".rx")
#setup toe control Y and Z rotates
cmds.connectAttr(toeControl + ".ry", toeTipPivot + ".ry")
cmds.connectAttr(toeControl + ".rz", toeTipPivot + ".rz")
#setup the toe control RX (side to side)
if side == "l":
cmds.setAttr(toeControl + ".rx", 0)
cmds.setAttr(insidePivot + ".rx", 0)
cmds.setAttr(outsidePivot + ".rx", 0)
cmds.setDrivenKeyframe([insidePivot + ".rx", outsidePivot + ".rx"], cd = toeControl + ".rx", itt = "linear", ott = "linear")
cmds.setAttr(toeControl + ".rx", -90)
cmds.setAttr(insidePivot + ".rx", 0)
cmds.setAttr(outsidePivot + ".rx", -90)
cmds.setDrivenKeyframe([insidePivot + ".rx", outsidePivot + ".rx"], cd = toeControl + ".rx", itt = "linear", ott = "linear")
cmds.setAttr(toeControl + ".rx", 90)
cmds.setAttr(insidePivot + ".rx", 90)
cmds.setAttr(outsidePivot + ".rx", 0)
cmds.setDrivenKeyframe([insidePivot + ".rx", outsidePivot + ".rx"], cd = toeControl + ".rx", itt = "linear", ott = "linear")
cmds.setAttr(toeControl + ".rx", 0)
cmds.setAttr(insidePivot + ".rx", 0)
cmds.setAttr(outsidePivot + ".rx", 0)
if side == "r":
cmds.setAttr(toeControl + ".rx", 0)
cmds.setAttr(insidePivot + ".rx", 0)
cmds.setAttr(outsidePivot + ".rx", 0)
cmds.setDrivenKeyframe([insidePivot + ".rx", outsidePivot + ".rx"], cd = toeControl + ".rx", itt = "linear", ott = "linear")
cmds.setAttr(toeControl + ".rx", -90)
cmds.setAttr(insidePivot + ".rx", 0)
cmds.setAttr(outsidePivot + ".rx", 90)
cmds.setDrivenKeyframe([insidePivot + ".rx", outsidePivot + ".rx"], cd = toeControl + ".rx", itt = "linear", ott = "linear")
cmds.setAttr(toeControl + ".rx", 90)
cmds.setAttr(insidePivot + ".rx", -90)
cmds.setAttr(outsidePivot + ".rx", 0)
cmds.setDrivenKeyframe([insidePivot + ".rx", outsidePivot + ".rx"], cd = toeControl + ".rx", itt = "linear", ott = "linear")
cmds.setAttr(toeControl + ".rx", 0)
cmds.setAttr(insidePivot + ".rx", 0)
cmds.setAttr(outsidePivot + ".rx", 0)
#parent the IK nodes into the foot rig setup
cmds.parent(footIK, ballPivot)
cmds.parent(ballIKNodes[0], ballPivot)
cmds.parent(toeIKNodes[0], toeWiggleControl)
cmds.pointConstraint(footCtrl, targetLoc, mo = True)
cmds.parent([toeTipPivot + "_grp", heelControl + "_grp", toeControl + "_grp"], masterBallPivot)
cmds.parent(masterBallPivot + "_grp", footCtrl)
#add the heel pivot and ball pivot attrs to the foot control
cmds.select(heelControl)
cmds.addAttr(longName= ( "heelPivot" ), defaultValue=0, keyable = True)
cmds.addAttr(longName= ( "ballPivot" ), defaultValue=0, keyable = True)
#setup heel and ball pivot
if side == "r":
heelPivotMultNode = cmds.shadingNode("multiplyDivide", asUtility = True, name = heelPivot + "_MultNode")
cmds.connectAttr(heelControl + ".heelPivot", heelPivotMultNode + ".input1X")
cmds.setAttr(heelPivotMultNode + ".input2X", -1)
cmds.connectAttr(heelPivotMultNode + ".outputX", heelPivot + ".rx")
else:
cmds.connectAttr(heelControl + ".heelPivot", heelPivot + ".rx")
cmds.connectAttr(heelControl + ".ballPivot", masterBallPivot + ".ry")
#clean up the hierarchy
ctrlGrp = cmds.group(name = "leg_ctrl_grp_" + side, empty = True)
cmds.parent([ikThighJoint, targetGrp, aimGrp, noflipVectorGrp], ctrlGrp)
legGroup = cmds.group(name = "leg_group_" + side, empty = True)
constraint = cmds.pointConstraint("driver_pelvis", legGroup)[0]
cmds.delete(constraint)
cmds.parent([footCtrlSpaceSwitcherFollow, beginJoint, ctrlGrp], legGroup)
cmds.orientConstraint("body_anim_grp", ctrlGrp, mo = True)
cmds.pointConstraint("body_anim", ctrlGrp, mo = True)
#constrain aimGrp
cmds.pointConstraint("body_anim", aimGrp, mo = True)
cmds.orientConstraint("offset_anim", aimGrp, mo = True)
#cmds.parentConstraint("driver_pelvis", beginJoint, mo = True)
ikGrp = cmds.group(name = "ik_leg_grp_" + side, empty = True)
cmds.parent(ikGrp, legGroup)
cmds.parent([kneeCtrlGrp, footCtrlSpaceSwitcherFollow], ikGrp)
#color the controls
if side == "l":
color = 6
else:
color = 13
for control in [footCtrl]:
cmds.setAttr(control + ".overrideEnabled", 1)
cmds.setAttr(control + ".overrideColor", color)
#connect ik twist attr to ik leg twist
cmds.select(footCtrl)
cmds.addAttr(longName=("knee_twist"), at = 'double', keyable = True)
if side == "r":
cmds.connectAttr(footCtrl + ".knee_twist", footIK + ".twist")
else:
twistMultNode = cmds.shadingNode("multiplyDivide", name = "ik_leg_" + side + "_twistMultNode", asUtility = True)
cmds.connectAttr(footCtrl + ".knee_twist", twistMultNode + ".input1X")
cmds.setAttr(twistMultNode + ".input2X", -1)
cmds.connectAttr(twistMultNode + ".outputX", footIK + ".twist")
#add stretchy IK to legs
cmds.select(footCtrl)
cmds.addAttr(longName=("stretch"), at = 'double',min = 0, max = 1, dv = 0, keyable = True)
cmds.addAttr(longName=("squash"), at = 'double',min = 0, max = 1, dv = 0, keyable = True)
cmds.addAttr(longName=("toeCtrlVis"), at = 'bool', dv = 0, keyable = True)
stretchMultNode = cmds.shadingNode("multiplyDivide", asUtility = True, name = "ikLeg_stretchToggleMultNode_" + side)
#need to get the total length of the leg chain
totalDist = abs(cmds.getAttr(ikCalfJoint + ".tx" ) + cmds.getAttr(ikFootJoint + ".tx"))
#create a distanceBetween node
distBetween = cmds.shadingNode("distanceBetween", asUtility = True, name = side + "_ik_leg_distBetween")
#get world positions of the thigh joint and the foot control (for measuring current leg length)
baseGrp = cmds.group(empty = True, name = "ik_leg_base_grp_" + side)
endGrp = cmds.group(empty = True, name = "ik_leg_end_grp_" + side)
cmds.pointConstraint(ikThighJoint, baseGrp)
cmds.pointConstraint(footCtrl, endGrp)
#hook in group translates into distanceBetween node inputs
cmds.connectAttr(baseGrp + ".translate", distBetween + ".point1")
cmds.connectAttr(endGrp + ".translate", distBetween + ".point2")
#create a condition node that will compare original length to current length
#if second term is greater than, or equal to the first term, the chain needs to stretch
ikLegCondition = cmds.shadingNode("condition", asUtility = True, name = side + "_ik_leg_stretch_condition")
cmds.setAttr(ikLegCondition + ".operation", 3)
cmds.connectAttr(distBetween + ".distance", ikLegCondition + ".secondTerm")
cmds.setAttr(ikLegCondition + ".firstTerm", totalDist)
#hook up the condition node's return colors
cmds.setAttr(ikLegCondition + ".colorIfTrueR", totalDist)
cmds.connectAttr(distBetween + ".distance", ikLegCondition + ".colorIfFalseR")
#create the mult/divide node(set to divide) that will take the original creation length as a static value in input2x, and the connected length into 1x.
legDistMultNode = cmds.shadingNode("multiplyDivide", asUtility = True, name = "leg_dist_multNode_" + side)
cmds.setAttr(legDistMultNode + ".operation", 2) #divide
cmds.setAttr(legDistMultNode + ".input2X", totalDist)
cmds.connectAttr(ikLegCondition + ".outColorR", legDistMultNode + ".input1X")
#create a stretch toggle mult node that multiplies the stretch factor by the bool of the stretch attr. (0 or 1), this way our condition reads
#if this result is greater than the original length (impossible if stretch bool is off, since result will be 0), then take this result and plug it
#into the scale of our IK leg joints
stretchToggleCondition = cmds.shadingNode("condition", asUtility = True, name = "leg_stretch_toggle_condition_" + side)
cmds.setAttr(stretchToggleCondition + ".operation", 0)
cmds.connectAttr(footCtrl + ".stretch", stretchToggleCondition + ".firstTerm")
cmds.setAttr(stretchToggleCondition + ".secondTerm", 1)
cmds.connectAttr(legDistMultNode + ".outputX", stretchToggleCondition + ".colorIfTrueR")
cmds.setAttr(stretchToggleCondition + ".colorIfFalseR", 1)
#set up the squash nodes
squashMultNode = cmds.shadingNode("multiplyDivide", asUtility = True, name = side + "_ik_leg_squash_mult")
cmds.setAttr(squashMultNode + ".operation", 2)
cmds.setAttr(squashMultNode + ".input1X", totalDist)
cmds.connectAttr(ikLegCondition + ".outColorR", squashMultNode + ".input2X")
#create a squash toggle mult node that multiplies the squash factor by the bool of the squash attr. (0 or 1), this way our condition reads
#if this result is greater than the original length (impossible if squash bool is off, since result will be 0), then take this result and plug it
#into the scale of our IK leg joints
squashToggleCondition = cmds.shadingNode("condition", asUtility = True, name = "leg_squash_toggle_condition_" + side)
cmds.setAttr(squashToggleCondition + ".operation", 0)
cmds.connectAttr(footCtrl + ".squash", squashToggleCondition + ".firstTerm")
cmds.setAttr(squashToggleCondition + ".secondTerm", 1)
cmds.connectAttr(squashMultNode + ".outputX", squashToggleCondition + ".colorIfTrueR")
cmds.setAttr(squashToggleCondition + ".colorIfFalseR", 1)
#connect stretch/squash outputs to the IK leg joint scales
cmds.connectAttr(stretchToggleCondition + ".outColorR", ikThighJoint + ".sx")
cmds.connectAttr(stretchToggleCondition + ".outColorR", ikCalfJoint + ".sx")
cmds.connectAttr(squashToggleCondition + ".outColorR", ikCalfJoint + ".sy")
cmds.connectAttr(squashToggleCondition + ".outColorR", ikCalfJoint + ".sz")
cmds.connectAttr(squashToggleCondition + ".outColorR", ikThighJoint + ".sy")
cmds.connectAttr(squashToggleCondition + ".outColorR", ikThighJoint + ".sz")
#add base and end measurement groups to the leg ctrl grp
cmds.parent([baseGrp, endGrp], ctrlGrp)
#lock attrs on control that shouldn't be animated
for control in [toeControl, heelControl, toeWiggleControl]:
cmds.setAttr(control + ".tx", lock = True, keyable = False)
cmds.setAttr(control + ".ty", lock = True, keyable = False)
cmds.setAttr(control + ".tz", lock = True, keyable = False)
cmds.setAttr(control + ".sx", lock = True, keyable = False)
cmds.setAttr(control + ".sy", lock = True, keyable = False)
cmds.setAttr(control + ".sz", lock = True, keyable = False)
cmds.setAttr(control + ".v", lock = True, keyable = False)
#lock attrs on foot control that should not be animated
cmds.setAttr(footCtrl + ".sx", lock = True, keyable = False)
cmds.setAttr(footCtrl + ".sy", lock = True, keyable = False)
cmds.setAttr(footCtrl + ".sz", lock = True, keyable = False)
cmds.setAttr(footCtrl + ".v", lock = True, keyable = False)
#lock attrs on knee control that should not be animated
cmds.connectAttr(footCtrl + ".knee_twist", kneeCtrl + ".rx")
cmds.setAttr(kneeCtrl + ".rx", lock = False, keyable = False)
cmds.setAttr(kneeCtrl + ".ry", lock = True, keyable = False)
cmds.setAttr(kneeCtrl + ".rz", lock = True, keyable = False)
cmds.setAttr(kneeCtrl + ".tx", lock = True, keyable = False)
cmds.setAttr(kneeCtrl + ".ty", lock = True, keyable = False)
cmds.setAttr(kneeCtrl + ".tz", lock = True, keyable = False)
cmds.setAttr(kneeCtrl + ".sx", lock = True, keyable = False)
cmds.setAttr(kneeCtrl + ".sy", lock = True, keyable = False)
cmds.setAttr(kneeCtrl + ".sz", lock = True, keyable = False)
cmds.setAttr(kneeCtrl + ".v", lock = True, keyable = False)
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
def buildFingers(self):
#find out which finger joints need to be rigged
for side in ["l", "r"]:
#create a list to hold all ctrl groups that are created
ctrlGroups = []
ikGrps = []
joints = []
fkOrients = []
metaJoints = []
ikJoints = []
children = cmds.listRelatives("driver_hand_" + side, children = True, type = 'joint')
allChildren = cmds.listRelatives("driver_hand_" + side, allDescendents = True, type = 'joint')
#find out how many finger joints we have for each finger
thumbMeta = [False, None]
indexMeta = [False, None]
middleMeta = [False, None]
ringMeta = [False, None]
pinkyMeta = [False, None]
numThumbJoints = [0, "thumb"]
numIndexJoints = [0, "index"]
numMiddleJoints = [0, "middle"]
numRingJoints = [0, "ring"]
numPinkyJoints = [0, "pinky"]
if allChildren:
for finger in allChildren:
#find if metacarpals exist
if finger.find("meta") != -1:
if finger.partition("driver_")[2].find("index") == 0:
indexMeta = [True, "index"]
if finger.partition("driver_")[2].find("middle") == 0:
middleMeta = [True, "middle"]
if finger.partition("driver_")[2].find("ring") == 0:
ringMeta = [True, "ring"]
if finger.partition("driver_")[2].find("pinky") == 0:
pinkyMeta = [True, "pinky"]
#get num fingers -meta
if finger.partition("driver_")[2].find("thumb") == 0:
numThumbJoints[0] += 1
if finger.partition("driver_")[2].find("index") == 0:
numIndexJoints[0] += 1
if finger.partition("driver_")[2].find("middle") == 0:
numMiddleJoints[0] += 1
if finger.partition("driver_")[2].find("ring") == 0:
numRingJoints[0] += 1
if finger.partition("driver_")[2].find("pinky") == 0:
numPinkyJoints[0] += 1
#subtract metacarpals (only if they exist!)
if indexMeta[0] == True:
numIndexJoints[0] -= 1
if middleMeta[0] == True:
numMiddleJoints[0] -= 1
if ringMeta[0] == True:
numRingJoints[0] -= 1
if pinkyMeta[0] == True:
numPinkyJoints[0] -= 1
#duplicate the driver joints to be used as the rig joints
if children:
for child in children:
for mode in ["fk", "ik"]:
dupeChildNodes = cmds.duplicate(child, name = "temp")
#parent root joint of each finger to world if not already child of world
parent = cmds.listRelatives(dupeChildNodes[0], parent = True)[0]
if parent != None:
cmds.parent(dupeChildNodes[0], world = True)
#rename duped joints
for node in dupeChildNodes:
if node == "temp":
niceName = child.partition("driver_")[2]
joint = cmds.rename(node, "rig_" + mode + "_" + niceName)
if mode == "ik":
ikJoints.append(joint)
else:
joints.append(joint)
else:
niceName = node.partition("driver_")[2]
cmds.rename("rig_*|" + node, "rig_" + mode + "_" + niceName)
#if the metacarpal fingers exist, create a control for them
for meta in [indexMeta, middleMeta, ringMeta, pinkyMeta]:
if meta[0] == True:
#create the control object for the metacarpal
ctrlName = meta[1]
ctrlName = ctrlName + "_metacarpal_ctrl_" + side
control = self.createControl("square", 1, ctrlName)
constraint = cmds.parentConstraint("rig_fk_" + meta[1] + "_metacarpal_" + side, control)[0]
cmds.delete(constraint)
cmds.setAttr(control + ".sx", 0)
cmds.setAttr(control + ".sy", 15)
cmds.setAttr(control + ".sz", 15)
cmds.makeIdentity(control, t = 1, r = 1, s = 1, apply = True)
#create the group node and parent ctrl to it
ctrlGrp = cmds.group(empty = True, name = ctrlName + "_grp")
constraint = cmds.parentConstraint("rig_fk_" + meta[1] + "_metacarpal_" + side, ctrlGrp)[0]
metaJoints.append(ctrlGrp)
cmds.delete(constraint)
cmds.parent(control, ctrlGrp)
cmds.makeIdentity(control, t = 1, r = 1, s = 1, apply = True)
#parent constrain the rig joint to the control
cmds.parentConstraint(control, "rig_fk_" + meta[1] + "_metacarpal_" + side, mo = True)
cmds.parentConstraint(control, "rig_ik_" + meta[1] + "_metacarpal_" + side, mo = True)
#lock attrs on control that shouldn't be animated
cmds.setAttr(control + ".tx", lock = True, keyable = False)
cmds.setAttr(control + ".ty", lock = True, keyable = False)
cmds.setAttr(control + ".tz", lock = True, keyable = False)
cmds.setAttr(control + ".sx", lock = True, keyable = False)
cmds.setAttr(control + ".sy", lock = True, keyable = False)
cmds.setAttr(control + ".sz", lock = True, keyable = False)
cmds.setAttr(control + ".v", lock = True, keyable = False)
#color the controls
if side == "l":
color = 6
else:
color = 13
cmds.setAttr(control + ".overrideEnabled", 1)
cmds.setAttr(control + ".overrideColor", color)
#Create the FK orient joints
#first create a group for the IK handles to go into. Then setup the constraints on this group and set driven keys
ikHandlesGrp = cmds.group(empty = True, name = "fkOrient_ikHandles_" + side + "_grp")
constraint = cmds.parentConstraint("ik_wrist_" + side + "_anim", "fk_wrist_" + side + "_anim", ikHandlesGrp, mo = True)[0]
cmds.setAttr("Rig_Settings" + "." + side + "ArmMode", 0)
cmds.setAttr(constraint + ".ik_wrist_" + side + "_anim" + "W0", 0)
cmds.setAttr(constraint + ".fk_wrist_" + side + "_anim" + "W1", 1)
cmds.setDrivenKeyframe([constraint + ".ik_wrist_" + side + "_anim" + "W0", constraint + ".fk_wrist_" + side + "_anim" + "W1"], cd = "Rig_Settings" + "." + side + "ArmMode", itt = "linear", ott = "linear")
cmds.setAttr("Rig_Settings" + "." + side + "ArmMode", 1)
cmds.setAttr(constraint + ".ik_wrist_" + side + "_anim" + "W0", 1)
cmds.setAttr(constraint + ".fk_wrist_" + side + "_anim" + "W1", 0)
cmds.setDrivenKeyframe([constraint + ".ik_wrist_" + side + "_anim" + "W0", constraint + ".fk_wrist_" + side + "_anim" + "W1"], cd = "Rig_Settings" + "." + side + "ArmMode", itt = "linear", ott = "linear")
for fingers in [numIndexJoints, numMiddleJoints, numRingJoints, numPinkyJoints, numThumbJoints]:
if fingers[0] > 0:
#setup metaCtrl name
if fingers[1] == "thumb":
metaCtrl = fingers[1] + "_01_" + side
else:
metaCtrl = fingers[1] + "_metacarpal_ctrl_" + side
#create the base and end joint
baseJoint = cmds.duplicate("rig_fk_" + fingers[1] + "_01_" + side, po = True, name = "rig_fkOrient_" + fingers[1] + "_01_" + side)[0]
endJoint = cmds.duplicate("rig_fk_" + fingers[1] + "_0" + str(fingers[0]) + "_" + side, po = True, name = "rig_fkOrient_" + fingers[1] + "_0" + str(fingers[0]) + "_" + side)[0]
#position the end joint
scaleFactor = self.getScaleFactor()
if side == "l":
cmds.parent(endJoint, "rig_fk_" + fingers[1] + "_0" + str(fingers[0]) + "_" + side)
cmds.setAttr(endJoint + ".tx", 5 * scaleFactor)
else:
cmds.parent(endJoint, "rig_fk_" + fingers[1] + "_0" + str(fingers[0]) + "_" + side)
cmds.setAttr(endJoint + ".tx", -5 * scaleFactor)
#parent the end joint to the base joint
cmds.parent(endJoint, baseJoint)
#create SC ik handles for each chain
ikNodes = cmds.ikHandle(sol = "ikSCsolver", name = baseJoint + "_ikHandle", sj = baseJoint, ee = endJoint)[0]
cmds.parent(ikNodes, ikHandlesGrp)
cmds.setAttr(ikNodes + ".v", 0)
#parent our orient joint to the metacarpal if it exists
if cmds.objExists(metaCtrl):
if fingers[1] == "thumb":
fkOrients.append(baseJoint)
else:
cmds.parent(baseJoint, metaCtrl)
else:
fkOrients.append(baseJoint)
#Create FK controls for the fingers
fkControls = []
for fingers in [numIndexJoints, numMiddleJoints, numRingJoints, numPinkyJoints, numThumbJoints]:
for i in range(int(fingers[0])):
#create an FK control per finger
ctrlName = fingers[1] + "_finger_fk_ctrl_" + str(i + 1) + "_" + side
control = self.createControl("circle", 3, ctrlName)
ctrlGrp = cmds.group(empty = True, name = control + "_grp")
metaCtrl = fingers[1] + "_metacarpal_ctrl_" + side
if cmds.objExists(metaCtrl) == False:
if (i + 1) == 1:
ctrlGroups.append(ctrlGrp)
#add the created control to the controls list
fkControls.append(control)
#position control
constraint = cmds.parentConstraint("rig_fk_" + fingers[1] + "_0" + str(i+1) + "_" + side, control)[0]
grpConstraint = cmds.parentConstraint("rig_fk_" + fingers[1] + "_0" + str(i+1) + "_" + side, ctrlGrp)[0]
cmds.delete([constraint, grpConstraint])
cmds.makeIdentity(control, t = 0, r = 1, s = 0, apply = True)
#duplicate the ctrl group to create the driven group
drivenGrp = cmds.duplicate(ctrlGrp, parentOnly = True, name = control + "_driven_grp")[0]
ctrlGroups.append(drivenGrp)
cmds.parent(drivenGrp, ctrlGrp)
#parent control to grp
cmds.parent(control, drivenGrp)
cmds.makeIdentity(control, t = 0, r = 1, s = 0, apply = True)
cmds.setAttr(control + ".ry", -90)
cmds.makeIdentity(control, t = 1, r = 1, s = 1, apply = True)
#constrain finger joint to control
cmds.parentConstraint(control, "rig_fk_" + fingers[1] + "_0" + str(i+1) + "_" + side, mo = True)
#if we aren't the root of the finger chain, then parent our ctrlGrp to the previous fk control
if i != 0:
cmds.parent(ctrlGrp, ctrlParent)
else:
#if the control grp is the root of the finger chain, need to parent the ctrl grp to the metaCtrl
if cmds.objExists(metaCtrl):
cmds.parent(ctrlGrp, metaCtrl)
#setup set driven keys for the orientation options
cmds.select(control)
cmds.addAttr(longName= ( "sticky" ), defaultValue=0, minValue=0, maxValue=1, keyable = True)
#setup the constraint between the fk finger orient joint and the ctrlGrp
constraint = cmds.parentConstraint("rig_fkOrient_" + fingers[1] + "_01_" + side, ctrlGrp, mo = True)[0]
#set driven keyframes on constraint
cmds.setAttr(control + ".sticky", 1)
cmds.setAttr(constraint + "." + "rig_fkOrient_" + fingers[1] + "_01_" + side + "W0", 1)
cmds.setDrivenKeyframe(constraint + "." + "rig_fkOrient_" + fingers[1] + "_01_" + side + "W0", cd = control + ".sticky", itt = "linear", ott = "linear")
cmds.setAttr(control + ".sticky", 0)
cmds.setAttr(constraint + "." + "rig_fkOrient_" + fingers[1] + "_01_" + side + "W0", 0)
cmds.setDrivenKeyframe(constraint + "." + "rig_fkOrient_" + fingers[1] + "_01_" + side + "W0", cd = control + ".sticky", itt = "linear", ott = "linear")
if fingers[1] == "thumb":
cmds.setAttr(control + ".sticky", 1)
else:
cmds.setAttr(control + ".sticky", 0)
ctrlGroups.append(ctrlGrp)
#if the metacarpal does not exist, constrain the root ctrl group to the fkOrient base joint instead
else:
ctrlGroups.append(ctrlGrp)
constraint = cmds.parentConstraint("rig_fkOrient_" + fingers[1] + "_01_" + side, ctrlGrp, mo = True)[0]
#setup set driven keys for the orientation options
cmds.select(control)
cmds.addAttr(longName= ( "sticky" ), defaultValue=0, minValue=0, maxValue=1, keyable = True)
#set driven keyframes on constraint
cmds.setAttr(control + ".sticky", 1)
cmds.setAttr(constraint + "." + "rig_fkOrient_" + fingers[1] + "_01_" + side + "W0", 1)
cmds.setDrivenKeyframe(constraint + "." + "rig_fkOrient_" + fingers[1] + "_01_" + side + "W0", cd = control + ".sticky", itt = "linear", ott = "linear")
cmds.setAttr(control + ".sticky", 0)
cmds.setAttr(constraint + "." + "rig_fkOrient_" + fingers[1] + "_01_" + side + "W0", 0)
cmds.setDrivenKeyframe(constraint + "." + "rig_fkOrient_" + fingers[1] + "_01_" + side + "W0", cd = control + ".sticky", itt = "linear", ott = "linear")
#set the control parent for the next ctrl in the chain to the current control
ctrlParent = control
#lock attrs on control that shouldn't be animated
cmds.setAttr(control + ".sx", lock = True, keyable = False)
cmds.setAttr(control + ".sy", lock = True, keyable = False)
cmds.setAttr(control + ".sz", lock = True, keyable = False)
cmds.setAttr(control + ".v", lock = True, keyable = False)
#color the controls
if side == "l":
color = 6
else:
color = 13
cmds.setAttr(control + ".overrideEnabled", 1)
cmds.setAttr(control + ".overrideColor", color)
#setup the hand roll feature
#create our 4 locators(pivots) and position
pinkyPiv = cmds.spaceLocator(name = "hand_" + side + "_pinky_pivot")[0]
thumbPiv = cmds.spaceLocator(name = "hand_" + side + "_thumb_pivot")[0]
midPiv = cmds.spaceLocator(name = "hand_" + side + "_mid_pivot")[0]
tipPiv = cmds.spaceLocator(name = "hand_" + side + "_tip_pivot")[0]
for piv in [pinkyPiv, thumbPiv, midPiv, tipPiv]:
cmds.setAttr(piv + ".v", 0)
constraint = cmds.parentConstraint(side + "_hand_pinky_pivot", pinkyPiv)[0]
cmds.delete(constraint)
constraint = cmds.parentConstraint(side + "_hand_thumb_pivot", thumbPiv)[0]
cmds.delete(constraint)
constraint = cmds.parentConstraint(side + "_hand_mid_pivot", midPiv)[0]
cmds.delete(constraint)
constraint = cmds.parentConstraint(side + "_hand_tip_pivot", tipPiv)[0]
cmds.delete(constraint)
#create the control groups for the pivots so our values are zeroed
for each in [pinkyPiv, thumbPiv, midPiv, tipPiv]:
group = cmds.group(empty = True, name = each + "_grp")
constraint = cmds.parentConstraint(each, group)[0]
cmds.delete(constraint)
cmds.parent(each, group)
#setup hierarchy
cmds.parent(thumbPiv + "_grp", pinkyPiv)
cmds.parent(tipPiv + "_grp", thumbPiv)
cmds.parent(midPiv + "_grp", tipPiv)
#parent the arm IK handles under the midPiv locator
cmds.parent(["arm_ikHandle_" + side, "invis_arm_ikHandle_" + side], midPiv)
cmds.parent(pinkyPiv + "_grp", "ik_wrist_" + side + "_anim")
#add attrs to the IK hand control (side, roll, tip pivot)
cmds.select("ik_wrist_" + side + "_anim")
cmds.addAttr(longName= ( "side" ), defaultValue=0, keyable = True)
cmds.addAttr(longName= ( "mid_bend" ), defaultValue=0, keyable = True)
cmds.addAttr(longName= ( "mid_swivel" ), defaultValue=0, keyable = True)
cmds.addAttr(longName= ( "tip_pivot" ), defaultValue=0, keyable = True)
cmds.addAttr(longName= ( "tip_swivel" ), defaultValue=0, keyable = True)
#hook up attrs to pivot locators
cmds.connectAttr("ik_wrist_" + side + "_anim.mid_bend", midPiv + ".rz")
cmds.connectAttr("ik_wrist_" + side + "_anim.tip_pivot", tipPiv + ".rz")
cmds.connectAttr("ik_wrist_" + side + "_anim.mid_swivel", midPiv + ".ry")
cmds.connectAttr("ik_wrist_" + side + "_anim.tip_swivel", tipPiv + ".ry")
#set driven keys for the side to side attr
if side == "l":
thumbVal = 180
pinkyVal = -180
else:
thumbVal = 180
pinkyVal = -180
cmds.setAttr("ik_wrist_" + side + "_anim.side", 0)
cmds.setAttr(pinkyPiv + ".rx", 0)
cmds.setAttr(thumbPiv + ".rx", 0)
cmds.setDrivenKeyframe([pinkyPiv + ".rx", thumbPiv + ".rx"], cd = "ik_wrist_" + side + "_anim.side", itt = "linear", ott = "linear")
cmds.setAttr("ik_wrist_" + side + "_anim.side", 180)
cmds.setAttr(pinkyPiv + ".rx", pinkyVal)
cmds.setAttr(thumbPiv + ".rx", 0)
cmds.setDrivenKeyframe([pinkyPiv + ".rx", thumbPiv + ".rx"], cd = "ik_wrist_" + side + "_anim.side", itt = "linear", ott = "linear")
cmds.setAttr("ik_wrist_" + side + "_anim.side", -180)
cmds.setAttr(pinkyPiv + ".rx", 0)
cmds.setAttr(thumbPiv + ".rx", thumbVal)
cmds.setDrivenKeyframe([pinkyPiv + ".rx", thumbPiv + ".rx"], cd = "ik_wrist_" + side + "_anim.side", itt = "linear", ott = "linear")
cmds.setAttr("ik_wrist_" + side + "_anim.side", 0)
#If there are enough finger joints on each finger, create IK rig
ikCtrls = []
poleVectorLocs = []
modeGrps = []
for fingers in [numIndexJoints, numMiddleJoints, numRingJoints, numPinkyJoints, numThumbJoints]:
if fingers[0] == 3:
#set preferred angles on joints so IK will create properly
cmds.setAttr("rig_ik_" + fingers[1] + "_01_" + side + ".preferredAngleZ", 45)
cmds.setAttr("rig_ik_" + fingers[1] + "_02_" + side + ".preferredAngleZ", 45)
cmds.setAttr("rig_ik_" + fingers[1] + "_03_" + side + ".preferredAngleZ", 45)
#create a tip joint
tipJoint = cmds.duplicate("rig_ik_" + fingers[1] + "_03_" + side, po = True, name = "rig_ik_" + fingers[1] + "_tip_" + side)[0]
cmds.parent(tipJoint, "rig_ik_" + fingers[1] + "_03_" + side)
#position tip joint
if side == "l":
cmds.setAttr(tipJoint + ".tx", 5 * scaleFactor)
else:
cmds.setAttr(tipJoint + ".tx", -5 * scaleFactor)
#create the IK handle
ikNodes = cmds.ikHandle(sol = "ikRPsolver", name = fingers[1] + "_" + side + "_ikHandle", sj = "rig_ik_" + fingers[1] + "_01_" + side, ee = "rig_ik_" + fingers[1] + "_03_" + side)[0]
ikTipNodes = cmds.ikHandle(sol = "ikSCsolver", name = fingers[1] + "_" + side + "_end_ikHandle", sj = "rig_ik_" + fingers[1] + "_03_" + side, ee = tipJoint)[0]
cmds.setAttr(ikNodes + ".v", 0)
cmds.parent(ikTipNodes, ikNodes)
#create a pole vector locator and position it
poleVector = cmds.spaceLocator(name = fingers[1] + "_" + side + "_poleVector")[0]
constraint = cmds.parentConstraint("rig_ik_" + fingers[1] + "_02_" + side, poleVector)[0]
cmds.delete(constraint)
#color the control
if side == "l":
color = 6
else:
color = 13
cmds.setAttr(poleVector + ".overrideEnabled", 1)
cmds.setAttr(poleVector + ".overrideColor", color)
#create a pole vector group
pvGrp = cmds.group(empty = True, name = poleVector + "_grp")
constraint = cmds.parentConstraint(poleVector, pvGrp)[0]
cmds.delete(constraint)
#parent to the joint, and move out away from finger
cmds.parent(poleVector, "rig_ik_" + fingers[1] + "_02_" + side)
if side == "l":
cmds.setAttr(poleVector + ".ty", -20 * scaleFactor)
else:
cmds.setAttr(poleVector + ".ty", 20 * scaleFactor)
cmds.makeIdentity(poleVector, t =1, r =1, s = 1, apply = True)
cmds.parent(poleVector, pvGrp, absolute = True)
cmds.makeIdentity(poleVector, t =1, r =1, s = 1, apply = True)
#lock pole vector attrs
for attr in [".sx", ".sy", ".sz", ".v"]:
if attr == ".v":
cmds.setAttr(poleVector + attr, keyable = False)
else:
cmds.setAttr(poleVector + attr, lock = True, keyable = False)
#create the IK finger controls
ctrlName = fingers[1] + "_" + side + "_ik_anim"
control = self.createControl("circle", 3, ctrlName)
ctrlGrp = cmds.group(empty = True, name = control + "_grp")
ikCtrls.append(control)
#position control
constraint = cmds.parentConstraint(tipJoint, control)[0]
grpConstraint = cmds.parentConstraint(tipJoint, ctrlGrp)[0]
cmds.delete([constraint, grpConstraint])
cmds.makeIdentity(control, t = 0, r = 1, s = 0, apply = True)
#parent control to grp
cmds.parent(control, ctrlGrp)
cmds.makeIdentity(control, t = 0, r = 1, s = 0, apply = True)
cmds.setAttr(control + ".ry", -90)
cmds.makeIdentity(control, t = 1, r = 1, s = 1, apply = True)
cmds.parent(ikNodes, control)
#setup the pole vector constraint and add the locator to the poleVectorLocs list
cmds.poleVectorConstraint(poleVector, ikNodes)
poleVectorLocs.append(pvGrp)
#add attr to show pole vector control
cmds.select(control)
cmds.addAttr(longName= ( "poleVectorVis" ), defaultValue=0, minValue=0, maxValue=1, keyable = True)
cmds.connectAttr(control + ".poleVectorVis", poleVector + ".v")
#create a tip locator with finger mode attrs
fingerModeCtrl = cmds.spaceLocator(name = fingers[1] + "_finger_" + side + "_mode_anim")[0]
fingerModeCtrlGrp = cmds.group(empty = True, name = fingers[1] + "_finger_" + side + "_mode_grp")
modeGrps.append(fingerModeCtrlGrp)
cmds.setAttr(fingerModeCtrl + ".v", 0)
constraint = cmds.parentConstraint(tipJoint, fingerModeCtrl)[0]
cmds.delete(constraint)
constraint = cmds.parentConstraint(tipJoint, fingerModeCtrlGrp)[0]
cmds.delete(constraint)
cmds.parent(fingerModeCtrl, fingerModeCtrlGrp)
#lock attrs
for attr in [".tx", ".ty", ".tz", ".rx", ".ry", ".rz", ".sx", ".sy", ".sz", ".v"]:
cmds.setAttr(fingerModeCtrl + attr, lock = True, keyable = False)
for attr in [".sx", ".sy", ".sz", ".v"]:
cmds.setAttr(control + attr, lock = True, keyable = False)
#scale up the fingerModeCtrl
shape = cmds.listRelatives(fingerModeCtrl, shapes = True)[0]
cmds.setAttr(shape + ".localScaleX", 3 * scaleFactor)
cmds.setAttr(shape + ".localScaleY", 3 * scaleFactor)
cmds.setAttr(shape + ".localScaleZ", 3 * scaleFactor)
#constrain the fingerModeCtrlGrp to the last existing driver knuckle (prefer 03, fall back to 02, then 01)
if cmds.objExists("driver_" + fingers[1] + "_03_" + side):
cmds.parentConstraint("driver_" + fingers[1] + "_03_" + side, fingerModeCtrlGrp, mo = True)
else:
if cmds.objExists("driver_" + fingers[1] + "_02_" + side):
cmds.parentConstraint("driver_" + fingers[1] + "_02_" + side, fingerModeCtrlGrp, mo = True)
else:
if cmds.objExists("driver_" + fingers[1] + "_01_" + side):
cmds.parentConstraint("driver_" + fingers[1] + "_01_" + side, fingerModeCtrlGrp, mo = True)
#color the control
if side == "l":
color = 6
else:
color = 13
cmds.setAttr(fingerModeCtrl + ".overrideEnabled", 1)
cmds.setAttr(fingerModeCtrl + ".overrideColor", color)
#add attr for finger mode(fk/ik) on both IK and FK control
cmds.select(fingerModeCtrl)
cmds.addAttr(longName= "FK_IK", defaultValue = 0, minValue = 0, maxValue = 1, keyable = True)
#take all of the pole vector groups and add them to a master pv group
masterPvGrp = cmds.group(empty = True, name = "fingers_" + side + "_poleVectors_grp")
for pv in poleVectorLocs:
cmds.parent(pv, masterPvGrp, absolute = True)
#create a global IK control if there are any IK fingers
if ikCtrls:
ctrlName = side + "_global_ik_anim"
control = self.createControl("square", 20, ctrlName)
ctrlGrp = cmds.group(empty = True, name = control + "_grp")
#position control
constraint = cmds.pointConstraint(midPiv, control)[0]
grpConstraint = cmds.pointConstraint(midPiv, ctrlGrp)[0]
cmds.delete([constraint, grpConstraint])
#parent control to grp
cmds.parent(control, ctrlGrp)
#cmds.setAttr(control + ".rx", -90)
#freeze rots
cmds.makeIdentity(control, t = 0, r = 1, s = 0, apply = True)
#translate back -13 in z
cmds.setAttr(control + ".tz", -13)
cmds.makeIdentity(control, t = 1, r = 1, s = 1, apply = True)
cmds.parent(control, world = True)
constraint = cmds.pointConstraint(control, ctrlGrp)[0]
cmds.delete(constraint)
cmds.parent(control, ctrlGrp)
cmds.makeIdentity(control, t = 1, r = 1, s = 1, apply = True)
#create a space switcher grp
spaceSwitcherFollow = cmds.duplicate(ctrlGrp, po = True, name = ctrlName + "_space_switcher_follow")[0]
spaceSwitcher = cmds.duplicate(ctrlGrp, po = True, name = ctrlName + "_space_switcher")[0]
cmds.parent(spaceSwitcher, spaceSwitcherFollow)
cmds.parent(ctrlGrp, spaceSwitcher)
cmds.parent(spaceSwitcherFollow, "ik_wrist_" + side + "_anim")
#parent ik control grps to this global control
for ctrl in ikCtrls:
parent = cmds.listRelatives(ctrl, parent = True)[0]
cmds.parent(parent, control)
#parent constrain the master pv group to the global control
cmds.parentConstraint(control, masterPvGrp, mo = True)
#lock attrs
for attr in [".sx", ".sy", ".sz", ".v"]:
cmds.setAttr(control + attr, lock = True, keyable = False)
#clean up hand rig hierarchy
jointsGrp = cmds.group(empty = True, name = "hand_fk_joints_grp_" + side)
handDrivenGrp = cmds.group(empty = True, name = "hand_driven_grp_" + side)
handDrivenGrpMaster = cmds.group(empty = True, name = "hand_driven_grp_master_" + side)
fkCtrlGrp = cmds.group(empty = True, name = "fk_finger_controls_" + side + "_grp")
constraint = cmds.parentConstraint("ik_wrist_" + side + "_anim", handDrivenGrpMaster)
cmds.delete(constraint)
#create fk hand match node
fkHandMatchNode = cmds.duplicate(handDrivenGrpMaster, po = True, name = "hand_match_loc_" + side)[0]
cmds.parent(fkHandMatchNode, handDrivenGrpMaster)
#find aim axis of arm chain to determine offset value
vector1 = cmds.xform("driver_lowerarm_" + side, q = True, ws = True, t = True)
vector2 = cmds.xform("driver_hand_" + side, q = True, ws = True, t = True)
aimAxis = self.normalizeSubVector(vector1, vector2)
axis = None
offset = 0
for item in [ ["X", ".rx"], ["-X", ".rx"], ["Y", ".ry"], ["-Y", ".ry"], ["Z", ".rz"], ["-Z", ".rz"]]:
if aimAxis == item[0]:
axis = item[1]
if item[0].find("-") == 0:
offset = 90
else:
offset = -90
cmds.setAttr(fkHandMatchNode + axis, offset)
cmds.parent(handDrivenGrp, handDrivenGrpMaster)
for joint in joints:
cmds.parent(joint, jointsGrp)
for control in ctrlGroups:
parent = cmds.listRelatives(control, parent = True)
if parent == None:
cmds.parent(control, fkCtrlGrp)
for control in metaJoints:
cmds.parent(control, handDrivenGrp)
for control in fkOrients:
cmds.parent(control, handDrivenGrp)
for joint in ikJoints:
cmds.parent(joint, handDrivenGrp)
#constrain the master grp to the fk and ik hand joints
fingerSysGrp = cmds.group(empty = True, name = "finger_sys_grp_" + side)
cmds.parent(fkCtrlGrp, handDrivenGrp)
cmds.parent([jointsGrp, ikHandlesGrp, masterPvGrp, handDrivenGrpMaster], fingerSysGrp)
if len(modeGrps) > 0:
cmds.parent(modeGrps, fingerSysGrp)
constraint = cmds.parentConstraint(["fk_wrist_" + side + "_anim", "ik_hand_" + side], handDrivenGrpMaster, mo = True)[0]
cmds.setAttr("Rig_Settings" + "." + side + "ArmMode", 0)
cmds.setAttr(constraint + ".fk_wrist_" + side + "_animW0", 1)
cmds.setAttr(constraint + ".ik_hand_" + side + "W1", 0)
cmds.setDrivenKeyframe([constraint + ".fk_wrist_" + side + "_animW0", constraint + ".ik_hand_" + side + "W1"], cd = "Rig_Settings" + "." + side + "ArmMode", itt = "linear", ott = "linear")
cmds.setAttr("Rig_Settings" + "." + side + "ArmMode", 1)
cmds.setAttr(constraint + ".fk_wrist_" + side + "_animW0", 0)
cmds.setAttr(constraint + ".ik_hand_" + side + "W1", 1)
cmds.setDrivenKeyframe([constraint + ".fk_wrist_" + side + "_animW0", constraint + ".ik_hand_" + side + "W1"], cd = "Rig_Settings" + "." + side + "ArmMode", itt = "linear", ott = "linear")
#Constrain the driver joints to the fk and ik joints
for fingers in [numIndexJoints, numMiddleJoints, numRingJoints, numPinkyJoints, numThumbJoints]:
for i in range(int(fingers[0])):
driverJoint = "driver_" + fingers[1] + "_0" + str(i + 1) + "_" + side
fkJoint = "rig_fk_" + fingers[1] + "_0" + str(i + 1) + "_" + side
ikJoint = "rig_ik_" + fingers[1] + "_0" + str(i + 1) + "_" + side
#set driven keys on constraint
if cmds.objExists(fingers[1] + "_" + side + "_ik_anim"):
constraint = cmds.parentConstraint([fkJoint, ikJoint], driverJoint)[0]
ikCtrl = fingers[1] + "_finger_" + side + "_mode_anim"
#set driven keyframes on constraint
cmds.setAttr(ikCtrl + "." + "FK_IK", 0)
cmds.setAttr(constraint + "." + fkJoint + "W0", 1)
cmds.setAttr(constraint + "." + ikJoint + "W1", 0)
cmds.setDrivenKeyframe([constraint + "." + fkJoint + "W0", constraint + "." + ikJoint + "W1"], cd = ikCtrl + "." + "FK_IK", itt = "linear", ott = "linear")
cmds.setAttr(ikCtrl + "." + "FK_IK", 1)
cmds.setAttr(constraint + "." + fkJoint + "W0", 0)
cmds.setAttr(constraint + "." + ikJoint + "W1", 1)
cmds.setDrivenKeyframe([constraint + "." + fkJoint + "W0", constraint + "." + ikJoint + "W1"], cd = ikCtrl + "." + "FK_IK", itt = "linear", ott = "linear")
cmds.setAttr(ikCtrl + "." + "FK_IK", 0)
#setup driven keys for fk/ik control visibility
ikCtrl = fingers[1] + "_finger_" + side + "_mode_anim"
cmds.setAttr(ikCtrl + "." + "FK_IK", 0)
cmds.setAttr(fingers[1] + "_finger_fk_ctrl_1_" + side + "_grp.v" , 1)
cmds.setAttr(fingers[1] + "_" + side + "_ik_anim_grp.v", 0)
cmds.setDrivenKeyframe([fingers[1] + "_finger_fk_ctrl_1_" + side + "_grp.v", fingers[1] + "_" + side + "_ik_anim_grp.v"], cd = ikCtrl + "." + "FK_IK", itt = "linear", ott = "linear")
cmds.setAttr(ikCtrl + "." + "FK_IK", 1)
cmds.setAttr(fingers[1] + "_finger_fk_ctrl_1_" + side + "_grp.v" , 0)
cmds.setAttr(fingers[1] + "_" + side + "_ik_anim_grp.v", 1)
cmds.setDrivenKeyframe([fingers[1] + "_finger_fk_ctrl_1_" + side + "_grp.v", fingers[1] + "_" + side + "_ik_anim_grp.v"], cd = ikCtrl + "." + "FK_IK", itt = "linear", ott = "linear")
cmds.setAttr(ikCtrl + "." + "FK_IK", 0)
else:
constraint = cmds.parentConstraint([fkJoint], driverJoint)[0]
#constrain the driver metacarpals(if they exist) to the ik and fk ones
for metacarpal in [thumbMeta, indexMeta, middleMeta, ringMeta, pinkyMeta]:
if metacarpal[0] == True:
driverJoint = "driver_" + metacarpal[1] + "_metacarpal_" + side
fkJoint = "rig_fk_" + metacarpal[1] + "_metacarpal_" + side
ikJoint = "rig_ik_" + metacarpal[1] + "_metacarpal_" + side
if cmds.objExists(metacarpal[1] + "_" + side + "_ik_anim"):
constraint = cmds.parentConstraint([fkJoint, ikJoint], driverJoint)[0]
ikCtrl = metacarpal[1] + "_finger_" + side + "_mode_anim"
constraint = cmds.parentConstraint([fkJoint, ikJoint], driverJoint)[0]
#set driven keyframes on constraint
cmds.setAttr(ikCtrl + "." + "FK_IK", 0)
cmds.setAttr(constraint + "." + fkJoint + "W0", 1)
cmds.setAttr(constraint + "." + ikJoint + "W1", 0)
cmds.setDrivenKeyframe([constraint + "." + fkJoint + "W0", constraint + "." + ikJoint + "W1"], cd = ikCtrl + "." + "FK_IK", itt = "linear", ott = "linear")
cmds.setAttr(ikCtrl + "." + "FK_IK", 1)
cmds.setAttr(constraint + "." + fkJoint + "W0", 0)
cmds.setAttr(constraint + "." + ikJoint + "W1", 1)
cmds.setDrivenKeyframe([constraint + "." + fkJoint + "W0", constraint + "." + ikJoint + "W1"], cd = ikCtrl + "." + "FK_IK", itt = "linear", ott = "linear")
cmds.setAttr(ikCtrl + "." + "FK_IK", 0)
else:
constraint = cmds.parentConstraint([fkJoint], driverJoint)[0]
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
def buildToes(self):
#find out which toe joints need to be rigged
for side in ["l", "r"]:
#create a list to hold all ctrl groups that are created
ctrlGroups = []
ikGrps = []
joints = []
if cmds.objExists("driver_ball_" + side):
children = cmds.listRelatives("driver_ball_" + side, children = True, type = 'joint')
allToes = cmds.listRelatives("driver_ball_" + side, allDescendents = True, type = 'joint')
#find out how many toe joints we have for each toe
bigToeMeta = [False, None]
indexMeta = [False, None]
middleMeta = [False, None]
ringMeta = [False, None]
pinkyMeta = [False, None]
numBigToes = 0
numIndexToes = [0, "index"]
numMiddleToes = [0, "middle"]
numRingToes = [0, "ring"]
numPinkyToes = [0, "pinky"]
if allToes:
for toe in allToes:
#find if metatarsals exist
if toe.find("meta") != -1:
if toe.partition("driver_")[2].find("bigtoe") == 0:
bigToeMeta = [True, "bigtoe"]
if toe.partition("driver_")[2].find("index") == 0:
indexMeta = [True, "index"]
if toe.partition("driver_")[2].find("middle") == 0:
middleMeta = [True, "middle"]
if toe.partition("driver_")[2].find("ring") == 0:
ringMeta = [True, "ring"]
if toe.partition("driver_")[2].find("pinky") == 0:
pinkyMeta = [True, "pinky"]
#get num toes -meta
if toe.partition("driver_")[2].find("bigtoe") == 0:
numBigToes += 1
if toe.partition("driver_")[2].find("index") == 0:
numIndexToes[0] += 1
if toe.partition("driver_")[2].find("middle") == 0:
numMiddleToes[0] += 1
if toe.partition("driver_")[2].find("ring") == 0:
numRingToes[0] += 1
if toe.partition("driver_")[2].find("pinky") == 0:
numPinkyToes[0] += 1
#subtract metatarsals (only if they exist!)
if bigToeMeta[0] == True:
numBigToes -= 1
if indexMeta[0] == True:
numIndexToes[0] -= 1
if middleMeta[0] == True:
numMiddleToes[0] -= 1
if ringMeta[0] == True:
numRingToes[0] -= 1
if pinkyMeta[0] == True:
numPinkyToes[0] -= 1
#duplicate the driver joints to be used as the rig joints
if children:
for child in children:
dupeChildNodes = cmds.duplicate(child, name = "temp")
#parent root joint of each toe to world if not already child of world
parent = cmds.listRelatives(dupeChildNodes[0], parent = True)[0]
if parent != None:
cmds.parent(dupeChildNodes[0], world = True)
#rename duped joints
for node in dupeChildNodes:
if node == "temp":
niceName = child.partition("driver_")[2]
joint = cmds.rename(node, "rig_" + niceName)
joints.append(joint)
else:
niceName = node.partition("driver_")[2]
cmds.rename("rig_*|" + node, "rig_" + niceName)
#if the metacarpal toes exist, create a control for them
for meta in [bigToeMeta, indexMeta, middleMeta, ringMeta, pinkyMeta]:
if meta[0] == True:
#create the control object for the metacarpal
ctrlName = meta[1]
ctrlName = ctrlName + "_metatarsal_ctrl_" + side
control = self.createControl("square", 1, ctrlName)
constraint = cmds.parentConstraint("rig_" + meta[1] + "_metatarsal_" + side, control)[0]
cmds.delete(constraint)
cmds.makeIdentity(control, t = 0, r = 1, s = 0, apply = True)
cmds.setAttr(control + ".rz", -90)
cmds.setAttr(control + ".sz", 15)
#create the group node and parent ctrl to it
ctrlGrp = cmds.group(empty = True, name = ctrlName + "_grp")
ctrlGroups.append(ctrlGrp)
constraint = cmds.parentConstraint("rig_" + meta[1] + "_metatarsal_" + side, ctrlGrp)[0]
cmds.delete(constraint)
cmds.parent(control, ctrlGrp)
cmds.makeIdentity(control, t = 1, r = 1, s = 1, apply = True)
cmds.setAttr(control + ".sz", 0)
cmds.makeIdentity(control, t = 1, r = 1, s = 1, apply = True)
#parent the rig joint to the control
cmds.parentConstraint(control, "rig_" + meta[1] + "_metatarsal_" + side, mo = True)
#lock attrs on control that shouldn't be animated
cmds.setAttr(control + ".tx", lock = True, keyable = False)
cmds.setAttr(control + ".ty", lock = True, keyable = False)
cmds.setAttr(control + ".tz", lock = True, keyable = False)
cmds.setAttr(control + ".sx", lock = True, keyable = False)
cmds.setAttr(control + ".sy", lock = True, keyable = False)
cmds.setAttr(control + ".sz", lock = True, keyable = False)
cmds.setAttr(control + ".v", lock = True, keyable = False)
#color the controls
if side == "l":
color = 5
else:
color = 12
cmds.setAttr(control + ".overrideEnabled", 1)
cmds.setAttr(control + ".overrideColor", color)
#if the number of toes(aside from metacarpals) is 1 or 2, just create fk controls for each toe and setup hierarchy
toeControls = []
for toes in [numIndexToes, numMiddleToes, numRingToes, numPinkyToes]:
if toes[0] < 3:
for i in range(int(toes[0])):
#create an FK control per toe
ctrlName = toes[1] + "_toe_fk_ctrl_" + str(i + 1) + "_" + side
control = self.createControl("circle", 3, ctrlName)
ctrlGrp = cmds.group(empty = True, name = control + "_grp")
metaCtrl = toes[1] + "_metatarsal_ctrl_" + side
if cmds.objExists(metaCtrl) == False:
if (i + 1) == 1:
ctrlGroups.append(ctrlGrp)
toeControls.append(control)
#position control
if i == 0:
constraint = cmds.parentConstraint("rig_" + toes[1] + "_proximal_phalange_" + side, control)[0]
grpConstraint = cmds.parentConstraint("rig_" + toes[1] + "_proximal_phalange_" + side, ctrlGrp)[0]
cmds.delete([constraint, grpConstraint])
cmds.parentConstraint(control, "rig_" + toes[1] + "_proximal_phalange_" + side, mo = True)
if i == 1:
constraint = cmds.parentConstraint("rig_" + toes[1] + "_middle_phalange_" + side, control)[0]
grpConstraint = cmds.parentConstraint("rig_" + toes[1] + "_middle_phalange_" + side, ctrlGrp)[0]
cmds.delete([constraint, grpConstraint])
cmds.parentConstraint(control, "rig_" + toes[1] + "_middle_phalange_" + side, mo = True)
if i == 2:
constraint = cmds.parentConstraint("rig_" + toes[1] + "_distal_phalange_" + side, control)[0]
grpConstraint = cmds.parentConstraint("rig_" + toes[1] + "_distal_phalange_" + side, ctrlGrp)[0]
cmds.delete([constraint, grpConstraint])
cmds.parentConstraint(control, "rig_" + toes[1] + "_distal_phalange_" + side, mo = True)
cmds.makeIdentity(control, t = 0, r = 1, s = 0, apply = True)
cmds.setAttr(control + ".rz", -90)
#duplicate the ctrl group to create the driven group
drivenGrp = cmds.duplicate(ctrlGrp, parentOnly = True, name = control + "_driven_grp")[0]
ctrlGroups.append(drivenGrp)
cmds.parent(drivenGrp, ctrlGrp)
#parent control to grp
cmds.parent(control, drivenGrp)
cmds.makeIdentity(control, t = 1, r = 1, s = 1, apply = True)
#if we aren't the root of the toe chain, then parent our ctrlGrp to the previous fk control
if i != 0:
cmds.parent(ctrlGrp, ctrlParent)
else:
if cmds.objExists(toes[1] + "_metatarsal_ctrl_" + side):
cmds.parent(ctrlGrp, toes[1] + "_metatarsal_ctrl_" + side)
ctrlParent = control
#lock attrs on control that shouldn't be animated
cmds.setAttr(control + ".tx", lock = True, keyable = False)
cmds.setAttr(control + ".ty", lock = True, keyable = False)
cmds.setAttr(control + ".tz", lock = True, keyable = False)
cmds.setAttr(control + ".sx", lock = True, keyable = False)
cmds.setAttr(control + ".sy", lock = True, keyable = False)
cmds.setAttr(control + ".sz", lock = True, keyable = False)
cmds.setAttr(control + ".v", lock = True, keyable = False)
#if the number of toes(aside from metacarpals) is 3, setup a singular rp IK chain, and a SC IK chain for toe 3 and a newly created toe tip joint
else:
#take the end joint and duplicate it
tipJoint = cmds.duplicate("rig_" + toes[1] + "_distal_phalange_" + side, parentOnly = True, name = "rig_" + toes[1] + "_tip_" + side)[0]
cmds.parent(tipJoint, "rig_" + toes[1] + "_distal_phalange_" + side)
#move tip joint out a bit
if side == "r":
cmds.setAttr(tipJoint + ".tx", -5)
else:
cmds.setAttr(tipJoint + ".tx", 5)
#create RP IK handle from base knuckle to distal
toeRpIkNodes = cmds.ikHandle(sol = "ikRPsolver", name = toes[1] + "_RP_ikHandle_" + side, sj = "rig_" + toes[1] + "_proximal_phalange_" + side, ee = "rig_" + toes[1] + "_distal_phalange_" + side)
toeScIkNodes = cmds.ikHandle(sol = "ikSCsolver", name = toes[1] + "_SC_ikHandle_" + side, sj = "rig_" + toes[1] + "_distal_phalange_" + side, ee = tipJoint)
cmds.setAttr(toeRpIkNodes[0] + ".v", 0)
cmds.setAttr(toeScIkNodes[0] + ".v", 0)
#parent SC IK to RP IK
cmds.parent(toeScIkNodes[0], toeRpIkNodes[0])
#create an IK control
control = self.createControl("circle", 3, toes[1] + "_ik_ctrl_" + side)
ctrlGrp = cmds.group(empty = True, name = control + "_grp")
ikGrps.append(ctrlGrp)
toeControls.append(control)
#position control
constraint = cmds.parentConstraint("rig_" + toes[1] + "_distal_phalange_" + side, control)[0]
grpConstraint = cmds.parentConstraint("rig_" + toes[1] + "_distal_phalange_" + side, ctrlGrp)[0]
cmds.delete([constraint, grpConstraint])
#create dummy group so IK controls on both sides behave the same (dummy group will have 180 offset if right side)
dummyGrp = cmds.duplicate(ctrlGrp, parentOnly = True, name = ctrlGrp + "_dummy")[0]
spaceSwitcherFollow = cmds.duplicate(ctrlGrp, parentOnly = True, name = ctrlGrp + "_space_switcher_follow")[0]
spaceSwitcher = cmds.duplicate(ctrlGrp, parentOnly = True, name = ctrlGrp + "_space_switcher")[0]
cmds.parent(spaceSwitcher, spaceSwitcherFollow)
cmds.parent(dummyGrp, spaceSwitcher)
cmds.parent(spaceSwitcherFollow, ctrlGrp)
#parent ctrl to group
if side == "r":
cmds.setAttr(dummyGrp + ".ry", 180)
cmds.parent(control, dummyGrp)
cmds.setAttr(control + ".ry", -90)
cmds.makeIdentity(control, t = 1, r = 1, s = 1, apply = True)
cmds.parent(toeRpIkNodes[0], control)
#lock attrs on control that should not be animated
for attr in [".sx", ".sy", ".sz", ".v"]:
cmds.setAttr(control + attr, lock = True, keyable = False)
#need to do the bigToe separately since it will only have 3 toes max anyway, and bigToesNum == 2 will mean IK setup, and anything less than 2 == FK setup
if numBigToes < 2:
for i in range(int(numBigToes)):
#create an FK control per toe
ctrlName = "bigtoe_toe_fk_ctrl_" + str(i + 1) + "_" + side
control = self.createControl("circle", 8, ctrlName)
ctrlGrp = cmds.group(empty = True, name = control + "_grp")
toeControls.append(control)
metaCtrl = "bigtoe_metatarsal_ctrl_" + side
if cmds.objExists(metaCtrl) == False:
if (i + 1) == 1:
ctrlGroups.append(ctrlGrp)
#position control
if i == 0:
constraint = cmds.parentConstraint("rig_bigtoe_proximal_phalange_" + side, control)[0]
grpConstraint = cmds.parentConstraint("rig_bigtoe_proximal_phalange_" + side, ctrlGrp)[0]
cmds.delete([constraint, grpConstraint])
cmds.parentConstraint(control, "rig_bigtoe_proximal_phalange_" + side, mo = True)
if i == 1:
constraint = cmds.parentConstraint("rig_bigtoe_distal_phalange_" + side, control)[0]
grpConstraint = cmds.parentConstraint("rig_bigtoe_distal_phalange_" + side, ctrlGrp)[0]
cmds.delete([constraint, grpConstraint])
cmds.parentConstraint(control, "rig_bigtoe_distal_phalange_" + side, mo = True)
cmds.makeIdentity(control, t = 0, r = 1, s = 0, apply = True)
cmds.setAttr(control + ".rz", -90)
#duplicate the ctrl group to create the driven group
drivenGrp = cmds.duplicate(ctrlGrp, parentOnly = True, name = control + "_driven_grp")[0]
ctrlGroups.append(drivenGrp)
cmds.parent(drivenGrp, ctrlGrp)
#parent control to grp
cmds.parent(control, drivenGrp)
cmds.makeIdentity(control, t = 1, r = 1, s = 1, apply = True)
#if we aren't the root of the toe chain, then parent our ctrlGrp to the previous fk control
if i != 0:
cmds.parent(ctrlGrp, ctrlParent)
else:
if cmds.objExists("bigtoe_metatarsal_ctrl_" + side):
cmds.parent(ctrlGrp, "bigtoe_metatarsal_ctrl_" + side)
ctrlParent = control
#lock attrs on control that shouldn't be animated
cmds.setAttr(control + ".tx", lock = True, keyable = False)
cmds.setAttr(control + ".ty", lock = True, keyable = False)
cmds.setAttr(control + ".tz", lock = True, keyable = False)
cmds.setAttr(control + ".sx", lock = True, keyable = False)
cmds.setAttr(control + ".sy", lock = True, keyable = False)
cmds.setAttr(control + ".sz", lock = True, keyable = False)
cmds.setAttr(control + ".v", lock = True, keyable = False)
else:
#take the end joint and duplicate it
tipJoint = cmds.duplicate("rig_bigtoe_distal_phalange_" + side, parentOnly = True, name = "rig_bigtoe_tip_" + side)[0]
cmds.parent(tipJoint, "rig_bigtoe_distal_phalange_" + side)
#since the toe has 1 less knuckle, we need another tip
tipJointEnd = cmds.duplicate(tipJoint, parentOnly = True, name = "rig_bigtoe_tip_end_" + side)[0]
cmds.parent(tipJointEnd, tipJoint)
#move tip joint out a bit
if side == "r":
cmds.setAttr(tipJoint + ".tx", -5)
cmds.setAttr(tipJointEnd + ".tx", -5)
else:
cmds.setAttr(tipJoint + ".tx", 5)
cmds.setAttr(tipJointEnd + ".tx", 5)
#set preferred angles on 1rst and 2nd knuckle
cmds.setAttr("rig_bigtoe_proximal_phalange_" + side + ".preferredAngleZ", 45)
cmds.setAttr("rig_bigtoe_distal_phalange_" + side + ".preferredAngleZ", -45)
#create RP IK handle from base knuckle to distal
toeRpIkNodes = cmds.ikHandle(sol = "ikRPsolver", name = "bigtoe_RP_ikHandle_" + side, sj = "rig_bigtoe_proximal_phalange_" + side, ee = tipJoint)
toeScIkNodes = cmds.ikHandle(sol = "ikSCsolver", name = "bigtoe_SC_ikHandle_" + side, sj = "rig_bigtoe_distal_phalange_" + side, ee = tipJointEnd)
cmds.setAttr(toeRpIkNodes[0] + ".v", 0)
cmds.setAttr(toeScIkNodes[0] + ".v", 0)
#parent SC IK to RP IK
cmds.parent(toeScIkNodes[0], toeRpIkNodes[0])
#create an IK control
control = self.createControl("circle", 6, "bigtoe_ik_ctrl_" + side)
ctrlGrp = cmds.group(empty = True, name = control + "_grp")
ikGrps.append(ctrlGrp)
toeControls.append(control)
#position control
constraint = cmds.parentConstraint("rig_bigtoe_distal_phalange_" + side, control)[0]
grpConstraint = cmds.parentConstraint("rig_bigtoe_distal_phalange_" + side, ctrlGrp)[0]
cmds.delete([constraint, grpConstraint])
#create dummy group so IK controls on both sides behave the same (dummy group will have 180 offset if right side)
dummyGrp = cmds.duplicate(ctrlGrp, parentOnly = True, name = ctrlGrp + "_dummy")[0]
spaceSwitcherFollow = cmds.duplicate(ctrlGrp, parentOnly = True, name = ctrlGrp + "_space_switcher_follow")[0]
spaceSwitcher = cmds.duplicate(ctrlGrp, parentOnly = True, name = ctrlGrp + "_space_switcher")[0]
cmds.parent(spaceSwitcher, spaceSwitcherFollow)
cmds.parent(dummyGrp, spaceSwitcher)
cmds.parent(spaceSwitcherFollow, ctrlGrp)
#parent ctrl to group
if side == "r":
cmds.setAttr(dummyGrp + ".ry", 180)
cmds.parent(control, dummyGrp)
cmds.setAttr(control + ".ry", -90)
cmds.makeIdentity(control, t = 1, r = 1, s = 1, apply = True)
cmds.parent(toeRpIkNodes[0], control)
#lock attrs on control that should not be animated
for attr in [".sx", ".sy", ".sz", ".v"]:
cmds.setAttr(control + attr, lock = True, keyable = False)
#setup global control for toes
#if toes < 3, rotation based(spread, curl) (grp node above FK control)
fkControls = []
ikControls = []
for control in toeControls:
if control.find("fk") != -1:
fkControls.append(control)
if control.find("ik") != -1:
ikControls.append(control)
#IF AlL TOES ARE IK TOES
if len(fkControls) == 0:
#create a control at the tip of the toes that will globally move the IK controls
if joints:
control = self.createControl("square", 1, "ik_global_ctrl_" + side)
constraint = cmds.parentConstraint("ikHandle_toe_" + side, control)[0]
cmds.delete(constraint)
cmds.makeIdentity(control, t = 0, r = 1, s = 0, apply = True)
cmds.setAttr(control + ".rz", -90)
cmds.setAttr(control + ".sz", 5)
#add a spread attr to the global control
cmds.select(control)
cmds.addAttr(longName='spread', defaultValue=0, minValue=0, maxValue=10, keyable = True)
#create the group node and parent ctrl to it
ctrlGrp = cmds.group(empty = True, name = "ik_global_ctrl_" + side + "_grp")
ctrlGroups.append(ctrlGrp)
constraint = cmds.parentConstraint("ikHandle_toe_" + side, ctrlGrp)[0]
cmds.delete(constraint)
#create a space switcher node
spaceSwitcherFollow = cmds.duplicate(ctrlGrp, parentOnly = True, name = ctrlGrp + "_space_switcher_follow")[0]
spaceSwitcher = cmds.duplicate(ctrlGrp, parentOnly = True, name = ctrlGrp + "_space_switcher")[0]
cmds.parent(spaceSwitcher, spaceSwitcherFollow)
cmds.parent(spaceSwitcherFollow, ctrlGrp)
cmds.parent(control, spaceSwitcher)
cmds.makeIdentity(control, t = 1, r = 1, s = 1, apply = True)
cmds.setAttr(control + ".rx", 90)
cmds.setAttr(control + ".scale", 2.5, 2.5, 2.5, type = 'double3')
cmds.makeIdentity(control, t = 1, r = 1, s = 1, apply = True)
#set the pivot to be at the base of the toes
pivPos = cmds.xform("jointmover_knuckle_base_" + side, q = True, ws = True, t = True)
cmds.xform(control, ws = True, piv = (pivPos[0], pivPos[1], pivPos[2]))
cmds.xform(ctrlGrp, ws = True, piv = (pivPos[0], pivPos[1], pivPos[2]))
cmds.xform(spaceSwitcher, ws = True, piv = (pivPos[0], pivPos[1], pivPos[2]))
#lock attrs on control that are not needed to be animated
cmds.setAttr(control + ".sx", lock = True, keyable = False)
cmds.setAttr(control + ".sy", lock = True, keyable = False)
cmds.setAttr(control + ".sz", lock = True, keyable = False)
cmds.setAttr(control + ".v", lock = True, keyable = False)
#parent the IK groups to the global grp and also set driven keys for toe spread
for grp in ikGrps:
#create a driven group
group = cmds.group(empty = True, name = grp + "_driven")
if grp.find("index") != -1:
constraint = cmds.parentConstraint("rig_index_proximal_phalange_" + side, group)[0]
cmds.delete(constraint)
cmds.parent(group, "index_ik_ctrl_" + side + "_grp_space_switcher")
cmds.parent("index_ik_ctrl_" + side + "_grp_dummy", group)
cmds.setAttr(control + ".spread", 0)
cmds.setAttr(group + ".ry", 0)
cmds.setDrivenKeyframe(group + ".ry", cd = control + ".spread", itt = "linear", ott = "linear")
cmds.setAttr(control + ".spread", 10)
cmds.setAttr(group + ".ry", -9)
cmds.setDrivenKeyframe(group + ".ry", cd = control + ".spread", itt = "linear", ott = "linear")
cmds.setAttr(control + ".spread", 0)
cmds.setAttr(group + ".ry", 0)
if grp.find("middle") != -1:
constraint = cmds.parentConstraint("rig_middle_proximal_phalange_" + side, group)[0]
cmds.delete(constraint)
cmds.parent(group, "middle_ik_ctrl_" + side + "_grp_space_switcher")
cmds.parent("middle_ik_ctrl_" + side + "_grp_dummy", group)
cmds.setAttr(control + ".spread", 0)
cmds.setAttr(group + ".ry", 0)
cmds.setDrivenKeyframe(group + ".ry", cd = control + ".spread", itt = "linear", ott = "linear")
cmds.setAttr(control + ".spread", 10)
cmds.setAttr(group + ".ry", 9.5)
cmds.setDrivenKeyframe(group + ".ry", cd = control + ".spread", itt = "linear", ott = "linear")
cmds.setAttr(control + ".spread", 0)
cmds.setAttr(group + ".ry", 0)
if grp.find("ring") != -1:
constraint = cmds.parentConstraint("rig_ring_proximal_phalange_" + side, group)[0]
cmds.delete(constraint)
cmds.parent(group, "ring_ik_ctrl_" + side + "_grp_space_switcher")
cmds.parent("ring_ik_ctrl_" + side + "_grp_dummy", group)
cmds.setAttr(control + ".spread", 0)
cmds.setAttr(group + ".ry", 0)
cmds.setDrivenKeyframe(group + ".ry", cd = control + ".spread", itt = "linear", ott = "linear")
cmds.setAttr(control + ".spread", 10)
cmds.setAttr(group + ".ry", 17)
cmds.setDrivenKeyframe(group + ".ry", cd = control + ".spread", itt = "linear", ott = "linear")
cmds.setAttr(control + ".spread", 0)
cmds.setAttr(group + ".ry", 0)
if grp.find("pinky") != -1:
constraint = cmds.parentConstraint("rig_pinky_proximal_phalange_" + side, group)[0]
cmds.delete(constraint)
cmds.parent(group, "pinky_ik_ctrl_" + side + "_grp_space_switcher")
cmds.parent("pinky_ik_ctrl_" + side + "_grp_dummy", group)
cmds.setAttr(control + ".spread", 0)
cmds.setAttr(group + ".ry", 0)
cmds.setDrivenKeyframe(group + ".ry", cd = control + ".spread", itt = "linear", ott = "linear")
cmds.setAttr(control + ".spread", 10)
cmds.setAttr(group + ".ry", 32)
cmds.setDrivenKeyframe(group + ".ry", cd = control + ".spread", itt = "linear", ott = "linear")
cmds.setAttr(control + ".spread", 0)
cmds.setAttr(group + ".ry", 0)
if grp.find("bigtoe") != -1:
constraint = cmds.parentConstraint("rig_bigtoe_proximal_phalange_" + side, group)[0]
cmds.delete(constraint)
cmds.parent(group, "bigtoe_ik_ctrl_" + side + "_grp_space_switcher")
cmds.parent("bigtoe_ik_ctrl_" + side + "_grp_dummy", group)
cmds.setAttr(control + ".spread", 0)
cmds.setAttr(group + ".ry", 0)
cmds.setDrivenKeyframe(group + ".ry", cd = control + ".spread", itt = "linear", ott = "linear")
cmds.setAttr(control + ".spread", 10)
cmds.setAttr(group + ".ry", -15)
cmds.setDrivenKeyframe(group + ".ry", cd = control + ".spread", itt = "linear", ott = "linear")
cmds.setAttr(control + ".spread", 0)
cmds.setAttr(group + ".ry", 0)
cmds.parent(grp, control)
#color the control
if side == "l":
color = 5
else:
color = 12
cmds.setAttr(control + ".overrideEnabled", 1)
cmds.setAttr(control + ".overrideColor", color)
#IF ALL TOES ARE FK TOES
if len(ikControls) == 0:
if joints:
#create a control at the tip of the toes that will give the user some handy global controls, like curl, spread, etc.
control = self.createControl("square", 1, "fk_global_ctrl_" + side)
constraint = cmds.parentConstraint("ikHandle_toe_" + side, control)[0]
cmds.delete(constraint)
cmds.makeIdentity(control, t = 0, r = 1, s = 0, apply = True)
cmds.setAttr(control + ".rz", -90)
cmds.setAttr(control + ".sz", 5)
#create the group node and parent ctrl to it
ctrlGrp = cmds.group(empty = True, name = "fk_global_ctrl_" + side + "_grp")
ctrlGroups.append(ctrlGrp)
constraint = cmds.parentConstraint("ikHandle_toe_" + side, ctrlGrp)[0]
cmds.delete(constraint)
cmds.parent(control, ctrlGrp)
cmds.makeIdentity(control, t = 1, r = 1, s = 1, apply = True)
cmds.setAttr(control + ".rx", 90)
cmds.setAttr(control + ".scale", 2.5, 2.5, 2.5, type = 'double3')
cmds.makeIdentity(control, t = 1, r = 1, s = 1, apply = True)
#lock attrs on control that are not needed to be animated
cmds.setAttr(control + ".tx", lock = True, keyable = False)
cmds.setAttr(control + ".ty", lock = True, keyable = False)
cmds.setAttr(control + ".tz", lock = True, keyable = False)
cmds.setAttr(control + ".sx", lock = True, keyable = False)
cmds.setAttr(control + ".sy", lock = True, keyable = False)
cmds.setAttr(control + ".sz", lock = True, keyable = False)
cmds.setAttr(control + ".v", lock = True, keyable = False)
#find all driven fk grps
drivenGroups = []
for group in ctrlGroups:
if group.find("driven") != -1:
if group.find("fk") != -1:
drivenGroups.append(group)
#add a spread attr to the global control
cmds.select(control)
cmds.addAttr(longName='spread', defaultValue=0, minValue=0, maxValue=10, keyable = True)
#setup driven keys for the fk group nodes
for group in drivenGroups:
#Curl
cmds.setAttr(control + ".rz", 0)
cmds.setAttr(group + ".rz", 0)
cmds.setDrivenKeyframe(group + ".rz", cd = control + ".rz", itt = "linear", ott = "linear")
cmds.setAttr(control + ".rz", -180)
cmds.setAttr(group + ".rz", -90)
cmds.setDrivenKeyframe(group + ".rz", cd = control + ".rz", itt = "linear", ott = "linear")
cmds.setAttr(control + ".rz", 90)
cmds.setAttr(group + ".rz", 45)
cmds.setDrivenKeyframe(group + ".rz", cd = control + ".rz", itt = "linear", ott = "linear")
cmds.setAttr(control + ".rz", 0)
cmds.setAttr(group + ".rz", 0)
#Toe Lean
#only the base knuckle
if group.partition("ctrl_")[2].find("1") == 0:
cmds.setAttr(control + ".ry", 0)
cmds.setAttr(group + ".ry", 0)
cmds.setDrivenKeyframe(group + ".ry", cd = control + ".ry", itt = "linear", ott = "linear")
cmds.setAttr(control + ".ry", 45)
cmds.setAttr(group + ".ry", 45)
cmds.setDrivenKeyframe(group + ".ry", cd = control + ".ry", itt = "linear", ott = "linear")
cmds.setAttr(control + ".ry", -45)
cmds.setAttr(group + ".ry", -45)
cmds.setDrivenKeyframe(group + ".ry", cd = control + ".ry", itt = "linear", ott = "linear")
cmds.setAttr(control + ".ry", 0)
cmds.setAttr(group + ".ry", 0)
#Toe Tilt
if group.find("pinky") != -1:
if group.find("1") != -1:
cmds.setAttr(control + ".rx", 0)
cmds.setAttr(group + ".rz", 0)
cmds.setDrivenKeyframe(group + ".rz", cd = control + ".rx", itt = "linear", ott = "linear")
cmds.setAttr(control + ".rx", 60)
cmds.setAttr(group + ".rz", 65)
cmds.setDrivenKeyframe(group + ".rz", cd = control + ".rx", itt = "linear", ott = "linear")
cmds.setAttr(control + ".rx", -60)
cmds.setAttr(group + ".rz", -65)
cmds.setDrivenKeyframe(group + ".rz", cd = control + ".rx", itt = "linear", ott = "linear")
cmds.setAttr(control + ".rx", 0)
cmds.setAttr(group + ".rz", 0)
if group.find("2") != -1:
cmds.setAttr(control + ".rx", 0)
cmds.setAttr(group + ".rz", 0)
cmds.setDrivenKeyframe(group + ".rz", cd = control + ".rx", itt = "linear", ott = "linear")
cmds.setAttr(control + ".rx", 60)
cmds.setAttr(group + ".rz", -60)
cmds.setDrivenKeyframe(group + ".rz", cd = control + ".rx", itt = "linear", ott = "linear")
cmds.setAttr(control + ".rx", -60)
cmds.setAttr(group + ".rz", 60)
cmds.setDrivenKeyframe(group + ".rz", cd = control + ".rx", itt = "linear", ott = "linear")
cmds.setAttr(control + ".rx", 0)
cmds.setAttr(group + ".rz", 0)
if group.find("ring") != -1:
if group.find("1") != -1:
cmds.setAttr(control + ".rx", 0)
cmds.setAttr(group + ".rz", 0)
cmds.setDrivenKeyframe(group + ".rz", cd = control + ".rx", itt = "linear", ott = "linear")
cmds.setAttr(control + ".rx", 60)
cmds.setAttr(group + ".rz", 45)
cmds.setDrivenKeyframe(group + ".rz", cd = control + ".rx", itt = "linear", ott = "linear")
cmds.setAttr(control + ".rx", -60)
cmds.setAttr(group + ".rz", -45)
cmds.setDrivenKeyframe(group + ".rz", cd = control + ".rx", itt = "linear", ott = "linear")
cmds.setAttr(control + ".rx", 0)
cmds.setAttr(group + ".rz", 0)
if group.find("2") != -1:
cmds.setAttr(control + ".rx", 0)
cmds.setAttr(group + ".rz", 0)
cmds.setDrivenKeyframe(group + ".rz", cd = control + ".rx", itt = "linear", ott = "linear")
cmds.setAttr(control + ".rx", 60)
cmds.setAttr(group + ".rz", -30)
cmds.setDrivenKeyframe(group + ".rz", cd = control + ".rx", itt = "linear", ott = "linear")
cmds.setAttr(control + ".rx", -60)
cmds.setAttr(group + ".rz", 30)
cmds.setDrivenKeyframe(group + ".rz", cd = control + ".rx", itt = "linear", ott = "linear")
cmds.setAttr(control + ".rx", 0)
cmds.setAttr(group + ".rz", 0)
if group.find("middle") != -1:
if group.find("1") != -1:
cmds.setAttr(control + ".rx", 0)
cmds.setAttr(group + ".rz", 0)
cmds.setDrivenKeyframe(group + ".rz", cd = control + ".rx", itt = "linear", ott = "linear")
cmds.setAttr(control + ".rx", 60)
cmds.setAttr(group + ".rz", 50)
cmds.setDrivenKeyframe(group + ".rz", cd = control + ".rx", itt = "linear", ott = "linear")
cmds.setAttr(control + ".rx", -60)
cmds.setAttr(group + ".rz", -50)
cmds.setDrivenKeyframe(group + ".rz", cd = control + ".rx", itt = "linear", ott = "linear")
cmds.setAttr(control + ".rx", 0)
cmds.setAttr(group + ".rz", 0)
if group.find("2") != -1:
cmds.setAttr(control + ".rx", 0)
cmds.setAttr(group + ".rz", 0)
cmds.setDrivenKeyframe(group + ".rz", cd = control + ".rx", itt = "linear", ott = "linear")
cmds.setAttr(control + ".rx", 60)
cmds.setAttr(group + ".rz", -40)
cmds.setDrivenKeyframe(group + ".rz", cd = control + ".rx", itt = "linear", ott = "linear")
cmds.setAttr(control + ".rx", -60)
cmds.setAttr(group + ".rz", 40)
cmds.setDrivenKeyframe(group + ".rz", cd = control + ".rx", itt = "linear", ott = "linear")
cmds.setAttr(control + ".rx", 0)
cmds.setAttr(group + ".rz", 0)
if group.find("index") != -1:
if group.find("1") != -1:
cmds.setAttr(control + ".rx", 0)
cmds.setAttr(group + ".rz", 0)
cmds.setDrivenKeyframe(group + ".rz", cd = control + ".rx", itt = "linear", ott = "linear")
cmds.setAttr(control + ".rx", 60)
cmds.setAttr(group + ".rz", 35)
cmds.setDrivenKeyframe(group + ".rz", cd = control + ".rx", itt = "linear", ott = "linear")
cmds.setAttr(control + ".rx", -60)
cmds.setAttr(group + ".rz", -35)
cmds.setDrivenKeyframe(group + ".rz", cd = control + ".rx", itt = "linear", ott = "linear")
cmds.setAttr(control + ".rx", 0)
cmds.setAttr(group + ".rz", 0)
if group.find("2") != -1:
cmds.setAttr(control + ".rx", 0)
cmds.setAttr(group + ".rz", 0)
cmds.setDrivenKeyframe(group + ".rz", cd = control + ".rx", itt = "linear", ott = "linear")
cmds.setAttr(control + ".rx", 60)
cmds.setAttr(group + ".rz", -25)
cmds.setDrivenKeyframe(group + ".rz", cd = control + ".rx", itt = "linear", ott = "linear")
cmds.setAttr(control + ".rx", -60)
cmds.setAttr(group + ".rz", 25)
cmds.setDrivenKeyframe(group + ".rz", cd = control + ".rx", itt = "linear", ott = "linear")
cmds.setAttr(control + ".rx", 0)
cmds.setAttr(group + ".rz", 0)
if group.find("bigtoe") != -1:
if group.find("1") != -1:
cmds.setAttr(control + ".rx", 0)
cmds.setAttr(group + ".rz", 0)
cmds.setDrivenKeyframe(group + ".rz", cd = control + ".rx", itt = "linear", ott = "linear")
cmds.setAttr(control + ".rx", 60)
cmds.setAttr(group + ".rz", 5)
cmds.setDrivenKeyframe(group + ".rz", cd = control + ".rx", itt = "linear", ott = "linear")
cmds.setAttr(control + ".rx", -60)
cmds.setAttr(group + ".rz", -5)
cmds.setDrivenKeyframe(group + ".rz", cd = control + ".rx", itt = "linear", ott = "linear")
cmds.setAttr(control + ".rx", 0)
cmds.setAttr(group + ".rz", 0)
#toe spread
if group.find("bigtoe") != -1:
if group.find("1") != -1:
cmds.setAttr(control + ".spread", 0)
cmds.setAttr(group + ".ry", 0)
cmds.setDrivenKeyframe(group + ".ry", cd = control + ".spread", itt = "linear", ott = "linear")
cmds.setAttr(control + ".spread", 10)
cmds.setAttr(group + ".ry", -15)
cmds.setDrivenKeyframe(group + ".ry", cd = control + ".spread", itt = "linear", ott = "linear")
cmds.setAttr(control + ".spread", 0)
cmds.setAttr(group + ".ry", 0)
if group.find("index") != -1:
if group.find("1") != -1:
cmds.setAttr(control + ".spread", 0)
cmds.setAttr(group + ".ry", 0)
cmds.setDrivenKeyframe(group + ".ry", cd = control + ".spread", itt = "linear", ott = "linear")
cmds.setAttr(control + ".spread", 10)
cmds.setAttr(group + ".ry", -9)
cmds.setDrivenKeyframe(group + ".ry", cd = control + ".spread", itt = "linear", ott = "linear")
cmds.setAttr(control + ".spread", 0)
cmds.setAttr(group + ".ry", 0)
if group.find("middle") != -1:
if group.find("1") != -1:
cmds.setAttr(control + ".spread", 0)
cmds.setAttr(group + ".ry", 0)
cmds.setDrivenKeyframe(group + ".ry", cd = control + ".spread", itt = "linear", ott = "linear")
cmds.setAttr(control + ".spread", 10)
cmds.setAttr(group + ".ry", 9.5)
cmds.setDrivenKeyframe(group + ".ry", cd = control + ".spread", itt = "linear", ott = "linear")
cmds.setAttr(control + ".spread", 0)
cmds.setAttr(group + ".ry", 0)
if group.find("ring") != -1:
if group.find("1") != -1:
cmds.setAttr(control + ".spread", 0)
cmds.setAttr(group + ".ry", 0)
cmds.setDrivenKeyframe(group + ".ry", cd = control + ".spread", itt = "linear", ott = "linear")
cmds.setAttr(control + ".spread", 10)
cmds.setAttr(group + ".ry", 17)
cmds.setDrivenKeyframe(group + ".ry", cd = control + ".spread", itt = "linear", ott = "linear")
cmds.setAttr(control + ".spread", 0)
cmds.setAttr(group + ".ry", 0)
if group.find("pinky") != -1:
if group.find("1") != -1:
cmds.setAttr(control + ".spread", 0)
cmds.setAttr(group + ".ry", 0)
cmds.setDrivenKeyframe(group + ".ry", cd = control + ".spread", itt = "linear", ott = "linear")
cmds.setAttr(control + ".spread", 10)
cmds.setAttr(group + ".ry", 32)
cmds.setDrivenKeyframe(group + ".ry", cd = control + ".spread", itt = "linear", ott = "linear")
cmds.setAttr(control + ".spread", 0)
cmds.setAttr(group + ".ry", 0)
if len(ikControls) and len(fkControls) > 0:
for group in ikGrps:
ctrlGroups.append(group)
#need to hook into foot rig, both fk and ik. To do this, we'll group up the toe controls for each side, and parent under the driver ball
if joints:
masterGrp = cmds.group(empty = True, name = "toe_rig_" + side + "_grp")
jointsGrp = cmds.group(empty = True, name = "toe_rig_joints_" + side + "_grp")
cmds.parent(joints, jointsGrp)
cmds.parent(jointsGrp, masterGrp)
for group in ctrlGroups:
if group.find("driven") == -1:
cmds.parent(group, masterGrp)
cmds.parentConstraint("driver_ball_" + side, masterGrp, mo = True)
#parent toe groups to leg sys grp
cmds.parent(masterGrp, "leg_sys_grp")
#Need to constrain driver joints to rig joints
for toe in allToes:
rigToe = toe.partition("driver_")[2]
rigToe = "rig_" + rigToe
cmds.parentConstraint(rigToe, toe)
#color the controls
for control in toeControls:
if side == "l":
color = 5
else:
color = 12
cmds.setAttr(control + ".overrideEnabled", 1)
cmds.setAttr(control + ".overrideColor", color)
#lastly, hook up toe control visibility to foot control attribute
cmds.connectAttr("ik_foot_anim_" + side + ".toeCtrlVis", masterGrp + ".v")
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
def finishLegs(self):
    """
    Finalize both leg rigs ('l' and 'r').

    For each side this: organizes the fk/ik leg joints under a joint group;
    exports the leg group and re-imports it as an invisible duplicate leg
    (nodes prefixed 'invis_legs_') used to drive the hips; builds 'result'
    joints blended between the FK and IK chains via a '<side>LegMode' enum
    attribute added to Rig_Settings (0 = fk, 1 = ik, defaulted to ik);
    blends joint scale through blendColors nodes; applies scale limits; and
    builds optional thigh/calf twist setups based on Skeleton_Settings
    attributes.  Finally all leg groups are parented under a new
    'leg_sys_grp'.

    NOTE(review): assumes the scene already contains the driver skeleton
    ('driver_thigh_<side>', ...), the fk/ik leg chains, 'Rig_Settings',
    'Skeleton_Settings', 'hip_anim' and 'leg_group_<side>' — confirm
    against the earlier build steps.
    """
    for side in ["l", "r"]:
        # Whether this side's skeleton has a ball joint.  Reset per side:
        # previously this flag was initialized once before the loop, so a
        # ball on the left side left a stale True (and a stale ballJoint
        # variable) when rigging a right side without a ball joint.
        ball = False

        #organize joints
        legJointGrp = cmds.group(empty = True, name = "leg_joints_grp_" + side)
        # snap the group to the driver thigh, then discard the temp constraint
        constraint = cmds.parentConstraint("driver_thigh_" + side, legJointGrp)[0]
        cmds.delete(constraint)
        cmds.parent(["fk_leg_thigh_" + side, "ik_leg_thigh_" + side, "fk_thigh_" + side + "_orient_grp"], legJointGrp)
        cmds.parent(legJointGrp, "leg_group_" + side)

        #create invisible legs that will drive the hips: export the current
        #leg group to disk, then import it back with renamed ('invis_legs_') nodes
        filePath = os.path.join(self.mayaToolsDir, "General", "ART", "invis_legs.mb")
        cmds.select("leg_group_" + side, replace = True)
        cmds.file(filePath, es = True, type = "mayaBinary", force = True)
        invisLegNodes = cmds.file(filePath, i = True, returnNewNodes = True, renameAll = True)

        #clean up import: remove any imported body_anim_space nodes
        for node in invisLegNodes:
            if node.find("body_anim_space") != -1:
                if cmds.objExists(node):
                    cmds.delete(node)

        #constrain the no flip begin joint to the driver pelvis
        cmds.parentConstraint("driver_pelvis", "noflip_begin_joint_" + side, mo = True)
        cmds.parentConstraint("hip_anim", "invis_legs_leg_group_" + side, mo = True)

        #connect real knee vector to invis knee vector
        cmds.connectAttr("noflip_pv_loc_" + side + ".translate", ("invis_legs_noflip_pv_loc_" + side + ".translate"))
        cmds.delete("invis_legs_ik_knee_anim_grp_" + side)
        if cmds.objExists("invis_legs_ik_leg_" + side + "_twistMultNode"):
            cmds.delete("invis_legs_ik_leg_" + side + "_twistMultNode")
        if side == "r":
            # rewire the right invis ik handle twist to follow the real foot ik handle
            cmds.disconnectAttr("invis_legs_ik_foot_anim_" + side + ".knee_twist", "invis_legs_foot_ikHandle_" + side + ".twist")
            cmds.connectAttr("foot_ikHandle_" + side + ".twist", "invis_legs_foot_ikHandle_" + side + ".twist")

        #make sure leg orients are good
        tempConstraint = cmds.orientConstraint("fk_thigh_" + side + "_orient_grp", "invis_legs_fk_thigh_" + side + "_orient_grp")[0]
        cmds.delete(tempConstraint)

        #point constraint invis target loc to real foot control so invis IK goes with real foot. delete invis foot
        cmds.parentConstraint("ik_foot_anim_" + side, ("invis_legs_ik_foot_anim_" + side))

        #drive invis fk thigh with real
        cmds.connectAttr("fk_thigh_" + side + "_anim.rotate", "invis_legs_fk_thigh_" + side + "_anim.rotate")
        cmds.connectAttr("fk_calf_" + side + "_anim.rotate", "invis_legs_fk_calf_" + side + "_anim.rotate")

        #hide invisible legs and reparent them under the real leg group
        cmds.setAttr("invis_legs_leg_group_" + side + ".v", 0)
        parent = cmds.listRelatives("invis_legs_leg_group_" + side, parent = True)
        cmds.parent("invis_legs_leg_group_" + side, "leg_group_" + side)
        if parent:
            cmds.delete(parent)

        #create result joints (the joints the driver skeleton will follow)
        thighJoint = cmds.duplicate("driver_thigh_" + side, name = "result_leg_thigh_" + side, parentOnly = True)[0]
        calfJoint = cmds.duplicate("driver_calf_" + side, name = "result_leg_calf_" + side, parentOnly = True)[0]
        footJoint = cmds.duplicate("driver_foot_" + side, name = "result_leg_foot_" + side, parentOnly = True)[0]
        if cmds.objExists("driver_ball_" + side):
            ball = True
            ballJoint = cmds.duplicate("driver_ball_" + side, name = "result_leg_ball_" + side, parentOnly = True)[0]
        for joint in [thighJoint, calfJoint, footJoint]:
            cmds.parent(joint, world = True)
        if ball:
            cmds.parent(ballJoint, world = True)
        cmds.parent(footJoint, calfJoint)
        cmds.parent(calfJoint, thighJoint)
        if ball:
            cmds.parent(ballJoint, footJoint)
        # freeze rotations so the result chain has clean orients
        cmds.makeIdentity(thighJoint, t = 0, r = 1, s = 0, apply = True)

        #create IK fix joints so that all the orients of the leg systems match up
        ikFixThighJoint = cmds.duplicate("driver_thigh_" + side, name = "ikFix_leg_thigh_" + side, parentOnly = True)[0]
        ikFixCalfJoint = cmds.duplicate("driver_calf_" + side, name = "ikFix_leg_calf_" + side, parentOnly = True)[0]
        ikFixFootJoint = cmds.duplicate("driver_foot_" + side, name = "ikFix_leg_foot_" + side, parentOnly = True)[0]
        if cmds.objExists("driver_ball_" + side):
            ball = True
            ikFixBallJoint = cmds.duplicate("driver_ball_" + side, name = "ikFix_leg_ball_" + side, parentOnly = True)[0]
        for joint in [ikFixThighJoint, ikFixCalfJoint, ikFixFootJoint]:
            cmds.parent(joint, world = True)
        if ball:
            cmds.parent(ikFixBallJoint, world = True)
        cmds.parent(ikFixFootJoint, ikFixCalfJoint)
        cmds.parent(ikFixCalfJoint, ikFixThighJoint)
        if ball:
            cmds.parent(ikFixBallJoint, ikFixFootJoint)
        cmds.makeIdentity(ikFixThighJoint, t = 0, r = 1, s = 0, apply = True)
        cmds.parentConstraint("ik_leg_thigh_" + side, ikFixThighJoint, mo = True)
        cmds.parentConstraint("ik_leg_calf_" + side, ikFixCalfJoint, mo = True)
        cmds.parentConstraint("ik_leg_foot_" + side, ikFixFootJoint, mo = True)
        if ball:
            cmds.parentConstraint("ik_leg_ball_" + side, ikFixBallJoint, mo = True)

        #constrain result joints to fk and ik joints
        thighConstraint = cmds.parentConstraint(["fk_leg_thigh_" + side, ikFixThighJoint], thighJoint, mo = True)[0]
        calfConstraint = cmds.parentConstraint(["fk_leg_calf_" + side, ikFixCalfJoint], calfJoint, mo = True)[0]
        footConstraint = cmds.parentConstraint(["fk_leg_foot_" + side, ikFixFootJoint], footJoint, mo = True)[0]
        if ball:
            ballConstraint = cmds.parentConstraint(["fk_leg_ball_" + side, ikFixBallJoint], ballJoint, mo = True)[0]

        #add fk/ik switch attr to the rig settings node
        cmds.select("Rig_Settings")
        cmds.addAttr(longName=(side + "LegMode"), at = 'enum', en = "fk:ik:", keyable = True)

        #connect up attr to constraints
        constraints = [thighConstraint, calfConstraint, footConstraint]
        if ball:
            constraints.append(ballConstraint)
        reverseNode = cmds.shadingNode("reverse", asUtility = True, name = "legSwitcher_reverse_node_" + side)
        cmds.connectAttr("Rig_Settings" + "." + side + "LegMode", reverseNode + ".inputX")
        for constraint in constraints:
            # weight alias order matches the target order above: [0] = fk, [1] = ik
            targets = cmds.parentConstraint(constraint, q = True, weightAliasList = True)
            cmds.connectAttr("Rig_Settings" + "." + side + "LegMode", constraint + "." + targets[1])
            cmds.connectAttr(reverseNode + ".outputX", constraint + "." + targets[0])

        #connect up visibility of controls to leg mode
        cmds.connectAttr("Rig_Settings" + "." + side + "LegMode", "ik_leg_grp_" + side + ".v")
        cmds.connectAttr(reverseNode + ".outputX", "fk_thigh_" + side + "_anim_grp.v")

        #set default to IK
        cmds.setAttr("Rig_Settings" + "." + side + "LegMode", 1)

        #constrain driver legs to result legs
        cmds.parentConstraint(thighJoint, "driver_thigh_" + side, mo = True)
        cmds.parentConstraint(calfJoint, "driver_calf_" + side, mo = True)
        cmds.parentConstraint(footJoint, "driver_foot_" + side, mo = True)
        if ball:
            cmds.parentConstraint(ballJoint, "driver_ball_" + side, mo = True)

        #create blend nodes for the scale (color1 = ik chain, color2 = fk anims)
        scaleBlendColors_UpLeg = cmds.shadingNode("blendColors", asUtility = True, name = side + "_up_leg_scale_blend")
        cmds.connectAttr("ik_leg_thigh_" + side + ".scale", scaleBlendColors_UpLeg + ".color1")
        cmds.connectAttr("fk_thigh_" + side + "_anim.scale", scaleBlendColors_UpLeg + ".color2")
        cmds.connectAttr(scaleBlendColors_UpLeg + ".output", "driver_thigh_" + side + ".scale")

        scaleBlendColors_LoLeg = cmds.shadingNode("blendColors", asUtility = True, name = side + "_lo_leg_scale_blend")
        cmds.connectAttr("ik_leg_calf_" + side + ".scale", scaleBlendColors_LoLeg + ".color1")
        cmds.connectAttr("fk_calf_" + side + "_anim.scale", scaleBlendColors_LoLeg + ".color2")
        cmds.connectAttr(scaleBlendColors_LoLeg + ".output", "driver_calf_" + side + ".scale")

        scaleBlendColors_Foot = cmds.shadingNode("blendColors", asUtility = True, name = side + "_foot_scale_blend")
        cmds.connectAttr("ik_leg_foot_" + side + ".scale", scaleBlendColors_Foot + ".color1")
        cmds.connectAttr("fk_foot_" + side + "_anim.scale", scaleBlendColors_Foot + ".color2")
        cmds.connectAttr(scaleBlendColors_Foot + ".output", "driver_foot_" + side + ".scale")

        #set scale limits so the leg joints cannot collapse past .05 or stretch past 2.0
        cmds.select("driver_thigh_" + side)
        cmds.transformLimits(sy = (.05, 2.0), sz = (.05, 2.0), esy = [False, True], esz = [False, True])
        cmds.select("driver_calf_" + side)
        cmds.transformLimits(sy = (.05, 2.0), sz = (.05, 2.0), esy = [False, True], esz = [False, True])

        #clean up legs hierarchy
        cmds.parent([thighJoint, ikFixThighJoint], legJointGrp)
        cmds.setAttr("leg_ctrl_grp_" + side + ".v", 0)

        #hide stuff
        cmds.setAttr(ikFixThighJoint + ".v", 0)
        cmds.setAttr("fk_leg_thigh_" + side + ".v", 0)

        #Setup Twist Joints if the user selected them
        if side == "l":
            if cmds.getAttr("Skeleton_Settings.leftUpperLegTwist") > 0:
                self.buildThighTwist("l")
            if cmds.getAttr("Skeleton_Settings.leftLowerLegTwist") > 0:
                self.buildCalfTwist("l")
        if side == "r":
            if cmds.getAttr("Skeleton_Settings.rightUpperLegTwist") > 0:
                self.buildThighTwist("r")
            if cmds.getAttr("Skeleton_Settings.rightLowerLegTwist") > 0:
                self.buildCalfTwist("r")

    #clean up the leg hierarchy: group all leg systems under 1 group
    legMasterGrp = cmds.group(empty = True, name = "leg_sys_grp")
    cmds.parent(["leg_group_l", "leg_group_r"], legMasterGrp)
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
def buildCalfTwist(self, side):
    """
    Build the calf (lower-leg) twist setup for one side.

    side: "l" or "r".

    Creates a roll group constrained to the driver calf, a twist joint plus
    a low-radius 'mod' joint, and a manual twist control.  The first twist
    joint is driven automatically: driver_foot_<side>.rx feeds a
    multiplyDivide node scaled by a new '<side>CalfTwistAmount' attribute
    (default .5) on Rig_Settings, and the product drives the twist joint's
    rx.  Control/mod/joint visibility is toggled by a new
    '<side>CalfTwistCtrlVis' attribute added to hip_anim.  If the cached
    skeleton settings request more than one calf twist bone, up to two
    additional controls ('twist2', 'twist3') are built, each driven from
    the first twist joint's rx through its own mult node and amount attr.

    NOTE(review): assumes 'driver_calf_<side>', 'driver_calf_twist_01_<side>',
    'hip_anim', 'Rig_Settings' and 'SkeletonSettings_Cache' exist — confirm
    against the earlier build steps.
    """
    # control color: 5 (blue) for left, 12 (red) for right
    if side == "l":
        color = 5
    else:
        color = 12
    #create our roll group
    rollGrp = cmds.group(empty = True, name = "calf_" + side + "_roll_grp")
    cmds.parentConstraint("driver_calf_" + side, rollGrp)
    #create our twist joint and twist mod joint
    cmds.select(clear = True)
    twistJoint = cmds.joint(name = "calf_" + side + "_twist_joint")
    cmds.select(clear = True)
    # snap the twist joint to the driver twist bone, then drop the temp constraint
    constraint = cmds.parentConstraint("driver_calf_twist_01_" + side, twistJoint)[0]
    cmds.delete(constraint)
    cmds.parent(twistJoint, rollGrp)
    # freeze rotations so the twist joint starts zeroed under the roll grp
    cmds.makeIdentity(twistJoint, t = 0, r = 1, s = 0, apply = True)
    #twist mod joint
    twistMod = cmds.duplicate(twistJoint, po = True, name = "calf_" + side + "_twist_mod")[0]
    cmds.parent(twistMod, twistJoint)
    #create the manual twist control
    twistCtrl = self.createControl("circle", 15, "calf_" + side + "_twist_anim")
    cmds.setAttr(twistCtrl + ".ry", -90)
    cmds.makeIdentity(twistCtrl, r = 1, apply =True)
    constraint = cmds.parentConstraint(twistMod, twistCtrl)[0]
    cmds.delete(constraint)
    twistCtrlGrp = cmds.group(empty = True, name = "calf_" + side + "_twist_anim_grp")
    constraint = cmds.parentConstraint(twistMod, twistCtrlGrp)[0]
    cmds.delete(constraint)
    cmds.parent(twistCtrl, twistCtrlGrp)
    cmds.parent(twistCtrlGrp, twistMod)
    cmds.makeIdentity(twistCtrl, t = 1, r = 1, s = 1, apply = True)
    cmds.setAttr(twistCtrl + ".overrideEnabled", 1)
    cmds.setAttr(twistCtrl + ".overrideColor", color)
    # lock and hide scale; hide visibility from the channel box
    for attr in [".sx", ".sy", ".sz"]:
        cmds.setAttr(twistCtrl + attr, lock = True, keyable = False)
    cmds.setAttr(twistCtrl + ".v", keyable = False)
    #add attr on clavicle anim for manual twist control visibility
    cmds.select("hip_anim")
    cmds.addAttr(longName=(side + "CalfTwistCtrlVis"), at = 'bool', dv = 0, keyable = True)
    cmds.connectAttr("hip_anim." + side + "CalfTwistCtrlVis", twistCtrl + ".v")
    cmds.connectAttr("hip_anim." + side + "CalfTwistCtrlVis", twistMod + ".v")
    cmds.connectAttr("hip_anim." + side + "CalfTwistCtrlVis", twistJoint + ".v")
    # shrink joint display radii so they don't clutter the viewport
    cmds.setAttr(twistMod + ".radius", .01)
    cmds.setAttr(twistJoint + ".radius", .01)
    #setup a simple relationship of foot rotateX value into mult node. input2X is driven by an attr on rig settings for twist amt(default is .5). Output into twist joint
    twistMultNode = cmds.shadingNode("multiplyDivide", asUtility = True, name = "calf_twist_" + side + "_mult_node")
    #add attr to rig settings
    cmds.select("Rig_Settings")
    cmds.addAttr(longName= ( side + "CalfTwistAmount" ), defaultValue=.5, minValue=0, maxValue=1, keyable = True)
    #connect output of driver hand into input1x
    cmds.connectAttr("driver_foot_" + side + ".rx", twistMultNode + ".input1X")
    #connect attr into input2x
    cmds.connectAttr("Rig_Settings." + side + "CalfTwistAmount", twistMultNode + ".input2X")
    #connect output into driver calf twist
    cmds.connectAttr(twistMultNode + ".outputX", twistJoint + ".rx")
    #constrain driver joint to twist joint
    cmds.parentConstraint(twistCtrl, "driver_calf_twist_01_" + side, mo = True)
    #if there is more than 1 roll bone, set those up now:
    if side == "l":
        sideName = "left"
    else:
        sideName = "right"
    # cached value is a string repr of a sequence; first element is the count
    data = cmds.getAttr("SkeletonSettings_Cache." + sideName + "LegOptions_numCalfTwistBones")
    numRolls = ast.literal_eval(data)[0]
    if numRolls > 1:
        for i in range(int(numRolls)):
            # i == 1 -> build the second twist bone's control/driver network
            if i == 1:
                # with multiple twist bones, bump the first bone's twist share to .75
                cmds.setAttr("Rig_Settings." + side + "CalfTwistAmount", .75)
                cmds.select("Rig_Settings")
                cmds.addAttr(longName= ( side + "CalfTwist2Amount" ), defaultValue=.5, minValue=0, maxValue=1, keyable = True)
                #create the manual twist control setup
                twistMod = cmds.duplicate("driver_calf_twist_0" + str(i + 1) + "_" + side , po = True, name = "calf_" + side + "_twist2_mod")[0]
                cmds.parent(twistMod, rollGrp)
                #create the manual twist control
                twistCtrl = self.createControl("circle", 15, "calf_" + side + "_twist2_anim")
                cmds.setAttr(twistCtrl + ".ry", -90)
                cmds.makeIdentity(twistCtrl, r = 1, apply =True)
                constraint = cmds.parentConstraint(twistMod, twistCtrl)[0]
                cmds.delete(constraint)
                twistCtrlGrp = cmds.group(empty = True, name = "calf_" + side + "_twist2_anim_grp")
                constraint = cmds.parentConstraint(twistMod, twistCtrlGrp)[0]
                cmds.delete(constraint)
                cmds.parent(twistCtrl, twistCtrlGrp)
                cmds.parent(twistCtrlGrp, twistMod)
                cmds.makeIdentity(twistCtrl, t = 1, r = 1, s = 1, apply = True)
                cmds.connectAttr("hip_anim." + side + "CalfTwistCtrlVis", twistCtrl + ".v")
                cmds.connectAttr("hip_anim." + side + "CalfTwistCtrlVis", twistMod + ".v")
                for attr in [".sx", ".sy", ".sz"]:
                    cmds.setAttr(twistCtrl + attr, lock = True, keyable = False)
                cmds.setAttr(twistCtrl + ".v", keyable = False)
                cmds.setAttr(twistCtrl + ".overrideEnabled", 1)
                cmds.setAttr(twistCtrl + ".overrideColor", color)
                #drive the twist joint
                twistMultNode = cmds.shadingNode("multiplyDivide", asUtility = True, name = "calf_twist_2_" + side + "_mult_node")
                cmds.connectAttr("driver_calf_twist_01_" + side + ".rx", twistMultNode + ".input1X")
                cmds.connectAttr("Rig_Settings." + side + "CalfTwist2Amount", twistMultNode + ".input2X")
                cmds.connectAttr(twistMultNode + ".outputX", twistCtrlGrp + ".rx")
                cmds.parentConstraint(twistCtrl, "driver_calf_twist_0" + str(i + 1) + "_" + side, mo = True)
            # i == 2 -> build the third twist bone's control/driver network
            if i == 2:
                cmds.select("Rig_Settings")
                cmds.addAttr(longName= ( side + "CalfTwist3Amount" ), defaultValue=.25, minValue=0, maxValue=1, keyable = True)
                #create the manual twist control setup
                twistMod = cmds.duplicate("driver_calf_twist_0" + str(i + 1) + "_" + side , po = True, name = "calf_" + side + "_twist3_mod")[0]
                cmds.parent(twistMod, rollGrp)
                #create the manual twist control
                twistCtrl = self.createControl("circle", 15, "calf_" + side + "_twist3_anim")
                cmds.setAttr(twistCtrl + ".ry", -90)
                cmds.makeIdentity(twistCtrl, r = 1, apply =True)
                constraint = cmds.parentConstraint(twistMod, twistCtrl)[0]
                cmds.delete(constraint)
                twistCtrlGrp = cmds.group(empty = True, name = "calf_" + side + "_twist3_anim_grp")
                constraint = cmds.parentConstraint(twistMod, twistCtrlGrp)[0]
                cmds.delete(constraint)
                cmds.parent(twistCtrl, twistCtrlGrp)
                cmds.parent(twistCtrlGrp, twistMod)
                cmds.makeIdentity(twistCtrl, t = 1, r = 1, s = 1, apply = True)
                cmds.connectAttr("hip_anim." + side + "CalfTwistCtrlVis", twistCtrl + ".v")
                cmds.connectAttr("hip_anim." + side + "CalfTwistCtrlVis", twistMod + ".v")
                for attr in [".sx", ".sy", ".sz"]:
                    cmds.setAttr(twistCtrl + attr, lock = True, keyable = False)
                cmds.setAttr(twistCtrl + ".v", keyable = False)
                cmds.setAttr(twistCtrl + ".overrideEnabled", 1)
                cmds.setAttr(twistCtrl + ".overrideColor", color)
                #drive the twist joint
                twistMultNode = cmds.shadingNode("multiplyDivide", asUtility = True, name = "calf_twist_3_" + side + "_mult_node")
                cmds.connectAttr("driver_calf_twist_01_" + side + ".rx", twistMultNode + ".input1X")
                cmds.connectAttr("Rig_Settings." + side + "CalfTwist3Amount", twistMultNode + ".input2X")
                cmds.connectAttr(twistMultNode + ".outputX", twistCtrlGrp + ".rx")
                cmds.parentConstraint(twistCtrl, "driver_calf_twist_0" + str(i + 1) + "_" + side, mo = True)
    #clean up hierarchy
    cmds.parent(rollGrp, "leg_group_" + side)
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
def buildForearmTwist(self, side):
    """
    Build the forearm (lower-arm) twist setup for one side.

    side: "l" or "r".

    Mirrors buildCalfTwist for the arm: creates a roll group constrained to
    the driver lowerarm, a twist joint plus low-radius 'mod' joint, and a
    manual twist control.  The first twist joint is driven automatically:
    driver_hand_<side>.rx feeds a multiplyDivide node scaled by a new
    '<side>ForearmTwistAmount' attribute (default .5) on Rig_Settings, and
    the product drives the twist joint's rx.  Visibility is toggled by a
    new 'twistCtrlVisLower' attribute added to clavicle_<side>_anim.  If
    the cached skeleton settings request more than one lower-arm twist
    bone, up to two additional controls ('twist2', 'twist3') are built,
    each driven from the first twist joint's rx.

    NOTE(review): unlike buildCalfTwist, this version does not freeze the
    twist joint after parenting it to the roll group, and does not parent
    rollGrp into an arm group here — presumably handled elsewhere; confirm.
    """
    # control color: 5 (blue) for left, 12 (red) for right
    if side == "l":
        color = 5
    else:
        color = 12
    #create our roll group
    rollGrp = cmds.group(empty = True, name = "lowerarm_" + side + "_roll_grp")
    cmds.parentConstraint("driver_lowerarm_" + side, rollGrp)
    #create our twist joint and twist mod joint
    cmds.select(clear = True)
    twistJoint = cmds.joint(name = "lowerarm_" + side + "_twist_joint")
    cmds.select(clear = True)
    # snap the twist joint to the driver twist bone, then drop the temp constraint
    constraint = cmds.parentConstraint("driver_lowerarm_twist_01_" + side, twistJoint)[0]
    cmds.delete(constraint)
    cmds.parent(twistJoint, rollGrp)
    #twist mod joint
    twistMod = cmds.duplicate(twistJoint, po = True, name = "lowerarm_" + side + "_twist_mod")[0]
    cmds.parent(twistMod, twistJoint)
    #create the manual twist control
    twistCtrl = self.createControl("circle", 15, "lowerarm_" + side + "_twist_anim")
    cmds.setAttr(twistCtrl + ".ry", -90)
    cmds.makeIdentity(twistCtrl, r = 1, apply =True)
    constraint = cmds.parentConstraint(twistMod, twistCtrl)[0]
    cmds.delete(constraint)
    twistCtrlGrp = cmds.group(empty = True, name = "lowerarm_" + side + "_twist_anim_grp")
    constraint = cmds.parentConstraint(twistMod, twistCtrlGrp)[0]
    cmds.delete(constraint)
    cmds.parent(twistCtrl, twistCtrlGrp)
    cmds.parent(twistCtrlGrp, twistMod)
    cmds.makeIdentity(twistCtrl, t = 1, r = 1, s = 1, apply = True)
    cmds.setAttr(twistCtrl + ".overrideEnabled", 1)
    cmds.setAttr(twistCtrl + ".overrideColor", color)
    # lock and hide scale; hide visibility from the channel box
    for attr in [".sx", ".sy", ".sz"]:
        cmds.setAttr(twistCtrl + attr, lock = True, keyable = False)
    cmds.setAttr(twistCtrl + ".v", keyable = False)
    #add attr on clavicle anim for manual twist control visibility
    cmds.select("clavicle_" + side + "_anim")
    cmds.addAttr(longName=("twistCtrlVisLower"), at = 'bool', dv = 0, keyable = True)
    cmds.connectAttr("clavicle_" + side + "_anim.twistCtrlVisLower", twistCtrl + ".v")
    cmds.connectAttr("clavicle_" + side + "_anim.twistCtrlVisLower", twistMod + ".v")
    cmds.connectAttr("clavicle_" + side + "_anim.twistCtrlVisLower", twistJoint + ".v")
    # shrink joint display radii so they don't clutter the viewport
    cmds.setAttr(twistMod + ".radius", .01)
    cmds.setAttr(twistJoint + ".radius", .01)
    #setup a simple relationship of foot rotateX value into mult node. input2X is driven by an attr on rig settings for twist amt(default is .5). Output into twist joint
    twistMultNode = cmds.shadingNode("multiplyDivide", asUtility = True, name = "forearm_twist_" + side + "_mult_node")
    #add attr to rig settings
    cmds.select("Rig_Settings")
    cmds.addAttr(longName= ( side + "ForearmTwistAmount" ), defaultValue=.5, minValue=0, maxValue=1, keyable = True)
    #connect output of driver hand into input1x
    cmds.connectAttr("driver_hand_" + side + ".rx", twistMultNode + ".input1X")
    #connect attr into input2x
    cmds.connectAttr("Rig_Settings." + side + "ForearmTwistAmount", twistMultNode + ".input2X")
    #connect output into driver calf twist
    cmds.connectAttr(twistMultNode + ".outputX", twistJoint + ".rx")
    #constrain driver joint to twist joint
    cmds.parentConstraint(twistCtrl, "driver_lowerarm_twist_01_" + side, mo = True)
    #if there is more than 1 roll bone, set those up now:
    if side == "l":
        sideName = "left"
    else:
        sideName = "right"
    # cached value is a string repr of a sequence; first element is the count
    data = cmds.getAttr("SkeletonSettings_Cache." + sideName + "ArmOptions_numLowArmTwistBones")
    numRolls = ast.literal_eval(data)[0]
    if numRolls > 1:
        for i in range(int(numRolls)):
            # i == 1 -> build the second twist bone's control/driver network
            if i == 1:
                # with multiple twist bones, bump the first bone's twist share to .75
                cmds.setAttr("Rig_Settings." + side + "ForearmTwistAmount", .75)
                cmds.select("Rig_Settings")
                cmds.addAttr(longName= ( side + "ForearmTwist2Amount" ), defaultValue=.5, minValue=0, maxValue=1, keyable = True)
                #create the manual twist control setup
                twistMod = cmds.duplicate("driver_lowerarm_twist_0" + str(i + 1) + "_" + side , po = True, name = "lowerarm_" + side + "_twist2_mod")[0]
                cmds.parent(twistMod, rollGrp)
                #create the manual twist control
                twistCtrl = self.createControl("circle", 15, "lowerarm_" + side + "_twist2_anim")
                cmds.setAttr(twistCtrl + ".ry", -90)
                cmds.makeIdentity(twistCtrl, r = 1, apply =True)
                constraint = cmds.parentConstraint(twistMod, twistCtrl)[0]
                cmds.delete(constraint)
                twistCtrlGrp = cmds.group(empty = True, name = "lowerarm_" + side + "_twist2_anim_grp")
                constraint = cmds.parentConstraint(twistMod, twistCtrlGrp)[0]
                cmds.delete(constraint)
                cmds.parent(twistCtrl, twistCtrlGrp)
                cmds.parent(twistCtrlGrp, twistMod)
                cmds.makeIdentity(twistCtrl, t = 1, r = 1, s = 1, apply = True)
                cmds.connectAttr("clavicle_" + side + "_anim.twistCtrlVisLower", twistCtrl + ".v")
                cmds.connectAttr("clavicle_" + side + "_anim.twistCtrlVisLower", twistMod + ".v")
                for attr in [".sx", ".sy", ".sz"]:
                    cmds.setAttr(twistCtrl + attr, lock = True, keyable = False)
                cmds.setAttr(twistCtrl + ".v", keyable = False)
                cmds.setAttr(twistCtrl + ".overrideEnabled", 1)
                cmds.setAttr(twistCtrl + ".overrideColor", color)
                #drive the twist joint
                twistMultNode = cmds.shadingNode("multiplyDivide", asUtility = True, name = "forearm_twist_2_" + side + "_mult_node")
                cmds.connectAttr("driver_lowerarm_twist_01_" + side + ".rx", twistMultNode + ".input1X")
                cmds.connectAttr("Rig_Settings." + side + "ForearmTwist2Amount", twistMultNode + ".input2X")
                cmds.connectAttr(twistMultNode + ".outputX", twistCtrlGrp + ".rx")
                cmds.parentConstraint(twistCtrl, "driver_lowerarm_twist_0" + str(i + 1) + "_" + side, mo = True)
            # i == 2 -> build the third twist bone's control/driver network
            if i == 2:
                cmds.select("Rig_Settings")
                cmds.addAttr(longName= ( side + "ForearmTwist3Amount" ), defaultValue=.25, minValue=0, maxValue=1, keyable = True)
                #create the manual twist control setup
                twistMod = cmds.duplicate("driver_lowerarm_twist_0" + str(i + 1) + "_" + side , po = True, name = "lowerarm_" + side + "_twist3_mod")[0]
                cmds.parent(twistMod, rollGrp)
                #create the manual twist control
                twistCtrl = self.createControl("circle", 15, "lowerarm_" + side + "_twist3_anim")
                cmds.setAttr(twistCtrl + ".ry", -90)
                cmds.makeIdentity(twistCtrl, r = 1, apply =True)
                constraint = cmds.parentConstraint(twistMod, twistCtrl)[0]
                cmds.delete(constraint)
                twistCtrlGrp = cmds.group(empty = True, name = "lowerarm_" + side + "_twist3_anim_grp")
                constraint = cmds.parentConstraint(twistMod, twistCtrlGrp)[0]
                cmds.delete(constraint)
                cmds.parent(twistCtrl, twistCtrlGrp)
                cmds.parent(twistCtrlGrp, twistMod)
                cmds.makeIdentity(twistCtrl, t = 1, r = 1, s = 1, apply = True)
                cmds.connectAttr("clavicle_" + side + "_anim.twistCtrlVisLower", twistCtrl + ".v")
                cmds.connectAttr("clavicle_" + side + "_anim.twistCtrlVisLower", twistMod + ".v")
                for attr in [".sx", ".sy", ".sz"]:
                    cmds.setAttr(twistCtrl + attr, lock = True, keyable = False)
                cmds.setAttr(twistCtrl + ".v", keyable = False)
                cmds.setAttr(twistCtrl + ".overrideEnabled", 1)
                cmds.setAttr(twistCtrl + ".overrideColor", color)
                #drive the twist joint
                twistMultNode = cmds.shadingNode("multiplyDivide", asUtility = True, name = "forearm_twist_3_" + side + "_mult_node")
                cmds.connectAttr("driver_lowerarm_twist_01_" + side + ".rx", twistMultNode + ".input1X")
                cmds.connectAttr("Rig_Settings." + side + "ForearmTwist3Amount", twistMultNode + ".input2X")
                cmds.connectAttr(twistMultNode + ".outputX", twistCtrlGrp + ".rx")
                cmds.parentConstraint(twistCtrl, "driver_lowerarm_twist_0" + str(i + 1) + "_" + side, mo = True)
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
def buildArmRoll(self, side):
if side == "l":
color = 5
sideName = "left"
else:
color = 12
sideName = "right"
#get number of roll bones
data = cmds.getAttr("SkeletonSettings_Cache." + sideName + "ArmOptions_numUpArmTwistBones")
numRolls = ast.literal_eval(data)[0]
#create a nurbs plane for our ribbon
ribbon = cmds.nurbsPlane(ax = [0,0,1], lr = numRolls, width = 10, d = 3, u = 1, v = numRolls, ch = True, name = "upperarm_twist_ribbon_" + side)[0]
#rebuild the ribbon with 1 U span
ribbon = cmds.rebuildSurface(ribbon, su = 1, du = 1, sv = numRolls, dv = 1, ch = 1)[0]
cmds.setAttr(ribbon + ".rz", -90)
cmds.makeIdentity(ribbon, apply = True, t = 1, r = 1, s = 1)
#create 2 temporary skin joints
moveVal = 0
for i in range(numRolls - 1):
moveVal += 10
cmds.select(clear = True)
topSkinJoint = cmds.joint(name = "top_skinJoint_temp")
cmds.move(moveVal, 0, 0, r = True, os = True, wd = True)
cmds.select(clear = True)
cmds.select(clear = True)
bottomSkinJoint = cmds.joint(name = "bottom_skinJoint_temp")
cmds.move(moveVal * -1, 0, 0, r = True, os = True, wd = True)
cmds.select(clear = True)
#skin ribbon
cmds.select([ribbon, topSkinJoint, bottomSkinJoint])
skin = cmds.skinCluster(tsb = True, mi = 2, omi = True, dr = 5, sm = 0)
#position the joints, thus moving the ribbon
constraint = cmds.parentConstraint("driver_upperarm_" + side, topSkinJoint)[0]
cmds.delete(constraint)
constraint = cmds.parentConstraint("driver_lowerarm_" + side, bottomSkinJoint)[0]
cmds.delete(constraint)
#delete ribbon history and skin joints
cmds.delete(ribbon, ch = True)
cmds.delete([bottomSkinJoint, topSkinJoint])
#create hair system on ribbon
cmds.select(ribbon)
mel.eval("createHair 1 3 10 0 0 0 0 5 0 2 1 1;")
#figure out which follicles created represent which areas on the ribbon
hairs = cmds.ls(type = "hairSystem")
if len(hairs) > 0:
hairSys = hairs[0]
parent = cmds.listRelatives(hairs[0], parent = True)[0]
hairSys = cmds.rename(parent, "upperarm_twist_" + side + "_hairSys")
follicles = cmds.listConnections(hairSys + "Shape", type = "follicle")
follicles = set(follicles)
hairFollicles = follicles
#delete outputCurves
cmds.delete(parent + "OutputCurves")
#create a joint per follicle
for follicle in hairFollicles:
cmds.select(clear = True)
joint = cmds.joint(name = follicle + "_joint")
cmds.select(clear = True)
constraint = cmds.parentConstraint(follicle, joint)[0]
cmds.delete(constraint)
cmds.parent(joint, follicle)
cmds.makeIdentity(joint, apply = True, t = 0, r = 1, s = 0)
#create the skin joints (final)
skinJoints = []
for i in range(numRolls + 1):
cmds.select(clear = True)
skinJoint = cmds.joint(name = "skin_upperarm_twist_joint_" + side + str(i))
cmds.select(clear = True)
skinJoints.append(skinJoint)
for i in range(numRolls):
constraint = cmds.parentConstraint("driver_upperarm_twist_0" + str(i + 1) + "_" + side, "skin_upperarm_twist_joint_" + side + str(i))[0]
cmds.delete(constraint)
constraint = cmds.parentConstraint("driver_lowerarm_" + side, skinJoint)[0]
cmds.delete(constraint)
#create our manual control curves
x = 1
groups = []
for joint in skinJoints:
if joint != skinJoints[-1]:
if joint == skinJoints[0]:
name = "upperarm_" + side + "_twist_anim_grp"
else:
name = "upperarm_" + side + "_twist" + str(x) + "_anim_grp"
group = cmds.group(empty = True, name = name)
groups.append(group)
constraint = cmds.parentConstraint(joint, group)[0]
cmds.delete(constraint)
x = x + 1
for i in range(int(len(groups))):
name = groups[i].partition("_grp")[0]
twistCtrl = self.createControl("circle", 20, name)
cmds.setAttr(twistCtrl + ".ry", -90)
cmds.makeIdentity(twistCtrl, r = 1, apply =True)
constraint = cmds.parentConstraint(groups[i], twistCtrl)[0]
cmds.delete(constraint)
cmds.parent(twistCtrl, groups[i])
cmds.parent(skinJoints[i], twistCtrl)
cmds.makeIdentity(skinJoints[i], apply = True, t = 0, r = 1, s = 0)
#clean up control
cmds.setAttr(twistCtrl + ".v", keyable = False)
cmds.setAttr(twistCtrl + ".overrideEnabled", 1)
cmds.setAttr(twistCtrl + ".overrideColor", color)
#organize groups
masterGrp = cmds.group(empty = True, name = "upperarm_twist_master_grp_" + side)
constraint = cmds.parentConstraint("rig_clavicle_" + side, masterGrp)[0]
cmds.delete(constraint)
rollGrp = cmds.duplicate(masterGrp, name = "upperarm_twist_roll_grp_" + side)[0]
constraint = cmds.parentConstraint("driver_upperarm_" + side, rollGrp)[0]
cmds.delete(constraint)
cmds.parent(rollGrp, masterGrp)
#set rotate order on roll grp (xzy)
cmds.setAttr(rollGrp + ".rotateOrder", 3)
for group in groups:
cmds.parent(group, rollGrp)
#skin ribbon to skin joints
cmds.select(ribbon)
for joint in skinJoints:
cmds.select(joint, add = True)
skin = cmds.skinCluster(tsb = True, mi = 2, omi = True, dr = 5, sm = 0)
#orient roll grp to both fk/ik arm joints and set driven keys between them
upArmConstOrient = cmds.orientConstraint(["fk_upperarm_" + side, "ik_upperarm_" + side], rollGrp, mo = True, skip = "x")[0]
cmds.setAttr("Rig_Settings." + side + "ArmMode", 0)
cmds.setAttr(upArmConstOrient + "." + "fk_upperarm_" + side + "W0", 1)
cmds.setAttr(upArmConstOrient + "." + "ik_upperarm_" + side + "W1", 0)
cmds.setDrivenKeyframe([upArmConstOrient + "." + "fk_upperarm_" + side + "W0", upArmConstOrient + "." + "ik_upperarm_" + side + "W1", ], cd = "Rig_Settings." + side + "ArmMode", itt = "linear", ott = "linear")
cmds.setAttr("Rig_Settings." + side + "ArmMode", 1)
cmds.setAttr(upArmConstOrient + "." + "fk_upperarm_" + side + "W0", 0)
cmds.setAttr(upArmConstOrient + "." + "ik_upperarm_" + side + "W1", 1)
cmds.setDrivenKeyframe([upArmConstOrient + "." + "fk_upperarm_" + side + "W0", upArmConstOrient + "." + "ik_upperarm_" + side + "W1", ], cd = "Rig_Settings." + side + "ArmMode", itt = "linear", ott = "linear")
cmds.setAttr("Rig_Settings." + side + "ArmMode", 0)
#parent end skin joint to masterGrp and orientConstrain twist to driver upper arm
cmds.parent(skinJoints[-1], rollGrp)
cmds.orientConstraint("driver_upperarm_" + side, skinJoints[-1], skip = ["y", "z"])
#parentConstraint master roll grp to driver clavicle
cmds.parentConstraint("driver_clavicle_" + side, masterGrp, mo = True)
#hook up driver joints
hairFollicles = sorted(hairFollicles)
hairFollicles = hairFollicles[::-1]
num = 1
for i in range(len(hairFollicles)):
if cmds.objExists("driver_upperarm_twist_0" + str(num) + "_" + side):
cmds.orientConstraint(skinJoints[i], "driver_upperarm_twist_0" + str(num) + "_" + side)
cmds.pointConstraint(skinJoints[i], "driver_upperarm_twist_0" + str(num) + "_" + side, mo = True)
cmds.scaleConstraint(skinJoints[i], "driver_upperarm_twist_0" + str(num) + "_" + side)
num = num + 1
#add attr on clavicle anim for manual twist control visibility
cmds.select("clavicle_" + side + "_anim")
cmds.addAttr(longName=("twistCtrlVis"), at = 'bool', dv = 0, keyable = True)
cmds.connectAttr("clavicle_" + side + "_anim.twistCtrlVis", rollGrp + ".v")
#hook up multiply nodes so that twistAmount values from rig settings affect the ribbon twist
cmds.select("Rig_Settings")
cmds.addAttr(longName= ( side + "UpperarmTwistAmount" ), defaultValue= .9, minValue= 0 , maxValue= 1, keyable = True)
#take twist ammount attr, multiply by -1, and feed into upperarm twist joint 1
multNodeA = cmds.shadingNode("multiplyDivide", asUtility = True, name = "upperarm_twist_" + side + "_multNodeA")
cmds.connectAttr("Rig_Settings." + side + "UpperarmTwistAmount", multNodeA+ ".input1X")
cmds.setAttr(multNodeA + ".input2X", -1)
multNodeB = cmds.shadingNode("multiplyDivide", asUtility = True, name = "upperarm_twist_" + side + "_multNodeB")
cmds.connectAttr(rollGrp + ".rx", multNodeB+ ".input1X")
cmds.connectAttr(multNodeA + ".outputX", multNodeB + ".input2X")
cmds.connectAttr(multNodeB + ".outputX", groups[0] + ".rx")
#any twist joints over the initial, setup simply mult nodes for carry down values
if numRolls > 1:
for i in range(int(numRolls)):
if i == 1:
cmds.select("Rig_Settings")
cmds.addAttr(longName= ( side + "UpperarmTwist2Amount" ), defaultValue=.5, minValue=0, maxValue=1, keyable = True)
#hook up multiply nodes so that twistAmount values from rig settings affect the ribbon twist
multNode = cmds.shadingNode("multiplyDivide", asUtility = True, name = "upperarm_twist2_" + side + "_multNode")
blendNode = cmds.shadingNode("blendColors", asUtility = True, name = "upperarm_twist2_" + side + "_multNode")
#hook up blendnode to take in fk and ik upperarm rx values
cmds.connectAttr( "ik_upperarm_" + side + ".rx", blendNode + ".color1R")
cmds.connectAttr( "fk_upperarm_" + side + ".rx", blendNode + ".color2R")
#take output of that and plug into multNode. multiply by the twist ammount attribute value
cmds.connectAttr(blendNode + ".outputR", multNode + ".input1X")
cmds.connectAttr("Rig_Settings." + side + "UpperarmTwist2Amount" , multNode + ".input2X")
cmds.connectAttr(multNode + ".outputX", groups[i] + ".rx")
#connect blendNode.blender to rig settings arm mode
cmds.connectAttr("Rig_Settings." + side + "ArmMode", blendNode + ".blender")
if i == 2:
cmds.select("Rig_Settings")
cmds.addAttr(longName= ( side + "UpperarmTwist3Amount" ), defaultValue=.5, minValue=0, maxValue=1, keyable = True)
#hook up multiply nodes so that twistAmount values from rig settings affect the ribbon twist
multNode = cmds.shadingNode("multiplyDivide", asUtility = True, name = "upperarm_twist3_" + side + "_multNode")
blendNode = cmds.shadingNode("blendColors", asUtility = True, name = "upperarm_twist3_" + side + "_multNode")
#hook up blendnode to take in fk and ik upperarm rx values
cmds.connectAttr( "ik_upperarm_" + side + ".rx", blendNode + ".color1R")
cmds.connectAttr( "fk_upperarm_" + side + ".rx", blendNode + ".color2R")
#take output of that and plug into multNode. multiply by the twist ammount attribute value
cmds.connectAttr(blendNode + ".outputR", multNode + ".input1X")
cmds.connectAttr("Rig_Settings." + side + "UpperarmTwist3Amount" , multNode + ".input2X")
cmds.connectAttr(multNode + ".outputX", groups[i] + ".rx")
#connect blendNode.blender to rig settings arm mode
cmds.connectAttr("Rig_Settings." + side + "ArmMode", blendNode + ".blender")
#Group up and parent into rig
twistGrp = cmds.group(empty = True, name = "upperarm_twist_grp_" + side)
cmds.parent([ribbon, hairSys, masterGrp], twistGrp)
#find follicles grp
for follicle in hairFollicles:
folliclesGrp = cmds.listRelatives(follicle, parent = True)
cmds.parent(folliclesGrp[0], twistGrp)
if cmds.objExists("nucleus1"):
cmds.parent("nucleus1", twistGrp)
#turn inherits transforms off
cmds.setAttr(folliclesGrp[0] + ".inheritsTransform", 0)
cmds.setAttr(ribbon + ".inheritsTransform", 0)
#hide nodes
for node in [folliclesGrp[0], ribbon, hairSys, skinJoints[0]]:
cmds.setAttr(node + ".v", 0)
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
def buildThighTwist(self, side):
    """Build the thigh twist (roll bone) rig for one leg.

    Creates a 2-joint IK driver chain that isolates thigh roll, an
    aim-constrained twist joint that drives the driver_thigh_twist
    joints, and a manual animator control per twist bone. Control
    visibility is toggled from a new attribute on hip_anim, and the
    amount of twist carried by each bone is tunable from attributes
    added to the Rig_Settings node.

    Args:
        side (str): which leg to build, "l" or "r".
    """
    # side color for control override: left = blue(5), right = red(12)
    if side == "l":
        color = 5
    else:
        color = 12
    #create a 2 bone joint chain and position the joints
    cmds.select(clear = True)
    driverJointStart = cmds.joint(name = "thigh_twist_" + side + "_driver")
    cmds.select(clear = True)
    driverJointEnd = cmds.joint(name = "thigh_twist_" + side + "_driver_end")
    cmds.select(clear = True)
    # snap-and-delete: use temporary constraints purely to position the joints
    constraint = cmds.parentConstraint("driver_pelvis", driverJointStart)[0]
    cmds.delete(constraint)
    constraint = cmds.parentConstraint("driver_thigh_" + side, driverJointEnd)[0]
    cmds.delete(constraint)
    cmds.parent(driverJointEnd, driverJointStart)
    #position the chain back a bit(+Y)
    scaleFactor = self.getScaleFactor()
    cmds.setAttr(driverJointStart + ".ty", (35 * scaleFactor))
    #create our driver rig grp; constraint is left live so the grp follows the pelvis
    driverGrp = cmds.group(empty = True, name = "thigh_twist_" + side + "_driver_grp")
    cmds.parentConstraint("driver_pelvis", driverGrp)
    #create RP IK on driver joint chain
    ikNodes = cmds.ikHandle(sol = "ikRPsolver", name = "thigh_twist_driver_" + side + "_ikHandle", sj = driverJointStart, ee = driverJointEnd)
    cmds.setAttr(ikNodes[0] + ".v", 0)
    #point constrain IK handle to calf
    cmds.pointConstraint("driver_calf_" + side, ikNodes[0])
    #create our pole vector locator and get it positioned (zeroed under the thigh first)
    poleVector = cmds.spaceLocator(name = "thigh_twist_driver_" + side + "_PV")[0]
    cmds.setAttr(poleVector + ".v", 0)
    cmds.parent(poleVector, "driver_thigh_" + side)
    for attr in [".tx", ".ty", ".tz", ".rx", ".ry", ".rz"]:
        cmds.setAttr(poleVector + attr, 0)
    #move it back in +Y a bit (mirrored on the right side)
    if side == "l":
        cmds.setAttr(poleVector + ".ty", (35 * scaleFactor))
    else:
        cmds.setAttr(poleVector + ".ty", (-35 * scaleFactor))
    #create a pole Vector constraint between IK handle and the locator
    cmds.poleVectorConstraint(poleVector, ikNodes[0])
    #parent nodes to driver grp
    cmds.parent([driverJointStart, poleVector, ikNodes[0]], driverGrp)
    #create our roll group (follows the thigh via a live constraint)
    rollGrp = cmds.group(empty = True, name = "thigh_" + side + "_roll_grp")
    cmds.parentConstraint("driver_thigh_" + side, rollGrp)
    #create our twist joint and twist mod joint
    cmds.select(clear = True)
    twistJoint = cmds.joint(name = "thigh_" + side + "_twist_joint")
    cmds.select(clear = True)
    constraint = cmds.parentConstraint("driver_thigh_twist_01_" + side, twistJoint)[0]
    cmds.delete(constraint)
    cmds.parent(twistJoint, rollGrp)
    #twist mod joint
    twistMod = cmds.duplicate(twistJoint, po = True, name = "thigh_" + side + "_twist_mod")[0]
    cmds.parent(twistMod, twistJoint)
    #create the manual twist control
    twistCtrl = self.createControl("circle", 30, "thigh_" + side + "_twist_anim")
    cmds.setAttr(twistCtrl + ".ry", -90)
    cmds.makeIdentity(twistCtrl, r = 1, apply =True)
    constraint = cmds.parentConstraint(twistMod, twistCtrl)[0]
    cmds.delete(constraint)
    twistCtrlGrp = cmds.group(empty = True, name = "thigh_" + side + "_twist_anim_grp")
    # BUGFIX: snap the group (not the already-snapped control) to the twist mod
    # joint, matching the twist2/twist3 setups below; previously the control was
    # snapped a second time and the group was left at the origin
    constraint = cmds.parentConstraint(twistMod, twistCtrlGrp)[0]
    cmds.delete(constraint)
    cmds.parent(twistCtrl, twistCtrlGrp)
    cmds.parent(twistCtrlGrp, twistMod)
    cmds.makeIdentity(twistCtrl, t = 1, r = 1, s = 1, apply = True)
    #lock attrs on twist ctrl and color
    for attr in [".sx", ".sy", ".sz"]:
        cmds.setAttr(twistCtrl + attr, lock = True, keyable = False)
    cmds.setAttr(twistCtrl + ".v", keyable = False)
    cmds.setAttr(twistCtrl + ".overrideEnabled", 1)
    cmds.setAttr(twistCtrl + ".overrideColor", color)
    #add attr on the hip anim for manual twist control visibility
    cmds.select("hip_anim")
    cmds.addAttr(longName=(side + "TwistCtrlVis"), at = 'bool', dv = 0, keyable = True)
    cmds.connectAttr("hip_anim." + side + "TwistCtrlVis", twistCtrl + ".v")
    cmds.connectAttr("hip_anim." + side + "TwistCtrlVis", twistMod + ".v")
    cmds.connectAttr("hip_anim." + side + "TwistCtrlVis", twistJoint + ".v")
    # shrink joint display so the helpers do not clutter the viewport
    cmds.setAttr(twistMod + ".radius", .01)
    cmds.setAttr(twistJoint + ".radius", .01)
    #create our aim up locator (zeroed under the thigh first)
    aimUp = cmds.spaceLocator(name = "thigh_twist_driver_" + side + "_aimUp")[0]
    cmds.setAttr(aimUp + ".v", 0)
    cmds.parent(aimUp, "driver_thigh_" + side)
    for attr in [".tx", ".ty", ".tz", ".rx", ".ry", ".rz"]:
        cmds.setAttr(aimUp + attr, 0)
    #offset the up locator (mirrored per side)
    if side == "l":
        cmds.setAttr(aimUp + ".ty", (35 * scaleFactor))
        cmds.setAttr(aimUp + ".tx", (-70 * scaleFactor))
    if side == "r":
        cmds.setAttr(aimUp + ".ty", (-35 * scaleFactor))
        cmds.setAttr(aimUp + ".tx", (70 * scaleFactor))
    #parent aim up to thigh twist driver
    cmds.parent(aimUp, driverJointStart)
    #aim constrain the twist joint roll bone to the driver calf
    cmds.makeIdentity(twistJoint, t = 0, r = 1, s = 0, apply = True)
    cmds.aimConstraint("driver_calf_" + side, twistJoint, weight = 1, aimVector = [-1, 0, 0], upVector = [0, 1, 0], worldUpType = "object", worldUpObject = aimUp)
    #create a multiply node that will push some twist up to the twist mod joint but not all
    cmds.select("Rig_Settings")
    cmds.addAttr(longName= ( side + "ThighTwistAmount" ), defaultValue= .9, minValue= 0 , maxValue= 1, keyable = True)
    multNode = cmds.shadingNode("multiplyDivide", asUtility = True, name = "thigh_twist_" + side + "_multNode")
    subtractNode = cmds.shadingNode("plusMinusAverage", asUtility = True, name = "thigh_twist_corrective_" + side + "_subtractNode")
    # operation 2 = subtract: twistMod.rx = twistJoint.rx * (twistAmount - 1)
    cmds.setAttr(subtractNode + ".operation", 2)
    cmds.connectAttr("Rig_Settings." + side + "ThighTwistAmount", subtractNode + ".input1D[0]")
    cmds.setAttr(subtractNode + ".input1D[1]", 1)
    cmds.connectAttr(twistJoint + ".rx", multNode+ ".input1X")
    cmds.connectAttr(subtractNode + ".output1D", multNode + ".input2X")
    cmds.connectAttr(multNode + ".outputX", twistMod + ".rx")
    #constrain the first driver twist joint to the twist control
    cmds.parentConstraint(twistCtrl, "driver_thigh_twist_01_" + side, mo = True)
    #get the number of roll bones from the skeleton settings cache
    if side == "l":
        sideName = "left"
    else:
        sideName = "right"
    data = cmds.getAttr("SkeletonSettings_Cache." + sideName + "LegOptions_numThighTwistBones")
    numRolls = ast.literal_eval(data)[0]
    if numRolls > 1:
        for i in range(int(numRolls)):
            if i == 1:
                cmds.select("Rig_Settings")
                cmds.addAttr(longName= ( side + "ThighTwist2Amount" ), defaultValue=.5, minValue=0, maxValue=1, keyable = True)
                #create the manual twist control setup
                twistMod = cmds.duplicate("driver_thigh_twist_0" + str(i + 1) + "_" + side , po = True, name = "thigh_" + side + "_twist2_mod")[0]
                cmds.parent(twistMod, rollGrp)
                #create the manual twist control
                twistCtrl = self.createControl("circle", 30, "thigh_" + side + "_twist2_anim")
                cmds.setAttr(twistCtrl + ".ry", -90)
                cmds.makeIdentity(twistCtrl, r = 1, apply =True)
                constraint = cmds.parentConstraint(twistMod, twistCtrl)[0]
                cmds.delete(constraint)
                twistCtrlGrp = cmds.group(empty = True, name = "thigh_" + side + "_twist2_anim_grp")
                constraint = cmds.parentConstraint(twistMod, twistCtrlGrp)[0]
                cmds.delete(constraint)
                cmds.parent(twistCtrl, twistCtrlGrp)
                cmds.parent(twistCtrlGrp, twistMod)
                cmds.makeIdentity(twistCtrl, t = 1, r = 1, s = 1, apply = True)
                cmds.connectAttr("hip_anim." + side + "TwistCtrlVis", twistCtrl + ".v")
                cmds.connectAttr("hip_anim." + side + "TwistCtrlVis", twistMod + ".v")
                #lock attrs on twist ctrl and color
                for attr in [".sx", ".sy", ".sz"]:
                    cmds.setAttr(twistCtrl + attr, lock = True, keyable = False)
                cmds.setAttr(twistCtrl + ".v", keyable = False)
                cmds.setAttr(twistCtrl + ".overrideEnabled", 1)
                cmds.setAttr(twistCtrl + ".overrideColor", color)
                #drive the twist joint: a scaled-down copy of the first twist bone's roll
                twistMultNode = cmds.shadingNode("multiplyDivide", asUtility = True, name = "thigh_twist_2_" + side + "_mult_node")
                cmds.connectAttr("driver_thigh_twist_01_" + side + ".rx", twistMultNode + ".input1X")
                cmds.connectAttr("Rig_Settings." + side + "ThighTwist2Amount", twistMultNode + ".input2X")
                cmds.connectAttr(twistMultNode + ".outputX", twistCtrlGrp + ".rx")
                cmds.parentConstraint(twistCtrl, "driver_thigh_twist_0" + str(i + 1) + "_" + side, mo = True)
            if i == 2:
                cmds.select("Rig_Settings")
                cmds.addAttr(longName= ( side + "ThighTwist3Amount" ), defaultValue=.25, minValue=0, maxValue=1, keyable = True)
                #create the manual twist control setup
                twistMod = cmds.duplicate("driver_thigh_twist_0" + str(i + 1) + "_" + side , po = True, name = "thigh_" + side + "_twist3_mod")[0]
                cmds.parent(twistMod, rollGrp)
                #create the manual twist control
                twistCtrl = self.createControl("circle", 30, "thigh_" + side + "_twist3_anim")
                cmds.setAttr(twistCtrl + ".ry", -90)
                cmds.makeIdentity(twistCtrl, r = 1, apply =True)
                constraint = cmds.parentConstraint(twistMod, twistCtrl)[0]
                cmds.delete(constraint)
                twistCtrlGrp = cmds.group(empty = True, name = "thigh_" + side + "_twist3_anim_grp")
                constraint = cmds.parentConstraint(twistMod, twistCtrlGrp)[0]
                cmds.delete(constraint)
                cmds.parent(twistCtrl, twistCtrlGrp)
                cmds.parent(twistCtrlGrp, twistMod)
                cmds.makeIdentity(twistCtrl, t = 1, r = 1, s = 1, apply = True)
                cmds.connectAttr("hip_anim." + side + "TwistCtrlVis", twistCtrl + ".v")
                cmds.connectAttr("hip_anim." + side + "TwistCtrlVis", twistMod + ".v")
                #lock attrs on twist ctrl and color
                for attr in [".sx", ".sy", ".sz"]:
                    cmds.setAttr(twistCtrl + attr, lock = True, keyable = False)
                cmds.setAttr(twistCtrl + ".v", keyable = False)
                cmds.setAttr(twistCtrl + ".overrideEnabled", 1)
                cmds.setAttr(twistCtrl + ".overrideColor", color)
                #drive the twist joint
                twistMultNode = cmds.shadingNode("multiplyDivide", asUtility = True, name = "thigh_twist_3_" + side + "_mult_node")
                cmds.connectAttr("driver_thigh_twist_01_" + side + ".rx", twistMultNode + ".input1X")
                cmds.connectAttr("Rig_Settings." + side + "ThighTwist3Amount", twistMultNode + ".input2X")
                cmds.connectAttr(twistMultNode + ".outputX", twistCtrlGrp + ".rx")
                cmds.parentConstraint(twistCtrl, "driver_thigh_twist_0" + str(i + 1) + "_" + side, mo = True)
    #clean up hierarchy
    cmds.parent([rollGrp, driverGrp], "leg_group_" + side)
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
def buildAutoHips(self):
    """Build the automatic hip system.

    Creates solver groups and joint chains that derive hip rotation from
    the positions of the knees and thighs (an SC-IK chain for the twist
    axis and an RP-IK chain for the remaining two axes), blends that
    automatic result with the animator's manual hip control through a new
    'autoHips' attribute on hip_anim, and wires the hip redirect locator
    up to drive driver_pelvis and both leg joint groups.
    """
    # solver/system transforms; all get snapped to driver_pelvis below
    hipWorld = cmds.group(empty = True, name = "auto_hip_world")
    yzRot = cmds.group(empty = True, name = "auto_hip_yz_rot_solver")
    xRot = cmds.group(empty = True, name = "auto_hip_x_rot_solver")
    legSys = cmds.group(empty = True, name = "auto_hip_legs_system")
    switchNode = cmds.spaceLocator(name = "auto_hip_on_off")[0]
    hipRedirect = cmds.spaceLocator(name = "hip_ctrl_redirect")[0]
    #hide locators
    for node in [switchNode, hipRedirect]:
        shape = cmds.listRelatives(node, children = True)[0]
        try:
            cmds.setAttr(shape + ".v", 0)
            cmds.setAttr(shape + ".v", lock = True)
        except Exception:
            # best effort only: the attr may already be locked or connected
            pass
    # snap-and-delete: position every system node at the driver pelvis
    for node in [hipWorld, yzRot, xRot, legSys, switchNode, hipRedirect]:
        constraint = cmds.parentConstraint("driver_pelvis", node)[0]
        cmds.delete(constraint)
    #setup hierarchy
    cmds.parent("hip_anim", hipWorld)
    cmds.parent([yzRot, xRot, legSys], "hip_anim")
    cmds.parent(switchNode, yzRot)
    cmds.parent(hipRedirect, switchNode)
    #create the fk knee space locators
    pelvisPos = cmds.xform("driver_pelvis", q = True, t = True, ws = True)
    height = pelvisPos[2]
    leftKneeLoc = cmds.spaceLocator(name = "auto_hips_knee_loc_l")[0]
    rightKneeLoc = cmds.spaceLocator(name = "auto_hips_knee_loc_r")[0]
    constraint = cmds.pointConstraint("ik_leg_calf_l", leftKneeLoc)[0]
    cmds.delete(constraint)
    constraint = cmds.pointConstraint("ik_leg_calf_r", rightKneeLoc)[0]
    cmds.delete(constraint)
    cmds.makeIdentity([leftKneeLoc, rightKneeLoc], t = 1, r = 1, s = 1, apply = True)
    # live constraints blending between the invisible IK and FK calf targets;
    # the FK weight (index 1) starts at 0 and is keyed to leg mode at the bottom
    leftKneeLocConstraint = cmds.pointConstraint(["invis_legs_ik_leg_calf_l", "invis_legs_fk_calf_l_anim"], leftKneeLoc)[0]
    rightKneeLocConstraint = cmds.pointConstraint(["invis_legs_ik_leg_calf_r","invis_legs_fk_calf_r_anim"], rightKneeLoc)[0]
    leftTargets = cmds.pointConstraint(leftKneeLocConstraint, q = True, weightAliasList = True)
    rightTargets = cmds.pointConstraint(rightKneeLocConstraint, q = True, weightAliasList = True)
    cmds.setAttr(leftKneeLocConstraint + "." + leftTargets[1], 0)
    cmds.setAttr(rightKneeLocConstraint + "." + rightTargets[1], 0)
    #create the thigh locators for solving x (twist)
    leftThighLoc = cmds.spaceLocator(name = "auto_hips_x_rot_solv_l")[0]
    rightThighLoc = cmds.spaceLocator(name = "auto_hips_x_rot_solv_r")[0]
    constraint = cmds.pointConstraint("driver_thigh_l", leftThighLoc)[0]
    cmds.delete(constraint)
    constraint = cmds.pointConstraint("driver_thigh_r", rightThighLoc)[0]
    cmds.delete(constraint)
    cmds.setAttr(leftThighLoc + ".tz", height)
    cmds.setAttr(rightThighLoc + ".tz", height)
    #create the x rotation solver joint chain (SC IK spanning the two thigh locators)
    cmds.select(clear = True)
    xRotJointStart = cmds.joint(name = "auto_hips_x_rot_solv_joint_start")
    cmds.select(clear = True)
    constraint = cmds.pointConstraint(leftThighLoc, xRotJointStart)[0]
    cmds.delete(constraint)
    constraint = cmds.orientConstraint("driver_pelvis", xRotJointStart)[0]
    cmds.delete(constraint)
    xRotJointEnd = cmds.duplicate(xRotJointStart, name = "auto_hips_x_rot_solv_joint_end")[0]
    cmds.parent(xRotJointEnd, xRotJointStart)
    cmds.xform(xRotJointEnd, ws = True, t = (pelvisPos[0], pelvisPos[1], pelvisPos[2]))
    cmds.makeIdentity(xRotJointStart, r = 1, apply = True)
    cmds.pointConstraint(leftThighLoc, xRotJointStart)
    xRotIKNodes = cmds.ikHandle(sol = "ikSCsolver", name = "auto_hips_x_rot_solver_ik", sj = xRotJointStart, ee = xRotJointEnd)
    cmds.pointConstraint(rightThighLoc, xRotIKNodes[0])
    #hookup x rot solver
    upAxis = self.getUpAxis("hip_anim")
    cmds.connectAttr(xRotJointStart + ".rotate" + upAxis, legSys + ".rotate" + upAxis)
    #hookup motion of thigh locators
    rotXikHipL = cmds.spaceLocator(name = "auto_hips_ik_x_rot_solv_loc_l")[0]
    rotXikHipR = cmds.spaceLocator(name = "auto_hips_ik_x_rot_solv_loc_r")[0]
    constraint = cmds.pointConstraint("noflip_pv_loc_l", rotXikHipL)[0]
    cmds.delete(constraint)
    constraint = cmds.pointConstraint("noflip_pv_loc_r", rotXikHipR)[0]
    cmds.delete(constraint)
    # thigh locators only follow the knees vertically (x/z skipped)
    cmds.pointConstraint(leftKneeLoc, leftThighLoc, mo = True, skip = ["x", "z"])
    cmds.pointConstraint(rightKneeLoc, rightThighLoc, mo = True, skip = ["x", "z"])
    #create the multiply node for our x rot solver node to halve the results
    xRotMult = cmds.shadingNode("multiplyDivide", name = "auto_hips_x_rot_mult_node", asUtility = True)
    cmds.connectAttr(legSys + ".rotate" + upAxis, xRotMult + ".input1X")
    cmds.setAttr(xRotMult + ".input2X", .5)
    cmds.connectAttr(xRotMult + ".outputX", xRot + ".rotate" + upAxis)
    #create the joints for the yz rotations (RP IK aimed at the knee midpoint)
    cmds.select(clear = True)
    yzRotStartJoint = cmds.joint(name = "auto_hips_yz_rot_solv_joint_start")
    constraint = cmds.parentConstraint("driver_pelvis", yzRotStartJoint)[0]
    cmds.delete(constraint)
    yzRotEndJoint = cmds.duplicate(yzRotStartJoint, name = "auto_hips_yz_rot_solv_joint_end")[0]
    cmds.parent(yzRotEndJoint, yzRotStartJoint)
    cmds.setAttr(yzRotEndJoint + ".tx", (height/2)* -1)
    cmds.makeIdentity(yzRotStartJoint, r = 1, apply = True)
    yzRotikNodes = cmds.ikHandle(sol = "ikRPsolver", name = "auto_hips_yz_rot_solv_ik", sj = yzRotStartJoint, ee = yzRotEndJoint)
    yzRotTargetLoc = cmds.spaceLocator(name = "auto_hips_yz_target_loc")[0]
    constraint = cmds.pointConstraint([leftKneeLoc, rightKneeLoc], yzRotTargetLoc)[0]
    cmds.delete(constraint)
    cmds.makeIdentity(yzRotTargetLoc, t = 1, apply = True)
    cmds.parent(yzRotikNodes[0], yzRotTargetLoc)
    #setup motion for yz solver
    cmds.pointConstraint([leftKneeLoc, rightKneeLoc], yzRotTargetLoc)
    yzSolvConst = cmds.orientConstraint(yzRotStartJoint, yzRot)[0]
    #setup distance tools for reducing popping when foot gets close to pelvis
    cmds.select(clear = True)
    yzRotFlipCtrlNodesL = cmds.duplicate("noflip_begin_joint_l", name = "auto_hips_dist_ctrl_begin_joint_l", rc = True)
    yzRotFlipCtrlBeginL = yzRotFlipCtrlNodesL[0]
    yzRotFlipCtrlEndL = cmds.rename(yzRotFlipCtrlNodesL[1], "auto_hips_dist_ctrl_end_joint_l")
    #now for the right side
    cmds.select(clear = True)
    yzRotFlipCtrlNodesR = cmds.duplicate("noflip_begin_joint_r", name = "auto_hips_dist_ctrl_begin_joint_r", rc = True)
    yzRotFlipCtrlBeginR = yzRotFlipCtrlNodesR[0]
    yzRotFlipCtrlEndR = cmds.rename(yzRotFlipCtrlNodesR[1], "auto_hips_dist_ctrl_end_joint_r")
    #setup the distance mover joint
    distMoverL = cmds.duplicate(yzRotFlipCtrlEndL, name = "distance_mover_joint_l")[0]
    distMoverR = cmds.duplicate(yzRotFlipCtrlEndR, name = "distance_mover_joint_r")[0]
    cmds.pointConstraint("ik_foot_anim_l", distMoverL, mo = True)
    cmds.pointConstraint("ik_foot_anim_r", distMoverR, mo = True)
    #setup system for fixing flipping when foot gets too close to pelvis:
    #when the mover travels past the rest length, push the noflip aim group back
    originalLen = cmds.getAttr(yzRotFlipCtrlEndL + ".tz")
    conditionNodeL = cmds.shadingNode("condition", asUtility = True, name = "autoHips_flipFix_condition_l")
    conditionNodeR = cmds.shadingNode("condition", asUtility = True, name = "autoHips_flipFix_condition_r")
    cmds.setAttr(conditionNodeL + ".secondTerm", originalLen)
    cmds.setAttr(conditionNodeR + ".secondTerm", originalLen)
    cmds.connectAttr(distMoverL + ".tz", conditionNodeL + ".firstTerm")
    cmds.connectAttr(distMoverR + ".tz", conditionNodeR + ".firstTerm")
    scaleFactor = self.getScaleFactor()
    # operation 2 = "greater than"
    cmds.setAttr(conditionNodeL + ".operation", 2)
    cmds.setAttr(conditionNodeR + ".operation", 2)
    cmds.setAttr(conditionNodeL + ".colorIfFalseR", 0)
    cmds.setAttr(conditionNodeR + ".colorIfFalseR", 0)
    cmds.setAttr(conditionNodeL + ".colorIfTrueR", -60 * scaleFactor)
    cmds.setAttr(conditionNodeR + ".colorIfTrueR", -60 * scaleFactor)
    cmds.connectAttr(conditionNodeL + ".outColorR", "noflip_aim_soft_grp_l.tz")
    cmds.connectAttr(conditionNodeR + ".outColorR", "noflip_aim_soft_grp_r.tz")
    #hookup the hips to use the x and yz rot solver data
    cmds.connectAttr(xRot + ".rotate" + upAxis, yzSolvConst + ".offset" + upAxis)
    cmds.connectAttr("hip_anim.rotate", hipRedirect + ".rotate")
    #orient constrain the on/off node to the world control and the yz rot solver
    # NOTE(review): the second target here is the orient constraint node itself
    # (yzSolvConst) rather than yzRot — constraint nodes are transforms so this
    # evaluates, but confirm yzRot was not intended
    onOffConstraint = cmds.orientConstraint([hipWorld, yzSolvConst], switchNode)[0]
    #add auto on/off attr to hip control. hookup connections to constraint
    cmds.select("hip_anim")
    cmds.addAttr(longName='autoHips', defaultValue=0, minValue=0, maxValue=1, keyable = True)
    orientTargets = cmds.orientConstraint(onOffConstraint, q = True, weightAliasList = True)
    # autoHips drives the solver weight directly and the world weight via a reverse
    cmds.connectAttr("hip_anim.autoHips", onOffConstraint + "." + orientTargets[1])
    reverseNode = cmds.shadingNode("reverse", asUtility = True, name = "autoHips_reverse_node_onOff")
    cmds.connectAttr("hip_anim.autoHips", reverseNode + ".inputX")
    cmds.connectAttr(reverseNode + ".outputX", onOffConstraint + "." + orientTargets[0])
    #plug the body anim's rotates into the onOffConstraint's offsets
    # NOTE(review): the mult node outputs below are currently left unconnected
    # (the offset hookup was disabled); the node is kept for parity with old scenes
    bodyMultNode = cmds.shadingNode("multiplyDivide", asUtility = True, name = "autoHips_body_rotation_fix")
    cmds.connectAttr("body_anim.rotateX", bodyMultNode + ".input1X")
    cmds.connectAttr("body_anim.rotateY", bodyMultNode + ".input1Y")
    cmds.connectAttr("body_anim.rotateZ", bodyMultNode + ".input1Z")
    cmds.connectAttr("hip_anim.autoHips", bodyMultNode + ".input2X")
    cmds.connectAttr("hip_anim.autoHips", bodyMultNode + ".input2Y")
    cmds.connectAttr("hip_anim.autoHips", bodyMultNode + ".input2Z")
    #update invisible leg IK joints: cycling the tools over the selection appears
    #to force Maya to refresh/evaluate the invisible chain
    cmds.setToolTo('moveSuperContext')
    cmds.select("invis_legs_ik_leg_thigh_l", hi = True)
    cmds.setToolTo('RotateSuperContext')
    cmds.select(clear = True)
    #constrain driver joint to control
    cmds.parentConstraint(hipRedirect, "driver_pelvis", mo = True)
    #parent hip world to body anim
    cmds.parent(hipWorld, "body_anim")
    #clean up hierarchy
    autoHipsMasterGrp = cmds.group(empty = True, name = "autoHips_sys_grp")
    cmds.parent([leftKneeLoc, rightKneeLoc, leftThighLoc, rightThighLoc, xRotJointStart, rotXikHipL, rotXikHipR, yzRotStartJoint,xRotIKNodes[0], yzRotTargetLoc], autoHipsMasterGrp)
    #cosntrain ik leg bones(thighs) to the hip ctrl redirect
    cmds.parentConstraint(hipRedirect, "leg_joints_grp_l", mo = True)
    cmds.parentConstraint(hipRedirect, "leg_joints_grp_r", mo = True)
    #parent the bottom of the spline ik spine bone to the hip redirect
    if cmds.objExists("spine_splineIK_bottom_joint"):
        cmds.parent("spine_splineIK_bottom_joint", hipRedirect)
    #hide stuff
    cmds.setAttr("autoHips_sys_grp.v", 0)
    #hook up autohips to leg mode (reverse nodes created by the leg switcher build)
    reverseNodeL = "legSwitcher_reverse_node_l"
    reverseNodeR = "legSwitcher_reverse_node_r"
    targets = cmds.pointConstraint(leftKneeLocConstraint, q = True, weightAliasList = True)
    cmds.connectAttr("Rig_Settings" + ".lLegMode", leftKneeLocConstraint + "." + targets[0])
    cmds.connectAttr(reverseNodeL + ".outputX", leftKneeLocConstraint + "." + targets[1])
    targets = cmds.pointConstraint(rightKneeLocConstraint, q = True, weightAliasList = True)
    cmds.connectAttr("Rig_Settings" + ".rLegMode", rightKneeLocConstraint + "." + targets[0])
    cmds.connectAttr(reverseNodeR + ".outputX", rightKneeLocConstraint + "." + targets[1])
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
def autoSpine(self):
numSpineBones = cmds.getAttr("Skeleton_Settings.numSpineBones")
if numSpineBones > 2:
#drive the mid IK spine control based on what the upper spine control is doing
#drives the ik spine controls based on auto hip attr and hip motion
yzRotSolver = "auto_hip_yz_rot_solver"
midDriver = "mid_ik_anim_driver_grp"
midDriverTop = "mid_ik_anim_translate_driver_grp"
#add auto on/off attr to hip control. hookup connections to constraint
cmds.select("chest_ik_anim")
cmds.addAttr(longName='autoSpine', defaultValue=0, minValue=0, maxValue=1, keyable = True)
cmds.addAttr(longName='rotationInfluence', defaultValue=.25, minValue=0, maxValue=1, keyable = True)
topCtrlMultRY = cmds.shadingNode("multiplyDivide", asUtility = True, name = "autoSpine_top_driver_mult_ry")
topCtrlMultRZ = cmds.shadingNode("multiplyDivide", asUtility = True, name = "autoSpine_top_driver_mult_rz")
topCtrlMultSwitchRY = cmds.shadingNode("multiplyDivide", asUtility = True, name = "autoSpine_top_mult_switch_ry")
topCtrlMultSwitchRZ = cmds.shadingNode("multiplyDivide", asUtility = True, name = "autoSpine_top_mult_switch_rz")
#create a node that will track all world space translations and rotations on the chest IK anim
chestMasterTrackNode = cmds.spaceLocator(name = "chest_ik_track_parent")[0]
constraint = cmds.parentConstraint("chest_ik_anim", chestMasterTrackNode)[0]
cmds.delete(constraint)
chestTrackNode = cmds.spaceLocator(name = "chest_ik_tracker")[0]
constraint = cmds.parentConstraint("chest_ik_anim", chestTrackNode)[0]
cmds.delete(constraint)
cmds.parent(chestTrackNode, chestMasterTrackNode)
cmds.parentConstraint("chest_ik_anim", chestTrackNode)
cmds.parent(chestMasterTrackNode, "body_anim")
#hide locator
cmds.setAttr(chestMasterTrackNode + ".v", 0)
#Rotate Y
cmds.connectAttr(chestTrackNode + ".ry", topCtrlMultRY + ".input1X")
cmds.connectAttr("chest_ik_anim.rotationInfluence", topCtrlMultRY + ".input2X")
cmds.connectAttr(topCtrlMultRY + ".outputX", topCtrlMultSwitchRY + ".input1X")
cmds.connectAttr("chest_ik_anim.autoSpine", topCtrlMultSwitchRY + ".input2X")
cmds.connectAttr(topCtrlMultSwitchRY + ".outputX", midDriver + ".tz")
#Rotate Z
multInverse = cmds.shadingNode("multiplyDivide", asUtility = True, name = "autoSpine_mult_rz_inverse")
cmds.connectAttr("chest_ik_anim.rotationInfluence", multInverse + ".input1X")
cmds.setAttr(multInverse + ".input2X", -1)
cmds.connectAttr(chestTrackNode + ".rz", topCtrlMultRZ + ".input1X")
cmds.connectAttr(multInverse + ".outputX", topCtrlMultRZ + ".input2X")
cmds.connectAttr(topCtrlMultRZ + ".outputX", topCtrlMultSwitchRZ + ".input1X")
cmds.connectAttr("chest_ik_anim.autoSpine", topCtrlMultSwitchRZ + ".input2X")
cmds.connectAttr(topCtrlMultSwitchRZ + ".outputX", midDriver + ".ty")
#Translate X
#Chest Control Translate X + Hip Control Translate X / 2 * autpSpine
autoSpineTXNode = cmds.shadingNode("plusMinusAverage", asUtility = True, name = midDriverTop + "_TX_Avg")
cmds.setAttr(autoSpineTXNode + ".operation", 3)
autoSpineTX_MultNode = cmds.shadingNode("multiplyDivide", asUtility = True, name = midDriverTop + "_TX_Mult")
cmds.connectAttr("chest_ik_anim.translateX", autoSpineTXNode + ".input1D[0]")
cmds.connectAttr("hip_anim.translateX", autoSpineTXNode + ".input1D[1]")
cmds.connectAttr(autoSpineTXNode + ".output1D", autoSpineTX_MultNode + ".input1X")
cmds.connectAttr("chest_ik_anim.autoSpine", autoSpineTX_MultNode + ".input2X")
cmds.connectAttr(autoSpineTX_MultNode + ".outputX", midDriverTop + ".translateX")
#Translate Y
autoSpineTYNode = cmds.shadingNode("plusMinusAverage", asUtility = True, name = midDriverTop + "_TY_Avg")
cmds.setAttr(autoSpineTYNode + ".operation", 3)
autoSpineTY_MultNode = cmds.shadingNode("multiplyDivide", asUtility = True, name = midDriverTop + "_TY_Mult")
cmds.connectAttr(chestTrackNode + ".translateY", autoSpineTYNode + ".input1D[0]")
cmds.connectAttr("hip_anim.translateY", autoSpineTYNode + ".input1D[1]")
cmds.connectAttr(autoSpineTYNode + ".output1D", autoSpineTY_MultNode + ".input1X")
cmds.connectAttr("chest_ik_anim.autoSpine", autoSpineTY_MultNode + ".input2X")
cmds.connectAttr(autoSpineTY_MultNode + ".outputX", midDriverTop + ".translateY")
#Translate Z
autoSpineTZNode = cmds.shadingNode("plusMinusAverage", asUtility = True, name = midDriverTop + "_TZ_Avg")
cmds.setAttr(autoSpineTZNode + ".operation", 3)
autoSpineTZ_MultNode = cmds.shadingNode("multiplyDivide", asUtility = True, name = midDriverTop + "_TZ_Mult")
cmds.connectAttr(chestTrackNode + ".translateZ", autoSpineTZNode + ".input1D[0]")
cmds.connectAttr("hip_anim.translateZ", autoSpineTZNode + ".input1D[1]")
cmds.connectAttr(autoSpineTZNode + ".output1D", autoSpineTZ_MultNode + ".input1X")
cmds.connectAttr("chest_ik_anim.autoSpine", autoSpineTZ_MultNode + ".input2X")
cmds.connectAttr(autoSpineTZ_MultNode + ".outputX", midDriverTop + ".translateZ")
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
def hookupSpine(self, ikControls, fkControls):
    """
    Constrain the driver spine joints to both the FK and IK spine systems and
    wire the blend into two new Rig_Settings attributes (spine_ik / spine_fk).

    ikControls: ordered list of splineIK control/joint names (base -> top); each
                name contains "splineIK_" and maps to a "driver_" joint.
    fkControls: list of FK spine controls, index-aligned with ikControls.

    Side effects: creates parent/scale/point/orient constraints and blendColors
    nodes, adds spine_ik / spine_fk to Rig_Settings, connects spine control
    group visibility, and leaves the rig in IK mode (spine_ik = 1).
    """
    #constrain the driver joints to the FK and IK controls
    i = 0
    upAxis = "X"
    for joint in ikControls:
        #derive the driver joint name from the splineIK name
        driverJoint = joint.partition("splineIK_")[2]
        driverJoint = "driver_" + driverJoint
        if joint == ikControls[0]:
            #base joint: determine the aim (down-the-chain) axis once; the other
            #two axes are the ones whose scale gets blended on the middle joints
            upAxis = self.getUpAxis(driverJoint)
            if upAxis == "X":
                axisB = "Y"
                axisC = "Z"
            if upAxis == "Y":
                axisB = "X"
                axisC = "Z"
            if upAxis == "Z":
                axisB = "X"
                axisC = "Y"
            twistJoint = "twist_" + joint
            #base joint gets full parent + scale constraints to twist(IK) and FK
            cmds.parentConstraint([twistJoint, fkControls[i]], driverJoint, mo = True)
            cmds.scaleConstraint([twistJoint, fkControls[i]], driverJoint, mo = True)
            scaleDriverJoint = driverJoint
        if joint != ikControls[len(ikControls) - 1] and joint != ikControls[0]:
            #middle joints: parent constraint plus a blendColors node that blends
            #the two off-axis scale channels between the twist joint and FK control
            twistJoint = "twist_" + joint
            cmds.parentConstraint([twistJoint, fkControls[i]], driverJoint, mo = True)
            #create blendColors nodes for scale
            blenderNodeScale = cmds.shadingNode("blendColors", asUtility = True, name = driverJoint + "_blenderNodeScale")
            cmds.connectAttr(twistJoint + ".scale" + axisB, blenderNodeScale + ".color1R")
            cmds.connectAttr(fkControls[i] + ".scale" + axisB, blenderNodeScale + ".color2R")
            cmds.connectAttr(twistJoint + ".scale" + axisC, blenderNodeScale + ".color1G")
            cmds.connectAttr(fkControls[i] + ".scale" + axisC, blenderNodeScale + ".color2G")
            cmds.connectAttr(blenderNodeScale + ".outputR", driverJoint + ".scale" + axisB)
            cmds.connectAttr(blenderNodeScale + ".outputG", driverJoint + ".scale" + axisC)
        if joint == ikControls[len(ikControls) - 1]:
            #top joint: orientation comes from the splineIK top joint, translation
            #from the twist joint (separate orient + point instead of parent)
            topJoint = "spine_splineIK_top_joint"
            twistJoint = "twist_" + joint
            cmds.orientConstraint([topJoint, fkControls[i]], driverJoint, mo = True)
            cmds.pointConstraint([twistJoint, fkControls[i]], driverJoint, mo = True)
        i = i + 1
    #add attributes to the Rig_Settings node
    cmds.select("Rig_Settings")
    cmds.addAttr(longName='spine_ik', defaultValue=0, minValue=0, maxValue=1, keyable = True)
    cmds.addAttr(longName='spine_fk', defaultValue=0, minValue=0, maxValue=1, keyable = True)
    #hookup Rig_Settings attrs to the parentConstraint weights
    #find connection targets
    for ctrl in ikControls:
        driverJoint = ctrl.partition("splineIK_")[2]
        driverJoint = "driver_" + driverJoint
        constraint = driverJoint + "_parentConstraint1"
        #hook up blendColors scale node. Only the middle joints have one, so
        #check existence explicitly instead of swallowing all errors with a
        #bare except (the old behavior hid genuine connection failures too).
        blenderNodeScale = driverJoint + "_blenderNodeScale"
        if cmds.objExists(blenderNodeScale):
            cmds.connectAttr("Rig_Settings" + ".spine_ik", blenderNodeScale + ".blender")
        if cmds.objExists(constraint):
            #drive each constraint weight: targets whose alias contains "IK"
            #follow spine_ik, everything else follows spine_fk
            targets = cmds.parentConstraint(constraint, q = True, weightAliasList = True)
            for target in targets:
                if target.find("IK") != -1:
                    cmds.connectAttr("Rig_Settings" + ".spine_ik", constraint + "." + target)
                else:
                    cmds.connectAttr("Rig_Settings" + ".spine_fk", constraint + "." + target)
        else:
            #top joint has separate point + orient constraints instead of a
            #parentConstraint; drive both sets of weights the same way
            pointConstraint = driverJoint + "_pointConstraint1"
            orientConstraint = driverJoint + "_orientConstraint1"
            pointTargets = cmds.pointConstraint(pointConstraint, q = True, weightAliasList = True)
            orientTargets = cmds.orientConstraint(orientConstraint, q = True, weightAliasList = True)
            for target in pointTargets:
                if target.find("IK") != -1:
                    cmds.connectAttr("Rig_Settings" + ".spine_ik", pointConstraint + "." + target)
                else:
                    cmds.connectAttr("Rig_Settings" + ".spine_fk", pointConstraint + "." + target)
            for target in orientTargets:
                if target.find("IK") != -1:
                    cmds.connectAttr("Rig_Settings" + ".spine_ik", orientConstraint + "." + target)
                else:
                    cmds.connectAttr("Rig_Settings" + ".spine_fk", orientConstraint + "." + target)
    #hook up spine control vis to the rig settings
    cmds.connectAttr("Rig_Settings" + ".spine_fk", "spine_01_anim_grp.v")
    cmds.connectAttr("Rig_Settings" + ".spine_ik", "mid_ik_anim_grp.v")
    cmds.connectAttr("Rig_Settings" + ".spine_ik", "chest_ik_anim_grp.v")
    #set spine rig to be IK by default
    cmds.setAttr("Rig_Settings" + ".spine_ik", 1)
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
def buildFKArms(self):
    """
    Build the FK arm rig for each side ("l"/"r") that has arm driver joints.

    Per side this creates: the FK joint chain (clavicle/upperarm/lowerarm/hand)
    duplicated from the driver joints, an automatic-clavicle system driven by an
    invisible duplicate arm + SC ikHandle, the clavicle and FK arm/elbow/wrist
    controls, FK orientation spaces (normal/body/world) with set-driven keys on
    Rig_Settings, and an autoShoulders attribute on the clavicle control.

    Relies on scene nodes created earlier in the build ("driver_*" joints,
    "body_anim", "master_anim", "offset_anim", "Rig_Settings").
    """
    #duplicate our driver joints to create our FK arm skeleton
    for side in ["l", "r"]:
        #if the side has an arm, create the fk joints
        if cmds.objExists("driver_upperarm_" + side):
            autoClavJointStart = cmds.duplicate("driver_clavicle_" + side, po = True, name = "auto_clavicle_" + side)[0]
            clavJoint = cmds.duplicate("driver_clavicle_" + side, po = True, name = "rig_clavicle_" + side)[0]
            fkArmJoint = cmds.duplicate("driver_upperarm_" + side, po = True, name = "fk_upperarm_" + side)[0]
            fkElbowJoint = cmds.duplicate("driver_lowerarm_" + side, po = True, name = "fk_lowerarm_" + side)[0]
            fkWristJoint = cmds.duplicate("driver_hand_" + side, po = True, name = "fk_hand_" + side)[0]
            #parent the fk clavicle to the world so the chain can be rebuilt cleanly
            parent = cmds.listRelatives(clavJoint, parent = True)
            if parent != None:
                cmds.parent(clavJoint, world = True)
            #recreate the fk arm hierarchy
            cmds.parent(fkArmJoint, clavJoint)
            cmds.parent(fkElbowJoint, fkArmJoint)
            cmds.parent(fkWristJoint, fkElbowJoint)
            #freeze rotations only, baking them into the joint orients
            cmds.makeIdentity(fkArmJoint, t = 0, r = 1, s = 0, apply = True)
            #set rotation order on fk arm joint (3 = xzy)
            cmds.setAttr(fkArmJoint + ".rotateOrder", 3)
            #create the shoulder hierarchy
            parent = cmds.listRelatives(autoClavJointStart, parent = True)
            if parent != None:
                cmds.parent(autoClavJointStart, world = True)
            autoClavEndJoint = cmds.duplicate(fkArmJoint, parentOnly = True, name = "auto_clavicle_end_" + side)[0]
            #keep the end joint at the shoulder's world XY but at the clav start's Z
            pos = cmds.xform(autoClavEndJoint, q = True, ws = True, t = True)
            zPos = cmds.xform(autoClavJointStart, q = True, ws = True, t = True)[2]
            cmds.xform(autoClavEndJoint, ws = True, t = [pos[0], pos[1], zPos])
            cmds.parent(autoClavEndJoint, autoClavJointStart)
            #create the IK for the clavicle
            ikNodes = cmds.ikHandle(sj = autoClavJointStart, ee = autoClavEndJoint, sol = "ikSCsolver", name = "auto_clav_to_elbow_ikHandle_" + side)[0]
            #position the IK handle at the elbow joint
            constraint = cmds.pointConstraint(fkElbowJoint, ikNodes)[0]
            cmds.delete(constraint)
            #create our autoClav world grp
            autoClavWorld = cmds.group(empty = True, name = "auto_clav_world_grp_" + side)
            constraint = cmds.pointConstraint(autoClavEndJoint, autoClavWorld)[0]
            cmds.delete(constraint)
            cmds.makeIdentity(autoClavWorld, t = 1, r = 1, s = 1, apply = True)
            #duplicate the FK arm to create our invisible arm (drives the auto clav
            #without being affected by it)
            invisUpArm = cmds.duplicate(fkArmJoint, po = True, name = "invis_upperarm_" + side)[0]
            invisLowArm = cmds.duplicate(fkElbowJoint, po = True, name = "invis_lowerarm_" + side)[0]
            invisHand = cmds.duplicate(fkWristJoint, po = True, name = "invis_hand_" + side)[0]
            cmds.parent(invisHand, invisLowArm)
            cmds.parent(invisLowArm, invisUpArm)
            cmds.parent(invisUpArm, autoClavWorld)
            #create our upperarm orient locator
            invisArmOrient = cmds.spaceLocator(name = "invis_arm_orient_loc_" + side)[0]
            invisArmOrientGrp = cmds.group(empty = True, name = "invis_arm_orient_loc_grp_" + side)
            constraint = cmds.parentConstraint(fkArmJoint, invisArmOrient)[0]
            cmds.delete(constraint)
            constraint = cmds.parentConstraint(fkArmJoint, invisArmOrientGrp)[0]
            cmds.delete(constraint)
            cmds.parent(invisArmOrient, invisArmOrientGrp)
            #create the invis arm fk control (to derive autoClav info)
            invisFkArmCtrl = self.createControl("circle", 20, "invis_fk_arm_" + side + "_anim")
            cmds.setAttr(invisFkArmCtrl + ".ry", -90)
            cmds.makeIdentity(invisFkArmCtrl, r = 1, apply =True)
            constraint = cmds.parentConstraint(fkArmJoint, invisFkArmCtrl)[0]
            cmds.delete(constraint)
            invisFkArmCtrlGrp = cmds.group(empty = True, name = "invis_fk_arm_" + side + "_grp")
            constraint = cmds.parentConstraint(fkArmJoint, invisFkArmCtrlGrp)[0]
            cmds.delete(constraint)
            cmds.parent(invisFkArmCtrl, invisFkArmCtrlGrp)
            #position the arm FK control so that it is about halfway down the arm length
            dist = (cmds.getAttr(fkElbowJoint + ".tx")) / 2
            cmds.setAttr(invisFkArmCtrl + ".translateX", dist)
            #set the pivot of the arm control back to the arm joint
            piv = cmds.xform(fkArmJoint, q = True, ws = True, rotatePivot = True)
            cmds.xform(invisFkArmCtrl, ws = True, piv = [piv[0], piv[1], piv[2]])
            #freeze transforms on the control
            cmds.makeIdentity(invisFkArmCtrl, t = 1, r = 1, s = 1, apply = True)
            #parent the orient arm grp to the fk ctrl
            cmds.parent(invisArmOrientGrp, invisFkArmCtrl)
            #duplicate the invis arm fk control setup for the real fk control(upper arm)
            dupeNodes = cmds.duplicate(invisFkArmCtrlGrp, rc = True)
            for node in dupeNodes:
                #strip the "invis_" prefix (and Maya's "1" duplicate suffix) to
                #produce the real control names, e.g. fk_arm_l_anim
                name = node.partition("invis_")[2]
                if name.find("1") != -1:
                    name = name.partition("1")[0]
                cmds.rename(node, name)
            #orient constrain the invis fk up arm to the invis up arm orient loc. Also do this for the real arm
            cmds.orientConstraint(invisArmOrient, invisUpArm)
            cmds.orientConstraint("arm_orient_loc_" + side, fkArmJoint)
            #connect invis arm ctrl rotates to be driven by real arm control rotates
            cmds.connectAttr("fk_arm_" + side + "_anim.rotate", invisFkArmCtrl + ".rotate")
            #the following section of code will essentially give us a vector from the clav joint to the elbow. This will help to drive the rotations of the auto clav
            #create our locators to determine elbow's position in space (will drive the ik handle on the auto clav)
            autoTransLoc = cmds.spaceLocator(name = "elbow_auto_trans_loc_" + side)[0]
            constraint = cmds.pointConstraint(fkElbowJoint, autoTransLoc)[0]
            cmds.delete(constraint)
            #duplicate the locator to create a parent loc
            autoTransLocParent = cmds.duplicate(autoTransLoc, po = True, name = "elbow_auto_trans_loc_parent_" + side)[0]
            cmds.pointConstraint(autoTransLoc, ikNodes)
            cmds.parent(autoTransLoc, autoTransLocParent)
            cmds.parent(autoTransLocParent, autoClavWorld)
            cmds.makeIdentity(autoTransLocParent, t = 1, r = 1, s = 1, apply = True)
            #constrain the parent trans loc(elbow) to the invis elbow joint
            cmds.pointConstraint(invisLowArm, autoTransLocParent, mo = True)
            #create our locator that will handle switching between auto clav and manual clav. position at end joint (autoClavEndJoint)
            autoClavSwitchLoc = cmds.spaceLocator(name = "auto_clav_switch_loc_" + side)[0]
            constraint = cmds.pointConstraint(autoClavEndJoint, autoClavSwitchLoc)[0]
            cmds.delete(constraint)
            cmds.parent(autoClavSwitchLoc, autoClavWorld)
            cmds.makeIdentity(autoClavSwitchLoc, t = 1, r = 1, s = 1, apply = True)
            cmds.parent(autoClavJointStart, autoClavWorld)
            #setup constraint for switching between auto/manual
            autoClavSwitchConstraint = cmds.pointConstraint([autoClavEndJoint, autoClavWorld], autoClavSwitchLoc, mo = True)[0]
            cmds.setAttr(autoClavSwitchConstraint + "." + autoClavWorld + "W1", 0)
            #create our IK for the auto clav to move
            autoClavIK = cmds.ikHandle(sj = clavJoint, ee = fkArmJoint, sol = "ikSCsolver", name = "auto_clav_ikHandle_" + side)[0]
            autoClavIKGrp = cmds.group(empty = True, name = "auto_clav_ikHandle_grp_" + side)
            constraint = cmds.pointConstraint(autoClavIK, autoClavIKGrp)[0]
            cmds.delete(constraint)
            #BUGFIX: duplicate the ik handle group explicitly. The original call
            #passed no object and duplicated whatever was selected at the time.
            autoClavIKGrpMaster = cmds.duplicate(autoClavIKGrp, po = True, name = "auto_clav_ikHandle_grp_master_" + side)[0]
            cmds.parent(autoClavIKGrpMaster, autoClavSwitchLoc)
            cmds.parent(autoClavIKGrp, autoClavIKGrpMaster)
            cmds.parent(autoClavIK, autoClavIKGrp)
            #cmds.makeIdentity(autoClavIKGrp, t = 1, r = 1, s = 1, apply = True)
            #create the shoulder control
            shoulderCtrl = self.createControl("pin", 1.5, "clavicle_" + side + "_anim")
            cmds.setAttr(shoulderCtrl + ".ry", 90)
            cmds.setAttr(shoulderCtrl + ".rx", 90)
            constraint = cmds.pointConstraint(fkArmJoint, shoulderCtrl)[0]
            cmds.delete(constraint)
            shoulderCtrlGrp = cmds.group(empty = True, name = "clavicle_" + side + "_anim_grp")
            #NOTE(review): this snaps shoulderCtrl a second time while the new
            #shoulderCtrlGrp is left at the origin — possibly shoulderCtrlGrp was
            #intended here. Preserved as-is; confirm against the original rig.
            constraint = cmds.pointConstraint(fkArmJoint, shoulderCtrl)[0]
            cmds.delete(constraint)
            cmds.parent(shoulderCtrl, shoulderCtrlGrp)
            cmds.parent(shoulderCtrlGrp, autoClavWorld)
            cmds.makeIdentity(shoulderCtrl, t = 1, r = 1, s = 1, apply = True)
            cmds.setAttr(shoulderCtrl + ".sx", .65)
            cmds.setAttr(shoulderCtrl + ".sy", 1.2)
            cmds.setAttr(shoulderCtrl + ".sz", 1.2)
            cmds.makeIdentity(shoulderCtrl, t = 1, r = 1, s = 1, apply = True)
            #connect shoulder ctrl translate to the autoClavIKGrp translate
            cmds.connectAttr(shoulderCtrl + ".translate", autoClavIKGrp + ".translate")
            cmds.connectAttr(autoClavSwitchLoc + ".translate", shoulderCtrl + ".rotatePivotTranslate")
            #set limits on shoulder control
            cmds.select(shoulderCtrl)
            if side == "l":
                cmds.transformLimits(tx = (-1,0), etx = (False, True))
            else:
                cmds.transformLimits(tx = (0,1), etx = (True, False))
            #create FK elbow control
            fkElbowCtrl = self.createControl("circle", 18, "fk_elbow_" + side + "_anim")
            cmds.setAttr(fkElbowCtrl + ".ry", -90)
            cmds.makeIdentity(fkElbowCtrl, r = 1, apply =True)
            constraint = cmds.parentConstraint(fkElbowJoint, fkElbowCtrl)[0]
            cmds.delete(constraint)
            fkElbowCtrlGrp = cmds.group(empty = True, name = "fk_elbow_" + side + "_anim_grp")
            constraint = cmds.parentConstraint(fkElbowJoint, fkElbowCtrlGrp)[0]
            cmds.delete(constraint)
            cmds.parent(fkElbowCtrl, fkElbowCtrlGrp)
            cmds.makeIdentity(fkElbowCtrl, t = 1, r = 1, s = 1, apply = True)
            cmds.parent(fkElbowCtrlGrp, "fk_arm_" + side + "_anim")
            #constrain elbow joint to ctrl
            cmds.parentConstraint(fkElbowCtrl, fkElbowJoint)
            #create FK wrist control
            fkWristCtrl = self.createControl("circle", 15, "fk_wrist_" + side + "_anim")
            cmds.setAttr(fkWristCtrl + ".ry", -90)
            cmds.makeIdentity(fkWristCtrl, r = 1, apply =True)
            constraint = cmds.parentConstraint(fkWristJoint, fkWristCtrl)[0]
            cmds.delete(constraint)
            fkWristCtrlGrp = cmds.group(empty = True, name = "fk_wrist_" + side + "_anim_grp")
            constraint = cmds.parentConstraint(fkWristJoint, fkWristCtrlGrp)[0]
            cmds.delete(constraint)
            cmds.parent(fkWristCtrl, fkWristCtrlGrp)
            cmds.makeIdentity(fkWristCtrl, t = 1, r = 1, s = 1, apply = True)
            cmds.parent(fkWristCtrlGrp, fkElbowCtrl)
            #constrain wrist joint to ctrl
            cmds.parentConstraint(fkWristCtrl, fkWristJoint)
            #point constrain the fk arm grp to the fk upper arm joint
            cmds.pointConstraint(fkArmJoint, "fk_arm_" + side + "_grp")
            #clean up FK rig in scene
            cmds.parent(invisFkArmCtrlGrp, autoClavWorld)
            #find children under autoClavWorld and tuck them into a hidden DNT grp
            children = cmds.listRelatives(autoClavWorld, children = True)
            dntGrp = cmds.group(empty = True, name = "auto_clav_sys_grp_" + side)
            cmds.parent(dntGrp, autoClavWorld)
            for child in children:
                cmds.parent(child, dntGrp)
            cmds.parent(shoulderCtrlGrp, autoClavWorld)
            #group up the groups!
            jointsGrp = cmds.group(empty = True, name = "joints_" + side + "_grp")
            cmds.parent(clavJoint, jointsGrp)
            masterGrp = cmds.group(empty = True, name = "arm_rig_master_grp_" + side)
            constraint = cmds.pointConstraint(fkArmJoint, masterGrp)[0]
            cmds.delete(constraint)
            cmds.parent([ikNodes, jointsGrp, autoClavWorld], masterGrp)
            cmds.setAttr(dntGrp + ".v", 0)
            cmds.setAttr(ikNodes + ".v", 0)
            #add fk orientation options(normal, body, world)
            fkOrient = cmds.spaceLocator(name = "fk_orient_master_loc_" + side)[0]
            shape = cmds.listRelatives(fkOrient, shapes = True)[0]
            cmds.setAttr(shape + ".v", 0)
            constraint = cmds.parentConstraint(autoClavWorld, fkOrient)[0]
            cmds.delete(constraint)
            fkNormalOrient = cmds.duplicate(fkOrient, po = True, name = "fk_orient_normal_loc_" + side)[0]
            fkBodyOrient = cmds.duplicate(fkOrient, po = True, name = "fk_orient_body_loc_" + side)[0]
            fkWorldOrient = cmds.duplicate(fkOrient, po = True, name = "fk_orient_world_loc_" + side)[0]
            fkOrientConstraint = cmds.orientConstraint([fkNormalOrient, fkBodyOrient, fkWorldOrient], fkOrient)[0]
            cmds.parent(fkBodyOrient, "body_anim")
            cmds.parent(fkNormalOrient, shoulderCtrl)
            cmds.parent(fkOrient, masterGrp)
            #parent FK arm ctrl grp to shoulder ctrl
            cmds.parent("fk_arm_" + side + "_grp", fkOrient)
            #put mode into default fk operation
            cmds.setAttr(fkOrientConstraint + "." + fkBodyOrient + "W1", 0)
            cmds.setAttr(fkOrientConstraint + "." + fkWorldOrient + "W2", 0)
            #get the number of spine bones and constrain the master grp to the last spine joint
            numSpineBones = self.getSpineJoints()
            cmds.parentConstraint("driver_spine_0" + str(len(numSpineBones)), masterGrp, mo = True)
            #setup autoShoulder attr: key both switch-constraint weights at
            #autoShoulders 0 and 1 so the attr crossfades auto vs. manual clav
            cmds.select(shoulderCtrl)
            cmds.addAttr(longName='autoShoulders', defaultValue=0, minValue=0, maxValue=1, keyable = True)
            cmds.setAttr(shoulderCtrl + ".autoShoulders", 0)
            cmds.setAttr(autoClavSwitchConstraint + "." + autoClavEndJoint + "W0", 0)
            cmds.setAttr(autoClavSwitchConstraint + "." + autoClavWorld + "W1", 1)
            cmds.setDrivenKeyframe([autoClavSwitchConstraint + "." + autoClavEndJoint + "W0", autoClavSwitchConstraint + "." + autoClavWorld + "W1"], cd = shoulderCtrl + ".autoShoulders", itt = "linear", ott = "linear")
            cmds.setAttr(shoulderCtrl + ".autoShoulders", 1)
            cmds.setAttr(autoClavSwitchConstraint + "." + autoClavEndJoint + "W0", 1)
            cmds.setAttr(autoClavSwitchConstraint + "." + autoClavWorld + "W1", 0)
            cmds.setDrivenKeyframe([autoClavSwitchConstraint + "." + autoClavEndJoint + "W0", autoClavSwitchConstraint + "." + autoClavWorld + "W1"], cd = shoulderCtrl + ".autoShoulders", itt = "linear", ott = "linear")
            cmds.setAttr(shoulderCtrl + ".autoShoulders", 0)
            #setup FK arm orient attr: key the three orient-constraint weights
            #against the enum (0 = fk/normal, 1 = body, 2 = world)
            cmds.select("Rig_Settings")
            cmds.addAttr(longName= side + "FkArmOrient", at = 'enum', en = "fk:body:world:", keyable = True)
            cmds.setAttr("Rig_Settings." + side + "FkArmOrient", 0)
            cmds.setAttr(fkOrientConstraint + "." + fkNormalOrient + "W0", 1)
            cmds.setAttr(fkOrientConstraint + "." + fkBodyOrient + "W1", 0)
            cmds.setAttr(fkOrientConstraint + "." + fkWorldOrient + "W2", 0)
            cmds.setDrivenKeyframe([fkOrientConstraint + "." + fkNormalOrient + "W0", fkOrientConstraint + "." + fkBodyOrient + "W1", fkOrientConstraint + "." + fkWorldOrient + "W2"], cd = "Rig_Settings." + side + "FkArmOrient", itt = "linear", ott = "linear")
            cmds.setAttr("Rig_Settings." + side + "FkArmOrient", 1)
            cmds.setAttr(fkOrientConstraint + "." + fkNormalOrient + "W0", 0)
            cmds.setAttr(fkOrientConstraint + "." + fkBodyOrient + "W1", 1)
            cmds.setAttr(fkOrientConstraint + "." + fkWorldOrient + "W2", 0)
            cmds.setDrivenKeyframe([fkOrientConstraint + "." + fkNormalOrient + "W0", fkOrientConstraint + "." + fkBodyOrient + "W1", fkOrientConstraint + "." + fkWorldOrient + "W2"], cd = "Rig_Settings." + side + "FkArmOrient", itt = "linear", ott = "linear")
            cmds.setAttr("Rig_Settings." + side + "FkArmOrient", 2)
            cmds.setAttr(fkOrientConstraint + "." + fkNormalOrient + "W0", 0)
            cmds.setAttr(fkOrientConstraint + "." + fkBodyOrient + "W1", 0)
            cmds.setAttr(fkOrientConstraint + "." + fkWorldOrient + "W2", 1)
            cmds.setDrivenKeyframe([fkOrientConstraint + "." + fkNormalOrient + "W0", fkOrientConstraint + "." + fkBodyOrient + "W1", fkOrientConstraint + "." + fkWorldOrient + "W2"], cd = "Rig_Settings." + side + "FkArmOrient", itt = "linear", ott = "linear")
            cmds.setAttr("Rig_Settings." + side + "FkArmOrient", 0)
            #setup limits on auto clavicle
            cmds.setAttr(shoulderCtrl + ".autoShoulders", 1)
            #cmds.setAttr("fk_arm_" + side + "_anim.ry", -60)
            #grrr. need to force update since maya is not getting the info correctly
            cmds.select("fk_arm_" + side + "_anim")
            cmds.setToolTo( 'moveSuperContext' )
            cmds.refresh(force = True)
            cmds.select(clear = True)
            #limitInfo = cmds.xform(autoClavSwitchLoc, q = True, t = True)
            #cmds.setAttr("fk_arm_" + side + "_anim.ry", 0)
            cmds.setAttr(shoulderCtrl + ".autoShoulders", 0)
            cmds.select("fk_arm_" + side + "_anim")
            cmds.setToolTo( 'moveSuperContext' )
            cmds.refresh(force = True)
            cmds.select(clear = True)
            #lock attrs that should not be animated and colorize controls
            for control in [shoulderCtrl, "fk_arm_" + side + "_anim", fkElbowCtrl, fkWristCtrl]:
                if control == shoulderCtrl:
                    #clavicle control is translate-only
                    for attr in [".rx", ".ry", ".rz", ".sx", ".sy", ".sz", ".v"]:
                        cmds.setAttr(control + attr, lock = True, keyable = False)
                else:
                    for attr in [".sx", ".sy", ".sz", ".v"]:
                        cmds.setAttr(control + attr, lock = True, keyable = False)
                #blue for left, red for right
                if side == "l":
                    color = 6
                else:
                    color = 13
                cmds.setAttr(control + ".overrideEnabled", 1)
                cmds.setAttr(control + ".overrideColor", color)
            #parent fkWorldOrient to master anim
            cmds.parent(fkWorldOrient, "master_anim")
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
def buildIkArms(self):
#duplicate the fk arm joints and create our ik arm chain
for side in ["l", "r"]:
if cmds.objExists("fk_upperarm_" + side):
ikUpArmJoint = cmds.duplicate("fk_upperarm_" + side, po = True, name = "ik_upperarm_" + side)[0]
ikLowArmJoint = cmds.duplicate("fk_lowerarm_" + side, po = True, name = "ik_lowerarm_" + side)[0]
ikWristJoint = cmds.duplicate("fk_hand_" + side, po = True, name = "ik_hand_" + side)[0]
ikWristEndJoint = cmds.duplicate("fk_hand_" + side, po = True, name = "ik_hand_end_" + side)[0]
cmds.parent([ikWristEndJoint], ikWristJoint)
cmds.parent(ikWristJoint, ikLowArmJoint)
cmds.parent(ikLowArmJoint, ikUpArmJoint)
#create fk matching joints
fkMatchUpArm = cmds.duplicate(ikUpArmJoint, po = True, name = "ik_upperarm_fk_matcher_" + side)[0]
fkMatchLowArm = cmds.duplicate(ikLowArmJoint, po = True, name = "ik_lowerarm_fk_matcher_" + side)[0]
fkMatchWrist = cmds.duplicate(ikWristJoint, po = True, name = "ik_wrist_fk_matcher_" + side)[0]
cmds.parent(fkMatchWrist, fkMatchLowArm)
cmds.parent(fkMatchLowArm, fkMatchUpArm)
cmds.parentConstraint(ikUpArmJoint, fkMatchUpArm, mo = True)
cmds.parentConstraint(ikLowArmJoint, fkMatchLowArm, mo = True)
cmds.parentConstraint(ikWristJoint, fkMatchWrist, mo = True)
#move wrist end joint out a bit
scaleFactor = self.getScaleFactor()
if side == "l":
cmds.setAttr(ikWristEndJoint + ".tx", 15 * scaleFactor)
else:
cmds.setAttr(ikWristEndJoint + ".tx", -15 * scaleFactor)
cmds.makeIdentity(ikUpArmJoint, t = 0, r = 1, s = 0, apply = True)
#set rotate order on ikUpArm
cmds.setAttr(ikUpArmJoint + ".rotateOrder", 3)
#set preferred angle on arm
cmds.setAttr(ikLowArmJoint + ".preferredAngleZ", -90)
#create ik control
ikCtrl = self.createControl("circle", 15, "ik_wrist_" + side + "_anim")
cmds.setAttr(ikCtrl + ".ry", -90)
cmds.makeIdentity(ikCtrl, r = 1, apply =True)
constraint = cmds.pointConstraint(ikWristJoint, ikCtrl)[0]
cmds.delete(constraint)
ikCtrlGrp = cmds.group(empty = True, name = "ik_wrist_" + side + "_anim_grp")
constraint = cmds.pointConstraint(ikWristJoint, ikCtrlGrp)[0]
cmds.delete(constraint)
ikCtrlSpaceSwitchFollow = cmds.duplicate(ikCtrlGrp, po = True, n = "ik_wrist_" + side + "_anim_space_switcher_follow")[0]
ikCtrlSpaceSwitch = cmds.duplicate(ikCtrlGrp, po = True, n = "ik_wrist_" + side + "_anim_space_switcher")[0]
cmds.parent(ikCtrlSpaceSwitch, ikCtrlSpaceSwitchFollow)
cmds.parent(ikCtrlGrp, ikCtrlSpaceSwitch)
cmds.parent(ikCtrl, ikCtrlGrp)
cmds.makeIdentity(ikCtrlGrp, t = 1, r = 1, s = 1, apply = True)
#create RP IK on arm and SC ik from wrist to wrist end
rpIkHandle = cmds.ikHandle(name = "arm_ikHandle_" + side, solver = "ikRPsolver", sj = ikUpArmJoint, ee = ikWristJoint)[0]
scIkHandle = cmds.ikHandle(name = "hand_ikHandle_" + side, solver = "ikSCsolver", sj = ikWristJoint, ee = ikWristEndJoint)[0]
cmds.parent(scIkHandle, rpIkHandle)
cmds.setAttr(rpIkHandle + ".v", 0)
cmds.setAttr(scIkHandle + ".v", 0)
#parent IK to ik control
cmds.parent(rpIkHandle, ikCtrl)
#create a pole vector control
ikPvCtrl = self.createControl("sphere", 6, "ik_elbow_" + side + "_anim")
constraint = cmds.pointConstraint(ikLowArmJoint, ikPvCtrl)[0]
cmds.delete(constraint)
cmds.makeIdentity(ikPvCtrl, t = 1, r = 1, s = 1, apply = True)
#move out a bit
cmds.setAttr(ikPvCtrl + ".ty", (30 * scaleFactor))
cmds.makeIdentity(ikPvCtrl, t = 1, r = 1, s = 1, apply = True)
#create group for control
ikPvCtrlGrp = cmds.group(empty = True, name = "ik_elbow_" + side + "_anim_grp")
constraint = cmds.parentConstraint(ikPvCtrl, ikPvCtrlGrp)[0]
cmds.delete(constraint)
ikPvSpaceSwitchFollow = cmds.duplicate(ikPvCtrlGrp, po = True, name = "ik_elbow_" + side + "_anim_space_switcher_follow")[0]
ikPvSpaceSwitch = cmds.duplicate(ikPvCtrlGrp, po = True, name = "ik_elbow_" + side + "_anim_space_switcher")[0]
cmds.parent(ikPvSpaceSwitch, ikPvSpaceSwitchFollow)
cmds.parent(ikPvCtrlGrp, ikPvSpaceSwitch)
cmds.parent(ikPvCtrl, ikPvCtrlGrp)
cmds.parent(ikPvSpaceSwitchFollow, "offset_anim")
cmds.makeIdentity(ikPvCtrl, t = 1, r = 1, s = 1, apply = True)
#setup pole vector constraint
cmds.poleVectorConstraint(ikPvCtrl, rpIkHandle)
#create IK for invisible arm
invisRpIkHandle = cmds.ikHandle(name = "invis_arm_ikHandle_" + side, solver = "ikRPsolver", sj = "invis_upperarm_" + side, ee = "invis_hand_" + side)[0]
cmds.parent(invisRpIkHandle, ikCtrl)
cmds.poleVectorConstraint(ikPvCtrl, invisRpIkHandle)
cmds.setAttr(invisRpIkHandle + ".v", 0)
#constrain driver joints to both fk and ik chains
cmds.parentConstraint("rig_clavicle_" + side, "driver_clavicle_" + side, mo = True)
upArmConstPoint = cmds.pointConstraint(["fk_arm_" + side + "_anim", "ik_upperarm_" + side], "driver_upperarm_" + side)[0]
upArmConstOrient = cmds.orientConstraint(["fk_upperarm_" + side, "ik_upperarm_" + side], "driver_upperarm_" + side)[0]
lowArmConst = cmds.parentConstraint(["fk_lowerarm_" + side, "ik_lowerarm_" + side], "driver_lowerarm_" + side)[0]
handConst = cmds.parentConstraint(["fk_hand_" + side, "ik_hand_" + side], "driver_hand_" + side, mo = True)[0]
#create blend nodes for the scale
scaleBlendColors_UpArm = cmds.shadingNode("blendColors", asUtility = True, name = side + "_up_arm_scale_blend")
cmds.connectAttr(ikUpArmJoint + ".scale", scaleBlendColors_UpArm + ".color1")
cmds.connectAttr("fk_arm_" + side + "_anim.scale", scaleBlendColors_UpArm + ".color2")
cmds.connectAttr(scaleBlendColors_UpArm + ".output", "driver_upperarm_" + side + ".scale")
scaleBlendColors_LoArm = cmds.shadingNode("blendColors", asUtility = True, name = side + "_lo_arm_scale_blend")
cmds.connectAttr(ikLowArmJoint + ".scale", scaleBlendColors_LoArm + ".color1")
cmds.connectAttr("fk_elbow_" + side + "_anim.scale", scaleBlendColors_LoArm + ".color2")
cmds.connectAttr(scaleBlendColors_LoArm + ".output", "driver_lowerarm_" + side + ".scale")
scaleBlendColors_Wrist = cmds.shadingNode("blendColors", asUtility = True, name = side + "_wrist_scale_blend")
cmds.connectAttr(ikWristJoint + ".scale", scaleBlendColors_Wrist + ".color1")
cmds.connectAttr("fk_wrist_" + side + "_anim.scale", scaleBlendColors_Wrist + ".color2")
cmds.connectAttr(scaleBlendColors_Wrist + ".output", "driver_hand_" + side + ".scale")
#set limits
cmds.select("driver_upperarm_" + side)
cmds.transformLimits(sy = (.05, 1.25), sz = (.05, 1.25), esy = [False, True], esz = [False, True])
cmds.select("driver_lowerarm_" + side)
cmds.transformLimits(sy = (.05, 1.25), sz = (.05, 1.25), esy = [False, True], esz = [False, True])
#create the IK/FK switch
cmds.select("Rig_Settings")
cmds.addAttr(longName= side + "ArmMode", at = 'enum', en = "fk:ik:", keyable = True)
cmds.setAttr("Rig_Settings." + side + "ArmMode", 0)
cmds.setAttr(upArmConstPoint + "." + "fk_arm_" + side + "_anim" + "W0", 1)
cmds.setAttr(upArmConstPoint + "." + "ik_upperarm_" + side + "W1", 0)
cmds.setAttr(upArmConstOrient + "." + "fk_upperarm_" + side + "W0", 1)
cmds.setAttr(upArmConstOrient + "." + "ik_upperarm_" + side + "W1", 0)
cmds.setAttr(scaleBlendColors_UpArm + "." + "blender", 0)
cmds.setAttr(lowArmConst + "." + "fk_lowerarm_" + side + "W0", 1)
cmds.setAttr(lowArmConst + "." + "ik_lowerarm_" + side + "W1", 0)
cmds.setAttr(scaleBlendColors_LoArm + "." + "blender", 0)
cmds.setAttr(handConst + "." + "fk_hand_" + side + "W0", 1)
cmds.setAttr(handConst + "." + "ik_hand_" + side + "W1", 0)
cmds.setAttr(scaleBlendColors_Wrist + "." + "blender", 0)
cmds.setAttr(invisRpIkHandle + ".ikBlend", 0)
cmds.setAttr("fk_arm_" + side + "_grp.v", 1)
cmds.setAttr("ik_wrist_" + side + "_anim_space_switcher.v", 0)
cmds.setAttr("ik_elbow_" + side + "_anim_space_switcher.v", 0)
cmds.setDrivenKeyframe([scaleBlendColors_UpArm + "." + "blender", scaleBlendColors_LoArm + "." + "blender", scaleBlendColors_Wrist + "." + "blender", upArmConstPoint + "." + "fk_arm_" + side + "_anim" + "W0", upArmConstPoint + "." + "ik_upperarm_" + side + "W1", ], cd = "Rig_Settings." + side + "ArmMode", itt = "linear", ott = "linear")
cmds.setDrivenKeyframe([upArmConstOrient + "." + "fk_upperarm_" + side + "W0", upArmConstOrient + "." + "ik_upperarm_" + side + "W1", ], cd = "Rig_Settings." + side + "ArmMode", itt = "linear", ott = "linear")
cmds.setDrivenKeyframe([lowArmConst + "." + "fk_lowerarm_" + side + "W0", lowArmConst + "." + "ik_lowerarm_" + side + "W1", ], cd = "Rig_Settings." + side + "ArmMode", itt = "linear", ott = "linear")
cmds.setDrivenKeyframe([handConst + "." + "fk_hand_" + side + "W0", handConst + "." + "ik_hand_" + side + "W1", ], cd = "Rig_Settings." + side + "ArmMode", itt = "linear", ott = "linear")
cmds.setDrivenKeyframe([invisRpIkHandle + ".ikBlend", "fk_arm_" + side + "_grp.v"], cd = "Rig_Settings." + side + "ArmMode", itt = "linear", ott = "linear")
cmds.setDrivenKeyframe(["ik_wrist_" + side + "_anim_space_switcher.v", "ik_elbow_" + side + "_anim_space_switcher.v"], cd = "Rig_Settings." + side + "ArmMode", itt = "linear", ott = "linear")
cmds.setAttr("Rig_Settings." + side + "ArmMode", 1)
cmds.setAttr(upArmConstPoint + "." + "fk_arm_" + side + "_anim" "W0", 0)
cmds.setAttr(upArmConstPoint + "." + "ik_upperarm_" + side + "W1", 1)
cmds.setAttr(upArmConstOrient + "." + "fk_upperarm_" + side + "W0", 0)
cmds.setAttr(upArmConstOrient + "." + "ik_upperarm_" + side + "W1", 1)
cmds.setAttr(scaleBlendColors_UpArm + "." + "blender", 1)
cmds.setAttr(lowArmConst + "." + "fk_lowerarm_" + side + "W0", 0)
cmds.setAttr(lowArmConst + "." + "ik_lowerarm_" + side + "W1", 1)
cmds.setAttr(scaleBlendColors_LoArm + "." + "blender", 1)
cmds.setAttr(handConst + "." + "fk_hand_" + side + "W0", 0)
cmds.setAttr(handConst + "." + "ik_hand_" + side + "W1", 1)
cmds.setAttr(scaleBlendColors_Wrist + "." + "blender", 1)
cmds.setAttr(invisRpIkHandle + ".ikBlend", 1)
cmds.setAttr("fk_arm_" + side + "_grp.v", 0)
cmds.setAttr("ik_wrist_" + side + "_anim_space_switcher.v", 1)
cmds.setAttr("ik_elbow_" + side + "_anim_space_switcher.v", 1)
cmds.setDrivenKeyframe([scaleBlendColors_UpArm + "." + "blender", scaleBlendColors_LoArm + "." + "blender", scaleBlendColors_Wrist + "." + "blender", upArmConstPoint + "." + "fk_arm_" + side + "_anim" + "W0", upArmConstPoint + "." + "ik_upperarm_" + side + "W1", ], cd = "Rig_Settings." + side + "ArmMode", itt = "linear", ott = "linear")
cmds.setDrivenKeyframe([upArmConstOrient + "." + "fk_upperarm_" + side + "W0", upArmConstOrient + "." + "ik_upperarm_" + side + "W1", ], cd = "Rig_Settings." + side + "ArmMode", itt = "linear", ott = "linear")
cmds.setDrivenKeyframe([lowArmConst + "." + "fk_lowerarm_" + side + "W0", lowArmConst + "." + "ik_lowerarm_" + side + "W1", ], cd = "Rig_Settings." + side + "ArmMode", itt = "linear", ott = "linear")
cmds.setDrivenKeyframe([handConst + "." + "fk_hand_" + side + "W0", handConst + "." + "ik_hand_" + side + "W1", ], cd = "Rig_Settings." + side + "ArmMode", itt = "linear", ott = "linear")
cmds.setDrivenKeyframe([invisRpIkHandle + ".ikBlend", "fk_arm_" + side + "_grp.v"], cd = "Rig_Settings." + side + "ArmMode", itt = "linear", ott = "linear")
cmds.setDrivenKeyframe(["ik_wrist_" + side + "_anim_space_switcher.v", "ik_elbow_" + side + "_anim_space_switcher.v"], cd = "Rig_Settings." + side + "ArmMode", itt = "linear", ott = "linear")
cmds.setAttr("Rig_Settings." + side + "ArmMode", 0)
#setup stretch on IK
cmds.select(ikCtrl)
cmds.addAttr(longName=("stretch"), at = 'double',min = 0, max = 1, dv = 0, keyable = True)
cmds.addAttr(longName=("squash"), at = 'double',min = 0, max = 1, dv = 0, keyable = True)
stretchMultNode = cmds.shadingNode("multiplyDivide", asUtility = True, name = "ikHand_stretchToggleMultNode_" + side)
#need to get the total length of the arm chain
totalDist = abs(cmds.getAttr(ikLowArmJoint + ".tx" ) + cmds.getAttr(ikWristJoint + ".tx"))
#create a distanceBetween node
distBetween = cmds.shadingNode("distanceBetween", asUtility = True, name = side + "_ik_arm_distBetween")
#get world positions of upper arm and ik
baseGrp = cmds.group(empty = True, name = "ik_arm_base_grp_" + side)
endGrp = cmds.group(empty = True, name = "ik_arm_end_grp_" + side)
cmds.pointConstraint(ikUpArmJoint, baseGrp)
cmds.pointConstraint(ikCtrl, endGrp)
#hook in group translates into distanceBetween node inputs
cmds.connectAttr(baseGrp + ".translate", distBetween + ".point1")
cmds.connectAttr(endGrp + ".translate", distBetween + ".point2")
#create a condition node that will compare original length to current length
#if second term is greater than, or equal to the first term, the chain needs to stretch
ikArmCondition = cmds.shadingNode("condition", asUtility = True, name = side + "_ik_arm_stretch_condition")
cmds.setAttr(ikArmCondition + ".operation", 3)
cmds.connectAttr(distBetween + ".distance", ikArmCondition + ".secondTerm")
cmds.setAttr(ikArmCondition + ".firstTerm", totalDist)
#hook up the condition node's return colors
cmds.setAttr(ikArmCondition + ".colorIfTrueR", totalDist)
cmds.connectAttr(distBetween + ".distance", ikArmCondition + ".colorIfFalseR")
#create the mult/divide node(set to divide) that will take the original creation length as a static value in input2x, and the connected length into 1x.
armDistMultNode = cmds.shadingNode("multiplyDivide", asUtility = True, name = "arm_dist_multNode_" + side)
cmds.setAttr(armDistMultNode + ".operation", 2) #divide
cmds.setAttr(armDistMultNode + ".input2X", totalDist)
cmds.connectAttr(ikArmCondition + ".outColorR", armDistMultNode + ".input1X")
#create a stretch toggle mult node that multiplies the stretch factor by the bool of the stretch attr. (0 or 1), this way our condition reads
#if this result is greater than the original length(impossible if stretch bool is off, since result will be 0), than take this result and plug it
#into the scale of our IK arm joints
stretchToggleCondition = cmds.shadingNode("condition", asUtility = True, name = "arm_stretch_toggle_condition_" + side)
cmds.setAttr(stretchToggleCondition + ".operation", 0)
cmds.connectAttr(ikCtrl + ".stretch", stretchToggleCondition + ".firstTerm")
cmds.setAttr(stretchToggleCondition + ".secondTerm", 1)
cmds.connectAttr(armDistMultNode + ".outputX", stretchToggleCondition + ".colorIfTrueR")
cmds.setAttr(stretchToggleCondition + ".colorIfFalseR", 1)
#set up the squash nodes
squashMultNode = cmds.shadingNode("multiplyDivide", asUtility = True, name = side + "_ik_arm_squash_mult")
cmds.setAttr(squashMultNode + ".operation", 2)
cmds.setAttr(squashMultNode + ".input1X", totalDist)
cmds.connectAttr(ikArmCondition + ".outColorR", squashMultNode + ".input2X")
#create a stretch toggle mult node that multiplies the stretch factor by the bool of the stretch attr. (0 or 1), this way our condition reads
#if this result is greater than the original length(impossible if stretch bool is off, since result will be 0), than take this result and plug it
#into the scale of our IK arm joints
squashToggleCondition = cmds.shadingNode("condition", asUtility = True, name = "arm_squash_toggle_condition_" + side)
cmds.setAttr(squashToggleCondition + ".operation", 0)
cmds.connectAttr(ikCtrl + ".squash", squashToggleCondition + ".firstTerm")
cmds.setAttr(squashToggleCondition + ".secondTerm", 1)
cmds.connectAttr(squashMultNode + ".outputX", squashToggleCondition + ".colorIfTrueR")
cmds.setAttr(squashToggleCondition + ".colorIfFalseR", 1)
#connect to arm scale
cmds.connectAttr(stretchToggleCondition + ".outColorR", ikUpArmJoint + ".sx")
cmds.connectAttr(stretchToggleCondition + ".outColorR", ikLowArmJoint + ".sx")
cmds.connectAttr(squashToggleCondition + ".outColorR", ikLowArmJoint + ".sy")
cmds.connectAttr(squashToggleCondition + ".outColorR", ikLowArmJoint + ".sz")
cmds.connectAttr(squashToggleCondition + ".outColorR", ikUpArmJoint + ".sy")
cmds.connectAttr(squashToggleCondition + ".outColorR", ikUpArmJoint + ".sz")
#add base and end groups to arm grp
cmds.parent([baseGrp, endGrp], "arm_rig_master_grp_" + side)
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
#setup roll bones if obj Exists
if cmds.objExists("driver_upperarm_twist_01_" + side):
self.buildArmRoll(side)
if cmds.objExists("driver_lowerarm_twist_01_" + side):
self.buildForearmTwist(side)
#colorize controls, cleanup attrs, and cleanup hierarchy
for attr in [".sx", ".sy", ".sz", ".v"]:
cmds.setAttr(ikCtrl + attr, lock = True, keyable = False)
for attr in [".sx", ".sy", ".sz", ".rx", ".ry", ".rz", ".v"]:
cmds.setAttr(ikPvCtrl + attr, lock = True, keyable = False)
for control in [ikCtrl, ikPvCtrl]:
if side == "l":
color = 6
else:
color = 13
cmds.setAttr(control + ".overrideEnabled", 1)
cmds.setAttr(control + ".overrideColor", color)
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
def _createFkSnapControl(self, shape, size, name, driver, ry, rz=None):
    """Create an FK control snapped to *driver* with a zeroing group.

    Builds a control via self.createControl, snaps both the control and a
    freshly created ``name + "_grp"`` group onto *driver* (snap = temporary
    parentConstraint that is immediately deleted), parents the control under
    the group, applies the ``ry`` (and optional ``rz``) shape-orientation
    offset, then freezes the control's transforms.

    Returns (control, controlGrp).
    """
    control = self.createControl(shape, size, name)
    cmds.delete(cmds.parentConstraint(driver, control)[0])
    controlGrp = cmds.group(empty = True, name = name + "_grp")
    cmds.delete(cmds.parentConstraint(driver, controlGrp)[0])
    cmds.parent(control, controlGrp)
    # rotate the curve shape to face down the joint, then freeze so the
    # control's channels read zero in its rest pose
    cmds.setAttr(control + ".ry", ry)
    if rz is not None:
        cmds.setAttr(control + ".rz", rz)
    cmds.makeIdentity(control, t = 1, r = 1, s = 1, apply = True)
    return control, controlGrp

def buildNeckAndHead(self):
    """Build the FK neck chain and head control.

    Reads Skeleton_Settings.numNeckBones, creates one FK circle control per
    neck bone (each zeroed group parented under the previous control), plus
    the head control, wires the head/neck fkOrientation space systems, and
    constrains the driver joints to the controls. Handles any neck bone
    count uniformly (the original duplicated this logic for 1, 2 and 3
    bones; driver names assume a single-digit count, e.g. driver_neck_01).
    """
    numNeckBones = cmds.getAttr("Skeleton_Settings.numNeckBones")
    #create one FK control per neck bone, snapped to its driver joint
    neckControls = []
    neckControlGrps = []
    for i in range(int(numNeckBones)):
        jointName = "neck_0" + str(i + 1)
        control, controlGrp = self._createFkSnapControl(
            "circle", 25, jointName + "_fk_anim", "driver_" + jointName, -90)
        neckControls.append(control)
        neckControlGrps.append(controlGrp)
    #chain the neck controls: each group lives under the previous control
    for i in range(1, len(neckControls)):
        cmds.parent(neckControlGrps[i], neckControls[i - 1])
    neckControl = neckControls[0]
    neckControlGrp = neckControlGrps[0]
    neckControlEnd = neckControls[-1]
    #create the FK control for the head
    headControl, headControlGrp = self._createFkSnapControl(
        "circle", 30, "head_fk_anim", "driver_head", 90, rz = -35)
    #setup head and neck orientation (space switching on fkOrientation)
    orientNodes = self.setupHeadOrientation(neckControlEnd, headControl)
    neckOrientNodes = self.setupNeckOrientation(neckControl)
    #hook into spine: head follows the last neck control, neck base follows
    #the last spine driver joint
    cmds.parent(headControlGrp, orientNodes[0])
    cmds.parent(orientNodes[0], neckControlEnd)
    numSpineBones = self.getSpineJoints()
    cmds.parent(neckOrientNodes[0], neckControlGrp)
    cmds.parent(neckControl, neckOrientNodes[0])
    cmds.parentConstraint("driver_spine_0" + str(len(numSpineBones)), neckControlGrp, mo = True)
    #constrain driver joints to controls and hook up scale
    for i, control in enumerate(neckControls):
        driver = "driver_neck_0" + str(i + 1)
        cmds.parentConstraint(control, driver, mo = True)
        cmds.connectAttr(control + ".scale", driver + ".scale")
    cmds.parentConstraint(headControl, "driver_head", mo = True)
    cmds.connectAttr(headControl + ".scale", "driver_head.scale")
    #lock attrs, color controls (neck = 18, head = 17), and clean up hierarchy
    for control in neckControls + [headControl]:
        cmds.setAttr(control + ".v", lock = True, keyable = False)
        cmds.setAttr(control + ".overrideEnabled", 1)
    for control in neckControls:
        cmds.setAttr(control + ".overrideColor", 18)
    cmds.setAttr(headControl + ".overrideColor", 17)
    masterGrp = cmds.group(empty = True, name = "head_sys_grp")
    cmds.parent(orientNodes[4], masterGrp)
    cmds.parent(neckOrientNodes[3], masterGrp)
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
def setupHeadOrientation(self, neckControl, headControl):
    """Create the head fkOrientation space-switch system.

    Builds four orient target groups (neck, shoulder, body, world) snapped
    to driver_head, orient-constrains a master group to all four, and adds
    an enum ``fkOrientation`` attribute on *headControl* whose set-driven
    keys make exactly one constraint weight active per mode.

    Returns [orientMaster, orientNeck, orientShoulder, orientBody, orientWorld].
    """
    #create head orient target groups (neck, shoulder, body, world)
    orientMaster = cmds.group(empty = True, name = "head_fk_orient_master")
    cmds.delete(cmds.parentConstraint("driver_head", orientMaster)[0])
    orientNeck = cmds.duplicate(orientMaster, po = True, name = "head_fk_orient_neck")[0]
    orientShoulder = cmds.duplicate(orientMaster, po = True, name = "head_fk_orient_shoulder")[0]
    orientBody = cmds.duplicate(orientMaster, po = True, name = "head_fk_orient_body")[0]
    orientWorld = cmds.duplicate(orientMaster, po = True, name = "head_fk_orient_world")[0]
    #order matters: enum index i maps to weight attribute Wi
    orientTargets = [orientNeck, orientShoulder, orientBody, orientWorld]
    fkHeadOrientConstraint = cmds.orientConstraint(orientTargets, orientMaster)[0]
    numSpineBones = self.getSpineJoints()
    cmds.parent(orientNeck, neckControl)
    cmds.parent(orientShoulder, "driver_spine_0" + str(len(numSpineBones)))
    cmds.parent(orientBody, "body_anim")
    #add the fk orient attr to the head control
    cmds.select(headControl)
    cmds.addAttr(longName= "fkOrientation", at = 'enum', en = "neck:shoulder:body:world:", keyable = True)
    #setup sdks: for each enum mode, key the matching weight to 1 and the
    #rest to 0 (the original spelled this out four times)
    driver = headControl + ".fkOrientation"
    weightAttrs = [fkHeadOrientConstraint + "." + target + "W" + str(i)
                   for i, target in enumerate(orientTargets)]
    for mode in range(len(orientTargets)):
        cmds.setAttr(driver, mode)
        for i, weightAttr in enumerate(weightAttrs):
            cmds.setAttr(weightAttr, 1 if i == mode else 0)
        cmds.setDrivenKeyframe(weightAttrs, cd = driver, itt = "linear", ott = "linear")
    cmds.setAttr(driver, 0)
    return [orientMaster, orientNeck, orientShoulder, orientBody, orientWorld]
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
def setupNeckOrientation(self, neckControl):
    """Create the neck fkOrientation space-switch system.

    Builds three orient target groups (shoulder, body, world) snapped to
    driver_neck_01, orient-constrains a master group to all three, and adds
    an enum ``fkOrientation`` attribute on *neckControl* whose set-driven
    keys make exactly one constraint weight active per mode.

    Returns [orientMaster, orientShoulder, orientBody, orientWorld].
    """
    #create neck orient target groups (shoulder, body, world)
    orientMaster = cmds.group(empty = True, name = "neck_fk_orient_master")
    cmds.delete(cmds.parentConstraint("driver_neck_01", orientMaster)[0])
    orientShoulder = cmds.duplicate(orientMaster, po = True, name = "neck_fk_orient_shoulder")[0]
    orientBody = cmds.duplicate(orientMaster, po = True, name = "neck_fk_orient_body")[0]
    orientWorld = cmds.duplicate(orientMaster, po = True, name = "neck_fk_orient_world")[0]
    #order matters: enum index i maps to weight attribute Wi
    orientTargets = [orientShoulder, orientBody, orientWorld]
    fkNeckOrientConstraint = cmds.orientConstraint(orientTargets, orientMaster)[0]
    numSpineBones = self.getSpineJoints()
    cmds.parent(orientShoulder, "driver_spine_0" + str(len(numSpineBones)))
    cmds.parent(orientBody, "body_anim")
    #add the fk orient attr to the neck control
    cmds.select(neckControl)
    cmds.addAttr(longName= "fkOrientation", at = 'enum', en = "shoulder:body:world:", keyable = True)
    #setup sdks: for each enum mode, key the matching weight to 1 and the
    #rest to 0 (the original spelled this out three times)
    driver = neckControl + ".fkOrientation"
    weightAttrs = [fkNeckOrientConstraint + "." + target + "W" + str(i)
                   for i, target in enumerate(orientTargets)]
    for mode in range(len(orientTargets)):
        cmds.setAttr(driver, mode)
        for i, weightAttr in enumerate(weightAttrs):
            cmds.setAttr(weightAttr, 1 if i == mode else 0)
        cmds.setDrivenKeyframe(weightAttrs, cd = driver, itt = "linear", ott = "linear")
    cmds.setAttr(driver, 0)
    return [orientMaster, orientShoulder, orientBody, orientWorld]
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
def rigLeafJoints(self):
    """Build a simple anim control for every 'leaf' extra joint.

    Scans Skeleton_Settings for ``extraJoint*`` attributes whose encoded
    value (``parent/type/name (FLAGS)``) marks them as type "leaf", creates
    a sphere-ish curve control with zeroing/space-switcher groups for each,
    constrains the driver joint to the control, and locks channels per the
    flag suffix ("TR" = translate+rotate free, "T" = translate only,
    "R" = rotate only).

    Returns the list of top parent groups created (one per leaf joint).
    """
    createdControls = []
    #NOTE: the original reused the names `attrs` and `attr` for three
    #different things inside this loop; distinct names avoid the shadowing.
    settingsAttrs = cmds.listAttr("Skeleton_Settings")
    for settingsAttr in settingsAttrs:
        if settingsAttr.find("extraJoint") != 0:
            continue
        attribute = cmds.getAttr("Skeleton_Settings." + settingsAttr, asString = True)
        jointType = attribute.partition("/")[2].partition("/")[0]
        name = attribute.rpartition("/")[2]
        parent = attribute.partition("/")[0]
        #strip any flag suffix off the parent name, e.g. "spine_03 (TR)"
        if parent.find("(") != -1:
            parent = parent.partition(" (")[0]
        if jointType != "leaf":
            continue
        #the suffix in parens encodes which channels stay unlocked
        channelFlags = name.partition("(")[2].partition(")")[0]
        name = name.partition(" (")[0]
        #create the control (unit sphere outline drawn as a degree-1 curve)
        control = cmds.curve(name = (name + "_anim"), d = 1, p = [(0, 0, 1), (0, 0.5, 0.866025), (0, 0.866025, 0.5), (0, 1, 0), (0, 0.866025, -0.5), (0, 0.5, -0.866025), (0, 0, -1), (0, -0.5, -0.866025), (0, -0.866025, -0.5), (0, -1, 0), (0, -0.866025, 0.5), (0, -0.5, 0.866025), (0, 0, 1), (0.707107, 0, 0.707107), (1, 0, 0), (0.707107, 0, -0.707107), (0, 0, -1), (-0.707107, 0, -0.707107), (-1, 0, 0), (-0.866025, 0.5, 0), (-0.5, 0.866025, 0), (0, 1, 0), (0.5, 0.866025, 0), (0.866025, 0.5, 0), (1, 0, 0), (0.866025, -0.5, 0), (0.5, -0.866025, 0), (0, -1, 0), (-0.5, -0.866025, 0), (-0.866025, -0.5, 0), (-1, 0, 0), (-0.707107, 0, 0.707107), (0, 0, 1)])
        #scale up, then freeze so the channels read zero
        for axis in ("x", "y", "z"):
            cmds.setAttr(control + ".s" + axis, 9)
        cmds.makeIdentity(control, t = 1, r = 1, s = 1, apply = True)
        #position control on the driver joint (snap = constrain + delete)
        cmds.delete(cmds.parentConstraint("driver_" + name, control)[0])
        #create the control group, snapped to the same spot
        ctrlGrp = cmds.group(empty = True, name = (name + "_anim_grp"))
        cmds.delete(cmds.parentConstraint("driver_" + name, ctrlGrp)[0])
        #create space switcher groups and the top parent group
        spaceSwitcherFollow = cmds.duplicate(ctrlGrp, po = True, name = (name + "_anim_space_switcher_follow"))[0]
        spaceSwitcher = cmds.duplicate(ctrlGrp, po = True, name = (name + "_anim_space_switcher"))[0]
        topParent = cmds.duplicate(ctrlGrp, po = True, name = (name + "_parent_grp"))[0]
        #assemble hierarchy: topParent > follow > switcher > grp > control
        cmds.parent(spaceSwitcher, spaceSwitcherFollow)
        cmds.parent(spaceSwitcherFollow, topParent)
        cmds.parent(ctrlGrp, spaceSwitcher)
        cmds.parent(control, ctrlGrp)
        #constrain driver joint to control
        cmds.parentConstraint(control, "driver_" + name)
        cmds.connectAttr(control + ".scale", "driver_" + name + ".scale")
        #lock channels depending on the control's flags (unknown flags lock nothing)
        lockMap = {
            "TR": [".v"],
            "T": [".v", ".rx", ".ry", ".rz"],
            "R": [".v", ".tx", ".ty", ".tz"],
        }
        for lockAttr in lockMap.get(channelFlags, []):
            cmds.setAttr(control + lockAttr, lock = True, keyable = False)
        #parent constrain the topParent to the parent of the control
        cmds.parentConstraint("driver_" + parent, topParent, mo = True)
        #color the control
        cmds.setAttr(control + ".overrideEnabled", 1)
        cmds.setAttr(control + ".overrideColor", 18)
        #hand the topParent back to the caller for hierarchy cleanup
        createdControls.append(topParent)
    return createdControls
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
def rigJiggleJoints(self):
    """Build a hair-dynamics driven 'jiggle' rig for every extraJoint of type
    'jiggle' declared on the Skeleton_Settings node.

    Each Skeleton_Settings attr named "extraJoint*" holds a string of the form
    "parent/jointType/name".  For every entry whose jointType is "jiggle" this:
      - duplicates the driver joint into a 2-joint chain (start/end),
      - builds a hairSystem + follicle whose output curve drives the chain
        through a spline-IK handle,
      - duplicates the chain into an "ANIM_" chain for manual animation and
        skins the input curve to it,
      - creates a square control carrying all the dynamic attrs (stiffness,
        damping, gravity, collisions, ...) wired into the hairSystem,
      - adds a real-time tracking expression so dynamics evaluate per frame,
      - groups everything under "dynHairChain" and constrains the original
        driver joint to the dynamic chain.

    Returns:
        list: the "<name>_jiggle_jointsGrp" top-level group created per jiggle
        joint, for the caller to parent into the rig hierarchy.
    """
    createdControls = []
    # find the extraJoint attrs on the skeleton settings node
    attrs = cmds.listAttr("Skeleton_Settings")
    for attr in attrs:
        if attr.find("extraJoint") == 0:
            attribute = cmds.getAttr("Skeleton_Settings." + attr, asString = True)
            # attr value format: "parent/jointType/name"
            jointType = attribute.partition("/")[2].partition("/")[0]
            name = attribute.rpartition("/")[2]
            parent = attribute.partition("/")[0]
            if jointType == "jiggle":
                # duplicate the driver joint to create the 2-joint dynamic chain
                jiggleStart = cmds.duplicate("driver_" + name, po = True, name = "rig_" + name + "_start")[0]
                cmds.parent(jiggleStart, world = True)
                jiggleEnd = cmds.duplicate(jiggleStart, po = True, name = "rig_" + name + "_end")[0]
                cmds.parent(jiggleEnd, jiggleStart)
                # move jiggleEnd down a bit in the up axis so the chain has length
                scaleFactor = self.getScaleFactor()
                jointPos = cmds.xform(jiggleStart, q = True, ws = True, t = True)
                cmds.xform(jiggleEnd, ws = True, t = (jointPos[0], jointPos[1], (jointPos[2] - (25 * scaleFactor))))

                #@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@#
                #Create curve on joint chain
                #@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@#
                joints = [jiggleStart, jiggleEnd]
                positions = []
                # get the world space positions of each joint, and create a
                # degree-1 curve through those positions via MEL
                for jnt in joints:
                    positions.append(cmds.xform(jnt, q = True, ws = True, t = True))
                createCurveCommand = "curve -d 1"
                for pos in positions:
                    createCurveCommand += " -p " + str(pos[0]) + " " + str(pos[1]) + " " + str(pos[2])
                for i in range(int(len(positions))):
                    createCurveCommand += " -k " + str(i)
                curve = mel.eval(createCurveCommand)
                curve = cmds.rename(curve, name + "_dynCurve")
                cmds.setAttr(curve + ".v", 0)

                #@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@#
                #Create hair system
                #@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@#
                cmds.select(curve)
                # create the hair system and make the stiffness uniform
                # (one hairSystem per jiggle joint; previous dead-code branch removed)
                hairSys = cmds.createNode("hairSystem")
                cmds.removeMultiInstance(hairSys + ".stiffnessScale[1]", b = True)
                cmds.setAttr(hairSys + ".clumpWidth", 0.0)
                cmds.connectAttr("time1.outTime", hairSys + ".currentTime")
                hairSysParent = cmds.listRelatives(hairSys, parent = True)
                hairSysParent = cmds.rename(hairSysParent, name + "_hairSystem")
                cmds.setAttr(hairSysParent + ".v", 0)
                # renaming the transform renamed the shape too; track the shape name
                hairSys = name + "_hairSystemShape"
                # create the hair follicle
                hair = cmds.createNode("follicle")
                cmds.setAttr(hair + ".parameterU", 0)
                cmds.setAttr(hair + ".parameterV", 0)
                hairDag = cmds.listRelatives(hair, p = True)[0]
                hairDag = cmds.rename(hairDag, name + "_follicle")
                hair = name + "_follicleShape"
                cmds.setAttr(hairDag + ".v", 0)
                cmds.setAttr(hair + ".startDirection", 1)
                # if the curve has fewer than 3 CVs the follicle must use degree 1
                curveCVs = cmds.getAttr(curve + ".cp", size = True)
                if curveCVs < 3:
                    cmds.setAttr(hair + ".degree", 1)
                # parent the curve to the follicle and connect the curve's
                # worldSpace[0] to the follicle startPosition
                cmds.parent(curve, hairDag, relative = True)
                cmds.connectAttr(curve + ".worldSpace[0]", hair + ".startPosition")
                # connect the hair follicle to the hair system
                cmds.connectAttr(hair + ".outHair", hairSys + ".inputHair[0]")
                cmds.connectAttr(hairSys + ".outputHair[0]", hair + ".currentPosition")
                # create a new curve and connect the follicle's outCurve attr to
                # it: this is the simulated curve the spline IK will follow
                crv = cmds.createNode("nurbsCurve")
                crvParent = cmds.listRelatives(crv, parent = True)[0]
                crvParent = cmds.rename(crvParent, name + "_track_rt_curve")
                crv = name + "_track_rt_curveShape"
                cmds.setAttr(crvParent + ".v", 0)
                cmds.connectAttr(hair + ".outCurve", crv + ".create")
                # set the hair follicle attrs: lock both ends, use rest pose
                cmds.setAttr(hairDag + ".pointLock", 3)
                cmds.setAttr(hairDag + ".restPose", 1)
                cmds.select(hairSys)

                #@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@#
                #Create Spline Handle for the selected chain and the duplicated curve. the original is driven by hair
                #@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@#
                ikNodes = cmds.ikHandle(sol = "ikSplineSolver", ccv = False, pcv = False, snc = True, sj = jiggleStart, ee = jiggleEnd, c = crv)[0]
                cmds.setAttr(ikNodes + ".v", 0)
                ikNodes = cmds.rename(ikNodes, name + "_dynChain_ikHandle")

                #@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@#
                #Create a duplicate joint chain for manual animation
                #@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@#
                dupeChain = cmds.duplicate(jiggleStart, rr = True, rc = True)
                dupeStartJoint = dupeChain[0]
                dupeJoints = cmds.listRelatives(dupeStartJoint, ad = True)
                joints = cmds.listRelatives(jiggleStart, ad = True)
                # rename duped joints and connect real joints' rotation to them;
                # non-joint children of the duplicate (IK effectors etc.) are deleted
                for i in range(int(len(joints))):
                    if cmds.objectType(dupeJoints[i], isType = 'joint'):
                        cmds.rename(dupeJoints[i], "ANIM_" + joints[i])
                        cmds.connectAttr("ANIM_" + joints[i] + ".r", joints[i] + ".r", force = True)
                    else:
                        cmds.delete(dupeJoints[i])
                # connect up start joint to ANIM start joint
                cmds.connectAttr(dupeStartJoint + ".t", jiggleStart + ".t")
                cmds.connectAttr(dupeStartJoint + ".r", jiggleStart + ".r")
                cmds.connectAttr(dupeStartJoint + ".s", jiggleStart + ".s")
                dupeStartJoint = cmds.rename(dupeStartJoint, "ANIM_" + jiggleStart)

                #@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@#
                #Create skinCluster between duplicate curve and animation joint chain(dupe chain)
                #@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@#
                cmds.select(dupeStartJoint, hi = True)
                dupeSkel = cmds.ls(sl = True, type = "joint")
                cmds.select(curve)
                cmds.select(dupeSkel, add = True)
                cmds.skinCluster(tsb = True, mi = 3, dr = 4)

                #@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@#
                #Create the control that has all of our dynamic attrs
                #@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@#
                control = self.createControl("square", 15, name + "_anim")
                constraint = cmds.parentConstraint(jiggleStart, control)[0]
                cmds.delete(constraint)
                ctrlGrp = cmds.group(empty = True, name = control + "_grp")
                constraint = cmds.parentConstraint(jiggleStart, ctrlGrp)[0]
                cmds.delete(constraint)
                cmds.parent(control, ctrlGrp)
                cmds.parentConstraint(control, dupeStartJoint)
                # lock scale/visibility on the control
                for channel in (".sx", ".sy", ".sz", ".v"):
                    cmds.setAttr(control + channel, lock = True, keyable = False)
                # add the dynamic attrs (separator attrs are locked so they show
                # as headers in the channel box)
                cmds.select(control)
                cmds.addAttr(ln = "___DYNAMICS___", at = "double", keyable = True)
                cmds.setAttr(control + ".___DYNAMICS___", lock = True)
                cmds.addAttr(ln = "chainAttach", at = "enum", en = "No Attach:Base:Tip:Both End:", dv = 1, keyable = True)
                cmds.addAttr(ln = "chainStartEnvelope", at = "double", min = 0, max = 1, dv = 1, keyable = True)
                cmds.addAttr(ln = "chainStartFrame", at = "double", dv = 1, keyable = True)
                cmds.addAttr(ln = "___BEHAVIOR___", at = "double", keyable = True)
                cmds.setAttr(control + ".___BEHAVIOR___", lock = True)
                cmds.addAttr(ln = "chainStiffness", at = "double", min = 0, dv = .1, keyable = True)
                cmds.addAttr(ln = "chainDamping", at = "double", min = 0, dv = 0.2, keyable = True)
                cmds.addAttr(ln = "chainGravity", at = "double", min = 0, dv = 1, keyable = True)
                cmds.addAttr(ln = "chainIteration", at = "long", min = 0, dv = 1, keyable = True)
                cmds.addAttr(ln = "___COLLISIONS___", at = "double", keyable = True)
                cmds.setAttr(control + ".___COLLISIONS___", lock = True)
                cmds.addAttr(ln = "chainCollide", at = "bool", dv = 0, keyable = True)
                cmds.addAttr(ln = "chainWidthBase", at = "double", min = 0, dv = 1, keyable = True)
                cmds.addAttr(ln = "chainWidthExtremity", at = "double", min = 0, dv = 1, keyable = True)
                cmds.addAttr(ln = "chainCollideGround", at = "bool", dv = 0, keyable = True)
                cmds.addAttr(ln = "chainCollideGroundHeight", at = "double", dv = 0, keyable = True)
                # connect the control attrs into the hair system / follicle / IK
                cmds.connectAttr(control + ".chainStartEnvelope", ikNodes + ".ikBlend")
                cmds.connectAttr(control + ".chainAttach", hair + ".pointLock")
                cmds.connectAttr(control + ".chainStartFrame", hairSys + ".startFrame")
                cmds.connectAttr(control + ".chainStiffness", hairSys + ".stiffness")
                cmds.connectAttr(control + ".chainDamping", hairSys + ".damp")
                cmds.connectAttr(control + ".chainGravity", hairSys + ".gravity")
                cmds.connectAttr(control + ".chainIteration", hairSys + ".iterations")
                cmds.connectAttr(control + ".chainCollide", hairSys + ".collide")
                cmds.connectAttr(control + ".chainWidthBase", hairSys + ".clumpWidth")
                cmds.connectAttr(control + ".chainWidthExtremity", hairSys + ".clumpWidthScale[1].clumpWidthScale_FloatValue")
                cmds.connectAttr(control + ".chainCollideGround", hairSys + ".collideGround")
                cmds.connectAttr(control + ".chainCollideGroundHeight", hairSys + ".groundHeight")

                #@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@#
                #Create the expression for real time
                #@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@#
                # the +attr -attr pairs below are no-ops numerically; they exist
                # only to create dependencies so the expression dirties when
                # those attrs change
                track_RealTime = cmds.spaceLocator(name = name + "_track_rt_loc")[0]
                cmds.pointConstraint(dupeSkel[len(dupeSkel) -1], track_RealTime)
                connections = cmds.listConnections(hairSys + ".currentTime", p = True, c = True)
                cmds.disconnectAttr(connections[1], connections[0])
                expressionString = "if(frame!= " + hairSys + ".startFrame)\n\t" + hairSys + ".currentTime = " + hairSys + ".currentTime + 1 + " + track_RealTime + ".tx - " + track_RealTime + ".tx + " + track_RealTime + ".ty - " + track_RealTime + ".ty + " + track_RealTime + ".tz - " + track_RealTime + ".tz + " + control + ".chainWidthBase - "+ control + ".chainWidthBase + "+ control + ".chainWidthExtremity - "+ control + ".chainWidthExtremity + " + control + ".chainGravity - "+ control + ".chainGravity;\n" +"else\n\t" + hairSys + ".currentTime = " + hairSys + ".startFrame;"
                cmds.expression(name = "EXP_" + hairSys + "_TRACK_RealTime", string = expressionString)
                cmds.setAttr(track_RealTime + ".v", 0)

                #@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@#
                #Set Defaults
                #@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@#
                cmds.setAttr(hairSys + ".drawCollideWidth", 1)
                cmds.setAttr(hairSys + ".widthDrawSkip", 0)
                cmds.setAttr(hair + ".degree", 1)
                cmds.parentConstraint("driver_" + parent, ctrlGrp, mo = True)
                if cmds.objExists("dynHairChain") == False:
                    cmds.group(empty = True, name = "dynHairChain")
                hairControlNodes = [ikNodes, hairSys, hair, track_RealTime, ctrlGrp, crvParent]
                if cmds.objExists(jiggleStart + "_HairControls") == False:
                    group = cmds.group(hairControlNodes, name = jiggleStart + "_HairControls")
                    # BUGFIX: this parent call used to run unconditionally, which
                    # raised NameError on `group` when the group already existed
                    cmds.parent(group, "dynHairChain")
                else:
                    # BUGFIX: track_RealTime was previously omitted here, leaving
                    # the tracking locator orphaned in the scene root
                    cmds.parent(hairControlNodes, jiggleStart + "_HairControls")

                #@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@#
                #Cleanup
                #@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@#
                jointsGrp = cmds.group(empty = True, name = name + "_jiggle_jointsGrp")
                createdControls.append(jointsGrp)
                cmds.parent([jiggleStart, dupeStartJoint], jointsGrp)
                cmds.setAttr(control + ".overrideEnabled", 1)
                cmds.setAttr(control + ".overrideColor", 18)
                # constrain driver joint to the dynamic chain
                cmds.parentConstraint(jiggleStart, "driver_" + name, mo = True)
    # return top level group(s)
    return createdControls
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
def rigCustomJointChains(self):
#find attrs on the skeleton settings node
createdControls = []
rootControl = ""
attrs = cmds.listAttr("Skeleton_Settings")
for attr in attrs:
if attr.find("extraJoint") == 0:
attribute = cmds.getAttr("Skeleton_Settings." + attr, asString = True)
jointType = attribute.partition("/")[2].partition("/")[0]
name = attribute.rpartition("/")[2]
parent = attribute.partition("/")[0]
if jointType == "chain":
numJointsInChain = name.partition("(")[2].partition(")")[0]
name = name.partition(" (")[0]
#@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@#
#FK RIG
#@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@#
fkJoints = []
frameCacheNodes = []
fkRootGrp = ""
for i in range(int(numJointsInChain)):
jointNum = i + 1
if jointNum == 1:
firstControl = "fk_" + name + "_0" + str(jointNum) + "_anim"
#create and position the joint
if cmds.objExists("rig_fk_" + name + "_0" + str(jointNum)):
cmds.delete("rig_fk_" + name + "_0" + str(jointNum))
cmds.select(clear = True)
joint = cmds.joint(name = "rig_fk_" + name + "_0" + str(jointNum))
cmds.select(clear = True)
fkJoints.append(joint)
constraint = cmds.parentConstraint("driver_" + name + "_0" + str(jointNum), joint)[0]
cmds.delete(constraint)
#create the control and position
control = self.createControl("circle", 15, "fk_" + name + "_0" + str(jointNum) + "_anim")
cmds.setAttr(control + ".rx", 90)
cmds.makeIdentity(control, t = 1, r = 1, s = 1, apply = True)
constraint = cmds.parentConstraint(joint, control)[0]
cmds.delete(constraint)
cmds.makeIdentity(control, t = 0, r = 1, s = 0, apply = True)
#cmds.setAttr(control + ".rz", -90)
cmds.makeIdentity(control, t = 1, r = 1, s = 1, apply = True)
#create the control grp and parent the control to the group
ctrlGrp = cmds.group(empty = True, name = "fk_" + name + "_0" + str(jointNum) + "_grp")
if i == 0:
fkRootGrp = ctrlGrp
constraint = cmds.parentConstraint(joint, ctrlGrp)[0]
cmds.delete(constraint)
cmds.parent(control, ctrlGrp)
cmds.makeIdentity(control, t = 1, r = 1, s = 1, apply = True)
#duplicate the ctrl grp for the lag mode
lagGrp = cmds.duplicate(ctrlGrp, po = True, name = "fk_" + name + "_0" + str(jointNum) + "_lag_grp")[0]
cmds.parent(lagGrp, ctrlGrp)
cmds.parent(control, lagGrp)
#color the control
cmds.setAttr(control + ".overrideEnabled", 1)
cmds.setAttr(control + ".overrideColor", 18)
if jointNum != 1:
cmds.setAttr(control + ".sx", lock = True, keyable = False)
cmds.setAttr(control + ".sy", lock = True, keyable = False)
cmds.setAttr(control + ".sz", lock = True, keyable = False)
else:
#aliasAttr one of the scale axis and connect the other two to that one
cmds.aliasAttr("global_scale", control + ".scaleZ")
cmds.connectAttr(control + ".scaleZ", control + ".scaleX")
cmds.connectAttr(control + ".scaleZ", control + ".scaleY")
cmds.setAttr(control + ".sx", lock = True, keyable = False)
cmds.setAttr(control + ".sy", lock = True, keyable = False)
cmds.setAttr(control + ".v", lock = True, keyable = False)
#parent the joint to the control
cmds.parent(joint, control)
#TEMP!
cmds.parentConstraint(joint, "driver_" + name + "_0" + str(jointNum))
#add attr to root joint of chain for turning on "lag" mode
if i == 0:
rootControl = ctrlGrp
cmds.select(control)
cmds.addAttr(longName='lagMode', defaultValue=0, minValue=0, maxValue=1, keyable = True)
cmds.addAttr(longName='lagValue', defaultValue= 3, minValue= 0, maxValue=100, keyable = False)
#setup lag mode node chain
frameCacheX = cmds.createNode("frameCache")
frameCacheX = cmds.rename(frameCacheX, name + "_frameCacheX")
frameCacheY = cmds.createNode("frameCache")
frameCacheY = cmds.rename(frameCacheY, name + "_frameCacheY")
frameCacheZ = cmds.createNode("frameCache")
frameCacheZ = cmds.rename(frameCacheZ, name + "_frameCacheZ")
frameCacheNodes.append(frameCacheX)
frameCacheNodes.append(frameCacheY)
frameCacheNodes.append(frameCacheZ)
#create a switcher node
switchNode = cmds.shadingNode("multiplyDivide", asUtility = True, name = name + "_switcherNode")
frameCacheNodes.append(switchNode)
cmds.connectAttr(control + ".rotateX", switchNode + ".input1X")
cmds.connectAttr(control + ".lagMode", switchNode + ".input2X")
cmds.connectAttr(control + ".rotateY", switchNode + ".input1Y")
cmds.connectAttr(control + ".lagMode", switchNode + ".input2Y")
cmds.connectAttr(control + ".rotateZ", switchNode + ".input1Z")
cmds.connectAttr(control + ".lagMode", switchNode + ".input2Z")
cmds.connectAttr(switchNode + ".outputX", frameCacheX + ".stream")
cmds.connectAttr(switchNode + ".outputY", frameCacheY + ".stream")
cmds.connectAttr(switchNode + ".outputZ", frameCacheZ + ".stream")
mainControl = control
#setup FK hierarchy
lagValue = cmds.getAttr(mainControl + ".lagValue")
if i != 0:
cmds.parent(ctrlGrp, lastControl)
#connect framecache results to lag Grps
mode = "past"
cmds.connectAttr(frameCacheX + "." + mode + "[" + str(int(abs(lagValue)) * (i + 1)) + "]", lagGrp + ".rotateX")
cmds.connectAttr(frameCacheY + "." + mode + "[" + str(int(abs(lagValue)) * (i + 1)) + "]", lagGrp + ".rotateY")
cmds.connectAttr(frameCacheZ + "." + mode + "[" + str(int(abs(lagValue)) * (i + 1)) + "]", lagGrp + ".rotateZ")
lastControl = control
#add nodes to container
lagContainer = cmds.container(name = (name + "_lag_container"))
for node in frameCacheNodes:
cmds.container(lagContainer, edit = True, addNode = node, includeNetwork = True, ihb = True)
#constrain root of fk chain to driver's parent joint
parent = cmds.listRelatives("driver_" + name + "_01", parent = True)[0]
cmds.parentConstraint(parent, rootControl, mo = True)
#@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@#
#IK RIG
#@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@#
ikJoints = []
clusterControls = []
for i in range(int(numJointsInChain)):
jointNum = i + 1
#create and position the joint
if cmds.objExists("rig_ik_" + name + "_0" + str(jointNum)):
cmds.delete("rig_ik_" + name + "_0" + str(jointNum))
cmds.select(clear = True)
joint = cmds.joint(name = "rig_ik_" + name + "_0" + str(jointNum))
cmds.select(clear = True)
ikJoints.append(joint)
constraint = cmds.parentConstraint("driver_" + name + "_0" + str(jointNum), joint)[0]
cmds.delete(constraint)
#recreate the joint heirarchy
if i != 0:
cmds.parent(joint, lastJoint)
lastJoint = joint
startJoint = ikJoints[0]
endJoint = ikJoints[(len(ikJoints) - 1)]
cmds.makeIdentity(startJoint, r = 1, t = 0, s = 0, apply = True)
#create the spline IK
ikNodes = cmds.ikHandle(sj = startJoint, ee = endJoint, sol = "ikSplineSolver", createCurve = True, simplifyCurve = False, parentCurve = False, name = str(ikJoints[0]) + "_splineIK")
ikHandle = ikNodes[0]
ikCurve = ikNodes[2]
ikCurve = cmds.rename(ikCurve, name + "_splineIK_curve")
cmds.setAttr(ikCurve + ".inheritsTransform", 0)
cmds.setAttr(ikHandle + ".v", 0)
cmds.setAttr(ikCurve + ".v", 0)
#create the three joints to skin the curve to
if int(numJointsInChain) <= 6:
#3 joints for the curve
skinJoints = 3
if int(numJointsInChain) >= 7:
#add 1 joint to the curve every odd number
oddJoints = []
for i in range(7, int(numJointsInChain)):
if i % 2 != 0:
oddJoints.append(i)
#now we have a list of the total number of odd joints in our numJointsInChain. Take the length of the list + 3 to get the joints to create for our curve
skinJoints = 3 + len(oddJoints)
#create the joints to skin to the curve
curveJoints = []
if skinJoints == 3:
botJoint = cmds.duplicate(startJoint, name = name + "_splineIK_skin_joint_1", parentOnly = True)[0]
topJoint = cmds.duplicate(endJoint, name = name + "_splineIK_skin_joint_2", parentOnly = True)[0]
midJoint = cmds.duplicate(topJoint, name = name + "_splineIK_skin_joint_3", parentOnly = True)[0]
cmds.parent([botJoint, topJoint,midJoint], world = True)
constraint = cmds.pointConstraint([botJoint, topJoint], midJoint)[0]
cmds.delete(constraint)
curveJoints.append(botJoint)
curveJoints.append(topJoint)
curveJoints.append(midJoint)
else:
for i in range(skinJoints):
if i == 0:
joint = cmds.duplicate(ikJoints[i], name = name + "_splineIK_skin_joint_" + str(i), parentOnly = True)[0]
curveJoints.append(joint)
else:
joint = cmds.duplicate(ikJoints[i + i], name = name + "_splineIK_skin_joint_" + str(i), parentOnly = True)[0]
curveJoints.append(joint)
#parent all of the joints to the world
for joint in curveJoints:
try:
cmds.parent(joint, world = True)
except:
print joint
pass
#skin the joints to the curve
cmds.select(curveJoints)
cmds.skinCluster( curveJoints, ikCurve, toSelectedBones = True )
#find number of CVs on created curve
numSpans = cmds.getAttr(ikCurve + ".spans")
degree = cmds.getAttr(ikCurve + ".degree")
numCVs = numSpans + degree
#for each cv, create a cluster, then create the control
clusters = []
for cv in range(int(numCVs)):
cmds.select(ikCurve + ".cv[" + str(cv) + "]" )
cluster = cmds.cluster(name = name + "_cluster_" + str(cv))
clusters.append(cluster)
#cleanup clusters list
cmds.delete(clusters[1])
cmds.delete(clusters[(len(clusters) - 2)])
clusters.pop(1)
clusters.pop(len(clusters) - 2)
clusterNodes = []
ikAnimGrps = []
#create the controls for each cluster
for i in range(int(len(clusters))):
cluster = cmds.rename(clusters[i][1], name + "_cluster_" + str(i))
clusterNodes.append(cluster)
cmds.setAttr(cluster + ".v", 0)
control = cmds.spaceLocator(name = name + "_cv_" + str(i) + "_anim")[0]
constraint = cmds.parentConstraint(ikJoints[i], control)[0]
cmds.delete(constraint)
clusterControls.append(control)
#scale up the locator
scaleFactor = self.getScaleFactor()
shape = cmds.listRelatives(control, shapes = True)[0]
cmds.setAttr(shape + ".localScaleX", 15 * scaleFactor)
cmds.setAttr(shape + ".localScaleY", 15 * scaleFactor)
cmds.setAttr(shape + ".localScaleZ", 15 * scaleFactor)
ctrlGrp = cmds.group(empty = True, name = control + "_grp")
ikAnimGrps.append(ctrlGrp)
constraint = cmds.pointConstraint(ikJoints[i], ctrlGrp)[0]
cmds.delete(constraint)
cmds.parent(control, ctrlGrp)
cmds.makeIdentity(control, t = 1, r = 1, s = 1, apply = True)
#point constrain the ctrlGrp to the corresponding joint
cmds.pointConstraint(ikJoints[i], ctrlGrp)
#connect the clusters translate to the control's so the cluster will move when the control does
cmds.connectAttr(control + ".translate", cluster + ".translate")
#color controls
cmds.setAttr(control + ".overrideEnabled", 1)
cmds.setAttr(control + ".overrideColor", 18)
#hookup stretch to joint scale
cmds.select(ikCurve)
curveInfoNode = cmds.arclen(cmds.ls(sl = True), ch = True )
originalLength = cmds.getAttr(curveInfoNode + ".arcLength")
#create the multiply/divide node that will get the scale factor
divideNode = cmds.shadingNode("multiplyDivide", asUtility = True, name = name + "_divideNode")
cmds.setAttr(divideNode + ".operation", 2)
cmds.setAttr(divideNode + ".input2X", originalLength)
#create the blendcolors node
blenderNode = cmds.shadingNode("blendColors", asUtility = True, name = name + "_blenderNode")
cmds.setAttr(blenderNode + ".color2R", 1)
#connect attrs
cmds.connectAttr(curveInfoNode + ".arcLength", divideNode + ".input1X")
for joint in ikJoints:
cmds.connectAttr(divideNode + ".outputX", joint + ".scaleX")
cmds.connectAttr(divideNode + ".outputX", joint + ".scaleY")
cmds.connectAttr(divideNode + ".outputX", joint + ".scaleZ")
#create the control curves for the ik curve joints
i = 1
ikControls = []
for joint in curveJoints:
if joint != curveJoints[0]:
control = self.createControl("circle", 25, name + "_ik_" + str(i) + "_anim")
cmds.setAttr(control + ".rx", 90)
cmds.makeIdentity(control, t = 1, r = 1, s = 1, apply = True)
ikControls.append(control)
#position
constraint = cmds.parentConstraint(joint, control)[0]
cmds.delete(constraint)
#create grp
controlGrp = cmds.group(empty = True, name = control + "_grp")
constraint = cmds.parentConstraint(joint, controlGrp)[0]
cmds.delete(constraint)
#setup hierarchy
cmds.parent(control, controlGrp)
cmds.parent(joint, control)
#color controls
cmds.setAttr(control + ".overrideEnabled", 1)
cmds.setAttr(control + ".overrideColor", 18)
#lock attrs
cmds.setAttr(control + ".sx", lock = True, keyable = False)
cmds.setAttr(control + ".sy", lock = True, keyable = False)
cmds.setAttr(control + ".sz", lock = True, keyable = False)
cmds.setAttr(control + ".v", lock = True, keyable = False)
else:
#find parent of base joint and constrain base joint to the parent
parent = cmds.listRelatives("driver_" + name + "_01", parent = True)[0]
#create a control for the base
control = self.createControl("circle", 30, name + "_ik_base_anim")
cmds.setAttr(control + ".rx", 90)
cmds.makeIdentity(control, t = 1, r = 1, s = 1, apply = True)
#position
constraint = cmds.parentConstraint(joint, control)[0]
cmds.delete(constraint)
#create grp
controlGrp = cmds.group(empty = True, name = control + "_grp")
constraint = cmds.parentConstraint(joint, controlGrp)[0]
cmds.delete(constraint)
#setup hierarchy
cmds.parent(control, controlGrp)
cmds.parent(joint, control)
#color controls
cmds.setAttr(control + ".overrideEnabled", 1)
cmds.setAttr(control + ".overrideColor", 17)
#lock attrs
cmds.aliasAttr("global_scale", control + ".scaleZ")
cmds.connectAttr(control + ".scaleZ", control + ".scaleX")
cmds.connectAttr(control + ".scaleZ", control + ".scaleY")
cmds.setAttr(control + ".sx", lock = True, keyable = False)
cmds.setAttr(control + ".sy", lock = True, keyable = False)
cmds.setAttr(control + ".v", lock = True, keyable = False)
#hook the base control grp to the chain's parent
cmds.parentConstraint(parent, controlGrp, mo = True)
i = i + 1
#parent the other IK grp controls under the base
for control in ikControls:
grp = control + "_grp"
cmds.parent(grp, name + "_ik_base_anim")
#tip control only:
#add attr to show clusters on tip control
tipControl = ikControls[len(ikControls) - 1]
cmds.select(tipControl)
cmds.addAttr(longName=("clusterControlVis"), at = 'bool', dv = 0, keyable = True)
for control in clusterControls:
cmds.connectAttr(tipControl + ".clusterControlVis", control + ".v")
cmds.setAttr(control + ".sx", lock = True, keyable = False)
cmds.setAttr(control + ".sy", lock = True, keyable = False)
cmds.setAttr(control + ".sz", lock = True, keyable = False)
cmds.setAttr(control + ".v", lock = True, keyable = False)
cmds.setAttr(control + ".rx", lock = True, keyable = False)
cmds.setAttr(control + ".ry", lock = True, keyable = False)
cmds.setAttr(control + ".rz", lock = True, keyable = False)
#set color for tip
cmds.setAttr(tipControl + ".overrideColor", 17)
#lock tip attrs
cmds.setAttr(tipControl + ".sx", lock = True, keyable = False)
cmds.setAttr(tipControl + ".sy", lock = True, keyable = False)
cmds.setAttr(tipControl + ".sz", lock = True, keyable = False)
cmds.setAttr(tipControl + ".v", lock = True, keyable = False)
#@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@#
#Dynamics RIG
#@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@#
dynamicJoints = []
for i in range(int(numJointsInChain)):
jointNum = i + 1
#create and position the joint
if cmds.objExists("rig_dyn_" + name + "_0" + str(jointNum)):
cmds.delete("rig_dyn_" + name + "_0" + str(jointNum))
cmds.select(clear = True)
joint = cmds.joint(name = "rig_dyn_" + name + "_0" + str(jointNum))
if joint.find("|") == 0:
joint = joint.partition("|")[2]
cmds.select(clear = True)
dynamicJoints.append(joint)
constraint = cmds.parentConstraint("driver_" + name + "_0" + str(jointNum), joint)[0]
cmds.delete(constraint)
#recreate the joint heirarchy
if i != 0:
cmds.parent(joint, lastJoint)
lastJoint = joint
#freeze rotations on joints
cmds.makeIdentity(dynamicJoints[0], t = False, r = True, scale = False, apply = True)
#Create curve on joint chain
positions = []
#get the world space positions of each joint, and create a curve using those positions
for i in range(int(len(dynamicJoints))):
pos = cmds.xform(dynamicJoints[i], q = True, ws = True, t = True)
positions.append(pos)
createCurveCommand = "curve -d 1"
for pos in positions:
xPos = pos[0]
yPos = pos[1]
zPos = pos[2]
createCurveCommand += " -p " + str(xPos) + " " + str(yPos) + " " + str(zPos)
for i in range(int(len(positions))):
createCurveCommand += " -k " + str(i)
curve = mel.eval(createCurveCommand)
curve = cmds.rename(curve, name + "_dynCurve")
cmds.setAttr(curve + ".v", 0)
#Create hair system
cmds.select(curve)
#find all hair systems in scene
hairSystems = cmds.ls(type = "hairSystem")
hairSys = ""
#create the hair system and make the stiffness uniform
madeHairCurve = True
if hairSys == "":
hairSys = cmds.createNode("hairSystem")
cmds.removeMultiInstance(hairSys + ".stiffnessScale[1]", b = True)
cmds.setAttr(hairSys + ".clumpWidth", 0.0)
cmds.connectAttr("time1.outTime", hairSys + ".currentTime")
hairSysParent = cmds.listRelatives(hairSys, parent = True)
hairSysParent = cmds.rename(hairSysParent, name + "_hairSystem")
cmds.setAttr(hairSysParent + ".v", 0)
hairSys = name + "_hairSystemShape"
#create the hair follicle
hair = cmds.createNode("follicle")
cmds.setAttr(hair + ".parameterU", 0)
cmds.setAttr(hair + ".parameterV", 0)
hairTransforms = cmds.listRelatives(hair, p = True)
hairDag = hairTransforms[0]
hairDag = cmds.rename(hairDag, name + "_follicle")
hair = name + "_follicleShape"
cmds.setAttr(hairDag + ".v", 0)
cmds.setAttr(hair + ".startDirection", 1)
#get the curve CVs and set follicle degree to 1 if CVs are less than 3
curveCVs = cmds.getAttr(curve + ".cp", size = True)
if curveCVs < 3:
cmds.setAttr(hair + ".degree", 1)
#parent the curve to the follicle and connect the curve's worldspace[0] to the follicle startPos
cmds.parent(curve, hairDag, relative = True)
cmds.connectAttr(curve + ".worldSpace[0]", hair + ".startPosition")
#connect the hair follicle to the hair system
cmds.connectAttr(hair + ".outHair", hairSys + ".inputHair[0]")
#create a new curve and connect the follicle's outCurve attr to the new curve
cmds.connectAttr(hairSys + ".outputHair[0]", hair + ".currentPosition")
crv = cmds.createNode("nurbsCurve")
crvParent = cmds.listRelatives(crv, parent = True)[0]
crvParent = cmds.rename(crvParent, name + "_track_rt_curve")
crv = name + "_track_rt_curveShape"
cmds.setAttr(crvParent + ".v", 0)
cmds.connectAttr(hair + ".outCurve", crv + ".create")
#set the hair follicle attrs
if len(hairDag) > 0:
cmds.setAttr(hairDag + ".pointLock", 3)
cmds.setAttr(hairDag + ".restPose", 1)
cmds.select(hairSys)
#Create Spline Handle for the selected chain and the duplicated curve. the original is driven by hair
ikNodes = cmds.ikHandle(sol = "ikSplineSolver", ccv = False, pcv = False, snc = True, rootTwistMode = False, sj = dynamicJoints[0], ee = dynamicJoints[len(dynamicJoints) - 1], c = crv)[0]
cmds.setAttr(ikNodes + ".v", 0)
ikNodes = cmds.rename(ikNodes, name + "_dynChain_ikHandle")
#Create a duplicate joint chain for manual animation
dupeChain = cmds.duplicate(dynamicJoints[0], rr = True, rc = True)
dupeStartJoint = dupeChain[0]
dupeJoints = cmds.listRelatives(dupeStartJoint, ad = True)
joints = cmds.listRelatives(dynamicJoints[0], ad = True)
#rename duped joints and connect real joints to duped joints
for i in range(int(len(joints))):
if cmds.objectType(dupeJoints[i], isType = 'joint'):
cmds.rename(dupeJoints[i], "ANIM_" + joints[i])
cmds.connectAttr("ANIM_" + joints[i] + ".r", joints[i] + ".r", force = True)
else:
cmds.delete(dupeJoints[i])
#connect up start joint to ANIM start joint
cmds.connectAttr(dupeStartJoint + ".t", dynamicJoints[0] + ".t")
cmds.connectAttr(dupeStartJoint + ".r", dynamicJoints[0] + ".r")
cmds.connectAttr(dupeStartJoint + ".s", dynamicJoints[0] + ".s")
dupeStartJoint = cmds.rename(dupeStartJoint, "ANIM_" + dynamicJoints[0])
#cmds.parent(dupeStartJoint, world = True)
#Create skinCluster between duplicate curve and animation joint chain(dupe chain)
cmds.select(dupeStartJoint, hi = True)
dupeSkel = cmds.ls(sl = True, type = "joint")
cmds.select(curve)
cmds.select(dupeSkel, add = True)
skinCluster = cmds.skinCluster(tsb = True, mi = 3, dr = 4)
#Create the control that has all of our dynamic attrs
control = self.createControl("square", 30, name + "_dyn_anim")
constraint = cmds.parentConstraint(dynamicJoints[0], control)[0]
cmds.delete(constraint)
cmds.makeIdentity(control, r = 1, t = 0, s = 0, apply = True)
cmds.setAttr(control + ".rx", 90)
ctrlGrp = cmds.group(empty = True, name = control + "_grp")
constraint = cmds.parentConstraint(dynamicJoints[0], ctrlGrp)[0]
cmds.delete(constraint)
cmds.parent(control, ctrlGrp)
cmds.makeIdentity(control, r = 1, t = 1, s = 1, apply = True)
cmds.parentConstraint(control, dupeStartJoint)
#lock attrs
cmds.setAttr(control + ".sx", lock = True, keyable = False)
cmds.setAttr(control + ".sy", lock = True, keyable = False)
cmds.setAttr(control + ".sz", lock = True, keyable = False)
cmds.setAttr(control + ".v", lock = True, keyable = False)
#add attrs
cmds.select(control)
cmds.addAttr(ln = "___DYNAMICS___", at = "double", keyable = True)
cmds.setAttr(control + ".___DYNAMICS___", lock = True)
cmds.addAttr(ln = "chainAttach", at = "enum", en = "No Attach:Base:Tip:Both End:", dv = 1, keyable = True)
cmds.addAttr(ln = "chainStartEnvelope", at = "double", min = 0, max = 1, dv = 1, keyable = True)
cmds.addAttr(ln = "chainStartFrame", at = "double", dv = 1, keyable = True)
cmds.addAttr(ln = "___BEHAVIOR___", at = "double", keyable = True)
cmds.setAttr(control + ".___BEHAVIOR___", lock = True)
cmds.addAttr(ln = "chainStiffness", at = "double", min = 0, dv = .1, keyable = True)
cmds.addAttr(ln = "chainDamping", at = "double", min = 0, dv = 0.2, keyable = True)
cmds.addAttr(ln = "chainGravity", at = "double", min = 0, dv = 1, keyable = True)
cmds.addAttr(ln = "chainIteration", at = "long", min = 0, dv = 1, keyable = True)
cmds.addAttr(ln = "___COLLISIONS___", at = "double", keyable = True)
cmds.setAttr(control + ".___COLLISIONS___", lock = True)
cmds.addAttr(ln = "chainCollide", at = "bool", dv = 0, keyable = True)
cmds.addAttr(ln = "chainWidthBase", at = "double", min = 0, dv = 1, keyable = True)
cmds.addAttr(ln = "chainWidthExtremity", at = "double", min = 0, dv = 1, keyable = True)
cmds.addAttr(ln = "chainCollideGround", at = "bool", dv = 0, keyable = True)
cmds.addAttr(ln = "chainCollideGroundHeight", at = "double", dv = 0, keyable = True)
#connect attrs
cmds.connectAttr(control + ".chainStartEnvelope", ikNodes + ".ikBlend")
cmds.connectAttr(control + ".chainAttach", hair + ".pointLock")
cmds.connectAttr(control + ".chainStartFrame", hairSys + ".startFrame")
cmds.connectAttr(control + ".chainStiffness", hairSys + ".stiffness")
cmds.connectAttr(control + ".chainDamping", hairSys + ".damp")
cmds.connectAttr(control + ".chainGravity", hairSys + ".gravity")
cmds.connectAttr(control + ".chainIteration", hairSys + ".iterations")
cmds.connectAttr(control + ".chainCollide", hairSys + ".collide")
cmds.connectAttr(control + ".chainWidthBase", hairSys + ".clumpWidth")
cmds.connectAttr(control + ".chainWidthExtremity", hairSys + ".clumpWidthScale[1].clumpWidthScale_FloatValue")
cmds.connectAttr(control + ".chainCollideGround", hairSys + ".collideGround")
cmds.connectAttr(control + ".chainCollideGroundHeight", hairSys + ".groundHeight")
#Create the expression for real time
track_RealTime = cmds.spaceLocator(name = name + "_track_rt_loc")[0]
cmds.pointConstraint(dupeSkel[len(dupeSkel) -1], track_RealTime)
connections = cmds.listConnections(hairSys + ".currentTime", p = True, c = True)
cmds.disconnectAttr(connections[1], connections[0])
expressionString = "if(frame!= " + hairSys + ".startFrame)\n\t" + hairSys + ".currentTime = " + hairSys + ".currentTime + 1 + " + track_RealTime + ".tx - " + track_RealTime + ".tx + " + track_RealTime + ".ty - " + track_RealTime + ".ty + " + track_RealTime + ".tz - " + track_RealTime + ".tz + " + control + ".chainWidthBase - "+ control + ".chainWidthBase + "+ control + ".chainWidthExtremity - "+ control + ".chainWidthExtremity + " + control + ".chainGravity - "+ control + ".chainGravity;\n" +"else\n\t" + hairSys + ".currentTime = " + hairSys + ".startFrame;"
cmds.expression(name = "EXP_" + hairSys + "_TRACK_RealTime", string = expressionString)
cmds.setAttr(track_RealTime + ".v", 0)
#Set Defaults
cmds.setAttr(hairSys + ".drawCollideWidth", 1)
cmds.setAttr(hairSys + ".widthDrawSkip", 0)
cmds.setAttr(hair + ".degree", 1)
cmds.parentConstraint(parent, ctrlGrp, mo = True)
if cmds.objExists("dynHairChain") == False:
cmds.group(empty = True, name = "dynHairChain")
if cmds.objExists(dynamicJoints[0] + "_HairControls") == False:
group = cmds.group([ikNodes, hairSys, hair, track_RealTime, ctrlGrp, crvParent], name = dynamicJoints[0] + "_HairControls")
else:
cmds.parent([ikNodes, hairSys, hair, ctrlGrp,crvParent ], dynamicJoints[0] + "_HairControls")
cmds.parent(group, "dynHairChain")
#Cleanup
jointsGrp = cmds.group(empty = True, name = name + "_jointGrp")
cmds.parent([dynamicJoints[0], dupeStartJoint], jointsGrp)
cmds.setAttr(control + ".overrideEnabled", 1)
cmds.setAttr(control + ".overrideColor", 18)
#@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@#
#CLEAN UP SCENE
#@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@#
ikGrp = cmds.group(empty = True, name = name + "_ik_ctrl_grp")
clustersGrp = cmds.group(empty = True, name = name + "_ik_clusters_grp")
dynGrp = cmds.group(empty = True, name = name + "_dyn_ctrl_grp")
masterGrp = cmds.group(empty = True, name = name + "_master_ctrl_grp")
createdControls.append(masterGrp)
#need to parent control groups in here
cmds.parent([ikHandle, ikJoints[0], ikCurve, name + "_ik_base_anim_grp"], ikGrp)
for cluster in clusterNodes:
cmds.parent(cluster, clustersGrp)
for grp in ikAnimGrps:
cmds.parent(grp, ikGrp)
cmds.parent(clustersGrp, "master_anim")
cmds.setAttr(clustersGrp + ".inheritsTransform", 0)
cmds.parent(name + "_jointGrp", dynGrp)
cmds.parent([ikGrp, dynGrp, fkRootGrp], masterGrp)
#@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@#
#HOOKUP RIGS TO RIG SETTINGS
#@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@#
cmds.select("Rig_Settings")
cmds.addAttr(longName= (name + "_fk"), defaultValue=1, minValue=0, maxValue=1, keyable = True)
cmds.addAttr(longName= (name + "_ik"), defaultValue=0, minValue=0, maxValue=1, keyable = True)
cmds.addAttr(longName= (name + "_dynamic"), defaultValue=0, minValue=0, maxValue=1, keyable = True)
for i in range(int(len(dynamicJoints))):
driverJoint = dynamicJoints[i].replace("rig_dyn_", "driver_")
constraint = cmds.parentConstraint([fkJoints[i], ikJoints[i], dynamicJoints[i]], driverJoint)[0]
cmds.connectAttr("Rig_Settings." + name + "_fk", constraint + "." + fkJoints[i] + "W0")
cmds.connectAttr("Rig_Settings." + name + "_ik", constraint + "." + ikJoints[i] + "W1")
cmds.connectAttr("Rig_Settings." + name + "_dynamic", constraint + "." + dynamicJoints[i] + "W2")
#create blend Color nodes for scale
scaleBlendColors = cmds.shadingNode("blendColors", asUtility = True, name = name + "_scale_blend")
cmds.connectAttr(firstControl + ".scale", scaleBlendColors + ".color1")
cmds.connectAttr(name + "_ik_base_anim" + ".scale", scaleBlendColors + ".color2")
cmds.connectAttr(scaleBlendColors + ".output", driverJoint + ".scale")
cmds.connectAttr("Rig_Settings." + name + "_fk", scaleBlendColors + ".blender")
#setup visibility connections
cmds.connectAttr("Rig_Settings." + name + "_fk", fkRootGrp + ".v")
cmds.connectAttr("Rig_Settings." + name + "_ik", ikGrp + ".v")
cmds.connectAttr("Rig_Settings." + name + "_dynamic", name + "_dyn_anim_grp.v")
return createdControls
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
def createDriverSkeleton(self):
    """Duplicate the export skeleton under 'root' into a 'driver_' joint
    chain and wire every driver joint to its matching export joint.

    Most joints get direct translate/rotate/scale connections; the upper
    arms get an orient constraint instead of a rotate connection
    (NOTE(review): presumably so arm twist can be layered — confirm).
    """
    dupeRoot = cmds.duplicate("root", rc = True)[0]
    # Capture both hierarchies in the same selection order so the
    # duplicated joints line up index-for-index with the originals
    # (assumes both selections have equal length — they should, since
    # one is a full duplicate of the other).
    cmds.select("root", hi = True)
    exportSkel = cmds.ls(sl = True)
    cmds.select(dupeRoot, hi = True)
    dupeSkel = cmds.ls(sl = True)
    driverJoints = []
    for original, duped in zip(exportSkel, dupeSkel):
        if cmds.objExists(duped):
            driverJoints.append(cmds.rename(duped, "driver_" + original))
    # Direct-connect each driver to its export joint. The driver name is
    # "driver_<export>", so everything after the first "_" is the target.
    specialCases = ["upperarm_l", "upperarm_r"]
    for driver in driverJoints:
        target = driver.partition("_")[2]
        cmds.connectAttr(driver + ".translate", target + ".translate")
        if target in specialCases:
            cmds.connectAttr(driver + ".scale", target + ".scale")
            cmds.orientConstraint(driver, target)
        else:
            cmds.connectAttr(driver + ".rotate", target + ".rotate")
            cmds.connectAttr(driver + ".scale", target + ".scale")
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
def getSpineJoints(self):
    """Return the list of spine joint names based on the rig's settings.

    Reads ``numSpineBones`` from the ``Skeleton_Settings`` node and builds
    the names ``spine_01, spine_02, ...`` with two-digit zero padding.

    Fixes two defects in the original implementation:
    - joint numbers of 10 and above raised ``TypeError`` because an ``int``
      was concatenated to a string without ``str()``;
    - the padding check tested the loop index (``i < 10``) instead of the
      joint number, so joint 10 (i == 9) would have been double-padded as
      ``"spine_010"``.

    Returns:
        list[str]: the spine joint names, in order.
    """
    numSpineBones = int(cmds.getAttr("Skeleton_Settings.numSpineBones"))
    # %02d matches the existing naming convention: spine_01 ... spine_09, spine_10.
    return ["spine_%02d" % (i + 1) for i in range(numSpineBones)]
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
def createControl(self, controlType, size, name):
    """Create a NURBS control curve of the requested shape.

    Args:
        controlType: one of "circle", "circleSpecial", "square", "foot",
            "arrow", "arrowOnBall", "semiCircle", "pin", "sphere".
        size: base size of the control; multiplied by the character scale
            factor from getScaleFactor().
        name: name given to the created curve transform.

    Returns:
        The control transform name.

    NOTE(review): an unrecognized controlType leaves ``control`` unbound
    and raises NameError at the return, same as the original code.
    """
    scale = self.getScaleFactor()

    def _applyScale(ctrl):
        # Shapes that are authored at unit/absolute size get sized via
        # their scale channels afterwards.
        for axis in ("X", "Y", "Z"):
            cmds.setAttr(ctrl + ".scale" + axis, size * scale)

    if controlType == "circle":
        # Radius already includes the scale factor; no post-scaling needed.
        control = cmds.circle(c = (0, 0, 0), sw = 360, r = size * scale, d = 3, name = name)[0]
    elif controlType == "circleSpecial":
        control = cmds.circle(c = (0, 0, 0), sw = 360, r = 1, d = 3, name = name)[0]
        # Pivot offset flips depending on the side suffix of the name.
        side = name.rpartition("_")[2]
        if side == "l":
            cmds.xform(control, piv = (0, -1, 0))
        else:
            cmds.xform(control, piv = (0, 1, 0))
        _applyScale(control)
    elif controlType == "square":
        # A 4-point degree-1 circle is a diamond; rotating 45 degrees
        # squares it up.
        control = cmds.circle(c = (0, 0, 0), s = 4, sw = 360, r = size * scale, d = 1, name = name)[0]
        cmds.setAttr(control + ".rz", 45)
    elif controlType == "foot":
        control = cmds.curve(name = name, d = 3, p = [(0, 40, 0), (-3.42, 39, 0), (-10.2, 37, 0), (-13, 22, 0), (-15.7, 13.2, 0), (-20, -14, 0), (-18.1, -25.6, 0), (-15, -44.8, 0), (1.1, -41.2, 0), (4.8, -41.7, 0), (15.5, -31.9, 0), (16.9, -22.7, 0), (18.6, -15.2, 0), (16.5, -.5, 0), (11.2, 29.2, 0), (10.7, 39.7, 0), (3.6, 39.9, 0), (0, 40, 0)])
        # Hidden locator parented under the foot marking its end point.
        footLoc = cmds.spaceLocator(name = (name + "_end_loc"))[0]
        cmds.parent(footLoc, control)
        cmds.setAttr(footLoc + ".ty", -40)
        cmds.setAttr(footLoc + ".v", 0)
        _applyScale(control)
    elif controlType == "arrow":
        control = cmds.curve(name = name, d = 1, p = [(0, -45, 0), (5, -45, 0), (5, -62, 0), (10, -62, 0), (0, -72, 0), (-10, -62, 0), (-5, -62, 0), (-5, -45, 0), (0, -45, 0)])
        # Center the pivot, shift up, freeze, then reposition the pivot.
        cmds.xform(control, cp = True)
        cmds.setAttr(control + ".ty", 58.5)
        cmds.makeIdentity(control, t = 1, apply = True)
        cmds.xform(control, piv = (0, 13.5, 0))
        _applyScale(control)
    elif controlType == "arrowOnBall":
        control = cmds.curve(name = name, d = 1, p = [(0.80718, 0.830576, 8.022739), (0.80718, 4.219206, 7.146586 ), (0.80718, 6.317059, 5.70073), (2.830981, 6.317059, 5.70073), (0, 8.422749, 2.94335), (-2.830981, 6.317059, 5.70073), (-0.80718, 6.317059, 5.70073), (-0.80718, 4.219352, 7.146486), (-0.80718, 0.830576, 8.022739), (-4.187851, 0.830576, 7.158003), (-6.310271, 0.830576, 5.705409), (-6.317059, 2.830981, 5.7007), (-8.422749, 0, 2.94335), (-6.317059, -2.830981, 5.70073), (-6.317059, -0.830576, 5.70073), (-4.225134, -0.830576, 7.142501), (-0.827872, -0.830576, 8.017446), (-0.80718, -4.176512, 7.160965), (-0.80718, -6.317059, 5.70073), (-2.830981, -6.317059, 5.70073), (0, -8.422749, 2.94335), (2.830981, -6.317059, 5.70073), (0.80718, -6.317059, 5.70073), (0.80718, -4.21137, 7.151987), (0.80718, -0.830576, 8.022739), (4.183345, -0.830576, 7.159155), (6.317059, -0.830576, 5.70073), (6.317059, -2.830981, 5.70073), (8.422749, 0, 2.94335), (6.317059, 2.830981, 5.70073), (6.317059, 0.830576, 5.70073), (4.263245, 0.830576, 7.116234), (0.80718, 0.830576, 8.022739)])
        _applyScale(control)
    elif controlType == "semiCircle":
        control = cmds.curve(name = name, d = 3, p = [(0,0,0), (7, 0, 0), (8, 0, 0), (5, 4, 0), (0, 5, 0), (-5, 4, 0), (-8, 0, 0), (-7, 0, 0), (0,0,0)])
        cmds.xform(control, ws = True, t = (0, 5, 0))
        cmds.xform(control, ws = True, piv = (0, 0, 0))
        cmds.makeIdentity(control, t = 1, apply = True)
        _applyScale(control)
    elif controlType == "pin":
        control = cmds.curve(name = name, d = 1, p = [(12,0,0), (0, 0, 0), (-12, -12, 0), (-12, 12, 0), (0, 0, 0)])
        cmds.xform(control, ws = True, piv = [12,0,0])
        # Flatten the pin head before freezing its transforms.
        cmds.setAttr(control + ".scaleY", .5)
        cmds.makeIdentity(control, t = 1, apply = True)
        _applyScale(control)
    elif controlType == "sphere":
        # Three orthogonal circles approximated with straight segments.
        points = [(0, 0, 1), (0, 0.5, 0.866), (0, 0.866025, 0.5), (0, 1, 0), (0, 0.866025, -0.5), (0, 0.5, -0.866025), (0, 0, -1), (0, -0.5, -0.866025), (0, -0.866025, -0.5), (0, -1, 0), (0, -0.866025, 0.5), (0, -0.5, 0.866025), (0, 0, 1), (0.707107, 0, 0.707107), (1, 0, 0), (0.707107, 0, -0.707107), (0, 0, -1), (-0.707107, 0, -0.707107), (-1, 0, 0), (-0.866025, 0.5, 0), (-0.5, 0.866025, 0), (0, 1, 0), (0.5, 0.866025, 0), (0.866025, 0.5, 0), (1, 0, 0), (0.866025, -0.5, 0), (0.5, -0.866025, 0), (0, -1, 0), (-0.5, -0.866025, 0), (-0.866025, -0.5, 0), (-1, 0, 0), (-0.707107, 0, 0.707107), (0, 0, 1)]
        control = cmds.curve(name = name, d = 1, p = points)
        _applyScale(control)
    return control
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
def getScaleFactor(self):
    """Return the character's size relative to a 400-unit default height.

    A temporary locator is snapped to the "head" joint with a parent
    constraint and its Z translation is read as the character height.
    NOTE(review): height is taken from .tz — this assumes the rig's
    world-up maps to the locator's Z channel; confirm against the
    skeleton's orientation.
    """
    probe = cmds.spaceLocator(name = "headLoc")[0]
    cmds.parentConstraint("head", probe)
    measuredHeight = cmds.getAttr(probe + ".tz")
    cmds.delete(probe)
    # 400 units is the reference character height.
    return measuredHeight / 400.0
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
def getUpAxis(self, obj):
    """Return "X", "Y" or "Z": the local translate channel of ``obj`` that
    changes the most when the object is moved along world +Z.

    The object is nudged 10 units along world Z, its translate values are
    sampled, and it is immediately nudged back — so its position is
    unchanged when this returns.
    """
    cmds.xform(obj, ws = True, relative = True, t = [0, 0, 10])
    tx, ty, tz = cmds.getAttr(obj + ".translate")[0]
    cmds.xform(obj, ws = True, relative = True, t = [0, 0, -10])
    # Largest absolute component wins; index 0/1/2 maps straight to X/Y/Z.
    magnitudes = (abs(tx), abs(ty), abs(tz))
    return "XYZ"[magnitudes.index(max(magnitudes))]
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
def normalizeSubVector(self, vector1, vector2):
    """Return the dominant axis label of the direction from ``vector2``
    toward ``vector1``.

    The difference (vector1 - vector2) is normalized and the component
    with the largest magnitude is selected. The returned label's sign is
    inverted relative to the component's sign ("X" for a negative X
    component, "-X" for a positive one) — NOTE(review): the inversion
    looks intentional for how this rig expresses aim axes; confirm.

    Raises ZeroDivisionError when the two points coincide (zero-length
    difference), matching the original behavior.
    """
    import math
    delta = [a - b for a, b in zip(vector1, vector2)]
    # Length is computed from the first three components only.
    length = math.sqrt(delta[0] ** 2 + delta[1] ** 2 + delta[2] ** 2)
    unit = [component / length for component in delta]
    # First index of the largest-magnitude component (ties: lowest index,
    # same as list.index on the max value).
    dominant = max(range(len(unit)), key = lambda idx: abs(unit[idx]))
    label = "XYZ"[dominant]
    # Sign inversion: negative component -> positive axis label.
    if unit[dominant] < 0:
        return label
    return "-" + label
| 409,327 | 409,327 | 0.500114 | 36,788 | 409,327 | 5.462488 | 0.04265 | 0.052166 | 0.023737 | 0.020273 | 0.671656 | 0.615658 | 0.557172 | 0.506051 | 0.474775 | 0.447286 | 0 | 0.01915 | 0.361239 | 409,327 | 1 | 409,327 | 409,327 | 0.749428 | 0.972338 | 0 | 0.492991 | 0 | 0 | 0.111658 | 0.017493 | 0.001294 | 0 | 0 | 0 | 0 | 0 | null | null | 0.001725 | 0.001294 | null | null | 0.000863 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 5 |
0aee6ad417b7d64fede433bee91654a704989116 | 60 | py | Python | torch/package/analyze/__init__.py | Hacky-DH/pytorch | 80dc4be615854570aa39a7e36495897d8a040ecc | [
"Intel"
] | 60,067 | 2017-01-18T17:21:31.000Z | 2022-03-31T21:37:45.000Z | torch/package/analyze/__init__.py | Hacky-DH/pytorch | 80dc4be615854570aa39a7e36495897d8a040ecc | [
"Intel"
] | 66,955 | 2017-01-18T17:21:38.000Z | 2022-03-31T23:56:11.000Z | torch/package/analyze/__init__.py | Hacky-DH/pytorch | 80dc4be615854570aa39a7e36495897d8a040ecc | [
"Intel"
] | 19,210 | 2017-01-18T17:45:04.000Z | 2022-03-31T23:51:56.000Z | from .trace_dependencies import (
trace_dependencies,
)
| 15 | 33 | 0.766667 | 6 | 60 | 7.333333 | 0.666667 | 0.772727 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.166667 | 60 | 3 | 34 | 20 | 0.88 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 0.333333 | 0 | 0.333333 | 0 | 1 | 0 | 0 | null | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | 5 |
0af12dd7028e4643ecb0ef90178bf7d28b179392 | 210 | py | Python | pycifrw/run_test.py | st3107/conda-recipes | 61a8fbefa807f43f1023397fd00310551da200a9 | [
"BSD-3-Clause"
] | null | null | null | pycifrw/run_test.py | st3107/conda-recipes | 61a8fbefa807f43f1023397fd00310551da200a9 | [
"BSD-3-Clause"
] | null | null | null | pycifrw/run_test.py | st3107/conda-recipes | 61a8fbefa807f43f1023397fd00310551da200a9 | [
"BSD-3-Clause"
] | 1 | 2020-12-01T18:11:29.000Z | 2020-12-01T18:11:29.000Z | #!/usr/bin/env python
import warnings
warnings.filterwarnings("error", message='invalid escape sequence',
category=DeprecationWarning)
import CifFile.StarFile
import CifFile.StarScan
| 21 | 67 | 0.72381 | 21 | 210 | 7.238095 | 0.809524 | 0.171053 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.190476 | 210 | 9 | 68 | 23.333333 | 0.894118 | 0.095238 | 0 | 0 | 0 | 0 | 0.148148 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 0.6 | 0 | 0.6 | 0 | 1 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 5 |
7c27adf3b12b36209491d7faa15359ffd5af6333 | 239 | py | Python | backend/AOGSbackend/utils/http_tools.py | myxiur/AOGuildSite | ed30018533bad1f5f0053603e8d7fea583bce02e | [
"MIT"
] | null | null | null | backend/AOGSbackend/utils/http_tools.py | myxiur/AOGuildSite | ed30018533bad1f5f0053603e8d7fea583bce02e | [
"MIT"
] | null | null | null | backend/AOGSbackend/utils/http_tools.py | myxiur/AOGuildSite | ed30018533bad1f5f0053603e8d7fea583bce02e | [
"MIT"
] | null | null | null | import json
from django.http import HttpResponse, JsonResponse
def error_response(error_name: str, error_msg: str, error_code: int) -> HttpResponse:
return JsonResponse({"error": error_name, "message": error_msg}, status=error_code) | 34.142857 | 87 | 0.778243 | 32 | 239 | 5.59375 | 0.5625 | 0.100559 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.117155 | 239 | 7 | 87 | 34.142857 | 0.848341 | 0 | 0 | 0 | 0 | 0 | 0.05 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.25 | false | 0 | 0.5 | 0.25 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 1 | 1 | 1 | 0 | 0 | 5 |
7c2834b0a9fe0df1171854f49b4c60c038fe6df1 | 58 | py | Python | Unit 3/3.1/3.1.6 Fix This Program.py | shashwat73/cse | 60e49307e57105cf9916c7329f53f891c5e81fdb | [
"MIT"
] | 1 | 2021-04-08T14:02:49.000Z | 2021-04-08T14:02:49.000Z | Unit 3/3.1/3.1.6 Fix This Program.py | shashwat73/cse | 60e49307e57105cf9916c7329f53f891c5e81fdb | [
"MIT"
] | null | null | null | Unit 3/3.1/3.1.6 Fix This Program.py | shashwat73/cse | 60e49307e57105cf9916c7329f53f891c5e81fdb | [
"MIT"
] | null | null | null | print "Hi there!"
print "My favorite color is magenta."
| 19.333333 | 38 | 0.706897 | 9 | 58 | 4.555556 | 0.888889 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.189655 | 58 | 2 | 39 | 29 | 0.87234 | 0 | 0 | 0 | 0 | 0 | 0.678571 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | null | 0 | 0 | null | null | 1 | 1 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 5 |
7c65ce4f9a8842a81854570f2b5a6a26bbf1d5ed | 219 | py | Python | src/common/session.py | ryanmcandrew/streamlit-app | c892904cb0e456d0c5019fb70de15cf49dc93322 | [
"Apache-2.0"
] | null | null | null | src/common/session.py | ryanmcandrew/streamlit-app | c892904cb0e456d0c5019fb70de15cf49dc93322 | [
"Apache-2.0"
] | null | null | null | src/common/session.py | ryanmcandrew/streamlit-app | c892904cb0e456d0c5019fb70de15cf49dc93322 | [
"Apache-2.0"
] | null | null | null | import streamlit as st
class Session:
def __init__(self):
self.page_config = None
def run_config(self):
if not self.page_config:
self.page_config = st.set_page_config(layout="wide") | 24.333333 | 64 | 0.657534 | 31 | 219 | 4.322581 | 0.580645 | 0.298507 | 0.313433 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.255708 | 219 | 9 | 64 | 24.333333 | 0.822086 | 0 | 0 | 0 | 0 | 0 | 0.018182 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.285714 | false | 0 | 0.142857 | 0 | 0.571429 | 0 | 0 | 0 | 0 | null | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 5 |
7cb0d15c534e030fb68ec04de776bd12aa2dbfff | 69 | py | Python | yui/box/apps/__init__.py | item4/yui | 8628d0d54b94ada3cbe7d1b0f624063258bad10a | [
"MIT"
] | 36 | 2017-06-12T01:09:46.000Z | 2021-01-31T17:57:41.000Z | yui/box/apps/__init__.py | item4/yui | 8628d0d54b94ada3cbe7d1b0f624063258bad10a | [
"MIT"
] | 145 | 2017-06-21T13:31:29.000Z | 2021-06-20T01:01:30.000Z | yui/box/apps/__init__.py | item4/yui | 8628d0d54b94ada3cbe7d1b0f624063258bad10a | [
"MIT"
] | 21 | 2017-07-24T15:53:19.000Z | 2021-12-23T04:18:31.000Z | from . import route
from .base import BaseApp
from .basic import App
| 17.25 | 25 | 0.782609 | 11 | 69 | 4.909091 | 0.636364 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.173913 | 69 | 3 | 26 | 23 | 0.947368 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 5 |
7ce276b21d5dbcbbf92923de788068edce62c9ba | 68 | py | Python | paranormal-pioneers/project/langs/brainfuck/__main__.py | python-discord/code-jam-6 | a7eb3b1256ae113c93f0337892c667768e8bc199 | [
"MIT"
] | 76 | 2020-01-17T12:09:48.000Z | 2022-03-26T19:17:26.000Z | paranormal-pioneers/project/langs/brainfuck/__main__.py | 1nf1del/code-jam-6 | a7eb3b1256ae113c93f0337892c667768e8bc199 | [
"MIT"
] | 17 | 2020-01-21T23:13:34.000Z | 2020-02-07T00:07:04.000Z | paranormal-pioneers/project/langs/brainfuck/__main__.py | 1nf1del/code-jam-6 | a7eb3b1256ae113c93f0337892c667768e8bc199 | [
"MIT"
] | 91 | 2020-01-17T12:01:06.000Z | 2022-03-22T20:38:59.000Z | from project.langs.brainfuck.impl import launch_repl
launch_repl()
| 17 | 52 | 0.838235 | 10 | 68 | 5.5 | 0.8 | 0.363636 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.088235 | 68 | 3 | 53 | 22.666667 | 0.887097 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 0.5 | 0 | 0.5 | 0 | 1 | 0 | 0 | null | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | 5 |
6b010c0d2ed4d66545cb3abc83078d2234f0dc27 | 18,710 | py | Python | tests/unit/test_sc2metric.py | matthewj8489/Starcraft2Metrics | 5156434bc22d25cc005c83e22ac4b3423ee40355 | [
"MIT"
] | 4 | 2019-10-06T01:16:36.000Z | 2020-12-23T21:01:55.000Z | tests/unit/test_sc2metric.py | matthewj8489/Starcraft2Metrics | 5156434bc22d25cc005c83e22ac4b3423ee40355 | [
"MIT"
] | 3 | 2019-03-09T17:26:43.000Z | 2020-04-12T18:19:35.000Z | tests/unit/test_sc2metric.py | matthewj8489/Starcraft2Metrics | 5156434bc22d25cc005c83e22ac4b3423ee40355 | [
"MIT"
] | null | null | null | import os
import sys
if __name__ == '__main__':
sys.path.insert(0, os.path.abspath(os.path.join(os.path.dirname(__file__), os.pardir, os.pardir)))
if sys.version_info[:2] < (2, 7):
import unittest2 as unittest
else:
import unittest
import metrics
from metrics.sc2metric import Sc2MetricAnalyzer
from metrics.metric_containers import *
class TestSc2MetricAnalyzer(unittest.TestCase):
## def setUp(self):
## self.metrics = Sc2MetricAnalyzer()
##
## def tearDown(self):
## self.metrics = None
def test_first_max(self):
met = Sc2MetricAnalyzer()
met.supply.append(FoodCount(0, 15, 15)) # 0 second initial supply
met.supply.append(FoodCount(50, 60, 101)) # nothing special
met.supply.append(FoodCount(100, 197, 197)) #near max and near max supply made
met.supply.append(FoodCount(130, 197, 200)) #near max
met.supply.append(FoodCount(140, 198, 200)) #near max
met.supply.append(FoodCount(150, 199, 200)) #near max
met.supply.append(FoodCount(180, 200, 200)) #first max
met.supply.append(FoodCount(200, 189, 200)) #dipped below max
met.supply.append(FoodCount(220, 200, 200)) #got to max again
self.assertEqual(met.first_max(), 180)
def test_avg_sq(self):
met = Sc2MetricAnalyzer()
met.resources.append(ResourceCount(10, 500, 120))
met.resources.append(ResourceCount(20, 750, 300))
met.resources.append(ResourceCount(40, 1100, 250))
# SQ(i,u)=35(0.00137i-ln(u))+240
# i = avg rcr = 783.3
# u = aur = 223.3
self.assertEqual(round(met.avg_sq(), 1), 88.3)
def test_avg_sq_at_time(self):
met = Sc2MetricAnalyzer()
met.resources.append(ResourceCount(10, 500, 120))
met.resources.append(ResourceCount(20, 750, 300))
met.resources.append(ResourceCount(40, 1100, 250))
self.assertEqual(met.avg_sq_at_time(5), 0) # time before any input
self.assertEqual(round(met.avg_sq_at_time(50), 1), 88.3) # at a time later than last recorded
self.assertEqual(round(met.avg_sq_at_time(30), 1), 82.8) # in between 2 times
def test_avg_sq_pre_max(self):
met = Sc2MetricAnalyzer()
met.supply.append(FoodCount(0, 15, 15)) # 0 second initial supply
met.supply.append(FoodCount(50, 60, 101)) # nothing special
met.supply.append(FoodCount(100, 197, 197)) #near max and near max supply made
met.supply.append(FoodCount(130, 197, 200)) #near max
met.supply.append(FoodCount(140, 198, 200)) #near max
met.supply.append(FoodCount(150, 199, 200)) #near max
met.supply.append(FoodCount(180, 200, 200)) #first max
met.supply.append(FoodCount(200, 189, 200)) #dipped below max
met.supply.append(FoodCount(220, 200, 200)) #got to max again
met.resources.append(ResourceCount(10, 500, 120))
met.resources.append(ResourceCount(20, 750, 300))
met.resources.append(ResourceCount(40, 1100, 250))
met.resources.append(ResourceCount(180, 3100, 850))
met.resources.append(ResourceCount(200, 3300, 1050))
met.resources.append(ResourceCount(220, 3000, 950))
self.assertEqual(round(met.avg_sq_pre_max(), 1), 97.4)
def test_avg_sq_pre_max_when_never_maxed(self):
met = Sc2MetricAnalyzer()
met.supply.append(FoodCount(0, 15, 15)) # 0 second initial supply
met.supply.append(FoodCount(50, 60, 101)) # nothing special
met.supply.append(FoodCount(100, 197, 197)) #near max and near max supply made
met.resources.append(ResourceCount(10, 500, 120))
met.resources.append(ResourceCount(20, 750, 300))
met.resources.append(ResourceCount(40, 1100, 250))
self.assertEqual(round(met.avg_sq_pre_max(), 1), 88.3)
def test_aur(self):
met = Sc2MetricAnalyzer()
met.resources.append(ResourceCount(10, 500, 120))
met.resources.append(ResourceCount(20, 750, 300))
met.resources.append(ResourceCount(40, 1100, 250))
# shouldn't avg rcr care about the time (second) that this value was read when calculating average?
self.assertEqual(round(met.aur(), 1), 223.3)
def test_aur_at_time(self):
met = Sc2MetricAnalyzer()
met.resources.append(ResourceCount(10, 500, 120))
met.resources.append(ResourceCount(20, 750, 300))
met.resources.append(ResourceCount(40, 1100, 250))
self.assertIsNone(met.aur_at_time(5)) # bad input
self.assertEqual(round(met.aur_at_time(50), 1), 223.3) # at a time later than last recorded
self.assertEqual(round(met.aur_at_time(30), 1), 210.0) # in between 2 times
def test_aur_pre_max(self):
met = Sc2MetricAnalyzer()
met.supply.append(FoodCount(0, 15, 15)) # 0 second initial supply
met.supply.append(FoodCount(50, 60, 101)) # nothing special
met.supply.append(FoodCount(100, 197, 197)) #near max and near max supply made
met.supply.append(FoodCount(130, 197, 200)) #near max
met.supply.append(FoodCount(140, 198, 200)) #near max
met.supply.append(FoodCount(150, 199, 200)) #near max
met.supply.append(FoodCount(180, 200, 200)) #first max
met.supply.append(FoodCount(200, 189, 200)) #dipped below max
met.supply.append(FoodCount(220, 200, 200)) #got to max again
met.resources.append(ResourceCount(10, 500, 120))
met.resources.append(ResourceCount(20, 750, 300))
met.resources.append(ResourceCount(40, 1100, 250))
met.resources.append(ResourceCount(180, 3100, 850))
met.resources.append(ResourceCount(200, 3300, 1050))
met.resources.append(ResourceCount(220, 3000, 950))
self.assertEqual(round(met.aur_pre_max(), 1), 380)
def test_aur_pre_max_when_never_maxed(self):
    """Without any 200-supply sample, aur_pre_max() equals the plain aur() value."""
    met = Sc2MetricAnalyzer()
    supply_samples = [(0, 15, 15),      # 0 second initial supply
                      (50, 60, 101),    # nothing special
                      (100, 197, 197)]  # near max and near max supply made
    for sample in supply_samples:
        met.supply.append(FoodCount(*sample))
    for sample in [(10, 500, 120), (20, 750, 300), (40, 1100, 250)]:
        met.resources.append(ResourceCount(*sample))
    self.assertEqual(round(met.aur_pre_max(), 1), 223.3)
def test_avg_rcr(self):
    """avg_rcr() should be 783.3: the mean of the collection-rate samples."""
    met = Sc2MetricAnalyzer()
    for sample in [(10, 500, 120), (20, 750, 300), (40, 1100, 250)]:
        met.resources.append(ResourceCount(*sample))
    # shouldn't avg rcr care about the time (second) that this value was read when calculating average?
    self.assertEqual(round(met.avg_rcr(), 1), 783.3)
def test_avg_rcr_at_time(self):
    """Average resource-collection rate evaluated up to a query time."""
    met = Sc2MetricAnalyzer()
    for sample in [(10, 500, 120), (20, 750, 300), (40, 1100, 250)]:
        met.resources.append(ResourceCount(*sample))
    self.assertIsNone(met.avg_rcr_at_time(5))                   # bad input
    self.assertEqual(round(met.avg_rcr_at_time(50), 1), 783.3)  # at a time later than last recorded
    self.assertEqual(round(met.avg_rcr_at_time(30), 1), 625.0)  # in between 2 times
def test_avg_rcr_pre_max(self):
    """Average collection rate over samples up to the first 200-supply max."""
    met = Sc2MetricAnalyzer()
    food_samples = [
        (0, 15, 15),      # 0 second initial supply
        (50, 60, 101),    # nothing special
        (100, 197, 197),  # near max and near max supply made
        (130, 197, 200),  # near max
        (140, 198, 200),  # near max
        (150, 199, 200),  # near max
        (180, 200, 200),  # first max
        (200, 189, 200),  # dipped below max
        (220, 200, 200),  # got to max again
    ]
    for sample in food_samples:
        met.supply.append(FoodCount(*sample))
    resource_samples = [(10, 500, 120), (20, 750, 300), (40, 1100, 250),
                        (180, 3100, 850), (200, 3300, 1050), (220, 3000, 950)]
    for sample in resource_samples:
        met.resources.append(ResourceCount(*sample))
    # (500 + 750 + 1100 + 3100) / 4 = 1362.5 -- only samples through the 180s max count.
    self.assertEqual(round(met.avg_rcr_pre_max(), 1), 1362.5)
def test_avg_rcr_pre_max_when_never_maxed(self):
    """Without any 200-supply sample, avg_rcr_pre_max() equals plain avg_rcr()."""
    met = Sc2MetricAnalyzer()
    supply_samples = [(0, 15, 15),      # 0 second initial supply
                      (50, 60, 101),    # nothing special
                      (100, 197, 197)]  # near max and near max supply made
    for sample in supply_samples:
        met.supply.append(FoodCount(*sample))
    for sample in [(10, 500, 120), (20, 750, 300), (40, 1100, 250)]:
        met.resources.append(ResourceCount(*sample))
    self.assertEqual(round(met.avg_rcr_pre_max(), 1), 783.3)
def test_supply_capped(self):
    """supply_capped() accumulates the seconds spent supply-blocked across cycles."""
    met = Sc2MetricAnalyzer()

    def record(second, used, made):
        met.supply.append(FoodCount(second, used, made))

    record(0, 12, 15)
    record(5, 14, 15)  # no supply block
    self.assertEqual(met.supply_capped(), 0)
    record(30, 15, 15)  # supply cap begins
    record(50, 15, 22)  # supply cap resolved by gaining supply buildings
    self.assertEqual(met.supply_capped(), 20)
    record(60, 22, 22)  # supply cap begins
    record(65, 20, 22)  # supply cap resolved by losing supply
    self.assertEqual(met.supply_capped(), 25)
    record(73, 20, 15)  # supply cap from losing supply building
    record(80, 20, 22)  # supply cap resolved
    self.assertEqual(met.supply_capped(), 32)
    record(160, 198, 192)  # supply cap begins
    record(161, 198, 200)  # supply resolved by reaching 200 supply buildings made
    self.assertEqual(met.supply_capped(), 33)
def test_first_time_to_supply(self):
    """first_time_to_supply(s) is the first recorded time supply reached s (-1 if never)."""
    met = Sc2MetricAnalyzer()
    for sample in [(0, 10, 15), (20, 25, 30)]:
        met.supply.append(FoodCount(*sample))
    self.assertEqual(met.first_time_to_supply(0), 0)    # less than the first supply
    self.assertEqual(met.first_time_to_supply(20), 20)  # in between supplies
    self.assertEqual(met.first_time_to_supply(25), 20)  # exact
    self.assertEqual(met.first_time_to_supply(26), -1)  # more than total supply tracked
def test_time_to_supply(self):
    """supply_at_time(t) reports the supply recorded at (or just before) time t."""
    met = Sc2MetricAnalyzer()
    for sample in [(5, 10, 15), (20, 25, 30)]:
        met.supply.append(FoodCount(*sample))
    self.assertEqual(met.supply_at_time(0), 0)    # less than the first supply
    self.assertEqual(met.supply_at_time(10), 10)  # in between
    self.assertEqual(met.supply_at_time(20), 25)  # exact
    self.assertEqual(met.supply_at_time(21), 25)  # past the last time
def test_workers_created_at_time(self):
    """workers_created_at_time(t) counts worker entries recorded at or before t."""
    met = Sc2MetricAnalyzer()
    self.assertEqual(met.workers_created_at_time(5), 0)  # no workers created
    for sample in [(1, 1, 1, True), (20, 8, 1, True), (25, 9, 1, True)]:
        met.workers_created.append(SupplyCount(*sample))
    self.assertEqual(met.workers_created_at_time(0), 0)   # before anything tracked
    self.assertEqual(met.workers_created_at_time(20), 2)  # exact time
    self.assertEqual(met.workers_created_at_time(15), 1)  # in between case
    self.assertEqual(met.workers_created_at_time(30), 3)  # longer than last tracked supply time
def test_army_created_at_time(self):
    """army_created_at_time(t) reports the army supply recorded at or before t."""
    met = Sc2MetricAnalyzer()
    self.assertEqual(met.army_created_at_time(5), 0)  # no army created
    for sample in [(10, 30, 6, False), (25, 44, 2, False), (30, 48, 4, False)]:
        met.army_created.append(SupplyCount(*sample))
    self.assertEqual(met.army_created_at_time(5), 0)    # before anything tracked
    self.assertEqual(met.army_created_at_time(25), 44)  # exact time
    self.assertEqual(met.army_created_at_time(20), 30)  # in between case
    self.assertEqual(met.army_created_at_time(40), 48)  # longer than last tracked supply time
def test_supply_created_at_time(self):
    """supply_created_at_time(t) returns the cumulative supply created by time t."""
    met = Sc2MetricAnalyzer()
    created = [
        (0, 1, 1, True),     # worker supply
        (0, 2, 1, True),     # two worker supplies tracked at zero seconds
        (20, 4, 2, False),   # army supply
        (50, 8, 4, False),
        (55, 9, 1, True),
        (60, 11, 2, False),
        (60, 11, 2, False),  # two army supplies tracked at zero seconds
    ]
    for sample in created:
        met.supply_created.append(SupplyCount(*sample))
    self.assertEqual(met.supply_created_at_time(0), 2)    # 0 seconds
    self.assertEqual(met.supply_created_at_time(55), 9)   # exact time
    self.assertEqual(met.supply_created_at_time(57), 9)   # in between 2 times
    self.assertEqual(met.supply_created_at_time(70), 11)  # greater than greatest time
def test_supply_created_at_time_when_no_supply_created_tracked(self):
    """With no samples recorded, cumulative supply created is zero."""
    analyzer = Sc2MetricAnalyzer()
    self.assertEqual(analyzer.supply_created_at_time(0), 0)
def test_supply_created_at_time_when_first_supply_created_occurred_after_supplied_time(self):
    """Queries earlier than the first recorded sample report zero supply created."""
    analyzer = Sc2MetricAnalyzer()
    analyzer.supply_created.append(SupplyCount(1, 1, 1, True))  # worker supply
    self.assertEqual(analyzer.supply_created_at_time(0), 0)
def test_time_to_workers_created(self):
    """time_to_workers_created(n) is when the n-th worker was recorded (None for bad n)."""
    met = Sc2MetricAnalyzer()
    for sample in [(0, 1, 1, True), (20, 8, 1, True), (25, 9, 1, True)]:
        met.workers_created.append(SupplyCount(*sample))
    self.assertIsNone(met.time_to_workers_created(0))  # bad input case
    self.assertIsNone(met.time_to_workers_created(6))  # bad input case
    self.assertEqual(met.time_to_workers_created(1), 0)
    self.assertEqual(met.time_to_workers_created(2), 20)
    self.assertEqual(met.time_to_workers_created(3), 25)
def test_time_to_supply_created(self):
    """time_to_supply_created(s) is the first time cumulative supply created reached s."""
    met = Sc2MetricAnalyzer()
    created = [
        (0, 1, 1, True),     # worker supply
        (0, 2, 1, True),     # two worker supplies tracked at zero seconds
        (20, 4, 2, False),   # army supply
        (50, 8, 4, False),
        (55, 9, 1, True),
        (60, 11, 2, False),
        (60, 11, 2, False),  # two army supplies tracked at zero seconds
    ]
    for sample in created:
        met.supply_created.append(SupplyCount(*sample))
    self.assertEqual(met.time_to_supply_created(0), 0)      # bad input case
    self.assertEqual(met.time_to_supply_created(8), 50)     # exact supply
    self.assertEqual(met.time_to_supply_created(10), 60)    # in between 2 supplies tracked
    self.assertEqual(met.time_to_supply_created(20), None)  # more supply than what was tracked
def test_time_to_supply_created_when_no_supply_was_created(self):
    """Without any recorded supply, no creation time exists."""
    analyzer = Sc2MetricAnalyzer()
    self.assertIsNone(analyzer.time_to_supply_created(1))
def test_time_to_supply_created_max_workers(self):
    """Time to reach a supply total while capping how many workers count toward it."""
    met = Sc2MetricAnalyzer()
    created = [
        (0, 1, 1, True),     # worker supply
        (0, 2, 1, True),     # two worker supplies tracked at zero seconds
        (20, 4, 2, False),   # army supply
        (50, 8, 4, False),
        (55, 9, 1, True),
        (60, 11, 2, False),
        (60, 11, 2, False),  # two army supplies tracked at zero seconds
    ]
    for sample in created:
        met.supply_created.append(SupplyCount(*sample))
    # bad inputs
    self.assertIsNone(met.time_to_supply_created_max_workers(0, 1))
    # max workers > total workers tracked
    self.assertEqual(met.time_to_supply_created_max_workers(8, 10), 50)   # exact supply
    self.assertEqual(met.time_to_supply_created_max_workers(10, 10), 60)  # in between 2 supplies
    self.assertEqual(met.time_to_supply_created_max_workers(20, 10), 60)  # more supply than what was tracked
    # max workers < total workers tracked
    self.assertEqual(met.time_to_supply_created_max_workers(7, 1), 50)    # exact supply
    self.assertEqual(met.time_to_supply_created_max_workers(8, 1), 60)
    self.assertIsNone(met.time_to_supply_created_max_workers(20, 2))      # more supply than what was tracked
def test_time_to_bases_created(self):
    """time_to_bases_created(n) is when the n-th base was recorded (None for bad n)."""
    met = Sc2MetricAnalyzer()
    for second in (0, 45, 80):
        met.bases_created.append(BaseCount(second))
    self.assertIsNone(met.time_to_bases_created(0))  # bad input case
    self.assertIsNone(met.time_to_bases_created(4))  # bad input case
    self.assertEqual(met.time_to_bases_created(1), 0)
    self.assertEqual(met.time_to_bases_created(2), 45)
    self.assertEqual(met.time_to_bases_created(3), 80)
# Allow this test module to be run directly (python <module>.py) as well as
# through a test runner.
if __name__ == '__main__':
    unittest.main()
| 48.346253 | 113 | 0.64046 | 2,463 | 18,710 | 4.714982 | 0.084856 | 0.074399 | 0.076208 | 0.121932 | 0.892362 | 0.839835 | 0.799707 | 0.705244 | 0.662619 | 0.620598 | 0 | 0.090767 | 0.249813 | 18,710 | 386 | 114 | 48.471503 | 0.736606 | 0.147354 | 0 | 0.543796 | 0 | 0 | 0.001037 | 0 | 0 | 0 | 0 | 0 | 0.255474 | 1 | 0.094891 | false | 0 | 0.025547 | 0 | 0.124088 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 5 |
6b417748278025bd00fc24d13e656b4661550dbc | 322 | py | Python | ldap3/run_test.py | nikicc/anaconda-recipes | 9c611a5854bf41bbc5e7ed9853dc71c0851a62ef | [
"BSD-3-Clause"
] | 130 | 2015-07-28T03:41:21.000Z | 2022-03-16T03:07:41.000Z | ldap3/run_test.py | nikicc/anaconda-recipes | 9c611a5854bf41bbc5e7ed9853dc71c0851a62ef | [
"BSD-3-Clause"
] | 119 | 2015-08-01T00:54:06.000Z | 2021-01-05T13:00:46.000Z | ldap3/run_test.py | nikicc/anaconda-recipes | 9c611a5854bf41bbc5e7ed9853dc71c0851a62ef | [
"BSD-3-Clause"
] | 72 | 2015-07-29T02:35:56.000Z | 2022-02-26T14:31:15.000Z | import ldap3.core
import ldap3.abstract
import ldap3.operation
import ldap3.protocol
import ldap3.protocol.sasl
import ldap3.protocol.schemas
import ldap3.protocol.formatters
import ldap3.strategy
import ldap3.utils
import ldap3.extend
import ldap3.extend.novell
import ldap3.extend.microsoft
import ldap3.extend.standard
| 23 | 32 | 0.860248 | 45 | 322 | 6.155556 | 0.333333 | 0.516245 | 0.274368 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.043919 | 0.080745 | 322 | 13 | 33 | 24.769231 | 0.891892 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | null | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 5 |
86239ee4ffd23b2f2e0ffd8183e3c0e2d39937f3 | 4,996 | py | Python | fixture/group_contact.py | eksam1993/python_training | 59fcdbcba52ca3daef52e59ce52223921a24daa4 | [
"Apache-2.0"
] | null | null | null | fixture/group_contact.py | eksam1993/python_training | 59fcdbcba52ca3daef52e59ce52223921a24daa4 | [
"Apache-2.0"
] | null | null | null | fixture/group_contact.py | eksam1993/python_training | 59fcdbcba52ca3daef52e59ce52223921a24daa4 | [
"Apache-2.0"
] | null | null | null |
class GroupHelper:
    """Page-object helper driving the group and contact pages of the app
    under test via the shared selenium webdriver (``self.app.wd``).

    Refactored to remove duplication: the per-field click/clear/send_keys
    sequence lives only in ``change_field_value``, and the contact form is
    filled from one field list instead of two copy-pasted method bodies.
    """

    # Contact-form field names; values are read from same-named attributes
    # of the contact object passed in.
    CONTACT_FIELDS = ("firstname", "lastname", "nickname", "title", "company", "email")

    def __init__(self, app):
        self.app = app

    def open_groups_page(self):
        """Navigate to the groups page."""
        wd = self.app.wd
        wd.find_element_by_link_text("groups").click()

    def create(self, group):
        """Create a new group from *group* (uses name/header/footer)."""
        wd = self.app.wd
        self.open_groups_page()
        # init group creation
        wd.find_element_by_name("new").click()
        self.fill_group_form(group)
        # submit group creation
        wd.find_element_by_name("submit").click()
        self.return_to_groups_page()

    def change_field_value(self, field_name, text):
        """Type *text* into form field *field_name*; None leaves the field untouched."""
        wd = self.app.wd
        if text is not None:
            wd.find_element_by_name(field_name).click()
            wd.find_element_by_name(field_name).clear()
            wd.find_element_by_name(field_name).send_keys(text)

    def fill_group_form(self, group):
        """Fill the group creation/edit form."""
        self.change_field_value("group_name", group.name)
        self.change_field_value("group_header", group.header)
        self.change_field_value("group_footer", group.footer)

    def fill_contact_form(self, contacts):
        """Fill every contact form field from the matching attribute of *contacts*."""
        for field in self.CONTACT_FIELDS:
            self.change_field_value(field, getattr(contacts, field))

    def fill_first_contact(self, contacts):
        """Fill the contact form (kept as a public alias for existing callers)."""
        self.fill_contact_form(contacts)

    def delete_first_group(self):
        """Delete the first group in the list."""
        wd = self.app.wd
        self.open_groups_page()
        self.select_first_group()
        # submit deletion of the first group
        wd.find_element_by_name("delete").click()
        self.return_to_groups_page()

    def select_first_group(self):
        """Tick the checkbox of the first group in the list."""
        wd = self.app.wd
        wd.find_element_by_name("selected[]").click()

    def return_to_groups_page(self):
        """Follow the "group page" link back to the groups list."""
        wd = self.app.wd
        wd.find_element_by_link_text("group page").click()

    def add_new_contact(self, contacts):
        """Create a new contact; previously duplicated the whole field-filling
        sequence inline — now reuses ``fill_contact_form``."""
        wd = self.app.wd
        # add new contact
        wd.find_element_by_link_text("add new").click()
        self.fill_contact_form(contacts)
        # submit the new-contact form
        wd.find_element_by_xpath("//div[@id='content']/form/input[21]").click()

    def delete_first_contact(self):
        """Delete the first contact and accept the confirmation alert."""
        wd = self.app.wd
        self.select_first_contact()
        # submit deletion of the first contact
        wd.find_element_by_name("Delete").click()
        # confirm contact removal
        wd.switch_to_alert().accept()
        self.return_to_groups_page()

    def select_first_contact(self):
        """Tick the checkbox of the first contact in the list."""
        wd = self.app.wd
        wd.find_element_by_name("selected[]").click()

    def mod_first_group(self, group):
        """Edit the first group using the supplied group data."""
        wd = self.app.wd
        self.open_groups_page()
        self.select_first_group()
        wd.find_element_by_name("edit").click()
        self.fill_group_form(group)
        wd.find_element_by_name("update").click()
        self.return_to_groups_page()

    def mod_first_contact(self, contacts):
        """Edit the first contact using the supplied contact data."""
        wd = self.app.wd
        self.select_first_contact()
        wd.find_element_by_xpath("//table[@id='maintable']/tbody/tr[2]/td[8]/a/img").click()
        self.fill_first_contact(contacts)
        wd.find_element_by_name("update").click()
| 38.728682 | 92 | 0.656725 | 686 | 4,996 | 4.41691 | 0.109329 | 0.104951 | 0.227393 | 0.262376 | 0.838284 | 0.790429 | 0.750165 | 0.614521 | 0.553135 | 0.549505 | 0 | 0.001022 | 0.216373 | 4,996 | 128 | 93 | 39.03125 | 0.772925 | 0.032626 | 0 | 0.653465 | 0 | 0 | 0.093173 | 0.017223 | 0 | 0 | 0 | 0 | 0 | 1 | 0.138614 | false | 0 | 0 | 0 | 0.148515 | 0 | 0 | 0 | 0 | null | 0 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 5 |
866dbc7f77e586029044e354f6f28f0bd6af4d22 | 23 | py | Python | src/RegularExpressions.py | vit-shreyansh-kumar/code-droplets | 6fede290131014887bc131639d16237550c0cca9 | [
"MIT"
] | null | null | null | src/RegularExpressions.py | vit-shreyansh-kumar/code-droplets | 6fede290131014887bc131639d16237550c0cca9 | [
"MIT"
] | null | null | null | src/RegularExpressions.py | vit-shreyansh-kumar/code-droplets | 6fede290131014887bc131639d16237550c0cca9 | [
"MIT"
] | null | null | null |
"""Regular-expression droplet: re.search finds the first match of a pattern."""
import re

# re.search(pattern, string) requires both arguments; the original bare
# re.search() call raised TypeError. Search for the first run of digits.
match = re.search(r"\d+", "code droplets 2024")
8691e89173ae229037550269dd42ceea78ff79b1 | 51 | py | Python | main.py | myelmer/piratebays | 380078220eb23187f380252ffc4ea0a75e4ec170 | [
"MIT"
] | null | null | null | main.py | myelmer/piratebays | 380078220eb23187f380252ffc4ea0a75e4ec170 | [
"MIT"
] | null | null | null | main.py | myelmer/piratebays | 380078220eb23187f380252ffc4ea0a75e4ec170 | [
"MIT"
] | null | null | null | import search
search.search_final('hello world') | 17 | 34 | 0.784314 | 7 | 51 | 5.571429 | 0.714286 | 0.615385 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.117647 | 51 | 3 | 34 | 17 | 0.866667 | 0 | 0 | 0 | 0 | 0 | 0.22 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 0.5 | 0 | 0.5 | 0 | 1 | 0 | 0 | null | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | 5 |
86af03d66c025d1cb096d869f4f5527f32db249b | 275 | py | Python | classification/utils/__init__.py | heyPooPy/Knowledge-Distillation-PyTorch | 39e1d70b7f13ea3a59d2b657de35d2fd799fcc65 | [
"MIT"
] | 15 | 2019-07-16T09:00:54.000Z | 2021-07-30T12:48:55.000Z | classification/utils/__init__.py | heyPooPy/Knowledge-Distillation-PyTorch | 39e1d70b7f13ea3a59d2b657de35d2fd799fcc65 | [
"MIT"
] | null | null | null | classification/utils/__init__.py | heyPooPy/Knowledge-Distillation-PyTorch | 39e1d70b7f13ea3a59d2b657de35d2fd799fcc65 | [
"MIT"
] | 4 | 2020-10-17T11:12:04.000Z | 2022-03-17T03:57:55.000Z | """Useful utils
"""
from utils.misc import *
from utils.logger import *
from utils.visualize import *
from utils.eval import *
# progress bar
import os
import sys
from utils.progress.bar import Bar as Bar
sys.path.append(os.path.join(os.path.dirname(__file__), "progress"))
| 21.153846 | 68 | 0.756364 | 43 | 275 | 4.744186 | 0.418605 | 0.220588 | 0.220588 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.127273 | 275 | 12 | 69 | 22.916667 | 0.85 | 0.094545 | 0 | 0 | 0 | 0 | 0.033058 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 0.875 | 0 | 0.875 | 0 | 0 | 0 | 0 | null | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 5 |
86c7ae1ca3c3e8fa91c371c5cfc25c738aa9d18a | 828 | py | Python | lessons/WebDevelopment/BackEndWorkspaceFiles/1_flask_exercise/worldbankapp/routes.py | aauss/DSND_Term2 | ff1ff8edc208652c29bfc25f18c610a02dc9d299 | [
"MIT"
] | null | null | null | lessons/WebDevelopment/BackEndWorkspaceFiles/1_flask_exercise/worldbankapp/routes.py | aauss/DSND_Term2 | ff1ff8edc208652c29bfc25f18c610a02dc9d299 | [
"MIT"
] | null | null | null | lessons/WebDevelopment/BackEndWorkspaceFiles/1_flask_exercise/worldbankapp/routes.py | aauss/DSND_Term2 | ff1ff8edc208652c29bfc25f18c610a02dc9d299 | [
"MIT"
] | null | null | null | from worldbankapp import app
from flask import render_template
@app.route('/')
@app.route('/index')
def index():
    """Serve the landing page at both '/' and '/index'."""
    template_name = 'index.html'
    return render_template(template_name)
@app.route('/project-one')
def project_one():
    """Serve the project-one page."""
    template_name = 'project_one.html'
    return render_template(template_name)
@app.route('/hidden_page')
def hidden_page():
    """Serve the hidden page (not linked from the navigation)."""
    template_name = 'hidden_page.html'
    return render_template(template_name)
# TODO: Add another route. You can use any names you want
# Then go into the templates folder and add an html file that matches the file name you put in the render_template method. You can create a new file by going to the + sign at the top of the workspace and clicking on Create New File. Make sure to place the new html file in the templates folder.
# TODO: Start the web app per the instructions in the instructions.md file and make sure your new html file renders correctly. | 41.4 | 294 | 0.756039 | 138 | 828 | 4.463768 | 0.463768 | 0.113636 | 0.097403 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.165459 | 828 | 20 | 295 | 41.4 | 0.891462 | 0.571256 | 0 | 0 | 0 | 0 | 0.207386 | 0 | 0 | 0 | 0 | 0.05 | 0 | 1 | 0.25 | true | 0 | 0.166667 | 0.25 | 0.666667 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 1 | 0 | 0 | 1 | 1 | 0 | 0 | 1 | 1 | 0 | 0 | 5 |
86c90a971a29d3a96b8ae3d51015e8972a5e5f56 | 586 | py | Python | Unsolved/atm/actions/make_withdrawal.py | briggslalor/Test-Repository- | a17ac1d6502be4e3533a8cd9c91cbc9614b75363 | [
"ADSL"
] | null | null | null | Unsolved/atm/actions/make_withdrawal.py | briggslalor/Test-Repository- | a17ac1d6502be4e3533a8cd9c91cbc9614b75363 | [
"ADSL"
] | null | null | null | Unsolved/atm/actions/make_withdrawal.py | briggslalor/Test-Repository- | a17ac1d6502be4e3533a8cd9c91cbc9614b75363 | [
"ADSL"
] | null | null | null | """Withdrawal Dialog."""
import sys
import questionary
def make_withdrawal(account):
    """Withdrawal Dialog.

    Prompts for a withdrawal amount, validates it, debits the account and
    returns it; exits with an error message on invalid amounts or
    insufficient funds.

    NOTE(review): implemented per the TODO comments that made up the original
    body. Assumes *account* is a dict with a numeric "balance" key — confirm
    against the caller.
    """
    # Capture the withdrawal amount; questionary returns text, so cast to float.
    amount = questionary.text("How much would you like to withdraw?").ask()
    amount = float(amount)

    # Validate the amount of the withdrawal: must be strictly positive.
    if amount <= 0:
        sys.exit("This is not a valid withdrawal amount. Please try again.")

    # Process the withdrawal only when the balance covers it; otherwise exit
    # with an error indicating the account is short of funds.
    if amount <= account["balance"]:
        account["balance"] = account["balance"] - amount
        print("Your withdrawal was successful!")
        return account
    else:
        sys.exit("You do not have enough money in your account to make this withdrawal. Please try again.")
| 36.625 | 140 | 0.74744 | 84 | 586 | 5.202381 | 0.559524 | 0.048055 | 0.045767 | 0.06865 | 0.077803 | 0 | 0 | 0 | 0 | 0 | 0 | 0.002119 | 0.194539 | 586 | 15 | 141 | 39.066667 | 0.923729 | 0.819113 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.066667 | 0 | 1 | 0.333333 | false | 0 | 0.666667 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 1 | 0 | 0 | 1 | 0 | 0 | 1 | 0 | 1 | 0 | 0 | 5 |
810a2fe883ef1d5fd2738b6c8565d8bd9e48e513 | 48,328 | py | Python | DOT_assignment/post_process/plot.py | kachark/FormFlight | 94189581ecd28ab5d9d30e2b171a3fa3296029a7 | [
"MIT"
] | 5 | 2019-11-03T06:35:28.000Z | 2021-05-25T16:21:28.000Z | DOT_assignment/post_process/plot.py | kachark/FormFlight | 94189581ecd28ab5d9d30e2b171a3fa3296029a7 | [
"MIT"
] | null | null | null | DOT_assignment/post_process/plot.py | kachark/FormFlight | 94189581ecd28ab5d9d30e2b171a3fa3296029a7 | [
"MIT"
] | null | null | null |
""" @file ploy.py
"""
import re
import pandas as pd
import numpy as np
import matplotlib.pyplot as plt
from matplotlib.patches import Circle
from mpl_toolkits import mplot3d
from matplotlib.collections import PatchCollection
import matplotlib.ticker as ticker
import scipy.stats as sts
from . import post_process
# # TeX fonts
# import matplotlib
# matplotlib.rcParams['mathtext.fontset'] = 'custom'
# matplotlib.rcParams['mathtext.rm'] = 'Bitstream Vera Sans'
# matplotlib.rcParams['mathtext.it'] = 'Bitstream Vera Sans:italic'
# matplotlib.rcParams['mathtext.bf'] = 'Bitstream Vera Sans:bold'
# # matplotlib.pyplot.title(r'ABC123 vs $\mathrm{ABC123}^{123}$')
# matplotlib.rcParams['mathtext.fontset'] = 'stix'
# matplotlib.rcParams['font.family'] = 'STIXGeneral'
# # matplotlib.pyplot.title(r'ABC123 vs $\mathrm{ABC123}^{123}$')
from matplotlib import rc
rc('text', usetex=True)
rc('font', family='serif')
# rc('font', size=14)
# rc('legend', fontsize=13)
# rc('text.latex', preamble=r'\usepackage{cmbright}')
def plot_costs(unpacked):
    """Plot normalized control costs versus time for every simulation.

    ``unpacked`` maps a simulation name (e.g. 'AssignmentCustom') to its
    metrics dict.  Costs are divided by the summed optimal cost at the
    initial time, so curves from different simulations are comparable.
    """
    linewidth = 4
    labelsize = 40
    fontsize = 40

    fig, axs = plt.subplots(1,1)
    axs.set_xlabel('Time (s)', fontsize=fontsize)
    # axs.set_ylabel('Cost', fontsize=fontsize)
    axs.set_ylabel('Normalized Cost', fontsize=fontsize)
    # axs.set_title('Cost VS Time')

    for sim_name, metrics in unpacked.items():

        tout = metrics['tout']
        yout = metrics['yout']
        final_cost = metrics['final_cost']
        cost_to_go = metrics['cost_to_go']
        optimal_cost = metrics['optimal_cost']

        # Normalization constant: total optimal cost over agents at t=0.
        summed_opt_cost = np.sum(optimal_cost[0, :])

        # Legend label is the simulation name with the 'Assignment' prefix dropped.
        label = sim_name.split('Assignment', 1)[1]

        ### cost plots
        if sim_name == 'AssignmentCustom':

            # axs.plot(tout, summed_opt_cost*np.ones((yout.shape[0])), '--k', label='Optimal cost with no switching')
            # axs.plot(tout, np.sum(final_cost, axis=1), '--c', label='Cum. Stage Cost'+' '+sim_name)
            # axs.plot(tout, np.sum(cost_to_go, axis=1), '--r', label='Cost-to-go'+' '+sim_name)

            # normalized costs
            axs.plot(tout, np.ones((yout.shape[0])), '--k', linewidth=linewidth, label='Optimal cost')
            axs.plot(tout, np.sum(final_cost, axis=1)/summed_opt_cost, '--c', linewidth=linewidth, label='Cum. Stage Cost'+' '+label)
            axs.plot(tout, np.sum(cost_to_go, axis=1)/summed_opt_cost, '--r', linewidth=linewidth, label='Cost-to-go'+' '+label)
        else:
            # axs.plot(tout, np.sum(final_cost, axis=1), '-c', label='Cum. Stage Cost'+' '+sim_name)
            ## axs.plot(tout, np.sum(cost_to_go, axis=1), '-r', label='Cost-to-go'+' '+sim_name)

            # normalized costs
            axs.plot(tout, np.sum(final_cost, axis=1)/summed_opt_cost, '-c', linewidth=linewidth, label='Cum. Stage Cost'+' '+label)

    axs.xaxis.set_tick_params(labelsize=labelsize)
    axs.yaxis.set_tick_params(labelsize=labelsize)

    # reorder the legend terms
    handles, labels = axs.get_legend_handles_labels()
    #TODO hardcoded - fix
    # NOTE(review): the index shuffle below assumes the exact number/order of
    # plot calls above (4 legend entries, falling back to 2) — confirm before
    # adding or removing any plot() call.
    try:
        labels = [labels[1], labels[0], labels[2], labels[3]]
        handles = [handles[1], handles[0], handles[2], handles[3]]
    except IndexError:
        # # DYN
        # labels = [labels[1], labels[0]]
        # handles = [handles[1], handles[0]]

        labels = [labels[1], labels[0]]
        handles = [handles[1], handles[0]]

    axs.legend(handles, labels, loc='center right', bbox_to_anchor=(1.0, 0.25), fontsize=fontsize)

    # Agent-by-agent cost plots on 1 figure
    # plt.figure()
    # for sim_name, metrics in unpacked.items():

    #     nagents = metrics['nagents']
    #     tout = metrics['tout']
    #     final_cost = metrics['final_cost']
    #     cost_to_go = metrics['cost_to_go']

    #     for zz in range(nagents):
    #         plt.plot(tout, final_cost[:, zz], '-.c', label='Cum. Stage Cost ({0})'.format(zz))
    #         plt.plot(tout, cost_to_go[:, zz], '-.r', label='Cost-to-go (assuming no switch) ({0})'.format(zz))

    #     plt.legend()
def plot_cost_histogram(unpacked_ensemble_metric):
    """Histogram of control-expenditure differences, stacked per series ('Dyn', 'EMD')."""
    fontsize = 32
    labelsize = 32
    series_labels = ['Dyn', 'EMD']

    fig, ax = plt.subplots(1, 1)
    ax.set_xlabel('Control Expenditure Difference (EMD - Dyn)/Dyn', fontsize=fontsize)
    ax.set_ylabel('Frequency', fontsize=fontsize)
    ax.hist(unpacked_ensemble_metric, histtype='bar', stacked=True, bins=10,
            align='left', label=series_labels)

    ax.xaxis.set_tick_params(labelsize=labelsize)
    ax.yaxis.set_tick_params(labelsize=labelsize)
    ax.xaxis.offsetText.set_fontsize(fontsize)
    ax.legend(fontsize=fontsize)
# TODO move to a different file
def atoi(text):
    """Convert *text* to int when it is all digits; otherwise return it unchanged."""
    if text.isdigit():
        return int(text)
    return text
# TODO move to a different file
def natural_keys(text):
    '''
    Sort key for human ("natural") ordering, e.g. '2v2' before '10v10'.

    alist.sort(key=natural_keys) sorts in human order
    http://nedbatchelder.com/blog/200712/human_sorting.html
    (See Toothy's implementation in the comments)
    '''
    # Split on digit runs and convert each numeric chunk to int in place.
    return [int(chunk) if chunk.isdigit() else chunk
            for chunk in re.split(r'(\d+)', text)]
def plot_ensemble_cost_histogram(metrics_to_compare):
    """Overlaid histograms of swarm LQ cost differences, one series per ensemble."""
    fontsize = 40
    labelsize = 40

    fig, ax = plt.subplots(1, 1)
    ax.set_xlabel('Control Expenditure Difference (EMD - Dyn)/Dyn', fontsize=fontsize)
    ax.set_ylabel('Frequency', fontsize=fontsize)

    # Column labels such as '5v5' extracted from each ensemble name.
    labels = [re.search('\d+v\d+', name).group() for name in metrics_to_compare.keys()]
    frame = pd.DataFrame.from_dict(metrics_to_compare)
    frame.columns = labels

    # Order columns by agent count (natural sort), not lexicographically.
    labels.sort(key=natural_keys)
    frame = frame[labels]

    for label, series in frame.iteritems():
        bins = int(len(series) / 4)
        series.hist(ax=ax, bins=bins, align='left', edgecolor='k', alpha=0.5, label=label)
        # series.plot.kde(ax=ax)

    ax.grid(False)
    ax.xaxis.set_tick_params(labelsize=labelsize)
    ax.yaxis.set_tick_params(labelsize=labelsize)
    ax.xaxis.offsetText.set_fontsize(fontsize)
    ax.legend(fontsize=fontsize)
def plot_assignments(unpacked):
    """Plot agent-to-target assignments over time for each simulation.

    Produces, per simulation: a 2D assignment-vs-time plot, and a 3D figure
    showing each assignment (agents on x, assigned targets on z) at the time
    it became active, with a cumulative projection at the final time.
    """

    for sim_name, metrics in unpacked.items():

        dx = metrics['dx']
        nagents = metrics['nagents']
        ntargets = metrics['ntargets']
        tout = metrics['tout']
        yout = metrics['yout']

        # Assignment columns follow the 2*dx state columns of each agent.
        assignments = yout[:, nagents*2*dx:].astype(np.int32)
        assignment_switches = post_process.find_switches(tout, yout, nagents, nagents, dx, dx)

        # recreate assignments per switch
        asst_switch_indices = set()
        asst_switch_indices.add(0) # add the origin assignment
        for ii in range(nagents):
            switch_indices = assignment_switches[ii]
            for ind in switch_indices:
                asst_switch_indices.add(ind)

        # order the switch time
        asst_switch_indices = sorted(asst_switch_indices) # becomes ordered list

        # get assignment switches in increasing time order
        asst_to_plot = np.zeros((len(asst_switch_indices), nagents)) # (starting assignment + switches)
        asst_to_plot[0, :] = assignments[0, :]
        for tt, ind in enumerate(asst_switch_indices):
            asst_to_plot[tt, :] = assignments[ind, :]

        # PLOT TOO BUSY, deprecate
        plt.figure()
        # plt.title("Agent-Target Assignments")
        plt.xlabel('time (s)')
        plt.ylabel('Assigned-to Target')
        for ii in range(nagents):
            plt.plot(tout, assignments[:, ii], '-', label='A{0}'.format(ii))
        plt.legend()

        # TEST
        fig = plt.figure()
        ax = plt.axes(projection='3d')
        # fig, ax = plt.subplots()
        ax.set_title(sim_name)

        asst_array = np.zeros((nagents, tout.shape[0], ntargets)) # want to show propogation of assignment over time in y-axis

        # construct assignment array
        for tt in range(tout.shape[0]):
            time = tout[tt]
            for ii in range(nagents): # iterate consecutively through agents
                # ax.plot3D(agent_i, tout, target_j, '-r', label=agent_traj_label)
                jj = assignments[tt, ii]
                asst_array[ii, tt, jj] = 1  # one-hot: agent ii assigned to target jj at step tt
                # change color and marker if there's a switch

        # # stack plots on top of each other
        # agents = np.arange(nagents)
        # for asst_num, (switch_ind, assignment) in enumerate(zip(asst_switch_indices, asst_to_plot)):
        #     assigned_to_targets = assignment
        #     # ax.plot(agents, assigned_to_targets, marker='s', label='Assignment{0}'.format(asst_num))
        #     ax.plot(agents, assigned_to_targets, label='Assignment{0}'.format(asst_num))
        #     # if sim_name != 'AssignmentCustom':
        #     #     ax.fill_between(agents, assigned_to_targets, asst_to_plot[1], color='blue')
        # ax.set_xlabel('agents')
        # ax.set_ylabel('targets')
        # ax.legend()

        # plot 2d assignment plots in 3d at correct time step
        cumulative_asst_label = 'Cumulative Assignment Projection'
        agents = np.arange(nagents)
        for asst_num, (switch_ind, assignment) in enumerate(zip(asst_switch_indices, asst_to_plot)):
            switch_time = tout[switch_ind]
            assigned_to_targets = assignment
            # Only the first projection carries the legend label.
            if asst_num >= 1:
                cumulative_asst_label = '__nolabel__'
            ax.plot(agents, assigned_to_targets, tout[-1], zdir='y', color='blue', label=cumulative_asst_label)
            color = next(ax._get_lines.prop_cycler)['color']
            ax.plot(agents, assigned_to_targets, switch_time, '-s', color=color, zdir='y', label='Assignment{0}'.format(asst_num))
            ax.scatter(agents, assigned_to_targets, tout[-1], color=color, zdir='y')
            ax.add_collection3d(plt.fill_between(agents, assigned_to_targets, asst_to_plot[0], color='blue'), zs=tout[-1], zdir='y')

        ax.set_xlabel('agents')
        ax.set_ylabel('time (s)')
        ax.set_zlabel('targets')
        ax.legend()
        ax.set_ylim3d(0, tout[-1])
        ax.xaxis.set_ticks(np.arange(nagents))
        ax.zaxis.set_ticks(np.arange(ntargets))
def plot_ensemble_switch_histogram(metrics_to_compare):
    """ Plots histogram of assignment switches for multiple ensembles.

    metrics_to_compare: dict mapping ensemble names (each containing an
    'NvM' substring such as '5v5') to a sequence of per-simulation switch
    counts. One overlaid histogram is drawn per ensemble, ordered by the
    number of agents.
    """
    fontsize = 40
    labelsize = 40
    fig, axs = plt.subplots(1,1)
    axs.set_xlabel('Assignment Switches', fontsize=fontsize)
    axs.set_ylabel('Frequency', fontsize=fontsize)
    # Using DataFrames
    labels = []
    for ensemble_name in metrics_to_compare.keys():
        # raw string: avoids the invalid escape sequence warning for \d
        labels.append(re.search(r'\d+v\d+', ensemble_name).group())
    metrics_df = pd.DataFrame.from_dict(metrics_to_compare)
    metrics_df.columns = labels
    # order data by number of agents
    labels.sort(key=natural_keys)
    metrics_df = metrics_df[labels]
    # DataFrame.iteritems() was removed in pandas 2.0; items() is equivalent
    for label, data in metrics_df.items():
        # guard against a zero bin count for very small ensembles
        nbins = max(1, int(len(data)/4))
        data.hist(ax=axs, bins=nbins, align='left', edgecolor='k', alpha=0.5, label=label)
        # data.plot.kde(ax=axs)
    axs.grid(False)
    axs.xaxis.set_tick_params(labelsize=labelsize)
    axs.yaxis.set_tick_params(labelsize=labelsize)
    tick_spacing = 1
    axs.xaxis.set_major_locator(ticker.MultipleLocator(tick_spacing))
    axs.xaxis.offsetText.set_fontsize(fontsize)
    axs.legend(fontsize=fontsize)
def plot_ensemble_avg_switch(metrics_to_compare):
    """ Plots average number of assignment switches over time for multiple ensembles.

    metrics_to_compare: dict mapping ensemble names (each containing an
    'NvM' substring such as '5v5') to a scalar average switch count. Drawn
    as a bar chart with one bar per ensemble, ordered by number of agents.
    """
    fontsize = 40
    labelsize = 40
    fig, axs = plt.subplots(1,1)
    axs.set_xlabel('Agents', fontsize=fontsize)
    # raw string: '\#' is an invalid escape sequence in a plain string literal
    axs.set_ylabel(r'Average \# Assign. Switches', fontsize=fontsize)
    # Using DataFrames
    labels = []
    for ensemble_name in metrics_to_compare.keys():
        # raw string: avoids the invalid escape sequence warning for \d
        labels.append(re.search(r'\d+v\d+', ensemble_name).group())
    metrics_df = pd.DataFrame(metrics_to_compare, index=[0])
    metrics_df.columns = labels
    # order data by number of agents
    labels.sort(key=natural_keys)
    metrics_df = metrics_df[labels]
    metrics = {'Ensemble': labels, 'Average Assignment Switches': metrics_df.values.tolist()[0]}
    metrics_df = pd.DataFrame(metrics)
    # metrics_df.plot.bar(x='Ensemble', rot=0, fontsize=fontsize)
    values = metrics_df['Average Assignment Switches'].values.tolist()
    xpos = [i for i, _ in enumerate(labels)]
    axs.bar(xpos, values, alpha=0.5)
    axs.xaxis.set_tick_params(labelsize=labelsize)
    axs.yaxis.set_tick_params(labelsize=labelsize)
    axs.set_xticks(xpos)
    axs.set_xticklabels(labels)
    # axs.legend(fontsize=fontsize)
def plot_trajectory(unpacked):
    """ Plots trajectory in 2D or 3D for homogeneous identical double integrator and linearized quadcopters

    unpacked: dict mapping sim_name -> metrics dict. Each metrics dict is read
    for 'dx', 'du', 'dim', 'nagents', 'ntargets', 'tout', 'yout' and
    'stationary_states'. All simulations are drawn onto one shared figure so
    'AssignmentCustom' (solid lines) and 'AssignmentEMD' (dashed lines) can be
    compared directly. dx == 12 states are delegated to plot_trajectory_qc;
    dx == 6 states are handled here as double integrators.
    """
    dim = 2 # default value
    # update dim
    for sim_name, metrics in unpacked.items():
        dim = metrics['dim']
    # want to display all trajectories on same figure
    linewidth_3d = 2
    linewidth = 4
    markersize = 8
    scatter_width = markersize**2
    textsize = 32
    fontsize = 40
    fontweight = 'bold'
    labelsize = 40
    axispad = 18
    labelpad = 40
    if dim == 2:
        fig, ax = plt.subplots()
    if dim == 3:
        fig = plt.figure()
        fig.tight_layout()
        fig.subplots_adjust(right=0.8)
        ax = plt.axes(projection='3d')
        # TEST
        # TODO 2d slice
        fig2 = plt.figure()
        ax2 = fig2.add_subplot(111)
    for sim_name, metrics in unpacked.items():
        dx = metrics['dx']
        du = metrics['du']
        dim = metrics['dim']
        nagents = metrics['nagents']
        ntargets = metrics['ntargets']
        tout = metrics['tout']
        yout = metrics['yout']
        stationary_states = metrics['stationary_states']
        # NOTE(review): dx is passed for both of the last two arguments; the
        # second looks like it should be du -- confirm against the
        # post_process.find_switches signature
        assignment_switches = post_process.find_switches(tout, yout, nagents, ntargets, dx, dx)
        agent_traj_label = 'Agent Path (Custom)'
        agent_start_pt_label = 'Agent Start'
        target_start_pt_label = 'Target Start'
        target_traj_label = 'Target Path'
        stationary_pt_label = 'Terminal State'
        # TEST # TODO REMOVE EVENTUALLY
        if dx == 12:
            # linearized quadcopter state: delegate to the quadcopter plotter
            agent_model = 'Linearized_Quadcopter'
            target_model = 'Linearized_Quadcopter'
            labels = [agent_traj_label, agent_start_pt_label, target_start_pt_label, target_traj_label, stationary_pt_label]
            plot_params = [linewidth, linewidth_3d, markersize, scatter_width, textsize, fontsize, fontweight, labelsize, axispad, labelpad]
            # NOTE(review): fig2/ax2 only exist when dim == 3 -- confirm that
            # dx == 12 implies dim == 3 upstream
            figures = [(fig, ax), (fig2, ax2)]
            plot_trajectory_qc(figures, plot_params, sim_name, dx, du, dim, nagents, ntargets, tout, yout, stationary_states,
                assignment_switches, labels)
            continue
        if dx == 6:
            agent_model = 'Double_Integrator'
            target_model = 'Double_Integrator'
        if dim == 2: # and agent/target models both double integrator (omit requirement for now)
            ### Agent / Target Trajectories
            # optimal trajectories (solid lines)
            if sim_name == 'AssignmentCustom':
                for zz in range(nagents):
                    # avoid repeated legend entries after the first agent
                    if zz >= 1:
                        agent_traj_label = '__nolabel__'
                        agent_start_pt_label = '__nolabel__'
                        target_start_pt_label = '__nolabel__'
                        target_traj_label = '__nolabel__'
                    # agent state over time
                    y_agent = yout[:, zz*dx:(zz+1)*dx]
                    # plot agent trajectory with text
                    ax.plot(y_agent[0, 0], y_agent[0, 1], 'rs', markersize=markersize, label=agent_start_pt_label)
                    ax.plot(y_agent[:, 0], y_agent[:, 1], '-r', linewidth=linewidth, label=agent_traj_label)
                    ax.text(y_agent[0, 0], y_agent[0, 1], 'A{0}'.format(zz), fontsize=textsize)
                    # # plot location of assignment switches
                    # patches = []
                    # for switch_ind in assignment_switches[zz]:
                    #     ci = Circle( (y_agent[switch_ind, 0], y_agent[switch_ind, 1]), 0.2, color='b', fill=True)
                    #     patches.append(ci)
                    # p = PatchCollection(patches)
                    # ax.add_collection(p)
                    # plot target trajectory
                    y_target = yout[:, (zz+nagents)*dx:(zz+nagents+1)*dx]
                    ax.plot(y_target[0, 0], y_target[0, 1], 'bs', markersize=markersize, label=target_start_pt_label)
                    ax.plot(y_target[:, 0], y_target[:, 1], '-b', linewidth=linewidth, label=target_traj_label)
                    ax.text(y_target[0, 0], y_target[0, 1], 'T{0}'.format(zz), fontsize=textsize)
                # TODO deprecated
                # ### stationary points
                # for zz in range(ntargets):
                #     if zz >= 1:
                #         stationary_pt_label = '__nolabel__'
                #     offset = stationary_states[zz*dx:(zz+1)*dx]
                #     ax.plot(offset[0], offset[1], 'ks', markersize=markersize, label=stationary_pt_label)
                #     ax.text(offset[0], offset[1], 'C{0}'.format(zz), fontsize=textsize)
                ax.set_xlabel("x", fontweight=fontweight, fontsize=fontsize)
                ax.set_ylabel("y", fontweight=fontweight, fontsize=fontsize)
            elif sim_name == 'AssignmentEMD':
                agent_traj_label = 'Agent Path (EMD)'
                # non-optimal trajectories (dotted lines)
                for zz in range(nagents):
                    if zz >= 1:
                        agent_traj_label = '__nolabel__'
                    # agent state over time
                    y_agent = yout[:, zz*dx:(zz+1)*dx]
                    # plot agent trajectory with text
                    ax.plot(y_agent[0, 0], y_agent[0, 1], 'rs', markersize=markersize)
                    ax.plot(y_agent[:, 0], y_agent[:, 1], '--r', linewidth=linewidth, label=agent_traj_label)
                    ax.text(y_agent[0, 0], y_agent[0, 1], 'A{0}'.format(zz), fontsize=textsize)
                    # plot location of assignment switches
                    # patches = []
                    # for switch_ind in assignment_switches[zz]:
                    #     ci = Circle( (y_agent[switch_ind, 0], y_agent[switch_ind, 1]), 2, color='m', fill=True)
                    #     patches.append(ci)
                    #     ax.plot(y_agent[switch_ind, 0], y_agent[switch_ind, 1], 'ms', markersize=markersize)
                    # p = PatchCollection(patches)
                    # ax.add_collection(p)
                    # plot target trajectory
                    y_target = yout[:, (zz+nagents)*dx:(zz+nagents+1)*dx]
                    ax.plot(y_target[0, 0], y_target[0, 1], 'bs', markersize=markersize)
                    ax.plot(y_target[:, 0], y_target[:, 1], '-b', linewidth=linewidth)
                    ax.text(y_target[0, 0], y_target[0, 1], 'T{0}'.format(zz), fontsize=textsize)
                # TODO deprecated
                # ### stationary points
                # for zz in range(ntargets):
                #     if zz >= 1:
                #         stationary_pt_label = '__nolabel__'
                #     offset = stationary_states[zz*dx:(zz+1)*dx]
                #     ax.plot(offset[0], offset[1], 'ks', markersize=markersize)
                #     ax.text(offset[0], offset[1], 'C{0}'.format(zz), fontsize=textsize)
                ax.set_xlabel("x", fontweight=fontweight, fontsize=fontsize)
                ax.set_ylabel("y", fontweight=fontweight, fontsize=fontsize)
            # dim == 2
            ax.xaxis.set_tick_params(labelsize=labelsize)
            ax.yaxis.set_tick_params(labelsize=labelsize)
            # ax.text2D(0.40, 0.95, 'Agent-Target Trajectories', fontweight='bold', fontsize=14, transform=ax.transAxes)
            ax.legend(loc='lower right', fontsize=14)
        if dim == 3:
            # optimal trajectories (solid lines)
            if sim_name == 'AssignmentCustom':
                # agent/target trajectories
                for zz in range(nagents):
                    # avoid repeated legend entries
                    if zz >= 1:
                        agent_traj_label = '__nolabel__'
                        agent_start_pt_label = '__nolabel__'
                        target_start_pt_label = '__nolabel__'
                        target_traj_label = '__nolabel__'
                    # agent state over time
                    y_agent = yout[:, zz*dx:(zz+1)*dx]
                    # plot agent trajectory with text
                    ax.scatter3D(y_agent[0, 0], y_agent[0, 1], y_agent[0, 2], color='r', s=scatter_width, label=agent_start_pt_label)
                    ax.plot3D(y_agent[:, 0], y_agent[:, 1], y_agent[:, 2], '-r', linewidth=linewidth_3d, label=agent_traj_label)
                    ax.text(y_agent[0, 0], y_agent[0, 1], y_agent[0, 2], 'A{0}'.format(zz), fontsize=textsize)
                    # # plot location of assignment switches
                    # for switch_ind in assignment_switches[zz]:
                    #     ax.scatter3D(y_agent[switch_ind, 0], y_agent[switch_ind, 1], y_agent[switch_ind, 2], color='m') # TODO
                    # plot target trajectory
                    y_target = yout[:, (zz+nagents)*dx:(zz+nagents+1)*dx]
                    ax.scatter3D(y_target[0, 0], y_target[0, 1], y_target[0, 2], color='b', s=scatter_width, label=target_start_pt_label)
                    ax.plot3D(y_target[:, 0], y_target[:, 1], y_target[:, 2], '-b', linewidth=linewidth_3d, label=target_traj_label)
                    ax.text(y_target[0, 0], y_target[0, 1], y_target[0, 2], 'T{0}'.format(zz), fontsize=textsize)
                    # TEST
                    # TODO 2d slice
                    # trajectories
                    ax2.plot(y_agent[:, 0], y_agent[:, 1], '-r', linewidth=linewidth, label=agent_traj_label)
                    ax2.plot(y_target[:, 0], y_target[:, 1], '-b', linewidth=linewidth, label=target_traj_label)
                    # points
                    ax2.plot(y_agent[0, 0], y_agent[0, 1], 'ro', markersize=markersize, label=agent_start_pt_label)
                    ax2.plot(y_target[0, 0], y_target[0, 1], 'bo', markersize=markersize, label=target_start_pt_label)
                    # text
                    ax2.text(y_agent[0, 0], y_agent[0, 1], 'A{0}'.format(zz), fontsize=textsize)
                    ax2.text(y_target[0, 0], y_target[0, 1], 'T{0}'.format(zz), fontsize=textsize)
                # TODO deprecated
                # ### stationary points
                # for zz in range(ntargets):
                #     if zz >= 1:
                #         stationary_pt_label = '__nolabel__'
                #     offset = stationary_states[zz*dx:(zz+1)*dx]
                #     ax.scatter3D(offset[0], offset[1], offset[2], color='k', s=scatter_width, label=stationary_pt_label)
                #     ax.text(offset[0], offset[1], offset[2], 'C{0}'.format(zz), fontsize=textsize)
                #     # TEST
                #     # TODO 2d slice
                #     ax2.plot(offset[0], offset[1], 'ko', markersize=markersize, label=stationary_pt_label)
                #     ax2.text(offset[0], offset[1], 'C{0}'.format(zz), fontsize=textsize)
                ax.set_xlabel("x", fontweight=fontweight, fontsize=fontsize)
                ax.set_ylabel("y", fontweight=fontweight, fontsize=fontsize)
                ax.set_zlabel("z", fontweight=fontweight, fontsize=fontsize)
                ax2.set_xlabel("x", fontweight=fontweight, fontsize=fontsize)
                ax2.set_ylabel("y", fontweight=fontweight, fontsize=fontsize)
            elif sim_name == 'AssignmentEMD':
                # non-optimal trajectories (dotted lines)
                agent_traj_label = 'Agent Path (EMD)'
                # agent/target trajectories
                for zz in range(nagents):
                    # avoid repeated legend entries
                    if zz >= 1:
                        agent_traj_label = '__nolabel__'
                    # agent state over time
                    y_agent = yout[:, zz*dx:(zz+1)*dx]
                    # plot agent trajectory with text
                    ax.scatter3D(y_agent[0, 0], y_agent[0, 1], y_agent[0, 2], color='r')
                    ax.plot3D(y_agent[:, 0], y_agent[:, 1], y_agent[:, 2], '--r', linewidth=linewidth_3d, label=agent_traj_label)
                    ax.text(y_agent[0, 0], y_agent[0, 1], y_agent[0, 2], 'A{0}'.format(zz), fontsize=textsize)
                    # # plot location of assignment switches
                    # for switch_ind in assignment_switches[zz]:
                    #     ax.scatter3D(y_agent[switch_ind, 0], y_agent[switch_ind, 1], y_agent[switch_ind, 2], color='m') # TODO
                    # plot target trajectory
                    y_target = yout[:, (zz+nagents)*dx:(zz+nagents+1)*dx]
                    ax.scatter3D(y_target[0, 0], y_target[0, 1], y_target[0, 2], color='b')
                    ax.plot3D(y_target[:, 0], y_target[:, 1], y_target[:, 2], '-b')
                    ax.text(y_target[0, 0], y_target[0, 1], y_target[0, 2], 'T{0}'.format(zz), fontsize=textsize)
                    # TEST
                    # TODO 2d slice
                    # trajectories
                    ax2.plot(y_agent[:, 0], y_agent[:, 1], '--r', linewidth=linewidth, label=agent_traj_label)
                    ax2.plot(y_target[:, 0], y_target[:, 1], '-b', linewidth=linewidth)
                    # points
                    ax2.plot(y_agent[0, 0], y_agent[0, 1], 'ro', markersize=markersize)
                    ax2.plot(y_target[0, 0], y_target[0, 1], 'bo', markersize=markersize)
                    # text
                    ax2.text(y_agent[0, 0], y_agent[0, 1], 'A{0}'.format(zz), fontsize=textsize)
                    ax2.text(y_target[0, 0], y_target[0, 1], 'T{0}'.format(zz), fontsize=textsize)
                # TODO deprecated
                # # stationary locations
                # for zz in range(ntargets):
                #     offset = stationary_states[zz*dx:(zz+1)*dx]
                #     ax.scatter3D(offset[0], offset[1], offset[2], color='k')
                #     ax.text(offset[0], offset[1], offset[2], 'C{0}'.format(zz), fontsize=textsize)
                #     # TEST
                #     # TODO 2d slice
                #     ax2.plot(offset[0], offset[1], 'ko', markersize=markersize)
                #     ax2.text(offset[0], offset[1], 'C{0}'.format(zz), fontsize=textsize)
                ax.set_xlabel("x", fontweight=fontweight, fontsize=fontsize)
                ax.set_ylabel("y", fontweight=fontweight, fontsize=fontsize)
                ax.set_zlabel("z", fontweight=fontweight, fontsize=fontsize)
                ax2.set_xlabel("x", fontweight=fontweight, fontsize=fontsize)
                ax2.set_ylabel("y", fontweight=fontweight, fontsize=fontsize)
            # dim = 3
            tick_spacing = 1000
            ax.xaxis.set_major_locator(ticker.MultipleLocator(tick_spacing))
            ax.yaxis.set_major_locator(ticker.MultipleLocator(tick_spacing))
            ax.zaxis.set_major_locator(ticker.MultipleLocator(tick_spacing))
            ax.xaxis.set_tick_params(labelsize=labelsize)
            ax.yaxis.set_tick_params(labelsize=labelsize)
            ax.zaxis.set_tick_params(labelsize=labelsize)
            ax.tick_params(axis='x', which='major', pad=axispad)
            ax.tick_params(axis='y', which='major', pad=axispad)
            ax.tick_params(axis='z', which='major', pad=axispad)
            ax.xaxis.labelpad = labelpad
            ax.yaxis.labelpad = labelpad
            ax.zaxis.labelpad = labelpad
            # TEST
            # TODO 2d slice
            ax2.xaxis.set_tick_params(labelsize=labelsize)
            ax2.yaxis.set_tick_params(labelsize=labelsize)
            # ax.text2D(0.40, 0.95, 'Agent-Target Trajectories', fontweight='bold', fontsize=14, transform=ax.transAxes)
            # ax.legend(loc='lower right', fontsize=fontsize)
            # # reorder the legend terms
            # handles, labels = ax.get_legend_handles_labels()
            # labels = [labels[1], labels[0], labels[2], labels[3]]
            # handles = [handles[1], handles[0], handles[2], handles[3]]
            legend = ax.legend(loc='center left', bbox_to_anchor=(1.07, 0.5), fontsize=fontsize)
            # legend.remove()
        if dim == 2:
            ax.legend(loc='center left', bbox_to_anchor=(1.07, 0.5), fontsize=fontsize)
        if dim == 3:
            # ax2.legend(loc='lower right', fontsize=fontsize-4)
            ax2.legend(loc='center left', bbox_to_anchor=(1.07, 0.5), fontsize=fontsize)
# ************ TEST LINEARIZED QC ***************
def plot_trajectory_qc(figures, plot_params, sim_name, dx, du, dim, nagents, ntargets, tout, yout, stationary_states, assignment_switches, labels):
    """ Plots trajectory in 2D/3D for homogeneous identical linearized quadcopter

    figures: [(fig, ax), (fig2, ax2)] -- the main (possibly 3D) axes and a
        secondary 2D-slice axes, created by the caller.
    plot_params: [linewidth, linewidth_3d, markersize, scatter_width,
        textsize, fontsize, fontweight, labelsize, axispad, labelpad].
    labels: [agent_traj, agent_start, target_start, target_traj, stationary].
    Draws 'AssignmentCustom' runs with solid lines and 'AssignmentEMD' runs
    with dashed lines onto the shared axes.
    """
    # plot parameters
    fig = figures[0][0]
    ax = figures[0][1]
    fig2 = figures[1][0]
    ax2 = figures[1][1]
    linewidth = plot_params[0]
    linewidth_3d = plot_params[1]
    markersize = plot_params[2]
    scatter_width = plot_params[3]
    textsize = plot_params[4]
    fontsize = plot_params[5]
    fontweight = plot_params[6]
    labelsize = plot_params[7]
    axispad = plot_params[8]
    # extra label padding relative to the caller's setting
    labelpad = plot_params[9] + 4
    agent_traj_label = labels[0]
    agent_start_pt_label = labels[1]
    target_start_pt_label = labels[2]
    target_traj_label = labels[3]
    stationary_pt_label = labels[4]
    if dim == 2:
        ### Agent / Target Trajectories
        # optimal trajectories (solid lines)
        if sim_name == 'AssignmentCustom':
            for zz in range(nagents):
                # avoid repeated legend entries after the first agent
                if zz >= 1:
                    agent_traj_label = '__nolabel__'
                    agent_start_pt_label = '__nolabel__'
                    target_start_pt_label = '__nolabel__'
                    target_traj_label = '__nolabel__'
                # agent state over time
                y_agent = yout[:, zz*dx:(zz+1)*dx]
                # plot agent trajectory with text
                ax.plot(y_agent[0, 0], y_agent[0, 1], 'rs', markersize=markersize, label=agent_start_pt_label)
                ax.plot(y_agent[:, 0], y_agent[:, 1], '-r', linewidth=linewidth, label=agent_traj_label)
                ax.text(y_agent[0, 0], y_agent[0, 1], 'A{0}'.format(zz), fontsize=textsize)
                # plot location of assignment switches
                patches = []
                for switch_ind in assignment_switches[zz]:
                    ci = Circle( (y_agent[switch_ind, 0], y_agent[switch_ind, 1]), 0.2, color='b', fill=True)
                    patches.append(ci)
                p = PatchCollection(patches)
                ax.add_collection(p)
                # plot target trajectory
                y_target = yout[:, (zz+nagents)*dx:(zz+nagents+1)*dx]
                ax.plot(y_target[0, 0], y_target[0, 1], 'bs', markersize=markersize, label=target_start_pt_label)
                ax.plot(y_target[:, 0], y_target[:, 1], '-b', linewidth=linewidth, label=target_traj_label)
                ax.text(y_target[0, 0], y_target[0, 1], 'T{0}'.format(zz), fontsize=textsize)
            # ### stationary points
            # for zz in range(ntargets):
            #     if zz >= 1:
            #         stationary_pt_label = '__nolabel__'
            #     offset = stationary_states[zz*dx:(zz+1)*dx]
            #     ax.plot(offset[0], offset[1], 'ks', markersize=markersize, label=stationary_pt_label)
            #     ax.text(offset[0], offset[1], 'C{0}'.format(zz), fontsize=textsize)
            ax.set_xlabel("x", fontweight=fontweight, fontsize=fontsize)
            ax.set_ylabel("y", fontweight=fontweight, fontsize=fontsize)
        elif sim_name == 'AssignmentEMD':
            agent_traj_label = 'Agent Path (EMD)'
            # non-optimal trajectories (dotted lines)
            for zz in range(nagents):
                if zz >= 1:
                    agent_traj_label = '__nolabel__'
                # agent state over time
                y_agent = yout[:, zz*dx:(zz+1)*dx]
                # plot agent trajectory with text
                ax.plot(y_agent[0, 0], y_agent[0, 1], 'rs', markersize=markersize)
                ax.plot(y_agent[:, 0], y_agent[:, 1], '--r', linewidth=linewidth, label=agent_traj_label)
                ax.text(y_agent[0, 0], y_agent[0, 1], 'A{0}'.format(zz), fontsize=textsize)
                # plot location of assignment switches
                # patches = []
                # for switch_ind in assignment_switches[zz]:
                #     ci = Circle( (y_agent[switch_ind, 0], y_agent[switch_ind, 1]), 2, color='m', fill=True)
                #     patches.append(ci)
                #     ax.plot(y_agent[switch_ind, 0], y_agent[switch_ind, 1], 'ms', markersize=markersize)
                # p = PatchCollection(patches)
                # ax.add_collection(p)
                # plot target trajectory
                y_target = yout[:, (zz+nagents)*dx:(zz+nagents+1)*dx]
                ax.plot(y_target[0, 0], y_target[0, 1], 'bs', markersize=markersize)
                ax.plot(y_target[:, 0], y_target[:, 1], '-b', linewidth=linewidth)
                ax.text(y_target[0, 0], y_target[0, 1], 'T{0}'.format(zz), fontsize=textsize)
            # ### stationary points
            # for zz in range(ntargets):
            #     if zz >= 1:
            #         stationary_pt_label = '__nolabel__'
            #     offset = stationary_states[zz*dx:(zz+1)*dx]
            #     ax.plot(offset[0], offset[1], 'ks', markersize=markersize)
            #     ax.text(offset[0], offset[1], 'C{0}'.format(zz), fontsize=textsize)
            ax.set_xlabel("x", fontweight=fontweight, fontsize=fontsize)
            ax.set_ylabel("y", fontweight=fontweight, fontsize=fontsize)
        # dim == 2
        ax.xaxis.set_tick_params(labelsize=labelsize)
        ax.yaxis.set_tick_params(labelsize=labelsize)
        # ax.text2D(0.40, 0.95, 'Agent-Target Trajectories', fontweight='bold', fontsize=14, transform=ax.transAxes)
        ax.legend(loc='lower right', fontsize=14)
    if dim == 3:
        # optimal trajectories (solid lines)
        if sim_name == 'AssignmentCustom':
            # agent/target trajectories
            for zz in range(nagents):
                # avoid repeated legend entries
                if zz >= 1:
                    agent_traj_label = '__nolabel__'
                    agent_start_pt_label = '__nolabel__'
                    target_start_pt_label = '__nolabel__'
                    target_traj_label = '__nolabel__'
                # agent state over time
                y_agent = yout[:, zz*dx:(zz+1)*dx]
                # plot agent trajectory with text
                ax.scatter3D(y_agent[0, 0], y_agent[0, 1], y_agent[0, 2], color='r', s=scatter_width, label=agent_start_pt_label)
                ax.plot3D(y_agent[:, 0], y_agent[:, 1], y_agent[:, 2], '-r', linewidth=linewidth_3d, label=agent_traj_label)
                ax.text(y_agent[0, 0], y_agent[0, 1], y_agent[0, 2], 'A{0}'.format(zz), fontsize=textsize)
                # # plot location of assignment switches
                # for switch_ind in assignment_switches[zz]:
                #     ax.scatter3D(y_agent[switch_ind, 0], y_agent[switch_ind, 1], y_agent[switch_ind, 2], color='m') # TODO
                # plot target trajectory
                y_target = yout[:, (zz+nagents)*dx:(zz+nagents+1)*dx]
                ax.scatter3D(y_target[0, 0], y_target[0, 1], y_target[0, 2], color='b', s=scatter_width, label=target_start_pt_label)
                ax.plot3D(y_target[:, 0], y_target[:, 1], y_target[:, 2], '-b', linewidth=linewidth_3d, label=target_traj_label)
                ax.text(y_target[0, 0], y_target[0, 1], y_target[0, 2], 'T{0}'.format(zz), fontsize=textsize)
                # TEST
                # TODO 2d slice
                # trajectories
                ax2.plot(y_agent[:, 0], y_agent[:, 1], '-r', linewidth=linewidth, label=agent_traj_label)
                ax2.plot(y_target[:, 0], y_target[:, 1], '-b', linewidth=linewidth, label=target_traj_label)
                # points
                ax2.plot(y_agent[0, 0], y_agent[0, 1], 'ro', markersize=markersize, label=agent_start_pt_label)
                ax2.plot(y_target[0, 0], y_target[0, 1], 'bo', markersize=markersize, label=target_start_pt_label)
                # text
                ax2.text(y_agent[0, 0], y_agent[0, 1], 'A{0}'.format(zz), fontsize=textsize)
                ax2.text(y_target[0, 0], y_target[0, 1], 'T{0}'.format(zz), fontsize=textsize)
            # ### stationary points
            # for zz in range(ntargets):
            #     if zz >= 1:
            #         stationary_pt_label = '__nolabel__'
            #     offset = stationary_states[zz*dx:(zz+1)*dx]
            #     ax.scatter3D(offset[0], offset[1], offset[2], color='k', s=scatter_width, label=stationary_pt_label)
            #     ax.text(offset[0], offset[1], offset[2], 'C{0}'.format(zz), fontsize=textsize)
            #     # TEST
            #     # TODO 2d slice
            #     ax2.plot(offset[0], offset[1], 'ko', markersize=markersize, label=stationary_pt_label)
            #     ax2.text(offset[0], offset[1], 'C{0}'.format(zz), fontsize=textsize)
            ax.set_xlabel("x", fontweight=fontweight, fontsize=fontsize)
            ax.set_ylabel("y", fontweight=fontweight, fontsize=fontsize)
            ax.set_zlabel("z", fontweight=fontweight, fontsize=fontsize)
            ax2.set_xlabel("x", fontweight=fontweight, fontsize=fontsize)
            ax2.set_ylabel("y", fontweight=fontweight, fontsize=fontsize)
        elif sim_name == 'AssignmentEMD':
            # non-optimal trajectories (dotted lines)
            agent_traj_label = 'Agent Path (EMD)'
            # agent/target trajectories
            for zz in range(nagents):
                # avoid repeated legend entries
                if zz >= 1:
                    agent_traj_label = '__nolabel__'
                # agent state over time
                y_agent = yout[:, zz*dx:(zz+1)*dx]
                # plot agent trajectory with text
                ax.scatter3D(y_agent[0, 0], y_agent[0, 1], y_agent[0, 2], color='r')
                ax.plot3D(y_agent[:, 0], y_agent[:, 1], y_agent[:, 2], '--r', linewidth=linewidth_3d, label=agent_traj_label)
                ax.text(y_agent[0, 0], y_agent[0, 1], y_agent[0, 2], 'A{0}'.format(zz), fontsize=textsize)
                # # plot location of assignment switches
                # for switch_ind in assignment_switches[zz]:
                #     ax.scatter3D(y_agent[switch_ind, 0], y_agent[switch_ind, 1], y_agent[switch_ind, 2], color='m') # TODO
                # plot target trajectory
                y_target = yout[:, (zz+nagents)*dx:(zz+nagents+1)*dx]
                ax.scatter3D(y_target[0, 0], y_target[0, 1], y_target[0, 2], color='b')
                ax.plot3D(y_target[:, 0], y_target[:, 1], y_target[:, 2], '-b')
                ax.text(y_target[0, 0], y_target[0, 1], y_target[0, 2], 'T{0}'.format(zz), fontsize=textsize)
                # TEST
                # TODO 2d slice
                # trajectories
                ax2.plot(y_agent[:, 0], y_agent[:, 1], '--r', linewidth=linewidth, label=agent_traj_label)
                ax2.plot(y_target[:, 0], y_target[:, 1], '-b', linewidth=linewidth)
                # points
                ax2.plot(y_agent[0, 0], y_agent[0, 1], 'ro', markersize=markersize)
                ax2.plot(y_target[0, 0], y_target[0, 1], 'bo', markersize=markersize)
                # text
                ax2.text(y_agent[0, 0], y_agent[0, 1], 'A{0}'.format(zz), fontsize=textsize)
                ax2.text(y_target[0, 0], y_target[0, 1], 'T{0}'.format(zz), fontsize=textsize)
            # # stationary locations
            # for zz in range(ntargets):
            #     offset = stationary_states[zz*dx:(zz+1)*dx]
            #     ax.scatter3D(offset[0], offset[1], offset[2], color='k')
            #     ax.text(offset[0], offset[1], offset[2], 'C{0}'.format(zz), fontsize=textsize)
            #     # TEST
            #     # TODO 2d slice
            #     ax2.plot(offset[0], offset[1], 'ko', markersize=markersize)
            #     ax2.text(offset[0], offset[1], 'C{0}'.format(zz), fontsize=textsize)
            ax.set_xlabel("x", fontweight=fontweight, fontsize=fontsize)
            ax.set_ylabel("y", fontweight=fontweight, fontsize=fontsize)
            ax.set_zlabel("z", fontweight=fontweight, fontsize=fontsize)
            ax2.set_xlabel("x", fontweight=fontweight, fontsize=fontsize)
            ax2.set_ylabel("y", fontweight=fontweight, fontsize=fontsize)
        # dim = 3
        tick_spacing = 100
        ax.xaxis.set_major_locator(ticker.MultipleLocator(tick_spacing))
        ax.yaxis.set_major_locator(ticker.MultipleLocator(tick_spacing))
        ax.zaxis.set_major_locator(ticker.MultipleLocator(tick_spacing))
        ax.xaxis.set_tick_params(labelsize=labelsize)
        ax.yaxis.set_tick_params(labelsize=labelsize)
        ax.zaxis.set_tick_params(labelsize=labelsize)
        ax.tick_params(axis='x', which='major', pad=axispad)
        ax.tick_params(axis='y', which='major', pad=axispad)
        ax.tick_params(axis='z', which='major', pad=axispad)
        ax.xaxis.labelpad = labelpad
        ax.yaxis.labelpad = labelpad
        ax.zaxis.labelpad = labelpad
        # fixed altitude window for the quadcopter plots
        ax.set_zlim3d(-100, 100)
        # TEST
        # TODO 2d slice
        ax2.xaxis.set_tick_params(labelsize=labelsize)
        ax2.yaxis.set_tick_params(labelsize=labelsize)
        # ax.text2D(0.40, 0.95, 'Agent-Target Trajectories', fontweight='bold', fontsize=14, transform=ax.transAxes)
        # ax.legend(loc='lower right', fontsize=fontsize)
        legend = ax.legend(loc='center left', bbox_to_anchor=(1.07, 0.5), fontsize=fontsize)
        # legend.remove()
    if dim == 2:
        ax.legend(loc='center left', bbox_to_anchor=(1.07, 0.5), fontsize=fontsize)
    if dim == 3:
        # ax2.legend(loc='lower right', fontsize=fontsize-4)
        ax2.legend(loc='center left', bbox_to_anchor=(1.07, 0.5), fontsize=fontsize)
def plot_assignment_comp_time(unpacked):
    """ Plots cumulative assignment computational cost over time.

    unpacked: dict mapping sim_name -> diagnostics dict containing a
    'runtime_diagnostics' DataFrame whose column 0 is time and column 1 is
    the per-step assignment computation cost. One cumulative-cost curve is
    drawn per simulation.
    """
    linewidth = 4
    fontsize = 40
    labelsize = 40
    fig, axs = plt.subplots(1,1)
    axs.set_xlabel('Time (s)', fontsize=fontsize)
    axs.set_ylabel('Assignment Cum. Cost (s)', fontsize=fontsize)
    for sim_name, sim_diagnostics in unpacked.items():
        # legend label is the text after the 'Assignment' prefix (e.g. 'EMD')
        label = sim_name.split('Assignment', 1)[1]
        runtime_diagnostics = sim_diagnostics['runtime_diagnostics']
        tout = runtime_diagnostics.iloc[:, 0].to_numpy()
        assign_comp_cost = runtime_diagnostics.iloc[:, 1].to_numpy()
        # (column 2, the dynamics computation cost, is not used here)
        axs.plot(tout, np.cumsum(assign_comp_cost), linewidth=linewidth, label=label)
    axs.xaxis.set_tick_params(labelsize=labelsize)
    axs.yaxis.set_tick_params(labelsize=labelsize)
    axs.legend(fontsize=fontsize)
def plot_runtime_histogram(unpacked_ensemble_diagnostic):
    """ Plots histogram of simulation runtime over ensemble of batch simulations.

    unpacked_ensemble_diagnostic: per-simulation runtime differences
    (EMD runtime minus Dyn runtime), histogrammed into 10 bins.
    """
    fontsize = 32
    labelsize = 32
    fig, axs = plt.subplots(1,1)
    axs.xaxis.set_tick_params(labelsize=labelsize)
    axs.yaxis.set_tick_params(labelsize=labelsize)
    axs.set_xlabel('Runtime Difference (EMD - Dyn)', fontsize=fontsize)
    axs.set_ylabel('Frequency', fontsize=fontsize)
    # single unlabeled series, so no legend is drawn
    axs.hist(unpacked_ensemble_diagnostic, histtype='bar', stacked=True, bins=10, align='left')
def plot_runtimes(unpacked_ensemble_diagnostic):
    """ Plots per-simulation runtime for the Custom and EMD assignment policies.

    unpacked_ensemble_diagnostic: indexable collection whose entry 0 holds
    the Custom-policy runtimes and entry 1 the EMD-policy runtimes.
    """
    fontsize = 32
    labelsize = 32
    fig, axs = plt.subplots(1,1)
    axs.xaxis.set_tick_params(labelsize=labelsize)
    axs.yaxis.set_tick_params(labelsize=labelsize)
    axs.set_xlabel('Simulation', fontsize=fontsize)
    axs.set_ylabel('Runtime (s)', fontsize=fontsize)
    # NOTE make sure that label is matching up with diagnostics
    for series_idx, series_label in enumerate(('Custom', 'EMD')):
        axs.plot(unpacked_ensemble_diagnostic[series_idx], marker='.', label=series_label)
    axs.legend(fontsize=fontsize)
def plot_ensemble_avg_runtime(ensemble_diagnostic):
    """ Plots average runtimes for multiple ensembles as a grouped bar chart.

    ensemble_diagnostic: dict mapping ensemble names (each containing an
    'NvM' substring such as '5v5') to per-ensemble data; row 0 is read as
    the EMD average runtime and row 1 as the Dyn average runtime. Bars are
    grouped per ensemble, ordered by number of agents.
    """
    fontsize = 40
    labelsize = 40
    fig, axs = plt.subplots(1,1)
    axs.set_xlabel('Agents', fontsize=fontsize)
    axs.set_ylabel('Average Runtime (s)', fontsize=fontsize)
    # Using DataFrames
    labels = []
    for ensemble_name in ensemble_diagnostic.keys():
        # raw string: avoids the invalid escape sequence warning for \d
        labels.append(re.search(r'\d+v\d+', ensemble_name).group())
    metrics_df = pd.DataFrame(ensemble_diagnostic)
    metrics_df.columns = labels
    # order data by number of agents
    labels.sort(key=natural_keys)
    metrics_df = metrics_df[labels]
    metrics = {'Ensemble': labels, 'Average Runtime (s) - EMD': metrics_df.values[0, :], 'Average Runtime (s) - Dyn': metrics_df.values[1, :]}
    metrics_df = pd.DataFrame(metrics)
    # grouped bars: Dyn and EMD side by side for each ensemble
    nensembles = len(ensemble_diagnostic)
    xpos = np.arange(nensembles)
    width = 0.35
    axs.bar(xpos, metrics_df['Average Runtime (s) - Dyn'].values, width, alpha=0.5, label='Dyn')
    axs.bar(xpos+width, metrics_df['Average Runtime (s) - EMD'].values, width, alpha=0.5, label='EMD')
    axs.xaxis.set_tick_params(labelsize=labelsize)
    axs.yaxis.set_tick_params(labelsize=labelsize)
    # center the ensemble tick label between the paired bars
    axs.set_xticks(xpos + width / 2)
    axs.set_xticklabels(labels)
    axs.legend(fontsize=fontsize)
| 40.039768 | 147 | 0.59092 | 6,079 | 48,328 | 4.516697 | 0.073532 | 0.025349 | 0.017336 | 0.022289 | 0.780129 | 0.760353 | 0.739556 | 0.723713 | 0.715155 | 0.704593 | 0 | 0.026182 | 0.276072 | 48,328 | 1,206 | 148 | 40.072968 | 0.758618 | 0.279796 | 0 | 0.650467 | 0 | 0 | 0.054446 | 0.001224 | 0.005607 | 0 | 0 | 0.003317 | 0 | 1 | 0.026168 | false | 0 | 0.020561 | 0.001869 | 0.050467 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 5 |
812477503f7a4a2265b21357a6716935ffa57f15 | 145 | py | Python | src/views/__init__.py | Nardri/rbac-service | c5cf6baf60e95a7790156c85e37c76c697efd585 | [
"MIT"
] | null | null | null | src/views/__init__.py | Nardri/rbac-service | c5cf6baf60e95a7790156c85e37c76c697efd585 | [
"MIT"
] | null | null | null | src/views/__init__.py | Nardri/rbac-service | c5cf6baf60e95a7790156c85e37c76c697efd585 | [
"MIT"
] | null | null | null | """Views."""
from .role import RoleResource, RoleListResource, RolePermissionResource
from .service import ServiceListResource, ServiceResource
| 29 | 72 | 0.827586 | 12 | 145 | 10 | 0.833333 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.089655 | 145 | 4 | 73 | 36.25 | 0.909091 | 0.041379 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 5 |
d4ae8b54b6e80c20e5820653da42470d764b2eb3 | 40 | py | Python | src/rnanorm/__main__.py | mstajdohar/rnaseq-normalization | 395eb45f8626629ff700e29e804f7b1559e5c199 | [
"Apache-2.0"
] | null | null | null | src/rnanorm/__main__.py | mstajdohar/rnaseq-normalization | 395eb45f8626629ff700e29e804f7b1559e5c199 | [
"Apache-2.0"
] | null | null | null | src/rnanorm/__main__.py | mstajdohar/rnaseq-normalization | 395eb45f8626629ff700e29e804f7b1559e5c199 | [
"Apache-2.0"
] | null | null | null | from .normalization import main
main()
| 10 | 31 | 0.775 | 5 | 40 | 6.2 | 0.8 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.15 | 40 | 3 | 32 | 13.333333 | 0.911765 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 0.5 | 0 | 0.5 | 0 | 1 | 1 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | 5 |
d4e3d35d40db09fe93b0a5a780b8974b2c9a5684 | 111 | py | Python | Accuracy, precision, recall & f1/macro_precision.py | CodingWillow/MachineLearning | 340c9d91d4178a2ab56921502bdcee73864a1a59 | [
"CC0-1.0"
] | null | null | null | Accuracy, precision, recall & f1/macro_precision.py | CodingWillow/MachineLearning | 340c9d91d4178a2ab56921502bdcee73864a1a59 | [
"CC0-1.0"
] | null | null | null | Accuracy, precision, recall & f1/macro_precision.py | CodingWillow/MachineLearning | 340c9d91d4178a2ab56921502bdcee73864a1a59 | [
"CC0-1.0"
] | null | null | null | def macro_precision(model):
    # Macro precision: unweighted mean of the per-class (micro) precisions.
    # NOTE(review): assumes len(model) equals the number of entries returned
    # by micro_precisions(model) -- confirm against that helper.
    mac_precision = sum(micro_precisions(model)) / len(model)
    return mac_precision
| 27.75 | 59 | 0.783784 | 15 | 111 | 5.533333 | 0.666667 | 0.289157 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.117117 | 111 | 3 | 60 | 37 | 0.846939 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.333333 | false | 0 | 0 | 0 | 0.666667 | 0 | 1 | 0 | 0 | null | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 5 |
d4f3ac19c698adfe2200f8e953e4761fb1f2d067 | 225 | py | Python | src/home/admin.py | MetricsGroup/IERT-Webapp | 9e43f1775767412898f9340b9cc84196eb4abfdb | [
"MIT"
] | 3 | 2019-04-25T11:19:22.000Z | 2020-05-10T20:41:12.000Z | src/home/admin.py | MetricsGroup/IERT-Webapp | 9e43f1775767412898f9340b9cc84196eb4abfdb | [
"MIT"
] | 5 | 2020-06-17T05:16:27.000Z | 2022-01-13T02:15:56.000Z | src/home/admin.py | MetricsGroup/IERT-Webapp | 9e43f1775767412898f9340b9cc84196eb4abfdb | [
"MIT"
] | 3 | 2020-06-13T10:40:27.000Z | 2021-10-13T15:45:50.000Z | from django.contrib import admin
from .models import *
# Expose the site content models in the Django admin interface.
admin.site.register(message_from_about_us)
admin.site.register(degree_detail)
admin.site.register(gallery_pic)
admin.site.register(holiday)
# admin.site.register(popup)
| 25 | 42 | 0.826667 | 33 | 225 | 5.484848 | 0.515152 | 0.248619 | 0.469613 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.066667 | 225 | 8 | 43 | 28.125 | 0.861905 | 0.115556 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 0.333333 | 0 | 0.333333 | 0 | 0 | 0 | 0 | null | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | 5 |
d4f9e5fb1a17960b0a7a8057a2e82905e35ed8af | 116 | py | Python | travelize/address/admin.py | TraMZzz/travelize | d8e4b08dad9eefeefa426b08edfdf3493293cbb1 | [
"MIT"
] | null | null | null | travelize/address/admin.py | TraMZzz/travelize | d8e4b08dad9eefeefa426b08edfdf3493293cbb1 | [
"MIT"
] | null | null | null | travelize/address/admin.py | TraMZzz/travelize | d8e4b08dad9eefeefa426b08edfdf3493293cbb1 | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
from django.contrib import admin
from .models import Address
# Make Address records manageable via the Django admin.
admin.site.register(Address)
| 16.571429 | 32 | 0.732759 | 16 | 116 | 5.3125 | 0.75 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.01 | 0.137931 | 116 | 6 | 33 | 19.333333 | 0.84 | 0.181034 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 0.666667 | 0 | 0.666667 | 0 | 1 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 5 |
be127a6b862c68dfbaa4017b1b5eecd33156dc8d | 958 | py | Python | ls/joyous/models/__init__.py | mikiec84/ls.joyous | db2308ce59cd072b83a465b02b9f29c00269fdbb | [
"BSD-3-Clause"
] | null | null | null | ls/joyous/models/__init__.py | mikiec84/ls.joyous | db2308ce59cd072b83a465b02b9f29c00269fdbb | [
"BSD-3-Clause"
] | null | null | null | ls/joyous/models/__init__.py | mikiec84/ls.joyous | db2308ce59cd072b83a465b02b9f29c00269fdbb | [
"BSD-3-Clause"
] | null | null | null | from .events import EventCategory
from .events import EventBase
from .events import SimpleEventPage
from .events import MultidayEventPage
from .events import RecurringEventPage
from .events import MultidayRecurringEventPage
from .events import EventExceptionBase
from .events import ExtraInfoPage
from .events import CancellationPage
from .events import RescheduleEventBase
from .events import PostponementPage
from .events import RescheduleMultidayEventPage
from .events import getAllEventsByDay
from .events import getAllEventsByWeek
from .events import getAllUpcomingEvents
from .events import getAllPastEvents
from .events import getGroupUpcomingEvents
from .events import getEventFromUid
from .events import getAllEvents
from .events import removeContentPanels
from .calendar import CalendarPage
from .calendar import CalendarPageForm
from .calendar import SpecificCalendarPage
from .calendar import GeneralCalendarPage
from .groups import GroupPage
| 33.034483 | 47 | 0.866388 | 100 | 958 | 8.3 | 0.3 | 0.240964 | 0.385542 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.107516 | 958 | 28 | 48 | 34.214286 | 0.97076 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | null | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 5 |
be1d51e469990ebf96e749de9f28408b68e9e02f | 15 | py | Python | __init__.py | alonreches/EMAbin | 42336a998611938409b5e40d4b7adcc77ff573b0 | [
"MIT"
] | 1 | 2018-07-09T12:55:29.000Z | 2018-07-09T12:55:29.000Z | __init__.py | alonreches/EMAbin | 42336a998611938409b5e40d4b7adcc77ff573b0 | [
"MIT"
] | null | null | null | __init__.py | alonreches/EMAbin | 42336a998611938409b5e40d4b7adcc77ff573b0 | [
"MIT"
] | null | null | null | print("start!") | 15 | 15 | 0.666667 | 2 | 15 | 5 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 15 | 1 | 15 | 15 | 0.666667 | 0 | 0 | 0 | 0 | 0 | 0.375 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 0 | 0 | 0 | 1 | 1 | 1 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 1 | 0 | 5 |
07b1e9f84cbd37e12efbc149f993c2c30b6f21e3 | 717 | py | Python | SUAVE/SUAVE-2.5.0/trunk/SUAVE/Methods/Missions/Segments/Cruise/__init__.py | Vinicius-Tanigawa/Undergraduate-Research-Project | e92372f07882484b127d7affe305eeec2238b8a9 | [
"MIT"
] | null | null | null | SUAVE/SUAVE-2.5.0/trunk/SUAVE/Methods/Missions/Segments/Cruise/__init__.py | Vinicius-Tanigawa/Undergraduate-Research-Project | e92372f07882484b127d7affe305eeec2238b8a9 | [
"MIT"
] | null | null | null | SUAVE/SUAVE-2.5.0/trunk/SUAVE/Methods/Missions/Segments/Cruise/__init__.py | Vinicius-Tanigawa/Undergraduate-Research-Project | e92372f07882484b127d7affe305eeec2238b8a9 | [
"MIT"
] | null | null | null | ## @defgroup Methods-Missions-Segments-Cruise Cruise
# Cruise mission methods containing the functions for setting up and solving a mission.
# @ingroup Methods-Missions-Segments
from . import Common
from . import Constant_Mach_Constant_Altitude
from . import Constant_Speed_Constant_Altitude
from . import Constant_Mach_Constant_Altitude_Loiter
from . import Constant_Throttle_Constant_Altitude
from . import Variable_Cruise_Distance
from . import Constant_Dynamic_Pressure_Constant_Altitude_Loiter
from . import Constant_Acceleration_Constant_Altitude
from . import Constant_Pitch_Rate_Constant_Altitude
from . import Constant_Dynamic_Pressure_Constant_Altitude
from . import Constant_Speed_Constant_Altitude_Loiter | 47.8 | 87 | 0.877266 | 91 | 717 | 6.527473 | 0.351648 | 0.185185 | 0.272727 | 0.262626 | 0.589226 | 0.505051 | 0.323232 | 0.185185 | 0 | 0 | 0 | 0 | 0.090656 | 717 | 15 | 88 | 47.8 | 0.911043 | 0.237099 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 5 |
07e838e66c3262f04a86d136c58d1b99d6276aca | 191 | py | Python | {{cookiecutter.project_slug}}/backend/app/app/monitor/tasks.py | addr/flask-gql-mongo-docker | cecf3209399f541750a3faeda7d7a6e70e6541d4 | [
"MIT"
] | null | null | null | {{cookiecutter.project_slug}}/backend/app/app/monitor/tasks.py | addr/flask-gql-mongo-docker | cecf3209399f541750a3faeda7d7a6e70e6541d4 | [
"MIT"
] | null | null | null | {{cookiecutter.project_slug}}/backend/app/app/monitor/tasks.py | addr/flask-gql-mongo-docker | cecf3209399f541750a3faeda7d7a6e70e6541d4 | [
"MIT"
] | 1 | 2019-02-22T16:47:13.000Z | 2019-02-22T16:47:13.000Z | from .init_monitor import celery_app
@celery_app.task(
    name="monitor_test", bind=True, default_retry_delay=5, max_retries=None)
def monitor_test(self):
    """Trivial Celery task used to verify the monitor/worker is responsive.

    ``bind=True`` exposes the task instance as ``self``;
    ``max_retries=None`` removes the retry cap, with retries spaced
    ``default_retry_delay`` (5) seconds apart (Celery task semantics).
    """
    return "Celery monitor test"
| 23.875 | 76 | 0.769634 | 29 | 191 | 4.793103 | 0.724138 | 0.23741 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.006024 | 0.13089 | 191 | 7 | 77 | 27.285714 | 0.831325 | 0 | 0 | 0 | 0 | 0 | 0.162304 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.2 | false | 0 | 0.2 | 0.2 | 0.6 | 0 | 1 | 0 | 0 | null | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 5 |
07f0d4bee1de42d4a6b23d69ef8184057c5927e0 | 10,266 | py | Python | goodstech-20191230/python/alibabacloud_goodstech20191230/models.py | alibabacloud-sdk-swift/alibabacloud-sdk | afd43b41530abb899076a34ceb96bdef55f74460 | [
"Apache-2.0"
] | null | null | null | goodstech-20191230/python/alibabacloud_goodstech20191230/models.py | alibabacloud-sdk-swift/alibabacloud-sdk | afd43b41530abb899076a34ceb96bdef55f74460 | [
"Apache-2.0"
] | null | null | null | goodstech-20191230/python/alibabacloud_goodstech20191230/models.py | alibabacloud-sdk-swift/alibabacloud-sdk | afd43b41530abb899076a34ceb96bdef55f74460 | [
"Apache-2.0"
] | null | null | null | # This file is auto-generated, don't edit it. Thanks.
from Tea.model import TeaModel
class RecognizeFurnitureAttributeRequest(TeaModel):
    """Request model for the RecognizeFurnitureAttribute API."""

    def __init__(self, image_url=None):
        # URL of the image to analyze.
        self.image_url = image_url

    def validate(self):
        # Required-field check is inherited from TeaModel.
        self.validate_required(self.image_url, 'image_url')

    def to_map(self):
        """Serialize this model to a plain dict."""
        result = {}
        result['ImageURL'] = self.image_url
        return result

    def from_map(self, map=None):
        """Populate fields from a dict and return self."""
        # Fix: use None instead of a mutable ``{}`` default argument.
        map = map if map is not None else {}
        self.image_url = map.get('ImageURL')
        return self
class RecognizeFurnitureAttributeResponse(TeaModel):
    """Response model for the RecognizeFurnitureAttribute API."""

    def __init__(self, request_id=None, data=None):
        self.request_id = request_id
        # Nested RecognizeFurnitureAttributeResponseData instance (or None).
        self.data = data

    def validate(self):
        self.validate_required(self.request_id, 'request_id')
        self.validate_required(self.data, 'data')
        if self.data:
            self.data.validate()

    def to_map(self):
        """Serialize this model (and its nested data) to a plain dict."""
        result = {}
        result['RequestId'] = self.request_id
        if self.data is not None:
            result['Data'] = self.data.to_map()
        else:
            result['Data'] = None
        return result

    def from_map(self, map=None):
        """Populate fields from a dict and return self."""
        # Fix: use None instead of a mutable ``{}`` default argument.
        map = map if map is not None else {}
        self.request_id = map.get('RequestId')
        if map.get('Data') is not None:
            temp_model = RecognizeFurnitureAttributeResponseData()
            self.data = temp_model.from_map(map['Data'])
        else:
            self.data = None
        return self
class RecognizeFurnitureAttributeResponseData(TeaModel):
    """Prediction payload: furniture style id, label and probability."""

    def __init__(self, pred_style_id=None, pred_style=None, pred_probability=None):
        self.pred_style_id = pred_style_id
        self.pred_style = pred_style
        self.pred_probability = pred_probability

    def validate(self):
        self.validate_required(self.pred_style_id, 'pred_style_id')
        self.validate_required(self.pred_style, 'pred_style')
        self.validate_required(self.pred_probability, 'pred_probability')

    def to_map(self):
        """Serialize this model to a plain dict."""
        result = {}
        result['PredStyleId'] = self.pred_style_id
        result['PredStyle'] = self.pred_style
        result['PredProbability'] = self.pred_probability
        return result

    def from_map(self, map=None):
        """Populate fields from a dict and return self."""
        # Fix: use None instead of a mutable ``{}`` default argument.
        map = map if map is not None else {}
        self.pred_style_id = map.get('PredStyleId')
        self.pred_style = map.get('PredStyle')
        self.pred_probability = map.get('PredProbability')
        return self
class RecognizeFurnitureAttributeAdvanceRequest(TeaModel):
    """Advance-request model (image passed as an object instead of a URL)."""

    def __init__(self, image_urlobject=None):
        self.image_urlobject = image_urlobject

    def validate(self):
        self.validate_required(self.image_urlobject, 'image_urlobject')

    def to_map(self):
        """Serialize this model to a plain dict."""
        result = {}
        result['ImageURLObject'] = self.image_urlobject
        return result

    def from_map(self, map=None):
        """Populate fields from a dict and return self."""
        # Fix: use None instead of a mutable ``{}`` default argument.
        map = map if map is not None else {}
        self.image_urlobject = map.get('ImageURLObject')
        return self
class RecognizeFurnitureSpuRequest(TeaModel):
    """Request model for the RecognizeFurnitureSpu API (image + dimensions)."""

    def __init__(self, image_url=None, xlength=None, ylength=None, zlength=None):
        self.image_url = image_url
        # Object dimensions along the three axes.
        self.xlength = xlength
        self.ylength = ylength
        self.zlength = zlength

    def validate(self):
        self.validate_required(self.image_url, 'image_url')
        self.validate_required(self.xlength, 'xlength')
        self.validate_required(self.ylength, 'ylength')
        self.validate_required(self.zlength, 'zlength')

    def to_map(self):
        """Serialize this model to a plain dict."""
        result = {}
        result['ImageURL'] = self.image_url
        result['XLength'] = self.xlength
        result['YLength'] = self.ylength
        result['ZLength'] = self.zlength
        return result

    def from_map(self, map=None):
        """Populate fields from a dict and return self."""
        # Fix: use None instead of a mutable ``{}`` default argument.
        map = map if map is not None else {}
        self.image_url = map.get('ImageURL')
        self.xlength = map.get('XLength')
        self.ylength = map.get('YLength')
        self.zlength = map.get('ZLength')
        return self
class RecognizeFurnitureSpuResponse(TeaModel):
    """Response model for the RecognizeFurnitureSpu API."""

    def __init__(self, request_id=None, data=None):
        self.request_id = request_id
        # Nested RecognizeFurnitureSpuResponseData instance (or None).
        self.data = data

    def validate(self):
        self.validate_required(self.request_id, 'request_id')
        self.validate_required(self.data, 'data')
        if self.data:
            self.data.validate()

    def to_map(self):
        """Serialize this model (and its nested data) to a plain dict."""
        result = {}
        result['RequestId'] = self.request_id
        if self.data is not None:
            result['Data'] = self.data.to_map()
        else:
            result['Data'] = None
        return result

    def from_map(self, map=None):
        """Populate fields from a dict and return self."""
        # Fix: use None instead of a mutable ``{}`` default argument.
        map = map if map is not None else {}
        self.request_id = map.get('RequestId')
        if map.get('Data') is not None:
            temp_model = RecognizeFurnitureSpuResponseData()
            self.data = temp_model.from_map(map['Data'])
        else:
            self.data = None
        return self
class RecognizeFurnitureSpuResponseData(TeaModel):
    """Prediction payload: furniture category id, label and probability."""

    def __init__(self, pred_cate_id=None, pred_cate=None, pred_probability=None):
        self.pred_cate_id = pred_cate_id
        self.pred_cate = pred_cate
        self.pred_probability = pred_probability

    def validate(self):
        self.validate_required(self.pred_cate_id, 'pred_cate_id')
        self.validate_required(self.pred_cate, 'pred_cate')
        self.validate_required(self.pred_probability, 'pred_probability')

    def to_map(self):
        """Serialize this model to a plain dict."""
        result = {}
        result['PredCateId'] = self.pred_cate_id
        result['PredCate'] = self.pred_cate
        result['PredProbability'] = self.pred_probability
        return result

    def from_map(self, map=None):
        """Populate fields from a dict and return self."""
        # Fix: use None instead of a mutable ``{}`` default argument.
        map = map if map is not None else {}
        self.pred_cate_id = map.get('PredCateId')
        self.pred_cate = map.get('PredCate')
        self.pred_probability = map.get('PredProbability')
        return self
class RecognizeFurnitureSpuAdvanceRequest(TeaModel):
    """Advance-request model for RecognizeFurnitureSpu (image object + dims)."""

    def __init__(self, image_urlobject=None, xlength=None, ylength=None, zlength=None):
        self.image_urlobject = image_urlobject
        # Object dimensions along the three axes.
        self.xlength = xlength
        self.ylength = ylength
        self.zlength = zlength

    def validate(self):
        self.validate_required(self.image_urlobject, 'image_urlobject')
        self.validate_required(self.xlength, 'xlength')
        self.validate_required(self.ylength, 'ylength')
        self.validate_required(self.zlength, 'zlength')

    def to_map(self):
        """Serialize this model to a plain dict."""
        result = {}
        result['ImageURLObject'] = self.image_urlobject
        result['XLength'] = self.xlength
        result['YLength'] = self.ylength
        result['ZLength'] = self.zlength
        return result

    def from_map(self, map=None):
        """Populate fields from a dict and return self."""
        # Fix: use None instead of a mutable ``{}`` default argument.
        map = map if map is not None else {}
        self.image_urlobject = map.get('ImageURLObject')
        self.xlength = map.get('XLength')
        self.ylength = map.get('YLength')
        self.zlength = map.get('ZLength')
        return self
class ClassifyCommodityRequest(TeaModel):
    """Request model for the ClassifyCommodity API."""

    def __init__(self, image_url=None):
        # URL of the commodity image to classify.
        self.image_url = image_url

    def validate(self):
        self.validate_required(self.image_url, 'image_url')

    def to_map(self):
        """Serialize this model to a plain dict."""
        result = {}
        result['ImageURL'] = self.image_url
        return result

    def from_map(self, map=None):
        """Populate fields from a dict and return self."""
        # Fix: use None instead of a mutable ``{}`` default argument.
        map = map if map is not None else {}
        self.image_url = map.get('ImageURL')
        return self
class ClassifyCommodityResponse(TeaModel):
    """Response model for the ClassifyCommodity API."""

    def __init__(self, request_id=None, data=None):
        self.request_id = request_id
        # Nested ClassifyCommodityResponseData instance (or None).
        self.data = data

    def validate(self):
        self.validate_required(self.request_id, 'request_id')
        self.validate_required(self.data, 'data')
        if self.data:
            self.data.validate()

    def to_map(self):
        """Serialize this model (and its nested data) to a plain dict."""
        result = {}
        result['RequestId'] = self.request_id
        if self.data is not None:
            result['Data'] = self.data.to_map()
        else:
            result['Data'] = None
        return result

    def from_map(self, map=None):
        """Populate fields from a dict and return self."""
        # Fix: use None instead of a mutable ``{}`` default argument.
        map = map if map is not None else {}
        self.request_id = map.get('RequestId')
        if map.get('Data') is not None:
            temp_model = ClassifyCommodityResponseData()
            self.data = temp_model.from_map(map['Data'])
        else:
            self.data = None
        return self
class ClassifyCommodityResponseDataCategories(TeaModel):
    """A single category match: confidence score, name and id."""

    def __init__(self, score=None, category_name=None, category_id=None):
        self.score = score
        self.category_name = category_name
        self.category_id = category_id

    def validate(self):
        self.validate_required(self.score, 'score')
        self.validate_required(self.category_name, 'category_name')
        self.validate_required(self.category_id, 'category_id')

    def to_map(self):
        """Serialize this model to a plain dict."""
        result = {}
        result['Score'] = self.score
        result['CategoryName'] = self.category_name
        result['CategoryId'] = self.category_id
        return result

    def from_map(self, map=None):
        """Populate fields from a dict and return self."""
        # Fix: use None instead of a mutable ``{}`` default argument.
        map = map if map is not None else {}
        self.score = map.get('Score')
        self.category_name = map.get('CategoryName')
        self.category_id = map.get('CategoryId')
        return self
class ClassifyCommodityResponseData(TeaModel):
    """Data payload of ClassifyCommodityResponse: a list of category matches."""

    def __init__(self, categories=None):
        # Fix: the original ignored the ``categories`` argument and always
        # reset the attribute to []; honor the argument, keeping [] as the
        # effective default (built per-call, not shared).
        self.categories = categories if categories is not None else []

    def validate(self):
        self.validate_required(self.categories, 'categories')
        if self.categories:
            for k in self.categories:
                if k:
                    k.validate()

    def to_map(self):
        """Serialize this model (and each nested category) to a plain dict."""
        result = {}
        result['Categories'] = []
        if self.categories is not None:
            for k in self.categories:
                result['Categories'].append(k.to_map() if k else None)
        else:
            result['Categories'] = None
        return result

    def from_map(self, map=None):
        """Populate fields from a dict and return self."""
        # Fix: use None instead of a mutable ``{}`` default argument.
        map = map if map is not None else {}
        self.categories = []
        if map.get('Categories') is not None:
            for k in map.get('Categories'):
                temp_model = ClassifyCommodityResponseDataCategories()
                temp_model = temp_model.from_map(k)
                self.categories.append(temp_model)
        else:
            self.categories = None
        return self
class ClassifyCommodityAdvanceRequest(TeaModel):
    """Advance-request model (image passed as an object instead of a URL)."""

    def __init__(self, image_urlobject=None):
        self.image_urlobject = image_urlobject

    def validate(self):
        self.validate_required(self.image_urlobject, 'image_urlobject')

    def to_map(self):
        """Serialize this model to a plain dict."""
        result = {}
        result['ImageURLObject'] = self.image_urlobject
        return result

    def from_map(self, map=None):
        """Populate fields from a dict and return self."""
        # Fix: use None instead of a mutable ``{}`` default argument.
        map = map if map is not None else {}
        self.image_urlobject = map.get('ImageURLObject')
        return self
| 31.981308 | 87 | 0.631989 | 1,175 | 10,266 | 5.302979 | 0.06383 | 0.043813 | 0.089873 | 0.107848 | 0.770342 | 0.751725 | 0.700369 | 0.665062 | 0.643717 | 0.60536 | 0 | 0 | 0.259692 | 10,266 | 320 | 88 | 32.08125 | 0.819868 | 0.004968 | 0 | 0.75 | 1 | 0 | 0.081171 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.2 | false | 0 | 0.003846 | 0 | 0.353846 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 5 |
58001253bba2a26e26fff776e5ce3814ff99f303 | 144 | py | Python | pyboltzmann/test/test_sampler.py | towink/boltzmann-planar-graph | fcfc3a04f10039f94ff74db58111007e86a31fee | [
"BSD-3-Clause"
] | null | null | null | pyboltzmann/test/test_sampler.py | towink/boltzmann-planar-graph | fcfc3a04f10039f94ff74db58111007e86a31fee | [
"BSD-3-Clause"
] | null | null | null | pyboltzmann/test/test_sampler.py | towink/boltzmann-planar-graph | fcfc3a04f10039f94ff74db58111007e86a31fee | [
"BSD-3-Clause"
] | null | null | null | # This will automatically run the examples when nose is executed.
from pyboltzmann.examples import test_examples
# TODO Make some real tests.
| 24 | 65 | 0.805556 | 21 | 144 | 5.47619 | 0.904762 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.159722 | 144 | 5 | 66 | 28.8 | 0.950413 | 0.625 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.2 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 1 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | 5 |
6affd56a61e88714bb42cc7b8e5dc5abddb471ad | 563 | py | Python | GPyFlow/errors.py | niu-lab/GPyFlow-CLI | 6f210b20f4c722ff103e29d6caa72d7e17666b18 | [
"MIT"
] | null | null | null | GPyFlow/errors.py | niu-lab/GPyFlow-CLI | 6f210b20f4c722ff103e29d6caa72d7e17666b18 | [
"MIT"
] | null | null | null | GPyFlow/errors.py | niu-lab/GPyFlow-CLI | 6f210b20f4c722ff103e29d6caa72d7e17666b18 | [
"MIT"
] | null | null | null | class MacroError(Exception):
def __init__(self, macro):
self.macro = macro
def __str__(self):
return "{} value error.".format(self.macro)
class CycleInWorkflowError(Exception):
def __init__(self, step_names):
self.step_names = step_names
pass
def __str__(self):
return "cycle in workflow: {}".format(",".join(self.step_names))
class RunCmdException(Exception):
def __init__(self, cmd):
self.cmd = cmd
pass
def __str__(self):
return "CMDERR:{cmd}".format(cmd=self.cmd)
| 22.52 | 72 | 0.630551 | 66 | 563 | 4.954545 | 0.348485 | 0.110092 | 0.146789 | 0.183486 | 0.122324 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.245115 | 563 | 24 | 73 | 23.458333 | 0.769412 | 0 | 0 | 0.294118 | 0 | 0 | 0.087034 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.352941 | false | 0.117647 | 0 | 0.176471 | 0.705882 | 0 | 0 | 0 | 0 | null | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 1 | 0 | 0 | 5 |
ed88f58832382e406681c61a9054eed6e9c126d7 | 199 | py | Python | util/test/tests/Vulkan/VK_Resource_Lifetimes.py | songtm/renderdoc | 7533c6b7ac7cac7cfab2d1a1ddc011c693202a47 | [
"MIT"
] | 1 | 2019-11-14T08:52:26.000Z | 2019-11-14T08:52:26.000Z | util/test/tests/Vulkan/VK_Resource_Lifetimes.py | songtm/renderdoc | 7533c6b7ac7cac7cfab2d1a1ddc011c693202a47 | [
"MIT"
] | 2 | 2019-04-23T21:46:42.000Z | 2019-05-09T18:33:36.000Z | util/test/tests/Vulkan/VK_Resource_Lifetimes.py | songtm/renderdoc | 7533c6b7ac7cac7cfab2d1a1ddc011c693202a47 | [
"MIT"
] | 1 | 2019-09-12T03:37:52.000Z | 2019-09-12T03:37:52.000Z | import renderdoc as rd
import rdtest
class VK_Resource_Lifetimes(rdtest.TestCase):
demos_test_name = 'VK_Resource_Lifetimes'
def check_capture(self):
self.check_final_backbuffer()
| 19.9 | 45 | 0.768844 | 26 | 199 | 5.538462 | 0.730769 | 0.138889 | 0.263889 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.165829 | 199 | 9 | 46 | 22.111111 | 0.86747 | 0 | 0 | 0 | 0 | 0 | 0.105528 | 0.105528 | 0 | 0 | 0 | 0 | 0 | 1 | 0.166667 | false | 0 | 0.333333 | 0 | 0.833333 | 0 | 1 | 0 | 0 | null | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 0 | 5 |
71f19b157de65a5939099f84a5a787786ffc7167 | 24,521 | py | Python | st2common/tests/unit/test_rbac_resolvers_action.py | FairwindsOps/st2 | 2b76ca740c4af0d6b2c1d1ba5534ce4133fd16fa | [
"Apache-2.0"
] | 1 | 2021-04-08T03:21:49.000Z | 2021-04-08T03:21:49.000Z | st2common/tests/unit/test_rbac_resolvers_action.py | FairwindsOps/st2 | 2b76ca740c4af0d6b2c1d1ba5534ce4133fd16fa | [
"Apache-2.0"
] | null | null | null | st2common/tests/unit/test_rbac_resolvers_action.py | FairwindsOps/st2 | 2b76ca740c4af0d6b2c1d1ba5534ce4133fd16fa | [
"Apache-2.0"
] | null | null | null | # Licensed to the StackStorm, Inc ('StackStorm') under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from st2common.rbac.types import PermissionType
from st2common.rbac.types import ResourceType
from st2common.persistence.auth import User
from st2common.persistence.rbac import Role
from st2common.persistence.rbac import UserRoleAssignment
from st2common.persistence.rbac import PermissionGrant
from st2common.persistence.action import Action
from st2common.models.db.auth import UserDB
from st2common.models.db.rbac import RoleDB
from st2common.models.db.rbac import UserRoleAssignmentDB
from st2common.models.db.rbac import PermissionGrantDB
from st2common.models.db.action import ActionDB
from st2common.models.api.action import ActionAPI
from st2common.rbac.resolvers import ActionPermissionsResolver
from tests.unit.test_rbac_resolvers import BasePermissionsResolverTestCase
__all__ = [
'ActionPermissionsResolverTestCase'
]
class ActionPermissionsResolverTestCase(BasePermissionsResolverTestCase):
def setUp(self):
super(ActionPermissionsResolverTestCase, self).setUp()
# Create some mock users
user_1_db = UserDB(name='1_role_action_pack_grant')
user_1_db = User.add_or_update(user_1_db)
self.users['custom_role_action_pack_grant'] = user_1_db
user_2_db = UserDB(name='1_role_action_grant')
user_2_db = User.add_or_update(user_2_db)
self.users['custom_role_action_grant'] = user_2_db
user_3_db = UserDB(name='custom_role_pack_action_all_grant')
user_3_db = User.add_or_update(user_3_db)
self.users['custom_role_pack_action_all_grant'] = user_3_db
user_4_db = UserDB(name='custom_role_action_all_grant')
user_4_db = User.add_or_update(user_4_db)
self.users['custom_role_action_all_grant'] = user_4_db
user_5_db = UserDB(name='custom_role_action_execute_grant')
user_5_db = User.add_or_update(user_5_db)
self.users['custom_role_action_execute_grant'] = user_5_db
user_6_db = UserDB(name='action_pack_action_create_grant')
user_6_db = User.add_or_update(user_6_db)
self.users['action_pack_action_create_grant'] = user_6_db
user_7_db = UserDB(name='action_pack_action_all_grant')
user_7_db = User.add_or_update(user_7_db)
self.users['action_pack_action_all_grant'] = user_7_db
user_8_db = UserDB(name='action_action_create_grant')
user_8_db = User.add_or_update(user_8_db)
self.users['action_action_create_grant'] = user_8_db
user_9_db = UserDB(name='action_action_all_grant')
user_9_db = User.add_or_update(user_9_db)
self.users['action_action_all_grant'] = user_9_db
user_10_db = UserDB(name='custom_role_action_list_grant')
user_10_db = User.add_or_update(user_10_db)
self.users['custom_role_action_list_grant'] = user_10_db
# Create some mock resources on which permissions can be granted
action_1_db = ActionDB(pack='test_pack_1', name='action1', entry_point='',
runner_type={'name': 'run-local'})
action_1_db = Action.add_or_update(action_1_db)
self.resources['action_1'] = action_1_db
action_2_db = ActionDB(pack='test_pack_1', name='action2', entry_point='',
runner_type={'name': 'run-local'})
action_2_db = Action.add_or_update(action_1_db)
self.resources['action_2'] = action_2_db
action_3_db = ActionDB(pack='test_pack_2', name='action3', entry_point='',
runner_type={'name': 'run-local'})
action_3_db = Action.add_or_update(action_3_db)
self.resources['action_3'] = action_3_db
# Create some mock roles with associated permission grants
# Custom role 2 - one grant on parent pack
# "action_view" on pack_1
grant_db = PermissionGrantDB(resource_uid=self.resources['pack_1'].get_uid(),
resource_type=ResourceType.PACK,
permission_types=[PermissionType.ACTION_VIEW])
grant_db = PermissionGrant.add_or_update(grant_db)
permission_grants = [str(grant_db.id)]
role_3_db = RoleDB(name='custom_role_action_pack_grant',
permission_grants=permission_grants)
role_3_db = Role.add_or_update(role_3_db)
self.roles['custom_role_action_pack_grant'] = role_3_db
# Custom role 4 - one grant on action
# "action_view" on action_3
grant_db = PermissionGrantDB(resource_uid=self.resources['action_3'].get_uid(),
resource_type=ResourceType.ACTION,
permission_types=[PermissionType.ACTION_VIEW])
grant_db = PermissionGrant.add_or_update(grant_db)
permission_grants = [str(grant_db.id)]
role_4_db = RoleDB(name='custom_role_action_grant', permission_grants=permission_grants)
role_4_db = Role.add_or_update(role_4_db)
self.roles['custom_role_action_grant'] = role_4_db
# Custom role - "action_all" grant on a parent action pack
grant_db = PermissionGrantDB(resource_uid=self.resources['pack_1'].get_uid(),
resource_type=ResourceType.PACK,
permission_types=[PermissionType.ACTION_ALL])
grant_db = PermissionGrant.add_or_update(grant_db)
permission_grants = [str(grant_db.id)]
role_4_db = RoleDB(name='custom_role_pack_action_all_grant',
permission_grants=permission_grants)
role_4_db = Role.add_or_update(role_4_db)
self.roles['custom_role_pack_action_all_grant'] = role_4_db
# Custom role - "action_all" grant on action
grant_db = PermissionGrantDB(resource_uid=self.resources['action_1'].get_uid(),
resource_type=ResourceType.ACTION,
permission_types=[PermissionType.ACTION_ALL])
grant_db = PermissionGrant.add_or_update(grant_db)
permission_grants = [str(grant_db.id)]
role_4_db = RoleDB(name='custom_role_action_all_grant', permission_grants=permission_grants)
role_4_db = Role.add_or_update(role_4_db)
self.roles['custom_role_action_all_grant'] = role_4_db
# Custom role - "action_execute" on action_1
grant_db = PermissionGrantDB(resource_uid=self.resources['action_1'].get_uid(),
resource_type=ResourceType.ACTION,
permission_types=[PermissionType.ACTION_EXECUTE])
grant_db = PermissionGrant.add_or_update(grant_db)
permission_grants = [str(grant_db.id)]
role_5_db = RoleDB(name='custom_role_action_execute_grant',
permission_grants=permission_grants)
role_5_db = Role.add_or_update(role_5_db)
self.roles['custom_role_action_execute_grant'] = role_5_db
# Custom role - "action_create" grant on pack_1
grant_db = PermissionGrantDB(resource_uid=self.resources['pack_1'].get_uid(),
resource_type=ResourceType.PACK,
permission_types=[PermissionType.ACTION_CREATE])
grant_db = PermissionGrant.add_or_update(grant_db)
permission_grants = [str(grant_db.id)]
role_6_db = RoleDB(name='action_pack_action_create_grant',
permission_grants=permission_grants)
role_6_db = Role.add_or_update(role_6_db)
self.roles['action_pack_action_create_grant'] = role_6_db
# Custom role - "action_all" grant on pack_1
grant_db = PermissionGrantDB(resource_uid=self.resources['pack_1'].get_uid(),
resource_type=ResourceType.PACK,
permission_types=[PermissionType.ACTION_ALL])
grant_db = PermissionGrant.add_or_update(grant_db)
permission_grants = [str(grant_db.id)]
role_7_db = RoleDB(name='action_pack_action_all_grant',
permission_grants=permission_grants)
role_7_db = Role.add_or_update(role_7_db)
self.roles['action_pack_action_all_grant'] = role_7_db
# Custom role - "action_create" grant on action_1
grant_db = PermissionGrantDB(resource_uid=self.resources['action_1'].get_uid(),
resource_type=ResourceType.ACTION,
permission_types=[PermissionType.ACTION_CREATE])
grant_db = PermissionGrant.add_or_update(grant_db)
permission_grants = [str(grant_db.id)]
role_8_db = RoleDB(name='action_action_create_grant',
permission_grants=permission_grants)
role_8_db = Role.add_or_update(role_8_db)
self.roles['action_action_create_grant'] = role_8_db
# Custom role - "action_all" grant on action_1
grant_db = PermissionGrantDB(resource_uid=self.resources['action_1'].get_uid(),
resource_type=ResourceType.ACTION,
permission_types=[PermissionType.ACTION_ALL])
grant_db = PermissionGrant.add_or_update(grant_db)
permission_grants = [str(grant_db.id)]
role_9_db = RoleDB(name='action_action_all_grant',
permission_grants=permission_grants)
role_9_db = Role.add_or_update(role_9_db)
self.roles['action_action_all_grant'] = role_9_db
# Custom role - "action_list" grant
grant_db = PermissionGrantDB(resource_uid=None,
resource_type=None,
permission_types=[PermissionType.ACTION_LIST])
grant_db = PermissionGrant.add_or_update(grant_db)
permission_grants = [str(grant_db.id)]
role_10_db = RoleDB(name='custom_role_action_list_grant',
permission_grants=permission_grants)
role_10_db = Role.add_or_update(role_10_db)
self.roles['custom_role_action_list_grant'] = role_10_db
# Create some mock role assignments
user_db = self.users['custom_role_action_pack_grant']
role_assignment_db = UserRoleAssignmentDB(
user=user_db.name,
role=self.roles['custom_role_action_pack_grant'].name)
UserRoleAssignment.add_or_update(role_assignment_db)
user_db = self.users['custom_role_action_grant']
role_assignment_db = UserRoleAssignmentDB(user=user_db.name,
role=self.roles['custom_role_action_grant'].name)
UserRoleAssignment.add_or_update(role_assignment_db)
user_db = self.users['custom_role_pack_action_all_grant']
role_assignment_db = UserRoleAssignmentDB(
user=user_db.name,
role=self.roles['custom_role_pack_action_all_grant'].name)
UserRoleAssignment.add_or_update(role_assignment_db)
user_db = self.users['custom_role_action_all_grant']
role_assignment_db = UserRoleAssignmentDB(
user=user_db.name,
role=self.roles['custom_role_action_all_grant'].name)
UserRoleAssignment.add_or_update(role_assignment_db)
user_db = self.users['custom_role_action_execute_grant']
role_assignment_db = UserRoleAssignmentDB(
user=user_db.name,
role=self.roles['custom_role_action_execute_grant'].name)
UserRoleAssignment.add_or_update(role_assignment_db)
user_db = self.users['action_pack_action_create_grant']
role_assignment_db = UserRoleAssignmentDB(
user=user_db.name,
role=self.roles['action_pack_action_create_grant'].name)
UserRoleAssignment.add_or_update(role_assignment_db)
user_db = self.users['action_pack_action_all_grant']
role_assignment_db = UserRoleAssignmentDB(
user=user_db.name,
role=self.roles['action_pack_action_all_grant'].name)
UserRoleAssignment.add_or_update(role_assignment_db)
user_db = self.users['action_action_create_grant']
role_assignment_db = UserRoleAssignmentDB(
user=user_db.name,
role=self.roles['action_action_create_grant'].name)
UserRoleAssignment.add_or_update(role_assignment_db)
user_db = self.users['action_action_all_grant']
role_assignment_db = UserRoleAssignmentDB(
user=user_db.name,
role=self.roles['action_action_all_grant'].name)
UserRoleAssignment.add_or_update(role_assignment_db)
user_db = self.users['custom_role_action_list_grant']
role_assignment_db = UserRoleAssignmentDB(
user=user_db.name,
role=self.roles['custom_role_action_list_grant'].name)
UserRoleAssignment.add_or_update(role_assignment_db)
def test_user_has_permission(self):
    """Verify user_has_permission() for ACTION_LIST across user archetypes.

    Admin is always allowed, observer is allowed for view/list style
    permissions, users without roles or grants are denied, and an explicit
    "action_list" grant is honored.
    """
    resolver = ActionPermissionsResolver()

    # (user key, expected outcome) for PermissionType.ACTION_LIST
    scenarios = [
        ('admin', True),                            # admin bypasses all checks
        ('observer', True),                         # observer gets VIEW/LIST perms
        ('no_roles', False),                        # no roles at all
        ('1_custom_role_no_permissions', False),    # role without any grants
        ('custom_role_action_list_grant', True),    # explicit "action_list" grant
    ]

    for user_key, should_be_granted in scenarios:
        user_db = self.users[user_key]
        granted = resolver.user_has_permission(
            user_db=user_db,
            permission_type=PermissionType.ACTION_LIST)
        if should_be_granted:
            self.assertTrue(granted)
        else:
            self.assertFalse(granted)
def test_user_has_resource_api_permission(self):
    """Verify user_has_resource_api_permission() for ACTION_CREATE on action_1.

    Admin and any pack-level or resource-level "create"/"all" grant succeed;
    observer, role-less users and grant-less roles are denied.
    """
    resolver = ActionPermissionsResolver()

    # (user key, expected outcome) for ACTION_CREATE on the same resource
    scenarios = [
        ('admin', True),                            # admin always allowed
        ('observer', False),                        # create is not a view perm
        ('no_roles', False),                        # no roles at all
        ('1_custom_role_no_permissions', False),    # role without any grants
        ('action_pack_action_create_grant', True),  # "action_create" on parent pack
        ('action_pack_action_all_grant', True),     # "action_all" on parent pack
        ('action_action_create_grant', True),       # "action_create" on the resource
        ('action_action_all_grant', True),          # "action_all" on the resource
    ]

    for user_key, should_be_granted in scenarios:
        user_db = self.users[user_key]
        resource_api = ActionAPI.from_model(self.resources['action_1'])
        granted = resolver.user_has_resource_api_permission(
            user_db=user_db,
            resource_api=resource_api,
            permission_type=PermissionType.ACTION_CREATE)
        if should_be_granted:
            self.assertTrue(granted)
        else:
            self.assertFalse(granted)
def test_user_has_resource_db_permission(self):
    """Verify user_has_resource_db_permission() against DB-backed actions.

    Covers admin (all permissions), observer (view only), users without
    grants, pack-level vs. resource-level grants, "action_all" grants and
    the implied view permission granted by "action_execute".
    """
    resolver = ActionPermissionsResolver()
    all_permission_types = PermissionType.get_valid_permissions_for_resource_type(
        ResourceType.ACTION)
    # Admin user, should always return true
    resource_db = self.resources['action_1']
    user_db = self.users['admin']
    self.assertTrue(self._user_has_resource_db_permissions(
        resolver=resolver,
        user_db=user_db,
        resource_db=resource_db,
        permission_types=all_permission_types))
    # Observer, should always return true for VIEW permission
    user_db = self.users['observer']
    self.assertTrue(resolver.user_has_resource_db_permission(
        user_db=user_db,
        resource_db=self.resources['action_1'],
        permission_type=PermissionType.ACTION_VIEW))
    self.assertTrue(resolver.user_has_resource_db_permission(
        user_db=user_db,
        resource_db=self.resources['action_2'],
        permission_type=PermissionType.ACTION_VIEW))
    # ... but observer must not get write-style permissions
    self.assertFalse(resolver.user_has_resource_db_permission(
        user_db=user_db,
        resource_db=self.resources['action_1'],
        permission_type=PermissionType.ACTION_MODIFY))
    self.assertFalse(resolver.user_has_resource_db_permission(
        user_db=user_db,
        resource_db=self.resources['action_2'],
        permission_type=PermissionType.ACTION_DELETE))
    # No roles, should return false for everything
    user_db = self.users['no_roles']
    self.assertFalse(self._user_has_resource_db_permissions(
        resolver=resolver,
        user_db=user_db,
        resource_db=resource_db,
        permission_types=all_permission_types))
    # Custom role with no permission grants, should return false for everything
    user_db = self.users['1_custom_role_no_permissions']
    self.assertFalse(self._user_has_resource_db_permissions(
        resolver=resolver,
        user_db=user_db,
        resource_db=resource_db,
        permission_types=all_permission_types))
    # Custom role with an unrelated permission grant on the parent pack
    user_db = self.users['custom_role_pack_grant']
    self.assertFalse(resolver.user_has_resource_db_permission(
        user_db=user_db,
        resource_db=self.resources['action_1'],
        permission_type=PermissionType.ACTION_VIEW))
    self.assertFalse(resolver.user_has_resource_db_permission(
        user_db=user_db,
        resource_db=self.resources['action_1'],
        permission_type=PermissionType.ACTION_EXECUTE))
    # Custom role with an action grant on the parent pack: view is granted
    # for every action in the pack, but execute is not
    user_db = self.users['custom_role_action_pack_grant']
    self.assertTrue(resolver.user_has_resource_db_permission(
        user_db=user_db,
        resource_db=self.resources['action_1'],
        permission_type=PermissionType.ACTION_VIEW))
    self.assertTrue(resolver.user_has_resource_db_permission(
        user_db=user_db,
        resource_db=self.resources['action_2'],
        permission_type=PermissionType.ACTION_VIEW))
    self.assertFalse(resolver.user_has_resource_db_permission(
        user_db=user_db,
        resource_db=self.resources['action_2'],
        permission_type=PermissionType.ACTION_EXECUTE))
    # Custom role with a direct grant on action (view on action_3 only)
    user_db = self.users['custom_role_action_grant']
    self.assertTrue(resolver.user_has_resource_db_permission(
        user_db=user_db,
        resource_db=self.resources['action_3'],
        permission_type=PermissionType.ACTION_VIEW))
    self.assertFalse(resolver.user_has_resource_db_permission(
        user_db=user_db,
        resource_db=self.resources['action_2'],
        permission_type=PermissionType.ACTION_EXECUTE))
    self.assertFalse(resolver.user_has_resource_db_permission(
        user_db=user_db,
        resource_db=self.resources['action_3'],
        permission_type=PermissionType.ACTION_EXECUTE))
    # Custom role - "action_all" grant on the action parent pack
    user_db = self.users['custom_role_pack_action_all_grant']
    resource_db = self.resources['action_1']
    self.assertTrue(self._user_has_resource_db_permissions(
        resolver=resolver,
        user_db=user_db,
        resource_db=resource_db,
        permission_types=all_permission_types))
    # Custom role - "action_all" grant on the action
    user_db = self.users['custom_role_action_all_grant']
    resource_db = self.resources['action_1']
    self.assertTrue(self._user_has_resource_db_permissions(
        resolver=resolver,
        user_db=user_db,
        resource_db=resource_db,
        permission_types=all_permission_types))
    # Custom role - "action_execute" grant on action_1
    user_db = self.users['custom_role_action_execute_grant']
    resource_db = self.resources['action_1']
    self.assertTrue(resolver.user_has_resource_db_permission(
        user_db=user_db,
        resource_db=resource_db,
        permission_type=PermissionType.ACTION_EXECUTE))
    # "execute" also grants "view"
    self.assertTrue(resolver.user_has_resource_db_permission(
        user_db=user_db,
        resource_db=resource_db,
        permission_type=PermissionType.ACTION_VIEW))
    # ... but "execute" grants none of the write-style permissions
    permission_types = [
        PermissionType.ACTION_CREATE,
        PermissionType.ACTION_MODIFY,
        PermissionType.ACTION_DELETE
    ]
    self.assertFalse(self._user_has_resource_db_permissions(
        resolver=resolver,
        user_db=user_db,
        resource_db=resource_db,
        permission_types=permission_types))
| 47.4294 | 100 | 0.67591 | 2,973 | 24,521 | 5.169526 | 0.060209 | 0.042553 | 0.045806 | 0.032208 | 0.87221 | 0.838506 | 0.77806 | 0.740842 | 0.688204 | 0.630295 | 0 | 0.009483 | 0.24738 | 24,521 | 516 | 101 | 47.521318 | 0.8233 | 0.106032 | 0 | 0.695313 | 0 | 0 | 0.116436 | 0.095628 | 0 | 0 | 0 | 0 | 0.085938 | 1 | 0.010417 | false | 0 | 0.039063 | 0 | 0.052083 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 5 |
9c11133a7bd2adf8eb0405725ed7f6a72268c5df | 13,939 | py | Python | lib/medzoo/Densenet3D.py | McMasterAI/RadiologyandAI-MedicalZooPytorch | 606a1654f08b8bae7c265608694d55fecc1001ed | [
"MIT"
] | 995 | 2019-07-23T11:34:22.000Z | 2022-03-30T21:10:52.000Z | lib/medzoo/Densenet3D.py | pyushkevich/MedicalZooPytorch | c6831d8ddebfbc1b33c04f8cec0d01c2ceb828f6 | [
"MIT"
] | 18 | 2020-04-27T03:38:22.000Z | 2022-01-18T20:55:20.000Z | lib/medzoo/Densenet3D.py | pyushkevich/MedicalZooPytorch | c6831d8ddebfbc1b33c04f8cec0d01c2ceb828f6 | [
"MIT"
] | 209 | 2019-08-21T13:41:13.000Z | 2022-03-30T08:01:52.000Z | import torch.nn as nn
import torch
import torch.nn.functional as F
from torchsummary import summary
from lib.medzoo.BaseModelClass import BaseModel
"""
Implementations based on the HyperDenseNet paper: https://arxiv.org/pdf/1804.02967.pdf
"""
class _HyperDenseLayer(nn.Sequential):
def __init__(self, num_input_features, num_output_channels, drop_rate):
super(_HyperDenseLayer, self).__init__()
self.add_module('norm1', nn.BatchNorm3d(num_input_features)),
self.add_module('relu1', nn.ReLU(inplace=True)),
self.add_module('conv1', nn.Conv3d(num_input_features,
num_output_channels, kernel_size=3, stride=1, padding=1,
bias=False)),
self.drop_rate = drop_rate
def forward(self, x):
new_features = super(_HyperDenseLayer, self).forward(x)
if self.drop_rate > 0:
new_features = F.dropout(new_features, p=self.drop_rate,
training=self.training)
return torch.cat([x, new_features], 1)
class _HyperDenseBlock(nn.Sequential):
    """
    Constructs a series of dense-layers based on in and out kernels list.

    Nine _HyperDenseLayer modules are stacked; because each layer
    concatenates its input with its output, the input width of layer i is
    the block input plus the sum of all previous layers' output channels.
    """

    def __init__(self, num_input_features, drop_rate):
        """
        :param num_input_features: channel count of the block input
        :param drop_rate: dropout rate passed to every dense layer
        """
        super(_HyperDenseBlock, self).__init__()
        # Index 0 is a placeholder so layer i reads out_kernels[i + 1].
        out_kernels = [1, 25, 25, 25, 50, 50, 50, 75, 75, 75]
        self.number_of_conv_layers = 9

        # Accumulate input widths: each layer sees everything produced so far.
        in_kernels = [num_input_features]
        for j in range(1, len(out_kernels)):
            in_kernels.append(in_kernels[-1] + out_kernels[j])
        # FIX: removed leftover debug print() statements that dumped the
        # kernel lists to stdout on every construction.

        for i in range(self.number_of_conv_layers):
            layer = _HyperDenseLayer(in_kernels[i], out_kernels[i + 1], drop_rate)
            self.add_module('denselayer%d' % (i + 1), layer)
class _HyperDenseBlockEarlyFusion(nn.Sequential):
    """Eight-layer dense block used by the early-fusion network variants.

    Identical in structure to _HyperDenseBlock but with one fewer layer and
    a slightly different output-channel schedule.
    """

    def __init__(self, num_input_features, drop_rate):
        """
        :param num_input_features: channel count of the block input
        :param drop_rate: dropout rate passed to every dense layer
        """
        super(_HyperDenseBlockEarlyFusion, self).__init__()
        # Index 0 is a placeholder so layer i reads out_kernels[i + 1].
        out_kernels = [1, 25, 25, 50, 50, 50, 75, 75, 75]
        self.number_of_conv_layers = 8

        # Accumulate input widths: each layer sees everything produced so far.
        in_kernels = [num_input_features]
        for j in range(1, len(out_kernels)):
            in_kernels.append(in_kernels[-1] + out_kernels[j])
        # FIX: removed leftover debug print() statements that dumped the
        # kernel lists to stdout on every construction.

        for i in range(self.number_of_conv_layers):
            layer = _HyperDenseLayer(in_kernels[i], out_kernels[i + 1], drop_rate)
            self.add_module('denselayer%d' % (i + 1), layer)
class SinglePathDenseNet(BaseModel):
    """Single-stream HyperDenseNet: one dense block followed by a stack of
    1x1x1 convolutions and, optionally, a per-voxel classifier head."""

    def __init__(self, in_channels, classes=4, drop_rate=0.1, return_logits=True, early_fusion=False):
        """
        :param in_channels: number of input modalities/feature channels
        :param classes: number of segmentation classes
        :param drop_rate: dropout rate inside the dense layers
        :param return_logits: if False, forward() returns the 150-channel
            feature map instead of class logits (used by the dual-path nets)
        :param early_fusion: use the 8-layer early-fusion dense block
        """
        super(SinglePathDenseNet, self).__init__()
        self.return_logits = return_logits
        self.features = nn.Sequential()
        self.num_classes = classes
        self.input_channels = in_channels

        if early_fusion:
            block = _HyperDenseBlockEarlyFusion(num_input_features=in_channels, drop_rate=drop_rate)
            # Hard-coded feature totals: block output channels for the
            # specific input widths this net is used with (52, 3, other).
            if in_channels == 52:
                total_conv_channels = 477
            else:
                if in_channels == 3:
                    total_conv_channels = 426
                else:
                    total_conv_channels = 503
        else:
            block = _HyperDenseBlock(num_input_features=in_channels, drop_rate=drop_rate)
            if in_channels == 2:
                total_conv_channels = 452
            else:
                total_conv_channels = 451

        self.features.add_module('denseblock1', block)
        # Bottleneck: three 1x1x1 convolutions (-> 400 -> 200 -> 150),
        # each followed by dropout.
        self.features.add_module('conv1x1_1', nn.Conv3d(total_conv_channels,
                                                        400, kernel_size=1, stride=1, padding=0,
                                                        bias=False))
        self.features.add_module('drop_1', nn.Dropout(p=0.5))
        self.features.add_module('conv1x1_2', nn.Conv3d(400,
                                                        200, kernel_size=1, stride=1, padding=0,
                                                        bias=False))
        self.features.add_module('drop_2', nn.Dropout(p=0.5))
        self.features.add_module('conv1x1_3', nn.Conv3d(200,
                                                        150, kernel_size=1, stride=1, padding=0,
                                                        bias=False))
        self.features.add_module('drop_3', nn.Dropout(p=0.5))

        # Per-voxel classification head (150 -> classes), used only when
        # return_logits is True.
        self.classifier = nn.Sequential()
        self.classifier.add_module('classifier', nn.Conv3d(150,
                                                           self.num_classes, kernel_size=1, stride=1, padding=0,
                                                           bias=False))

    def forward(self, x):
        # Dense block + 1x1x1 bottleneck.
        features = self.features(x)
        if self.return_logits:
            out = self.classifier(features)
            return out
        else:
            # Feature-only mode for the dual-path wrappers.
            return features

    def test(self,device='cpu'):
        # Smoke test: forward a small random volume and verify the output
        # has shape (batch, classes, D, H, W); also print a model summary.
        input_tensor = torch.rand(1, self.input_channels, 12, 12, 12)
        ideal_out = torch.rand(1, self.num_classes, 12, 12, 12)
        out = self.forward(input_tensor)
        assert ideal_out.shape == out.shape
        summary(self.to(torch.device(device)), (self.input_channels, 12, 12, 12),device=device)
        # import torchsummaryX
        # torchsummaryX.summary(self, input_tensor.to(device))
        print("DenseNet3D-1 test is complete")
class DualPathDenseNet(BaseModel):
    """Late-fusion HyperDenseNet: one SinglePathDenseNet stream per input
    modality; stream features are fused (concat or sum) before a shared
    1x1x1 classifier."""

    def __init__(self, in_channels, classes=4, drop_rate=0, fusion='concat'):
        """
        2-stream and 3-stream implementation with late fusion
        :param in_channels: 2 or 3 (dual or triple path based on paper specifications).
        Channels are the input modalities i.e T1,T2 etc..
        :param drop_rate: dropout rate for dense layers
        :param classes: number of classes to segment
        :param fusion: 'concat or 'sum'
        """
        super(DualPathDenseNet, self).__init__()
        self.input_channels = in_channels
        self.num_classes = classes
        self.fusion = fusion
        # Each stream emits 150 feature channels; concat multiplies that by
        # the number of streams, sum keeps it at 150.
        if self.fusion == "concat":
            in_classifier_channels = self.input_channels * 150
        else:
            in_classifier_channels = 150

        if self.input_channels == 2:
            # Two-modality variant uses the early-fusion dense block.
            self.stream_1 = SinglePathDenseNet(in_channels=1, drop_rate=drop_rate, classes=classes,
                                               return_logits=False, early_fusion=True)
            self.stream_2 = SinglePathDenseNet(in_channels=1, drop_rate=drop_rate, classes=classes,
                                               return_logits=False, early_fusion=True)
        if self.input_channels == 3:
            self.stream_1 = SinglePathDenseNet(in_channels=1, drop_rate=drop_rate, classes=classes,
                                               return_logits=False)
            self.stream_2 = SinglePathDenseNet(in_channels=1, drop_rate=drop_rate, classes=classes,
                                               return_logits=False)
            self.stream_3 = SinglePathDenseNet(in_channels=1, drop_rate=drop_rate, classes=classes,
                                               return_logits=False)
        self.classifier = nn.Sequential()
        self.classifier.add_module('classifier', nn.Conv3d(in_classifier_channels,
                                                           classes, kernel_size=1, stride=1, padding=0,
                                                           bias=False))

    def forward(self, multi_channel_medical_img):
        """
        :param multi_channel_medical_img: shape of [batch, input_channels, height, width, depth]
        :return: late fusion classification predictions (None on a
            channel-count mismatch)
        """
        channels = multi_channel_medical_img.shape[1]
        if channels != self.input_channels:
            print("Network channels does not match input channels, check your model/input!")
            return None
        else:
            if self.input_channels == 2:
                # Split the modalities into single-channel volumes, one per stream.
                in_stream_1 = multi_channel_medical_img[:, 0, ...].unsqueeze(dim=1)
                in_stream_2 = multi_channel_medical_img[:, 1, ...].unsqueeze(dim=1)
                output_features_t1 = self.stream_1(in_stream_1)
                output_features_t2 = self.stream_2(in_stream_2)
                if self.fusion == 'concat':
                    concat_features = torch.cat((output_features_t1, output_features_t2), dim=1)
                    return self.classifier(concat_features)
                else:
                    # Element-wise sum fusion.
                    features = output_features_t1 + output_features_t2
                    return self.classifier(features)
            elif self.input_channels == 3:
                in_stream_1 = multi_channel_medical_img[:, 0, ...].unsqueeze(dim=1)
                in_stream_2 = multi_channel_medical_img[:, 1, ...].unsqueeze(dim=1)
                in_stream_3 = multi_channel_medical_img[:, 2, ...].unsqueeze(dim=1)
                output_features_t1 = self.stream_1(in_stream_1)
                output_features_t2 = self.stream_2(in_stream_2)
                output_features_t3 = self.stream_3(in_stream_3)
                if self.fusion == 'concat':
                    concat_features = torch.cat((output_features_t1, output_features_t2, output_features_t3), dim=1)
                    return self.classifier(concat_features)
                else:
                    features = output_features_t1 + output_features_t2 + output_features_t3
                    return self.classifier(features)

    def test(self,device='cpu'):
        # Smoke test: forward a random volume and verify the output shape.
        input_tensor = torch.rand(1, self.input_channels, 12, 12, 12)
        ideal_out = torch.rand(1, self.num_classes, 12, 12, 12)
        out = self.forward(input_tensor)
        assert ideal_out.shape == out.shape
        summary(self.to(torch.device(device)), (self.input_channels, 12, 12, 12),device=device)
        # NOTE(review): torchsummaryX is a third-party package imported only
        # here (the sibling classes keep it commented out) — confirm it is an
        # intended dependency of this smoke test.
        import torchsummaryX
        torchsummaryX.summary(self, input_tensor.to(device))
        print("DenseNet3D-2 test is complete!!!!\n\n\n\n\n")
class DualSingleDenseNet(BaseModel):
    """
    2-stream and 3-stream implementation with early fusion
    dual-single-densenet OR Disentangled modalities with early fusion in the paper

    Each modality first passes through its own _HyperDenseLayer; the resulting
    feature maps are concatenated (early fusion) and fed to a single
    SinglePathDenseNet stream that produces the logits.
    """

    def __init__(self, in_channels, classes=4, drop_rate=0.5,):
        """
        :param in_channels: 2 or 3 (dual or triple path based on paper specifications).
        Channels are the input modalities i.e T1,T2 etc..
        :param drop_rate: dropout rate for dense layers
        :param classes: number of classes to segment
        """
        super(DualSingleDenseNet, self).__init__()
        self.input_channels = in_channels
        self.num_classes = classes
        if self.input_channels == 2:
            self.early_conv_1 = _HyperDenseLayer(num_input_features=1, num_output_channels=25, drop_rate=drop_rate)
            self.early_conv_2 = _HyperDenseLayer(num_input_features=1, num_output_channels=25, drop_rate=drop_rate)
            # Each early layer outputs 1 + 25 = 26 channels -> 52 after concat.
            single_path_channels = 52
            self.stream_1 = SinglePathDenseNet(in_channels=single_path_channels, drop_rate=drop_rate,
                                               classes=classes, return_logits=True, early_fusion=True)
            self.classifier = nn.Sequential()
        if self.input_channels == 3:
            self.early_conv_1 = _HyperDenseLayer(num_input_features=1, num_output_channels=25, drop_rate=0)
            self.early_conv_2 = _HyperDenseLayer(num_input_features=1, num_output_channels=25, drop_rate=0)
            self.early_conv_3 = _HyperDenseLayer(num_input_features=1, num_output_channels=25, drop_rate=0)
            # 3 * 26 = 78 channels after early fusion.
            single_path_channels = 78
            self.stream_1 = SinglePathDenseNet(in_channels=single_path_channels, drop_rate=drop_rate,
                                               classes=classes, return_logits=True, early_fusion=True)

    def forward(self, multi_channel_medical_img):
        """
        :param multi_channel_medical_img: shape of [batch, input_channels, height, width, depth]
        :return: early fusion classification predictions (None on a
            channel-count mismatch, matching the sibling networks' behaviour)
        """
        channels = multi_channel_medical_img.shape[1]
        if channels != self.input_channels:
            print("Network channels does not match input channels, check your model/input!")
            return None
        else:
            if self.input_channels == 2:
                in_1 = multi_channel_medical_img[:, 0, ...].unsqueeze(dim=1)
                in_2 = multi_channel_medical_img[:, 1, ...].unsqueeze(dim=1)
                y1 = self.early_conv_1(in_1)
                # BUG FIX: the second modality was previously routed through
                # early_conv_1 as well, leaving early_conv_2 unused and
                # untrained. Each modality now gets its own early layer.
                # (Also removed leftover debug print(y.shape) statements.)
                y2 = self.early_conv_2(in_2)
                in_stream = torch.cat((y1, y2), dim=1)
                logits = self.stream_1(in_stream)
                return logits
            elif self.input_channels == 3:
                in_1 = multi_channel_medical_img[:, 0, ...].unsqueeze(dim=1)
                in_2 = multi_channel_medical_img[:, 1, ...].unsqueeze(dim=1)
                in_3 = multi_channel_medical_img[:, 2, ...].unsqueeze(dim=1)
                y1 = self.early_conv_1(in_1)
                y2 = self.early_conv_2(in_2)
                y3 = self.early_conv_3(in_3)
                in_stream = torch.cat((y1, y2, y3), dim=1)
                logits = self.stream_1(in_stream)
                return logits

    def test(self,device='cpu'):
        # Smoke test: forward a random volume and verify the output shape.
        input_tensor = torch.rand(1, self.input_channels, 12, 12, 12)
        ideal_out = torch.rand(1, self.num_classes, 12, 12, 12)
        out = self.forward(input_tensor)
        assert ideal_out.shape == out.shape
        summary(self.to(torch.device(device)), (self.input_channels, 12, 12, 12),device=device)
        # import torchsummaryX
        # torchsummaryX.summary(self, input_tensor.to(device))
        print("DenseNet3D-3 test is complete\n\n")
| 44.964516 | 116 | 0.59581 | 1,667 | 13,939 | 4.703659 | 0.112178 | 0.039791 | 0.043362 | 0.044892 | 0.773116 | 0.749139 | 0.725418 | 0.711389 | 0.704885 | 0.684479 | 0 | 0.037867 | 0.308487 | 13,939 | 309 | 117 | 45.110032 | 0.775599 | 0.086305 | 0 | 0.545872 | 0 | 0 | 0.032925 | 0.001771 | 0 | 0 | 0 | 0 | 0.013761 | 1 | 0.059633 | false | 0 | 0.027523 | 0 | 0.165138 | 0.050459 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 5 |
9c1f21d0a94ba3cf0f4d0ab860318fdf07d8cb41 | 47,740 | py | Python | mainboard.py | mikey787/Michael | 452a09197504fdd6738175fc3fcea896e94e35ed | [
"MIT"
] | 17 | 2018-06-08T16:51:44.000Z | 2022-03-21T13:45:58.000Z | mainboard.py | mikey787/Michael | 452a09197504fdd6738175fc3fcea896e94e35ed | [
"MIT"
] | 1 | 2020-05-11T17:12:52.000Z | 2020-05-11T17:12:52.000Z | mainboard.py | mikey787/Michael | 452a09197504fdd6738175fc3fcea896e94e35ed | [
"MIT"
] | 10 | 2019-10-25T11:10:13.000Z | 2022-02-05T08:21:41.000Z | import pygame,time
import functions,Property,player,firstpage
pygame.init()
#initialising all the hard coding values
display_width = 1430
display_height = 800
card_length = 130
card_breadth = 60
blockl = 120
blockh = 50
boxl = 350
boxb = 215
gapv = (display_height - 2*boxl)/3
gaph = (display_width - display_height - 2*boxb)/3
#initialising all the colour with the respective RGB values
white = (255,255,255)
black = (0,0,0)
yellow = (255,255,0)
red = (200,0,0)
blue = (0,0,255)
green = (0,150,0)
lblue = (0,0,100)
llblue = (0,160,160)
maroon = (100,10,100)
grey = (160,160,160)
orange = (228,142,88)
#initialising all the checkpoints used throughout the program
player_index = 0
rollonce = 0
card_display = 0
endturn = 0
key = 0
place = " "
timer = 8
n=0
incometax = 0
gotojail = 0
cround = [0,0]
round_complete = 0
spcard_display = 0
railway = 0
rent = 0
rolloncejail = 0
temporary = 0
chance = 0
comm = 0
gameover = 0
timerr = 8
risk = 0
__font = pygame.font.Font('freesansbold.ttf',15)
clock = pygame.time.Clock()
#the main game-loop function
def mainscreen():
    """Run the main game loop: handle quit events, redraw, cap at 40 FPS."""
    running = True
    while running:
        # Stop the loop once the window-close event arrives.
        for evt in pygame.event.get():
            if evt.type == pygame.QUIT:
                running = False
        drawing()       # repaint the whole board every frame
        clock.tick(40)  # limit to 40 frames per second
#this sketches the screen
def drawing():
    """Render one frame: board, dice, buttons, properties, pop-up messages,
    player tokens and the two player-stats panels.

    Also advances the frame-based state machines (win check, debt check,
    passing Go, income/luxury tax) driven by the module-level flags.
    """
    #global variables called
    global key,timer,incometax,gotojail,round_complete,cround,rent,railway,temporary,chance,rollonce,endturn,timerr,risk
    # Background: board area (white square) plus side panel.
    functions.gameDisplay.fill(lblue)
    pygame.draw.rect(functions.gameDisplay, white, [0,0,display_height,display_height])
    functions.addimage('images/image.png',1100,150)
    pygame.draw.rect(functions.gameDisplay, black, [card_length,card_length,display_height-2*card_length,display_height - 2*card_length])
    _font = pygame.font.Font('freesansbold.ttf',20)
    # Two white info boxes for the players on the right-hand panel.
    pygame.draw.rect(functions.gameDisplay,white, [display_height + gaph,gapv,boxb,boxl])
    pygame.draw.rect(functions.gameDisplay,white, [display_height + gaph,boxl + 2*gapv,boxb,boxl])
    #board sketched (corner and special tiles)
    functions.addimage('images/go.png',display_height-card_length,display_height-card_length)
    functions.addimage('images/gotojail.png',display_height-card_length,0)
    functions.addimage('images/parking.png',0,0)
    functions.addimage('images/jail.png',0,display_height-card_length)
    functions.addimage('images/chance1.png',card_length+2*card_breadth,display_height-card_length)
    functions.addimage('images/chance3.png',display_height-card_length,card_length+5*card_breadth)
    functions.addimage('images/chance2.png',card_breadth+card_length,0)
    functions.addimage('images/commChest1.png',7*card_breadth+card_length,display_height-card_length)
    functions.addimage('images/commChest2.png',0,2*card_breadth+card_length)
    functions.addimage('images/commChest2.png',display_height-card_length,2*card_breadth+card_length)
    functions.addimage('images/water.png',7*card_breadth+card_length,0)
    functions.addimage('images/elec.png',0,7*card_breadth+card_length)
    functions.addimage('images/luxury.png',display_height-card_length,7*card_breadth+card_length)
    functions.addimage('images/income.png',card_length+5*card_breadth,display_height-card_length)
    #sketching dice (functions.a / functions.b hold the last roll, 1-6)
    functions.text_in_box("Player %r's turn "%(player_index+1),_font,blue,card_length,card_length,(display_height-2*card_length)/2,blockh)
    if functions.a == 1:
        functions.addimage('images/dice1.png',display_height/2 - 30,card_length + 10)
    if functions.a == 2:
        functions.addimage('images/dice2.png',display_height/2 - 30,card_length + 10)
    if functions.a == 3:
        functions.addimage('images/dice3.png',display_height/2 - 30,card_length + 10)
    if functions.a == 4:
        functions.addimage('images/dice4.png',display_height/2 - 30,card_length + 10)
    if functions.a == 5:
        functions.addimage('images/dice5.png',display_height/2 - 30,card_length + 10)
    if functions.a == 6:
        functions.addimage('images/dice6.png',display_height/2 - 30,card_length + 10)
    if functions.b== 1:
        functions.addimage('images/dice1.png',display_height/2 + 10,card_length + 10)
    if functions.b == 2:
        functions.addimage('images/dice2.png',display_height/2 + 10,card_length + 10)
    if functions.b == 3:
        functions.addimage('images/dice3.png',display_height/2 + 10,card_length + 10)
    if functions.b == 4:
        functions.addimage('images/dice4.png',display_height/2 + 10,card_length + 10)
    if functions.b == 5:
        functions.addimage('images/dice5.png',display_height/2 + 10,card_length + 10)
    if functions.b == 6:
        functions.addimage('images/dice6.png',display_height/2 + 10,card_length + 10)
    #sketching buttons (Button both draws and handles clicks)
    Button("ROLL DICE",(display_height-blockl)/2,(display_height/2+card_length)/2 - 1.25*blockh,blockl,blockh,yellow,llblue,"roll",red)
    Button("MORTGAGE",(display_height-blockl-card_length),(+card_length),blockl,blockh,red,llblue,"mort",yellow)
    Button("END TURN",(display_height-blockl)/2,(display_height/2+card_length)/2 + 0.25*blockh,blockl,blockh,yellow,llblue,"endturn",red)
    Button("BUILD",(display_height-3*blockl)/2 - 0.2*blockl,(display_height/2+card_length)/2 - 0.5*blockh,blockl,blockh,yellow,llblue,"build",red)
    Button("SELL",(display_height+1*blockl)/2 + 0.2*blockl,(display_height/2+card_length)/2 - 0.5*blockh,blockl,blockh,yellow,llblue,"sell",red)
    #sketching properties on board
    Property._property["delhi"].locmaker()
    Property._property["mumbai"].locmaker()
    Property._property["banglore"].locmaker()
    Property._property["newyork"].locmaker()
    Property._property["washingtondc"].locmaker()
    Property._property["sanfrancisco"].locmaker()
    Property._property["london"].locmaker()
    Property._property["manchester"].locmaker()
    Property._property["oxford"].locmaker()
    Property._property["melbourne"].locmaker()
    Property._property["canberra"].locmaker()
    Property._property["sydney"].locmaker()
    Property._property["tokyo"].locmaker()
    Property._property["osaka"].locmaker()
    Property._property["hiroshima"].locmaker()
    Property._property["beijing"].locmaker()
    Property._property["hongkong"].locmaker()
    Property._property["shanghai"].locmaker()
    Property._property["moscow"].locmaker()
    Property._property["saintpetersburg"].locmaker()
    Property._property["capetown"].locmaker()
    Property._property["durban"].locmaker()
    Property.sproperty["rail1"].locmaker()
    Property.sproperty["rail2"].locmaker()
    Property.sproperty["rail3"].locmaker()
    Property.sproperty["rail4"].locmaker()
    _font_ = pygame.font.Font('freesansbold.ttf',50)
    #checking if someone reached winning amount (locks further play)
    if player.player[0].total_wealth >= firstpage.p[0].winamount or player.player[1].total_wealth >= firstpage.p[0].winamount:
        rollonce = 1
        endturn = 1
    if player.player[0].total_wealth >= firstpage.p[0].winamount:
        functions.gameDisplay.fill(black)
        functions.text_in_box("%r Won...Congratulations!!!"%firstpage.p[0].name,_font_,orange,card_length,(display_height/2+card_length)/2 + 1.25*blockh,display_height- 2*card_length,display_height/2 - ((display_height/2+card_length)/2 + 1.25*blockh))
    if player.player[1].total_wealth >= firstpage.p[0].winamount:
        functions.gameDisplay.fill(black)
        functions.text_in_box("%r Won...Congratulations!!!"%firstpage.p[1].name,_font_,orange,card_length,(display_height/2+card_length)/2 + 1.25*blockh,display_height- 2*card_length,display_height/2 - ((display_height/2+card_length)/2 + 1.25*blockh))
    # A bankrupt player means the opponent wins.
    if player.player[0].total_wealth <=0:
        functions.gameDisplay.fill(black)
        functions.text_in_box("%r Won...Congratulations!!!"%firstpage.p[1].name,_font_,orange,card_length,(display_height/2+card_length)/2 + 1.25*blockh,display_height- 2*card_length,display_height/2 - ((display_height/2+card_length)/2 + 1.25*blockh))
    if player.player[1].total_wealth <=0:
        functions.gameDisplay.fill(black)
        functions.text_in_box("%r Won...Congratulations!!!"%firstpage.p[0].name,_font_,orange,card_length,(display_height/2+card_length)/2 + 1.25*blockh,display_height- 2*card_length,display_height/2 - ((display_height/2+card_length)/2 + 1.25*blockh))
    #checking if the current player ran out of cash (debt mode: must sell)
    if player.player[player_index].cash < 0:
        functions.text_in_box("%r ,You are lack of cash, sell your properties/houses"%firstpage.p[player_index].name,_font,orange,card_length,(display_height/2+card_length)/2 + 1.25*blockh,display_height- 2*card_length,display_height/2 - ((display_height/2+card_length)/2 + 1.25*blockh))
        risk = 1
        endturn = 1
    # Debt cleared: allow ending the turn again.
    if risk == 1 and player.player[player_index].cash > 0:
        endturn = 0
    # Passing-Go payout: credit once (timerr == 8) and show the banner for
    # eight frames before resetting.
    if round_complete == 1:
        functions.text_in_box("You Crossed Go , You gained $20000",__font,orange,card_length,(display_height/2+card_length)/2 + 1.25*blockh,display_height- 2*card_length,display_height/2 - ((display_height/2+card_length)/2 + 1.25*blockh))
        if timerr == 8:
            player.player[player_index].cash += 20000
            player.player[player_index].total_wealth += 20000
            cround[player_index]-=40
        timerr-=1
        if timerr == 0:
            round_complete = 0
            timerr = 8
    #running different functions based on the active pop-up flag
    if spcard_display == 1:
        spcard_displayy()
    if railway == 1:
        railways()
    if card_display == 1:
        Prop()
    if chance == 1:
        Chance()
    if comm == 1:
        CommChest()
    # Income tax (incometax == 1): deduct 10% once (key 0 -> 2), show the
    # message while the timer runs, then reset (key -> 1).
    if incometax == 1:
        if key == 0:
            player.player[player_index].total_wealth = 0.9*player.player[player_index].total_wealth
            # NOTE(review): this sets cash to 90% of the ORIGINAL total
            # wealth (wealth was already scaled above), not 90% of cash —
            # looks suspicious, confirm the intended tax rule.
            player.player[player_index].cash = 0.9*(player.player[player_index].total_wealth*10/9)
            key = 2
        if key == 2:
            timer-=1
            functions.text_in_box("You paid income tax of %r"%(0.1*(player.player[player_index].total_wealth*10/9)),__font,orange,display_height/2,display_height/2 ,display_height/2-card_length,display_height/2-card_length)
        if timer == 0:
            incometax = 0
            timer = 8
            key = 1
    # Luxury tax (incometax == 2): flat $30000, same message/timer pattern.
    elif incometax == 2:
        if key == 0:
            player.player[player_index].total_wealth -= 30000
            player.player[player_index].cash -= 30000
            key = 2
        if key == 2:
            timer-=1
            functions.text_in_box("You paid luxury tax of $30000",__font,orange,display_height/2,display_height/2 ,display_height/2-card_length,display_height/2-card_length)
        if timer == 0:
            incometax = 0
            timer = 8
            key = 1
    # Player tokens.
    player.player[1].draw()
    player.player[0].draw()
    #sketching the players boxes (ownership markers on every tile)
    for item,tempo in Property._property.items():
        Property._property[item].squares()
    Property.sproperty["electric"].squares()
    Property.sproperty["water"].squares()
    Property.sproperty["rail1"].squares()
    Property.sproperty["rail2"].squares()
    Property.sproperty["rail3"].squares()
    Property.sproperty["rail4"].squares()
    # Show the detail card for the tile under inspection: railways use the
    # railway card layout, everything else the normal property card.
    if Property.tflag == 1:
        if Property.temo == Property.sproperty["rail1"] or Property.temo == Property.sproperty["rail2"] or Property.temo == Property.sproperty["rail3"] or Property.temo == Property.sproperty["rail4"]:
            Property.temo.rcard()
        else :
            Property.temo.card()
    if gotojail == 1:
        GoToJail()
    # Player stats panels (name, cash, net worth).
    functions.text_in_box(firstpage.p[0].name,_font,maroon,display_height + gaph,gapv,boxb,0.1*boxl)
    functions.text_in_box("Cash %r"%player.player[0].cash,_font,maroon,display_height + gaph,gapv + 0.1*boxl ,boxb,0.1*boxl)
    functions.text_in_box("Net Worth %r"%player.player[0].total_wealth,_font,maroon,display_height + gaph,gapv+0.9*boxl,boxb,0.1*boxl)
    functions.text_in_box(firstpage.p[1].name,_font,maroon,display_height + gaph,2*gapv + boxl,boxb,0.1*boxl)
    functions.text_in_box("Cash %r"%player.player[1].cash,_font,maroon,display_height + gaph,2*gapv+boxl + 0.1*boxl ,boxb,0.1*boxl)
    functions.text_in_box("Net Worth %r"%player.player[1].total_wealth,_font,maroon,display_height + gaph,2*gapv+boxl+0.9*boxl,boxb,0.1*boxl)
    pygame.display.update()
def Button(msg,x,y,l,h,ac,ic,function,tc): #for drawing buttons
    """Draw a clickable button and dispatch the game action bound to it.

    msg        -- label rendered inside the button
    x, y, l, h -- button rectangle (top-left corner, width, height)
    ac, ic     -- active (hovered) / inactive fill colours
    function   -- action keyword ("roll", "endturn", "yes", "Yes", "YeS",
                  "no", "mort", "build", "sell", "roll_for_double", "pay")
    tc         -- text colour for the label

    Side effects: draws on functions.gameDisplay and mutates the
    module-level game state declared global below.
    """
    global player_index,place,card_display,spcard_display,railway,rolloncejail,temporary,timer,timerr
    global rollonce,endturn,key,n,incometax,gotojail,cround,round_complete
    pygame.draw.rect(functions.gameDisplay, ic, [x,y,l,h])
    mouse = pygame.mouse.get_pos()
    click = pygame.mouse.get_pressed()
    if x < mouse[0] < x+l and y < mouse[1] < y+h:
        # Hovered: repaint with the active colour.
        pygame.draw.rect(functions.gameDisplay, ac, [x,y,l,h])
        if click[0]==1: #checking if pressed
            if function == "roll":
                # Normal roll: only when the player is free and has not rolled yet.
                if gotojail == 0 and player.player[player_index].released == 1 and rollonce == 0: #diff working done based on the button pressed
                    n = functions.rolldice()
                    cround[player_index] += n
                    player.player[player_index].movement(n)
                    rollonce = 1
                    working()
                # Jail roll: a double (functions.a == functions.b) releases the player.
                if gotojail == 1 and player.player[player_index].released == 0 and key == 3 and rolloncejail == 0:
                    n = functions.rolldice()
                    if functions.a == functions.b:
                        player.player[player_index].released = 1
                    rolloncejail = 1
                    key = 4
                    endturn = 0
            if function == "endturn" and endturn == 0: #if endturn pressed
                # Hand over to the other player and reset per-turn flags.
                if player_index == 0:
                    player_index+=1
                elif player_index == 1:
                    player_index-=1
                if player.player[player_index].released == 0:
                    gotojail = 1
                rollonce = 0
                card_display = 0
                endturn = 1
                Property.tflag = 0
                spcard_display = 0
                railway = 0
                timer = 8
                timerr = 8
            if function == "yes":
                # Buy the street property the player is standing on.
                player.player[player_index].cash -= Property._property[place].cost
                Property._property[place].owner = player_index
                player.player[player_index].properties.append(place)
                key = 2
            if function == "Yes":
                # Buy a utility (electric / water).
                player.player[player_index].cash -= Property.sproperty[place].cost
                Property.sproperty[place].owner = player_index
                key = 2
            if function == "YeS":
                # Buy a railway station.
                player.player[player_index].cash -= Property.sproperty[place].cost
                Property.sproperty[place].owner = player_index
                player.player[player_index].no_of_railways += 1
                key = 2
            if function == "no":
                # Decline the purchase: hide all open cards.
                card_display = 0
                spcard_display = 0
                railway = 0
            if function == "mort":
                # Mortgage allowed only when the player owns the property and no
                # house stands on it or on any property of the same colour group.
                vvalid = 1
                if Property.temo.owner != player_index or Property.temo.no_of_houses > 0:
                    vvalid = 0
                for xplace,tempo in Property._property.items():
                    if Property._property[xplace].country == Property.temo.country:
                        if Property._property[xplace].no_of_houses > 0 :
                            vvalid = 0
                            break
                if vvalid == 1:
                    Property.temo.owner = None
                    player.player[player_index].cash += Property.temo.mortgage
                    player.player[player_index].total_wealth += Property.temo.mortgage
                    player.player[player_index].total_wealth -= Property.temo.cost
            if function == "build":
                # Build evenly: every property of the colour group must be owned
                # and must not lag behind in house count; 4 houses is the cap.
                valid = 1
                if Property.temo.owner != player_index or Property.temo.no_of_houses == 4:
                    valid = 0
                for xplace,tempo in Property._property.items():
                    if Property._property[xplace].country == Property.temo.country:
                        if (Property._property[xplace].no_of_houses < Property.temo.no_of_houses) or Property._property[xplace].owner != player_index:
                            valid = 0
                            break
                if valid == 1:
                    Property.temo.no_of_houses += 1
                    player.player[player_index].cash -= Property.temo.cost
            if function == "sell":
                # Sell evenly, mirroring "build".
                # BUGFIX: the original initialised `valid = 1` but then wrote and
                # tested a second name `valida`, so a legal sell (no disqualifying
                # condition hit) crashed with NameError on `if valida == 1`.
                # One variable is now used consistently.
                valid = 1
                if Property.temo.owner != player_index or Property.temo.no_of_houses == 0:
                    valid = 0
                for xplace,tempo in Property._property.items():
                    if Property._property[xplace].country == Property.temo.country:
                        if (Property._property[xplace].no_of_houses > Property.temo.no_of_houses) or Property._property[xplace].owner != player_index:
                            valid = 0
                            break
                if valid == 1:
                    Property.temo.no_of_houses -= 1
                    player.player[player_index].cash += 0.5*Property.temo.cost
            if function == "roll_for_double":
                # Switch the jail screen into "waiting for escape roll" phase.
                key = 3
                rolloncejail = 0
            if function == "pay":
                # Pay the $5000 bail and leave jail.
                key = 5
                player.player[player_index].cash -= 5000
                player.player[player_index].total_wealth -= 5000
                player.player[player_index].released = 1
                endturn = 0
    # The label is always drawn, hovered or not.
    _font = pygame.font.Font('freesansbold.ttf',20)
    functions.text_in_box(msg, _font,tc,x,y,l,h)
def working(): #decides which checkpoints to turn on based on the player's current position
    """Match the current player's token position against every square type
    and raise the corresponding flags (card_display / spcard_display /
    railway / chance / comm / incometax / gotojail), setting `place` to the
    key of the square landed on.  Called after every move; the main loop
    then renders the matching card or banner.
    """
    global player_index,place,card_display,spcard_display,railway,rolloncejail,temporary
    global rollonce,endturn,key,n,incometax,gotojail,cround,round_complete,comm,chance
    # A full lap is 40 squares.
    if cround[player_index] >= 40:
        round_complete = 1
    px = player.player[player_index].posx
    py = player.player[player_index].posy
    # Street properties: match the token against every street's coordinates.
    for tplace,tempo in Property._property.items():
        if Property._property[tplace].locx == px and Property._property[tplace].locy == py:
            card_display = 1
            key = 0
            place = tplace
    # The three Chance squares.
    if (px == card_length+2.5*card_breadth and py == display_height-card_length/2) or (px == card_length+1.5*card_breadth and py == card_length/2) or (px == display_height - card_length/2 and py == card_length + 5.5*card_breadth):
        chance = 1
    # The three Community-Chest squares.
    if (px == card_length+7.5*card_breadth and py == display_height-card_length/2) or (px == card_length/2 and py == card_length + 2.5*card_breadth) or (px == display_height - card_length/2 and py == card_length + 2.5*card_breadth):
        comm = 1
    # Utilities (electric / water).
    if (px == Property.sproperty["electric"].locx and py == Property.sproperty["electric"].locy) or (px == Property.sproperty["water"].locx and py == Property.sproperty["water"].locy):
        if px == Property.sproperty["electric"].locx:
            place = "electric"
        # BUGFIX: the original compared posx against water's *locy* (a
        # y-coordinate), so landing on the water works never set
        # place = "water" and rent was charged against a stale `place`.
        elif px == Property.sproperty["water"].locx:
            place = "water"
        spcard_display = 1
        key = 0
    # The four railway stations (at most one can match a given position).
    for rail in ("rail1","rail2","rail3","rail4"):
        if px == Property.sproperty[rail].locx and py == Property.sproperty[rail].locy:
            place = rail
            railway = 1
            key = 0
    # Income tax square.
    if px == (card_length+5*card_breadth + 0.5*card_breadth) and py == (display_height-card_length/2):
        incometax = 1
        key = 0
    # Luxury tax square.
    if px == display_height-card_length/2 and py == 7*card_breadth+card_length+0.5*card_breadth:
        incometax = 2
        key = 0
    # "Go to jail" corner: teleport the token to the jail corner and rewind
    # the lap counter by 20 squares.
    if px == display_height-card_length/2 and py == card_length/2:
        player.player[player_index].posx = card_length/2
        player.player[player_index].posy = display_height-card_length/2
        cround[player_index] -= 20
        gotojail = 1
        key = 0
        temporary = 1
        endturn = 0
def railways(): #screen flow when the player stands on a railway square
    """Handle the railway square: charge rent scaled by how many stations
    the opponent owns, or offer the station for purchase when unowned."""
    global key,timer,incometax,gotojail,round_complete,cround,rent,railway,temporary
    station = Property.sproperty[place]
    if station.owner != None and key == 0 and player_index != station.owner:
        # Opponent owns it: show the card and transfer rent exactly once
        # (timer == 8 marks the first frame of the message countdown).
        station.rcard()
        if timer == 8:
            rates = {1: 2500, 2: 5000, 3: 10000, 4: 20000}
            owned = player.player[station.owner].no_of_railways
            if owned in rates:
                rent = rates[owned]
            player.player[player_index].cash -= rent
            player.player[player_index].total_wealth -= rent
            player.player[station.owner].cash += rent
            player.player[station.owner].total_wealth += rent
        functions.text_in_box("You paid rent of %r to player %r?"%(rent,station.owner+1),__font,orange,display_height/2,display_height/2 ,display_height/2-card_length,display_height/2-card_length)
        timer -= 1
        if timer == 0:
            key = 1
            timer = 8
    if station.owner == None and key == 0:
        # Unowned: offer to buy ("YeS" routes to the railway branch of Button).
        station.rcard()
        functions.text_in_box("Do you want to purchase %r ?"%station.name,__font,orange,display_height/2,display_height/2 - blockh,display_height/2-card_length,display_height/2-card_length)
        Button("YES",display_height*3/4 - card_length/2-blockl,display_height*3/4 - card_length/2 + blockh/2,blockl/2,blockh,yellow,llblue,"YeS",red)
        Button("NO",display_height*3/4 - card_length/2 + blockl/2,display_height*3/4 - card_length/2 + blockh/2,blockl/2,blockh,yellow,llblue,"no",red)
    if key == 2:
        # Purchase confirmed: show the confirmation banner for a few frames.
        station.rcard()
        functions.text_in_box("Successfully purchased %r"%(station.name),__font,orange,display_height/2,display_height/2 ,display_height/2-card_length,display_height/2-card_length)
        timer -= 1
        if timer == 0:
            key = 1
            timer = 8
def spcard_displayy(): #screen flow when the player lands on a utility
    """Handle a utility square (electric / water): charge dice-based rent
    when the opponent owns it, otherwise offer it for purchase."""
    global key,timer,incometax,gotojail,round_complete,cround,rent,railway,temporary
    utility = Property.sproperty[place]
    if utility.owner != None and key == 0 and player_index != utility.owner:
        utility.card()
        if timer == 8:
            # Rent is a multiple of the last dice throw; one owner holding
            # both utilities triples the multiplier.
            dice_sum = functions.a + functions.b
            rent = 3000*dice_sum if Property.sproperty["electric"].owner == Property.sproperty["water"].owner else 1000*dice_sum
            player.player[player_index].cash -= rent
            player.player[player_index].total_wealth -= rent
            player.player[utility.owner].cash += rent
            player.player[utility.owner].total_wealth += rent
        functions.text_in_box("You paid rent of %r to player %r?"%(rent,utility.owner+1),__font,orange,display_height/2,display_height/2 ,display_height/2-card_length,display_height/2-card_length)
        timer -= 1
        if timer == 0:
            key = 1
            timer = 8
    if utility.owner == None and key == 0:
        # Unowned: offer to buy ("Yes" routes to the utility branch of Button).
        utility.card()
        functions.text_in_box("Do you want to purchase %r ?"%utility.name,__font,orange,display_height/2,display_height/2 - blockh,display_height/2-card_length,display_height/2-card_length)
        Button("YES",display_height*3/4 - card_length/2-blockl,display_height*3/4 - card_length/2 + blockh/2,blockl/2,blockh,yellow,llblue,"Yes",red)
        Button("NO",display_height*3/4 - card_length/2 + blockl/2,display_height*3/4 - card_length/2 + blockh/2,blockl/2,blockh,yellow,llblue,"no",red)
    if key == 2:
        # Purchase confirmed: show the confirmation banner for a few frames.
        utility.card()
        functions.text_in_box("Successfully purchased %r"%(utility.name),__font,orange,display_height/2,display_height/2 ,display_height/2-card_length,display_height/2-card_length)
        timer -= 1
        if timer == 0:
            key = 1
            timer = 8
def GoToJail(): #if the player lands on gotojail
    """Jail screen state machine for the current player.

    Phases via the shared `key` variable:
      0 -> "busted" banner counts down; 0/1 -> show the two escape buttons;
      3 -> waiting for the escape roll (handled in Button);
      4 -> roll-outcome banner; 5 -> bail-paid banner.
    `timer` paces each banner; `gotojail` is cleared when a closing banner
    finishes.
    """
    global key,timer,incometax,gotojail,round_complete,cround,rent,railway,temporary
    if temporary == 1:
        # First frame after landing on the square: actually lock the player up.
        player.player[player_index].released = 0
        temporary = 0
    if key == 0:
        # NOTE(review): this phase steps timer by 2 while others reset it to 8;
        # the `== 0` checks rely on timer staying even -- confirm.
        timer -= 2
        functions.text_in_box("Alert! You are caught... BUSTED!",__font,orange,card_length,(display_height/2+card_length)/2 + 1.25*blockh,display_height- 2*card_length,display_height/2 - ((display_height/2+card_length)/2 + 1.25*blockh))
        if timer == 0:
            key = 1
            timer = 8
    if key == 1 or key == 0:
        # Offer the two ways out of jail.
        Button("Pay $5000 and come out",card_length + 0.1*(display_height - 2*card_length),display_height/2,0.8*(display_height - 2*card_length),blockh,yellow,llblue,"pay",red)
        Button("Roll dice for a double",card_length + 0.1*(display_height - 2*card_length),display_height/2 + 2*blockh,0.8*(display_height - 2*card_length),blockh,yellow,llblue,"roll_for_double",red)
    if key == 3:
        # Waiting for the escape roll (the roll itself happens in Button).
        functions.text_in_box("Roll your dice once",__font,orange,card_length,(display_height/2+card_length)/2 + 1.25*blockh,display_height- 2*card_length,display_height/2 - ((display_height/2+card_length)/2 + 1.25*blockh))
    if key == 4 and player.player[player_index].released == 1:
        # Double rolled: announce the release, then leave jail mode.
        timer -= 2
        functions.text_in_box("Lucky Guy! You are released!",__font,orange,card_length,(display_height/2+card_length)/2 + 1.25*blockh,display_height- 2*card_length,display_height/2 - ((display_height/2+card_length)/2 + 1.25*blockh))
        if timer == 0:
            key = 1
            gotojail = 0
            timer = 8
    if key == 4 and player.player[player_index].released == 0:
        # No double: the player stays locked up for this turn.
        timer -= 2
        functions.text_in_box("Better Luck next time!",__font,orange,card_length,(display_height/2+card_length)/2 + 1.25*blockh,display_height- 2*card_length,display_height/2 - ((display_height/2+card_length)/2 + 1.25*blockh))
        if timer == 0:
            key = 1
            gotojail = 0
            timer = 8
    if key == 5:
        # Bail was paid through the "pay" button.
        timer -= 2
        functions.text_in_box("You are released after giving a bail of $5000",__font,orange,card_length,(display_height/2+card_length)/2 + 1.25*blockh,display_height- 2*card_length,display_height/2 - ((display_height/2+card_length)/2 + 1.25*blockh))
        if timer == 0:
            key = 1
            gotojail = 0
            timer = 8
def Prop(): #screen flow when the player lands on a street property
    """Handle a street square: charge the house-scaled rent when the
    opponent owns it, otherwise offer it for purchase."""
    global key,timer,incometax,gotojail,round_complete,cround,rent,railway,temporary
    street = Property._property[place]
    if street.owner != None and key == 0 and player_index != street.owner:
        street.card()
        if timer == 8:
            # Rent comes from the houses[] table indexed by the house count;
            # transfer it exactly once on the first countdown frame.
            due = street.houses[street.no_of_houses]
            player.player[player_index].cash -= due
            player.player[player_index].total_wealth -= due
            player.player[street.owner].cash += due
            player.player[street.owner].total_wealth += due
        functions.text_in_box("You paid rent of %r to player %r?"%(street.houses[street.no_of_houses],street.owner+1),__font,orange,display_height/2,display_height/2 ,display_height/2-card_length,display_height/2-card_length)
        timer -= 1
        if timer == 0:
            key = 1
            timer = 8
    if street.owner == None and key == 0:
        # Unowned: offer to buy ("yes" routes to the street branch of Button).
        street.card()
        functions.text_in_box("Do you want to purchase %r ?"%street.name,__font,orange,display_height/2,display_height/2 - blockh,display_height/2-card_length,display_height/2-card_length)
        Button("YES",display_height*3/4 - card_length/2-blockl,display_height*3/4 - card_length/2 + blockh/2,blockl/2,blockh,yellow,llblue,"yes",red)
        Button("NO",display_height*3/4 - card_length/2 + blockl/2,display_height*3/4 - card_length/2 + blockh/2,blockl/2,blockh,yellow,llblue,"no",red)
    if key == 2:
        # Purchase confirmed: show the confirmation banner for a few frames.
        street.card()
        functions.text_in_box("Successfully purchased %r"%(street.name),__font,orange,display_height/2,display_height/2 ,display_height/2-card_length,display_height/2-card_length)
        timer -= 1
        if timer == 0:
            key = 1
            timer = 8
def Chance(): #if player lands on chance
    """Resolve a Chance card, keyed by the last dice sum (2..12).

    Each branch shows its banner for a few frames (paced by `timer`) and
    applies the card's effect -- a cash change, a forced move, or both.
    `chance` is cleared when the banner finishes; branches that move the
    token re-run working() so the destination square is processed.
    """
    global key,timer,incometax,gotojail,round_complete,cround,rent,railway,temporary,chance,timerr
    n = functions.a + functions.b
    if n == 2:
        # Advance to GO and complete the lap.
        if timer == 8:
            player.player[player_index].posx = display_height - card_length/2
            player.player[player_index].posy = display_height - card_length/2
            cround[player_index] = 40
            round_complete = 1
        functions.text_in_box("Go to our prime location GO and collect your reward money ;)",__font,orange,display_height/2,display_height/2 ,display_height/2-card_length,display_height/2-card_length)
        timer -= 1
        if timer == 0:
            timer = 8
            chance = 0
    if n == 3:
        # Free trip to board position 9; past it already means lapping GO.
        timer -= 1
        if timer == 0:
            player.player[player_index].posx = card_length + card_breadth/2
            player.player[player_index].posy = display_height - card_length/2
            if cround[player_index]>9:
                cround[player_index] = 49
                round_complete = 1
            else:
                cround[player_index] = 9
        functions.text_in_box("You are given a free trip to Beijing ,Enjoy the delight of it!",__font,orange,display_height/2,display_height/2 ,display_height/2-card_length,display_height/2-card_length)
        if timer == 0:
            timer = 8
            chance = 0
            working()
    if n == 4:
        # Pay $10000 damages (charged once on the first frame).
        if timer == 8:
            player.player[player_index].cash -= 10000
            player.player[player_index].total_wealth -= 10000
        functions.text_in_box("Oops! You broke the window of Mr. William's Car,Pay him $10000",__font,orange,display_height/2,display_height/2 ,display_height/2-card_length,display_height/2-card_length)
        timer -= 1
        if timer == 0:
            timer = 8
            chance = 0
    if n == 5:
        # Sent to the go-to-jail corner (lap counter forced to 30).
        if timer == 8:
            player.player[player_index].posx = display_height - card_length/2
            player.player[player_index].posy = card_length/2
            cround[player_index] = 30
        functions.text_in_box("How Dare you burst crackers in front of parliament,living in India! Busted!!",__font,orange,display_height/2,display_height/2 ,display_height/2-card_length,display_height/2-card_length)
        timer -= 1
        if timer == 0:
            timer = 8
            chance = 0
            working()
    if n == 6:
        # Move back three spaces (37 forward == 3 back on a 40-square board).
        timer -= 1
        if timer == 0:
            player.player[player_index].movement(37)
            cround[player_index] = cround[player_index]%40
        functions.text_in_box("Earthquake expected! Go back three spaces",__font,orange,display_height/2,display_height/2 ,display_height/2-card_length,display_height/2-card_length)
        if timer == 0:
            timer = 8
            chance = 0
            working()
    if n == 7:
        # Pay $30000 (charged once on the first frame).
        if timer == 8:
            player.player[player_index].cash -= 30000
            player.player[player_index].total_wealth -= 30000
        functions.text_in_box("Its you birthday,Now give party to your friends...cost $30000",__font,orange,display_height/2,display_height/2 ,display_height/2-card_length,display_height/2-card_length)
        timer -= 1
        if timer == 0:
            timer = 8
            chance = 0
    if n == 8:
        # Collect the $40000 lottery prize.
        if timer == 8:
            player.player[player_index].cash += 40000
            player.player[player_index].total_wealth += 40000
        functions.text_in_box("Congo! You won lottery prize of $40000",__font,orange,display_height/2,display_height/2 ,display_height/2-card_length,display_height/2-card_length)
        timer -= 1
        if timer == 0:
            timer = 8
            chance = 0
    if n == 9:
        # Free trip to board position 21; past it already means lapping GO.
        # NOTE(review): this branch steps timer by 2 while most others step
        # by 1 -- confirm that is intended.
        timer -= 2
        if timer == 0:
            player.player[player_index].posx = card_length + card_breadth/2
            player.player[player_index].posy = card_length/2
            if cround[player_index]>21:
                cround[player_index] = 61
                round_complete = 1
            else :
                cround[player_index] = 21
        functions.text_in_box("You are given a free trip to Delhi ,Enjoy the delight of it!",__font,orange,display_height/2,display_height/2 ,display_height/2-card_length,display_height/2-card_length)
        if timer == 0:
            timer = 8
            chance = 0
            working()
    if n == 10:
        # Sent to the Electric Company square.
        timer -= 2
        if timer == 0:
            player.player[player_index].posx = card_length/2
            player.player[player_index].posy = display_height - card_length - 1.5*card_breadth
        functions.text_in_box("Go to Electric Company and feel the shock!",__font,orange,display_height/2,display_height/2 ,display_height/2-card_length,display_height/2-card_length)
        if timer == 0:
            timer = 8
            chance = 0
            working()
    if n == 11:
        # Collect the $30000 prize.
        if timer == 8:
            player.player[player_index].cash += 30000
            player.player[player_index].total_wealth += 30000
        functions.text_in_box("You won the first prize as a hotel manager, collect $30000",__font,orange,display_height/2,display_height/2 ,display_height/2-card_length,display_height/2-card_length)
        timer -= 1
        if timer == 0:
            timer = 8
            chance = 0
    if n == 12:
        # Pay the $20000 bill.
        if timer == 8:
            player.player[player_index].cash -= 20000
            player.player[player_index].total_wealth -= 20000
        functions.text_in_box("Smoking kills! clear your bills , pay $20000",__font,orange,display_height/2,display_height/2 ,display_height/2-card_length,display_height/2-card_length)
        timer -= 1
        if timer == 0:
            timer = 8
            chance = 0
def CommChest(): #if player lands on community chest
    """Resolve a Community Chest card, keyed by the last dice sum (2..12).

    Mirrors Chance(): each branch shows its banner for a few frames (paced
    by `timer`) and applies the card's effect.  `comm` is cleared when the
    banner finishes; branches that move the token re-run working() so the
    destination square is processed.
    """
    global key,timer,incometax,gotojail,round_complete,cround,rent,railway,temporary,chance,comm,timerr
    n = functions.a + functions.b
    if n == 2:
        # Advance to GO and complete the lap.
        if timer == 8:
            player.player[player_index].posx = display_height - card_length/2
            player.player[player_index].posy = display_height - card_length/2
            cround[player_index] = 40
            round_complete = 1
        functions.text_in_box("Go to our prime location GO and collect your reward money ;)",__font,orange,display_height/2,display_height/2 ,display_height/2-card_length,display_height/2-card_length)
        timer -= 1
        if timer == 0:
            timer = 8
            comm = 0
    if n == 3:
        # Free trip to board position 9; past it already means lapping GO.
        timer -= 2
        if timer == 0:
            player.player[player_index].posx = card_length + card_breadth/2
            player.player[player_index].posy = display_height - card_length/2
            if cround[player_index]>9:
                cround[player_index] = 49
                round_complete = 1
            else:
                cround[player_index] = 9
        functions.text_in_box("You are given a free trip to Beijing ,Enjoy the delight of it!",__font,orange,display_height/2,display_height/2 ,display_height/2-card_length,display_height/2-card_length)
        if timer == 0:
            timer = 8
            comm = 0
            working()
    if n == 4:
        # Pay $10000 damages (charged once on the first frame).
        if timer == 8:
            player.player[player_index].cash -= 10000
            player.player[player_index].total_wealth -= 10000
        functions.text_in_box("Oops! You broke the window of Mr. William's Car,Pay him $10000",__font,orange,display_height/2,display_height/2 ,display_height/2-card_length,display_height/2-card_length)
        timer -= 1
        if timer == 0:
            timer = 8
            comm = 0
    if n == 5:
        # Sent to the go-to-jail corner (lap counter forced to 30).
        timer -= 2
        if timer == 0:
            player.player[player_index].posx = display_height - card_length/2
            player.player[player_index].posy = card_length/2
            cround[player_index] = 30
        functions.text_in_box("How Dare you burst crackers in front of parliament,living in India! Busted!!",__font,orange,display_height/2,display_height/2 ,display_height/2-card_length,display_height/2-card_length)
        if timer == 0:
            timer = 8
            comm = 0
            working()
    if n == 6:
        # Move back three spaces (37 forward == 3 back on a 40-square board).
        timer -= 2
        if timer == 0:
            player.player[player_index].movement(37)
            cround[player_index] = cround[player_index]%40
        functions.text_in_box("Earthquake expected! Go back three spaces",__font,orange,display_height/2,display_height/2 ,display_height/2-card_length,display_height/2-card_length)
        if timer == 0:
            timer = 8
            comm = 0
            working()
    if n == 7:
        # Pay $30000 (charged once on the first frame).
        if timer == 8:
            player.player[player_index].cash -= 30000
            player.player[player_index].total_wealth -= 30000
        functions.text_in_box("Its you birthday,Now give party to your friends...cost $30000",__font,orange,display_height/2,display_height/2 ,display_height/2-card_length,display_height/2-card_length)
        timer -= 1
        if timer == 0:
            timer = 8
            comm = 0
    if n == 8:
        # Collect the $40000 lottery prize.
        if timer == 8:
            player.player[player_index].cash += 40000
            player.player[player_index].total_wealth += 40000
        functions.text_in_box("Congo! You won lottery prize of $40000",__font,orange,display_height/2,display_height/2 ,display_height/2-card_length,display_height/2-card_length)
        timer -= 1
        if timer == 0:
            timer = 8
            comm = 0
    if n == 9:
        # Free trip to board position 21; past it already means lapping GO.
        timer -= 2
        if timer == 0:
            player.player[player_index].posx = card_length + card_breadth/2
            player.player[player_index].posy = card_length/2
            if cround[player_index]>21:
                cround[player_index] = 61
                round_complete = 1
            else :
                cround[player_index] = 21
        functions.text_in_box("You are given a free trip to Delhi ,Enjoy the delight of it!",__font,orange,display_height/2,display_height/2 ,display_height/2-card_length,display_height/2-card_length)
        if timer == 0:
            timer = 8
            comm = 0
            working()
    if n == 10:
        # Sent to the Electric Company square.
        timer -= 2
        if timer == 0:
            player.player[player_index].posx = card_length/2
            player.player[player_index].posy = display_height - card_length - 1.5*card_breadth
        functions.text_in_box("Go to Electric Company and feel the shock!",__font,orange,display_height/2,display_height/2 ,display_height/2-card_length,display_height/2-card_length)
        if timer == 0:
            timer = 8
            comm = 0
            working()
    if n == 11:
        # Collect the $30000 prize.
        if timer == 8:
            player.player[player_index].cash += 30000
            player.player[player_index].total_wealth += 30000
        functions.text_in_box("You won the first prize as a hotel manager, collect $30000",__font,orange,display_height/2,display_height/2 ,display_height/2-card_length,display_height/2-card_length)
        timer -= 1
        if timer == 0:
            timer = 8
            comm = 0
    if n == 12:
        # Pay the $20000 bill.
        if timer == 8:
            player.player[player_index].cash -= 20000
            player.player[player_index].total_wealth -= 20000
        functions.text_in_box("Smoking kills! clear your bills , pay $20000",__font,orange,display_height/2,display_height/2 ,display_height/2-card_length,display_height/2-card_length)
        timer -= 1
        if timer == 0:
            timer = 8
            comm = 0
| 56.765755 | 608 | 0.574843 | 5,691 | 47,740 | 4.637146 | 0.069232 | 0.122774 | 0.107692 | 0.108071 | 0.809625 | 0.786283 | 0.753164 | 0.730883 | 0.699166 | 0.657408 | 0 | 0.041631 | 0.318224 | 47,740 | 840 | 609 | 56.833333 | 0.769172 | 0.018517 | 0 | 0.573925 | 0 | 0 | 0.065108 | 0.003395 | 0 | 0 | 0 | 0 | 0 | 1 | 0.013441 | false | 0 | 0.002688 | 0 | 0.016129 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 5 |
9c26c115e44fe6345ac94e12b377778e755f46e1 | 189,237 | py | Python | py_Req_dummy/DLN_req.py | delmeyer/PC_MC_DLN_DelroyMeyer | ffc72e2faab09f526c3fa628e4ec07a96d109d53 | [
"MIT"
] | null | null | null | py_Req_dummy/DLN_req.py | delmeyer/PC_MC_DLN_DelroyMeyer | ffc72e2faab09f526c3fa628e4ec07a96d109d53 | [
"MIT"
] | null | null | null | py_Req_dummy/DLN_req.py | delmeyer/PC_MC_DLN_DelroyMeyer | ffc72e2faab09f526c3fa628e4ec07a96d109d53 | [
"MIT"
] | null | null | null |
# coding: utf-8
# # Structural durability analyses for carbon/epoxy laminates
#
# ## §1: Introduction
# # DLN Contents
#
# 0. [Materials Characterization Laboratory DLN | A Showcase for Convergent Manufacturing Group Ltd](DLN_0_About_Me.ipynb) - An 'Welcome' message to the Convergent Manufacturing - Materials Characterization Group, explaining the concept of these DLN entries, why I made them out of interest for the team's *Characterization Lab Technician/Scientist* opening, and presenting a brief 'About Me' StoryMap
# <br>
#
# 1. [§1: Structural durability analyses of carbon fibre & epoxy-based composites - Introduction](DLN_1_Introduction.ipynb) - An introduction to the quasi-fatigue experiments performed on carbon fibre/epoxy composite specimens.
# <br>
#
# 2. [§2: Structural durability analyses of carbon fibre & epoxy-based composites - Laminate mechanics theory](DLN_2_Theory.ipynb) - A discussion of composite laminate theory, as a basis for performing stress-strain-deformation calculations to characterize the structural durability of composite laminate layups.
# <br>
#
# 3. [§3: Structural durability analyses of carbon fibre & epoxy-based composites - Experimental results](DLN_3_Experimental.ipynb) - Using Python scientific programming libraries to explore and visualize quasi-fatigue tensile & compressive loading experiments on carbon fibre/epoxy composite test coupons.
# <br>
#
# 4. [§4: Structural durability analyses of carbon fibre & epoxy-based composites - Matrix calculations](DLN_4_Calculations.ipynb) - Using MATLAB to perform structural durability matrix calculations from carbon fibre/epoxy composite test coupon experimental data.
# # 0. DLN Introduction
#
# This digital laboratory notebook (DLN) is comprised of four linked entries that are listed in the DLN contents section above. The DLN provides a comprehensive platform that:
#
# * Discuss composite fatigue and pertinent composite mechanics theory
# * Provide an overview of tensile and compression quasi-static fatigue tests performed on carbon fibre/epoxy composite laminate coupons with varying fibre orientations and ply layers
# * Provides a scientific computing platform to utilize tensile & compressive quasi-static fatigue experiment data for:
# * Stress-strain data visualization
# * Statistical data analyses
# * Store empirically-determined elastic properties of the tested composite coupons for structural durability calculations
# * Perform matrix calculations to quantify the structural durability of the tested composite laminate coupons materials
# ## I. Experiment log
# * **Date of experiment**: 10.14.2017
# * **Principle investigator**: Delroy Meyer, EIT, BASc
# * **Test operators**: Jürgen Müller, Delroy Meyer, Cintia Oliveria
# * **Lead investigator**: Prof. Dr-mont. Zoltan Major
# * **Course**: LVA Nr. 378.029 - 480ADPTPPBV17 - Polymer Product Design and Engineering III - Graduate Seminar
# * **Location**: Institute of Polymer Materials and Testing (IPMT), JKU Linz - Linz, Austria
# * Compounding and specimen preparation Lab: *Composite coupon specimen preparation*
# * Mechanical Lab: *Tensile & compression testing*
#
# ### i. Experiment test (lab) environment conditions
# *Measurement taken: 10-14-2017 08:45:07*
# <b>
#
# $T_{test} (°C) = 21.23$ (*within* $ 23 ± 3 °C$ *standard laboratory atmosphere range as per ASTM D5229*)
# <br>
# $RH (\%) = 55.7$ (*within* $ 50 ± 10 \%$ *standard laboratory atmosphere range as per ASTM D5229*)
# # 1. Purpose of experiment and data analyses
# These experiments were conducted to investigate the dependency of the structural durability of (high fibre volume fraction) carbon fibre/epoxy laminate composites on fibre orientation. Quasi-static tensile and compressive fatigue tests were performed with unidirectional (UD) carbon/epoxy laminates at angles of 0°, 45° and 90°.
#
# ## 1.1 Why these investigations are needed
# 1. Short and long-fibre reinforced composites offer advantages over conventional metallic materials for the manufacture of components across many different manufacturing industries (*common examples: aerospace/automotive component manufacture, building materials, sports equipment, chemical process industry components*)
# * Polymeric laminated composites, like carbon-fibre/epoxy laminates, present high strength-to-weight and stiffness-to-weight ratios when compared with metallic materials
# <br>
#
# 2. To dimension parts undergoing sustained tensile or compressive forces in application, and the material's consequent deformation response, the quasi-fatigue tensile & compressive characterization testing is necessary
# <br>
#
# 3. *My personal opinion*: Composite materials design offers, for all practical intents and purposes, virtually limitless options for component design, processing and application-use. Consequently, this mean that, for the foreseeable future, there will be an ongoing need to conduct testing for both existing and novel composite materials design and structural analyses; the evolution of innovation in composite materials development and processing technologies will further expand the scope of testing needed.
# ## 1.2 Fatigue mechanics of composite materials - conceptual theory
#
# * Due to their inner structure consisting of continuous fibres and matrix material the macroscopic properties of composite materials are anisotropic
# * UD-lamina layers, themselves, are considered to be transversely isotropic; on planes parallel with the fibre direction they behave orthotropically and on an imaginary plane perpendicular to the fibre direction they behave isotropically
#
# * The interface between the fibre and the matrix resin and its properties influence the performance of the composite as well
#
# * When mechanical quasi-static or fatigue loads are applied, a variety of complex damage mechanisms such as matrix microcracking, interfacial fibre/matrix debonding, transverse rupture, fibre rupture or delamination occur on microscopic scale
#
# * The properties of unidirectional (UD) composites transverse to fibre direction are generally low. Under fatigue loads, the matrix is subjected to strain-controlled fatigue due to the constraint provided by the fibres
#
# * Higher fibre volume fractions increase mechanical properties such as tensile strength and stiffness in fibre direction
#
# * The fibres of an CFRPs are what “deliver” its excellent properties but the resin (the matrix) is still required in order to:
# * provide a cohesive component
# * direct loads into the fibres
# * protect the fibres against environmental influences
# * prevent the fibres from buckling when subjected to compressive stress
# * *However, this means that*: unfortunately stresses will of necessity now arise in the matrix as well, and also at the interface of the fibres and matrix
#
# ## 1.3 Structural durability calculations as part of composite fatigue-life prediction
# Structural durability calculations constitute 'one piece in the puzzle' of forming a complete fatigue-life prediction model for composite materials. These calculations rely on the following studies:
#
# 1. An evaluation of the **load-time-history** of the material
# <br>
#
# 2. Analyses of the **local anisotropic behaviour** of the material:
# <br>
#
# 3. Analyses of the **macroscopic behaviour** of the material:
# * quasi-static stress/time investigations:
# * data for stress-strain behaviour and strength (elasticity and strength properties)
# * cyclic stress/cycle investigations (S-N curves)
# <br>
#
# 4. Consideration of **Lamina input variables**:
# * lamina thickness
# * fibre content
# * type and orientation of reinforcement
#
# A visual representation of the full workflow for creating a composite fatigue-life prediction model, provided by Mösenbacher et al. [1], shows how these studies contribute to the workflow.
#
# ")
#
# *<center> Fig. 1 - Complete workflow for developing a composite fatigue-life prediction model (Mösenbacher, ECCM16, 2016)</center>*
# ## 1.4 Data analysis & calculations to be determined from this investigation
#
# This DLN will focus on the ***load-time history*** and ***stress-strain*** analysis segments of the **composite fatigue-life prediction model workflow** shown in Figure 1 above.
#
# The following results will be presented from the tensile and compressive quasi-static fatigue loading experiment runs performed on carbon fibre/epoxy laminate coupons of various fibre orientations and ply layers:
#
# 1. Empirical determination of tensile/compressive moduli of elasticity
# <br>
#
# 2. Calculation of stress-strain elastic engineering constants to characterize the structural durability of composite materials undergoing long-term static (force) loading in application
# <br>
###########################################################################################################################################################################
#==========================================================================================================================================================================
###########################################################################################################################################################################
# coding: utf-8
# # <font color = 'green'>'Delroy Meyer - (Future) Engineer | Materials Characterization Group at Convergent' <font>
#
# #### *<font color = 'blue'> An Interactive Notebook to showcase how I can add value to the Materials Characterization team and 'nudge' your decision to interview (and hopefully hire) me!<font>*<a class="tocSkip">
#
# * Press ```Alt+r``` to start the slideshow
# * Press ```Spacebar``` to toggle the slides forward
# * Press ```Shift``` + ```Spacebar``` to toggle the slides backwards
# # DLN Contents
#
# 0. [Materials Characterization Laboratory DLN | A Showcase for Convergent Manufacturing Group Ltd](DLN_0_About_Me.ipynb) - A 'Welcome' message to the Convergent Manufacturing - Materials Characterization Group, explaining the concept of these DLN entries, why I made them out of interest for the team's *Characterization Lab Technician/Scientist* opening, and presenting a brief 'About Me' StoryMap
# <br>
#
# 1. [§1: Structural durability analyses of carbon fibre & epoxy-based composites - Introduction](DLN_1_Introduction.ipynb) - An introduction to the quasi-fatigue experiments performed on carbon fibre/epoxy composite specimens.
# <br>
#
# 2. [§2: Structural durability analyses of carbon fibre & epoxy-based composites - Laminate mechanics theory](DLN_2_Theory.ipynb) - A discussion of composite laminate theory, as a basis for performing stress-strain-deformation calculations to characterize the structural durability of composite laminate layups.
# <br>
#
# 3. [§3: Structural durability analyses of carbon fibre & epoxy-based composites - Experimental results](DLN_3_Experimental.ipynb) - Using Python scientific programming libraries to explore and visualize quasi-fatigue tensile & compressive loading experiments on carbon fibre/epoxy composite test coupons.
# <br>
#
# 4. [§4: Structural durability analyses of carbon fibre & epoxy-based composites - Matrix calculations](DLN_4_Calculations.ipynb) - Using MATLAB to perform structural durability matrix calculations from carbon fibre/epoxy composite test coupon experimental data.
# # Purpose of this interactive notebook
# Hi Alastair, Martin, Convergent & Co.! I've created this digital notebook as a supplement to my application package for the [Characterization Lab Technician/Scientist](https://www.indeed.ca/cmp/Convergent-Manufacturing-Techn/jobs/Characterization-Lab-Technician-Scientist-196df99b0a902fca?q=materials+engineer&vjs=3) opening.
#
# I wanted a fun way to show you, and the Materials Characterization Group at Convergent, that I'm the type of self-motivated, hands-on, detail oriented individual that would add value to your group and to the Convergent business; moreover, I believe that this particular role is one in which I can make a strong impact in.
# # The 'Interactive DLN' concept
# This 'notebook concept' is a spin-off idea from a (proprietary) digital laboratory notebook (DLN) that I created for my master thesis project with [nanoleq AG](https://www.nanoleq.com), the [ETH Zürich Laboratory of Biosensors and Bioelectronics](http://www.lbb.ethz.ch/research.html) and the [JKU Linz Institute for Polymer Materials and Testing](https://www.jku.at/en/institute-of-polymeric-materials-and-testing/research/).
# # The 'Interactive DLN' concept <a class="tocSkip">
# About a week into my thesis project work, I quickly abandoned recording experimental data and observations in the paper laboratory notebook given to me.
#
# Under the mentorship of my principal thesis advisor, who had performed extensive experimental data recording, observation archiving and subsequent data analytics/visualization computing in Python/R programming environments during his PhD, I quickly adopted a similar method to manage my experiments.
# # The 'Interactive DLN' concept <a class="tocSkip">
# I created an interactive digital laboratory notebook, where I could:
#
# **1. Effectively archive my experimental data, observations and procedures**
# * Import standardized test procedures and record how I executed the test procedures (reproducibility)
# * Ensure the systematic archival and organization of all experimental runs performed
# * Assign and apply data-type attributes to independent/dependent physico-chemical variables, to apply object oriented programming routines thereafter
# # The 'Interactive DLN' concept <a class="tocSkip">
# I created an interactive digital laboratory notebook, where I could:
#
# **2. Make use of powerful Python, R and MATLAB libraries, in an interactive computing environment to:**
# * Perform data analyses on my experimental observations
# * Create powerful data visualization graphics (both static and interactive)
# * Conduct interactive presentations with my thesis supervisors and company team members
# # Polymer and composite materials testing & characterization 'Portfolio'
# I've gone ahead and created a small 'portfolio of sorts' of select material characterization projects that I've conducted during my graduate education. Use the 'DLN Contents' navigation cell (*at the beginning of each DLN Jupyter Notebook*), to access specific projects.
# # Getting to know more about me
# I've created a 'StoryMap' of my professional, academic and cultural development over the past 5 years, so you can get a better sense of who I am and how my personality might fit in with your team's personalities. Navigate the StoryMap to find out a bit more about me and invite me for an interview to meet me in person
#
# (*Exit the SlideShow to get a better view of the StoryMap in the Jupyter DLN, or click [here](https://uploads.knightlab.com/storymapjs/2e0e49870f3e1b20bcc6405a8e568761/a-journey-through-the-progression-of-my-profession/index.html) to view it in your browser*)
# In[5]:
# Embed the career-progression StoryMap as an inline iframe in the notebook.
from IPython.display import IFrame

# URL of the hosted StoryMap (KnightLab); split across adjacent string
# literals purely for readability — the runtime value is unchanged.
_storymap_url = (
    "https://uploads.knightlab.com/storymapjs/"
    "2e0e49870f3e1b20bcc6405a8e568761/"
    "a-journey-through-the-progression-of-my-profession/index.html"
)
IFrame(src=_storymap_url, width=800, height=600)
# # Maintaining a laboratory notebook - Why go digital? <a class="tocSkip">
# ## Open-Source (Python & Jupyter Notebook) DLN vs. Proprietary DLN Software? <a class="tocSkip">
#
# * Jupyter - Interactive computer programming environment, compatible with many programming languages including Python, R, Julia, PHP, MATLAB, Mathematica | [Jupyter Kernels for Programming Languages](https://github.com/jupyter/jupyter/wiki/Jupyter-kernels)
#
# ### Pros and Cons of OS-DLN vs. P-DLN <a class="tocSkip">
#
# | DLN Attribute | OS-DLN | P-DLN |
# | ----------- | ----------- | ----------- |
# | Security | Title | Title |
# | Price | Text | Text |
# | Build Effort | Text | Text |
# | Customization | Text | Text |
# | Data Storage | Text | Text |
# | Server requirements | Text | Text |
# | Search functionality | Text | Text |
#
#
# ## The 'Experiment Reproducibility' Problem <a class="tocSkip">
#
# * Retrieval of experimental data -
# * Archiving of experimental data:
# * Electronic archival (document control) system to store DLN, relevant experimental data files in centralized server (Cloud computing options, local server options)
# * 'Standardized' recording of experimental results & observations
# * Quick reference links to electronic resources:
# * Experimental E-SOPs
# * Academic papers
# * Simulation results in ABAQUS (Python Scripting), COMPRO, RAVEN
# * Material models
# * Knowledge transfer and record keeping
#
# ## Data visualization <a class="tocSkip">
#
# * *'Reporting results to engineers and management using Microsoft Word and Excel'* - is there a better way?
# * Data visualization + presentation of experimental results, all done from DLN platform
#
# ## *Continue* <a class="tocSkip">
# # Data Creation & Retrieval with OS-DLN <a class="tocSkip">
# The OS-DLN must be able to create, import, store and retrieve all important data types in digital format, such as:
#
# ## 1. Text processors <a class="tocSkip">
# * WYSIWYG - MS Word | [Jupyter notebook extension for exporting notebook as MS Word doc](https://github.com/innovationOUtside/nb_extension_wordexport)
# * Import MS Word documents to P-DLN | [extract data from MS Word Documents using Python](https://towardsdatascience.com/how-to-extract-data-from-ms-word-documents-using-python-ed3fbb48c122)
#
# ## 2. Spreadsheet tool <a class="tocSkip">
#
# Allows you to create tables, enter and format data, perform calculations and create graphs within the ELN, as well as import from and export to Excel:
#
# * Import Excel and CSV Formats (.xls, .xlsx, .csv) to DLN | [Python Excel Tutorial: The Definitive Guide](https://www.datacamp.com/community/tutorials/python-excel-tutorial)
# * Import Google Sheets to DLN | [Google Sheets + Python](https://www.twilio.com/blog/2017/02/an-easy-way-to-read-and-write-to-a-google-spreadsheet-in-python.html)
#
# ## 3. Images <a class="tocSkip">
#
# The OS-DLN requires the capability to import images and add annotations, keeping in mind the following:
# * Large file sizes: SEM/TEM microscopic images require large storage space in the OS-DLN and robust computing power to render the images in the OS-DLN for all use-cases (DLN editing, presentations)
# * Images must not affect or impact data recording and data analyses
#
# ## 4. Mobile/tablet Apps (nice to have) <a class="tocSkip">
#
# Mobile and tablet apps, as well as responsive design, allowing researchers to use their preferred device to record their experiment notes
#
# ## 5. Search functionality <a class="tocSkip">
#
# The OS-DLN requires simple, effective and sophisticated search functionality, allowing you to retrieve your data by author, tag, unique ID, textual content, timestamp, and/or structured data query (fast keyword search capabilities, Experimental tag ID fast retrieval, etc.). One should be able to find the desired research data in seconds. Archival documentation system should allow 'Quick Reference' to pin-point search 'keywords'.
# # Data storage <a class="tocSkip">
# The OS-DLN must have a secure, robust, well-resourced data storage system in-place. There are basically two categories of solutions:
#
# 1. Cloud-computing solutions
# 2. On-premises, self-hosted servers
#
# For materials characterization purposes, the data storage warehousing of DLNs must also be able to securely and efficiently store all related/integrated data files of different types, such as:
#
# * Simulation models (ex. experimental validation models: ABAQUS/SolidWorks, COMPRO, RAVEN, COMSOL)
# * Experimental data records (Excel, CSV, Google Sheets, Plain Text)
# * Text processing records (MS Word, Plain Text)
# * Literature review (PDF, EPUB, interactive HTML)
# * Material models (MATLAB, Python, React JS, R)
#
# ## Cloud-hosted data storage <a class="tocSkip">
#
# * Store all DLN entries and related related/integrated data files on cloud server
# * Common (secure) cloud server options:
# * Amazon Web Services
# * Microsoft Azure
#
# ### Advantages of cloud-based solutions <a class="tocSkip">
#
# * No setup costs
# * No (on-going) maintenance costs or resources required
# * No administration costs
# * Integrated cloud-computing environment - access content anywhere/anytime a functional Internet connection is available
# * Solution providers work to ensure minimal 'down time'
#
# ### Disadvantages of cloud-based solutions <a class="tocSkip">
#
# * Control of data security and privacy is ultimately handled by the service provider
# * Need to be aware of security controls, encryption capabilities, authentication processes
# * Service depends on fast, reliable LAN/WLAN connectivity
#
#
# ## On-premises, self-hosted data storage <a class="tocSkip">
#
# * Store all DLN entries and related related/integrated data files on own server
#
# ### Advantages <a class="tocSkip">
#
# * IT-Admin has full control over settings, security protocols, encryption, authentication and document control of all content hosted on server
# * Server is dedicated solely to own organization and is accessible offline
#
# ### Disadvantages <a class="tocSkip">
#
# * Higher cost of IT resources to setup, maintain and continuously update/improve server hosting
# * IT is responsible for reliability, speed, security, service performance, software updates, bug fixes and version control of server
# * IT is responsible for document control of all server content
#
#
# ## Cloud solution or self-hosting? <a class="tocSkip">
# The organization’s policy regarding data storage will be the key decision maker here. Large organizations with a big IT department often prefer to have an on-premise solution to be in full control, and are willing to pay extra for that reassurance. Smaller companies and research institutions are generally more willing to take advantage of the capabilities offered by cloud computing, especially those provided by Amazon or Microsoft.
#
# ## Free cloud solution <a class="tocSkip">
#
# * [LabFolder - Free for up to 3 Team Members](https://www.labfolder.com/pricing/industry/)
# * [LabFolder - The Electronic Lab Notebook in 2019: A comprehensive guide](https://www.labfolder.com/electronic-lab-notebook-eln-research-guide/)
###########################################################################################################################################################################
#==========================================================================================================================================================================
###########################################################################################################################################################################
# coding: utf-8
# # Structural durability analyses for carbon/epoxy laminates
#
# ## §2 Composite laminate theory (*based on LEFM*)
#
# Principles of linear elastic composite laminate theory, pertinent to calculating the elastic engineering constants required to characterize the elastic structural durability of various tested carbon fibre/epoxy laminate coupons, are discussed here.
# # DLN Contents
#
# 0. [Materials Characterization Laboratory DLN | A Showcase for Convergent Manufacturing Group Ltd](DLN_0_About_Me.ipynb) - A 'Welcome' message to the Convergent Manufacturing - Materials Characterization Group, explaining the concept of these DLN entries, why I made them out of interest for the team's *Characterization Lab Technician/Scientist* opening, and presenting a brief 'About Me' StoryMap
# <br>
#
# 1. [§1: Structural durability analyses of carbon fibre & epoxy-based composites - Introduction](DLN_1_Introduction.ipynb) - An introduction to the quasi-fatigue experiments performed on carbon fibre/epoxy composite specimens.
# <br>
#
# 2. [§2: Structural durability analyses of carbon fibre & epoxy-based composites - Laminate mechanics theory](DLN_2_Theory.ipynb) - A discussion of composite laminate theory, as a basis for performing stress-strain-deformation calculations to characterize the structural durability of composite laminate layups.
# <br>
#
# 3. [§3: Structural durability analyses of carbon fibre & epoxy-based composites - Experimental results](DLN_3_Experimental.ipynb) - Using Python scientific programming libraries to explore and visualize quasi-fatigue tensile & compressive loading experiments on carbon fibre/epoxy composite test coupons.
# <br>
#
# 4. [§4: Structural durability analyses of carbon fibre & epoxy-based composites - Matrix calculations](DLN_4_Calculations.ipynb) - Using MATLAB to perform structural durability matrix calculations from carbon fibre/epoxy composite test coupon experimental data.
# ## 2.1 Structural mechanics theory of UD composite laminates subject to plane stress
# All of the coupons prepared and tested for this investigation were UD composite laminates; the coupons were subjected to either tensile or compressive plane stress in the 1-2 plane of the composite geometry, as shown below. This is to say that the coupons were only subjected to *in-plane* stress loads (i.e. tensile and compressive loading in the fibre direction):
#
# ")
#
# *<center> Fig. X - Unidirectional fibre-reinforced lamina (Jones, 1999)</center>*
#
# ### 2.1.1 A brief overview of linear elastic strain-stress theory (*relevant to the 2-D in-plane stress fields*)
# A comprehensive overview of linear elastic approximations of stress-strain relationships for anisotropic materials can be found in many resources that focus on solid mechanics [(Tuttle, 2004), (Pilkey, 1999)]. A simplification of (linear elastic) stress-strain theory[<sup>1</sup>](#fn1), applied to UD-laminate composites subjected to in-plane stress, is briefly described here. These equations provide the basis for the structural durability calculations of stress-strain effects on the carbon fibre/epoxy coupons.
#
# **Generalized Hooke's Law**
# The generalized Hooke's law relating a stress field (induced by an applied force) to the strain (deformation) response, of a particular material, can be written in the following simple index notation:
#
# $$ \mathbf{\sigma_{i}} = \mathbf{C_{ij}} \cdot \mathbf{\varepsilon_{j}}, \qquad\quad i,j = 1, ..., 6 $$
#
# Where:
# * $\mathbf{\sigma_{i}}$ are the stress components
# * $\mathbf{C_{ij}}$ is the stiffness matrix
# * $\mathbf{\varepsilon_{j}}$ are the strain components
#
# The normal and shear stress fields, induced by an applied force, are pictured in Cartesian coordinates below:
#
# ")
#
# *<center> Fig. X - Stresses on a material element (Jones, 1999)</center>*
#
# **The strain (deformation) response**
# Deformation of the material, responding to the induced stress field, is characterized by *tensor shear strain* ($\mathbf{\varepsilon_{ij}}$) and *engineering shear strain* ($\mathbf{\gamma_{ij}}$). Considering a material element being deformed, the tensor and engineering shear strains are defined, respectively, as:
#
# $$ \mathbf{\varepsilon_{1}} = \frac{\partial u}{\partial x}, \qquad \mathbf{\varepsilon_{2}} = \frac{\partial v}{\partial y}, \qquad \mathbf{\varepsilon_{3}} = \frac{\partial w}{\partial z}$$
#
# $$ \mathbf{\gamma_{23}} = \frac{\partial v}{\partial z} + \frac{\partial w}{\partial y}, \qquad \mathbf{\gamma_{31}} = \frac{\partial w}{\partial x} + \frac{\partial u}{\partial z}, \qquad \mathbf{\gamma_{12}} = \frac{\partial u}{\partial y} + \frac{\partial v}{\partial x}$$
#
# **Stress-strain relationships - Stiffness and compliance elasticity constants**
# The integral of the incremental work done (per unit volume) on a material (subjected to an applied force that induces a stress field and a subsequent strain (deformation) response to the load) yields a relation between work done on the material and the resultant tensor shear strain:
#
# $$ \mathbf{W} = \frac{1}{2} \mathbf{C_{ij}} \cdot \mathbf{\varepsilon_{i}} \mathbf{\varepsilon_{j}}, \qquad\quad i,j = 1, ..., 6 $$
#
# From this result, the second-order differentiation of Hooke's law shows that the stiffness matrix ($\mathbf{C_{ij}}$) is symmetric (*i.e.* $\mathbf{C_{ij}} = \mathbf{C_{ji}}$). Similarly, by examining the inverse of the stress-strain relations, the work done on the material can be related to the induced stress field:
#
# $$ \mathbf{W} = \frac{1}{2} \mathbf{S_{ij}} \cdot \mathbf{\sigma_{i}} \mathbf{\sigma_{j}}, \qquad\quad i,j = 1, ..., 6 $$
#
# Where:
# * $\mathbf{S_{ij}}$ is the compliance matrix
#
# ***Here it's important to note that hygrothermal effects on the deformation of the material are not being considered.*** *This is valid if experimental testing is done with test environment controls and neglecting material temperature changes during tensile/compressive loading.*
#
# The stiffness and compliance matrices have 36 constants, owing to the six degrees of freedom for considering the (linear elastic) deformation response of a material element. The generalized matrices are as follows:
#
# *Stiffness matrix* ($\mathbf{C_{ij}}$):
# <br>
#
# $$ \left( \begin{array}{c} \sigma_{1} \\ \sigma_{2} \\ \sigma_{3} \\ \tau_{23} \\ \tau_{31} \\ \tau_{12} \\ \end{array} \right) =
# \begin{bmatrix} C_{11} & C_{12} & C_{13} & C_{14} & C_{15} & C_{16} \\
# C_{21} & C_{22} & C_{23} & C_{24} & C_{25} & C_{26} \\
# C_{31} & C_{32} & C_{33} & C_{34} & C_{35} & C_{36} \\
# C_{41} & C_{42} & C_{43} & C_{44} & C_{45} & C_{46} \\
# C_{51} & C_{52} & C_{53} & C_{54} & C_{55} & C_{56} \\
# C_{61} & C_{62} & C_{63} & C_{64} & C_{65} & C_{66} \\
# \end{bmatrix}
# \cdot
# \left( \begin{array}{c} \varepsilon_{1} \\ \varepsilon_{2} \\ \varepsilon_{3} \\ \gamma_{23} \\ \gamma_{31} \\ \gamma_{12} \\ \end{array} \right) $$
#
# *Compliance matrix* ($\mathbf{S_{ij}}$):
# <br>
#
# $$ \left( \begin{array}{c} \varepsilon_{1} \\ \varepsilon_{2} \\ \varepsilon_{3} \\ \gamma_{23} \\ \gamma_{31} \\ \gamma_{12} \\ \end{array} \right) =
# \begin{bmatrix} S_{11} & S_{12} & S_{13} & S_{14} & S_{15} & S_{16} \\
# S_{21} & S_{22} & S_{23} & S_{24} & S_{25} & S_{26} \\
# S_{31} & S_{32} & S_{33} & S_{34} & S_{35} & S_{36} \\
# S_{41} & S_{42} & S_{43} & S_{44} & S_{45} & S_{46} \\
# S_{51} & S_{52} & S_{53} & S_{54} & S_{55} & S_{56} \\
# S_{61} & S_{62} & S_{63} & S_{64} & S_{65} & S_{66} \\
# \end{bmatrix}
# \cdot
# \left( \begin{array}{c} \sigma_{1} \\ \sigma_{2} \\ \sigma_{3} \\ \tau_{23} \\ \tau_{31} \\ \tau_{12} \\ \end{array} \right)$$
#
# **Significance of coupled stress-strain elements (*described with the compliance matrix*)**
# For anisotropic materials, significant coupling occurs between the applied stress and the various strain responses. Examining the compliance matrix, the following stress-strain responses are coupled:
#
# * The $ S_{11}, S_{22}$ and $S_{33} $ terms each represent extensional response to their respective applied stress components ($ \sigma_{1}, \sigma_{2}, \sigma_{3} $) in the same direction.
#
# * The $ S_{44}, S_{55}$ and $S_{66} $ terms represent shear strain response to an applied shear stress in the same plane
#
# * The $ S_{12}, S_{13}$ and $S_{23} $ terms represent coupling between dissimilar normal stresses and normal strains (extension-extension coupling more commonly known as the Poisson effect)
#
# * The $ S_{14}, S_{15}, S_{16}, S_{24}, S_{25}, S_{26}, S_{34}, S_{35}$ and $S_{36} $ terms represent normal strain response to applied shear stress in a more complex manner than for the preceding compliances (shear-extension coupling)
#
# * Finally, the $ S_{45}, S_{46}$ and $S_{56} $ terms represent shear strain response to shear stress applied in another plane (shear-shear coupling)
#
# * However, less than 36 of the constants can be shown to actually be independent for elastic materials when important characteristics of the strain energy are considered, such as whether the material behaves anisotropically, orthotropically, monoclinically or traversely isotropically when undergoing deformation.
#
# [comment]: <> (------------------------------§2.3.2 Footnotes------------------------------)
# __________________________________
#
# <span id="fn1"> 1. Linear elasticity theory makes a number of assumptions about the elastic/plastic deformation response to stress fields induced by applied force loads, namely that strain responses are infinitesimally small, and that relationships between the components of stress and strain are approximately linear. Additionally, the theory is valid only for stress states that do not produce yielding.</span>
# ### 2.1.2 UD laminae strain-stress relationships
# The fibre arrangements, of the laminate coupons prepared for these experimental investigations, were classified as orthotropic or transversely isotropic bodies. In UD laminae, all planes whose perpendicular vector is transverse with respect to the fibre direction are planes of symmetry. The UD laminae are transversely isotropic. On planes parallel with the fibre direction it behaves orthotropically (*material properties that differ along three mutually-orthogonal twofold axes of rotational symmetry - a subset of anisotropic materials*) and on an imaginary plane perpendicular to the fibre direction it behaves isotropically (*material properties remain constant in all directions*).
#
# **3-D Orthotropic $\mathbf{\sigma} - \mathbf{\varepsilon}$ relationship**
# <br>
#
# With the application of tensile or compressive force loading, parallel to the fibre direction of the UD laminae (in-plane loading), shear-extension and shear-shear coupling can be neglected. As such, the stress-strain relationship, w.r.t the ***stiffness matrix***, for UD laminae exhibiting orthotropic behaviour is simplified to:
#
# $$ \left( \begin{array}{c} \sigma_{1} \\ \sigma_{2} \\ \sigma_{3} \\ \tau_{23} \\ \tau_{31} \\ \tau_{12} \\ \end{array} \right) =
# \begin{bmatrix} C_{11} & C_{12} & C_{13} & 0 & 0 & 0 \\
# C_{21} & C_{22} & C_{23} & 0 & 0 & 0 \\
# C_{31} & C_{32} & C_{33} & 0 & 0 & 0 \\
# 0 & 0 & 0 & C_{44} & 0 & 0 \\
# 0 & 0 & 0 & 0 & C_{55} & 0 \\
# 0 & 0 & 0 & 0 & 0 & C_{66} \\
# \end{bmatrix}
# \cdot
# \left( \begin{array}{c} \varepsilon_{1} \\ \varepsilon_{2} \\ \varepsilon_{3} \\ \gamma_{23} \\ \gamma_{31} \\ \gamma_{12} \\ \end{array} \right) $$
#
# and w.r.t. to the ***compliance matrix***:
#
# $$ \left( \begin{array}{c} \varepsilon_{1} \\ \varepsilon_{2} \\ \varepsilon_{3} \\ \gamma_{23} \\ \gamma_{31} \\ \gamma_{12} \\ \end{array} \right) =
# \begin{bmatrix} S_{11} & S_{12} & S_{13} & 0 & 0 & 0 \\
# S_{21} & S_{22} & S_{23} & 0 & 0 & 0 \\
# S_{31} & S_{32} & S_{33} & 0 & 0 & 0 \\
# 0 & 0 & 0 & S_{44} & 0 & 0 \\
# 0 & 0 & 0 & 0 & S_{55} & 0 \\
# 0 & 0 & 0 & 0 & 0 & S_{66} \\
# \end{bmatrix}
# \cdot
# \left( \begin{array}{c} \sigma_{1} \\ \sigma_{2} \\ \sigma_{3} \\ \tau_{23} \\ \tau_{31} \\ \tau_{12} \\ \end{array} \right)$$
#
# **3-D Isotropic $\mathbf{\sigma} - \mathbf{\varepsilon}$ relationship**
# The stress-strain relationship, w.r.t the ***stiffness matrix***, for UD laminae exhibiting isotropic behaviour is simplified to:
#
# $$ \left( \begin{array}{c} \sigma_{1} \\ \sigma_{2} \\ \sigma_{3} \\ \tau_{23} \\ \tau_{31} \\ \tau_{12} \\ \end{array} \right) =
# \begin{bmatrix} C_{11} & C_{12} & C_{13} & 0 & 0 & 0 \\
# C_{12} & C_{11} & C_{13} & 0 & 0 & 0 \\
# C_{13} & C_{13} & C_{33} & 0 & 0 & 0 \\
# 0 & 0 & 0 & C_{44} & 0 & 0 \\
# 0 & 0 & 0 & 0 & C_{44} & 0 \\
# 0 & 0 & 0 & 0 & 0 & (C_{11} - C_{12})/2 \\
# \end{bmatrix}
# \cdot
# \left( \begin{array}{c} \varepsilon_{1} \\ \varepsilon_{2} \\ \varepsilon_{3} \\ \gamma_{23} \\ \gamma_{31} \\ \gamma_{12} \\ \end{array} \right) $$
#
# and w.r.t. to the ***compliance matrix***:
#
# $$ \left( \begin{array}{c} \varepsilon_{1} \\ \varepsilon_{2} \\ \varepsilon_{3} \\ \gamma_{23} \\ \gamma_{31} \\ \gamma_{12} \\ \end{array} \right) =
# \begin{bmatrix} S_{11} & S_{12} & S_{13} & 0 & 0 & 0 \\
# S_{12} & S_{11} & S_{13} & 0 & 0 & 0 \\
# S_{13} & S_{13} & S_{33} & 0 & 0 & 0 \\
# 0 & 0 & 0 & S_{44} & 0 & 0 \\
# 0 & 0 & 0 & 0 & S_{44} & 0 \\
# 0 & 0 & 0 & 0 & 0 & 2(S_{11} - S_{12}) \\
# \end{bmatrix}
# \cdot
# \left( \begin{array}{c} \sigma_{1} \\ \sigma_{2} \\ \sigma_{3} \\ \tau_{23} \\ \tau_{31} \\ \tau_{12} \\ \end{array} \right)$$
# ### 2.1.3 2-D UD laminae strain-stress relationships
# Since the test coupons for this investigation are all UD laminate samples, and tensile/compressive loadings during experiment runs were applied in the 1-2 plane of the samples, the remaining theory discussion will focus on 2-D laminae stress-strain relationships.
#
# The plane-stress state of the UD composite, in the 1-2 plane, is defined by setting:
#
# $$ \sigma_{3} = 0, \tau_{23} = 0, \tau_{31} = 0$$
#
# such that:
#
# $$ \sigma_{1} \neq 0, \qquad \sigma_{2} \neq 0, \qquad \tau_{21} \neq 0$$
# <br>
#
# where $ \sigma_{1}$ and $ \sigma_{2}$ represent the stress components normal to the 1-2 plane of a UD laminate material, w.r.t to the 1 and 2 directions respectively, and $ \tau_{21}$ represents the intralaminar shear stress w.r.t. the 1-2 plane of a UD laminae material.
#
# **2-D Orthotropic $\mathbf{\sigma} - \mathbf{\varepsilon}$ relationship**
# As such, the 2-D stress-strain relationship, w.r.t the ***stiffness matrix***, for UD laminae exhibiting orthotropic behaviour is simplified to:
#
# $$ \left( \begin{array}{c} \sigma_{1} \\ \sigma_{2} \\ \tau_{12} \\ \end{array} \right) =
# \begin{bmatrix} C_{11} & C_{12} & 0 \\
# C_{12} & C_{22} & 0 \\
# 0 & 0 & (C_{11} - C_{12})/2 \\
# \end{bmatrix}
# \cdot
# \left( \begin{array}{c} \varepsilon_{1} \\ \varepsilon_{2} \\ \gamma_{12} \\ \end{array} \right) $$
#
# and w.r.t. to the ***compliance matrix***:
#
# $$ \left( \begin{array}{c} \varepsilon_{1} \\ \varepsilon_{2} \\ \gamma_{12} \\ \end{array} \right) =
# \begin{bmatrix} S_{11} & S_{12} & 0 \\
# S_{12} & S_{22} & 0 \\
# 0 & 0 & 2(S_{11} - S_{12}) \\
# \end{bmatrix}
# \cdot
# \left( \begin{array}{c} \sigma_{1} \\ \sigma_{2} \\ \tau_{12} \\ \end{array} \right)$$
#
# These equations effectively describe the deformation response (described by tensor and engineered shear strain components) of the UD laminae in the 1-2 plane, w.r.t. a shear stress field (described by stress components normal to the 1-2 plane and in-plane shear stress components).
#
# The tensile and compressive tests, performed for the purposes of this investigation, will be conducted with preset force loads. The resulting displacement (strain) measured will determine the engineering constants required to solve the stress-strain equations discussed above.
# ### 2.1.4 Overview of engineering constants for solving strain-stress equations
# A brief overview of the engineering constants required to solve the 2-D UD laminae stress-strain equations is presented below:
#
# 1. The slope of (tensile/compressive) stress-strain curve:
# * $ E = \frac{\sigma}{\varepsilon} $
# * $ E_{i}$ represents the Young's (extension) moduli in the $ i^{th}$ directions, describing the elastic extension of the material in a specific direction
#
# 2. The slope of strain-strain curves (Poisson's ratio):
# * the negative of the ratio of (signed) transverse strain to (signed) axial strain (i.e. extension-extension coupling coefficient)
# * $ \nu_{ij} = \frac{-\varepsilon_{i}}{\varepsilon_{j}} $
#
# 3. The shear modulus, $ G_{ij}$, defining the ratio of shear stress to the shear strain in the (i-j) plane, or rather the material's response to shear stress
#
# **The orthotropic UD laminae case**
# <br>
#
# From these definitions, the **compliance matrix** can be expressed in terms of these engineering constants as follows:
#
# $$S =
# \begin{bmatrix} \frac{1}{E_{11}} & - \frac{\nu_{21}}{E_{22}} & 0 \\
# - \frac{\nu_{12}}{E_{11}} & \frac{1}{E_{22}} & 0 \\
# 0 & 0 & \frac{1}{G_{12}} \\
# \end{bmatrix} $$
#
# Since the stiffness and compliance matrices are mutually inverse, it follows by matrix algebra that their components are related as follows for orthotropic materials (*limited to the 2-D case*):
#
# $$ C_{11} = \frac{S_{22}}{S_{11} S_{22} - S^{2}_{12}} = \frac{E_{1}}{1 - \nu_{12} \nu_{21}} $$
#
# $$ C_{22} = \frac{S_{11}}{S_{11} S_{22} - S^{2}_{12}} = \frac{E_{2}}{1 - \nu_{12} \nu_{21}} $$
#
# $$ C_{12} = - \frac{S_{12}}{S_{11} S_{22} - S^{2}_{12}} = \frac{\nu_{12}E_{2}}{1 - \nu_{12} \nu_{21}} =
# \frac{\nu_{21}E_{1}}{1 - \nu_{12} \nu_{21}} $$
#
# $$ \frac{C_{11} - C_{12}}{2} = G_{12} $$
#
# **The isotropic UD laminae case**
# <br>
#
# Note that for orthotropic UD laminae, there are four independent variables, namely $ E_{1}, E_{2}, \nu_{12} $ and $ G_{12} $. For the isotropic case, we note that:
# * $ S_{11} = \frac{1}{E_{1}} = \frac{1}{E_{2}} = \frac{1}{E} = S_{22} $, such that $ E_{1} = E_{2} = E $
# * $ S_{12} = - \frac{\nu_{12}}{E_{1}} = - \frac{\nu_{21}}{E_{2}} = - \frac{\nu}{E} $
# * $ \frac{1}{G_{12}} = \frac{1}{G} = \frac{2(1 + \nu)}{E} $
# ## 2.2 UD laminae in-plane strain-stress relationships
# For in-plane stresses of composite plies we assume that stresses and strains do not vary in certain directions, depending on how the ply is force-loaded. When the aforementioned plane-strain condition exists, the three-dimensional analysis simplifies considerably.
#
# The UD laminate test coupons, from the experimental trials, were subjected to in-plane stresses (via 1-2 plane tensile and compressive force loading). The fibre arrangements, of the experiment laminate coupons, were classified as orthotropic or (symmetric) transversely isotropic bodies.
#
# From the general, anisotropic stress-strain systems of equations, defined in Eq. [6] and [7] with the stiffness and compliance matrices respectively:
#
# *Stiffness matrix* ($\mathbf{C_{ij}}$):
# <br>
#
# $$ \left( \begin{array}{c} \sigma_{1} \\ \sigma_{2} \\ \sigma_{3} \\ \tau_{23} \\ \tau_{31} \\ \tau_{12} \\ \end{array} \right) =
# \begin{bmatrix} C_{11} & C_{12} & C_{13} & C_{14} & C_{15} & C_{16} \\
# C_{21} & C_{22} & C_{23} & C_{24} & C_{25} & C_{26} \\
# C_{31} & C_{32} & C_{33} & C_{34} & C_{35} & C_{36} \\
# C_{41} & C_{42} & C_{43} & C_{44} & C_{45} & C_{46} \\
# C_{51} & C_{52} & C_{53} & C_{54} & C_{55} & C_{56} \\
# C_{61} & C_{62} & C_{63} & C_{64} & C_{65} & C_{66} \\
# \end{bmatrix}
# \cdot
# \left( \begin{array}{c} \varepsilon_{1} \\ \varepsilon_{2} \\ \varepsilon_{3} \\ \gamma_{23} \\ \gamma_{31} \\ \gamma_{12} \\ \end{array} \right) $$
#
# *Compliance matrix* ($\mathbf{S_{ij}}$):
# <br>
#
# $$ \left( \begin{array}{c} \varepsilon_{1} \\ \varepsilon_{2} \\ \varepsilon_{3} \\ \gamma_{23} \\ \gamma_{31} \\ \gamma_{12} \\ \end{array} \right) =
# \begin{bmatrix} S_{11} & S_{12} & S_{13} & S_{14} & S_{15} & S_{16} \\
# S_{21} & S_{22} & S_{23} & S_{24} & S_{25} & S_{26} \\
# S_{31} & S_{32} & S_{33} & S_{34} & S_{35} & S_{36} \\
# S_{41} & S_{42} & S_{43} & S_{44} & S_{45} & S_{46} \\
# S_{51} & S_{52} & S_{53} & S_{54} & S_{55} & S_{56} \\
# S_{61} & S_{62} & S_{63} & S_{64} & S_{65} & S_{66} \\
# \end{bmatrix}
# \cdot
# \left( \begin{array}{c} \sigma_{1} \\ \sigma_{2} \\ \sigma_{3} \\ \tau_{23} \\ \tau_{31} \\ \tau_{12} \\ \end{array} \right)$$
#
# These equations reduce to the following for in-plane (*plane-stress*) loading of orthotropic composite plies:
#
# $$ \left( \begin{array}{c} \sigma_{1} \\ \sigma_{2} \\ \tau_{12} \\ \end{array} \right) =
# \begin{bmatrix} Q_{11} & Q_{12} & Q_{16} \\
# Q_{12} & Q_{22} & Q_{26} \\
# Q_{16} & Q_{26} & Q_{66} \\
# \end{bmatrix}
# \cdot
# \left( \begin{array}{c} \varepsilon_{1} \\ \varepsilon_{2} \\ \gamma_{12} \\ \end{array} \right) $$
#
# Where:
#
# $Q_{ij}$ represent the **in-plane** elements of the stiffness matrix subject to the **plane-stress condition** (*differentiated from the general stiffness matrix elements $C_{ij}$*)
#
# The plane-stress conditioned compliance matrix is the inverse of the plane-stress conditioned stiffness matrix:
#
# $$ \begin{bmatrix} S'_{11} & S'_{12} & S'_{16} \\
# S'_{12} & S'_{22} & S'_{26} \\
# S'_{16} & S'_{26} & S'_{66} \\
# \end{bmatrix}^{-1} =
# \begin{bmatrix} Q_{11} & Q_{12} & Q_{16} \\
# Q_{12} & Q_{22} & Q_{26} \\
# Q_{16} & Q_{26} & Q_{66} \\
# \end{bmatrix} $$
#
# Where:
#
# $S'_{ij}$ represent the **in-plane** elements of the compliance matrix subject to the **plane-stress condition** (*differentiated from the general compliance matrix elements $S_{ij}$*)
# ## 2.3 Stress and strain transformations
# Axes transformations are important in stress-strain of materials. Such transformations are required to compute critical values of these (stress-strain) characteristics, as well as to be able to understand the tensorial nature of stress and strain. Other entities, such as moment of inertia and curvature, also transform in a manner similar to stress and strain.
#
# For the purposes of this DLN the relevant theory related to transformations of stress and strain from a local coordinate system to a global coordinate system will briefly be discussed. Further resources on (stress-strain) tensor transformation theory can be found in [Roylance, 2001].
#
# ### 2.3.1 Stress and strain transformations for laminate plies
#
# Stress can be transformed from a local cartesian coordinate system **L(p,q,r)** to a global cartesian coordinate system **G(p,q,r)** via:
#
# $$ \left( \begin{array}{c} \sigma_{G,p} \\ \sigma_{G,q} \\ \sigma_{G,r} \\ \tau_{G,qr} \\ \tau_{G,pr} \\ \tau_{G,pq} \\ \end{array} \right) =
# \begin{bmatrix} T_{\sigma11} & \cdots & T_{\sigma16} \\
# \vdots & \ddots & \vdots \\
# T_{\sigma61} & \cdots & T_{\sigma66} \\
# \end{bmatrix}
# \cdot
# \left( \begin{array}{c} \sigma_{L,p} \\ \sigma_{L,q} \\ \sigma_{L,r} \\ \tau_{L,qr} \\ \tau_{L,pr} \\ \tau_{L,pq} \\ \end{array} \right)$$
#
# Which can be written in the form:
#
# $$ \mathbf{\sigma_{G}} = [\mathbf{\hat{T}_{\sigma}}] \mathbf{\sigma_{L}} $$
#
# For composite laminate plies subjected to plane-strain and plane-stress conditions, one is only interested in stresses manifested in the G(p-q) and L(p-q) planes; for UD laminate plies this would mean the planes that characterize stresses occurring only in the fibre direction and transverse to the fibre direction. Then the stresses in the **G(p,q,r)** coordinate system are arrived at by rotation about the 'r' axis of the **L(p,q,r)** coordinate system, namely:
#
# $$ \left( \begin{array}{c} \sigma_{G,p} \\ \sigma_{G,q} \\ \tau_{G,pq} \\ \end{array} \right) =
# \begin{bmatrix} c^{2} & s^{2} & 2cs \\
# s^{2} & c^{2} & -2cs \\
# -cs & cs & c^{2}-s^{2} \\
# \end{bmatrix}
# \cdot
# \left( \begin{array}{c} \sigma_{L,p} \\ \sigma_{L,q} \\ \tau_{L,pq} \\ \end{array} \right)$$
#
# Where:
# <br>
#
# $ c = \cos\Theta \qquad\quad s = \sin\Theta $
#
# And can be written in the form:
#
# $$ \mathbf{\sigma_{G}} = [\mathbf{T_{\sigma}}] \mathbf{\sigma_{L}} $$
#
# Meaning that only the three in-plane stress components are transformed.
#
# A similar treatment of the strain tensor, relating strains (on a composite laminate ply material) in the local coordinate system to the global coordinate system, yields:
#
# $$ \left( \begin{array}{c} \varepsilon_{G,p} \\ \varepsilon_{G,q} \\ \gamma_{G,pq} \\ \end{array} \right) =
# \begin{bmatrix} c^{2} & s^{2} & cs \\
# s^{2} & c^{2} & -cs \\
# -2cs & 2cs & c^{2}-s^{2} \\
# \end{bmatrix}
# \cdot
# \left( \begin{array}{c} \varepsilon_{L,p} \\ \varepsilon_{L,q} \\ \gamma_{L,pq} \\ \end{array} \right)$$
#
# Where c and s are as previously defined, and can be written alternatively as:
#
# $$ \mathbf{\varepsilon_{G}} = [\mathbf{T_{\varepsilon}}] \mathbf{\varepsilon_{L}} $$
#
# The stiffness and compliance matrices, $[\mathbf{C}]$ and $[\mathbf{S}]$ respectively, can be transformed accordingly (*see [Roylance, 2001] for matrix inversion steps*) to yield:
#
# $$ [\mathbf{C'}] = [\mathbf{\hat{T}_{\sigma}}][\mathbf{C}][\mathbf{\hat{T}_{\varepsilon}}]^{-1} $$
#
# $$ [\mathbf{S'}] = [\mathbf{\hat{T}_{\varepsilon}}][\mathbf{S}][\mathbf{\hat{T}_{\sigma}}]^{-1} $$
#
# Thus the transformed stiffness matrix can be computed for composites with fibres of varying orientations, from a reference local coordinate system and using laminate ply material stiffness constants to a global coordinate system, with the stress and strain transforms being:
#
# $$ [\mathbf{T_{\sigma}}] =
# \begin{bmatrix} c^{2} & s^{2} & 2cs \\
# s^{2} & c^{2} & -2cs \\
# -cs & cs & c^{2}-s^{2} \\
# \end{bmatrix} $$
#
# $$ [\mathbf{T_{\varepsilon}}] =
# \begin{bmatrix} c^{2} & s^{2} & cs \\
# s^{2} & c^{2} & -cs \\
# -2cs & 2cs & c^{2}-s^{2} \\
# \end{bmatrix} $$
#
# ### 2.3.2 In-plane transformed stiffness and compliance matrices
#
# It follows that, for composite plies subjected to plane-stress conditions, the transformed in-plane stress and strain systems of equations can be derived by substituting the plane-stress conditioned stiffness and compliance matrices (Eqn. [23], [24]) into Eqns. [31] and [32]:
#
# $$ [\mathbf{\bar{Q}}] = [\mathbf{\hat{T}_{\sigma}}][\mathbf{Q}][\mathbf{\hat{T}_{\varepsilon}}]^{-1} $$
#
# $$ [\mathbf{\bar{S}}] = [\mathbf{\hat{T}_{\varepsilon}}][\mathbf{S}][\mathbf{\hat{T}_{\sigma}}]^{-1} $$
#
#
# ### 2.3.3 References
#
# 1. Roylance, D. (2001). Transformation of stresses and strains. Lecture Notes for Mechanics of Materials.
# ## 2.4 Laminate structural durability calculations
# ### 2.4.1 Stiffness matrices for in-plane stress conditioned laminates
#
# Eqn.s [35] and [36] represent the plane-stressed conditioned stiffness and compliance matrices for individual plies. To approximate the stress-strain relationships of entire laminates (multi-layer manufactured plies), we define the [A], [B] and [D] stiffness matrices:
#
# $$ [\mathbf{A}] = \int_{-h_{b}}^{h_{t}} [\bar{\mathbf{Q}}]dz $$, and each $[A_{ij}]$ element defined by:
#
# $$ A_{ij} = \int_{-h_{b}}^{h_{t}} \bar{Q_{ij}}dz $$
#
# Where:
#
# * $h_{b}$ represents the distance of the laminate plies from the reference plane to the bottom surface of the entire laminate structure
# * $h_{t}$ represents the distance of the laminate plies from the reference plane to the top surface of the entire laminate structure
# * Recall that $[\bar{Q}]$ represents the in-plane stress conditioned, transformed stiffness matrix of each ply
#
# The variable '$\mathbf{z}$' in Eqn. [37] defines the distance of the '$\mathbf{z^{th}}$' ply from the reference plane
#
# $$ [\mathbf{B}] = \int_{-h_{b}}^{h_{t}} z[\bar{\mathbf{Q}}]dz $$, and each $[B_{ij}]$ element defined by:
#
# $$ B_{ij} = \int_{-h_{b}}^{h_{t}} z\bar{Q_{ij}}dz $$
#
# and
#
# $$ [\mathbf{D}] = \int_{-h_{b}}^{h_{t}} z^{2}[\bar{\mathbf{Q}}]dz $$, and each $[D_{ij}]$ element defined by:
#
# $$ D_{ij} = \int_{-h_{b}}^{h_{t}} z^{2}\bar{Q_{ij}}dz $$
#
# From the assumption that the composite plies and laminates, tested for the quasi-static fatigue loading investigations, exhibit linear elastic behaviour, it is assumed that $[\bar{Q}]$ is constant across each ply. Thus, the laminate stiffness and compliance integrals above can be replaced by the summations:
#
# $$ A_{ij} = \sum_{k=1}^{K} (\bar{Q_{ij}})_{k}(z_{k}-z_{k-1}) $$
#
# $$ B_{ij} = \frac{1}{2}\sum_{k=1}^{K} (\bar{Q_{ij}})_{k}(z^{2}_{k}-z^{2}_{k-1}) $$
#
# $$ D_{ij} = \frac{1}{3}\sum_{k=1}^{K} (\bar{Q_{ij}})_{k}(z^{3}_{k}-z^{3}_{k-1}) $$
# ## 2.5 Mechanics of in-plane stress-conditioned composite laminates
# ### 2.5.1 In-plane forces and moments
#
# From the [A], [B] and [D] in-plane stiffness matrix elements described in Eqns. [43] through [45], the in-plane forces and moments of the laminate can be related to the in-plane strain and curvature response of the laminate. For a laminate subject to the in-plane stress condition in the 1-2 plane, this relationship is:
#
# $$ \left( \begin{array}{c} N_{1} \\ N_{2} \\ N_{1-2} \\ M_{1} \\ M_{2} \\ M_{1-2} \\ \end{array} \right) =
# \begin{bmatrix} A_{11} & A_{12} & A_{16} & B_{11} & B_{12} & B_{16} \\
# A_{21} & A_{22} & A_{26} & B_{21} & B_{22} & B_{26} \\
# A_{61} & A_{62} & A_{66} & B_{61} & B_{62} & B_{66} \\
# B_{11} & B_{12} & B_{16} & D_{11} & D_{12} & D_{16} \\
# B_{21} & B_{22} & B_{26} & D_{21} & D_{22} & D_{26} \\
# B_{61} & B_{62} & B_{66} & D_{61} & D_{62} & D_{66} \\
# \end{bmatrix}
# \cdot
# \left( \begin{array}{c} \varepsilon^o_{1} \\ \varepsilon^o_{2} \\ \gamma^o_{1-2} \\ \kappa_{1} \\ \kappa_{2} \\ \kappa_{12} \\ \end{array} \right) $$
#
# Inversion of Eqn. [46] defines the strain and curvature of the laminate in terms of the in-plane force loading and moments of the laminate. For a laminate force-loaded in the 1-2 plane:
#
# $$ \left(\begin{array}{c} \varepsilon^o_{1} \\ \varepsilon^o_{2} \\ \gamma^o_{1-2} \\ \kappa_{1} \\ \kappa_{2} \\ \kappa_{12} \\ \end{array} \right) =
# \begin{bmatrix} \alpha_{11} & \alpha_{12} & \alpha_{16} & \beta_{11} & \beta_{12} & \beta_{16} \\
# \alpha_{21} & \alpha_{22} & \alpha_{26} & \beta_{21} & \beta_{22} & \beta_{26} \\
# \alpha_{61} & \alpha_{62} & \alpha_{66} & \beta_{61} & \beta_{62} & \beta_{66} \\
# \beta_{11} & \beta_{12} & \beta_{16} & \delta_{11} & \delta_{12} & \delta_{16} \\
# \beta_{21} & \beta_{22} & \beta_{26} & \delta_{21} & \delta_{22} & \delta_{26} \\
# \beta_{61} & \beta_{62} & \beta_{66} & \delta_{61} & \delta_{62} & \delta_{66} \\
# \end{bmatrix}
# \cdot
# \left(\begin{array}{c} N_{1} \\ N_{2} \\ N_{1-2} \\ M_{1} \\ M_{2} \\ M_{1-2} \\ \end{array} \right)
# $$
#
# ### 2.5.2 Importance of the [A], [B] and [D] matrices to laminate structural durability analyses
#
# The [A], [B] and [D] matrices characterize the stiffness of the laminates, the degree to which the composite laminate will elastically deform, when subjected to certain force-loading conditions.
#
# For the purposes of the quasi-static fatigue (tensile and compressive) in-plane force-loading of carbon fibre/epoxy laminate composite coupons (the experiments designed to investigate the *linear* elastic structural durability of these composite materials), the significance of these matrices are as follows:
#
# 1. The $A_{ij}$ stiffness matrix elements relate the in-plane forces, imposed on the laminate coupons, to the in-plane (*elastic*) deformations manifested in the laminates (*under tensile or compressive force-loading*)
#
# 2. The $B_{ij}$ stiffness matrix elements are the in-plane–out-of-plane coupling stiffnesses that relate the:
# * in-plane forces, imposed on the laminate coupons, to the resultant curvatures of the laminate
# * moments, imposed on the laminate, to the resultant in-plane deformation of the laminate
#
# 3. The $D_{ij}$ stiffness matrix elements are the bending stiffnesses that relate the moments, imposed on the laminate, to the resultant curvatures of the laminate
#
# Examination of the [A], [B], and [D] matrices show that different types of couplings may occur. For the experimental (tensile and compressive force-loading) of the composite laminate coupons in the 1-2 plane, the following important force-moment-curvature-deformation couplings are worth noting:
#
# 1. **Extension–shear coupling**
# * When the elements $A_{16}$, $A_{26}$ (of the $A_{ij}$ elements) are not zero, in-plane normal forces ($N_{1}, N_{2}$) cause shear deformation ($\gamma^o_{1-2}$), and a twist force ($N_{1-2}$) causes elongations in the 1 and 2 directions
#
# 2. **Bending–twist coupling**
# * When the elements $D_{16}$, $D_{26}$ are not zero, bending moments ($M_{1}, M_{2}$) may cause a twisting of the laminate ($\kappa_{1-2}$), and a twist moment ($M_{1-2}$) causes curvatures in the 1–3 and 2–3 planes
#
# 3. **Extension–twist and bending–shear coupling**
# * When the elements $B_{16}$, $B_{26}$ are not zero, in-plane normal forces ($N_{1}, N_{2}$) cause twist ($\kappa_{1-2}$), and bending moments ($M_{1}, M_{2}$) result in shear deformation ($\gamma^o_{1-2}$)
#
# 4. **In-plane–out-of-plane coupling**
# * When the $B_{ij}$ stiffness matrix elements are not zero, in-plane forces ($N_{1}, N_{2}, N_{1-2}$) cause out-of-plane deformations (curvatures) of the laminate, and moments ($M_{1}, M_{2}, M_{1-2}$) cause in-plane deformations in the 1-2 plane.
#
# It is worth noting that these four types of coupling are characteristic of composite materials and do not occur in homogeneous isotropic materials. The following two couplings occur in both composite and isotropic materials:
#
# 5. **Extension–extension coupling**
# * When the element $A_{12}$ is not zero, a normal force $N_{1}$ causes elongation in the 2 direction ($\varepsilon^o_{2}$), and a normal force $N_{2}$ causes elongation in the 1 direction ($\varepsilon^o_{1}$)
#
# 6. **Bending–bending coupling**
# * When the element $D_{12}$ is not zero, a bending moment $M_{1}$ causes curvature of the laminate in the 2-3 plane ($\kappa_{2}$), and a bending moment $M_{2}$ causes curvature of the laminate in the 1–3 plane ($\kappa_{1}$)
# ## 2.6 Applications of [A], [B], [D] ( [$\alpha$], [$\beta$], [$\delta$] ) matrices to the (*elastic*) structural durability characterization of experiment carbon fibre/epoxy composite coupons
# * [§3: Structural durability analyses of carbon fibre & epoxy-based composites - Experimental results](DLN - §3 - Structural durability analyses of carbon fibre & epoxy-based composites - Experimental.ipynb) is the DLN entry that uses Python scientific programming libraries to explore and visualize quasi-fatigue tensile & compressive loading experiments on carbon fibre/epoxy composite test coupons. From analyses of the experiments, the elastic properties of the test coupons are determined.
# <br>
#
# * [§4: Structural durability analyses of carbon fibre & epoxy-based composites - Matrix calculations](DLN - §2 - Structural durability analyses of carbon fibre & epoxy-based composites - Calculations.ipynb) is the DLN entry that uses MATLAB to perform structural durability matrix calculations from carbon fibre/epoxy composite test coupon experimental data. The $[A], [B], [D]$ $([\alpha], [\beta], [\delta])$ matrices are calculated for each of the test laminate coupons.
#
###########################################################################################################################################################################
#==========================================================================================================================================================================
###########################################################################################################################################################################
# coding: utf-8
# # Structural durability analyses for carbon/epoxy laminates
#
# ## §2 Composite laminate theory (*based on LEFM*)
#
# Principles of linear elastic composite laminate theory, pertinent to calculating the elastic engineering constants required to characterize the elastic structural durability of various tested carbon fibre/epoxy laminate coupons, are discussed here.
# # DLN Contents
#
# 0. [Materials Characterization Laboratory DLN | A Showcase for Convergent Manufacturing Group Ltd](DLN_0_About_Me.ipynb) - An 'Welcome' message to the Convergent Manufacturing - Materials Characterization Group, explaining the concept of these DLN entries, why I made them out of interest for the team's *Characterization Lab Technician/Scientist* opening, and presenting a brief 'About Me' StoryMap
# <br>
#
# 1. [§1: Structural durability analyses of carbon fibre & epoxy-based composites - Introduction](DLN_1_Introduction.ipynb) - An introduction to the quasi-fatigue experiments performed on carbon fibre/epoxy composite specimens.
# <br>
#
# 2. [§2: Structural durability analyses of carbon fibre & epoxy-based composites - Laminate mechanics theory](DLN_2_Theory.ipynb) - A discussion of composite laminate theory, as a basis for performing stress-strain-deformation calculations to characterize the structural durability of composite laminate layups.
# <br>
#
# 3. [§3: Structural durability analyses of carbon fibre & epoxy-based composites - Experimental results](DLN_3_Experimental.ipynb) - Using Python scientific programming libraries to explore and visualize quasi-fatigue tensile & compressive loading experiments on carbon fibre/epoxy composite test coupons.
# <br>
#
# 4. [§4: Structural durability analyses of carbon fibre & epoxy-based composites - Matrix calculations](DLN_4_Calculations.ipynb) - Using MATLAB to perform structural durability matrix calculations from carbon fibre/epoxy composite test coupon experimental data.
# ## 2.1 Structural mechanics theory of UD composite laminates subject to plane stress
# All of the coupons prepared and tested for this investigation were UD composite laminates; the coupons were subjected to either tensile or compressive plane stress in the 1-2 plane of the composite geometry, as shown below. This is to say that the coupons were only subjected to *in-plane* stress loads (i.e. tensile and compressive loading in the fibre direction):
#
# *(Figure placeholder — the embedded image link was lost during notebook conversion.)*
#
# *<center> Fig. X - Unidirectional fibre-reinforced lamina (Jones, 1999)</center>*
#
# ### 2.1.1 A brief overview of linear elastic strain-stress theory (*relevant to the 2-D in-plane stress fields*)
# A comprehensive overview of linear elastic approximations of stress-strain relationships for anisotropic materials can be found in many resources that focus on solid mechanics [(Tuttle, 2004), (Pilkey, 1999)]. A simplification of (linear elastic) stress-strain theory[<sup>1</sup>](#fn1), applied to UD-laminate composites subjected to in-plane stress, is briefly described here. These equations provide the basis for the structural durability calculations of stress-strain effects on the carbon fibre/epoxy coupons.
#
# **Generalized Hooke's Law**
# The generalized Hooke's law relating a stress field (induced by an applied force) to the strain (deformation) response, of a particular material, can be written in the following simple notation:
#
# $$ \mathbf{\sigma_{i}} = \mathbf{C_{ij}} \cdot \mathbf{\varepsilon_{j}}, \qquad\quad i,j = 1, ..., 6 $$
#
# Where:
# * $\mathbf{\sigma_{i}}$ are the stress components
# * $\mathbf{C_{ij}}$ is the stiffness matrix
# * $\mathbf{\varepsilon_{j}}$ are the strain components
#
# The normal and shear stress fields, induced by an applied force, are pictured in Cartesian coordinates below:
#
# *(Figure placeholder — the embedded image link was lost during notebook conversion.)*
#
# *<center> Fig. X - Stresses on a material element (Jones, 1999)</center>*
#
# **The strain (deformation) response**
# Deformation of the material, responding to the induced stress field, is characterized by *tensor strain* ($\mathbf{\varepsilon_{ij}}$) and *engineering shear strain* ($\mathbf{\gamma_{ij}}$). Considering a material element being deformed, the tensor and engineering shear strains are defined, respectively, as:
#
# $$ \mathbf{\varepsilon_{1}} = \frac{\partial u}{\partial x}, \qquad \mathbf{\varepsilon_{2}} = \frac{\partial v}{\partial y}, \qquad \mathbf{\varepsilon_{3}} = \frac{\partial w}{\partial z}$$
#
# $$ \mathbf{\gamma_{23}} = \frac{\partial v}{\partial z} + \frac{\partial w}{\partial y}, \qquad \mathbf{\gamma_{31}} = \frac{\partial w}{\partial x} + \frac{\partial u}{\partial z}, \qquad \mathbf{\gamma_{12}} = \frac{\partial u}{\partial y} + \frac{\partial v}{\partial x}$$
#
# **Stress-strain relationships - Stiffness and compliance elasticity constants**
# The integral of the incremental work done (per unit volume) on a material (subjected to an applied force that induces a stress field and a subsequent strain (deformation) response to the load) yields a relation between work done on the material and the resultant tensor strain:
#
# $$ \mathbf{W} = \frac{1}{2} \mathbf{C_{ij}} \cdot \mathbf{\varepsilon_{i}} \mathbf{\varepsilon_{j}}, \qquad\quad i,j = 1, ..., 6 $$
#
# Differentiating this work expression twice with respect to the strain components shows that the stiffness matrix ($\mathbf{C_{ij}}$) is symmetric (*i.e.* $\mathbf{C_{ij}} = \mathbf{C_{ji}}$). Similarly, by examining the inverse of the stress-strain relations, the work done on the material can be related to the induced stress field:
#
# $$ \mathbf{W} = \frac{1}{2} \mathbf{S_{ij}} \cdot \mathbf{\sigma_{i}} \mathbf{\sigma_{j}}, \qquad\quad i,j = 1, ..., 6 $$
#
# Where:
# * $\mathbf{S_{ij}}$ is the compliance matrix
#
# ***Here it's important to note that hygrothermal effects on the deformation of the material are not being considered.*** *This is valid if experimental testing is done with test environment controls and neglecting material temperature changes during tensile/compressive loading.*
#
# The stiffness and compliance matrices have 36 constants, owing to the six degrees of freedom for considering the (linear elastic) deformation response of a material element. The generalized matrices are as follows:
#
# *Stiffness matrix* ($\mathbf{C_{ij}}$):
# <br>
#
# $$ \left( \begin{array}{c} \sigma_{1} \\ \sigma_{2} \\ \sigma_{3} \\ \tau_{23} \\ \tau_{31} \\ \tau_{12} \\ \end{array} \right) =
# \begin{bmatrix} C_{11} & C_{12} & C_{13} & C_{14} & C_{15} & C_{16} \\
# C_{21} & C_{22} & C_{23} & C_{24} & C_{25} & C_{26} \\
# C_{31} & C_{32} & C_{33} & C_{34} & C_{35} & C_{36} \\
# C_{41} & C_{42} & C_{43} & C_{44} & C_{45} & C_{46} \\
# C_{51} & C_{52} & C_{53} & C_{54} & C_{55} & C_{56} \\
# C_{61} & C_{62} & C_{63} & C_{64} & C_{65} & C_{66} \\
# \end{bmatrix}
# \cdot
# \left( \begin{array}{c} \varepsilon_{1} \\ \varepsilon_{2} \\ \varepsilon_{3} \\ \gamma_{23} \\ \gamma_{31} \\ \gamma_{12} \\ \end{array} \right) $$
#
# *Compliance matrix* ($\mathbf{S_{ij}}$):
# <br>
#
# $$ \left( \begin{array}{c} \varepsilon_{1} \\ \varepsilon_{2} \\ \varepsilon_{3} \\ \gamma_{23} \\ \gamma_{31} \\ \gamma_{12} \\ \end{array} \right) =
# \begin{bmatrix} S_{11} & S_{12} & S_{13} & S_{14} & S_{15} & S_{16} \\
# S_{21} & S_{22} & S_{23} & S_{24} & S_{25} & S_{26} \\
# S_{31} & S_{32} & S_{33} & S_{34} & S_{35} & S_{36} \\
# S_{41} & S_{42} & S_{43} & S_{44} & S_{45} & S_{46} \\
# S_{51} & S_{52} & S_{53} & S_{54} & S_{55} & S_{56} \\
# S_{61} & S_{62} & S_{63} & S_{64} & S_{65} & S_{66} \\
# \end{bmatrix}
# \cdot
# \left( \begin{array}{c} \sigma_{1} \\ \sigma_{2} \\ \sigma_{3} \\ \tau_{23} \\ \tau_{31} \\ \tau_{12} \\ \end{array} \right)$$
#
# **Significance of coupled stress-strain elements (*described with the compliance matrix*)**
# For anisotropic materials, significant coupling occurs between the applied stress and the various strain responses. Examining the compliance matrix, the following stress-strain responses are coupled:
#
# * The $ S_{11}, S_{22}$ and $S_{33} $ terms each represent extensional response to their respective applied stress components ($ \sigma_{1}, \sigma_{2}, \sigma_{3} $) in the same direction.
#
# * The $ S_{44}, S_{55}$ and $S_{66} $ terms represent shear strain response to an applied shear stress in the same plane
#
# * The $ S_{12}, S_{13}$ and $S_{23} $ terms represent coupling between dissimilar normal stresses and normal strains (extension-extension coupling more commonly known as the Poisson effect)
#
# * The $ S_{14}, S_{15}, S_{16}, S_{24}, S_{25}, S_{26}, S_{34}, S_{35}$ and $S_{36} $ terms represent normal strain response to applied shear stress in a more complex manner than for the preceding compliances (shear-extension coupling)
#
# * Finally, the $ S_{45}, S_{46}$ and $S_{56} $ terms represent shear strain response to shear stress applied in another plane (shear-shear coupling)
#
# * However, less than 36 of the constants can be shown to actually be independent for elastic materials when important characteristics of the strain energy are considered, such as whether the material behaves anisotropically, orthotropically, monoclinically or traversely isotropically when undergoing deformation.
#
# [comment]: <> (------------------------------§2.3.2 Footnotes------------------------------)
# __________________________________
#
# <span id="fn1"> 1. Linear elasticity theory makes a number of assumptions about the elastic/plastic deformation response to stress fields induced by applied force loads, namely that strain responses are infinitesimally small, and that relationships between the components of stress and strain are approximately linear. Additionally, the theory is valid only for stress states that do not produce yielding.</span>
# ### 2.1.2 UD laminae strain-stress relationships
# The fibre arrangements, of the laminate coupons prepared for these experimental investigations, were classified as orthotropic or transversely isotropic bodies. In UD laminae, all planes whose perpendicular vector is transverse with respect to the fibre direction are planes of symmetry. The UD laminae are transversely isotropic. On planes parallel with the fibre direction it behaves orthotropically (*material properties that differ along three mutually-orthogonal twofold axes of rotational symmetry - a subset of anisotropic materials*) and on an imaginary plane perpendicular to the fibre direction it behaves isotropically (*material properties remain constant in all directions*).
#
# **3-D Orthotropic $\mathbf{\sigma} - \mathbf{\varepsilon}$ relationship**
# <br>
#
# With the application of tensile or compressive force loading, parallel to the fibre direction of the UD laminae (in-plane loading), shear-extension and shear-shear coupling can be neglected. As such, the stress-strain relationship, w.r.t the ***stiffness matrix***, for UD laminae exhibiting orthotropic behaviour is simplified to:
#
# $$ \left( \begin{array}{c} \sigma_{1} \\ \sigma_{2} \\ \sigma_{3} \\ \tau_{23} \\ \tau_{31} \\ \tau_{12} \\ \end{array} \right) =
# \begin{bmatrix} C_{11} & C_{12} & C_{13} & 0 & 0 & 0 \\
# C_{21} & C_{22} & C_{23} & 0 & 0 & 0 \\
# C_{31} & C_{32} & C_{33} & 0 & 0 & 0 \\
# 0 & 0 & 0 & C_{44} & 0 & 0 \\
# 0 & 0 & 0 & 0 & C_{55} & 0 \\
# 0 & 0 & 0 & 0 & 0 & C_{66} \\
# \end{bmatrix}
# \cdot
# \left( \begin{array}{c} \varepsilon_{1} \\ \varepsilon_{2} \\ \varepsilon_{3} \\ \gamma_{23} \\ \gamma_{31} \\ \gamma_{12} \\ \end{array} \right) $$
#
# and w.r.t. to the ***compliance matrix***:
#
# $$ \left( \begin{array}{c} \varepsilon_{1} \\ \varepsilon_{2} \\ \varepsilon_{3} \\ \gamma_{23} \\ \gamma_{31} \\ \gamma_{12} \\ \end{array} \right) =
# \begin{bmatrix} S_{11} & S_{12} & S_{13} & 0 & 0 & 0 \\
# S_{21} & S_{22} & S_{23} & 0 & 0 & 0 \\
# S_{31} & S_{32} & S_{33} & 0 & 0 & 0 \\
# 0 & 0 & 0 & S_{44} & 0 & 0 \\
# 0 & 0 & 0 & 0 & S_{55} & 0 \\
# 0 & 0 & 0 & 0 & 0 & S_{66} \\
# \end{bmatrix}
# \cdot
# \left( \begin{array}{c} \sigma_{1} \\ \sigma_{2} \\ \sigma_{3} \\ \tau_{23} \\ \tau_{31} \\ \tau_{12} \\ \end{array} \right)$$
#
# **3-D Isotropic $\mathbf{\sigma} - \mathbf{\varepsilon}$ relationship**
# The stress-strain relationship, w.r.t the ***stiffness matrix***, for UD laminae exhibiting isotropic behaviour is simplified to:
#
# $$ \left( \begin{array}{c} \sigma_{1} \\ \sigma_{2} \\ \sigma_{3} \\ \tau_{23} \\ \tau_{31} \\ \tau_{12} \\ \end{array} \right) =
# \begin{bmatrix} C_{11} & C_{12} & C_{13} & 0 & 0 & 0 \\
# C_{12} & C_{11} & C_{13} & 0 & 0 & 0 \\
# C_{13} & C_{13} & C_{33} & 0 & 0 & 0 \\
# 0 & 0 & 0 & C_{44} & 0 & 0 \\
# 0 & 0 & 0 & 0 & C_{44} & 0 \\
# 0 & 0 & 0 & 0 & 0 & (C_{11} - C_{12})/2 \\
# \end{bmatrix}
# \cdot
# \left( \begin{array}{c} \varepsilon_{1} \\ \varepsilon_{2} \\ \varepsilon_{3} \\ \gamma_{23} \\ \gamma_{31} \\ \gamma_{12} \\ \end{array} \right) $$
#
# and w.r.t. to the ***compliance matrix***:
#
# $$ \left( \begin{array}{c} \varepsilon_{1} \\ \varepsilon_{2} \\ \varepsilon_{3} \\ \gamma_{23} \\ \gamma_{31} \\ \gamma_{12} \\ \end{array} \right) =
# \begin{bmatrix} S_{11} & S_{12} & S_{13} & 0 & 0 & 0 \\
# S_{12} & S_{11} & S_{13} & 0 & 0 & 0 \\
# S_{13} & S_{13} & S_{33} & 0 & 0 & 0 \\
# 0 & 0 & 0 & S_{44} & 0 & 0 \\
# 0 & 0 & 0 & 0 & S_{44} & 0 \\
# 0 & 0 & 0 & 0 & 0 & 2(S_{11} - S_{12}) \\
# \end{bmatrix}
# \cdot
# \left( \begin{array}{c} \sigma_{1} \\ \sigma_{2} \\ \sigma_{3} \\ \tau_{23} \\ \tau_{31} \\ \tau_{12} \\ \end{array} \right)$$
# ### 2.1.3 2-D UD laminae strain-stress relationships
# Since the test coupons for this investigation are all UD laminate samples, and tensile/compressive loadings during experiment runs were applied in the 1-2 plane of the samples, the remaining theory discussion will focus on 2-D laminae stress-strain relationships.
#
# The plane-stress state of the UD composite, in the 1-2 plane, is defined by setting:
#
# $$ \sigma_{3} = 0, \tau_{23} = 0, \tau_{31} = 0$$
#
# such that:
#
# $$ \sigma_{1} \neq 0, \sigma_{2} \neq 0, \tau_{21} \neq 0$$
# <br>
#
# where $ \sigma_{1}$ and $ \sigma_{2}$ represent the normal stress components in the 1-2 plane of a UD laminate material, w.r.t. the 1 and 2 directions respectively, and $ \tau_{21}$ represents the intralaminar shear stress w.r.t. the 1-2 plane of a UD lamina.
#
# **2-D Orthotropic $\mathbf{\sigma} - \mathbf{\varepsilon}$ relationship**
# As such, the 2-D stress-strain relationship, w.r.t the ***stiffness matrix***, for UD laminae exhibiting orthotropic behaviour is simplified to:
#
# $$ \left( \begin{array}{c} \sigma_{1} \\ \sigma_{2} \\ \tau_{12} \\ \end{array} \right) =
# \begin{bmatrix} C_{11} & C_{12} & 0 \\
# C_{12} & C_{22} & 0 \\
# 0 & 0 & C_{66} \\
# \end{bmatrix}
# \cdot
# \left( \begin{array}{c} \varepsilon_{1} \\ \varepsilon_{2} \\ \gamma_{12} \\ \end{array} \right) $$
#
# and w.r.t. to the ***compliance matrix***:
#
# $$ \left( \begin{array}{c} \varepsilon_{1} \\ \varepsilon_{2} \\ \gamma_{12} \\ \end{array} \right) =
# \begin{bmatrix} S_{11} & S_{12} & 0 \\
# S_{12} & S_{22} & 0 \\
# 0 & 0 & S_{66} \\
# \end{bmatrix}
# \cdot
# \left( \begin{array}{c} \sigma_{1} \\ \sigma_{2} \\ \tau_{12} \\ \end{array} \right)$$
#
# These equations effectively describe the deformation response (described by tensor and engineering shear strain components) of the UD laminae in the 1-2 plane, w.r.t. an in-plane stress field (described by the normal stress components in the 1-2 plane and the in-plane shear stress component).
#
# The tensile and compressive tests, performed for the purposes of this investigation, will be conducted with preset force loads. The resulting displacement (strain) measured will determine the engineering constants required to solve the stress-strain equations discussed above.
# ### 2.1.4 Overview of engineering constants for solving strain-stress equations
# A brief overview of the engineering constants required to solve the 2-D UD laminae stress-strain equations is presented below:
#
# 1. The slope of (tensile/compressive) stress-strain curve:
# * $ E = \frac{\sigma}{\varepsilon} $
# * $ E_{i}$ represents the Young's (extension) modulus in the $ i^{th}$ direction, describing the elastic extension of the material in a specific direction
#
# 2. The slope of strain-strain curves (Poisson's ratio):
# * the negative of the ratio of (signed) transverse strain to (signed) axial strain (i.e. extension-extension coupling coefficient)
# * $ \nu_{ij} = - \frac{\varepsilon_{j}}{\varepsilon_{i}} $ (*load applied in the $i$ direction, transverse strain measured in the $j$ direction*)
#
# 3. The shear modulus, $ G_{ij}$, defining the ratio of shear stress to the shear strain in the (i-j) plane, or rather the material's response to shear stress
#
# **The orthotropic UD laminae case**
# <br>
#
# From these definitions, the **compliance matrix** can be expressed in terms of these engineering constants as follows:
#
# $$S =
# \begin{bmatrix} \frac{1}{E_{11}} & - \frac{\nu_{21}}{E_{22}} & 0 \\
# - \frac{\nu_{12}}{E_{11}} & \frac{1}{E_{22}} & 0 \\
# 0 & 0 & \frac{1}{G_{12}} \\
# \end{bmatrix} $$
#
# Since the stiffness and compliance matrices are mutually inverse, it follows by matrix algebra that their components are related as follows for orthotropic materials (*limited to the 2-D case*):
#
# $$ C_{11} = \frac{S_{22}}{S_{11} S_{22} - S^{2}_{12}} = \frac{E_{1}}{1 - \nu_{12} \nu_{21}} $$
#
# $$ C_{22} = \frac{S_{11}}{S_{11} S_{22} - S^{2}_{12}} = \frac{E_{2}}{1 - \nu_{12} \nu_{21}} $$
#
# $$ C_{12} = \frac{- S_{12}}{S_{11} S_{22} - S^{2}_{12}} = \frac{\nu_{12}E_{2}}{1 - \nu_{12} \nu_{21}} =
# \frac{\nu_{21}E_{1}}{1 - \nu_{12} \nu_{21}} $$
#
# $$ C_{66} = \frac{1}{S_{66}} = G_{12} $$
#
# **The isotropic UD laminae case**
# <br>
#
# Note that for orthotropic UD laminae, there are four independent variables, namely $ E_{1}, E_{2}, \nu_{12} $ and $ G_{12} $. For the isotropic case, we note that:
# * $ S_{11} = \frac{1}{E_{1}} = \frac{1}{E_{2}} = \frac{1}{E} = S_{22} $, such that $ E_{1} = E_{2} = E $
# * $ S_{12} = - \frac{\nu_{12}}{E_{1}} = - \frac{\nu_{21}}{E_{2}} = - \frac{\nu}{E} $
# * $ \frac{1}{G_{12}} = \frac{1}{G} = \frac{2(1 + \nu)}{E} $
# ## 2.2 UD laminae in-plane strain-stress relationships
# For in-plane stresses of composite plies we assume that stresses and strains do not vary in certain directions, depending on how the ply is force-loaded. When the aforementioned plane-stress condition exists, the three-dimensional analysis simplifies considerably.
#
# The UD laminate test coupons, from the experimental trials, were subjected to in-plane stresses (via 1-2 plane tensile and compressive force loading). The fibre arrangements, of the experiment laminate coupons, were classified as orthotropic or (symmetric) transversely isotropic bodies.
#
# From the general, anisotropic stress-strain systems of equations, defined in Eq. [6] and [7] with the stiffness and compliance matrices respectively:
#
# *Stiffness matrix* ($\mathbf{C_{ij}}$):
# <br>
#
# $$ \left( \begin{array}{c} \sigma_{1} \\ \sigma_{2} \\ \sigma_{3} \\ \tau_{23} \\ \tau_{31} \\ \tau_{12} \\ \end{array} \right) =
# \begin{bmatrix} C_{11} & C_{12} & C_{13} & C_{14} & C_{15} & C_{16} \\
# C_{21} & C_{22} & C_{23} & C_{24} & C_{25} & C_{26} \\
# C_{31} & C_{32} & C_{33} & C_{34} & C_{35} & C_{36} \\
# C_{41} & C_{42} & C_{43} & C_{44} & C_{45} & C_{46} \\
# C_{51} & C_{52} & C_{53} & C_{54} & C_{55} & C_{56} \\
# C_{61} & C_{62} & C_{63} & C_{64} & C_{65} & C_{66} \\
# \end{bmatrix}
# \cdot
# \left( \begin{array}{c} \varepsilon_{1} \\ \varepsilon_{2} \\ \varepsilon_{3} \\ \gamma_{23} \\ \gamma_{31} \\ \gamma_{12} \\ \end{array} \right) $$
#
# *Compliance matrix* ($\mathbf{S_{ij}}$):
# <br>
#
# $$ \left( \begin{array}{c} \varepsilon_{1} \\ \varepsilon_{2} \\ \varepsilon_{3} \\ \gamma_{23} \\ \gamma_{31} \\ \gamma_{12} \\ \end{array} \right) =
# \begin{bmatrix} S_{11} & S_{12} & S_{13} & S_{14} & S_{15} & S_{16} \\
# S_{21} & S_{22} & S_{23} & S_{24} & S_{25} & S_{26} \\
# S_{31} & S_{32} & S_{33} & S_{34} & S_{35} & S_{36} \\
# S_{41} & S_{42} & S_{43} & S_{44} & S_{45} & S_{46} \\
# S_{51} & S_{52} & S_{53} & S_{54} & S_{55} & S_{56} \\
# S_{61} & S_{62} & S_{63} & S_{64} & S_{65} & S_{66} \\
# \end{bmatrix}
# \cdot
# \left( \begin{array}{c} \sigma_{1} \\ \sigma_{2} \\ \sigma_{3} \\ \tau_{23} \\ \tau_{31} \\ \tau_{12} \\ \end{array} \right)$$
#
# These equations reduce to the following for in-plane (*plane-stress*) loading of orthotropic composite plies:
#
# $$ \left( \begin{array}{c} \sigma_{1} \\ \sigma_{2} \\ \tau_{12} \\ \end{array} \right) =
# \begin{bmatrix} Q_{11} & Q_{12} & Q_{16} \\
# Q_{12} & Q_{22} & Q_{26} \\
# Q_{16} & Q_{26} & Q_{66} \\
# \end{bmatrix}
# \cdot
# \left( \begin{array}{c} \varepsilon_{1} \\ \varepsilon_{2} \\ \gamma_{12} \\ \end{array} \right) $$
#
# Where:
#
# $Q_{ij}$ represent the **in-plane** elements of the stiffness matrix subject to the **plane-stress condition** (*differentiated from the general stiffness matrix elements $C_{ij}$*)
#
# The plane-stress conditioned compliance matrix is the inverse of the plane-stress conditioned stiffness matrix:
#
# $$ \begin{bmatrix} S'_{11} & S'_{12} & S'_{16} \\
# S'_{12} & S'_{22} & S'_{26} \\
# S'_{16} & S'_{26} & S'_{66} \\
# \end{bmatrix}^{-1} =
# \begin{bmatrix} Q_{11} & Q_{12} & Q_{16} \\
# Q_{12} & Q_{22} & Q_{26} \\
# Q_{16} & Q_{26} & Q_{66} \\
# \end{bmatrix} $$
#
# Where:
#
# $S'_{ij}$ represent the **in-plane** elements of the compliance matrix subject to the **plane-stress condition** (*differentiated from the general compliance matrix elements $S_{ij}$*)
# ## 2.3 Stress and strain transformations
# Axes transformations are important in stress-strain of materials. Such transformations are required to compute critical values of these (stress-strain) characteristics, as well as to be able to understand the tensorial nature of stress and strain. Other entities, such as moment of inertia and curvature, also transform in a manner similar to stress and strain.
#
# For the purposes of this DLN the relevant theory related to transformations of stress and strain from a local coordinate system to a global coordinate system will briefly be discussed. Further resources on (stress-strain) tensor transformation theory can be found in [Roylance, 2001].
#
# ### 2.3.1 Stress and strain transformations for laminate plies
#
# Stress can be transformed from a local cartesian coordinate system **L(p,q,r)** to a global cartesian coordinate system **G(p,q,r)** via:
#
# $$ \left( \begin{array}{c} \sigma_{G,p} \\ \sigma_{G,q} \\ \sigma_{G,r} \\ \tau_{G,qr} \\ \tau_{G,pr} \\ \tau_{G,pq} \\ \end{array} \right) =
# \begin{bmatrix} T_{\sigma11} & \cdots & T_{\sigma16} \\
# \vdots & \ddots & \vdots \\
# T_{\sigma61} & \cdots & T_{\sigma66} \\
# \end{bmatrix}
# \cdot
# \left( \begin{array}{c} \sigma_{L,p} \\ \sigma_{L,q} \\ \sigma_{L,r} \\ \tau_{L,qr} \\ \tau_{L,pr} \\ \tau_{L,pq} \\ \end{array} \right)$$
#
# Which can be written in the form:
#
# $$ \mathbf{\sigma_{G}} = [\mathbf{\hat{T}_{\sigma}}] \mathbf{\sigma_{L}} $$
#
# For composite laminate plies subjected to plane-strain and plane-stress conditions, one is only interested in the stresses manifested in the G(p-q) and L(p-q) planes; for UD laminate plies this would mean the planes that characterize stresses occurring only in the fibre direction and transverse to the fibre direction. Then the stresses in the **G(p,q,r)** coordinate system are arrived at by rotation about the 'r' axis of the **L(p,q,r)** coordinate system, namely:
#
# $$ \left( \begin{array}{c} \sigma_{G,p} \\ \sigma_{G,q} \\ \tau_{G,pq} \\ \end{array} \right) =
# \begin{bmatrix} c^{2} & s^{2} & 2cs \\
# s^{2} & c^{2} & -2cs \\
# -cs & cs & c^{2}-s^{2} \\
# \end{bmatrix}
# \cdot
# \left( \begin{array}{c} \sigma_{L,p} \\ \sigma_{L,q} \\ \tau_{L,pq} \\ \end{array} \right)$$
#
# Where:
# <br>
#
# $ c = \cos\theta \qquad\quad s = \sin\theta $
#
# And can be written in the form:
#
# $$ \mathbf{\sigma_{G}} = [\mathbf{T_{\sigma}}] \mathbf{\sigma_{L}} $$
#
# Meaning that only the three in-plane stress components are transformed.
#
# A similar treatment of the strain tensor, relating strains (on a composite laminate ply material) in the local coordinate system to the global coordinate system, yields:
#
# $$ \left( \begin{array}{c} \varepsilon_{G,p} \\ \varepsilon_{G,q} \\ \gamma_{G,pq} \\ \end{array} \right) =
# \begin{bmatrix} c^{2} & s^{2} & cs \\
# s^{2} & c^{2} & -cs \\
# -2cs & 2cs & c^{2}-s^{2} \\
# \end{bmatrix}
# \cdot
# \left( \begin{array}{c} \varepsilon_{L,p} \\ \varepsilon_{L,q} \\ \gamma_{L,pq} \\ \end{array} \right)$$
#
# Where c and s are as previously defined, and can be written alternatively as:
#
# $$ \mathbf{\varepsilon_{G}} = [\mathbf{T_{\varepsilon}}] \mathbf{\varepsilon_{L}} $$
#
# The stiffness and compliance matrices, $[\mathbf{C}]$ and $[\mathbf{S}]$ respectively, can be transformed accordingly (*see [Roylance, 2001] for matrix inversion steps*) to yield:
#
# $$ [\mathbf{C'}] = [\mathbf{\hat{T}_{\sigma}}][\mathbf{C}][\mathbf{\hat{T}_{\varepsilon}}]^{-1} $$
#
# $$ [\mathbf{S'}] = [\mathbf{\hat{T}_{\varepsilon}}][\mathbf{S}][\mathbf{\hat{T}_{\sigma}}]^{-1} $$
#
# Thus the transformed stiffness matrix can be computed for composites with fibres of varying orientations, from a reference local coordinate system and using laminate ply material stiffness constants to a global coordinate system, with the stress and strain transforms being:
#
# $$ [\mathbf{T_{\sigma}}] =
# \begin{bmatrix} c^{2} & s^{2} & 2cs \\
# s^{2} & c^{2} & -2cs \\
# -cs & cs & c^{2}-s^{2} \\
# \end{bmatrix} $$
#
# $$ [\mathbf{T_{\varepsilon}}] =
# \begin{bmatrix} c^{2} & s^{2} & cs \\
# s^{2} & c^{2} & -cs \\
# -2cs & 2cs & c^{2}-s^{2} \\
# \end{bmatrix} $$
#
# ### 2.3.2 In-plane transformed stiffness and compliance matrices
#
# It follows that, for composite plies subjected to plane-stress conditions, the transformed in-plane stress and strain systems of equations can be derived by substituting the plane-stress conditioned stiffness and compliance matrices (Eqn. [23], [24]) into Eqns. [31] and [32]:
#
# $$ [\mathbf{\bar{Q}}] = [\mathbf{\hat{T}_{\sigma}}][\mathbf{Q}][\mathbf{\hat{T}_{\varepsilon}}]^{-1} $$
#
# $$ [\mathbf{\bar{S}}] = [\mathbf{\hat{T}_{\varepsilon}}][\mathbf{S}][\mathbf{\hat{T}_{\sigma}}]^{-1} $$
#
#
# ### 2.3.3 References
#
# 1. Roylance, D. (2001). Transformation of stresses and strains. Lecture Notes for Mechanics of Materials.
# ## 2.4 Laminate structural durability calculations
# ### 2.4.1 Stiffness matrices for in-plane stress conditioned laminates
#
# Eqn.s [35] and [36] represent the plane-stressed conditioned stiffness and compliance matrices for individual plies. To approximate the stress-strain relationships of entire laminates (multi-layer manufactured plies), we define the [A], [B] and [D] stiffness matrices:
#
# $$ [\mathbf{A}] = \int_{-h_{b}}^{h_{t}} [\bar{\mathbf{Q}}]dz $$, and each $[A_{ij}]$ element defined by:
#
# $$ A_{ij} = \int_{-h_{b}}^{h_{t}} \bar{Q_{ij}}dz $$
#
# Where:
#
# * $h_{b}$ represents the distance of the laminate plies from the reference plane to the bottom surface of the entire laminate structure
# * $h_{t}$ represents the distance of the laminate plies from the reference plane to the top surface of the entire laminate structure
# * Recall that $[\bar{Q}]$ represents the in-plane stress conditioned, transformed stiffness matrix of each ply
#
# The variable '$\mathbf{z}$' in Eqn. [37] is the through-thickness coordinate, measured from the laminate reference plane, along which the ply stiffnesses are integrated
#
# $$ [\mathbf{B}] = \int_{-h_{b}}^{h_{t}} z[\bar{\mathbf{Q}}]dz $$, and each $[B_{ij}]$ element defined by:
#
# $$ B_{ij} = \int_{-h_{b}}^{h_{t}} z\bar{Q_{ij}}dz $$
#
# and
#
# $$ [\mathbf{D}] = \int_{-h_{b}}^{h_{t}} z^{2}[\bar{\mathbf{Q}}]dz $$, and each $[D_{ij}]$ element defined by:
#
# $$ D_{ij} = \int_{-h_{b}}^{h_{t}} z^{2}\bar{Q_{ij}}dz $$
#
# From the assumption that the composite plies and laminates, tested for the quasi-static fatigue loading investigations, exhibit linear elastic behaviour, it is assumed that $[\bar{Q}]$ is constant across each ply. Thus, the laminate stiffness and compliance integrals above can be replaced by the summations:
#
# $$ A_{ij} = \sum_{k=1}^{K} (\bar{Q_{ij}})_{k}(z_{k}-z_{k-1}) $$
#
# $$ B_{ij} = \frac{1}{2}\sum_{k=1}^{K} (\bar{Q_{ij}})_{k}(z^{2}_{k}-z^{2}_{k-1}) $$
#
# $$ D_{ij} = \frac{1}{3}\sum_{k=1}^{K} (\bar{Q_{ij}})_{k}(z^{3}_{k}-z^{3}_{k-1}) $$
# ## 2.5 Mechanics of in-plane stress-conditioned composite laminates
# ### 2.5.1 In-plane forces and moments
#
# From the [A], [B] and [D] in-plane stiffness matrix elements described in Eqns. [43] through [45], the in-plane forces and moments of the laminate can be related to the in-plane strain and curvature response of the laminate. For a laminate subject to the in-plane stress condition in the 1-2 plane, this relationship is:
#
# $$ \left( \begin{array}{c} N_{1} \\ N_{2} \\ N_{1-2} \\ M_{1} \\ M_{2} \\ M_{1-2} \\ \end{array} \right) =
# \begin{bmatrix} A_{11} & A_{12} & A_{16} & B_{11} & B_{12} & B_{16} \\
# A_{21} & A_{22} & A_{26} & B_{21} & B_{22} & B_{26} \\
# A_{61} & A_{62} & A_{66} & B_{61} & B_{62} & B_{66} \\
# B_{11} & B_{12} & B_{16} & D_{11} & D_{12} & D_{16} \\
# B_{21} & B_{22} & B_{26} & D_{21} & D_{22} & D_{26} \\
# B_{61} & B_{62} & B_{66} & D_{61} & D_{62} & D_{66} \\
# \end{bmatrix}
# \cdot
# \left( \begin{array}{c} \varepsilon^o_{1} \\ \varepsilon^o_{2} \\ \gamma^o_{1-2} \\ \kappa_{1} \\ \kappa_{2} \\ \kappa_{12} \\ \end{array} \right) $$
#
# Inversion of Eqn. [46] defines the strain and curvature of the laminate in terms of the in-plane force loading and moments of the laminate. For a laminate force-loaded in the 1-2 plane:
#
# $$ \left(\begin{array}{c} \varepsilon^o_{1} \\ \varepsilon^o_{2} \\ \gamma^o_{1-2} \\ \kappa_{1} \\ \kappa_{2} \\ \kappa_{12} \\ \end{array} \right) =
# \begin{bmatrix} \alpha_{11} & \alpha_{12} & \alpha_{16} & \beta_{11} & \beta_{12} & \beta_{16} \\
# \alpha_{21} & \alpha_{22} & \alpha_{26} & \beta_{21} & \beta_{22} & \beta_{26} \\
# \alpha_{61} & \alpha_{62} & \alpha_{66} & \beta_{61} & \beta_{62} & \beta_{66} \\
# \beta_{11} & \beta_{12} & \beta_{16} & \delta_{11} & \delta_{12} & \delta_{16} \\
# \beta_{21} & \beta_{22} & \beta_{26} & \delta_{21} & \delta_{22} & \delta_{26} \\
# \beta_{61} & \beta_{62} & \beta_{66} & \delta_{61} & \delta_{62} & \delta_{66} \\
# \end{bmatrix}
# \cdot
# \left(\begin{array}{c} N_{1} \\ N_{2} \\ N_{1-2} \\ M_{1} \\ M_{2} \\ M_{1-2} \\ \end{array} \right)
# $$
#
# ### 2.5.2 Importance of the [A], [B] and [D] matrices to laminate structural durability analyses
#
# The [A], [B] and [D] matrices characterize the stiffness of the laminates, the degree to which the composite laminate will elastically deform, when subjected to certain force-loading conditions.
#
# For the purposes of the quasi-static fatigue (tensile and compressive) in-plane force-loading of carbon fibre/epoxy laminate composite coupons (the experiments designed to investigate the *linear* elastic structural durability of these composite materials), the significance of these matrices are as follows:
#
# 1. The $A_{ij}$ stiffness matrix elements relate the in-plane forces, imposed on the laminate coupons, to the in-plane (*elastic*) deformations manifested in the laminates (*under tensile or compressive force-loading*)
#
# 2. The $B_{ij}$ stiffness matrix elements are the in-plane–out-of-plane coupling stiffnesses that relate the:
# * in-plane forces, imposed on the laminate coupons, to the resultant curvatures of the laminate
# * moments, imposed on the laminate, to the resultant in-plane deformation of the laminate
#
# 3. The $D_{ij}$ stiffness matrix elements are the bending stiffnesses that relate the moments, imposed on the laminate, to the resultant curvatures of the laminate
#
# Examination of the [A], [B], and [D] matrices show that different types of couplings may occur. For the experimental (tensile and compressive force-loading) of the composite laminate coupons in the 1-2 plane, the following important force-moment-curvature-deformation couplings are worth noting:
#
# 1. **Extension–shear coupling**
# * When the elements $A_{16}$, $A_{26}$ (of the $A_{ij}$ elements) are not zero, in-plane normal forces ($N_{1}, N_{2}$) cause shear deformation ($\gamma^o_{1-2}$), and a twist force ($N_{1-2}$) causes elongations in the 1 and 2 directions
#
# 2. **Bending–twist coupling**
# * When the elements $D_{16}$, $D_{26}$ are not zero, bending moments ($M_{1}, M_{2}$) may cause a twisting of the laminate ($\kappa_{1-2}$), and a twist moment ($M_{1-2}$) causes curvatures in the 1–3 and 2–3 planes
#
# 3. **Extension–twist and bending–shear coupling**
# * When the elements $B_{16}$, $B_{26}$ are not zero, in-plane normal forces ($N_{1}, N_{2}$) cause twist ($\kappa_{1-2}$), and bending moments ($M_{1}, M_{2}$) result in shear deformation ($\gamma^o_{1-2}$)
#
# 4. **In-plane–out-of-plane coupling**
# * When the $B_{ij}$ stiffness matrix elements are not zero, in-plane forces ($N_{1}, N_{2}, N_{1-2}$) cause out-of-plane deformations (curvatures) of the laminate, and moments ($M_{1}, M_{2}, M_{1-2}$) cause in-plane deformations in the 1-2 plane.
#
# It is worth noting that these four types of coupling are characteristic of composite materials and do not occur in homogeneous isotropic materials. The following two couplings occur in both composite and isotropic materials:
#
# 5. **Extension–extension coupling**
# * When the element $A_{12}$ is not zero, a normal force $N_{1}$ causes elongation in the 2 direction ($\varepsilon^o_{2}$), and a normal force $N_{2}$ causes elongation in the 1 direction ($\varepsilon^o_{1}$)
#
# 6. **Bending–bending coupling**
# * When the element $D_{12}$ is not zero, a bending moment $M_{1}$ causes curvature of the laminate in the 2-3 plane ($\kappa_{2}$), and a bending moment $M_{2}$ causes curvature of the laminate in the 1–3 plane ($\kappa_{1}$)
# ## 2.6 Applications of [A], [B], [D] ( [$\alpha$], [$\beta$], [$\delta$] ) matrices to the (*elastic*) structural durability characterization of experiment carbon fibre/epoxy composite coupons
# * [§3: Structural durability analyses of carbon fibre & epoxy-based composites - Experimental results](DLN - §3 - Structural durability analyses of carbon fibre & epoxy-based composites - Experimental.ipynb) is the DLN entry that uses Python scientific programming libraries to explore and visualize quasi-fatigue tensile & compressive loading experiments on carbon fibre/epoxy composite test coupons. From analyses of the experiments, the elastic properties of the test coupons are determined.
# <br>
#
# * [§4: Structural durability analyses of carbon fibre & epoxy-based composites - Matrix calculations](DLN - §4 - Structural durability analyses of carbon fibre & epoxy-based composites - Calculations.ipynb) is the DLN entry that uses MATLAB to perform structural durability matrix calculations from carbon fibre/epoxy composite test coupon experimental data. The $[A], [B], [D]$ $([\alpha], [\beta], [\delta])$ matrices are calculated for each of the test laminate coupons.
#
# coding: utf-8
# # Structural durability analyses for carbon/epoxy laminates
#
# ## §3: Experimental
# In[39]:
# Hide raw code cells by default so bulky scripts don't clutter the rendered
# data-visualization output; the injected form button lets the reader toggle
# all notebook input cells on and off.
from IPython.display import HTML

_code_toggle_html = '''<script>
code_show=true;
function code_toggle() {
if (code_show){
$('div.input').hide();
} else {
$('div.input').show();
}
code_show = !code_show
}
$( document ).ready(code_toggle);
</script>
<form action="javascript:code_toggle()"><input type="submit" value="Click here to toggle on/off the raw code."></form>'''

# Returning the HTML object as the cell's last expression renders it in-notebook.
HTML(_code_toggle_html)
# # DLN Contents
#
# 0. [Materials Characterization Laboratory DLN | A Showcase for Convergent Manufacturing Group Ltd](DLN_0_About_Me.ipynb) - An 'Welcome' message to the Convergent Manufacturing - Materials Characterization Group, explaining the concept of these DLN entries, why I made them out of interest for the team's *Characterization Lab Technician/Scientist* opening, and presenting a brief 'About Me' StoryMap
# <br>
#
# 1. [§1: Structural durability analyses of carbon fibre & epoxy-based composites - Introduction](DLN_1_Introduction.ipynb) - An introduction to the quasi-fatigue experiments performed on carbon fibre/epoxy composite specimens.
# <br>
#
# 2. [§2: Structural durability analyses of carbon fibre & epoxy-based composites - Laminate mechanics theory](DLN_2_Theory.ipynb) - A discussion of composite laminate theory, as a basis for performing stress-strain-deformation calculations to characterize the structural durability of composite laminate layups.
# <br>
#
# 3. [§3: Structural durability analyses of carbon fibre & epoxy-based composites - Experimental results](DLN_3_Experimental.ipynb) - Using Python scientific programming libraries to explore and visualize quasi-fatigue tensile & compressive loading experiments on carbon fibre/epoxy composite test coupons.
# <br>
#
# 4. [§4: Structural durability analyses of carbon fibre & epoxy-based composites - Matrix calculations](DLN_4_Calculations.ipynb) - Using MATLAB to perform structural durability matrix calculations from carbon fibre/epoxy composite test coupon experimental data.
# ## I. Experiment log
# * **Date of experiment**: 10.14.2017
# * **Principle investigator**: Delroy Meyer, EIT, BASc
# * **Test operators**: Jürgen Müller, Delroy Meyer, Cintia Oliveria
# * **Lead investigator**: Prof. Dr-mont. Zoltan Major
# * **Course**: LVA Nr. 378.029 - 480ADPTPPBV17 - Polymer Product Design and Engineering III - Graduate Seminar
# * **Location**: Institute of Polymer Materials and Testing (IPMT), JKU Linz - Linz, Austria
# * Compounding and specimen preparation Lab: *Composite coupon specimen preparation*
# * Mechanical Lab: *Tensile & compression testing*
#
# ### i. Experiment test (lab) environment conditions
# *Measurement taken: 10-14-2017 08:45:07*
# <br>
#
# $T_{test} (°C) = 21.23$ (*within* $ 23 ± 3 °C$ *standard laboratory atmosphere range as per ASTM D5229*)
# <br>
# $RH (\%) = 55.7$ (*within* $ 50 ± 10 \%$ *standard laboratory atmosphere range as per ASTM D5229*)
# ## 3.1 Composite specimens to be tested for structural durability analyses
# ### 3.1.1 Properties applicable to all test coupons
# * **Composite type**: CFRP - carbon/epoxy laminates
# * Carbon fibre ply:
# * Unidirectional 0°:
# * Unidirectional 90°:
# * Unidirectional 45°:
# * Unidirectional ±45°: [HiMax™ FCIM151](https://www.hexcel.com/user_area/content_media/raw/FCIM151.pdf)
# * Epoxy resin system: [HexPly® M35-4](https://www.hexcel.com/user_area/content_media/raw/HexPly_M354_DataSheet.pdf)
# * **Fibre volume fraction ($v_{f}$, %)**: 55
# * **Test speed (mm/min)**: 1
# * **No. of samples per test**: 3
#
# ### 3.1.2 Properties applicable to all specimens
# The following table details the specimens to be tested for the investigation:
#
# **<center>Table 1. Set of carbon fibre/epoxy laminate coupons for quasi-static fatigue testing</center>**
#
# | Coupon tag | Direction | Orientation [°] | Loading | No. of Layers | Avg. Coupon Width [mm] | Avg. Coupon Thickness [mm] |
# |:----------:|:---------:|:---------------:|:-----------:|:-------------:|:----------------------:|:--------------------------:|
# | UD_0_4_T | UD | 0 | Tension | 4 | 9.98 | 1.02 |
# | UD_90_8_T | UD | 90 | Tension | 8 | 20.02 | 1.98 |
# | BD_±45_8_T | BD | ±45 | Tension | 8 | 20.1 | 1.95 |
# | UD_45_8_T | UD | 45 | Tension | 8 | 20.06 | 2.01 |
# | UD_0_4_C | UD | 0 | Compression | 4 | 9.98 | 1.01 |
# | UD_90_8_C | UD | 90 | Compression | 8 | 19.98 | 2.02 |
#
# As a reference, ply (laminate) layers were layed-up according to the following convention:
#
# 
#
# *<center> Fig. Y - UD lamina lay-up orientations (JKU Linz - IPPE, 2017)</center>*
#
#
# ### 3.1.3 References
# [1] *Mosenbacher, A., Brunbauer, J., Pichler, P. F., Guster, C., & Pinter, G. (2014). Modelling and validation of fatigue life calculation method for short fiber reinforced injection molded parts. In 16th European conference of composite materials.* [Link](http://www.escm.eu.org/eccm16/assets/0329.pdf)
#
# [2] *Jones, R. M. (1999). Mechanics of composite materials. 2nd Ed. CRC press.*
# ## 3.2 Carbon fibre/epoxy test coupon fabrication
# 1. Carbon/epoxy specimens with 55% fibre volume fraction were produced with the following materials:
#
# * **Epoxy resin and system**: HexPly® M35-4
# * **Carbon fibres**: HiMax™ FCIM151
# * **Other**: Epoxy-compatible binder was used to make handling the layup of carbon fibre sheets easier and to prevent distortion during the layup manufacturing
# <br>
#
#
# 2. The specimen laminates were produced according to the cure cycle protocol indicated in the HexPly® M35-4 technical specification, with:
# * the cure temperature set at $100 \pm 2.5°C$
# * cure processing time set at 4.5 hours
# * heat-up and cool-down rates set at 1°C/minute, vacuum applied at -0.85 bar
# * autoclave pressure set to 7.5 bar-g.
# <br>
#
# 3. Unidirectional (UD) carbon/epoxy specimens were milled from plates with diamond blades at angles of 0°, 45°/±45° and 90°
# <br>
#
# 4. Specimen geometries for mechanical tests with carbon/epoxy specimens were chosen according to the following specifications:
# * Rectangular specimens (especially for UD)
# * Tabs for load introduction
# * Tab material had to possess a lower stiffness than tested materials; testing CF/epoxy composite coupons - aluminum tabs were used for fabrication
# <br>
#
# The following figure shows the dimensions used to prepare the composite coupons for testing:
#
# 
#
# *<center> Fig. 3.1 - Test coupon geometries for UD quasi-isotropic and 0° (JKU Linz - IPPE, 2017)</center>*
#
# 4-ply UD specimens had the following geometry:
# * $200 \pm 0.1 mm \quad\quad x \quad\quad 10 \pm 0.025 mm \quad\quad x \quad\quad 1 \pm 0.025 mm$ for UD 0° specimens
#
# 8-ply UD specimens had the following geometry:
# * $200 \pm 0.1 mm \quad\quad x \quad\quad 20 \pm 0.1 mm \quad\quad x \quad\quad 2 \pm 0.025 mm$ for 8-ply/off-axis specimens
#
# * Aluminium tabs with 1.5 mm thickness were adhered on both sides of all carbon/epoxy specimens (for tensile loads, usually 1 mm thickness is chosen for specimens tested in fibre direction)
# ## 3.3 Experimental equipment and data reduction process
# ### 3.3.1 Data analysis
# For data evaluation, all moduli and strengths were calculated with the real cross-sections of the respective tested specimens. Moduli were evaluated between 0.001 and 0.003 absolute strain, as per:
# * ASTM D3039/D3039M: Standard test method for tensile properties of polymer matrix composite materials
#
# * ASTM D3410 / D3410M: Standard Test Method for Compressive Properties of Polymer Matrix Composite Materials
#
# * ASTM_E111-04: Standard Test Method for Young’s Modulus, Tangent Modulus, and Chord Modulus
#
# ### 3.3.2 Equipment
# * All mechanical tests were performed on a Zwick-Roell HA 100 kN servo-hydraulic fatigue test machine designed for loads up to 100 kN at room temperature
#
# ### 3.3.3 Quasi-static tension and compression tests
# * In quasi-static tension and compression tests, specimens were loaded in a displacement controlled way with a test speed of 1 mm/min
# * End-tabs were clamped completely between the Zwick-Roell HA system grips
# * Strains in longitudinal direction were recorded by means of a proprietary digital sensor setup (JKU Linz IPMT)
# * The experiment runs were designed to investigate the in-plane tensile and compressive properties of polymer matrix composite materials reinforced by high-modulus fibers (in this case, carbon fibre/epoxy laminate composites). The applicability of the ASTM test method are limited to continuous fiber or discontinuous fiber-reinforced composite material forms, in which the laminate is balanced and/or symmetric with respect to the test direction
# ## 3.4 Experimental data analyses - Python Preamble
# ### 3.4.1 Premable for python object-oriented programming
# In[40]:
##===============================IMAGES============================================================
#Image import preamble
import IPython
from IPython.display import display, Image, SVG, Math, YouTubeVideo
#Absolute path where notebook figures are read from / written to (machine-specific)
Image_PATH = "/Users/delroy_m/Desktop/(CMT) Materials Characterization ELN/0.Images/"
# Use 'image drag & drop' IPython Notebook Extension
#IPython.html.nbextensions.install_nbextension('https://raw.github.com/ipython-contrib/IPython-notebook-extensions/master/nbextensions/usability/dragdrop/main.js')
#Load 'image drag & drop' extension
#%javascript
#IPython.load_extensions('usability/dragdrop/main');
#NOTE about 'image drag & drop' extension handling of images
# The image will be uploaded to the server into the directory where your notebook resides. This means, the image is not copied into the notebook itself, it will only be linked to.
##===============================DATA ANALYSES=====================================================
#import PANDAS - A library providing high-performance, easy-to-use data structures and data analysis tools
import pandas as pd
#print("Current Pandas version:", pd.__version__)
# print("plotly version:", __version__)
#import SciPy - A Python-based ecosystem of open-source software for mathematics, science, and engineering
import scipy
#NOTE(review): star imports flood the namespace and can shadow builtins; later cells call
#math.log, which these imports do NOT provide - presumably 'math' is imported in an earlier
#notebook cell (TODO confirm)
from scipy import *
#Import Gaussian distribution STATS package to validate whether experimental data is randomly (normally)
#distributed
from scipy.stats import *
#from scipy.stats import norm
# if using a Jupyter notebook, include:
#%matplotlib inline
#import NumPy - A fundamental package for scientific computing with Python
import numpy as np
#import qgrid - Allows querying of DataFrames with intuitive scrolling, sorting, and filtering controls,
#as well as editing features, for the DataFrames, by double clicking cells
import qgrid
##===============================DATA VISUALIZATION================================================
#import matplotlib - A Python 2D plotting library
#import matplotlib.pyplot as plt
#import Pygal - A Python SVG Charts Creator
import pygal
#import Plotly for online or offline interactive plot rendering
#
#If using Plotly with online server:
#import plotly.plotly as py
#
#If using Plotly offline and saving code/graphs/images locally:
import plotly.graph_objs as go
import plotly as py
from plotly import __version__ #ensures that most up-to-date plotly pckg is being used
from plotly.offline import init_notebook_mode, plot, download_plotlyjs, iplot
import plotly.figure_factory as ff
from plotly import tools
#Improve Plotly figure render responsiveness
import plotly.io as pio
pio.renderers.default = 'iframe'
# #import cufflinks as cf
#import Seaborn - Statistical data visualization using Matplotlib
import seaborn as sns
#from matplotlylib import fig_to_plotly
#import Plotly express - A terse, consistent, high-level wrapper around Plotly.py for rapid data exploration and figure generation
#import plotly_express as px
#Put plotly environment in 'offline mode'
py.offline.init_notebook_mode(connected=True)
#Reinitialize Jupyter Notebook mode
init_notebook_mode()
#For 'online' plotting:
# Learn about API authentication here: https://plot.ly/pandas/getting-started
# Find your api_key here: https://plot.ly/settings/api
#Do I have the most up-to-date plotly package?
#print("Current Plotly version:", __version__)
##===============================SYSTEM COMMANDS====================================================
import glob
import sys
import datetime
import os
##===============================EXCEPTION HANDLING=================================================
#Ignore dataframe slicing copying warnings --> these are annoying, and issue is acknowledged
pd.options.mode.chained_assignment = None # default='warn'
#Mute any annoying compiling warnings that arise when running code
#import warnings
#warnings.filterwarnings("ignore")
# ### 3.4.2 Setup framework for parsing quasi-static fatigue experimental data into Python (Pandas) dataframes
# In[41]:
##===============================Create dataframe from experiment data================================
#Coupon cyclic fatigue testing datasets - formatted according to "Hadley Wickham - Tidy Data"
#"Hadley Wickham - Tidy Data" - http://vita.had.co.nz/papers/tidy-data.pdf
#1. Each variable forms a column
#2. Each observation forms a row
#3. Each type of observational unit forms a table
##----------------------------------------------------------------------------------------------------
##-Naming convention for experiment files-##
#
#[Fiber_direction]-[Orientation_degree]-[Tension/Compression]-[Fibre_type]-[Test_speed (mm/min)]-[Test_temp]...
#-[Strain_in_load_direction]-[#_of_specimens_tested]-[specimen_avg_width (mm)]-[specimen_avg_thickness (mm)].xlsx
#"Experiment data attribute tags
####----------------------------------------------------------------------------------------------------
# 1. Fiber_direction:
# - Unidirectional (UD): 0°, 90° --> Provides longitudinal stiffness
# - Bidirectional (BD): ±45° --> Provides torsional stiffness
# * Attribute_type = [Alpha]
#
# 2. Orientation (°): 0°, ±45°, 90°
# * Attribute_type = [Alphanumeric]
#
# 3. Tension/compression loading:
# - T: Tension
# - C: Compression
# * Attribute_type = [Alpha]
#
# 8. Strain-in-load direction (x, y, x &/OR y):
# - UD: ε,y
# - BD: ε,y &/OR ε,x
# * Attribute_type = [Alphanumeric]
#
# 9. No. of specimens tested (#):
# * Attribute_type = [Numeric]
#
# 10. Specimens avg. width (mm):
# * Attribute_type = [Numeric]
#
# 11. Specimens avg. thickness (mm):
# * Attribute_type = [Numeric]
#
#
#"Experiment data variables
####----------------------------------------------------------------------------------------------------
#Column 1:
# - Tension or compression load [N]
#
##Column 2:
# - Strain [%]
#Custom plotting palette: named hex colours, grouped from turquoise/cyan tones
#through bright blues to dark blues.
####----------------------------------------------------------------------------------------------------
#Turquoise & cyan family
dark_turquoise   = '#00CED1'
turquoise        = '#40E0D0'
medium_turquoise = '#48D1CC'
pale_turquoise   = '#AFEEEE'
aqua_marine      = '#7FFFD4'
#Muted / soft blues
powder_blue      = '#B0E0E6'
cadet_blue       = '#5F9EA0'
steel_blue       = '#4682B4'
corn_flower_blue = '#6495ED'
#Bright & light blues
deep_sky_blue    = '#00BFFF'
dodger_blue      = '#1E90FF'
light_blue       = '#ADD8E6'
sky_blue         = '#87CEEB'
light_sky_blue   = '#87CEFA'
#Dark blues
midnight_blue    = '#191970'
navy             = '#000080'
dark_blue        = '#00008B'
medium_blue      = '#0000CD'
blue             = '#0000FF'
royal_blue       = '#4169E1'
# ### 3.4.3 Parse quasi-static fatigue experimental data into data frame
# In[42]:
#Upload all 'cleaned' experimental data sets for composite coupon fatigue testing
##===============================DEFINE DATA DIRECTORY=============================================
#Data import from local server
#Used so that data files & code are not mixed together + makes it easy to change working
#directory to where data is stored
#Set desired directory path here
desired_dir = r"/Users/delroy_m/Desktop/(CMT) Materials Characterization ELN/2. Cleaned data/Quasi_static_data"
#NOTE(review): os.chdir() returns None, so 'work_dirPath' is always None; the binding is
#kept only to preserve the original notebook flow
work_dirPath = os.chdir(desired_dir) #Set the current directory to the desired working directory path
verify_cwd_path = os.getcwd()
print("CWD: " + verify_cwd_path)
##===============================Import cleaned experiment data======================================
#Collect every .xlsx workbook in the current directory (glob here is non-recursive)
qsf_expt_data = glob.glob('*.xlsx') # Get all files from all subfolders.
qsf_expt_data
#Define DataFrame to store quasi-static fatigue .xlsx experiment files
qsf_df = pd.DataFrame()
#Enter test (lab) environment measurements for completeness of data parsing
T_test = 21.23 #lab temperature at test time (presumably °C - confirm with lab log)
RH_test = 55.7 #lab relative humidity at test time (presumably % - confirm with lab log)
#Pandas 'read_excel' syntax
#pandas.read_excel(io, sheet_name=0, header=0, names=None, index_col=None, parse_cols=None,
# true_values=None, false_values=None, skiprows=None, nrows=None, na_values=None,
# keep_default_na=True, verbose=False, parse_dates=False, date_parser=None,
# thousands=None, comment=None, skip_footer=0, skipfooter=0, convert_float=True,
# mangle_dupe_cols=True, **kwds)
#Parse every quasi-static experiment workbook into the master DataFrame 'qsf_df':
#read each file, label its four measurement columns, tag every row with metadata
#recovered from the underscore-separated filename, and prepend it to 'qsf_df'.
for i, P in enumerate(qsf_expt_data):  #i: counter, P: filename placeholder
    eqsf_df = pd.read_excel(P, header=None)  #read .xlsx experiment data (files carry no header row)
    try:
        #Current file format: force / axial strain / transverse strain / engineering stress
        eqsf_df.columns = ['Force load [N]','ε,y [%]','ε,x [%]', 'σ,qs [MPa]']
    except ValueError:
        #Column-count mismatch -> data in the old format; apply the legacy labels
        eqsf_df.columns = ['Static load [N]','ε,y [%]','ε,x [%]', 'σ,qsf [MPa]']
    #Filename fields are underscore-separated, e.g. ..._UD_0_4_T_....xlsx
    file_info = P.split("_")
    #Coupon tag = the four fields before the last one, e.g. 'UD_0_4_T'
    eqsf_df['Coupon tag'] = "_".join(file_info[-5:-1])
    eqsf_df['Fibre direction'] = file_info[0]
    eqsf_df['Orientation (°)'] = file_info[1]
    eqsf_df['# of plys'] = file_info[2]
    #'T' marks tension-loaded coupons; any other code is treated as compression
    eqsf_df['Loading'] = "Tension" if file_info[3] == "T" else "Compression"
    qsf_df = pd.concat([eqsf_df, qsf_df])
#Label index column as 'Measurement data point'
qsf_df.index.name = 'Data point'
#View entire DataFrame
#qsf_df
#Quick view of head of DataFrame
#qsf_df.head()
#Quick view of tail-end of DataFrame
#qsf_df.tail()
#Explore the full experiment DataFrame interactively with Qgrid: a sortable,
#filterable grid that handles large frames better than Excel.
#Lock every cell read-only so the underlying data cannot be edited by accident
#(Qgrid grids are editable by default).
col_opts = { 'editable': False }
#Per-column overrides (column_definitions) are not needed here - the global
#column_options already locks everything.
qsf_qgrid_df = qgrid.show_grid(qsf_df,
                               column_options = col_opts,
                               show_toolbar = True)
#Last expression in the cell -> the grid widget is rendered inline
qsf_qgrid_df
# ### 3.4.4 Tensile and compression modulus of elasticity (ASTM E111) data import
# In[43]:
##===============================Data import for modulus calcs =====================================
#Set desired directory path here
desired_dir = r"/Users/delroy_m/Desktop/(CMT) Materials Characterization ELN/2. Cleaned data/QS_elastic_mod_data"
#NOTE(review): os.chdir() returns None, so 'work_dirPath' is always None
work_dirPath = os.chdir(desired_dir) #Set the current directory to the desired working directory path
verify_cwd_path = os.getcwd()
print("CWD: " + verify_cwd_path)
##===============================Import cleaned experiment data======================================
#Non-recursive glob of the elastic-modulus summary workbook(s)
qsf_mod_data = glob.glob('*.xlsx') # Get all files from all subfolders.
qsf_mod_data
##===============================Data parsing========================================================
#loop to establish columns for DataFrame
#NOTE(review): 'eqsf_mod_df' is overwritten on every pass, so only the LAST workbook
#survives the loop - presumably this directory holds a single summary file (TODO confirm)
for i, P in enumerate(qsf_mod_data): #i: counter, P: place holder
    #print(P)
    eqsf_mod_df = pd.read_excel(P, header=None) #read .xlsx experiment data
# if i == 0:
    try:
        eqsf_mod_df.columns = ['Coupon tag','Avg. Width [mm]','Avg. Thickness [mm]', 'XS Area [mm^2]', 'σ @ 1000 με [MPa]', 'σ @ 3000 με [MPa]', 'E, chord [MPa]', 'E, chord [GPa]', 'Loading']
    except:
        #print('Data in old format!')
        eqsf_mod_df.columns = ['Static load [N]','ε,y [%]','ε,x [%]', 'σ,qsf [MPa]']
# Use DataFrame.insert() to add a column of composite coupon descriptors
#
#Custom identifiers for coupon specimens
#NOTE(review): this descriptor list assumes the modulus table has exactly six rows in this
#order; insert() will raise if the row count differs - TODO confirm against the workbook
cols=['UD 0°, 4-Ply, Tension', 'UD 90°, 8-Ply, Tension', 'BD ±45°, 8-Ply, Tension', 'UD 45° 8-Ply, Tension',
      'UD 0°, 4-Ply, Compression', 'UD 90°, 8-Ply, Compression']
eqsf_mod_df.insert(0, 'Coupon type', cols, True)
#Label index column as 'Measurement data point'
#qsf_mod_df.index.name = 'Data point'
#View entire DataFrame (last expression in the cell renders it)
eqsf_mod_df
#Quick view of head of DataFrame
#eqsf_mod_df.head()
#Quick view of tail-end of DataFrame
#eqsf_mod_df.tail()
#Create Qgrid query DataFrame to enable me to explore the entire contents of a DataFrame
#using intuitive sorting and filtering controls (and DataFrame won't crash like Excel!)
#
#Qgrid allows editing of the data - let's lock it so users can't accidently change the data
#col_opts = { 'editable': False }
#
#col_defs = { 'Fibre-type': { 'editable': False },'Test speed [mm/min]': { 'editable': False },
# 'T (°C)': { 'editable': False }, 'RH (%)': { 'editable': False } }
#qsf_mod_df_qgrid_df = qgrid.show_grid(eqsf_mod_df, column_options = col_opts, show_toolbar = True) #, column_definitions=col_defs)
#qsf_mod_df_qgrid_df
# ## 3.5 Data reduction results - Data visualization
# ### 3.5.1 Tensile modulus of elasticity for quasi-static fatigue tests
# In[44]:
##=============================== Quasi-static fatigue tests =====================================
# Elastic moduli plotting for
# carbon fibre/epoxy composites
#=================================================================================================
#Custom identifiers for coupon specimens
cols = ['UD 0° Tension', 'UD 90° Tension', 'BD ±45° Tension', 'UD 45° Tension',
'UD 0° Compression', 'UD 90° Compression']
Tcols = ['UD 0° Tension', 'UD 90° Tension', 'BD ±45° Tension', 'UD 45° Tension']
Ccols = ['UD 0° Compression', 'UD 90° Compression']
#Plot titles
plt_title = 'Moduli of elasticity for carbon fibre/epoxy composites'
y_axis_title = 'Modulus of elasticity [MPa]'
#Filter the elastic modulus DataFrame for 'Tensile' loading experiments
eqsf_Tmod_df = eqsf_mod_df[eqsf_mod_df['Loading'] == 'Tension']
#Filter the tensile modulus DataFrame for 'Compression' loading experiments
eqsf_Cmod_df = eqsf_mod_df[eqsf_mod_df['Loading'] == 'Compression']
#Create data structures for plotting calculated tensile and compressive elastic moduli values
#NOTE: THIS CODE DOES NOT RUN, SEEMS TO BE A PROBLEM PASSING VARIABLE SIZED DATA STRUCTURES
# TO PLOTLY 'GO.FIGURE' FUNCTION
data1 = [go.Bar(x = Tcols,
y = eqsf_Tmod_df['E, chord [MPa]'],
name = 'Tensile test'
)]
data2 = [go.Bar(x = Ccols,
y = eqsf_Cmod_df['E, chord [MPa]'],
name = 'Compression test'
)]
#Create framework for bar plot
#mod_data = [data1, data2]
mod_data = [go.Bar(x = cols,
y = eqsf_mod_df['E, chord [MPa]'])]
#Font packages for plotting
font_pkg0=dict(family='Optima', size=22, color='black')
font_pkg1=dict(family='Optima', size=16, color='black')
font_pkg2=dict(family='Optima', size=12, color='black')
#Plot figure layout
layout = go.Layout(title='Elastic moduli of carbon fibre/epoxy test coupons',
font=font_pkg0,
hovermode='closest',
xaxis=dict(
title='Test coupons',
titlefont=dict(font_pkg1),
showticklabels=True,
tickangle=25,
tickfont=dict(font_pkg2),
),
yaxis=dict(
title='Calculated modulus [MPa]',
titlefont=dict(font_pkg1),
showticklabels=True,
tickangle=0,
tickfont=dict(font_pkg2),
type='log',
autorange=False,
range=[0.0001,5.5],
showexponent = 'all', exponentformat = 'power'
)
)
#Render plot
fig = go.Figure(data=mod_data, layout=layout)
py.offline.iplot(fig, filename='qsf_elastic_modulus')
# ### 3.5.2 Quasi-static fatigue experiments - Force & strain measurement statistics data import
# In[45]:
##===============================Data import for STAT calcs import ==================================
#Set desired directory path here
desired_dir = r"/Users/delroy_m/Desktop/(CMT) Materials Characterization ELN/2. Cleaned data/STAT_data"
#NOTE(review): os.chdir() returns None, so 'work_dirPath' is always None
work_dirPath = os.chdir(desired_dir) #Set the current directory to the desired working directory path
verify_cwd_path = os.getcwd()
print("CWD: " + verify_cwd_path)
##===============================Import cleaned experiment data======================================
#Non-recursive glob of the statistics summary workbook(s)
qsf_STAT_data = glob.glob('*.xlsx') # Get all files from all subfolders.
qsf_STAT_data
##===============================Data parsing========================================================
#loop to establish columns for DataFrame
#NOTE(review): as with the modulus import above, only the LAST workbook survives this loop -
#presumably the STAT directory holds a single file (TODO confirm)
for i, P in enumerate(qsf_STAT_data): #i: counter, P: place holder
    #print(P)
    eqsf_STAT_df = pd.read_excel(P, header=None) #read .xlsx experiment data
# if i == 0:
    try:
        eqsf_STAT_df.columns = ['Coupon tag','μ ± 3σ','Intervals', 'Force data', 'φ(x) - F', 'Strain y-data', 'φ(x) - ε,y', 'Strain x-data', 'φ(x) - ε,x']
    except:
        #print('Data in old format!')
        eqsf_STAT_df.columns = ['Static load [N]','ε,y [%]','ε,x [%]', 'σ,qsf [MPa]']
#View entire DataFrame (last expression in the cell renders it)
eqsf_STAT_df
#Quick view of head of DataFrame
#eqsf_STAT_df.head()
#Quick view of tail-end of DataFrame
#eqsf_STAT_df.tail()
#Create Qgrid query DataFrame to enable me to explore the entire contents of a DataFrame
#using intuitive sorting and filtering controls (and DataFrame won't crash like Excel!)
##
#eqsf_STAT_df_qgrid_df = qgrid.show_grid(eqsf_STAT_df, column_options = col_opts, show_toolbar = True)
# ## 3.5.3 σ-ε quasi-static fatigue analyses
# ### 3.5.3.1 - UD 0°, 4-Ply, Tension: σ-ε plot
# In[46]:
##====================Create sub-DataFrame for UD 0° CFRE Tension QSF experiment=========================
#Slice UD 0° CFRE - 4 Ply - Tension loading experiment data from DataFrame
UD_0_4_T_df = qsf_df[qsf_df['Coupon tag'] == 'UD_0_4_T']
#Import experimental/calculated data values
Force = UD_0_4_T_df['Force load [N]']
Stress = UD_0_4_T_df['σ,qs [MPa]']
Strain = UD_0_4_T_df['ε,y [%]']
LinearLimit = 1
#NOTE: experimental data is imported as pandas DataFrame; force, stress and strain data are parsed
#      as pandas.Series arrays. These need to be converted to 'list' type data to be plotted with Matplotlib
plt_Force = Force.tolist()
plt_Stress = Stress.tolist()
plt_Strain = Strain.tolist()
#True Stress calculation: σ_true = σ_eng * (1 + ε)
#NOTE(review): 'Strain' is labelled [%]; if the stored values really are percentages this
#presumably needs (1 + ε/100) - TODO confirm the stored units
Stress_True = [ x * (1+y) for y,x in zip(Strain,Stress)]
#True Strain calculation: ε_true = ln(1 + ε)
#NOTE(review): 'math' is not imported in this chunk - presumably imported in an earlier cell
Strain_True = [math.log(1+x) for x in Strain]
##-----------------+++++++++++++++++++++++-----------------+++++++++++++++++++++++----------------
#Composite durability calculations from expt. data
#---------------------------------------------------------------------------------
#Extract tensile modulus of elasticity from elastic modulus calculation DataFrame
### in MPa
Emod_UD_0_4_T_MPa = eqsf_mod_df.loc[eqsf_mod_df['Coupon tag'] == 'UD_0_4_T']['E, chord [MPa]'].values[0]
### in GPa
Emod_UD_0_4_T_GPa = eqsf_mod_df.loc[eqsf_mod_df['Coupon tag'] == 'UD_0_4_T']['E, chord [GPa]'].values[0]
#UTS - Ultimate tensile strength (X,t) = peak engineering stress
UTS = max(Stress)
#Ultimate in-plane shear strength (S)
# S =
#E,11 calculation:
#Call plotly 'Scattergl' function and assign plot data
σvsε_UD04T = go.Scattergl(x = Strain, y = Stress, mode = 'markers',
                          marker = dict(
                              line = dict(
                                  width = 0.5, color = '#1E90FF'),size=2
                          )
                          )
#Assign to 'data' variable for plot initialization
σvsε_UD04T_data = [σvsε_UD04T]
#Define axes title fonts
#
#Type-faces I prefer (all sans-serif): PT Sans, SF Mono, Frutiger, Amplitude, Antique Olive, Avenir, Eurostile
# Optima,
#Font packages for plotting (title / axis-title / tick-label sizes)
font_pkg0=dict(family='Optima', size=22, color='black')
font_pkg1=dict(family='Optima', size=16, color='black')
font_pkg2=dict(family='Optima', size=12, color='black')
#Include quasi-fatigue properties of composite coupon
#
#Modulus
eqsf_Tmod_df = eqsf_mod_df[eqsf_mod_df['Loading'] == 'Tension']
E_mod_mpa = "Emod = " + "%.3f" % Emod_UD_0_4_T_MPa + " MPa\n"
E_mod_gpa = "Emod = " + "%.3f" % Emod_UD_0_4_T_GPa + " GPa\n"
#NOTE(review): this rebinds UTS from a float (max stress) to a display string - any later
#numeric use of UTS would break; consider a separate name for the label
UTS = "UTS = " + "%.3f" % UTS + " MPa\n"
plc_hldr_title = 'σ vs. ε - UD 0° 4-Ply Coupon, tensile loaded (in-fibre direction)'
plc_hldr_title_2 = '&sigma vs. &epsilon - UD 0° 4-Ply Coupon, tensile loaded (in-fibre direction)'
#Plot layout: note yaxis 'range' is ignored here because autorange=True
def_layout = go.Layout(title=plc_hldr_title,
                       titlefont=font_pkg0,
                       yaxis=dict(title='Stress [MPa]', autorange=True, showgrid=True, titlefont=font_pkg1,
                                  tickfont=font_pkg2, range=[0, len(Stress)], zeroline=False,
                                  ticks='outside', showline=True, tickwidth=2, rangemode='tozero',
                                  showexponent = 'all', exponentformat = 'power'),
                       xaxis=dict(title='Measured Strain', showgrid=True, zeroline=False, titlefont=font_pkg1,
                                  tickfont=font_pkg2, ticks='outside', showline=True, tickwidth=2,
                                  rangemode='tozero')
                       )
fig = go.Figure(data=σvsε_UD04T_data, layout=def_layout)
#py.offline.iplot(fig, filename='σvsε_UD04T_data')
pio.show(fig, filename='σvsε_UD04T_data')
# #Save images in PDF or vector file format for publications (LaTeX)
# #
im_Path = Image_PATH
# #.SVG
pio.write_image(fig, im_Path + 'UD04T_stress_strain.svg')
# #.PDF
pio.write_image(fig, im_Path + 'UD04T_stress_strain.pdf')
# #.EPS
pio.write_image(fig, im_Path + 'UD04T_stress_strain.eps')
# #.jpeg
pio.write_image(fig, im_Path + 'UD04T_stress_strain.jpg')
# ##### UD 0° 4-Ply Coupon - Tension-loaded: Stress vs. Strain test summary
#
# * No. of coupons tested: 3
# * Tensile modulus of elasticity ($E_{t}$, *ASTM D3039*): 113.088 GPa
# * Test speed: 1 $\frac{mm}{min}$
# * Failure mode (*ASTM D3039*): GAT
# 
# ### 3.5.3.2 - UD 0°, 4-Ply, Tension Coupon - Experiment statistical analyses
# In[47]:
##=============================== UD 0° 4-Ply Tension-loaded coupon =====================================
# Statistical analyses of experiment data
#
# This is a custom STAT graphics routine. The SciPy package (namely .distplot(), interfaced with plotly)
# seems to poorly handle statistical probability distributions of the σ/ε quasi-fatigue experimental
# measurement data.
#========================================================================================================
#Define composite coupon tag I.D.
c_tag = 'UD_0_4_T'
#Slice coupon tag data from experiment statistics DataFrame
df = eqsf_STAT_df[eqsf_STAT_df['Coupon tag'] == c_tag]
#Call experimental DataFrame to compute mean & st. dev values
#Slice UD 0° CFRE - 4 Ply - Tension loading experiment data from DataFrame
exp_df = qsf_df[qsf_df['Coupon tag'] == c_tag]
#Tensile force measurements STATS
F_stat = df['Force data'] #force statistic data
F_prob = df['φ(x) - F'] #force statistic probability computation
intval1 = df['μ ± 3σ'] #abscissa values for plot
intval2 = df['Intervals'] #abscissa values for plot
F_mean = exp_df['Force load [N]'].mean() #mean of force measurement values
F_std = exp_df['Force load [N]'].std() #st. dev of force measurement values
#Tensile strain measurements STATS
ε_stat = df['Strain y-data'] #strain statistic data
ε_prob = df['φ(x) - ε,y'] #strain statistic probability computation
ε_mean = exp_df['ε,y [%]'].mean() #mean of strain measurement values
ε_std = exp_df['ε,y [%]'].std() #st. dev of strain measurement values
#Standardize data colour
data1_paint = '#1E90FF' #dodger blue
data2_paint = '#00CED1' #dark turquoise
color_map1 = [data1_paint, data2_paint]
color_map2 = [data2_paint, data1_paint]
#Call plotly 'Scattergl' function and plot Force statistical data
F_prob_plot = go.Scattergl(x = intval2, y = F_prob, mode = 'markers',
                           marker = dict(
                               line = dict(
                                   width = 0.5, color = data1_paint),
                               size=5
                           )
                           )
#Call plotly 'Scattergl' function and plot Strain statistical data
ε_prob_plot = go.Scattergl(x = intval2, y = ε_prob, mode = 'markers',
                           marker = dict(
                               line = dict(
                                   width = 0.5, color = data2_paint),
                               size=5
                           )
                           )
#Assign to 'data' variable for plot initialization
F_prob_plt_data = [F_prob_plot]
ε_prob_plt_data = [ε_prob_plot]
prob_plot_data = [F_prob_plot, ε_prob_plot]
#Define axes title fonts
#
#Type-faces I prefer (all sans-serif): PT Sans, SF Mono, Frutiger, Amplitude, Antique Olive, Avenir, Eurostile
# Optima,
#Font packages for plotting
font_pkg0=dict(family='Optima', size=22, color='black')
font_pkg1=dict(family='Optima', size=16, color='black')
font_pkg2=dict(family='Optima', size=12, color='black')
#Describe plot layout
#NOTE(review): yaxis 'range' reuses len(Stress) from the previous cell but is ignored here
#because autorange=True
def_layout_F = go.Layout(title='UD 0° 4-Ply Coupon - Tensile force measurement statistical probability distribution',
                         titlefont=font_pkg0,
                         yaxis=dict(title='Probability [φ(x)]', autorange=True, showgrid=True, titlefont=font_pkg1,
                                    tickfont=font_pkg2, range=[0, len(Stress)], zeroline=False,
                                    ticks='outside', showline=True, tickwidth=2, rangemode='tozero',
                                    showexponent = 'all', exponentformat = 'power'),
                         xaxis=dict(title='Random variable', showgrid=True, zeroline=True, titlefont=font_pkg1,
                                    tickfont=font_pkg2, ticks='outside', showline=True, tickwidth=2,
                                    rangemode='tozero')
                         )
def_layout_ε = go.Layout(title='UD 0° 4-Ply Coupon - Strain measurement statistical probability distribution',
                         titlefont=font_pkg0,
                         yaxis=dict(title='Probability [φ(x)]', autorange=True, showgrid=True, titlefont=font_pkg1,
                                    tickfont=font_pkg2, range=[0, len(Stress)], zeroline=False,
                                    ticks='outside', showline=True, tickwidth=2, rangemode='tozero',
                                    showexponent = 'all', exponentformat = 'power'),
                         xaxis=dict(title='Random variable', showgrid=True, zeroline=True, titlefont=font_pkg1,
                                    tickfont=font_pkg2, ticks='outside', showline=True, tickwidth=2,
                                    rangemode='tozero')
                         )
# anchored_text = AnchoredText("Force measurement mean = " +"%.3f" % F_mean + " N\n" +
#                              "Force measurement st. dev = " +"%.3f" % F_std +
#                              "Strain measurement mean = " +"%.3f" % ε_mean +
#                              "Strain measurement st. dev = " +"%.3f" % ε_std
# ax.add_artist(anchored_text)
#Force probability distribution
fig_F = go.Figure(data=F_prob_plt_data, layout=def_layout_F)
#py.offline.iplot(fig_F, filename='UD04T_F_prob_data')
pio.show(fig_F, filename='UD04T_F_prob_distn')
#Write .svg and .eps plot images to repo
pio.write_image(fig_F, im_Path + 'UD04T_F_prob_distn.svg')
pio.write_image(fig_F, im_Path + 'UD04T_F_prob_distn.eps')
fig_ε = go.Figure(data=ε_prob_plt_data, layout=def_layout_ε)
#py.offline.iplot(fig_ε, filename='UD04T_ε_prob_data')
pio.show(fig_ε, filename='UD04T_ε_prob_distn')
#Write .svg and .eps plot images to repo
pio.write_image(fig_ε, im_Path + 'UD04T_ε_prob_distn.svg')
pio.write_image(fig_ε, im_Path + 'UD04T_ε_prob_distn.eps')
# ##### UD 0° 4-Ply Coupon - Tension-loaded: Experiment statistics summary
# 
#
# 
# ### 3.5.3.3 - UD 90°, 8-Ply, Tension: σ-ε plot
# In[48]:
##====================Create sub-DataFrame for UD 90° CFRE Tension QSF experiment=========================
#Slice CFRE coupon quasi-fatigue loading experiment data from DataFrame
UD_90_8_T_df = qsf_df[qsf_df['Coupon tag'] == 'UD_90_8_T']
#Define DataFrame I.D. tag
df = UD_90_8_T_df
#Define coupon I.D. tag
c_tag = 'UD_90_8_T'
#Import experimental/calculated data values
Force = df['Force load [N]']
Stress = df['σ,qs [MPa]']
Strain = df['ε,y [%]']
LinearLimit = 1
#NOTE: experimental data is imported as pandas DataFrame; force, stress and strain data are parsed
#      as pandas.Series arrays. These need to be converted to 'list' type data to be plotted with Matplotlib
plt_Force = Force.tolist()
plt_Stress = Stress.tolist()
plt_Strain = Strain.tolist()
#df_list = df['STYNAME'].tolist()
#True Stress calculation: σ_true = σ_eng * (1 + ε)
#NOTE(review): as in 3.5.3.1, strain is labelled [%] - confirm whether (1 + ε/100) is intended
Stress_True = [ x * (1+y) for y,x in zip(Strain,Stress)]
#True Strain calculation: ε_true = ln(1 + ε)
Strain_True = [math.log(1+x) for x in Strain]
#Composite strength characteristics
##
#Extract tensile modulus of elasticity from elastic modulus calculation DataFrame
##
### in MPa
Emod_MPa = eqsf_mod_df.loc[eqsf_mod_df['Coupon tag'] == c_tag]['E, chord [MPa]'].values[0]
### in GPa
Emod_GPa = eqsf_mod_df.loc[eqsf_mod_df['Coupon tag'] == c_tag]['E, chord [GPa]'].values[0]
##
#UTS - Ultimate tensile strength (X,t) = peak engineering stress
UTS = max(Stress)
##
#Ultimate in-plane shear strength (S)
# S =
#Call plotly 'Scattergl' function and assign plot data
σvsε = go.Scattergl(x = Strain, y = Stress, mode = 'markers',
                    marker = dict(
                        line = dict(
                            width = 0.5, color = '#1E90FF'),size=2
                    )
                    )
#Assign to 'data' variable for plot initialization
σvsε_data = [σvsε]
#Define axes title fonts
#
#Type-faces I prefer (all sans-serif): PT Sans, SF Mono, Frutiger, Amplitude, Antique Olive, Avenir, Eurostile
# Optima,
#Font packages for plotting
font_pkg0=dict(family='Optima', size=22, color='black')
font_pkg1=dict(family='Optima', size=16, color='black')
font_pkg2=dict(family='Optima', size=12, color='black')
#Describe plot layout (yaxis 'range' is ignored because autorange=True)
def_layout = go.Layout(title='σ vs. ε - UD 90° 8-Ply Coupon, tensile loaded (⟂ to fibre direction)',
                       titlefont=font_pkg0,
                       yaxis=dict(title='Stress [MPa]', autorange=True, showgrid=True, titlefont=font_pkg1,
                                  tickfont=font_pkg2, range=[0, len(Stress)], zeroline=False,
                                  ticks='outside', showline=True, tickwidth=2, rangemode='tozero',
                                  showexponent = 'all', exponentformat = 'power'),
                       xaxis=dict(title='Measured Strain', showgrid=True, zeroline=False, titlefont=font_pkg1,
                                  tickfont=font_pkg2, ticks='outside', showline=True, tickwidth=2,
                                  rangemode='tozero')
                       )
#Include quasi-fatigue properties of composite coupon
#
#Modulus
eqsf_Tmod_df = eqsf_mod_df[eqsf_mod_df['Loading'] == 'Tension']
# anchored_text = AnchoredText("Tensile Modulus of Elasticity = " +"%.3f" % Emod_GPa + " GPa\n" +
#                              "UTS = "+ "%.3f" % UTS +" MPa\n"+
#                              "Failure Stress = " + "%.5f" % failure_stress +" MPa\n"+
#                              "Max Strain = "+ "%.5f" % Strain[8], loc='right')
# ax.add_artist(anchored_text)
fig = go.Figure(data=σvsε_data, layout=def_layout)
#py.offline.iplot(fig, filename='σvsε_UD908T_data')
pio.show(fig, filename='σvsε_UD908T_data')
# #Save images in PDF or vector file format for publications (LaTeX)
# #
im_Path = Image_PATH
# #.SVG
pio.write_image(fig, im_Path + 'UD908T_stress_strain.svg')
# #.PDF
#pio.write_image(fig, im_Path + 'UD908T_stress_strain.pdf')
# #.EPS
pio.write_image(fig, im_Path + 'UD908T_stress_strain.eps')
# #.jpeg
#pio.write_image(fig, im_Path + 'UD908T_stress_strain.jpg')
# ##### UD 90° 8-Ply Coupon - Tension-loaded: Stress vs. Strain test summary
#
# * No. of coupons tested: 3
# * Tensile modulus of elasticity ($E_{t}$, *ASTM D3039*): 3.074 GPa
# * Test speed: 1 $\frac{mm}{min}$
# * Failure mode (*ASTM D3039*): LIT
# 
# ### 3.5.3.4 - UD 90°, 8-Ply, Tension Coupon - Experiment statistical analyses
# In[49]:
##=============================== UD 90° 8-Ply Tension-loaded coupon =====================================
# Statistical analyses of experiment data
#
# This is a custom STAT graphics routine. The SciPy package (namely .distplot(), interfaced with plotly)
# seems to poorly handle statistical probability distributions of the σ/ε quasi-fatigue experimental
# measurement data.
#
#NOTE(review): this cell duplicates the 3.5.3.2 routine with only the coupon tag and titles
#changed - a shared plotting helper parameterized on c_tag would remove the duplication
#========================================================================================================
#Define composite coupon tag I.D.
c_tag = 'UD_90_8_T'
#Slice coupon tag data from experiment statistics DataFrame
df = eqsf_STAT_df[eqsf_STAT_df['Coupon tag'] == c_tag]
#Call experimental DataFrame to compute mean & st. dev values
#Slice UD 90° CFRE - 8 Ply - Tension loading experiment data from DataFrame
exp_df = qsf_df[qsf_df['Coupon tag'] == c_tag]
#Tensile force measurements STATS
F_stat = df['Force data'] #force statistic data
F_prob = df['φ(x) - F'] #force statistic probability computation
intval1 = df['μ ± 3σ'] #abscissa values for plot
intval2 = df['Intervals'] #abscissa values for plot
F_mean = exp_df['Force load [N]'].mean() #mean of force measurement values
F_std = exp_df['Force load [N]'].std() #st. dev of force measurement values
#Tensile strain measurements STATS
ε_stat = df['Strain y-data'] #strain statistic data
ε_prob = df['φ(x) - ε,y'] #strain statistic probability computation
ε_mean = exp_df['ε,y [%]'].mean() #mean of strain measurement values
ε_std = exp_df['ε,y [%]'].std() #st. dev of strain measurement values
#Standardize data colour
data1_paint = '#1E90FF' #dodger blue
data2_paint = '#00CED1' #dark turquoise
color_map1 = [data1_paint, data2_paint]
color_map2 = [data2_paint, data1_paint]
#Call plotly 'Scattergl' function and plot Force statistical data
F_prob_plot = go.Scattergl(x = intval2, y = F_prob, mode = 'markers',
                           marker = dict(
                               line = dict(
                                   width = 0.5, color = data1_paint),
                               size=5
                           )
                           )
#Call plotly 'Scattergl' function and plot Strain statistical data
ε_prob_plot = go.Scattergl(x = intval2, y = ε_prob, mode = 'markers',
                           marker = dict(
                               line = dict(
                                   width = 0.5, color = data2_paint),
                               size=5
                           )
                           )
#Assign to 'data' variable for plot initialization
F_prob_plt_data = [F_prob_plot]
ε_prob_plt_data = [ε_prob_plot]
prob_plot_data = [F_prob_plot, ε_prob_plot]
#Define axes title fonts
#
#Type-faces I prefer (all sans-serif): PT Sans, SF Mono, Frutiger, Amplitude, Antique Olive, Avenir, Eurostile
# Optima,
#Font packages for plotting
font_pkg0=dict(family='Optima', size=22, color='black')
font_pkg1=dict(family='Optima', size=16, color='black')
font_pkg2=dict(family='Optima', size=12, color='black')
#Describe plot layout (yaxis 'range' reuses len(Stress) but is ignored: autorange=True)
def_layout_F = go.Layout(title='UD 90° 8-Ply Coupon - Tensile force measurement statistical probability distribution',
                         titlefont=font_pkg0,
                         yaxis=dict(title='Probability [φ(x)]', autorange=True, showgrid=True, titlefont=font_pkg1,
                                    tickfont=font_pkg2, range=[0, len(Stress)], zeroline=False,
                                    ticks='outside', showline=True, tickwidth=2, rangemode='tozero',
                                    showexponent = 'all', exponentformat = 'power'),
                         xaxis=dict(title='Random variable', showgrid=True, zeroline=True, titlefont=font_pkg1,
                                    tickfont=font_pkg2, ticks='outside', showline=True, tickwidth=2,
                                    rangemode='tozero')
                         )
def_layout_ε = go.Layout(title='UD 90° 8-Ply Coupon - Strain measurement statistical probability distribution',
                         titlefont=font_pkg0,
                         yaxis=dict(title='Probability [φ(x)]', autorange=True, showgrid=True, titlefont=font_pkg1,
                                    tickfont=font_pkg2, range=[0, len(Stress)], zeroline=False,
                                    ticks='outside', showline=True, tickwidth=2, rangemode='tozero',
                                    showexponent = 'all', exponentformat = 'power'),
                         xaxis=dict(title='Random variable', showgrid=True, zeroline=True, titlefont=font_pkg1,
                                    tickfont=font_pkg2, ticks='outside', showline=True, tickwidth=2,
                                    rangemode='tozero')
                         )
# anchored_text = AnchoredText("Force measurement mean = " +"%.3f" % F_mean + " N\n" +
#                              "Force measurement st. dev = " +"%.3f" % F_std +
#                              "Strain measurement mean = " +"%.3f" % ε_mean +
#                              "Strain measurement st. dev = " +"%.3f" % ε_std
# ax.add_artist(anchored_text)
fig_F = go.Figure(data=F_prob_plt_data, layout=def_layout_F)
#py.offline.iplot(fig_F, filename='UD908T_F_prob_data')
pio.show(fig_F, filename='UD908T_F_prob_distn')
#Write .svg and .eps plot images to repo
pio.write_image(fig_F, im_Path + 'UD908T_F_prob_distn.svg')
pio.write_image(fig_F, im_Path + 'UD908T_F_prob_distn.eps')
fig_ε = go.Figure(data=ε_prob_plt_data, layout=def_layout_ε)
#py.offline.iplot(fig_ε, filename='UD908T_ε_prob_data')
pio.show(fig_ε, filename='UD908T_ε_prob_distn')
#Write .svg and .eps plot images to repo
pio.write_image(fig_ε, im_Path + 'UD908T_ε_prob_distn.svg')
pio.write_image(fig_ε, im_Path + 'UD908T_ε_prob_distn.eps')
# ##### UD 90° 8-Ply Coupon - Tension-loaded: Experiment statistics summary
# 
#
# 
# ### 3.5.3.5 - ±45°, 8-Ply, Tension: σ-ε plot
# In[50]:
##====================Create sub-DataFrame for BD ±45° CFRE Tension QSF experiment=========================
#Slice CFRE coupon quasi-fatigue loading experiment data from DataFrame
BD_pm45_8_T_df = qsf_df[qsf_df['Coupon tag'] == 'BD_±45_8_T']
#Define DataFrame I.D. tag
df = BD_pm45_8_T_df
#Define coupon I.D. tag
c_tag = 'BD_±45_8_T' #NOTE: the '±' symbol in the tag was interrupting the code compile; 'c_tag_alt' below is the readable alternative
#Added 'c_tag_alt' as alternative coupon I.D. term
c_tag_alt = 'BD ±45°, 8-Ply, Tension'
#Import experimental/calculated data values
Force = df['Force load [N]']
Stress = df['σ,qs [MPa]']
Strain_y = df['ε,y [%]'] #Strain in 1-direction (y-axis reference - in fibre direction)
Strain_x = df['ε,x [%]'] #Strain in 2-direction (x-axis reference - perpendicular to fibre direction)
LinearLimit = 1
#NOTE: experimental data is imported as pandas DataFrame; force, stress and strain data are parsed
# as pandas.Series arrays. These need to be converted to 'list' type data to be plotted with Matplotlib
plt_Force = Force.tolist()
plt_Stress = Stress.tolist()
plt_Strain_y = Strain_y.tolist()
plt_Strain_x = Strain_x.tolist()
#df_list = df['STYNAME'].tolist()
#True Stress calculation
#NOTE(review): strain columns are labelled [%]; σ_true = σ(1+ε) expects
# dimensionless strain — verify whether a /100 conversion is needed here.
Stress_y_True = [ x * (1+y) for y,x in zip(Strain_y,Stress)]
Stress_x_True = [ x * (1+y) for y,x in zip(Strain_x,Stress)]
#True Strain calculation
Strain_y_True = [math.log(1+x) for x in Strain_y]
Strain_x_True = [math.log(1+x) for x in Strain_x]
#Composite strength characteristics
##
#Extract tensile modulus of elasticity from elastic modulus calculation DataFrame
##
### in MPa
Emod_MPa = eqsf_mod_df.loc[eqsf_mod_df['Coupon tag'] == c_tag]['E, chord [MPa]'].values[0]
#Emod_MPa = eqsf_mod_df.loc[eqsf_mod_df['Coupon type'] == c_tag]['E, chord [MPa]'].values[0]
### in GPa
Emod_GPa = eqsf_mod_df.loc[eqsf_mod_df['Coupon tag'] == c_tag]['E, chord [GPa]'].values[0]
##
#UTS - Ultimate tensile strength (X,t)
UTS = max(Stress)
##
#Ultimate in-plane shear strength (S)
# S =
#Call plotly 'Scattergl' function and assign plot data
##
#Plot stress-strain response in 1-direction (y-direction)
σvsε_y = go.Scattergl(x = Strain_y, y = Stress, mode = 'markers',
marker = dict(
line = dict(
width = 0.5, color = '#1E90FF'),size=2
)
)
#Plot stress-strain response in 2-direction (x-direction)
σvsε_x = go.Scattergl(x = Strain_x, y = Stress, mode = 'markers',
marker = dict(
line = dict(
width = 0.5, color = corn_flower_blue),size=2
)
)
#Assign to 'data' variable for plot initialization
σvsε_y_data = σvsε_y
σvsε_x_data = σvsε_x
plt_data_y = [σvsε_y] #plot y-direction tensile σvsε
plt_data_x = [σvsε_x] #plot x-direction tensile σvsε
plt_data = [σvsε_y_data, σvsε_x_data] #plot in-fibre and perpendicular-to-fibre direction tensile σvsε
#Define axes title fonts
#
#Type-faces I prefer (all sans-serif): PT Sans, SF Mono, Frutiger, Amplitude, Antique Olive, Avenir, Eurostile
# Optima,
#Font packages for plotting
font_pkg0=dict(family='Optima', size=22, color='black')
font_pkg1=dict(family='Optima', size=16, color='black')
font_pkg2=dict(family='Optima', size=12, color='black')
# # #Define y-axis tickvals
# # #y_tick_vals = [ 0.7, 1, 5, 10, 30]
# # #tickvals=y_tick_vals
#Describe plot layout for y-direction stress-strain response
def_layout_y = go.Layout(title='σ vs. ε - BD ±45° 8-Ply Coupon, y-direction tensile loading',
titlefont=font_pkg0,
yaxis=dict(title='Stress [MPa]', autorange=True, showgrid=True, titlefont=font_pkg1,
tickfont=font_pkg2, range=[0, len(Stress)], zeroline=False,
ticks='outside', showline=True, tickwidth=2, rangemode='tozero',
showexponent = 'all', exponentformat = 'power'),
xaxis=dict(title='Measured Strain', showgrid=True, zeroline=False, titlefont=font_pkg1,
tickfont=font_pkg2, ticks='outside', showline=True, tickwidth=2,
rangemode='tozero')
)
#Describe plot layout for x-direction stress-strain response
def_layout_x = go.Layout(title='σ vs. ε - BD ±45° 8-Ply Coupon, x-direction tensile loading',
titlefont=font_pkg0,
yaxis=dict(title='Stress [MPa]', autorange=True, showgrid=True, titlefont=font_pkg1,
tickfont=font_pkg2, range=[0, len(Stress)], zeroline=False,
ticks='outside', showline=True, tickwidth=2, rangemode='tozero',
showexponent = 'all', exponentformat = 'power'),
xaxis=dict(title='Measured Strain', showgrid=True, zeroline=False, titlefont=font_pkg1,
tickfont=font_pkg2, ticks='outside', showline=True, tickwidth=2,
rangemode='tozero')
)
#Include quasi-fatigue properties of composite coupon
#
#Modulus
eqsf_Tmod_df = eqsf_mod_df[eqsf_mod_df['Loading'] == 'Tension']
# anchored_text = AnchoredText("Tensile Modulus of Elasticity = " +"%.3f" % Emod_GPa + " GPa\n" +
# "UTS = "+ "%.3f" % UTS +" MPa\n"+
# "Failure Stress = " + "%.5f" % failure_stress +" MPa\n"+
# "Max Strain = "+ "%.5f" % Strain[8], loc='right')
# ax.add_artist(anchored_text)
#Render one figure per loading direction
fig_y = go.Figure(data=plt_data_y, layout=def_layout_y)
#py.offline.iplot(fig_y, filename='σvsε_UD±458T_y_data')
pio.show(fig_y, filename='σvsε_UD±458T_y_data')
fig_x = go.Figure(data=plt_data_x, layout=def_layout_x)
#py.offline.iplot(fig_x, filename='σvsε_UD±458T_x_data')
pio.show(fig_x, filename='σvsε_UD±458T_x_data')
# #Save images in PDF or vector file format for publications (LaTeX)
# #
im_Path = Image_PATH
#BUGFIX: this cell builds 'fig_y' and 'fig_x', but the original export calls
#saved the stale 'fig' object left over from a previous cell, so the ±45°
#tension plots were never written out. Save both direction figures under
#distinct file names instead.
# #.SVG
pio.write_image(fig_y, im_Path + 'UD±458T_y_stress_strain.svg')
pio.write_image(fig_x, im_Path + 'UD±458T_x_stress_strain.svg')
# #.PDF
#pio.write_image(fig_y, im_Path + 'UD±458T_y_stress_strain.pdf')
#pio.write_image(fig_x, im_Path + 'UD±458T_x_stress_strain.pdf')
# #.EPS
pio.write_image(fig_y, im_Path + 'UD±458T_y_stress_strain.eps')
pio.write_image(fig_x, im_Path + 'UD±458T_x_stress_strain.eps')
# #.jpeg
#pio.write_image(fig_y, im_Path + 'UD±458T_y_stress_strain.jpg')
#pio.write_image(fig_x, im_Path + 'UD±458T_x_stress_strain.jpg')
# ##### ±45° 8-Ply Coupon - Tension-loaded: Stress vs. Strain test summary
#
# * No. of coupons tested: 3
# * Tensile modulus of elasticity ($E_{t}$, *ASTM D3039*): 12.467 GPa
# * Test speed: 1 $\frac{mm}{min}$
# * Failure mode (*ASTM D3039*): LAT
# 
# ### 3.5.3.5 - ±45°, 8-Ply, Tension Coupon - Experiment statistical analyses
# In[51]:
##=============================== ±45° 8-Ply Tension-loaded coupon =====================================
# Statistical analyses of experiment data
#
# This is a custom STAT graphics routine. The SciPy package (namely .distplot(), interfaced with plotly)
# seems to poorly handle statistical probability distributions of the σ/ε quasi-fatigue experimental
# measurement data.
#========================================================================================================
#Define composite coupon tag I.D.
c_tag = 'BD_±45_8_T'
#Slice coupon tag data from experiment statistics DataFrame
df = eqsf_STAT_df[eqsf_STAT_df['Coupon tag'] == c_tag]
#Call experimental DataFrame to compute mean & st. dev values
#Slice BD ±45° CFRE - 8 Ply - Tension loading experiment data from DataFrame
exp_df = qsf_df[qsf_df['Coupon tag'] == c_tag]
#Tensile force measurements STATS
F_stat = df['Force data'] #force statistic data
F_prob = df['φ(x) - F'] #force statistic probability computation
intval1 = df['μ ± 3σ'] #abscissa values for plot
intval2 = df['Intervals'] #abscissa values for plot
F_mean = exp_df['Force load [N]'].mean() #mean of force measurement values
F_std = exp_df['Force load [N]'].std() #st. dev of force measurement values
#Tensile strain measurements STATS - y-direction (1-direction)
ε_stat_y = df['Strain y-data'] #strain statistic data
ε_prob_y = df['φ(x) - ε,y'] #strain statistic probability computation
ε_mean_y = exp_df['ε,y [%]'].mean() #mean of strain measurement values
ε_std_y = exp_df['ε,y [%]'].std() #st. dev of strain measurement values
#Tensile strain measurements STATS - x-direction (2-direction)
ε_stat_x = df['Strain x-data'] #strain statistic data
ε_prob_x = df['φ(x) - ε,x'] #strain statistic probability computation
ε_mean_x = exp_df['ε,x [%]'].mean() #mean of strain measurement values
ε_std_x = exp_df['ε,x [%]'].std() #st. dev of strain measurement values
#Standardize data colour
data1_paint = '#1E90FF' #dodger blue
data2_paint = '#00CED1' #dark turquoise
data3_paint = '#6A5ACD' #slate blue
color_map1 = [data1_paint, data2_paint, data3_paint]
color_map2 = [data3_paint, data2_paint, data1_paint]
#Call plotly 'Scattergl' function and plot Force statistical data
F_prob_plot = go.Scattergl(x = intval2, y = F_prob, mode = 'markers',
marker = dict(
line = dict(
width = 0.5, color = data1_paint),
size=5
)
)
#Call plotly 'Scattergl' function and plot Strain statistical data (y-direction)
εy_prob_plot = go.Scattergl(x = intval2, y = ε_prob_y, mode = 'markers',
marker = dict(
line = dict(
width = 0.5, color = data2_paint),
size=5
)
)
#Call plotly 'Scattergl' function and plot Strain statistical data (x-direction)
εx_prob_plot = go.Scattergl(x = intval2, y = ε_prob_x, mode = 'markers',
marker = dict(
line = dict(
width = 0.5, color = data3_paint),
size=5
)
)
#Assign to 'data' variable for plot initialization
F_prob_plt_data = [F_prob_plot]
εy_prob_plt_data = [εy_prob_plot]
εx_prob_plt_data = [εx_prob_plot]
prob_plot_data = [F_prob_plot, εy_prob_plot, εx_prob_plot]
#Define axes title fonts
#
#Type-faces I prefer (all sans-serif): PT Sans, SF Mono, Frutiger, Amplitude, Antique Olive, Avenir, Eurostile
# Optima,
#Font packages for plotting
font_pkg0=dict(family='Optima', size=22, color='black')
font_pkg1=dict(family='Optima', size=16, color='black')
font_pkg2=dict(family='Optima', size=12, color='black')
#Describe plot layout
#NOTE(review): autorange=True overrides the explicit y-axis range=[0, len(Stress)],
# and 'Stress' is carried over from the preceding σ-ε cell — confirm intent.
def_layout_F = go.Layout(title='±45° 8-Ply Coupon - Tensile force measurement statistical probability distribution',
titlefont=font_pkg0,
yaxis=dict(title='Probability [φ(x)]', autorange=True, showgrid=True, titlefont=font_pkg1,
tickfont=font_pkg2, range=[0, len(Stress)], zeroline=False,
ticks='outside', showline=True, tickwidth=2, rangemode='tozero',
showexponent = 'all', exponentformat = 'power'),
xaxis=dict(title='Random variable', showgrid=True, zeroline=True, titlefont=font_pkg1,
tickfont=font_pkg2, ticks='outside', showline=True, tickwidth=2,
rangemode='tozero')
)
def_layout_εy = go.Layout(title='±45° 8-Ply Coupon - Strain (1-direction) measurement statistical probability distribution',
titlefont=font_pkg0,
yaxis=dict(title='Probability [φ(x)]', autorange=True, showgrid=True, titlefont=font_pkg1,
tickfont=font_pkg2, range=[0, len(Stress)], zeroline=False,
ticks='outside', showline=True, tickwidth=2, rangemode='tozero',
showexponent = 'all', exponentformat = 'power'),
xaxis=dict(title='Random variable', showgrid=True, zeroline=True, titlefont=font_pkg1,
tickfont=font_pkg2, ticks='outside', showline=True, tickwidth=2,
rangemode='tozero')
)
def_layout_εx = go.Layout(title='±45° 8-Ply Coupon - Strain (2-direction) measurement statistical probability distribution',
titlefont=font_pkg0,
yaxis=dict(title='Probability [φ(x)]', autorange=True, showgrid=True, titlefont=font_pkg1,
tickfont=font_pkg2, range=[0, len(Stress)], zeroline=False,
ticks='outside', showline=True, tickwidth=2, rangemode='tozero',
showexponent = 'all', exponentformat = 'power'),
xaxis=dict(title='Random variable', showgrid=True, zeroline=True, titlefont=font_pkg1,
tickfont=font_pkg2, ticks='outside', showline=True, tickwidth=2,
rangemode='tozero')
)
# anchored_text = AnchoredText("Force measurement mean = " +"%.3f" % F_mean + " N\n" +
# "Force measurement st. dev = " +"%.3f" % F_std +
# "Strain measurement mean = " +"%.3f" % ε_mean +
# "Strain measurement st. dev = " +"%.3f" % ε_std
# ax.add_artist(anchored_text)
#Render the three probability-distribution figures and export .svg/.eps to the repo
fig_F = go.Figure(data=F_prob_plt_data, layout=def_layout_F)
#py.offline.iplot(fig_F, filename='±458T_F_prob_data')
pio.show(fig_F, filename='±458T_F_prob_distn')
#Write .svg and .eps plot images to repo
pio.write_image(fig_F, im_Path + '±458T_F_prob_distn.svg')
pio.write_image(fig_F, im_Path + '±458T_F_prob_distn.eps')
fig_εy = go.Figure(data=εy_prob_plt_data, layout=def_layout_εy)
#py.offline.iplot(fig_εy, filename='±458T_εy_prob_data')
pio.show(fig_εy, filename='±458T_εy_prob_distn')
#Write .svg and .eps plot images to repo
pio.write_image(fig_εy, im_Path + '±458T_εy_prob_distn.svg')
pio.write_image(fig_εy, im_Path + '±458T_εy_prob_distn.eps')
fig_εx = go.Figure(data=εx_prob_plt_data, layout=def_layout_εx)
#py.offline.iplot(fig_εx, filename='±458T_εx_prob_data')
pio.show(fig_εx, filename='±458T_εx_prob_distn')
#Write .svg and .eps plot images to repo
pio.write_image(fig_εx, im_Path + '±458T_εx_prob_distn.svg')
pio.write_image(fig_εx, im_Path + '±458T_εx_prob_distn.eps')
# ##### ±45° 8-Ply Coupon - Tension-loaded: Experiment statistics summary
# 
#
# 
#
# 
# ### 3.5.3.6 - UD 45°, 8-Ply, Tension: σ-ε plot
# In[52]:
##====================Create sub-DataFrame for UD 45° CFRE Tension QSF experiment=========================
#Slice CFRE coupon quasi-fatigue loading experiment data from DataFrame
UD_45_8_T_df = qsf_df[qsf_df['Coupon tag'] == 'UD_45_8_T']
#Define DataFrame I.D. tag
df = UD_45_8_T_df
#Define coupon I.D. tag
c_tag = 'UD_45_8_T'
#Import experimental/calculated data values
Force = df['Force load [N]']
Stress = df['σ,qs [MPa]']
Strain = df['ε,y [%]']
LinearLimit = 1
#NOTE: experimental data is imported as pandas DataFrame; force, stress and strain data are parsed
# as pandas.Series arrays. These need to be converted to 'list' type data to be plotted with Matplotlib
plt_Force = Force.tolist()
plt_Stress = Stress.tolist()
plt_Strain = Strain.tolist()
#df_list = df['STYNAME'].tolist()
#True Stress calculation
#NOTE(review): strain column is labelled [%]; σ_true = σ(1+ε) expects
# dimensionless strain — verify whether a /100 conversion is needed here.
Stress_True = [ x * (1+y) for y,x in zip(Strain,Stress)]
#True Strain calculation
Strain_True = [math.log(1+x) for x in Strain]
#Composite strength characteristics
##
#Extract tensile modulus of elasticity from elastic modulus calculation DataFrame
##
### in MPa
Emod_MPa = eqsf_mod_df.loc[eqsf_mod_df['Coupon tag'] == c_tag]['E, chord [MPa]'].values[0]
### in GPa
Emod_GPa = eqsf_mod_df.loc[eqsf_mod_df['Coupon tag'] == c_tag]['E, chord [GPa]'].values[0]
##
#UTS - Ultimate tensile strength (X,t)
UTS = max(Stress)
##
#Ultimate in-plane shear strength (S)
# S =
#Call plotly 'Scattergl' function and assign plot data
σvsε = go.Scattergl(x = Strain, y = Stress, mode = 'markers',
marker = dict(
line = dict(
width = 0.5, color = '#1E90FF'),size=2
)
)
#Assign to 'data' variable for plot initialization
σvsε_data = [σvsε]
#Define axes title fonts
#
#Type-faces I prefer (all sans-serif): PT Sans, SF Mono, Frutiger, Amplitude, Antique Olive, Avenir, Eurostile
# Optima,
#Font packages for plotting
font_pkg0=dict(family='Optima', size=22, color='black')
font_pkg1=dict(family='Optima', size=16, color='black')
font_pkg2=dict(family='Optima', size=12, color='black')
#Describe plot layout
def_layout = go.Layout(title='σ vs. ε - UD 45° 8-Ply Coupon, tensile loaded',
titlefont=font_pkg0,
yaxis=dict(title='Stress [MPa]', autorange=True, showgrid=True, titlefont=font_pkg1,
tickfont=font_pkg2, range=[0, len(Stress)], zeroline=False,
ticks='outside', showline=True, tickwidth=2, rangemode='tozero',
showexponent = 'all', exponentformat = 'power'),
xaxis=dict(title='Measured Strain', showgrid=True, zeroline=False, titlefont=font_pkg1,
tickfont=font_pkg2, ticks='outside', showline=True, tickwidth=2,
rangemode='tozero')
)
#Include quasi-fatigue properties of composite coupon
#
#Modulus
eqsf_Tmod_df = eqsf_mod_df[eqsf_mod_df['Loading'] == 'Tension']
# anchored_text = AnchoredText("Tensile Modulus of Elasticity = " +"%.3f" % Emod_GPa + " GPa\n" +
# "UTS = "+ "%.3f" % UTS +" MPa\n"+
# "Failure Stress = " + "%.5f" % failure_stress +" MPa\n"+
# "Max Strain = "+ "%.5f" % Strain[8], loc='right')
# ax.add_artist(anchored_text)
#Render the σ-ε figure and export vector images to the repo
fig = go.Figure(data=σvsε_data, layout=def_layout)
#py.offline.iplot(fig, filename='σvsε_UD458T_data')
pio.show(fig, filename='σvsε_UD458T_data')
# #Save images in PDF or vector file format for publications (LaTeX)
# #
im_Path = Image_PATH
# #.SVG
pio.write_image(fig, im_Path + 'UD458T_stress_strain.svg')
# #.PDF
#pio.write_image(fig, im_Path + 'UD458T_stress_strain.pdf')
# #.EPS
pio.write_image(fig, im_Path + 'UD458T_stress_strain.eps')
# #.jpeg
#pio.write_image(fig, im_Path + 'UD458T_stress_strain.jpg')
# ##### UD 45° 8-Ply Coupon - Tension-loaded: Stress vs. Strain test summary
#
# * No. of coupons tested: 3
# * Tensile modulus of elasticity ($E_{t}$, *ASTM D3039*): 8.990 GPa
# * Test speed: 1 $\frac{mm}{min}$
# * Failure mode (*ASTM D3039*): LIT
# 
# ### 3.5.3.7 - UD 45°, 8-Ply, Tension Coupon - Experiment statistical analyses
# In[53]:
##=============================== UD 45° 8-Ply Tension-loaded coupon =====================================
# Statistical analyses of experiment data
#
# This is a custom STAT graphics routine. The SciPy package (namely .distplot(), interfaced with plotly)
# seems to poorly handle statistical probability distributions of the σ/ε quasi-fatigue experimental
# measurement data.
#========================================================================================================
#Define composite coupon tag I.D.
c_tag = 'UD_45_8_T'
#Slice coupon tag data from experiment statistics DataFrame
df = eqsf_STAT_df[eqsf_STAT_df['Coupon tag'] == c_tag]
#Call experimental DataFrame to compute mean & st. dev values
#Slice UD 45° CFRE - 8 Ply - Tension loading experiment data from DataFrame
exp_df = qsf_df[qsf_df['Coupon tag'] == c_tag]
#Tensile force measurements STATS
F_stat = df['Force data'] #force statistic data
F_prob = df['φ(x) - F'] #force statistic probability computation
intval1 = df['μ ± 3σ'] #abscissa values for plot
intval2 = df['Intervals'] #abscissa values for plot
F_mean = exp_df['Force load [N]'].mean() #mean of force measurement values
F_std = exp_df['Force load [N]'].std() #st. dev of force measurement values
#Tensile strain measurements STATS
ε_stat = df['Strain y-data'] #strain statistic data
ε_prob = df['φ(x) - ε,y'] #strain statistic probability computation
ε_mean = exp_df['ε,y [%]'].mean() #mean of strain measurement values
ε_std = exp_df['ε,y [%]'].std() #st. dev of strain measurement values
#Standardize data colour
data1_paint = '#1E90FF' #dodger blue
data2_paint = '#00CED1' #dark turquoise
color_map1 = [data1_paint, data2_paint]
color_map2 = [data2_paint, data1_paint]
#Call plotly 'Scattergl' function and plot Force statistical data
F_prob_plot = go.Scattergl(x = intval2, y = F_prob, mode = 'markers',
marker = dict(
line = dict(
width = 0.5, color = data1_paint),
size=5
)
)
#Call plotly 'Scattergl' function and plot Strain statistical data
ε_prob_plot = go.Scattergl(x = intval2, y = ε_prob, mode = 'markers',
marker = dict(
line = dict(
width = 0.5, color = data2_paint),
size=5
)
)
#Assign to 'data' variable for plot initialization
F_prob_plt_data = [F_prob_plot]
ε_prob_plt_data = [ε_prob_plot]
prob_plot_data = [F_prob_plot, ε_prob_plot]
#Define axes title fonts
#
#Type-faces I prefer (all sans-serif): PT Sans, SF Mono, Frutiger, Amplitude, Antique Olive, Avenir, Eurostile
# Optima,
#Font packages for plotting
font_pkg0=dict(family='Optima', size=22, color='black')
font_pkg1=dict(family='Optima', size=16, color='black')
font_pkg2=dict(family='Optima', size=12, color='black')
#Describe plot layout
#NOTE(review): autorange=True overrides the explicit y-axis range=[0, len(Stress)],
# and 'Stress' is carried over from the preceding σ-ε cell — confirm intent.
def_layout_F = go.Layout(title='UD 45° 8-Ply Coupon - Tensile force measurement statistical probability distribution',
titlefont=font_pkg0,
yaxis=dict(title='Probability [φ(x)]', autorange=True, showgrid=True, titlefont=font_pkg1,
tickfont=font_pkg2, range=[0, len(Stress)], zeroline=False,
ticks='outside', showline=True, tickwidth=2, rangemode='tozero',
showexponent = 'all', exponentformat = 'power'),
xaxis=dict(title='Random variable', showgrid=True, zeroline=True, titlefont=font_pkg1,
tickfont=font_pkg2, ticks='outside', showline=True, tickwidth=2,
rangemode='tozero')
)
def_layout_ε = go.Layout(title='UD 45° 8-Ply Coupon - Strain measurement statistical probability distribution',
titlefont=font_pkg0,
yaxis=dict(title='Probability [φ(x)]', autorange=True, showgrid=True, titlefont=font_pkg1,
tickfont=font_pkg2, range=[0, len(Stress)], zeroline=False,
ticks='outside', showline=True, tickwidth=2, rangemode='tozero',
showexponent = 'all', exponentformat = 'power'),
xaxis=dict(title='Random variable', showgrid=True, zeroline=True, titlefont=font_pkg1,
tickfont=font_pkg2, ticks='outside', showline=True, tickwidth=2,
rangemode='tozero')
)
# anchored_text = AnchoredText("Force measurement mean = " +"%.3f" % F_mean + " N\n" +
# "Force measurement st. dev = " +"%.3f" % F_std +
# "Strain measurement mean = " +"%.3f" % ε_mean +
# "Strain measurement st. dev = " +"%.3f" % ε_std
# ax.add_artist(anchored_text)
#Render the probability-distribution figures and export .svg/.eps to the repo
fig_F = go.Figure(data=F_prob_plt_data, layout=def_layout_F)
#py.offline.iplot(fig_F, filename='UD458T_F_prob_data')
pio.show(fig_F, filename='UD458T_F_prob_distn')
#Write .svg and .eps plot images to repo
pio.write_image(fig_F, im_Path + 'UD458T_F_prob_distn.svg')
pio.write_image(fig_F, im_Path + 'UD458T_F_prob_distn.eps')
fig_ε = go.Figure(data=ε_prob_plt_data, layout=def_layout_ε)
#py.offline.iplot(fig_ε, filename='UD458T_ε_prob_data')
pio.show(fig_ε, filename='UD458T_ε_prob_distn')
#Write .svg and .eps plot images to repo
pio.write_image(fig_ε, im_Path + 'UD458T_ε_prob_distn.svg')
pio.write_image(fig_ε, im_Path + 'UD458T_ε_prob_distn.eps')
# ##### UD 45° 8-Ply Coupon - Tension-loaded: Experiment statistics summary
# 
#
# 
# ### 3.5.3.8 - UD 0°, 4-Ply, Compression: σ-ε plot
# In[54]:
##====================Create sub-DataFrame for UD 0° CFRE Compression QSF experiment=========================
#Slice CFRE coupon quasi-fatigue loading experiment data from DataFrame
UD_0_4_C_df = qsf_df[qsf_df['Coupon tag'] == 'UD_0_4_C']
#Define DataFrame I.D. tag
df = UD_0_4_C_df
#Define coupon I.D. tag
c_tag = 'UD_0_4_C'
#Import experimental/calculated data values
Force = df['Force load [N]']
Stress = df['σ,qs [MPa]']
Strain = df['ε,y [%]']
LinearLimit = 1
#NOTE: experimental data is imported as pandas DataFrame; force, stress and strain data are parsed
# as pandas.Series arrays. These need to be converted to 'list' type data to be plotted with Matplotlib
plt_Force = Force.tolist()
plt_Stress = Stress.tolist()
plt_Strain = Strain.tolist()
#df_list = df['STYNAME'].tolist()
#True Stress calculation
#NOTE(review): strain column is labelled [%]; σ_true = σ(1+ε) expects
# dimensionless strain — verify whether a /100 conversion is needed here.
Stress_True = [ x * (1+y) for y,x in zip(Strain,Stress)]
#True Strain calculation
Strain_True = [math.log(1+x) for x in Strain]
#Composite strength characteristics
##
#Extract modulus of elasticity from elastic modulus calculation DataFrame
##
### in MPa
Emod_MPa = eqsf_mod_df.loc[eqsf_mod_df['Coupon tag'] == c_tag]['E, chord [MPa]'].values[0]
### in GPa
Emod_GPa = eqsf_mod_df.loc[eqsf_mod_df['Coupon tag'] == c_tag]['E, chord [GPa]'].values[0]
##
#NOTE(review): this is a compression test — max(Stress) here is the ultimate
# compressive strength rather than UTS; the variable name is kept for
# consistency with the other cells.
UTS = max(Stress)
##
#Ultimate in-plane shear strength (S)
# S =
#Call plotly 'Scattergl' function and assign plot data
σvsε = go.Scattergl(x = Strain, y = Stress, mode = 'markers',
marker = dict(
line = dict(
width = 0.5, color = '#1E90FF'),size=2
)
)
#Assign to 'data' variable for plot initialization
σvsε_data = [σvsε]
#Define axes title fonts
#
#Type-faces I prefer (all sans-serif): PT Sans, SF Mono, Frutiger, Amplitude, Antique Olive, Avenir, Eurostile
# Optima,
#Font packages for plotting
font_pkg0=dict(family='Optima', size=22, color='black')
font_pkg1=dict(family='Optima', size=16, color='black')
font_pkg2=dict(family='Optima', size=12, color='black')
#Describe plot layout
def_layout = go.Layout(title='σ vs. ε - UD 0° 4-Ply Coupon, compression loaded',
titlefont=font_pkg0,
yaxis=dict(title='Stress [MPa]', autorange=True, showgrid=True, titlefont=font_pkg1,
tickfont=font_pkg2, range=[0, len(Stress)], zeroline=False,
ticks='outside', showline=True, tickwidth=2, rangemode='tozero',
showexponent = 'all', exponentformat = 'power'),
xaxis=dict(title='Measured Strain', showgrid=True, zeroline=False, titlefont=font_pkg1,
tickfont=font_pkg2, ticks='outside', showline=True, tickwidth=2,
rangemode='tozero')
)
#Include quasi-fatigue properties of composite coupon
#
#Modulus
eqsf_Tmod_df = eqsf_mod_df[eqsf_mod_df['Loading'] == 'Tension']
# anchored_text = AnchoredText("Tensile Modulus of Elasticity = " +"%.3f" % Emod_GPa + " GPa\n" +
# "UTS = "+ "%.3f" % UTS +" MPa\n"+
# "Failure Stress = " + "%.5f" % failure_stress +" MPa\n"+
# "Max Strain = "+ "%.5f" % Strain[8], loc='right')
# ax.add_artist(anchored_text)
#Render the σ-ε figure and export vector images to the repo
fig = go.Figure(data=σvsε_data, layout=def_layout)
#py.offline.iplot(fig, filename='σvsε_UD04C_data')
pio.show(fig, filename='σvsε_UD04C_data')
# #Save images in PDF or vector file format for publications (LaTeX)
# #
im_Path = Image_PATH
# #.SVG
pio.write_image(fig, im_Path + 'UD04C_stress_strain.svg')
# #.PDF
#pio.write_image(fig, im_Path + 'UD04C_stress_strain.pdf')
# #.EPS
pio.write_image(fig, im_Path + 'UD04C_stress_strain.eps')
# #.jpeg
#pio.write_image(fig, im_Path + 'UD04C_stress_strain.jpg')
# ##### UD 0° 4-Ply Coupon - Compression-loaded: Stress vs. Strain test summary
#
# * No. of coupons tested: 3
# * Compressive modulus of elasticity ($E_{t}$, *ASTM D3039*): 99.226 GPa
# * Test speed: 1 $\frac{mm}{min}$
# * Failure mode (*ASTM D3039*): DGM
# 
# ### 3.5.3.8 - UD 0°, 4-Ply, Compression Coupon - Experiment statistical analyses
# In[55]:
##=============================== UD 0° 4-Ply Compression-loaded coupon =====================================
# Statistical analyses of experiment data
#
# This is a custom STAT graphics routine. The SciPy package (namely .distplot(), interfaced with plotly)
# seems to poorly handle statistical probability distributions of the σ/ε quasi-fatigue experimental
# measurement data.
#========================================================================================================
#Define composite coupon tag I.D.
c_tag = 'UD_0_4_C'
#Slice coupon tag data from experiment statistics DataFrame
df = eqsf_STAT_df[eqsf_STAT_df['Coupon tag'] == c_tag]
#Call experimental DataFrame to compute mean & st. dev values
#Slice UD 0° CFRE - 4 Ply - Compression loading experiment data from DataFrame
exp_df = qsf_df[qsf_df['Coupon tag'] == c_tag]
#Compression force measurements STATS
F_stat = df['Force data'] #force statistic data
F_prob = df['φ(x) - F'] #force statistic probability computation
intval1 = df['μ ± 3σ'] #abscissa values for plot
intval2 = df['Intervals'] #abscissa values for plot
F_mean = exp_df['Force load [N]'].mean() #mean of force measurement values
F_std = exp_df['Force load [N]'].std() #st. dev of force measurement values
#Compression strain measurements STATS
ε_stat = df['Strain y-data'] #strain statistic data
ε_prob = df['φ(x) - ε,y'] #strain statistic probability computation
ε_mean = exp_df['ε,y [%]'].mean() #mean of strain measurement values
ε_std = exp_df['ε,y [%]'].std() #st. dev of strain measurement values
#Standardize data colour
data1_paint = '#1E90FF' #dodger blue
data2_paint = '#00CED1' #dark turquoise
color_map1 = [data1_paint, data2_paint]
color_map2 = [data2_paint, data1_paint]
#Call plotly 'Scattergl' function and plot Force statistical data
F_prob_plot = go.Scattergl(x = intval2, y = F_prob, mode = 'markers',
marker = dict(
line = dict(
width = 0.5, color = data1_paint),
size=5
)
)
#Call plotly 'Scattergl' function and plot Strain statistical data
ε_prob_plot = go.Scattergl(x = intval2, y = ε_prob, mode = 'markers',
marker = dict(
line = dict(
width = 0.5, color = data2_paint),
size=5
)
)
#Assign to 'data' variable for plot initialization
F_prob_plt_data = [F_prob_plot]
ε_prob_plt_data = [ε_prob_plot]
prob_plot_data = [F_prob_plot, ε_prob_plot]
#Define axes title fonts
#
#Type-faces I prefer (all sans-serif): PT Sans, SF Mono, Frutiger, Amplitude, Antique Olive, Avenir, Eurostile
# Optima,
#Font packages for plotting
font_pkg0=dict(family='Optima', size=22, color='black')
font_pkg1=dict(family='Optima', size=16, color='black')
font_pkg2=dict(family='Optima', size=12, color='black')
#Describe plot layout
#NOTE(review): autorange=True overrides the explicit y-axis range=[0, len(Stress)],
# and 'Stress' is carried over from the preceding σ-ε cell — confirm intent.
def_layout_F = go.Layout(title='UD 0° 4-Ply - Compression force measurement statistical probability distribution',
titlefont=font_pkg0,
yaxis=dict(title='Probability [φ(x)]', autorange=True, showgrid=True, titlefont=font_pkg1,
tickfont=font_pkg2, range=[0, len(Stress)], zeroline=False,
ticks='outside', showline=True, tickwidth=2, rangemode='tozero',
showexponent = 'all', exponentformat = 'power'),
xaxis=dict(title='Random variable', showgrid=True, zeroline=True, titlefont=font_pkg1,
tickfont=font_pkg2, ticks='outside', showline=True, tickwidth=2,
rangemode='tozero')
)
def_layout_ε = go.Layout(title='UD 0° 4-Ply - Strain measurement statistical probability distribution',
titlefont=font_pkg0,
yaxis=dict(title='Probability [φ(x)]', autorange=True, showgrid=True, titlefont=font_pkg1,
tickfont=font_pkg2, range=[0, len(Stress)], zeroline=False,
ticks='outside', showline=True, tickwidth=2, rangemode='tozero',
showexponent = 'all', exponentformat = 'power'),
xaxis=dict(title='Random variable', showgrid=True, zeroline=True, titlefont=font_pkg1,
tickfont=font_pkg2, ticks='outside', showline=True, tickwidth=2,
rangemode='tozero')
)
# anchored_text = AnchoredText("Force measurement mean = " +"%.3f" % F_mean + " N\n" +
# "Force measurement st. dev = " +"%.3f" % F_std +
# "Strain measurement mean = " +"%.3f" % ε_mean +
# "Strain measurement st. dev = " +"%.3f" % ε_std
# ax.add_artist(anchored_text)
#Render the probability-distribution figures and export .svg/.eps to the repo
fig_F = go.Figure(data=F_prob_plt_data, layout=def_layout_F)
#py.offline.iplot(fig_F, filename='UD04C_F_prob_data')
pio.show(fig_F, filename='UD04C_F_prob_distn')
#Write .svg and .eps plot images to repo
pio.write_image(fig_F, im_Path + 'UD04C_F_prob_distn.svg')
pio.write_image(fig_F, im_Path + 'UD04C_F_prob_distn.eps')
fig_ε = go.Figure(data=ε_prob_plt_data, layout=def_layout_ε)
#py.offline.iplot(fig_ε, filename='UD04C_ε_prob_data')
pio.show(fig_ε, filename='UD04C_ε_prob_distn')
#Write .svg and .eps plot images to repo
pio.write_image(fig_ε, im_Path + 'UD04C_ε_prob_distn.svg')
pio.write_image(fig_ε, im_Path + 'UD04C_ε_prob_distn.eps')
# ##### UD 0° 4-Ply Coupon - Compression-loaded: Experiment statistics summary
# 
#
# 
# ### 3.5.3.9 - UD 90°, 8-Ply, Compression: σ-ε plot
# In[56]:
##================Create sub-DataFrame for UD 90° CFRE Compression QSF experiment======================
#Slice CFRE coupon quasi-fatigue loading experiment data from DataFrame
UD_90_8_C_df = qsf_df[qsf_df['Coupon tag'] == 'UD_90_8_C']
#Define DataFrame I.D. tag
df = UD_90_8_C_df
#Define coupon I.D. tag
c_tag = 'UD_90_8_C'
#Import experimental/calculated data values
Force = df['Force load [N]']
Stress = df['σ,qs [MPa]']
Strain = df['ε,y [%]']
LinearLimit = 1
#NOTE: experimental data is imported as pandas DataFrame; force, stress and strain data are parsed
# as pandas.Series arrays. These need to be converted to 'list' type data to be plotted with Matplotlib
plt_Force = Force.tolist()
plt_Stress = Stress.tolist()
plt_Strain = Strain.tolist()
#df_list = df['STYNAME'].tolist()
#True Stress calculation
#NOTE(review): strain column is labelled [%]; σ_true = σ(1+ε) expects
# dimensionless strain — verify whether a /100 conversion is needed here.
Stress_True = [ x * (1+y) for y,x in zip(Strain,Stress)]
#True Strain calculation
Strain_True = [math.log(1+x) for x in Strain]
#Composite strength characteristics
##
#Extract modulus of elasticity from elastic modulus calculation DataFrame
##
### in MPa
Emod_MPa = eqsf_mod_df.loc[eqsf_mod_df['Coupon tag'] == c_tag]['E, chord [MPa]'].values[0]
### in GPa
Emod_GPa = eqsf_mod_df.loc[eqsf_mod_df['Coupon tag'] == c_tag]['E, chord [GPa]'].values[0]
##
#NOTE(review): this is a compression test — max(Stress) here is the ultimate
# compressive strength rather than UTS; the variable name is kept for
# consistency with the other cells.
UTS = max(Stress)
##
#Ultimate in-plane shear strength (S)
# S =
#Call plotly 'Scattergl' function and assign plot data
σvsε = go.Scattergl(x = Strain, y = Stress, mode = 'markers',
marker = dict(
line = dict(
width = 0.5, color = '#1E90FF'),size=2
)
)
#Assign to 'data' variable for plot initialization
σvsε_data = [σvsε]
#Define axes title fonts
#
#Type-faces I prefer (all sans-serif): PT Sans, SF Mono, Frutiger, Amplitude, Antique Olive, Avenir, Eurostile
# Optima,
#Font packages for plotting
font_pkg0=dict(family='Optima', size=22, color='black')
font_pkg1=dict(family='Optima', size=16, color='black')
font_pkg2=dict(family='Optima', size=12, color='black')
#Describe plot layout
def_layout = go.Layout(title='σ vs. ε - UD 90° 8-Ply Coupon, compression loaded',
titlefont=font_pkg0,
yaxis=dict(title='Stress [MPa]', autorange=True, showgrid=True, titlefont=font_pkg1,
tickfont=font_pkg2, range=[0, len(Stress)], zeroline=False,
ticks='outside', showline=True, tickwidth=2, rangemode='tozero',
showexponent = 'all', exponentformat = 'power'),
xaxis=dict(title='Measured Strain', showgrid=True, zeroline=False, titlefont=font_pkg1,
tickfont=font_pkg2, ticks='outside', showline=True, tickwidth=2,
rangemode='tozero')
)
#Include quasi-fatigue properties of composite coupon
#
#Modulus
eqsf_Tmod_df = eqsf_mod_df[eqsf_mod_df['Loading'] == 'Tension']
# anchored_text = AnchoredText("Tensile Modulus of Elasticity = " +"%.3f" % Emod_GPa + " GPa\n" +
# "UTS = "+ "%.3f" % UTS +" MPa\n"+
# "Failure Stress = " + "%.5f" % failure_stress +" MPa\n"+
# "Max Strain = "+ "%.5f" % Strain[8], loc='right')
# ax.add_artist(anchored_text)
#Render the σ-ε figure and export vector images to the repo
fig = go.Figure(data=σvsε_data, layout=def_layout)
#py.offline.iplot(fig, filename='σvsε_UD908C_data')
pio.show(fig, filename='σvsε_UD908C_data')
# #Save images in PDF or vector file format for publications (LaTeX)
# #
im_Path = Image_PATH
# #.SVG
pio.write_image(fig, im_Path + 'UD908C_stress_strain.svg')
# #.PDF
#pio.write_image(fig, im_Path + 'UD908C_stress_strain.pdf')
# #.EPS
pio.write_image(fig, im_Path + 'UD908C_stress_strain.eps')
# #.jpeg
#pio.write_image(fig, im_Path + 'UD908C_stress_strain.jpg')
# ##### UD 90° 8-Ply Coupon - Compression-loaded: Stress vs. Strain test summary
#
# * No. of coupons tested: 3
# * Compressive modulus of elasticity ($E_{t}$, *ASTM D3039*): 6.099 GPa
# * Test speed: 1 $\frac{mm}{min}$
# * Failure mode (*ASTM D3039*): GAT
# 
# ### 3.5.3.10 - UD 90°, 8-Ply, Compression Coupon - Experiment statistical analyses
# In[57]:
##=============================== UD 90° 8-Ply Compression-loaded coupon =====================================
# Statistical analyses of experiment data
#
# This is a custom STAT graphics routine. The SciPy package (namely .distplot(), interfaced with plotly)
# seems to poorly handle statistical probability distributions of the σ/ε quasi-fatigue experimental
# measurement data.
#========================================================================================================
#Define composite coupon tag I.D.
c_tag = 'UD_90_8_C'
#Slice coupon tag data from experiment statistics DataFrame
df = eqsf_STAT_df[eqsf_STAT_df['Coupon tag'] == c_tag]
#Call experimental DataFrame to compute mean & st. dev values
#Slice UD 90° CFRE - 8 Ply - Compression loading experiment data from DataFrame
exp_df = qsf_df[qsf_df['Coupon tag'] == c_tag]
#Force measurements STATS (compression-loaded coupon)
F_stat = df['Force data'] #force statistic data
F_prob = df['φ(x) - F'] #force statistic probability computation
intval1 = df['μ ± 3σ'] #abscissa values for plot
intval2 = df['Intervals'] #abscissa values for plot
F_mean = exp_df['Force load [N]'].mean() #mean of force measurement values
F_std = exp_df['Force load [N]'].std() #st. dev of force measurement values
#Strain measurements STATS
ε_stat = df['Strain y-data'] #strain statistic data
ε_prob = df['φ(x) - ε,y'] #strain statistic probability computation
ε_mean = exp_df['ε,y [%]'].mean() #mean of strain measurement values
ε_std = exp_df['ε,y [%]'].std() #st. dev of strain measurement values
#Standardize data colour
data1_paint = '#1E90FF' #dodger blue
data2_paint = '#00CED1' #dark turquoise
color_map1 = [data1_paint, data2_paint]
color_map2 = [data2_paint, data1_paint]
#Call plotly 'Scattergl' function and plot Force statistical data
F_prob_plot = go.Scattergl(x = intval2, y = F_prob, mode = 'markers',
marker = dict(
line = dict(
width = 0.5, color = data1_paint),
size=5
)
)
#Call plotly 'Scattergl' function and plot Strain statistical data
ε_prob_plot = go.Scattergl(x = intval2, y = ε_prob, mode = 'markers',
marker = dict(
line = dict(
width = 0.5, color = data2_paint),
size=5
)
)
#Assign to 'data' variable for plot initialization
#NOTE(review): prob_plot_data (combined traces) is built but the figures below use
#only the single-trace lists — confirm whether a combined figure was intended.
F_prob_plt_data = [F_prob_plot]
ε_prob_plt_data = [ε_prob_plot]
prob_plot_data = [F_prob_plot, ε_prob_plot]
#Define axes title fonts
#
#Type-faces I prefer (all sans-serif): PT Sans, SF Mono, Frutiger, Amplitude, Antique Olive, Avenir, Eurostile
# Optima,
#Font packages for plotting (title / axis-title / tick sizes)
font_pkg0=dict(family='Optima', size=22, color='black')
font_pkg1=dict(family='Optima', size=16, color='black')
font_pkg2=dict(family='Optima', size=12, color='black')
#Describe plot layout
#NOTE(review): yaxis combines autorange=True with range=[0, len(Stress)]; the explicit
#range is ignored under autorange and len(Stress) is a row count from a previous cell —
#confirm intent.
def_layout_F = go.Layout(title='UD 90° 8-Ply - Compression force measurement statistical probability distribution',
titlefont=font_pkg0,
yaxis=dict(title='Probability [φ(x)]', autorange=True, showgrid=True, titlefont=font_pkg1,
tickfont=font_pkg2, range=[0, len(Stress)], zeroline=False,
ticks='outside', showline=True, tickwidth=2, rangemode='tozero',
showexponent = 'all', exponentformat = 'power'),
xaxis=dict(title='Random variable', showgrid=True, zeroline=True, titlefont=font_pkg1,
tickfont=font_pkg2, ticks='outside', showline=True, tickwidth=2,
rangemode='tozero')
)
def_layout_ε = go.Layout(title='UD 90° 8-Ply - Strain measurement statistical probability distribution',
titlefont=font_pkg0,
yaxis=dict(title='Probability [φ(x)]', autorange=True, showgrid=True, titlefont=font_pkg1,
tickfont=font_pkg2, range=[0, len(Stress)], zeroline=False,
ticks='outside', showline=True, tickwidth=2, rangemode='tozero',
showexponent = 'all', exponentformat = 'power'),
xaxis=dict(title='Random variable', showgrid=True, zeroline=True, titlefont=font_pkg1,
tickfont=font_pkg2, ticks='outside', showline=True, tickwidth=2,
rangemode='tozero')
)
#Assemble the two probability-distribution figures, display them, and export
#vector copies (.svg and .eps) for publication use.
# anchored_text = AnchoredText("Force measurement mean = " +"%.3f" % F_mean + " N\n" +
# "Force measurement st. dev = " +"%.3f" % F_std +
# "Strain measurement mean = " +"%.3f" % ε_mean +
# "Strain measurement st. dev = " +"%.3f" % ε_std
# ax.add_artist(anchored_text)
fig_F = go.Figure(data=F_prob_plt_data, layout=def_layout_F)
#py.offline.iplot(fig_F, filename='UD908C_F_prob_data')
pio.show(fig_F, filename='UD908C_F_prob_distn')
#Write .svg and .eps plot images to repo
pio.write_image(fig_F, im_Path + 'UD908C_F_prob_distn.svg')
pio.write_image(fig_F, im_Path + 'UD908C_F_prob_distn.eps')
fig_ε = go.Figure(data=ε_prob_plt_data, layout=def_layout_ε)
#py.offline.iplot(fig_ε, filename='UD908C_ε_prob_data')
pio.show(fig_ε, filename='UD908C_ε_prob_distn')
#Write .svg and .eps plot images to repo
pio.write_image(fig_ε, im_Path + 'UD908C_ε_prob_distn.svg')
pio.write_image(fig_ε, im_Path + 'UD908C_ε_prob_distn.eps')
# ##### UD 90° 8-Ply Coupon - Compression-loaded: Experiment statistics summary
# 
#
# 
| 51.973908 | 689 | 0.646116 | 26,413 | 189,237 | 4.510052 | 0.074357 | 0.002753 | 0.002972 | 0.002418 | 0.79936 | 0.788019 | 0.778357 | 0.767024 | 0.758932 | 0.750369 | 0 | 0.03271 | 0.196267 | 189,237 | 3,640 | 690 | 51.988187 | 0.748915 | 0.722153 | 0 | 0.602787 | 0 | 0.004646 | 0.17423 | 0.024342 | 0.003484 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.026713 | 0 | 0.026713 | 0.003484 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 5 |
9c54d30ad9874895ce0e8cdbc5ad3325efc82f0f | 96 | py | Python | venv/lib/python3.8/site-packages/charset_normalizer/models.py | GiulianaPola/select_repeats | 17a0d053d4f874e42cf654dd142168c2ec8fbd11 | [
"MIT"
] | 1 | 2022-02-22T04:49:18.000Z | 2022-02-22T04:49:18.000Z | venv/lib/python3.8/site-packages/charset_normalizer/models.py | GiulianaPola/select_repeats | 17a0d053d4f874e42cf654dd142168c2ec8fbd11 | [
"MIT"
] | null | null | null | venv/lib/python3.8/site-packages/charset_normalizer/models.py | GiulianaPola/select_repeats | 17a0d053d4f874e42cf654dd142168c2ec8fbd11 | [
"MIT"
] | null | null | null | /home/runner/.cache/pip/pool/c0/e6/c3/73da9ac73f39c64aa1e94a334e92f494b910baf9253b8edef35951f3d1 | 96 | 96 | 0.895833 | 9 | 96 | 9.555556 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.395833 | 0 | 96 | 1 | 96 | 96 | 0.5 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | null | 0 | 0 | null | null | 0 | 1 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 1 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 5 |
9c724b5cb1abb2ac2a88680ec9fa9ed5ee2cf6fe | 543 | py | Python | Day4/occurence_of_list.py | tushartrip1010/100_days_code_py | ee74b429e98cdd8bdf8661cf987da67c9fee5a3e | [
"Apache-2.0"
] | null | null | null | Day4/occurence_of_list.py | tushartrip1010/100_days_code_py | ee74b429e98cdd8bdf8661cf987da67c9fee5a3e | [
"Apache-2.0"
] | null | null | null | Day4/occurence_of_list.py | tushartrip1010/100_days_code_py | ee74b429e98cdd8bdf8661cf987da67c9fee5a3e | [
"Apache-2.0"
] | null | null | null | # Approach 1:
# Approach 1: tally matches with a generator expression fed to sum().
def Occurence_of_Element(Given_List, Element):
    """Return how many times Element appears in Given_List."""
    return sum(1 for item in Given_List if item == Element)


Given_List = [14, 25, 16, 23, 10, 5, 6, 8, 7, 9, 10, 25, 14]
Element = 14
print(Occurence_of_Element(Given_List, Element))
# Approach 2: delegate the tally to the built-in list.count() method.
def Occurence_of_Element(Given_List, Element):
    """Return the number of occurrences of Element within Given_List."""
    occurrences = Given_List.count(Element)
    return occurrences


Given_List = [14, 25, 16, 23, 10, 5, 6, 8, 7, 9, 10, 25, 14]
Element = 14
print(Occurence_of_Element(Given_List, Element))
| 22.625 | 61 | 0.631676 | 85 | 543 | 3.847059 | 0.317647 | 0.220183 | 0.244648 | 0.281346 | 0.703364 | 0.703364 | 0.703364 | 0.477064 | 0.477064 | 0.477064 | 0 | 0.12285 | 0.25046 | 543 | 23 | 62 | 23.608696 | 0.68059 | 0.042357 | 0 | 0.571429 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.142857 | false | 0 | 0 | 0.071429 | 0.285714 | 0.142857 | 0 | 0 | 0 | null | 1 | 1 | 1 | 0 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 5 |
9c819239566ab4a3734bcafeb451b28ef4ebf377 | 210 | py | Python | intro/module.py | RobertoRosa7/python | 449f3908a38814ec7ec3b3ce1051b8abe70069d2 | [
"MIT"
] | null | null | null | intro/module.py | RobertoRosa7/python | 449f3908a38814ec7ec3b3ce1051b8abe70069d2 | [
"MIT"
] | 2 | 2020-07-19T15:36:35.000Z | 2022-02-02T20:30:16.000Z | intro/module.py | RobertoRosa7/python | 449f3908a38814ec7ec3b3ce1051b8abe70069d2 | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
def module():
'''
Módulo para realização de abstração dos códigos e reutilização
'''
def media(n1, n2):
return (n1 + n2) / 2
def somar(n1, n2):
return n1 + n2
| 16.153846 | 70 | 0.561905 | 29 | 210 | 4.068966 | 0.689655 | 0.135593 | 0.169492 | 0.20339 | 0.237288 | 0 | 0 | 0 | 0 | 0 | 0 | 0.067114 | 0.290476 | 210 | 12 | 71 | 17.5 | 0.724832 | 0.404762 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.6 | false | 0 | 0 | 0.4 | 1 | 0 | 1 | 0 | 0 | null | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 5 |
92c76d9192f308e7d097d61559a7f210e8613b1f | 33 | py | Python | web/__init__.py | daqbroker/daqbrokerServer | e8d2b72b4e3ab12c26dfa7b52e9d77097ede3f33 | [
"MIT"
] | null | null | null | web/__init__.py | daqbroker/daqbrokerServer | e8d2b72b4e3ab12c26dfa7b52e9d77097ede3f33 | [
"MIT"
] | null | null | null | web/__init__.py | daqbroker/daqbrokerServer | e8d2b72b4e3ab12c26dfa7b52e9d77097ede3f33 | [
"MIT"
] | null | null | null | from .web_server import WebServer | 33 | 33 | 0.878788 | 5 | 33 | 5.6 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.090909 | 33 | 1 | 33 | 33 | 0.933333 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 1 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | 5 |
92d5824ebc518c00bb9f5f3a9c2ca14ea91c55dd | 123 | py | Python | src/cmds/sdr/__init__.py | ssc1729/ipmitool-sdr-mock | 4cc6edbbb1417a35ac3c0aff30b8d2d5c6b8be76 | [
"MIT"
] | null | null | null | src/cmds/sdr/__init__.py | ssc1729/ipmitool-sdr-mock | 4cc6edbbb1417a35ac3c0aff30b8d2d5c6b8be76 | [
"MIT"
] | null | null | null | src/cmds/sdr/__init__.py | ssc1729/ipmitool-sdr-mock | 4cc6edbbb1417a35ac3c0aff30b8d2d5c6b8be76 | [
"MIT"
] | null | null | null | from cmds.sdr.sdr import SDRMock
from cmds.sdr.sdr_elist import SDRElistMock
# Public API for `from cmds.sdr import *`.
# __all__ must list the *names* of the exported objects as strings; the
# original listed the class objects themselves, which makes a star-import
# raise "TypeError: Item in __all__ must be str".
__all__ = [
    "SDRMock",
    "SDRElistMock",
]
| 13.666667 | 43 | 0.739837 | 16 | 123 | 5.375 | 0.5 | 0.186047 | 0.255814 | 0.325581 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.195122 | 123 | 8 | 44 | 15.375 | 0.868687 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.333333 | 0 | 0.333333 | 0 | 1 | 0 | 0 | null | 0 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 5 |
13160044ef06e5c113bd47fb9ba116a12c048a20 | 2,460 | py | Python | activecampaign/Account.py | drewjaja/active-campaign-python | 07b07485776ceb893d95ff4bac67572f3c9b21a5 | [
"MIT"
] | 4 | 2018-03-20T15:21:49.000Z | 2018-10-10T20:52:47.000Z | activecampaign/Account.py | drewjaja/active-campaign-python | 07b07485776ceb893d95ff4bac67572f3c9b21a5 | [
"MIT"
] | 1 | 2018-03-20T17:54:22.000Z | 2018-03-20T19:58:01.000Z | activecampaign/Account.py | drewjaja/active-campaign-python | 07b07485776ceb893d95ff4bac67572f3c9b21a5 | [
"MIT"
] | 3 | 2018-03-20T14:47:43.000Z | 2018-07-17T06:56:27.000Z | from .ActiveCampaign import (
ActiveCampaign,
fmt_params,
fmt_noparams
)
import requests as rq
class Account(ActiveCampaign):
    """Client for the ActiveCampaign "account" API action group.

    Each public method maps one-to-one onto an API action. The original
    implementation repeated the URL-building/request boilerplate in every
    method; that logic is factored into the private ``_get``/``_post``
    helpers below. All public signatures are unchanged: every method keeps
    ``(params, post_data={})`` even where one argument is unused, mirroring
    the original interface so existing callers are unaffected.
    """

    def __init__(self, url, api_key):
        self.url = url
        self.api_key = api_key
        super(Account, self).__init__(url, api_key)

    def _get(self, action, params):
        # Build a GET URL whose query string is derived from `params`
        # (plus the API action and output format) and issue the request.
        rq_url = fmt_params(self.url, action, self.output, params)
        return rq.get(rq_url)

    def _post(self, action, post_data):
        # Build a parameter-less URL for `action` and POST `post_data`
        # as the form-encoded body.
        rq_url = fmt_noparams(self.url, action, self.output)
        return rq.post(rq_url, data=post_data)

    def add(self, params, post_data={}):
        """Create an account (POST; `params` is accepted but unused,
        as in the original implementation)."""
        return self._post('account_add', post_data)

    def cancel(self, params, post_data={}):
        """Cancel an account (GET with `params`)."""
        return self._get('account_cancel', params)

    def edit(self, params, post_data={}):
        """Edit an account (POST; `params` unused, as in the original)."""
        return self._post('account_edit', post_data)

    def list_(self, params, post_data={}):
        """List accounts (GET). Trailing underscore avoids shadowing list()."""
        return self._get('account_list', params)

    def name_check(self, params, post_data={}):
        """Check availability of an account name (GET)."""
        return self._get('account_name_check', params)

    def plans(self, params, post_data={}):
        """Retrieve the available account plans (GET)."""
        return self._get('account_plans', params)

    def status(self, params, post_data={}):
        """Retrieve the account status (GET)."""
        return self._get('account_status', params)

    def status_set(self, params, post_data={}):
        """Set the account status (GET)."""
        return self._get('account_status_set', params)

    def view(self, params, post_data={}):
        """View account details.

        Note: the original builds the URL with fmt_noparams and issues a GET,
        so both `params` and `post_data` are ignored; preserved as-is.
        """
        rq_url = fmt_noparams(self.url, 'account_view', self.output)
        return rq.get(rq_url)
"""
## view
#print ac.api('account/view')
"""
| 22.777778 | 51 | 0.510163 | 265 | 2,460 | 4.486792 | 0.135849 | 0.075694 | 0.105971 | 0.136249 | 0.755257 | 0.755257 | 0.755257 | 0.728343 | 0.728343 | 0.728343 | 0 | 0 | 0.393089 | 2,460 | 107 | 52 | 22.990654 | 0.796383 | 0 | 0 | 0.573034 | 0 | 0 | 0.051516 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.11236 | false | 0 | 0.022472 | 0 | 0.247191 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 5 |
1352aefdba968a5c9196a4c705d88b444524f52d | 34 | py | Python | rough_draft_work_sqrt2/sqrt3.py | g00387822/Machine_Learning | 015f08ff64e337201aa9e0d125d325d837a429ad | [
"MIT"
] | null | null | null | rough_draft_work_sqrt2/sqrt3.py | g00387822/Machine_Learning | 015f08ff64e337201aa9e0d125d325d837a429ad | [
"MIT"
] | null | null | null | rough_draft_work_sqrt2/sqrt3.py | g00387822/Machine_Learning | 015f08ff64e337201aa9e0d125d325d837a429ad | [
"MIT"
] | null | null | null | A1 = 2
A2 = A1*(1/2)
print(A2)
| 4.857143 | 13 | 0.470588 | 8 | 34 | 2 | 0.625 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.291667 | 0.294118 | 34 | 6 | 14 | 5.666667 | 0.375 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0 | 0 | 0 | 0.333333 | 1 | 1 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 1 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 5 |
13575eef5590e397f9fb7f3f565d8166c6f0f931 | 51 | py | Python | gym/wrappers/__init__.py | JialinMao/gym-ww | dad1b3313e5e5c767c189f2f43ace8097f2ff7bf | [
"MIT"
] | 6 | 2017-01-30T22:06:12.000Z | 2020-02-18T08:56:27.000Z | gym/wrappers/__init__.py | JialinMao/gym-ww | dad1b3313e5e5c767c189f2f43ace8097f2ff7bf | [
"MIT"
] | null | null | null | gym/wrappers/__init__.py | JialinMao/gym-ww | dad1b3313e5e5c767c189f2f43ace8097f2ff7bf | [
"MIT"
] | 9 | 2016-10-04T13:51:28.000Z | 2020-10-14T13:42:09.000Z | from gym.wrappers.frame_skipping import SkipWrapper | 51 | 51 | 0.901961 | 7 | 51 | 6.428571 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.058824 | 51 | 1 | 51 | 51 | 0.9375 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 5 |
136c61433f1892b24229694fe15d81913e33361b | 210 | py | Python | src/Lib/browser/__init__.py | NUS-ALSET/ace-react-redux-brython | d009490263c5716a145d9691cd59bfcd5aff837a | [
"MIT"
] | 1 | 2021-08-05T12:45:39.000Z | 2021-08-05T12:45:39.000Z | src/Lib/browser/__init__.py | NUS-ALSET/ace-react-redux-brython | d009490263c5716a145d9691cd59bfcd5aff837a | [
"MIT"
] | null | null | null | src/Lib/browser/__init__.py | NUS-ALSET/ace-react-redux-brython | d009490263c5716a145d9691cd59bfcd5aff837a | [
"MIT"
] | 1 | 2019-09-05T08:20:07.000Z | 2019-09-05T08:20:07.000Z | import javascript
from _browser import *
from .local_storage import LocalStorage
from .session_storage import SessionStorage
from .object_storage import ObjectStorage
WebSocket = window.WebSocket.new | 23.333333 | 44 | 0.819048 | 24 | 210 | 7 | 0.583333 | 0.232143 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.147619 | 210 | 9 | 45 | 23.333333 | 0.938547 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.833333 | 0 | 0.833333 | 0 | 1 | 0 | 0 | null | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 0 | 5 |
b91202683d15430b1410b1ebfaa5517b6e1be9e8 | 370 | py | Python | tests/test_main.py | jeremad/looper | 8510d4cfceeee3821f7964cd7aab98c19e2639dd | [
"MIT"
] | 3 | 2019-02-18T14:29:21.000Z | 2020-02-07T22:06:04.000Z | tests/test_main.py | jeremad/looper | 8510d4cfceeee3821f7964cd7aab98c19e2639dd | [
"MIT"
] | 8 | 2019-02-15T10:41:26.000Z | 2019-11-26T17:30:39.000Z | tests/test_main.py | jeremad/looper | 8510d4cfceeee3821f7964cd7aab98c19e2639dd | [
"MIT"
] | 1 | 2019-06-11T09:28:18.000Z | 2019-06-11T09:28:18.000Z | import pytest
import py_loop.main as looper_main
def test_version() -> None:
    """Smoke-test that the CLI accepts the --version flag.

    NOTE(review): argparse's built-in --version action raises SystemExit;
    this test only passes if py_loop's main returns normally for the
    flag — confirm against the CLI implementation.
    """
    looper_main.main(["--version"])
def test_no_cmd() -> None:
    """Running with an empty command must abort with exit code 1."""
    with pytest.raises(SystemExit) as e:
        looper_main.main([""])
    assert e.value.code == 1
def test_cmd() -> None:
    """`ls` must run once whether --max-tries precedes or follows it."""
    looper_main.main(["ls", "--max-tries", "1"])
    looper_main.main(["--max-tries", "1", "ls"])
| 19.473684 | 48 | 0.618919 | 54 | 370 | 4.055556 | 0.444444 | 0.228311 | 0.255708 | 0.164384 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.009967 | 0.186486 | 370 | 18 | 49 | 20.555556 | 0.717608 | 0 | 0 | 0 | 0 | 0 | 0.1 | 0 | 0 | 0 | 0 | 0 | 0.090909 | 1 | 0.272727 | true | 0 | 0.181818 | 0 | 0.454545 | 0 | 0 | 0 | 0 | null | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 5 |
b91fa8efc40b7bf908edaffb9a1c8689151594aa | 405 | py | Python | rlutils/tf/nn/__init__.py | vermouth1992/rl-util | 4c06ab8f5c96a44e58f88cf30146bcb837057112 | [
"Apache-2.0"
] | null | null | null | rlutils/tf/nn/__init__.py | vermouth1992/rl-util | 4c06ab8f5c96a44e58f88cf30146bcb837057112 | [
"Apache-2.0"
] | null | null | null | rlutils/tf/nn/__init__.py | vermouth1992/rl-util | 4c06ab8f5c96a44e58f88cf30146bcb837057112 | [
"Apache-2.0"
] | null | null | null | from .actors import SquashedGaussianMLPActor, CenteredBetaMLPActor, NormalActor, \
TruncatedNormalActor, CategoricalActor, DeterministicMLPActor
from .behavior import BehaviorPolicy, EnsembleBehaviorPolicy
from .functional import build_mlp
from .layers import SqueezeLayer, EnsembleDense, LagrangeLayer
from .models import EnsembleWorldModel
from .values import EnsembleMinQNet, AtariQNetworkDeepMind
| 50.625 | 82 | 0.866667 | 34 | 405 | 10.294118 | 0.705882 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.093827 | 405 | 7 | 83 | 57.857143 | 0.953678 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 0.857143 | 0 | 0.857143 | 0 | 0 | 0 | 1 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 5 |
b9505bd5a38eb305c0ffda40e43a11e97c530d4e | 51 | py | Python | pm4pymdl/algo/mvp/__init__.py | dorian1000/pm4py-mdl | 71e0c2425abb183da293a58d31e25e50137c774f | [
"MIT"
] | 5 | 2021-01-31T22:45:29.000Z | 2022-02-22T14:26:06.000Z | pm4pymdl/algo/mvp/__init__.py | Javert899/pm4py-mdl | 4cc875999100f3f1ad60b925a20e40cf52337757 | [
"MIT"
] | 3 | 2021-07-07T15:32:55.000Z | 2021-07-07T16:15:36.000Z | pm4pymdl/algo/mvp/__init__.py | dorian1000/pm4py-mdl | 71e0c2425abb183da293a58d31e25e50137c774f | [
"MIT"
] | 9 | 2020-09-23T15:34:11.000Z | 2022-03-17T09:15:40.000Z | from pm4pymdl.algo.mvp import gen_framework, utils
| 25.5 | 50 | 0.843137 | 8 | 51 | 5.25 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.021739 | 0.098039 | 51 | 1 | 51 | 51 | 0.891304 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 5 |
b969d91b89834f2ccd59a7837792cbe6aa59112e | 63 | py | Python | src/git_test_jenny.py | money4marry/git-test | aa9247bdf648ddbde5939299d3eb4bbb0b60cecf | [
"MIT"
] | null | null | null | src/git_test_jenny.py | money4marry/git-test | aa9247bdf648ddbde5939299d3eb4bbb0b60cecf | [
"MIT"
] | null | null | null | src/git_test_jenny.py | money4marry/git-test | aa9247bdf648ddbde5939299d3eb4bbb0b60cecf | [
"MIT"
] | null | null | null | #2022/5/29
#git_test_jenny
print('hello world liuzheng5555')
| 10.5 | 33 | 0.761905 | 10 | 63 | 4.6 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.196429 | 0.111111 | 63 | 5 | 34 | 12.6 | 0.625 | 0.365079 | 0 | 0 | 0 | 0 | 0.648649 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 1 | 0 | 5 |
b970bc35d0d2154dfb8c9a6531c42f9069dc7d32 | 39 | py | Python | tests/__init__.py | dkedar7/fast_dash | 4e4c52801ed430cd84371451c12b4028704993a4 | [
"MIT"
] | 7 | 2022-01-31T12:47:48.000Z | 2022-03-29T23:33:22.000Z | tests/__init__.py | dkedar7/fast_dash | 4e4c52801ed430cd84371451c12b4028704993a4 | [
"MIT"
] | 1 | 2022-03-22T20:38:38.000Z | 2022-03-22T20:38:38.000Z | tests/__init__.py | dkedar7/fast_dash | 4e4c52801ed430cd84371451c12b4028704993a4 | [
"MIT"
] | null | null | null | """Unit test package for fast_dash."""
| 19.5 | 38 | 0.692308 | 6 | 39 | 4.333333 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.128205 | 39 | 1 | 39 | 39 | 0.764706 | 0.820513 | 0 | null | 0 | null | 0 | 0 | null | 0 | 0 | 0 | null | 1 | null | true | 0 | 0 | null | null | null | 1 | 1 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 5 |
b97860b3009537332cd0f15c3d784797cb916ea3 | 39,723 | py | Python | st3/mdpopups/pygments/lexers/_cocoa_builtins.py | evandroforks/mdpopups | b32849c827182b35482f8b41b44a2e2166e2356a | [
"MIT"
] | 182 | 2017-03-05T07:43:13.000Z | 2022-03-15T13:09:07.000Z | st3/mdpopups/pygments/lexers/_cocoa_builtins.py | evandroforks/mdpopups | b32849c827182b35482f8b41b44a2e2166e2356a | [
"MIT"
] | 117 | 2015-11-03T15:42:51.000Z | 2022-02-13T17:13:21.000Z | st3/mdpopups/pygments/lexers/_cocoa_builtins.py | evandroforks/mdpopups | b32849c827182b35482f8b41b44a2e2166e2356a | [
"MIT"
] | 24 | 2016-10-09T05:34:21.000Z | 2022-02-05T11:51:36.000Z | # -*- coding: utf-8 -*-
"""
pygments.lexers._cocoa_builtins
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
This file defines a set of types used across Cocoa frameworks from Apple.
There is a list of @interfaces, @protocols and some other (structs, unions)
File may be also used as standalone generator for aboves.
:copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
from __future__ import print_function
COCOA_INTERFACES = set(['UITableViewCell', 'HKCorrelationQuery', 'NSURLSessionDataTask', 'PHFetchOptions', 'NSLinguisticTagger', 'NSStream', 'AVAudioUnitDelay', 'GCMotion', 'SKPhysicsWorld', 'NSString', 'CMAttitude', 'AVAudioEnvironmentDistanceAttenuationParameters', 'HKStatisticsCollection', 'SCNPlane', 'CBPeer', 'JSContext', 'SCNTransaction', 'SCNTorus', 'AVAudioUnitEffect', 'UICollectionReusableView', 'MTLSamplerDescriptor', 'AVAssetReaderSampleReferenceOutput', 'AVMutableCompositionTrack', 'GKLeaderboard', 'NSFetchedResultsController', 'SKRange', 'MKTileOverlayRenderer', 'MIDINetworkSession', 'UIVisualEffectView', 'CIWarpKernel', 'PKObject', 'MKRoute', 'MPVolumeView', 'UIPrintInfo', 'SCNText', 'ADClient', 'UIKeyCommand', 'AVMutableAudioMix', 'GLKEffectPropertyLight', 'WKScriptMessage', 'AVMIDIPlayer', 'PHCollectionListChangeRequest', 'UICollectionViewLayout', 'NSMutableCharacterSet', 'SKPaymentTransaction', 'NEOnDemandRuleConnect', 'NSShadow', 'SCNView', 'NSURLSessionConfiguration', 'MTLVertexAttributeDescriptor', 'CBCharacteristic', 'HKQuantityType', 'CKLocationSortDescriptor', 'NEVPNIKEv2SecurityAssociationParameters', 'CMStepCounter', 'NSNetService', 'AVAssetWriterInputMetadataAdaptor', 'UICollectionView', 'UIViewPrintFormatter', 'SCNLevelOfDetail', 'CAShapeLayer', 'MCPeerID', 'MPRatingCommand', 'WKNavigation', 'NSDictionary', 'NSFileVersion', 'CMGyroData', 'AVAudioUnitDistortion', 'CKFetchRecordsOperation', 'SKPhysicsJointSpring', 'SCNHitTestResult', 'AVAudioTime', 'CIFilter', 'UIView', 'SCNConstraint', 'CAPropertyAnimation', 'MKMapItem', 'MPRemoteCommandCenter', 'UICollectionViewFlowLayoutInvalidationContext', 'UIInputViewController', 'PKPass', 'SCNPhysicsBehavior', 'MTLRenderPassColorAttachmentDescriptor', 'MKPolygonRenderer', 'CKNotification', 'JSValue', 'PHCollectionList', 'CLGeocoder', 'NSByteCountFormatter', 'AVCaptureScreenInput', 'MPFeedbackCommand', 'CAAnimation', 'MKOverlayPathView', 'UIActionSheet', 'UIMotionEffectGroup', 'NSLengthFormatter', 
'UIBarItem', 'SKProduct', 'AVAssetExportSession', 'NSKeyedUnarchiver', 'NSMutableSet', 'SCNPyramid', 'PHAssetCollection', 'MKMapView', 'HMHomeManager', 'CATransition', 'MTLCompileOptions', 'UIVibrancyEffect', 'CLCircularRegion', 'MKTileOverlay', 'SCNShape', 'ACAccountCredential', 'SKPhysicsJointLimit', 'MKMapSnapshotter', 'AVMediaSelectionGroup', 'NSIndexSet', 'CBPeripheralManager', 'CKRecordZone', 'AVAudioRecorder', 'NSURL', 'CBCentral', 'NSNumber', 'AVAudioOutputNode', 'MTLVertexAttributeDescriptorArray', 'MKETAResponse', 'SKTransition', 'SSReadingList', 'HKSourceQuery', 'UITableViewRowAction', 'UITableView', 'SCNParticlePropertyController', 'AVCaptureStillImageOutput', 'GCController', 'AVAudioPlayerNode', 'AVAudioSessionPortDescription', 'NSHTTPURLResponse', 'NEOnDemandRuleEvaluateConnection', 'SKEffectNode', 'HKQuantity', 'GCControllerElement', 'AVPlayerItemAccessLogEvent', 'SCNBox', 'NSExtensionContext', 'MKOverlayRenderer', 'SCNPhysicsVehicle', 'NSDecimalNumber', 'EKReminder', 'MKPolylineView', 'CKQuery', 'AVAudioMixerNode', 'GKAchievementDescription', 'EKParticipant', 'NSBlockOperation', 'UIActivityItemProvider', 'CLLocation', 'NSBatchUpdateRequest', 'PHContentEditingOutput', 'PHObjectChangeDetails', 'MPMoviePlayerController', 'AVAudioFormat', 'HMTrigger', 'MTLRenderPassDepthAttachmentDescriptor', 'SCNRenderer', 'GKScore', 'UISplitViewController', 'HKSource', 'NSURLConnection', 'ABUnknownPersonViewController', 'SCNTechnique', 'UIMenuController', 'NSEvent', 'SKTextureAtlas', 'NSKeyedArchiver', 'GKLeaderboardSet', 'NSSimpleCString', 'AVAudioPCMBuffer', 'CBATTRequest', 'GKMatchRequest', 'AVMetadataObject', 'SKProductsRequest', 'UIAlertView', 'NSIncrementalStore', 'MFMailComposeViewController', 'SCNFloor', 'NSSortDescriptor', 'CKFetchNotificationChangesOperation', 'MPMovieAccessLog', 'NSManagedObjectContext', 'AVAudioUnitGenerator', 'WKBackForwardList', 'SKMutableTexture', 'AVCaptureAudioDataOutput', 'ACAccount', 'AVMetadataItem', 'MPRatingCommandEvent', 
'AVCaptureDeviceInputSource', 'CLLocationManager', 'MPRemoteCommand', 'AVCaptureSession', 'UIStepper', 'UIRefreshControl', 'NEEvaluateConnectionRule', 'CKModifyRecordsOperation', 'UICollectionViewTransitionLayout', 'CBCentralManager', 'NSPurgeableData', 'SLComposeViewController', 'NSHashTable', 'MKUserTrackingBarButtonItem', 'UILexiconEntry', 'CMMotionActivity', 'SKAction', 'SKShader', 'AVPlayerItemOutput', 'MTLRenderPassAttachmentDescriptor', 'UIDocumentInteractionController', 'UIDynamicItemBehavior', 'NSMutableDictionary', 'UILabel', 'AVCaptureInputPort', 'NSExpression', 'CAInterAppAudioTransportView', 'SKMutablePayment', 'UIImage', 'PHCachingImageManager', 'SCNTransformConstraint', 'UIColor', 'SCNGeometrySource', 'AVCaptureAutoExposureBracketedStillImageSettings', 'UIPopoverBackgroundView', 'UIToolbar', 'NSNotificationCenter', 'AVAssetReaderOutputMetadataAdaptor', 'NSEntityMigrationPolicy', 'NSLocale', 'NSURLSession', 'SCNCamera', 'NSTimeZone', 'UIManagedDocument', 'AVMutableVideoCompositionLayerInstruction', 'AVAssetTrackGroup', 'NSInvocationOperation', 'ALAssetRepresentation', 'AVQueuePlayer', 'HMServiceGroup', 'UIPasteboard', 'PHContentEditingInput', 'NSLayoutManager', 'EKCalendarChooser', 'EKObject', 'CATiledLayer', 'GLKReflectionMapEffect', 'NSManagedObjectID', 'NSEnergyFormatter', 'SLRequest', 'HMCharacteristic', 'AVPlayerLayer', 'MTLRenderPassDescriptor', 'SKPayment', 'NSPointerArray', 'AVAudioMix', 'SCNLight', 'MCAdvertiserAssistant', 'MKMapSnapshotOptions', 'HKCategorySample', 'AVAudioEnvironmentReverbParameters', 'SCNMorpher', 'AVTimedMetadataGroup', 'CBMutableCharacteristic', 'NSFetchRequest', 'UIDevice', 'NSManagedObject', 'NKAssetDownload', 'AVOutputSettingsAssistant', 'SKPhysicsJointPin', 'UITabBar', 'UITextInputMode', 'NSFetchRequestExpression', 'HMActionSet', 'CTSubscriber', 'PHAssetChangeRequest', 'NSPersistentStoreRequest', 'UITabBarController', 'HKQuantitySample', 'AVPlayerItem', 'AVSynchronizedLayer', 'MKDirectionsRequest', 'NSMetadataItem', 
'UIPresentationController', 'UINavigationItem', 'PHFetchResultChangeDetails', 'PHImageManager', 'AVCaptureManualExposureBracketedStillImageSettings', 'UIStoryboardPopoverSegue', 'SCNLookAtConstraint', 'UIGravityBehavior', 'UIWindow', 'CBMutableDescriptor', 'NEOnDemandRuleDisconnect', 'UIBezierPath', 'UINavigationController', 'ABPeoplePickerNavigationController', 'EKSource', 'AVAssetWriterInput', 'AVPlayerItemTrack', 'GLKEffectPropertyTexture', 'NSURLResponse', 'SKPaymentQueue', 'NSAssertionHandler', 'MKReverseGeocoder', 'GCControllerAxisInput', 'NSArray', 'NSOrthography', 'NSURLSessionUploadTask', 'NSCharacterSet', 'AVMutableVideoCompositionInstruction', 'AVAssetReaderOutput', 'EAGLContext', 'WKFrameInfo', 'CMPedometer', 'MyClass', 'CKModifyBadgeOperation', 'AVCaptureAudioFileOutput', 'SKEmitterNode', 'NSMachPort', 'AVVideoCompositionCoreAnimationTool', 'PHCollection', 'SCNPhysicsWorld', 'NSURLRequest', 'CMAccelerometerData', 'NSNetServiceBrowser', 'CLFloor', 'AVAsynchronousVideoCompositionRequest', 'SCNGeometry', 'SCNIKConstraint', 'CIKernel', 'CAGradientLayer', 'HKCharacteristicType', 'NSFormatter', 'SCNAction', 'CATransaction', 'CBUUID', 'UIStoryboard', 'MPMediaLibrary', 'UITapGestureRecognizer', 'MPMediaItemArtwork', 'NSURLSessionTask', 'AVAudioUnit', 'MCBrowserViewController', 'NSRelationshipDescription', 'HKSample', 'WKWebView', 'NSMutableAttributedString', 'NSPersistentStoreAsynchronousResult', 'MPNowPlayingInfoCenter', 'MKLocalSearch', 'EAAccessory', 'HKCorrelation', 'CATextLayer', 'NSNotificationQueue', 'UINib', 'GLKTextureLoader', 'HKObjectType', 'NSValue', 'NSMutableIndexSet', 'SKPhysicsContact', 'NSProgress', 'AVPlayerViewController', 'CAScrollLayer', 'GKSavedGame', 'NSTextCheckingResult', 'PHObjectPlaceholder', 'SKConstraint', 'EKEventEditViewController', 'NSEntityDescription', 'NSURLCredentialStorage', 'UIApplication', 'SKDownload', 'SCNNode', 'MKLocalSearchRequest', 'SKScene', 'UISearchDisplayController', 'NEOnDemandRule', 
'MTLRenderPassStencilAttachmentDescriptor', 'CAReplicatorLayer', 'UIPrintPageRenderer', 'EKCalendarItem', 'NSUUID', 'EAAccessoryManager', 'NEOnDemandRuleIgnore', 'SKRegion', 'AVAssetResourceLoader', 'EAWiFiUnconfiguredAccessoryBrowser', 'NSUserActivity', 'CTCall', 'UIPrinterPickerController', 'CIVector', 'UINavigationBar', 'UIPanGestureRecognizer', 'MPMediaQuery', 'ABNewPersonViewController', 'CKRecordZoneID', 'HKAnchoredObjectQuery', 'CKFetchRecordZonesOperation', 'UIStoryboardSegue', 'ACAccountType', 'GKSession', 'SKVideoNode', 'PHChange', 'SKReceiptRefreshRequest', 'GCExtendedGamepadSnapshot', 'MPSeekCommandEvent', 'GCExtendedGamepad', 'CAValueFunction', 'SCNCylinder', 'NSNotification', 'NSBatchUpdateResult', 'PKPushCredentials', 'SCNPhysicsSliderJoint', 'AVCaptureDeviceFormat', 'AVPlayerItemErrorLog', 'NSMapTable', 'NSSet', 'CMMotionManager', 'GKVoiceChatService', 'UIPageControl', 'UILexicon', 'MTLArrayType', 'AVAudioUnitReverb', 'MKGeodesicPolyline', 'AVMutableComposition', 'NSLayoutConstraint', 'UIPrinter', 'NSOrderedSet', 'CBAttribute', 'PKPushPayload', 'NSIncrementalStoreNode', 'EKEventStore', 'MPRemoteCommandEvent', 'UISlider', 'UIBlurEffect', 'CKAsset', 'AVCaptureInput', 'AVAudioEngine', 'MTLVertexDescriptor', 'SKPhysicsBody', 'NSOperation', 'UIImageAsset', 'MKMapCamera', 'SKProductsResponse', 'GLKEffectPropertyMaterial', 'AVCaptureDevice', 'CTCallCenter', 'CABTMIDILocalPeripheralViewController', 'NEVPNManager', 'HKQuery', 'SCNPhysicsContact', 'CBMutableService', 'AVSampleBufferDisplayLayer', 'SCNSceneSource', 'SKLightNode', 'CKDiscoveredUserInfo', 'NSMutableArray', 'MTLDepthStencilDescriptor', 'MTLArgument', 'NSMassFormatter', 'CIRectangleFeature', 'PKPushRegistry', 'NEVPNConnection', 'MCNearbyServiceBrowser', 'NSOperationQueue', 'MKPolylineRenderer', 'UICollectionViewLayoutAttributes', 'NSValueTransformer', 'UICollectionViewFlowLayout', 'CIBarcodeFeature', 'MPChangePlaybackRateCommandEvent', 'NSEntityMapping', 'SKTexture', 'NSMergePolicy', 
'UITextInputStringTokenizer', 'NSRecursiveLock', 'AVAsset', 'NSUndoManager', 'AVAudioUnitSampler', 'NSItemProvider', 'SKUniform', 'MPMediaPickerController', 'CKOperation', 'MTLRenderPipelineDescriptor', 'EAWiFiUnconfiguredAccessory', 'NSFileCoordinator', 'SKRequest', 'NSFileHandle', 'NSConditionLock', 'UISegmentedControl', 'NSManagedObjectModel', 'UITabBarItem', 'SCNCone', 'MPMediaItem', 'SCNMaterial', 'EKRecurrenceRule', 'UIEvent', 'UITouch', 'UIPrintInteractionController', 'CMDeviceMotion', 'NEVPNProtocol', 'NSCompoundPredicate', 'HKHealthStore', 'MKMultiPoint', 'HKSampleType', 'UIPrintFormatter', 'AVAudioUnitEQFilterParameters', 'SKView', 'NSConstantString', 'UIPopoverController', 'CKDatabase', 'AVMetadataFaceObject', 'UIAccelerometer', 'EKEventViewController', 'CMAltitudeData', 'MTLStencilDescriptor', 'UISwipeGestureRecognizer', 'NSPort', 'MKCircleRenderer', 'AVCompositionTrack', 'NSAsynchronousFetchRequest', 'NSUbiquitousKeyValueStore', 'NSMetadataQueryResultGroup', 'AVAssetResourceLoadingDataRequest', 'UITableViewHeaderFooterView', 'CKNotificationID', 'AVAudioSession', 'HKUnit', 'NSNull', 'NSPersistentStoreResult', 'MKCircleView', 'AVAudioChannelLayout', 'NEVPNProtocolIKEv2', 'WKProcessPool', 'UIAttachmentBehavior', 'CLBeacon', 'NSInputStream', 'NSURLCache', 'GKPlayer', 'NSMappingModel', 'NSHTTPCookie', 'AVMutableVideoComposition', 'PHFetchResult', 'NSAttributeDescription', 'AVPlayer', 'MKAnnotationView', 'UIFontDescriptor', 'NSTimer', 'CBDescriptor', 'MKOverlayView', 'AVAudioUnitTimePitch', 'NSSaveChangesRequest', 'UIReferenceLibraryViewController', 'SKPhysicsJointFixed', 'UILocalizedIndexedCollation', 'UIInterpolatingMotionEffect', 'UIDocumentPickerViewController', 'AVAssetWriter', 'NSBundle', 'SKStoreProductViewController', 'GLKViewController', 'NSMetadataQueryAttributeValueTuple', 'GKTurnBasedMatch', 'AVAudioFile', 'UIActivity', 'NSPipe', 'MKShape', 'NSMergeConflict', 'CIImage', 'HKObject', 'UIRotationGestureRecognizer', 'AVPlayerItemLegibleOutput', 
'AVAssetImageGenerator', 'GCControllerButtonInput', 'CKMarkNotificationsReadOperation', 'CKSubscription', 'MPTimedMetadata', 'NKIssue', 'UIScreenMode', 'HMAccessoryBrowser', 'GKTurnBasedEventHandler', 'UIWebView', 'MKPolyline', 'JSVirtualMachine', 'AVAssetReader', 'NSAttributedString', 'GKMatchmakerViewController', 'NSCountedSet', 'UIButton', 'WKNavigationResponse', 'GKLocalPlayer', 'MPMovieErrorLog', 'AVSpeechUtterance', 'HKStatistics', 'UILocalNotification', 'HKBiologicalSexObject', 'AVURLAsset', 'CBPeripheral', 'NSDateComponentsFormatter', 'SKSpriteNode', 'UIAccessibilityElement', 'AVAssetWriterInputGroup', 'HMZone', 'AVAssetReaderAudioMixOutput', 'NSEnumerator', 'UIDocument', 'MKLocalSearchResponse', 'UISimpleTextPrintFormatter', 'PHPhotoLibrary', 'CBService', 'UIDocumentMenuViewController', 'MCSession', 'QLPreviewController', 'CAMediaTimingFunction', 'UITextPosition', 'ASIdentifierManager', 'AVAssetResourceLoadingRequest', 'SLComposeServiceViewController', 'UIPinchGestureRecognizer', 'PHObject', 'NSExtensionItem', 'HKSampleQuery', 'MTLRenderPipelineColorAttachmentDescriptorArray', 'MKRouteStep', 'SCNCapsule', 'NSMetadataQuery', 'AVAssetResourceLoadingContentInformationRequest', 'UITraitCollection', 'CTCarrier', 'NSFileSecurity', 'UIAcceleration', 'UIMotionEffect', 'MTLRenderPipelineReflection', 'CLHeading', 'CLVisit', 'MKDirectionsResponse', 'HMAccessory', 'MTLStructType', 'UITextView', 'CMMagnetometerData', 'UICollisionBehavior', 'UIProgressView', 'CKServerChangeToken', 'UISearchBar', 'MKPlacemark', 'AVCaptureConnection', 'NSPropertyMapping', 'ALAssetsFilter', 'SK3DNode', 'AVPlayerItemErrorLogEvent', 'NSJSONSerialization', 'AVAssetReaderVideoCompositionOutput', 'ABPersonViewController', 'CIDetector', 'GKTurnBasedMatchmakerViewController', 'MPMediaItemCollection', 'SCNSphere', 'NSCondition', 'NSURLCredential', 'MIDINetworkConnection', 'NSFileProviderExtension', 'NSDecimalNumberHandler', 'NSAtomicStoreCacheNode', 'NSAtomicStore', 'EKAlarm', 
'CKNotificationInfo', 'AVAudioUnitEQ', 'UIPercentDrivenInteractiveTransition', 'MKPolygon', 'AVAssetTrackSegment', 'MTLVertexAttribute', 'NSExpressionDescription', 'HKStatisticsCollectionQuery', 'NSURLAuthenticationChallenge', 'NSDirectoryEnumerator', 'MKDistanceFormatter', 'UIAlertAction', 'NSPropertyListSerialization', 'GKPeerPickerController', 'UIUserNotificationSettings', 'UITableViewController', 'GKNotificationBanner', 'MKPointAnnotation', 'MTLRenderPassColorAttachmentDescriptorArray', 'NSCache', 'SKPhysicsJoint', 'NSXMLParser', 'UIViewController', 'MFMessageComposeViewController', 'AVAudioInputNode', 'NSDataDetector', 'CABTMIDICentralViewController', 'AVAudioUnitMIDIInstrument', 'AVCaptureVideoPreviewLayer', 'AVAssetWriterInputPassDescription', 'MPChangePlaybackRateCommand', 'NSURLComponents', 'CAMetalLayer', 'UISnapBehavior', 'AVMetadataMachineReadableCodeObject', 'CKDiscoverUserInfosOperation', 'NSTextAttachment', 'NSException', 'UIMenuItem', 'CMMotionActivityManager', 'SCNGeometryElement', 'NCWidgetController', 'CAEmitterLayer', 'MKUserLocation', 'UIImagePickerController', 'CIFeature', 'AVCaptureDeviceInput', 'ALAsset', 'NSURLSessionDownloadTask', 'SCNPhysicsHingeJoint', 'MPMoviePlayerViewController', 'NSMutableOrderedSet', 'SCNMaterialProperty', 'UIFont', 'AVCaptureVideoDataOutput', 'NSCachedURLResponse', 'ALAssetsLibrary', 'NSInvocation', 'UILongPressGestureRecognizer', 'NSTextStorage', 'WKWebViewConfiguration', 'CIFaceFeature', 'MKMapSnapshot', 'GLKEffectPropertyFog', 'AVComposition', 'CKDiscoverAllContactsOperation', 'AVAudioMixInputParameters', 'CAEmitterBehavior', 'PKPassLibrary', 'UIMutableUserNotificationCategory', 'NSLock', 'NEVPNProtocolIPSec', 'ADBannerView', 'UIDocumentPickerExtensionViewController', 'UIActivityIndicatorView', 'AVPlayerMediaSelectionCriteria', 'CALayer', 'UIAccessibilityCustomAction', 'UIBarButtonItem', 'AVAudioSessionRouteDescription', 'CLBeaconRegion', 'HKBloodTypeObject', 'MTLVertexBufferLayoutDescriptorArray', 
'CABasicAnimation', 'AVVideoCompositionInstruction', 'AVMutableTimedMetadataGroup', 'EKRecurrenceEnd', 'NSTextContainer', 'TWTweetComposeViewController', 'UIScrollView', 'WKNavigationAction', 'AVPlayerItemMetadataOutput', 'EKRecurrenceDayOfWeek', 'NSNumberFormatter', 'MTLComputePipelineReflection', 'UIScreen', 'CLRegion', 'NSProcessInfo', 'GLKTextureInfo', 'SCNSkinner', 'AVCaptureMetadataOutput', 'SCNAnimationEvent', 'NSTextTab', 'JSManagedValue', 'NSDate', 'UITextChecker', 'WKBackForwardListItem', 'NSData', 'NSParagraphStyle', 'AVMutableMetadataItem', 'EKCalendar', 'NSMutableURLRequest', 'UIVideoEditorController', 'HMTimerTrigger', 'AVAudioUnitVarispeed', 'UIDynamicAnimator', 'AVCompositionTrackSegment', 'GCGamepadSnapshot', 'MPMediaEntity', 'GLKSkyboxEffect', 'UISwitch', 'EKStructuredLocation', 'UIGestureRecognizer', 'NSProxy', 'GLKBaseEffect', 'UIPushBehavior', 'GKScoreChallenge', 'NSCoder', 'MPMediaPlaylist', 'NSDateComponents', 'WKUserScript', 'EKEvent', 'NSDateFormatter', 'NSAsynchronousFetchResult', 'AVAssetWriterInputPixelBufferAdaptor', 'UIVisualEffect', 'UICollectionViewCell', 'UITextField', 'CLPlacemark', 'MPPlayableContentManager', 'AVCaptureOutput', 'HMCharacteristicWriteAction', 'CKModifySubscriptionsOperation', 'NSPropertyDescription', 'GCGamepad', 'UIMarkupTextPrintFormatter', 'SCNTube', 'NSPersistentStoreCoordinator', 'AVAudioEnvironmentNode', 'GKMatchmaker', 'CIContext', 'NSThread', 'SLComposeSheetConfigurationItem', 'SKPhysicsJointSliding', 'NSPredicate', 'GKVoiceChat', 'SKCropNode', 'AVCaptureAudioPreviewOutput', 'NSStringDrawingContext', 'GKGameCenterViewController', 'UIPrintPaper', 'SCNPhysicsBallSocketJoint', 'UICollectionViewLayoutInvalidationContext', 'GLKEffectPropertyTransform', 'AVAudioIONode', 'UIDatePicker', 'MKDirections', 'ALAssetsGroup', 'CKRecordZoneNotification', 'SCNScene', 'MPMovieAccessLogEvent', 'CKFetchSubscriptionsOperation', 'CAEmitterCell', 'AVAudioUnitTimeEffect', 'HMCharacteristicMetadata', 'MKPinAnnotationView', 
'UIPickerView', 'UIImageView', 'UIUserNotificationCategory', 'SCNPhysicsVehicleWheel', 'HKCategoryType', 'MPMediaQuerySection', 'GKFriendRequestComposeViewController', 'NSError', 'MTLRenderPipelineColorAttachmentDescriptor', 'SCNPhysicsShape', 'UISearchController', 'SCNPhysicsBody', 'CTSubscriberInfo', 'AVPlayerItemAccessLog', 'MPMediaPropertyPredicate', 'CMLogItem', 'NSAutoreleasePool', 'NSSocketPort', 'AVAssetReaderTrackOutput', 'SKNode', 'UIMutableUserNotificationAction', 'SCNProgram', 'AVSpeechSynthesisVoice', 'CMAltimeter', 'AVCaptureAudioChannel', 'GKTurnBasedExchangeReply', 'AVVideoCompositionLayerInstruction', 'AVSpeechSynthesizer', 'GKChallengeEventHandler', 'AVCaptureFileOutput', 'UIControl', 'SCNPhysicsField', 'CKReference', 'LAContext', 'CKRecordID', 'ADInterstitialAd', 'AVAudioSessionDataSourceDescription', 'AVAudioBuffer', 'CIColorKernel', 'GCControllerDirectionPad', 'NSFileManager', 'AVMutableAudioMixInputParameters', 'UIScreenEdgePanGestureRecognizer', 'CAKeyframeAnimation', 'CKQueryNotification', 'PHAdjustmentData', 'EASession', 'AVAssetResourceRenewalRequest', 'UIInputView', 'NSFileWrapper', 'UIResponder', 'NSPointerFunctions', 'NSHTTPCookieStorage', 'AVMediaSelectionOption', 'NSRunLoop', 'NSFileAccessIntent', 'CAAnimationGroup', 'MKCircle', 'UIAlertController', 'NSMigrationManager', 'NSDateIntervalFormatter', 'UICollectionViewUpdateItem', 'CKDatabaseOperation', 'PHImageRequestOptions', 'SKReachConstraints', 'CKRecord', 'CAInterAppAudioSwitcherView', 'WKWindowFeatures', 'GKInvite', 'NSMutableData', 'PHAssetCollectionChangeRequest', 'NSMutableParagraphStyle', 'UIDynamicBehavior', 'GLKEffectProperty', 'CKFetchRecordChangesOperation', 'SKShapeNode', 'MPMovieErrorLogEvent', 'MKPolygonView', 'MPContentItem', 'HMAction', 'NSScanner', 'GKAchievementChallenge', 'AVAudioPlayer', 'CKContainer', 'AVVideoComposition', 'NKLibrary', 'NSPersistentStore', 'AVCaptureMovieFileOutput', 'HMRoom', 'GKChallenge', 'UITextRange', 'NSURLProtectionSpace', 'ACAccountStore', 
'MPSkipIntervalCommand', 'NSComparisonPredicate', 'HMHome', 'PHVideoRequestOptions', 'NSOutputStream', 'MPSkipIntervalCommandEvent', 'PKAddPassesViewController', 'UITextSelectionRect', 'CTTelephonyNetworkInfo', 'AVTextStyleRule', 'NSFetchedPropertyDescription', 'UIPageViewController', 'CATransformLayer', 'UICollectionViewController', 'AVAudioNode', 'MCNearbyServiceAdvertiser', 'NSObject', 'PHAsset', 'GKLeaderboardViewController', 'CKQueryCursor', 'MPMusicPlayerController', 'MKOverlayPathRenderer', 'CMPedometerData', 'HMService', 'SKFieldNode', 'GKAchievement', 'WKUserContentController', 'AVAssetTrack', 'TWRequest', 'SKLabelNode', 'AVCaptureBracketedStillImageSettings', 'MIDINetworkHost', 'MPMediaPredicate', 'AVFrameRateRange', 'MTLTextureDescriptor', 'MTLVertexBufferLayoutDescriptor', 'MPFeedbackCommandEvent', 'UIUserNotificationAction', 'HKStatisticsQuery', 'SCNParticleSystem', 'NSIndexPath', 'AVVideoCompositionRenderContext', 'CADisplayLink', 'HKObserverQuery', 'UIPopoverPresentationController', 'CKQueryOperation', 'CAEAGLLayer', 'NSMutableString', 'NSMessagePort', 'NSURLQueryItem', 'MTLStructMember', 'AVAudioSessionChannelDescription', 'GLKView', 'UIActivityViewController', 'GKAchievementViewController', 'GKTurnBasedParticipant', 'NSURLProtocol', 'NSUserDefaults', 'NSCalendar', 'SKKeyframeSequence', 'AVMetadataItemFilter', 'CKModifyRecordZonesOperation', 'WKPreferences', 'NSMethodSignature', 'NSRegularExpression', 'EAGLSharegroup', 'AVPlayerItemVideoOutput', 'PHContentEditingInputRequestOptions', 'GKMatch', 'CIColor', 'UIDictationPhrase'])
COCOA_PROTOCOLS = set(['SKStoreProductViewControllerDelegate', 'AVVideoCompositionInstruction', 'AVAudioSessionDelegate', 'GKMatchDelegate', 'NSFileManagerDelegate', 'UILayoutSupport', 'NSCopying', 'UIPrintInteractionControllerDelegate', 'QLPreviewControllerDataSource', 'SKProductsRequestDelegate', 'NSTextStorageDelegate', 'MCBrowserViewControllerDelegate', 'MTLComputeCommandEncoder', 'SCNSceneExportDelegate', 'UISearchResultsUpdating', 'MFMailComposeViewControllerDelegate', 'MTLBlitCommandEncoder', 'NSDecimalNumberBehaviors', 'PHContentEditingController', 'NSMutableCopying', 'UIActionSheetDelegate', 'UIViewControllerTransitioningDelegate', 'UIAlertViewDelegate', 'AVAudioPlayerDelegate', 'MKReverseGeocoderDelegate', 'NSCoding', 'UITextInputTokenizer', 'GKFriendRequestComposeViewControllerDelegate', 'UIActivityItemSource', 'NSCacheDelegate', 'UIAdaptivePresentationControllerDelegate', 'GKAchievementViewControllerDelegate', 'UIViewControllerTransitionCoordinator', 'EKEventEditViewDelegate', 'NSURLConnectionDelegate', 'UITableViewDelegate', 'GKPeerPickerControllerDelegate', 'UIGuidedAccessRestrictionDelegate', 'AVSpeechSynthesizerDelegate', 'AVAudio3DMixing', 'AVPlayerItemLegibleOutputPushDelegate', 'ADInterstitialAdDelegate', 'HMAccessoryBrowserDelegate', 'AVAssetResourceLoaderDelegate', 'UITabBarControllerDelegate', 'CKRecordValue', 'SKPaymentTransactionObserver', 'AVCaptureAudioDataOutputSampleBufferDelegate', 'UIInputViewAudioFeedback', 'GKChallengeListener', 'SKSceneDelegate', 'UIPickerViewDelegate', 'UIWebViewDelegate', 'UIApplicationDelegate', 'GKInviteEventListener', 'MPMediaPlayback', 'MyClassJavaScriptMethods', 'AVAsynchronousKeyValueLoading', 'QLPreviewItem', 'SCNBoundingVolume', 'NSPortDelegate', 'UIContentContainer', 'SCNNodeRendererDelegate', 'SKRequestDelegate', 'SKPhysicsContactDelegate', 'HMAccessoryDelegate', 'UIPageViewControllerDataSource', 'SCNSceneRendererDelegate', 'SCNPhysicsContactDelegate', 'MKMapViewDelegate', 
'AVPlayerItemOutputPushDelegate', 'UICollectionViewDelegate', 'UIImagePickerControllerDelegate', 'MTLRenderCommandEncoder', 'UIToolbarDelegate', 'WKUIDelegate', 'SCNActionable', 'NSURLConnectionDataDelegate', 'MKOverlay', 'CBCentralManagerDelegate', 'JSExport', 'NSTextLayoutOrientationProvider', 'UIPickerViewDataSource', 'PKPushRegistryDelegate', 'UIViewControllerTransitionCoordinatorContext', 'NSLayoutManagerDelegate', 'MTLLibrary', 'NSFetchedResultsControllerDelegate', 'ABPeoplePickerNavigationControllerDelegate', 'MTLResource', 'NSDiscardableContent', 'UITextFieldDelegate', 'MTLBuffer', 'MTLSamplerState', 'GKGameCenterControllerDelegate', 'MPMediaPickerControllerDelegate', 'UISplitViewControllerDelegate', 'UIAppearance', 'UIPickerViewAccessibilityDelegate', 'UITraitEnvironment', 'UIScrollViewAccessibilityDelegate', 'ADBannerViewDelegate', 'MPPlayableContentDataSource', 'MTLComputePipelineState', 'NSURLSessionDelegate', 'MTLCommandBuffer', 'NSXMLParserDelegate', 'UIViewControllerRestoration', 'UISearchBarDelegate', 'UIBarPositioning', 'CBPeripheralDelegate', 'UISearchDisplayDelegate', 'CAAction', 'PKAddPassesViewControllerDelegate', 'MCNearbyServiceAdvertiserDelegate', 'MTLDepthStencilState', 'GKTurnBasedMatchmakerViewControllerDelegate', 'MPPlayableContentDelegate', 'AVCaptureVideoDataOutputSampleBufferDelegate', 'UIAppearanceContainer', 'UIStateRestoring', 'UITextDocumentProxy', 'MTLDrawable', 'NSURLSessionTaskDelegate', 'NSFilePresenter', 'AVAudioStereoMixing', 'UIViewControllerContextTransitioning', 'UITextInput', 'CBPeripheralManagerDelegate', 'UITextInputDelegate', 'NSFastEnumeration', 'NSURLAuthenticationChallengeSender', 'SCNProgramDelegate', 'AVVideoCompositing', 'SCNAnimatable', 'NSSecureCoding', 'MCAdvertiserAssistantDelegate', 'GKLocalPlayerListener', 'GLKNamedEffect', 'UIPopoverControllerDelegate', 'AVCaptureMetadataOutputObjectsDelegate', 'NSExtensionRequestHandling', 'UITextSelecting', 'UIPrinterPickerControllerDelegate', 'NCWidgetProviding', 
'MTLCommandEncoder', 'NSURLProtocolClient', 'MFMessageComposeViewControllerDelegate', 'UIVideoEditorControllerDelegate', 'WKNavigationDelegate', 'GKSavedGameListener', 'UITableViewDataSource', 'MTLFunction', 'EKCalendarChooserDelegate', 'NSUserActivityDelegate', 'UICollisionBehaviorDelegate', 'NSStreamDelegate', 'MCNearbyServiceBrowserDelegate', 'HMHomeDelegate', 'UINavigationControllerDelegate', 'MCSessionDelegate', 'UIDocumentPickerDelegate', 'UIViewControllerInteractiveTransitioning', 'GKTurnBasedEventListener', 'SCNSceneRenderer', 'MTLTexture', 'GLKViewDelegate', 'EAAccessoryDelegate', 'WKScriptMessageHandler', 'PHPhotoLibraryChangeObserver', 'NSKeyedUnarchiverDelegate', 'AVPlayerItemMetadataOutputPushDelegate', 'NSMachPortDelegate', 'SCNShadable', 'UIPopoverBackgroundViewMethods', 'UIDocumentMenuDelegate', 'UIBarPositioningDelegate', 'ABPersonViewControllerDelegate', 'NSNetServiceBrowserDelegate', 'EKEventViewDelegate', 'UIScrollViewDelegate', 'NSURLConnectionDownloadDelegate', 'UIGestureRecognizerDelegate', 'UINavigationBarDelegate', 'AVAudioMixing', 'NSFetchedResultsSectionInfo', 'UIDocumentInteractionControllerDelegate', 'MTLParallelRenderCommandEncoder', 'QLPreviewControllerDelegate', 'UIAccessibilityReadingContent', 'ABUnknownPersonViewControllerDelegate', 'GLKViewControllerDelegate', 'UICollectionViewDelegateFlowLayout', 'UIPopoverPresentationControllerDelegate', 'UIDynamicAnimatorDelegate', 'NSTextAttachmentContainer', 'MKAnnotation', 'UIAccessibilityIdentification', 'UICoordinateSpace', 'ABNewPersonViewControllerDelegate', 'MTLDevice', 'CAMediaTiming', 'AVCaptureFileOutputRecordingDelegate', 'HMHomeManagerDelegate', 'UITextViewDelegate', 'UITabBarDelegate', 'GKLeaderboardViewControllerDelegate', 'UISearchControllerDelegate', 'EAWiFiUnconfiguredAccessoryBrowserDelegate', 'UITextInputTraits', 'MTLRenderPipelineState', 'GKVoiceChatClient', 'UIKeyInput', 'UICollectionViewDataSource', 'SCNTechniqueSupport', 'NSLocking', 'AVCaptureFileOutputDelegate', 
'GKChallengeEventHandlerDelegate', 'UIObjectRestoration', 'CIFilterConstructor', 'AVPlayerItemOutputPullDelegate', 'EAGLDrawable', 'AVVideoCompositionValidationHandling', 'UIViewControllerAnimatedTransitioning', 'NSURLSessionDownloadDelegate', 'UIAccelerometerDelegate', 'UIPageViewControllerDelegate', 'MTLCommandQueue', 'UIDataSourceModelAssociation', 'AVAudioRecorderDelegate', 'GKSessionDelegate', 'NSKeyedArchiverDelegate', 'CAMetalDrawable', 'UIDynamicItem', 'CLLocationManagerDelegate', 'NSMetadataQueryDelegate', 'NSNetServiceDelegate', 'GKMatchmakerViewControllerDelegate', 'NSURLSessionDataDelegate'])
# Names of Cocoa / iOS SDK primitive C types (structs, enums, opaque refs and
# bridge macros).  Presumably produced by the ``__main__`` scraper below, which
# collects ``typedef enum/struct`` identifiers from the iPhoneOS SDK headers
# and prints them for paste-back — regenerate rather than edit by hand.
COCOA_PRIMITIVES = set(['ROTAHeader', '__CFBundle', 'MortSubtable', 'AudioFilePacketTableInfo', 'CGPDFOperatorTable', 'KerxStateEntry', 'ExtendedTempoEvent', 'CTParagraphStyleSetting', 'OpaqueMIDIPort', '_GLKMatrix3', '_GLKMatrix2', '_GLKMatrix4', 'ExtendedControlEvent', 'CAFAudioDescription', 'OpaqueCMBlockBuffer', 'CGTextDrawingMode', 'EKErrorCode', 'GCAcceleration', 'AudioUnitParameterInfo', '__SCPreferences', '__CTFrame', '__CTLine', 'AudioFile_SMPTE_Time', 'gss_krb5_lucid_context_v1', 'OpaqueJSValue', 'TrakTableEntry', 'AudioFramePacketTranslation', 'CGImageSource', 'OpaqueJSPropertyNameAccumulator', 'JustPCGlyphRepeatAddAction', '__CFBinaryHeap', 'OpaqueMIDIThruConnection', 'opaqueCMBufferQueue', 'OpaqueMusicSequence', 'MortRearrangementSubtable', 'MixerDistanceParams', 'MorxSubtable', 'MIDIObjectPropertyChangeNotification', 'SFNTLookupSegment', 'CGImageMetadataErrors', 'CGPath', 'OpaqueMIDIEndpoint', 'AudioComponentPlugInInterface', 'gss_ctx_id_t_desc_struct', 'sfntFontFeatureSetting', 'OpaqueJSContextGroup', '__SCNetworkConnection', 'AudioUnitParameterValueTranslation', 'CGImageMetadataType', 'CGPattern', 'AudioFileTypeAndFormatID', 'CGContext', 'AUNodeInteraction', 'SFNTLookupTable', 'JustPCDecompositionAction', 'KerxControlPointHeader', 'AudioStreamPacketDescription', 'KernSubtableHeader', '__SecCertificate', 'AUMIDIOutputCallbackStruct', 'MIDIMetaEvent', 'AudioQueueChannelAssignment', 'AnchorPoint', 'JustTable', '__CFNetService', 'CF_BRIDGED_TYPE', 'gss_krb5_lucid_key', 'CGPDFDictionary', 'KerxSubtableHeader', 'CAF_UUID_ChunkHeader', 'gss_krb5_cfx_keydata', 'OpaqueJSClass', 'CGGradient', 'OpaqueMIDISetup', 'JustPostcompTable', '__CTParagraphStyle', 'AudioUnitParameterHistoryInfo', 'OpaqueJSContext', 'CGShading', 'MIDIThruConnectionParams', 'BslnFormat0Part', 'SFNTLookupSingle', '__CFHost', '__SecRandom', '__CTFontDescriptor', '_NSRange', 'sfntDirectory', 'AudioQueueLevelMeterState', 'CAFPositionPeak', 'PropLookupSegment', '__CVOpenGLESTextureCache', 
'sfntInstance', '_GLKQuaternion', 'AnkrTable', '__SCNetworkProtocol', 'gss_buffer_desc_struct', 'CAFFileHeader', 'KerxOrderedListHeader', 'CGBlendMode', 'STXEntryOne', 'CAFRegion', 'SFNTLookupTrimmedArrayHeader', 'SCNMatrix4', 'KerxControlPointEntry', 'OpaqueMusicTrack', '_GLKVector4', 'gss_OID_set_desc_struct', 'OpaqueMusicPlayer', '_CFHTTPAuthentication', 'CGAffineTransform', 'CAFMarkerChunk', 'AUHostIdentifier', 'ROTAGlyphEntry', 'BslnTable', 'gss_krb5_lucid_context_version', '_GLKMatrixStack', 'CGImage', 'KernStateEntry', 'SFNTLookupSingleHeader', 'MortLigatureSubtable', 'CAFUMIDChunk', 'SMPTETime', 'CAFDataChunk', 'CGPDFStream', 'AudioFileRegionList', 'STEntryTwo', 'SFNTLookupBinarySearchHeader', 'OpbdTable', '__CTGlyphInfo', 'BslnFormat2Part', 'KerxIndexArrayHeader', 'TrakTable', 'KerxKerningPair', '__CFBitVector', 'KernVersion0SubtableHeader', 'OpaqueAudioComponentInstance', 'AudioChannelLayout', '__CFUUID', 'MIDISysexSendRequest', '__CFNumberFormatter', 'CGImageSourceStatus', 'AudioFileMarkerList', 'AUSamplerBankPresetData', 'CGDataProvider', 'AudioFormatInfo', '__SecIdentity', 'sfntCMapExtendedSubHeader', 'MIDIChannelMessage', 'KernOffsetTable', 'CGColorSpaceModel', 'MFMailComposeErrorCode', 'CGFunction', '__SecTrust', 'AVAudio3DAngularOrientation', 'CGFontPostScriptFormat', 'KernStateHeader', 'AudioUnitCocoaViewInfo', 'CGDataConsumer', 'OpaqueMIDIDevice', 'KernVersion0Header', 'AnchorPointTable', 'CGImageDestination', 'CAFInstrumentChunk', 'AudioUnitMeterClipping', 'MorxChain', '__CTFontCollection', 'STEntryOne', 'STXEntryTwo', 'ExtendedNoteOnEvent', 'CGColorRenderingIntent', 'KerxSimpleArrayHeader', 'MorxTable', '_GLKVector3', '_GLKVector2', 'MortTable', 'CGPDFBox', 'AudioUnitParameterValueFromString', '__CFSocket', 'ALCdevice_struct', 'MIDINoteMessage', 'sfntFeatureHeader', 'CGRect', '__SCNetworkInterface', '__CFTree', 'MusicEventUserData', 'TrakTableData', 'GCQuaternion', 'MortContextualSubtable', '__CTRun', 'AudioUnitFrequencyResponseBin', 
'MortChain', 'MorxInsertionSubtable', 'CGImageMetadata', 'gss_auth_identity', 'AudioUnitMIDIControlMapping', 'CAFChunkHeader', 'CGImagePropertyOrientation', 'CGPDFScanner', 'OpaqueMusicEventIterator', 'sfntDescriptorHeader', 'AudioUnitNodeConnection', 'OpaqueMIDIDeviceList', 'ExtendedAudioFormatInfo', 'BslnFormat1Part', 'sfntFontDescriptor', 'KernSimpleArrayHeader', '__CFRunLoopObserver', 'CGPatternTiling', 'MIDINotification', 'MorxLigatureSubtable', 'MessageComposeResult', 'MIDIThruConnectionEndpoint', 'MusicDeviceStdNoteParams', 'opaqueCMSimpleQueue', 'ALCcontext_struct', 'OpaqueAudioQueue', 'PropLookupSingle', 'CGInterpolationQuality', 'CGColor', 'AudioOutputUnitStartAtTimeParams', 'gss_name_t_desc_struct', 'CGFunctionCallbacks', 'CAFPacketTableHeader', 'AudioChannelDescription', 'sfntFeatureName', 'MorxContextualSubtable', 'CVSMPTETime', 'AudioValueRange', 'CGTextEncoding', 'AudioStreamBasicDescription', 'AUNodeRenderCallback', 'AudioPanningInfo', 'KerxOrderedListEntry', '__CFAllocator', 'OpaqueJSPropertyNameArray', '__SCDynamicStore', 'OpaqueMIDIEntity', '__CTRubyAnnotation', 'SCNVector4', 'CFHostClientContext', 'CFNetServiceClientContext', 'AudioUnitPresetMAS_SettingData', 'opaqueCMBufferQueueTriggerToken', 'AudioUnitProperty', 'CAFRegionChunk', 'CGPDFString', '__GLsync', '__CFStringTokenizer', 'JustWidthDeltaEntry', 'sfntVariationAxis', '__CFNetDiagnostic', 'CAFOverviewSample', 'sfntCMapEncoding', 'CGVector', '__SCNetworkService', 'opaqueCMSampleBuffer', 'AUHostVersionIdentifier', 'AudioBalanceFade', 'sfntFontRunFeature', 'KerxCoordinateAction', 'sfntCMapSubHeader', 'CVPlanarPixelBufferInfo', 'AUNumVersion', 'AUSamplerInstrumentData', 'AUPreset', '__CTRunDelegate', 'OpaqueAudioQueueProcessingTap', 'KerxTableHeader', '_NSZone', 'OpaqueExtAudioFile', '__CFRunLoopSource', '__CVMetalTextureCache', 'KerxAnchorPointAction', 'OpaqueJSString', 'AudioQueueParameterEvent', '__CFHTTPMessage', 'OpaqueCMClock', 'ScheduledAudioFileRegion', 'STEntryZero', 'AVAudio3DPoint', 
'gss_channel_bindings_struct', 'sfntVariationHeader', 'AUChannelInfo', 'UIOffset', 'GLKEffectPropertyPrv', 'KerxStateHeader', 'CGLineJoin', 'CGPDFDocument', '__CFBag', 'KernOrderedListHeader', '__SCNetworkSet', '__SecKey', 'MIDIObjectAddRemoveNotification', 'AudioUnitParameter', 'JustPCActionSubrecord', 'AudioComponentDescription', 'AudioUnitParameterValueName', 'AudioUnitParameterEvent', 'KerxControlPointAction', 'AudioTimeStamp', 'KernKerningPair', 'gss_buffer_set_desc_struct', 'MortFeatureEntry', 'FontVariation', 'CAFStringID', 'LcarCaretClassEntry', 'AudioUnitParameterStringFromValue', 'ACErrorCode', 'ALMXGlyphEntry', 'LtagTable', '__CTTypesetter', 'AuthorizationOpaqueRef', 'UIEdgeInsets', 'CGPathElement', 'CAFMarker', 'KernTableHeader', 'NoteParamsControlValue', 'SSLContext', 'gss_cred_id_t_desc_struct', 'AudioUnitParameterNameInfo', 'CGDataConsumerCallbacks', 'ALMXHeader', 'CGLineCap', 'MIDIControlTransform', 'CGPDFArray', '__SecPolicy', 'AudioConverterPrimeInfo', '__CTTextTab', '__CFNetServiceMonitor', 'AUInputSamplesInOutputCallbackStruct', '__CTFramesetter', 'CGPDFDataFormat', 'STHeader', 'CVPlanarPixelBufferInfo_YCbCrPlanar', 'MIDIValueMap', 'JustDirectionTable', '__SCBondStatus', 'SFNTLookupSegmentHeader', 'OpaqueCMMemoryPool', 'CGPathDrawingMode', 'CGFont', '__SCNetworkReachability', 'AudioClassDescription', 'CGPoint', 'AVAudio3DVectorOrientation', 'CAFStrings', '__CFNetServiceBrowser', 'opaqueMTAudioProcessingTap', 'sfntNameRecord', 'CGPDFPage', 'CGLayer', 'ComponentInstanceRecord', 'CAFInfoStrings', 'HostCallbackInfo', 'MusicDeviceNoteParams', 'OpaqueVTCompressionSession', 'KernIndexArrayHeader', 'CVPlanarPixelBufferInfo_YCbCrBiPlanar', 'MusicTrackLoopInfo', 'opaqueCMFormatDescription', 'STClassTable', 'sfntDirectoryEntry', 'OpaqueCMTimebase', 'CGDataProviderDirectCallbacks', 'MIDIPacketList', 'CAFOverviewChunk', 'MIDIPacket', 'ScheduledAudioSlice', 'CGDataProviderSequentialCallbacks', 'AudioBuffer', 'MorxRearrangementSubtable', 'CGPatternCallbacks', 
'AUDistanceAttenuationData', 'MIDIIOErrorNotification', 'CGPDFContentStream', 'IUnknownVTbl', 'MIDITransform', 'MortInsertionSubtable', 'CABarBeatTime', 'AudioBufferList', '__CVBuffer', 'AURenderCallbackStruct', 'STXEntryZero', 'JustPCDuctilityAction', 'OpaqueAudioQueueTimeline', 'VTDecompressionOutputCallbackRecord', 'OpaqueMIDIClient', '__CFPlugInInstance', 'AudioQueueBuffer', '__CFFileDescriptor', 'AudioUnitConnection', '_GKTurnBasedExchangeStatus', 'LcarCaretTable', 'CVPlanarComponentInfo', 'JustWidthDeltaGroup', 'OpaqueAudioComponent', 'ParameterEvent', '__CVPixelBufferPool', '__CTFont', 'CGColorSpace', 'CGSize', 'AUDependentParameter', 'MIDIDriverInterface', 'gss_krb5_rfc1964_keydata', '__CFDateFormatter', 'LtagStringRange', 'OpaqueVTDecompressionSession', 'gss_iov_buffer_desc_struct', 'AUPresetEvent', 'PropTable', 'KernOrderedListEntry', 'CF_BRIDGED_MUTABLE_TYPE', 'gss_OID_desc_struct', 'AudioUnitPresetMAS_Settings', 'AudioFileMarker', 'JustPCConditionalAddAction', 'BslnFormat3Part', '__CFNotificationCenter', 'MortSwashSubtable', 'AUParameterMIDIMapping', 'SCNVector3', 'OpaqueAudioConverter', 'MIDIRawData', 'sfntNameHeader', '__CFRunLoop', 'MFMailComposeResult', 'CATransform3D', 'OpbdSideValues', 'CAF_SMPTE_Time', '__SecAccessControl', 'JustPCAction', 'OpaqueVTFrameSilo', 'OpaqueVTMultiPassStorage', 'CGPathElementType', 'AudioFormatListItem', 'AudioUnitExternalBuffer', 'AudioFileRegion', 'AudioValueTranslation', 'CGImageMetadataTag', 'CAFPeakChunk', 'AudioBytePacketTranslation', 'sfntCMapHeader', '__CFURLEnumerator', 'STXHeader', 'CGPDFObjectType', 'SFNTLookupArrayHeader'])
if __name__ == '__main__': # pragma: no cover
import os
import re
FRAMEWORKS_PATH = '/Applications/Xcode.app/Contents/Developer/Platforms/iPhoneOS.platform/Developer/SDKs/iPhoneOS8.0.sdk/System/Library/Frameworks/'
frameworks = os.listdir(FRAMEWORKS_PATH)
all_interfaces = set()
all_protocols = set()
all_primitives = set()
for framework in frameworks:
frameworkHeadersDir = FRAMEWORKS_PATH + framework + '/Headers/'
if not os.path.exists(frameworkHeadersDir):
continue
headerFilenames = os.listdir(frameworkHeadersDir)
for f in headerFilenames:
if not f.endswith('.h'):
continue
headerFilePath = frameworkHeadersDir + f
content = open(headerFilePath).read()
res = re.findall('(?<=@interface )\w+', content)
for r in res:
all_interfaces.add(r)
res = re.findall('(?<=@protocol )\w+', content)
for r in res:
all_protocols.add(r)
res = re.findall('(?<=typedef enum )\w+', content)
for r in res:
all_primitives.add(r)
res = re.findall('(?<=typedef struct )\w+', content)
for r in res:
all_primitives.add(r)
res = re.findall('(?<=typedef const struct )\w+', content)
for r in res:
all_primitives.add(r)
print("ALL interfaces: \n")
print(all_interfaces)
print("\nALL protocols: \n")
print(all_protocols)
print("\nALL primitives: \n")
print(all_primitives)
| 536.797297 | 21,464 | 0.807416 | 2,013 | 39,723 | 15.818182 | 0.901143 | 0.002512 | 0.001884 | 0.001884 | 0.007255 | 0.006752 | 0.00603 | 0.004774 | 0.004774 | 0.004774 | 0 | 0.001176 | 0.05775 | 39,723 | 73 | 21,465 | 544.150685 | 0.849555 | 0.010598 | 0 | 0.232558 | 0 | 0.023256 | 0.791494 | 0.387215 | 0 | 0 | 0 | 0 | 0.023256 | 1 | 0 | false | 0.069767 | 0.069767 | 0 | 0.069767 | 0.162791 | 0 | 0 | 1 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 5 |
b97c22a56c6c89ffccb481e10d7ab2bacd42694a | 126 | py | Python | Chapter 1/hello-working copy.py | JoeBugajski/python-examples | c32472900a68aca43d699c610f6f50638b9ddb98 | [
"MIT"
] | null | null | null | Chapter 1/hello-working copy.py | JoeBugajski/python-examples | c32472900a68aca43d699c610f6f50638b9ddb98 | [
"MIT"
] | null | null | null | Chapter 1/hello-working copy.py | JoeBugajski/python-examples | c32472900a68aca43d699c610f6f50638b9ddb98 | [
"MIT"
] | null | null | null | #!/usr/bin/env python3
# Copyright 2009-2017 BHG http://bw.org/
# Emit the two greeting lines in order (same stdout as two back-to-back prints).
for _greeting in ("Hello, World.", "Hey, what's up there Sparky?"):
    print(_greeting)
| 21 | 40 | 0.68254 | 21 | 126 | 4.095238 | 0.952381 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.081081 | 0.119048 | 126 | 5 | 41 | 25.2 | 0.693694 | 0.47619 | 0 | 0 | 0 | 0 | 0.640625 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 1 | 0 | 5 |
b982e5da755fc81ec8e7d008fbbf38e4330d34c1 | 30 | py | Python | dask-gateway-server/dask_gateway_server/__main__.py | AndreaGiardini/dask-gateway | c2583548df19359d24031e1dd9161c616d3bed50 | [
"BSD-3-Clause"
] | 170 | 2016-09-14T10:35:24.000Z | 2022-03-29T20:29:32.000Z | dask-gateway-server/dask_gateway_server/__main__.py | AndreaGiardini/dask-gateway | c2583548df19359d24031e1dd9161c616d3bed50 | [
"BSD-3-Clause"
] | 318 | 2019-09-18T18:42:57.000Z | 2022-03-31T11:05:38.000Z | dask-gateway-server/dask_gateway_server/__main__.py | AndreaGiardini/dask-gateway | c2583548df19359d24031e1dd9161c616d3bed50 | [
"BSD-3-Clause"
] | 61 | 2019-09-18T18:09:56.000Z | 2022-03-25T20:35:11.000Z | from .app import main
main()
| 7.5 | 21 | 0.7 | 5 | 30 | 4.2 | 0.8 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.2 | 30 | 3 | 22 | 10 | 0.875 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 0.5 | 0 | 0.5 | 0 | 1 | 1 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | 5 |
b9a1e1a2719a23e26cde93cb86d6396b4498209b | 78 | py | Python | pysimplelogger/__init__.py | rickalm/pysimplelogger | 26cae49cb6dd9393abeb43a1d573da7c500956ca | [
"MIT"
] | null | null | null | pysimplelogger/__init__.py | rickalm/pysimplelogger | 26cae49cb6dd9393abeb43a1d573da7c500956ca | [
"MIT"
] | null | null | null | pysimplelogger/__init__.py | rickalm/pysimplelogger | 26cae49cb6dd9393abeb43a1d573da7c500956ca | [
"MIT"
] | null | null | null | import sys
if sys.version_info[0] == 3:
from pysimplelogger.logger3 import *
| 19.5 | 37 | 0.75641 | 12 | 78 | 4.833333 | 0.833333 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.044776 | 0.141026 | 78 | 3 | 38 | 26 | 0.820896 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 0.666667 | 0 | 0.666667 | 0 | 1 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 5 |
b9bdd317d723fbe3239b70794b0cfed909ffcf67 | 37 | py | Python | factory-ai-vision/EdgeSolution/modules/WebModule/backend/vision_on_edge/azure_app_insight/__init__.py | piyushka17/azure-intelligent-edge-patterns | 0d088899afb0022daa2ac434226824dba2c997c1 | [
"MIT"
] | null | null | null | factory-ai-vision/EdgeSolution/modules/WebModule/backend/vision_on_edge/azure_app_insight/__init__.py | piyushka17/azure-intelligent-edge-patterns | 0d088899afb0022daa2ac434226824dba2c997c1 | [
"MIT"
] | null | null | null | factory-ai-vision/EdgeSolution/modules/WebModule/backend/vision_on_edge/azure_app_insight/__init__.py | piyushka17/azure-intelligent-edge-patterns | 0d088899afb0022daa2ac434226824dba2c997c1 | [
"MIT"
] | null | null | null | """Azure Application insight app."""
| 18.5 | 36 | 0.702703 | 4 | 37 | 6.5 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.108108 | 37 | 1 | 37 | 37 | 0.787879 | 0.810811 | 0 | null | 0 | null | 0 | 0 | null | 0 | 0 | 0 | null | 1 | null | true | 0 | 0 | null | null | null | 1 | 1 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 5 |
b9ca724e781a7bc0ecae27e65e41a91f54a21fa8 | 141 | py | Python | rrd/view/portal/test.py | ab-sin-the/dashboard | 80a21061325266a1a5164437e163905fac6e508c | [
"Apache-2.0"
] | null | null | null | rrd/view/portal/test.py | ab-sin-the/dashboard | 80a21061325266a1a5164437e163905fac6e508c | [
"Apache-2.0"
] | null | null | null | rrd/view/portal/test.py | ab-sin-the/dashboard | 80a21061325266a1a5164437e163905fac6e508c | [
"Apache-2.0"
] | null | null | null | <span class="report-dropdown pull-right" onclick="report_display_single(' + str(header_num) + ')"><img src="/static/img/dropdown.svg"></span> | 141 | 141 | 0.730496 | 20 | 141 | 5 | 0.8 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.056738 | 141 | 1 | 141 | 141 | 0.75188 | 0 | 0 | 0 | 0 | 0 | 0.676056 | 0.330986 | 0 | 0 | 0 | 0 | 0 | 0 | null | null | 0 | 0 | null | null | 0 | 1 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 5 |
b9fab2d935b2af81518da09f88795c694502712e | 118 | py | Python | pyx/setup.py | brittainhard/py | aede05530ad05a8319fef7e76b49e4bf3cebebac | [
"MIT"
] | null | null | null | pyx/setup.py | brittainhard/py | aede05530ad05a8319fef7e76b49e4bf3cebebac | [
"MIT"
] | null | null | null | pyx/setup.py | brittainhard/py | aede05530ad05a8319fef7e76b49e4bf3cebebac | [
"MIT"
] | null | null | null | from distutils.core import setup
from Cython.Build import cythonize
setup(ext_modules=cythonize("hello_world.pyx"))
| 19.666667 | 47 | 0.822034 | 17 | 118 | 5.588235 | 0.764706 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.09322 | 118 | 5 | 48 | 23.6 | 0.88785 | 0 | 0 | 0 | 0 | 0 | 0.127119 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 0.666667 | 0 | 0.666667 | 0 | 1 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 5 |
6a084e7975d87c556696f1c879c20263b192f03c | 236 | py | Python | napari_roi/__init__.py | BodenmillerGroup/napari-roi | 996b400d2addbbe12b1e031e367f608694d62eeb | [
"MIT"
] | 1 | 2022-02-06T11:32:04.000Z | 2022-02-06T11:32:04.000Z | napari_roi/__init__.py | BodenmillerGroup/napari-roi | 996b400d2addbbe12b1e031e367f608694d62eeb | [
"MIT"
] | 3 | 2022-01-10T08:37:53.000Z | 2022-02-10T09:10:00.000Z | napari_roi/__init__.py | BodenmillerGroup/napari-roi | 996b400d2addbbe12b1e031e367f608694d62eeb | [
"MIT"
] | null | null | null | from ._roi import ROI, ROIBase, ROIOrigin
from ._roi_widget import ROIWidget
try:
from ._version import version as __version__
except ImportError:
__version__ = "unknown"
__all__ = ["ROI", "ROIBase", "ROIOrigin", "ROIWidget"]
| 23.6 | 54 | 0.737288 | 27 | 236 | 5.851852 | 0.518519 | 0.088608 | 0.240506 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.161017 | 236 | 9 | 55 | 26.222222 | 0.79798 | 0 | 0 | 0 | 0 | 0 | 0.148305 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.571429 | 0 | 0.571429 | 0 | 1 | 0 | 0 | null | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 0 | 5 |
6a1df39036a9335ac80ac2663124da8fea198311 | 268 | py | Python | perform/rom/nonintrusive_rom/autoencoder_rom/autoencoder_koopman/otto2019.py | cwentland0/perform | e08771cb776a7e6518c43350746e2ca72f79b153 | [
"MIT"
] | 6 | 2021-03-24T21:42:06.000Z | 2022-01-28T20:00:13.000Z | perform/rom/nonintrusive_rom/autoencoder_rom/autoencoder_koopman/otto2019.py | cwentland0/perform | e08771cb776a7e6518c43350746e2ca72f79b153 | [
"MIT"
] | 38 | 2021-04-15T15:30:21.000Z | 2022-01-29T01:23:57.000Z | perform/rom/nonintrusive_rom/autoencoder_rom/autoencoder_koopman/otto2019.py | cwentland0/perform | e08771cb776a7e6518c43350746e2ca72f79b153 | [
"MIT"
] | 1 | 2021-07-03T03:13:36.000Z | 2021-07-03T03:13:36.000Z | from perform.rom.nonintrusive_rom.autoencoder_rom.autoencoder_koopman.autoencoder_koopman import AutoencoderKoopman
class AEKoopmanOtto2019(AutoencoderKoopman):
"""Class implementing the discrete-time variant of the autoencoder Koopman method by Otto (2019)."""
| 44.666667 | 115 | 0.839552 | 30 | 268 | 7.366667 | 0.666667 | 0.244344 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.032922 | 0.093284 | 268 | 5 | 116 | 53.6 | 0.876543 | 0.350746 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 0.5 | 0 | 1 | 0 | 0 | 0 | 0 | null | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 5 |
6a2eaf14c5a974f07c32d4efa594cfad62829897 | 97 | py | Python | layers/graph/__init__.py | iamsg08/Joing-Parsing-and-Generation-for-Abstractive-Summarization | a432e6e78ac1b3016c2a5f8788a613772b11da40 | [
"BSD-3-Clause"
] | 29 | 2019-11-17T08:03:28.000Z | 2021-01-06T15:16:05.000Z | layers/graph/__init__.py | iamsg08/Joing-Parsing-and-Generation-for-Abstractive-Summarization | a432e6e78ac1b3016c2a5f8788a613772b11da40 | [
"BSD-3-Clause"
] | 1 | 2020-08-15T07:09:06.000Z | 2020-08-24T06:49:02.000Z | layers/graph/__init__.py | iamsg08/Joing-Parsing-and-Generation-for-Abstractive-Summarization | a432e6e78ac1b3016c2a5f8788a613772b11da40 | [
"BSD-3-Clause"
] | 4 | 2019-11-21T15:19:08.000Z | 2021-08-20T15:34:02.000Z | from .TreeNode import TreeNode
from .Graph_De import Graph_De
__all__ = ["TreeNode", "Graph_De"] | 24.25 | 34 | 0.773196 | 14 | 97 | 4.857143 | 0.428571 | 0.308824 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.123711 | 97 | 4 | 34 | 24.25 | 0.8 | 0 | 0 | 0 | 0 | 0 | 0.163265 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.666667 | 0 | 0.666667 | 0 | 1 | 0 | 0 | null | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 0 | 5 |
dbf985ed614bf83b714851ca0b03bf77384f3164 | 125 | py | Python | system_tests/lewis_emulators/__init__.py | ISISComputingGroup/EPICS-Keithley6517B | e91bc4a8d267892dad2d83ed60ce22d2f710a08f | [
"BSD-3-Clause"
] | null | null | null | system_tests/lewis_emulators/__init__.py | ISISComputingGroup/EPICS-Keithley6517B | e91bc4a8d267892dad2d83ed60ce22d2f710a08f | [
"BSD-3-Clause"
] | null | null | null | system_tests/lewis_emulators/__init__.py | ISISComputingGroup/EPICS-Keithley6517B | e91bc4a8d267892dad2d83ed60ce22d2f710a08f | [
"BSD-3-Clause"
] | null | null | null | # DO NOT DELETE THIS FILE - LEWIS FRAMEWORK REQUIRES THE DIRECTORY TO BE IMPORTABLE
from __future__ import absolute_import
| 41.666667 | 84 | 0.816 | 18 | 125 | 5.388889 | 0.944444 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.168 | 125 | 2 | 85 | 62.5 | 0.932692 | 0.648 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 5 |
e001543822e08c56409f765bcdc1521d2c9f7e31 | 75 | py | Python | mathmodule_pkg/statistics/__init__.py | Retr0MrWave/mathModule | 6c538e5d8c77e8bb4e4117de0246c493abeda994 | [
"MIT"
] | null | null | null | mathmodule_pkg/statistics/__init__.py | Retr0MrWave/mathModule | 6c538e5d8c77e8bb4e4117de0246c493abeda994 | [
"MIT"
] | null | null | null | mathmodule_pkg/statistics/__init__.py | Retr0MrWave/mathModule | 6c538e5d8c77e8bb4e4117de0246c493abeda994 | [
"MIT"
] | null | null | null | from .averages import am, gm, hm, median
all = ["am", "gm", "hm", "median"] | 37.5 | 40 | 0.6 | 12 | 75 | 3.75 | 0.666667 | 0.177778 | 0.266667 | 0.533333 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.16 | 75 | 2 | 41 | 37.5 | 0.714286 | 0 | 0 | 0 | 0 | 0 | 0.157895 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.5 | 0 | 0.5 | 0 | 1 | 0 | 0 | null | 0 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 5 |
e006bcbabc21133fd95e548ced93385305ac7d4e | 75 | py | Python | django/db/migrations/__init__.py | trught007/django | d55d21dbb8b307941c2d26b95be46bf83015d868 | [
"BSD-3-Clause"
] | 1 | 2021-11-11T04:13:11.000Z | 2021-11-11T04:13:11.000Z | django/db/migrations/__init__.py | amit2014/django | 072e25eee70c0e629fcbb37f0485a6c6694b6856 | [
"BSD-3-Clause"
] | null | null | null | django/db/migrations/__init__.py | amit2014/django | 072e25eee70c0e629fcbb37f0485a6c6694b6856 | [
"BSD-3-Clause"
] | 1 | 2020-12-24T01:28:30.000Z | 2020-12-24T01:28:30.000Z | from .migration import Migration # NOQA
from .operations import * # NOQA
| 25 | 40 | 0.746667 | 9 | 75 | 6.222222 | 0.555556 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.186667 | 75 | 2 | 41 | 37.5 | 0.918033 | 0.12 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 5 |
e076034796f3f2af76627e4388b5d86093c4cf36 | 86 | py | Python | chart/chart/python/spectralsequence_chart/infinity.py | hoodmane/sseq | 0f19a29c95486a629b0d054c703ca0a58999ae97 | [
"Apache-2.0",
"MIT"
] | 7 | 2021-04-22T04:06:09.000Z | 2022-01-25T04:05:49.000Z | chart/chart/python/spectralsequence_chart/infinity.py | hoodmane/sseq | 0f19a29c95486a629b0d054c703ca0a58999ae97 | [
"Apache-2.0",
"MIT"
] | 68 | 2020-03-21T22:37:24.000Z | 2022-03-31T02:51:35.000Z | chart/chart/python/spectralsequence_chart/infinity.py | hoodmane/sseq | 0f19a29c95486a629b0d054c703ca0a58999ae97 | [
"Apache-2.0",
"MIT"
] | 5 | 2021-02-17T06:37:43.000Z | 2022-02-01T03:53:22.000Z | """Defines the constant INFINITY = 65535."""
INFINITY : int = 65535
""" int: 65535 """ | 28.666667 | 44 | 0.639535 | 10 | 86 | 5.5 | 0.6 | 0.290909 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.208333 | 0.162791 | 86 | 3 | 45 | 28.666667 | 0.555556 | 0.44186 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | null | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 5 |
e0799969a3930a0dd13553dee83dc6138600af45 | 14,908 | py | Python | sdk/python/pulumi_azure/monitoring/metric_alert.py | adnang/pulumi-azure | 32360d2f1e41e27d7fdd6522cb26d65e531f279f | [
"ECL-2.0",
"Apache-2.0"
] | null | null | null | sdk/python/pulumi_azure/monitoring/metric_alert.py | adnang/pulumi-azure | 32360d2f1e41e27d7fdd6522cb26d65e531f279f | [
"ECL-2.0",
"Apache-2.0"
] | null | null | null | sdk/python/pulumi_azure/monitoring/metric_alert.py | adnang/pulumi-azure | 32360d2f1e41e27d7fdd6522cb26d65e531f279f | [
"ECL-2.0",
"Apache-2.0"
] | null | null | null | # coding=utf-8
# *** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import json
import warnings
import pulumi
import pulumi.runtime
from typing import Union
from .. import utilities, tables
class MetricAlert(pulumi.CustomResource):
    # NOTE(review): the leading indentation of this generated (tfgen) file was
    # lost; it is restored here.  The bare-string attribute docstrings are the
    # Pulumi code generator's own convention and are kept verbatim.
    actions: pulumi.Output[list]
    """
    One or more `action` blocks as defined below.

      * `actionGroupId` (`str`) - The ID of the Action Group can be sourced from the `monitoring.ActionGroup` resource
      * `webhookProperties` (`dict`) - The map of custom string properties to include with the post operation. These data are appended to the webhook payload.
    """
    auto_mitigate: pulumi.Output[bool]
    """
    Should the alerts in this Metric Alert be auto resolved? Defaults to `true`.
    """
    criterias: pulumi.Output[list]
    """
    One or more `criteria` blocks as defined below.

      * `aggregation` (`str`) - The statistic that runs over the metric values. Possible values are `Average`, `Count`, `Minimum`, `Maximum` and `Total`.
      * `dimensions` (`list`) - One or more `dimension` blocks as defined below.
        * `name` (`str`) - One of the dimension names.
        * `operator` (`str`) - The dimension operator. Possible values are `Include` and `Exclude`.
        * `values` (`list`) - The list of dimension values.
      * `metricName` (`str`) - One of the metric names to be monitored.
      * `metricNamespace` (`str`) - One of the metric namespaces to be monitored.
      * `operator` (`str`) - The criteria operator. Possible values are `Equals`, `NotEquals`, `GreaterThan`, `GreaterThanOrEqual`, `LessThan` and `LessThanOrEqual`.
      * `threshold` (`float`) - The criteria threshold value that activates the alert.
    """
    description: pulumi.Output[str]
    """
    The description of this Metric Alert.
    """
    enabled: pulumi.Output[bool]
    """
    Should this Metric Alert be enabled? Defaults to `true`.
    """
    frequency: pulumi.Output[str]
    """
    The evaluation frequency of this Metric Alert, represented in ISO 8601 duration format. Possible values are `PT1M`, `PT5M`, `PT15M`, `PT30M` and `PT1H`. Defaults to `PT1M`.
    """
    name: pulumi.Output[str]
    """
    The name of the Metric Alert. Changing this forces a new resource to be created.
    """
    resource_group_name: pulumi.Output[str]
    """
    The name of the resource group in which to create the Metric Alert instance.
    """
    scopes: pulumi.Output[str]
    """
    A set of strings of resource IDs at which the metric criteria should be applied.
    """
    severity: pulumi.Output[float]
    """
    The severity of this Metric Alert. Possible values are `0`, `1`, `2`, `3` and `4`. Defaults to `3`.
    """
    tags: pulumi.Output[dict]
    """
    A mapping of tags to assign to the resource.
    """
    window_size: pulumi.Output[str]
    """
    The period of time that is used to monitor alert activity, represented in ISO 8601 duration format. This value must be greater than `frequency`. Possible values are `PT1M`, `PT5M`, `PT15M`, `PT30M`, `PT1H`, `PT6H`, `PT12H` and `P1D`. Defaults to `PT5M`.
    """
def __init__(__self__, resource_name, opts=None, actions=None, auto_mitigate=None, criterias=None, description=None, enabled=None, frequency=None, name=None, resource_group_name=None, scopes=None, severity=None, tags=None, window_size=None, __props__=None, __name__=None, __opts__=None):
"""
Manages a Metric Alert within Azure Monitor.
## Example Usage
```python
import pulumi
import pulumi_azure as azure
main_resource_group = azure.core.ResourceGroup("mainResourceGroup", location="West US")
to_monitor = azure.storage.Account("toMonitor",
resource_group_name=main_resource_group.name,
location=main_resource_group.location,
account_tier="Standard",
account_replication_type="LRS")
main_action_group = azure.monitoring.ActionGroup("mainActionGroup",
resource_group_name=main_resource_group.name,
short_name="exampleact",
webhook_receiver=[{
"name": "callmyapi",
"serviceUri": "http://example.com/alert",
}])
example = azure.monitoring.MetricAlert("example",
resource_group_name=main_resource_group.name,
scopes=[to_monitor.id],
description="Action will be triggered when Transactions count is greater than 50.",
criteria=[{
"metricNamespace": "Microsoft.Storage/storageAccounts",
"metricName": "Transactions",
"aggregation": "Total",
"operator": "GreaterThan",
"threshold": 50,
"dimension": [{
"name": "ApiName",
"operator": "Include",
"values": ["*"],
}],
}],
action=[{
"actionGroupId": main_action_group.id,
}])
```
:param str resource_name: The name of the resource.
:param pulumi.ResourceOptions opts: Options for the resource.
:param pulumi.Input[list] actions: One or more `action` blocks as defined below.
:param pulumi.Input[bool] auto_mitigate: Should the alerts in this Metric Alert be auto resolved? Defaults to `true`.
:param pulumi.Input[list] criterias: One or more `criteria` blocks as defined below.
:param pulumi.Input[str] description: The description of this Metric Alert.
:param pulumi.Input[bool] enabled: Should this Metric Alert be enabled? Defaults to `true`.
:param pulumi.Input[str] frequency: The evaluation frequency of this Metric Alert, represented in ISO 8601 duration format. Possible values are `PT1M`, `PT5M`, `PT15M`, `PT30M` and `PT1H`. Defaults to `PT1M`.
:param pulumi.Input[str] name: The name of the Metric Alert. Changing this forces a new resource to be created.
:param pulumi.Input[str] resource_group_name: The name of the resource group in which to create the Metric Alert instance.
:param pulumi.Input[str] scopes: A set of strings of resource IDs at which the metric criteria should be applied.
:param pulumi.Input[float] severity: The severity of this Metric Alert. Possible values are `0`, `1`, `2`, `3` and `4`. Defaults to `3`.
:param pulumi.Input[dict] tags: A mapping of tags to assign to the resource.
:param pulumi.Input[str] window_size: The period of time that is used to monitor alert activity, represented in ISO 8601 duration format. This value must be greater than `frequency`. Possible values are `PT1M`, `PT5M`, `PT15M`, `PT30M`, `PT1H`, `PT6H`, `PT12H` and `P1D`. Defaults to `PT5M`.
The **actions** object supports the following:
* `actionGroupId` (`pulumi.Input[str]`) - The ID of the Action Group can be sourced from the `monitoring.ActionGroup` resource
* `webhookProperties` (`pulumi.Input[dict]`) - The map of custom string properties to include with the post operation. These data are appended to the webhook payload.
The **criterias** object supports the following:
* `aggregation` (`pulumi.Input[str]`) - The statistic that runs over the metric values. Possible values are `Average`, `Count`, `Minimum`, `Maximum` and `Total`.
* `dimensions` (`pulumi.Input[list]`) - One or more `dimension` blocks as defined below.
* `name` (`pulumi.Input[str]`) - One of the dimension names.
* `operator` (`pulumi.Input[str]`) - The dimension operator. Possible values are `Include` and `Exclude`.
* `values` (`pulumi.Input[list]`) - The list of dimension values.
* `metricName` (`pulumi.Input[str]`) - One of the metric names to be monitored.
* `metricNamespace` (`pulumi.Input[str]`) - One of the metric namespaces to be monitored.
* `operator` (`pulumi.Input[str]`) - The criteria operator. Possible values are `Equals`, `NotEquals`, `GreaterThan`, `GreaterThanOrEqual`, `LessThan` and `LessThanOrEqual`.
* `threshold` (`pulumi.Input[float]`) - The criteria threshold value that activates the alert.
"""
if __name__ is not None:
warnings.warn("explicit use of __name__ is deprecated", DeprecationWarning)
resource_name = __name__
if __opts__ is not None:
warnings.warn("explicit use of __opts__ is deprecated, use 'opts' instead", DeprecationWarning)
opts = __opts__
if opts is None:
opts = pulumi.ResourceOptions()
if not isinstance(opts, pulumi.ResourceOptions):
raise TypeError('Expected resource options to be a ResourceOptions instance')
if opts.version is None:
opts.version = utilities.get_version()
if opts.id is None:
if __props__ is not None:
raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')
__props__ = dict()
__props__['actions'] = actions
__props__['auto_mitigate'] = auto_mitigate
if criterias is None:
raise TypeError("Missing required property 'criterias'")
__props__['criterias'] = criterias
__props__['description'] = description
__props__['enabled'] = enabled
__props__['frequency'] = frequency
__props__['name'] = name
if resource_group_name is None:
raise TypeError("Missing required property 'resource_group_name'")
__props__['resource_group_name'] = resource_group_name
if scopes is None:
raise TypeError("Missing required property 'scopes'")
__props__['scopes'] = scopes
__props__['severity'] = severity
__props__['tags'] = tags
__props__['window_size'] = window_size
super(MetricAlert, __self__).__init__(
'azure:monitoring/metricAlert:MetricAlert',
resource_name,
__props__,
opts)
@staticmethod
def get(resource_name, id, opts=None, actions=None, auto_mitigate=None, criterias=None, description=None, enabled=None, frequency=None, name=None, resource_group_name=None, scopes=None, severity=None, tags=None, window_size=None):
    """
    Get an existing MetricAlert resource's state with the given name, id, and
    optional extra properties used to qualify the lookup.

    :param str resource_name: The unique name of the resulting resource.
    :param str id: The unique provider ID of the resource to lookup.
    :param pulumi.ResourceOptions opts: Options for the resource.
    :param pulumi.Input[list] actions: One or more `action` blocks as defined below.
    :param pulumi.Input[bool] auto_mitigate: Should the alerts in this Metric Alert be auto resolved? Defaults to `true`.
    :param pulumi.Input[list] criterias: One or more `criteria` blocks as defined below.
    :param pulumi.Input[str] description: The description of this Metric Alert.
    :param pulumi.Input[bool] enabled: Should this Metric Alert be enabled? Defaults to `true`.
    :param pulumi.Input[str] frequency: The evaluation frequency, in ISO 8601 duration format (`PT1M`, `PT5M`, `PT15M`, `PT30M`, `PT1H`). Defaults to `PT1M`.
    :param pulumi.Input[str] name: The name of the Metric Alert. Changing this forces a new resource to be created.
    :param pulumi.Input[str] resource_group_name: The name of the resource group in which to create the Metric Alert instance.
    :param pulumi.Input[str] scopes: A set of strings of resource IDs at which the metric criteria should be applied.
    :param pulumi.Input[float] severity: The severity of this Metric Alert (`0`-`4`). Defaults to `3`.
    :param pulumi.Input[dict] tags: A mapping of tags to assign to the resource.
    :param pulumi.Input[str] window_size: The monitoring window, in ISO 8601 duration format; must be greater than `frequency`. Defaults to `PT5M`.

    An **actions** entry supports `actionGroupId` and `webhookProperties`; a
    **criterias** entry supports `aggregation`, `dimensions` (each with `name`,
    `operator`, `values`), `metricName`, `metricNamespace`, `operator` and
    `threshold` — see the provider documentation for details.
    """
    # Bind the provider id into the options so the engine performs a lookup
    # instead of a create.
    opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))

    # Assemble the known state in one literal; key order mirrors the schema.
    __props__ = {
        "actions": actions,
        "auto_mitigate": auto_mitigate,
        "criterias": criterias,
        "description": description,
        "enabled": enabled,
        "frequency": frequency,
        "name": name,
        "resource_group_name": resource_group_name,
        "scopes": scopes,
        "severity": severity,
        "tags": tags,
        "window_size": window_size,
    }
    return MetricAlert(resource_name, opts=opts, __props__=__props__)
def translate_output_property(self, prop):
    """Map a provider (camelCase) property name to its Python (snake_case) form."""
    snake_name = tables._CAMEL_TO_SNAKE_CASE_TABLE.get(prop)
    # Fall back to the original name when there is no (truthy) mapping.
    return snake_name if snake_name else prop
def translate_input_property(self, prop):
    """Map a Python (snake_case) property name to its provider (camelCase) form."""
    camel_name = tables._SNAKE_TO_CAMEL_CASE_TABLE.get(prop)
    # Fall back to the original name when there is no (truthy) mapping.
    return camel_name if camel_name else prop
| 57.559846 | 299 | 0.657969 | 1,812 | 14,908 | 5.266556 | 0.147351 | 0.053023 | 0.038143 | 0.023892 | 0.764225 | 0.75427 | 0.740543 | 0.707639 | 0.680394 | 0.666771 | 0 | 0.009415 | 0.237658 | 14,908 | 258 | 300 | 57.782946 | 0.830268 | 0.513349 | 0 | 0.025 | 1 | 0 | 0.154206 | 0.015002 | 0 | 0 | 0 | 0 | 0 | 1 | 0.05 | false | 0.0125 | 0.075 | 0.025 | 0.325 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 1 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 5 |
0eb90c37411043b0dd64690eebeb4510565b57d8 | 49 | py | Python | linear_regression/tempCodeRunnerFile.py | shahad-mahmud/machine_lerning | 7e975bab70b010277cbe6f6c0a5fb44d878e24a6 | [
"Apache-2.0"
] | null | null | null | linear_regression/tempCodeRunnerFile.py | shahad-mahmud/machine_lerning | 7e975bab70b010277cbe6f6c0a5fb44d878e24a6 | [
"Apache-2.0"
] | null | null | null | linear_regression/tempCodeRunnerFile.py | shahad-mahmud/machine_lerning | 7e975bab70b010277cbe6f6c0a5fb44d878e24a6 | [
"Apache-2.0"
] | null | null | null | re()
# plt.scatter(range(len(x)), x)
# plt.show() | 16.333333 | 31 | 0.591837 | 9 | 49 | 3.222222 | 0.777778 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.102041 | 49 | 3 | 32 | 16.333333 | 0.659091 | 0.816327 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 5 |
0ef74222c287ebc0f236be7e92a94db3a6182b03 | 112 | py | Python | SAER/src/models/__init__.py | HCDM/XRec | dae7d3e1237b8e41913656eb33d81e78c61424ea | [
"MIT"
] | null | null | null | SAER/src/models/__init__.py | HCDM/XRec | dae7d3e1237b8e41913656eb33d81e78c61424ea | [
"MIT"
] | null | null | null | SAER/src/models/__init__.py | HCDM/XRec | dae7d3e1237b8e41913656eb33d81e78c61424ea | [
"MIT"
] | 1 | 2022-03-21T07:04:00.000Z | 2022-03-21T07:04:00.000Z | '''
chatbot seq2seq models
'''
from .saer import SAER, SentimentRegressor, TextClassifier
from .gmf import GMF
| 16 | 58 | 0.767857 | 13 | 112 | 6.615385 | 0.692308 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.010417 | 0.142857 | 112 | 6 | 59 | 18.666667 | 0.885417 | 0.196429 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 5 |
16065467890bdc7aadef1c85f16e2a99900f40c4 | 35 | py | Python | pelican/plugins/feed_amount_of_items/__init__.py | Kartones/PythonAssorted | 0351b176f45aab886965056bebd951d29f5b99fb | [
"Unlicense"
] | 12 | 2016-12-27T19:41:46.000Z | 2020-06-02T19:14:26.000Z | pelican/plugins/feed_amount_of_items/__init__.py | Kartones/PythonAssorted | 0351b176f45aab886965056bebd951d29f5b99fb | [
"Unlicense"
] | 1 | 2020-08-18T20:58:29.000Z | 2020-08-19T05:31:40.000Z | pelican/plugins/feed_amount_of_items/__init__.py | Kartones/PythonAssorted | 0351b176f45aab886965056bebd951d29f5b99fb | [
"Unlicense"
] | 2 | 2020-08-18T20:23:59.000Z | 2021-08-01T13:35:02.000Z | from .feed_amount_of_items import * | 35 | 35 | 0.857143 | 6 | 35 | 4.5 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.085714 | 35 | 1 | 35 | 35 | 0.84375 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 1 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | 5 |
1651dc9586b9d1a3fb81f50a8fa6a83c26cfb7f2 | 346 | py | Python | crawler/spiders/__init__.py | flaneuse/biothings.crawler | 70e4eec45e44fb7300643b1fb64b0824ed6e7df2 | [
"Apache-2.0"
] | null | null | null | crawler/spiders/__init__.py | flaneuse/biothings.crawler | 70e4eec45e44fb7300643b1fb64b0824ed6e7df2 | [
"Apache-2.0"
] | null | null | null | crawler/spiders/__init__.py | flaneuse/biothings.crawler | 70e4eec45e44fb7300643b1fb64b0824ed6e7df2 | [
"Apache-2.0"
] | null | null | null |
from .broadscrape.figshare import FigshareSpider
from .focusedscrape.harvard import HarvardSpider
from .focusedscrape.immport import ImmPortSpider
from .focusedscrape.ncbi_geo import NCBIGeoSpider
from .focusedscrape.nyu import NYUDataCatalogSpider
from .sitemapscrape.omicsdi import OmicsdiSpider
from .sitemapscrape.zenodo import ZenodoSpider
| 38.444444 | 51 | 0.875723 | 36 | 346 | 8.388889 | 0.555556 | 0.225166 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.083815 | 346 | 8 | 52 | 43.25 | 0.952681 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | null | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 5 |
bc4278ccda5e575ad330d95e583e49057643504e | 157 | py | Python | nationex/nationex_lib/error_response.py | Purplship/purplship-carriers | dcd044320b86e9af5fe3ef15c36ebf7828b2851b | [
"MIT"
] | 2 | 2021-04-12T22:40:28.000Z | 2021-04-21T18:28:31.000Z | nationex/nationex_lib/error_response.py | Purplship/purplship-carriers | dcd044320b86e9af5fe3ef15c36ebf7828b2851b | [
"MIT"
] | 2 | 2021-01-29T07:14:31.000Z | 2021-02-18T18:29:23.000Z | nationex/nationex_lib/error_response.py | Purplship/purplship-carriers | dcd044320b86e9af5fe3ef15c36ebf7828b2851b | [
"MIT"
] | 3 | 2020-09-09T17:04:46.000Z | 2021-03-05T00:32:32.000Z | from attr import s
from typing import Optional
@s(auto_attribs=True)
class ErrorResponse:
    """Deserialized error payload with an optional code and message."""

    # Numeric error code from the response, if present.
    code: Optional[int] = None
    # Human-readable error description, if present.
    message: Optional[str] = None
| 17.444444 | 33 | 0.732484 | 22 | 157 | 5.181818 | 0.727273 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.184713 | 157 | 8 | 34 | 19.625 | 0.890625 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 0.333333 | 0 | 0.833333 | 0 | 1 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 5 |
bc42c4d8fe11ef8ac30b6a3b84e2bac0409c22a7 | 96 | py | Python | poom.py | xDiaym/poom | 8f0e59bc0acc39b77fe761f9c1e2386e37bc6d78 | [
"MIT"
] | 3 | 2022-01-01T10:28:17.000Z | 2022-02-06T19:06:24.000Z | poom.py | xDiaym/poom | 8f0e59bc0acc39b77fe761f9c1e2386e37bc6d78 | [
"MIT"
] | 4 | 2022-01-09T13:01:20.000Z | 2022-02-11T14:55:01.000Z | poom.py | xDiaym/poom | 8f0e59bc0acc39b77fe761f9c1e2386e37bc6d78 | [
"MIT"
] | null | null | null | import sys
from poom.game import main
# Script entry point: run the game and propagate its result as the process
# exit status (raising SystemExit is exactly what sys.exit does).
if __name__ == "__main__":
    raise SystemExit(main(sys.argv))
| 13.714286 | 28 | 0.697917 | 15 | 96 | 3.933333 | 0.666667 | 0.237288 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.177083 | 96 | 6 | 29 | 16 | 0.746835 | 0 | 0 | 0 | 0 | 0 | 0.083333 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 0.5 | 0 | 0.5 | 0 | 1 | 0 | 0 | null | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | 5 |
bc46a045fdce114c5d4d985d361b468382c41c86 | 211 | py | Python | mwptoolkit/module/Embedder/__init__.py | ShubhamAnandJain/MWP-CS229 | ce86233504fdb37e104a3944fd81d4606fbfa621 | [
"MIT"
] | 71 | 2021-03-08T06:06:15.000Z | 2022-03-30T11:59:37.000Z | mwptoolkit/module/Embedder/__init__.py | ShubhamAnandJain/MWP-CS229 | ce86233504fdb37e104a3944fd81d4606fbfa621 | [
"MIT"
] | 13 | 2021-09-07T12:38:23.000Z | 2022-03-22T15:08:16.000Z | mwptoolkit/module/Embedder/__init__.py | ShubhamAnandJain/MWP-CS229 | ce86233504fdb37e104a3944fd81d4606fbfa621 | [
"MIT"
] | 21 | 2021-02-16T07:46:36.000Z | 2022-03-23T13:41:33.000Z | from __future__ import absolute_import
from __future__ import print_function
from __future__ import division
from mwptoolkit.module.Embedder import basic_embedder,bert_embedder,position_embedder,roberta_embedder | 52.75 | 102 | 0.905213 | 27 | 211 | 6.407407 | 0.518519 | 0.17341 | 0.277457 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.07109 | 211 | 4 | 102 | 52.75 | 0.882653 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0.25 | 1 | 0 | 0 | null | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | 5 |
bc92ff1834f848202a85df120e9c7e4242df3740 | 1,680 | py | Python | saopy/prov/__init__.py | CityPulse/CP_Resourcemanagement | aa670fa89d5e086a98ade3ccc152518be55abf2e | [
"MIT"
] | 2 | 2016-11-03T14:57:45.000Z | 2019-05-13T13:21:08.000Z | saopy/prov/__init__.py | CityPulse/CP_Resourcemanagement | aa670fa89d5e086a98ade3ccc152518be55abf2e | [
"MIT"
] | null | null | null | saopy/prov/__init__.py | CityPulse/CP_Resourcemanagement | aa670fa89d5e086a98ade3ccc152518be55abf2e | [
"MIT"
] | 1 | 2020-07-23T11:27:15.000Z | 2020-07-23T11:27:15.000Z | import saopy.model
from saopy.model import prov___Activity as Activity
from saopy.model import prov___ActivityInfluence as ActivityInfluence
from saopy.model import prov___Agent as Agent
from saopy.model import prov___AgentInfluence as AgentInfluence
from saopy.model import prov___Association as Association
from saopy.model import prov___Attribution as Attribution
from saopy.model import prov___Bundle as Bundle
from saopy.model import prov___Collection as Collection
from saopy.model import prov___Communication as Communication
from saopy.model import prov___Delegation as Delegation
from saopy.model import prov___Derivation as Derivation
from saopy.model import prov___EmptyCollection as EmptyCollection
from saopy.model import prov___End as End
from saopy.model import prov___Entity as Entity
from saopy.model import prov___EntityInfluence as EntityInfluence
from saopy.model import prov___Generation as Generation
from saopy.model import prov___Influence as Influence
from saopy.model import prov___InstantaneousEvent as InstantaneousEvent
from saopy.model import prov___Invalidation as Invalidation
from saopy.model import prov___Location as Location
from saopy.model import prov___Organization as Organization
from saopy.model import prov___Person as Person
from saopy.model import prov___Plan as Plan
from saopy.model import prov___PrimarySource as PrimarySource
from saopy.model import prov___Quotation as Quotation
from saopy.model import prov___Revision as Revision
from saopy.model import prov___Role as Role
from saopy.model import prov___SoftwareAgent as SoftwareAgent
from saopy.model import prov___Start as Start
from saopy.model import prov___Usage as Usage
| 50.909091 | 71 | 0.872619 | 243 | 1,680 | 5.662551 | 0.148148 | 0.225291 | 0.305233 | 0.436047 | 0.523256 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.108929 | 1,680 | 32 | 72 | 52.5 | 0.919172 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | null | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | 5 |
bcafd944a67bfbcbea2ee9c491b0558319b45303 | 55 | py | Python | python/smqtk/algorithms/nn_index/__init__.py | Purg/SMQTK | 705a2b2979935ed129aac7db578571c4ae1343e7 | [
"BSD-3-Clause"
] | 1 | 2021-04-25T16:53:50.000Z | 2021-04-25T16:53:50.000Z | python/smqtk/algorithms/nn_index/__init__.py | Purg/SMQTK | 705a2b2979935ed129aac7db578571c4ae1343e7 | [
"BSD-3-Clause"
] | 3 | 2021-09-08T02:17:49.000Z | 2022-03-12T00:40:33.000Z | python/smqtk/algorithms/nn_index/__init__.py | Purg/SMQTK | 705a2b2979935ed129aac7db578571c4ae1343e7 | [
"BSD-3-Clause"
] | null | null | null | from ._interface_nn_index import NearestNeighborsIndex
| 27.5 | 54 | 0.909091 | 6 | 55 | 7.833333 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.072727 | 55 | 1 | 55 | 55 | 0.921569 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 5 |
bcbbe2e52074d149f1183253a125059c0298f607 | 302 | py | Python | src/wai/annotations/domain/image/segmentation/__init__.py | waikato-ufdl/wai-annotations-core | bac3429e9488efb456972c74f9d462f951c4af3d | [
"Apache-2.0"
] | null | null | null | src/wai/annotations/domain/image/segmentation/__init__.py | waikato-ufdl/wai-annotations-core | bac3429e9488efb456972c74f9d462f951c4af3d | [
"Apache-2.0"
] | 3 | 2021-06-30T23:42:47.000Z | 2022-03-01T03:45:07.000Z | src/wai/annotations/domain/image/segmentation/__init__.py | waikato-ufdl/wai-annotations-core | bac3429e9488efb456972c74f9d462f951c4af3d | [
"Apache-2.0"
] | null | null | null | """
Package specifying the domain of images annotated with labels for each pixel.
"""
from ._ImageSegmentationAnnotation import ImageSegmentationAnnotation
from ._ImageSegmentationDomainSpecifier import ImageSegmentationDomainSpecifier
from ._ImageSegmentationInstance import ImageSegmentationInstance
| 43.142857 | 79 | 0.880795 | 24 | 302 | 10.958333 | 0.708333 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.086093 | 302 | 6 | 80 | 50.333333 | 0.952899 | 0.254967 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 1 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | 5 |
4c1eceddbb61a127a67a4f27d519dbb8cfc3e409 | 309 | py | Python | mmdet/ops/utils/__init__.py | MinliangLin/TSD | d84ddc049d6b18c3a2408c90d2b7dd63b4e2d3a1 | [
"Apache-2.0"
] | 454 | 2020-04-17T10:58:36.000Z | 2022-03-16T13:04:33.000Z | mmdet/ops/utils/__init__.py | MinliangLin/TSD | d84ddc049d6b18c3a2408c90d2b7dd63b4e2d3a1 | [
"Apache-2.0"
] | 37 | 2020-04-29T12:37:54.000Z | 2022-01-26T21:10:42.000Z | mmdet/ops/utils/__init__.py | MinliangLin/TSD | d84ddc049d6b18c3a2408c90d2b7dd63b4e2d3a1 | [
"Apache-2.0"
] | 61 | 2020-04-30T04:28:08.000Z | 2022-01-26T08:14:13.000Z | # from . import compiling_info
from .compiling_info import get_compiler_version, get_compiling_cuda_version
# get_compiler_version = compiling_info.get_compiler_version
# get_compiling_cuda_version = compiling_info.get_compiling_cuda_version
__all__ = ["get_compiler_version", "get_compiling_cuda_version"]
| 38.625 | 76 | 0.864078 | 41 | 309 | 5.829268 | 0.219512 | 0.217573 | 0.301255 | 0.384937 | 0.514644 | 0.514644 | 0.514644 | 0 | 0 | 0 | 0 | 0 | 0.07767 | 309 | 7 | 77 | 44.142857 | 0.838596 | 0.511327 | 0 | 0 | 0 | 0 | 0.312925 | 0.176871 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.5 | 0 | 0.5 | 0 | 0 | 0 | 0 | null | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 5 |
4c1fd3f8ee4c0698eff20d0c7c7b60a0d3ac1d3c | 68 | py | Python | example/testcfg.py | cheesycod/IBLPy | 65deaeae5ad8d0584ddcef25e470b1b58d2cb6aa | [
"MIT"
] | 5 | 2021-03-10T21:21:52.000Z | 2022-02-20T06:35:03.000Z | example/testcfg.py | cheesycod/IBLPy | 65deaeae5ad8d0584ddcef25e470b1b58d2cb6aa | [
"MIT"
] | null | null | null | example/testcfg.py | cheesycod/IBLPy | 65deaeae5ad8d0584ddcef25e470b1b58d2cb6aa | [
"MIT"
] | null | null | null | TOKEN="NzMzMDQzNzY4NjkyOTY1NDQ4.Xw9aNQ.aYw1PpzgP4b5CYxSwKBdfV4mD40"
| 34 | 67 | 0.911765 | 4 | 68 | 15.5 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.149254 | 0.014706 | 68 | 1 | 68 | 68 | 0.776119 | 0 | 0 | 0 | 0 | 0 | 0.867647 | 0.867647 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 5 |
4c467a871392cfcc7319a0d2a2f7ea95bfba81f0 | 200 | py | Python | tests/test_A008588.py | TimothyDJones/oeis | d9d608bc32ee31c73c139e1b68e4eb6315205e8d | [
"MIT"
] | 21 | 2020-03-21T17:50:13.000Z | 2022-01-18T01:52:47.000Z | tests/test_A008588.py | TimothyDJones/oeis | d9d608bc32ee31c73c139e1b68e4eb6315205e8d | [
"MIT"
] | 296 | 2019-11-18T14:04:36.000Z | 2022-03-27T21:59:24.000Z | tests/test_A008588.py | TimothyDJones/oeis | d9d608bc32ee31c73c139e1b68e4eb6315205e8d | [
"MIT"
] | 29 | 2019-11-18T11:56:22.000Z | 2022-03-26T22:31:57.000Z | from oeis import A008588
def test_A008588():
    """The sequence prefix equals the first ten nonnegative multiples of 6."""
    expected = [6 * i for i in range(10)]
    assert A008588[:10] == expected
| 11.764706 | 28 | 0.345 | 20 | 200 | 3.4 | 0.9 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.426966 | 0.555 | 200 | 16 | 29 | 12.5 | 0.337079 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.071429 | 1 | 0.071429 | true | 0 | 0.071429 | 0 | 0.142857 | 0 | 1 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 5 |
4c4a1c155cf7b16e24fbb1ce6d69238526e6b37a | 83 | py | Python | deepmatch/__init__.py | Lu1352/DeepMatch | cc8c1a943b0d8e7336621416dbf6290e71998068 | [
"Apache-2.0"
] | 2 | 2020-05-16T07:47:34.000Z | 2020-05-16T07:47:38.000Z | deepmatch/__init__.py | Lu1352/DeepMatch | cc8c1a943b0d8e7336621416dbf6290e71998068 | [
"Apache-2.0"
] | null | null | null | deepmatch/__init__.py | Lu1352/DeepMatch | cc8c1a943b0d8e7336621416dbf6290e71998068 | [
"Apache-2.0"
] | null | null | null | from .utils import check_version
# Package version string; passed to check_version at import time.
# NOTE(review): presumably check_version compares it against the latest
# published release — confirm in lztools/deepmatch utils.
__version__ = '0.1.2'

check_version(__version__)
| 16.6 | 32 | 0.795181 | 12 | 83 | 4.666667 | 0.666667 | 0.428571 | 0.678571 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.040541 | 0.108434 | 83 | 4 | 33 | 20.75 | 0.716216 | 0 | 0 | 0 | 0 | 0 | 0.060241 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.333333 | 0 | 0.333333 | 0 | 1 | 0 | 0 | null | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 5 |
4c6c2666f9adb689e14762ee368690025fd722c1 | 22 | py | Python | 1/kdh/4_7287.py | KNU-CS09/Baekjoon | 77253565dd23004668ca3f548a196e78142a1e29 | [
"MIT"
] | null | null | null | 1/kdh/4_7287.py | KNU-CS09/Baekjoon | 77253565dd23004668ca3f548a196e78142a1e29 | [
"MIT"
] | 8 | 2018-05-30T07:48:49.000Z | 2018-06-15T00:10:05.000Z | 1/kdh/4_7287.py | KNU-CS09/Baekjoon | 77253565dd23004668ca3f548a196e78142a1e29 | [
"MIT"
] | null | null | null | print(3)
print("vwan") | 11 | 13 | 0.681818 | 4 | 22 | 3.75 | 0.75 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.047619 | 0.045455 | 22 | 2 | 13 | 11 | 0.666667 | 0 | 0 | 0 | 0 | 0 | 0.173913 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 0 | 0 | 0 | 1 | 1 | 1 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 1 | 0 | 5 |
d5bc2c69eac174d95d60607340e87869c16f328f | 9,040 | py | Python | tests/test_cli.py | gmerz/openapi-python-client | c6314a77026ed49056f624a3875734b8a45d0fa5 | [
"MIT"
] | null | null | null | tests/test_cli.py | gmerz/openapi-python-client | c6314a77026ed49056f624a3875734b8a45d0fa5 | [
"MIT"
] | 47 | 2020-12-02T07:04:31.000Z | 2022-02-10T22:11:24.000Z | tests/test_cli.py | gmerz/openapi-python-client | c6314a77026ed49056f624a3875734b8a45d0fa5 | [
"MIT"
] | 1 | 2021-11-04T10:26:12.000Z | 2021-11-04T10:26:12.000Z | from pathlib import Path
from unittest.mock import MagicMock
import pytest
from typer.testing import CliRunner
from openapi_python_client.parser.errors import GeneratorError, ParseError
runner = CliRunner()
def test_version(mocker):
    """--version prints the version string and exits without generating."""
    mock_generate = mocker.patch("openapi_python_client.cli.generate")
    from openapi_python_client.cli import app

    outcome = runner.invoke(app, ["--version", "generate"])

    mock_generate.assert_not_called()
    assert outcome.exit_code == 0
    assert "openapi-python-client version: " in outcome.stdout
@pytest.fixture
def _create_new_client(mocker) -> MagicMock:
    """Patch create_new_client with a mock that reports no errors."""
    patched = mocker.patch("openapi_python_client.create_new_client", return_value=[])
    return patched
def test_config_arg(mocker, _create_new_client):
    """--config is loaded as a Path and generation proceeds with defaults."""
    load_config = mocker.patch("openapi_python_client.config.Config.load_from_path")
    from openapi_python_client.cli import MetaType, app

    config_path = "config/path"
    path = "cool/path"
    file_encoding = "utf-8"
    cli_args = [f"--config={config_path}", "generate", f"--path={path}", f"--file-encoding={file_encoding}"]

    outcome = runner.invoke(app, cli_args, catch_exceptions=False)

    assert outcome.exit_code == 0
    load_config.assert_called_once_with(path=Path(config_path))
    _create_new_client.assert_called_once_with(
        url=None, path=Path(path), custom_template_path=None, meta=MetaType.POETRY, file_encoding="utf-8"
    )
def test_bad_config(mocker, _create_new_client):
    """An unparsable config file aborts the command before any generation."""
    load_config = mocker.patch(
        "openapi_python_client.config.Config.load_from_path", side_effect=ValueError("Bad Config")
    )
    from openapi_python_client.cli import app

    config_path = "config/path"
    path = "cool/path"

    outcome = runner.invoke(app, [f"--config={config_path}", "generate", f"--path={path}"])

    assert outcome.exit_code == 2
    assert "Unable to parse config" in outcome.stdout
    load_config.assert_called_once_with(path=Path(config_path))
    _create_new_client.assert_not_called()
class TestGenerate:
    """`generate` subcommand: argument validation, option forwarding, and error reporting."""

    def test_generate_no_params(self, _create_new_client):
        """Neither --path nor --url: the command fails and generates nothing."""
        from openapi_python_client.cli import app

        result = runner.invoke(app, ["generate"])
        assert result.exit_code == 1, result.output
        _create_new_client.assert_not_called()

    def test_generate_url_and_path(self, _create_new_client):
        """--path and --url are mutually exclusive."""
        from openapi_python_client.cli import app

        result = runner.invoke(app, ["generate", "--path=blah", "--url=otherblah"])
        assert result.exit_code == 1
        _create_new_client.assert_not_called()

    def test_generate_url(self, _create_new_client):
        """--url alone is forwarded with default meta and encoding."""
        url = "cool.url"
        from openapi_python_client.cli import MetaType, app

        result = runner.invoke(app, ["generate", f"--url={url}"])
        assert result.exit_code == 0
        _create_new_client.assert_called_once_with(
            url=url, path=None, custom_template_path=None, meta=MetaType.POETRY, file_encoding="utf-8"
        )

    def test_generate_path(self, _create_new_client):
        """--path alone is converted to a Path and forwarded."""
        path = "cool/path"
        from openapi_python_client.cli import MetaType, app

        result = runner.invoke(app, ["generate", f"--path={path}"])
        assert result.exit_code == 0
        _create_new_client.assert_called_once_with(
            url=None, path=Path(path), custom_template_path=None, meta=MetaType.POETRY, file_encoding="utf-8"
        )

    def test_generate_meta(self, _create_new_client):
        """--meta=none selects MetaType.NONE."""
        path = "cool/path"
        from openapi_python_client.cli import MetaType, app

        result = runner.invoke(app, ["generate", f"--path={path}", "--meta=none"])
        assert result.exit_code == 0
        _create_new_client.assert_called_once_with(
            url=None, path=Path(path), custom_template_path=None, meta=MetaType.NONE, file_encoding="utf-8"
        )

    def test_generate_encoding(self, _create_new_client):
        """A valid --file-encoding is forwarded unchanged."""
        path = "cool/path"
        file_encoding = "utf-8"
        from openapi_python_client.cli import MetaType, app

        result = runner.invoke(app, ["generate", f"--path={path}", f"--file-encoding={file_encoding}"])
        assert result.exit_code == 0
        _create_new_client.assert_called_once_with(
            url=None, path=Path(path), custom_template_path=None, meta=MetaType.POETRY, file_encoding="utf-8"
        )

    def test_generate_encoding_errors(self, _create_new_client):
        """An unknown --file-encoding aborts with the exact error message."""
        path = "cool/path"
        file_encoding = "error-file-encoding"
        from openapi_python_client.cli import MetaType, app

        result = runner.invoke(app, ["generate", f"--path={path}", f"--file-encoding={file_encoding}"])
        assert result.exit_code == 1
        assert result.output == "Unknown encoding : {}\n".format(file_encoding)

    def test_generate_handle_errors(self, _create_new_client):
        """GeneratorErrors from create_new_client are printed and exit code is 1."""
        _create_new_client.return_value = [GeneratorError(detail="this is a message")]
        path = "cool/path"
        from openapi_python_client.cli import app

        result = runner.invoke(app, ["generate", f"--path={path}"])
        assert result.exit_code == 1
        # The exact CLI output is asserted byte-for-byte.
        assert result.output == (
            "Error(s) encountered while generating, client was not created\n\n"
            "Unable to generate the client\n\n"
            "this is a message\n\n\n"
            "If you believe this was a mistake or this tool is missing a feature you need, please open an issue at "
            "https://github.com/triaxtec/openapi-python-client/issues/new/choose\n"
        )

    def test_generate_handle_multiple_warnings(self, _create_new_client):
        """ParseErrors are printed as warnings but the command still succeeds."""
        error_1 = ParseError(data={"test": "data"}, detail="this is a message")
        error_2 = ParseError(data={"other": "data"}, detail="this is another message", header="Custom Header")
        _create_new_client.return_value = [error_1, error_2]
        path = "cool/path"
        from openapi_python_client.cli import app

        result = runner.invoke(app, ["generate", f"--path={path}"])
        assert result.exit_code == 0
        # Warnings are listed in order, each followed by its data dict.
        assert result.output == (
            "Warning(s) encountered while generating. Client was generated, but some pieces may be missing\n\n"
            "Unable to parse this part of your OpenAPI document: \n\n"
            "this is a message\n\n"
            "{'test': 'data'}\n\n"
            "Custom Header\n\n"
            "this is another message\n\n"
            "{'other': 'data'}\n\n"
            "If you believe this was a mistake or this tool is missing a feature you need, please open an issue at "
            "https://github.com/triaxtec/openapi-python-client/issues/new/choose\n"
        )
@pytest.fixture
def _update_existing_client(mocker):
    """Patch update_existing_client with a plain mock."""
    patched = mocker.patch("openapi_python_client.update_existing_client")
    return patched
class TestUpdate:
    """`update` subcommand: argument validation and option forwarding."""

    def test_update_no_params(self, _update_existing_client):
        """Neither --path nor --url: the command fails and updates nothing."""
        from openapi_python_client.cli import app

        result = runner.invoke(app, ["update"])
        assert result.exit_code == 1
        _update_existing_client.assert_not_called()

    def test_update_url_and_path(self, _update_existing_client):
        """--path and --url are mutually exclusive."""
        from openapi_python_client.cli import app

        result = runner.invoke(app, ["update", "--path=blah", "--url=otherblah"])
        assert result.exit_code == 1
        _update_existing_client.assert_not_called()

    def test_update_url(self, _update_existing_client):
        """--url alone is forwarded with default meta and encoding."""
        url = "cool.url"
        from openapi_python_client.cli import MetaType, app

        result = runner.invoke(app, ["update", f"--url={url}"])
        assert result.exit_code == 0
        _update_existing_client.assert_called_once_with(
            url=url, path=None, custom_template_path=None, meta=MetaType.POETRY, file_encoding="utf-8"
        )

    def test_update_path(self, _update_existing_client):
        """--path alone is converted to a Path and forwarded."""
        path = "cool/path"
        from openapi_python_client.cli import MetaType, app

        result = runner.invoke(app, ["update", f"--path={path}"])
        assert result.exit_code == 0
        _update_existing_client.assert_called_once_with(
            url=None, path=Path(path), custom_template_path=None, meta=MetaType.POETRY, file_encoding="utf-8"
        )

    def test_update_encoding(self, _update_existing_client):
        """A valid --file-encoding is forwarded unchanged."""
        path = "cool/path"
        file_encoding = "utf-8"
        from openapi_python_client.cli import MetaType, app

        result = runner.invoke(app, ["update", f"--path={path}", f"--file-encoding={file_encoding}"])
        assert result.exit_code == 0
        _update_existing_client.assert_called_once_with(
            url=None, path=Path(path), custom_template_path=None, meta=MetaType.POETRY, file_encoding="utf-8"
        )

    def test_update_encoding_errors(self, _update_existing_client):
        """An unknown --file-encoding aborts with the exact error message."""
        path = "cool/path"
        file_encoding = "error-file-encoding"
        from openapi_python_client.cli import MetaType, app

        result = runner.invoke(app, ["update", f"--path={path}", f"--file-encoding={file_encoding}"])
        assert result.exit_code == 1
        assert result.output == "Unknown encoding : {}\n".format(file_encoding)
| 37.355372 | 116 | 0.676991 | 1,188 | 9,040 | 4.882997 | 0.116162 | 0.060507 | 0.088433 | 0.075332 | 0.817445 | 0.786589 | 0.744527 | 0.733149 | 0.698673 | 0.665747 | 0 | 0.004615 | 0.209071 | 9,040 | 241 | 117 | 37.510373 | 0.806713 | 0 | 0 | 0.537572 | 0 | 0.028902 | 0.213827 | 0.048341 | 0 | 0 | 0 | 0 | 0.231214 | 1 | 0.115607 | false | 0 | 0.132948 | 0.011561 | 0.271676 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 5 |
d5dc75b2c3c6624d0b325fbb91b204f79274c4b6 | 92 | py | Python | lztools/types/__init__.py | Zanzes/lztools | 4091416464cbb441f5af26ade6a03ff18ae1bf01 | [
"MIT"
] | null | null | null | lztools/types/__init__.py | Zanzes/lztools | 4091416464cbb441f5af26ade6a03ff18ae1bf01 | [
"MIT"
] | null | null | null | lztools/types/__init__.py | Zanzes/lztools | 4091416464cbb441f5af26ade6a03ff18ae1bf01 | [
"MIT"
] | null | null | null | from lztools.pytools.utils import import_class
Server = import_class()
__all__ = [Server] | 15.333333 | 46 | 0.782609 | 12 | 92 | 5.5 | 0.666667 | 0.333333 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.130435 | 92 | 6 | 47 | 15.333333 | 0.825 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.666667 | 0 | 0.666667 | 0 | 1 | 0 | 0 | null | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 0 | 5 |
d5fdf960711d98160cc9c7868a9b824d63737161 | 1,923 | py | Python | sdk/python/pulumi_azure_native/logic/v20180701preview/__init__.py | sebtelko/pulumi-azure-native | 711ec021b5c73da05611c56c8a35adb0ce3244e4 | [
"Apache-2.0"
] | null | null | null | sdk/python/pulumi_azure_native/logic/v20180701preview/__init__.py | sebtelko/pulumi-azure-native | 711ec021b5c73da05611c56c8a35adb0ce3244e4 | [
"Apache-2.0"
] | null | null | null | sdk/python/pulumi_azure_native/logic/v20180701preview/__init__.py | sebtelko/pulumi-azure-native | 711ec021b5c73da05611c56c8a35adb0ce3244e4 | [
"Apache-2.0"
] | null | null | null | # coding=utf-8
# *** WARNING: this file was generated by the Pulumi SDK Generator. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
from ... import _utilities
import typing
# Export this package's modules as members:
from ._enums import *
from .get_integration_account import *
from .get_integration_account_agreement import *
from .get_integration_account_assembly import *
from .get_integration_account_batch_configuration import *
from .get_integration_account_certificate import *
from .get_integration_account_map import *
from .get_integration_account_partner import *
from .get_integration_account_schema import *
from .get_integration_account_session import *
from .get_workflow import *
from .integration_account import *
from .integration_account_agreement import *
from .integration_account_assembly import *
from .integration_account_batch_configuration import *
from .integration_account_certificate import *
from .integration_account_map import *
from .integration_account_partner import *
from .integration_account_schema import *
from .integration_account_session import *
from .list_integration_account_agreement_content_callback_url import *
from .list_integration_account_assembly_content_callback_url import *
from .list_integration_account_callback_url import *
from .list_integration_account_key_vault_keys import *
from .list_integration_account_map_content_callback_url import *
from .list_integration_account_partner_content_callback_url import *
from .list_integration_account_schema_content_callback_url import *
from .list_workflow_callback_url import *
from .list_workflow_run_action_expression_traces import *
from .list_workflow_run_action_repetition_expression_traces import *
from .list_workflow_trigger_callback_url import *
from .list_workflow_version_trigger_callback_url import *
from .workflow import *
from ._inputs import *
from . import outputs
| 44.72093 | 80 | 0.853354 | 253 | 1,923 | 6.047431 | 0.256917 | 0.222222 | 0.109804 | 0.141176 | 0.773203 | 0.349673 | 0.158824 | 0.130719 | 0 | 0 | 0 | 0.000576 | 0.096724 | 1,923 | 42 | 81 | 45.785714 | 0.880253 | 0.105564 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | null | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 5 |
913936bf037c19b1e8d44d8d655a9524948d1743 | 175 | py | Python | bin/hexes/polyhexes-1234-diamond-ring.py | tiwo/puzzler | 7ad3d9a792f0635f7ec59ffa85fb46b54fd77a7e | [
"Intel"
] | null | null | null | bin/hexes/polyhexes-1234-diamond-ring.py | tiwo/puzzler | 7ad3d9a792f0635f7ec59ffa85fb46b54fd77a7e | [
"Intel"
] | null | null | null | bin/hexes/polyhexes-1234-diamond-ring.py | tiwo/puzzler | 7ad3d9a792f0635f7ec59ffa85fb46b54fd77a7e | [
"Intel"
] | 1 | 2022-01-02T16:54:14.000Z | 2022-01-02T16:54:14.000Z | #!/usr/bin/env python
# $Id$
"""83,837 solutions"""
import puzzler
from puzzler.puzzles.polyhexes1234 import Polyhexes1234DiamondRing
puzzler.run(Polyhexes1234DiamondRing)
| 17.5 | 66 | 0.794286 | 19 | 175 | 7.315789 | 0.789474 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.106918 | 0.091429 | 175 | 9 | 67 | 19.444444 | 0.767296 | 0.24 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 0.666667 | 0 | 0.666667 | 0 | 1 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 5 |
e687f5ae04fa28193a41fd714da7a05e72ca8b7d | 119 | py | Python | dippy/core/snowflake.py | dontbanmeplz/dippy.core | 3811f0aa8bc0ae9f9a25c15c4cc054630b3e9710 | [
"MIT"
] | 4 | 2021-04-23T10:26:28.000Z | 2021-08-29T15:34:46.000Z | dippy/core/snowflake.py | dontbanmeplz/dippy.core | 3811f0aa8bc0ae9f9a25c15c4cc054630b3e9710 | [
"MIT"
] | 23 | 2021-05-27T13:48:32.000Z | 2021-12-15T15:41:28.000Z | dippy/core/snowflake.py | dontbanmeplz/dippy.core | 3811f0aa8bc0ae9f9a25c15c4cc054630b3e9710 | [
"MIT"
] | 7 | 2021-05-22T17:16:57.000Z | 2021-12-15T15:19:12.000Z | class Snowflake(int):
def __new__(cls, value):
return super().__new__(cls, 0 if not value else int(value))
| 29.75 | 67 | 0.663866 | 18 | 119 | 3.944444 | 0.722222 | 0.169014 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.010638 | 0.210084 | 119 | 3 | 68 | 39.666667 | 0.744681 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.333333 | false | 0 | 0 | 0.333333 | 1 | 0 | 1 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 5 |
e68c487ab167f132477010cbcd3c04c89a4c1eb1 | 107 | py | Python | Code/Python2.7/Kattis/12aaah.py | nicholasz2510/General | e2783cad4da7f9b50c952c2b91ef311d22b1d56f | [
"MIT"
] | 1 | 2019-11-21T15:56:03.000Z | 2019-11-21T15:56:03.000Z | Code/Python2.7/Kattis/12aaah.py | nicholasz2510/General | e2783cad4da7f9b50c952c2b91ef311d22b1d56f | [
"MIT"
] | 12 | 2019-11-21T21:00:57.000Z | 2022-02-27T01:46:56.000Z | Code/Python2.7/Kattis/12aaah.py | nicholasz2510/General | e2783cad4da7f9b50c952c2b91ef311d22b1d56f | [
"MIT"
] | 1 | 2019-11-21T20:49:18.000Z | 2019-11-21T20:49:18.000Z | import sys
if len(sys.stdin.readline()) >= len(sys.stdin.readline()):
print "go"
else:
print "no"
| 15.285714 | 58 | 0.626168 | 16 | 107 | 4.1875 | 0.625 | 0.179104 | 0.328358 | 0.567164 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.186916 | 107 | 6 | 59 | 17.833333 | 0.770115 | 0 | 0 | 0 | 0 | 0 | 0.037383 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | null | 0 | 0.2 | null | null | 0.4 | 1 | 0 | 0 | null | 0 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 5 |
e698166ca1561e62b71981010224c1a7e1d709fb | 127 | py | Python | soltravelhelper/__init__.py | lndr/soltravelhelper | 7551d248e16992d7d2fdbfda059c154a6a325731 | [
"BSD-3-Clause"
] | null | null | null | soltravelhelper/__init__.py | lndr/soltravelhelper | 7551d248e16992d7d2fdbfda059c154a6a325731 | [
"BSD-3-Clause"
] | null | null | null | soltravelhelper/__init__.py | lndr/soltravelhelper | 7551d248e16992d7d2fdbfda059c154a6a325731 | [
"BSD-3-Clause"
] | null | null | null | from .soltravelhelper import distance, time_constant_acceleration, time_constant_velocity, \
velocity_after_time, Traveler
| 42.333333 | 92 | 0.850394 | 14 | 127 | 7.285714 | 0.714286 | 0.235294 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.102362 | 127 | 2 | 93 | 63.5 | 0.894737 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 0.5 | 0 | 0.5 | 0 | 1 | 0 | 0 | null | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | 5 |
e69c1c0df4631dc53a710750ece2878f46c10557 | 697 | py | Python | juno/resources/routes/subscription_routes.py | leogregianin/juno-python | 0be2b70516b0dde713ff36cdb40888f06cc538f5 | [
"MIT"
] | 2 | 2022-03-25T21:08:46.000Z | 2022-03-31T21:10:17.000Z | juno/resources/routes/subscription_routes.py | leogregianin/juno-python | 0be2b70516b0dde713ff36cdb40888f06cc538f5 | [
"MIT"
] | null | null | null | juno/resources/routes/subscription_routes.py | leogregianin/juno-python | 0be2b70516b0dde713ff36cdb40888f06cc538f5 | [
"MIT"
] | null | null | null | from ..handler_request import get_resource_url
def get_base_url():
return f"{get_resource_url()}/subscriptions"
def get_specific_subscription_by_id_url(subscription_id):
return f"{get_base_url()}/{subscription_id}"
def get_deactivation_subscription_url(subscription_id):
return f"{get_base_url()}/{subscription_id}/deactivation"
def get_activation_subscription_url(subscription_id):
return f"{get_base_url()}/{subscription_id}/activation"
def get_cancelation_subscription_url(subscription_id):
return f"{get_base_url()}/{subscription_id}/cancelation"
def get_completion_subscription_url(subscription_id):
return f"{get_base_url()}/{subscription_id}/completion"
| 26.807692 | 61 | 0.799139 | 93 | 697 | 5.516129 | 0.215054 | 0.292398 | 0.331384 | 0.224172 | 0.561404 | 0.561404 | 0.561404 | 0.561404 | 0.561404 | 0.561404 | 0 | 0 | 0.091822 | 697 | 25 | 62 | 27.88 | 0.810427 | 0 | 0 | 0 | 0 | 0 | 0.360115 | 0.360115 | 0 | 0 | 0 | 0 | 0 | 1 | 0.461538 | false | 0 | 0.076923 | 0.461538 | 1 | 0 | 0 | 0 | 0 | null | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 5 |
e69cdc8ddf205a3eb254f1587d9bee8209e1913d | 428 | py | Python | flarestack/cosmo/__init__.py | grburgess/flarestack | 6f94b9493d5470539e2705e473c84683720122cc | [
"MIT"
] | 1 | 2021-04-19T06:26:03.000Z | 2021-04-19T06:26:03.000Z | flarestack/cosmo/__init__.py | Raimer/flarestack | 60659d368db93ead7b53addf3af9f1e8ac3a52bc | [
"MIT"
] | null | null | null | flarestack/cosmo/__init__.py | Raimer/flarestack | 60659d368db93ead7b53addf3af9f1e8ac3a52bc | [
"MIT"
] | 1 | 2022-03-01T06:11:46.000Z | 2022-03-01T06:11:46.000Z | from flarestack.cosmo.icecube_diffuse_flux import get_diffuse_flux_at_100TeV, \
get_diffuse_flux_at_1GeV, get_diffuse_flux_contour, plot_diffuse_flux, contours, \
get_diffuse_flux
from flarestack.cosmo.rates import get_rate
from flarestack.cosmo.neutrino_cosmology import calculate_transient_cosmology, define_cosmology_functions, cumulative_z
from flarestack.cosmo.simulate_catalogue import simulate_transient_catalogue | 71.333333 | 119 | 0.885514 | 58 | 428 | 6.068966 | 0.448276 | 0.1875 | 0.215909 | 0.090909 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.010101 | 0.074766 | 428 | 6 | 120 | 71.333333 | 0.878788 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 0.666667 | 0 | 0.666667 | 0 | 0 | 0 | 0 | null | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 5 |
e6f2c2f6c5bb56ebde6229c531216e72f8f375c7 | 99 | py | Python | vformer/viz/__init__.py | aditya-agrawal-30502/vformer | e1f4950f980238442ff1dc39a8f0791e4fbc9dac | [
"MIT"
] | 90 | 2021-09-08T10:21:19.000Z | 2022-03-26T18:11:47.000Z | vformer/viz/__init__.py | aditya-agrawal-30502/vformer | e1f4950f980238442ff1dc39a8f0791e4fbc9dac | [
"MIT"
] | 72 | 2021-09-09T06:54:50.000Z | 2022-03-31T09:23:31.000Z | vformer/viz/__init__.py | aditya-agrawal-30502/vformer | e1f4950f980238442ff1dc39a8f0791e4fbc9dac | [
"MIT"
] | 21 | 2021-09-09T05:56:03.000Z | 2022-03-20T08:22:09.000Z | from .vit_grad_rollout import ViTAttentionGradRollout
from .vit_rollout import ViTAttentionRollout
| 33 | 53 | 0.89899 | 11 | 99 | 7.818182 | 0.636364 | 0.162791 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.080808 | 99 | 2 | 54 | 49.5 | 0.945055 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 5 |
fc113538840e590a17e230468d942459c6d73bb6 | 2,627 | py | Python | droidlab/experiments/migrations/0004_auto_20161015_1113.py | beakman/droidlab | 9be2174d95b80f7f797442f10b61e8d2e1f3a513 | [
"BSD-3-Clause"
] | null | null | null | droidlab/experiments/migrations/0004_auto_20161015_1113.py | beakman/droidlab | 9be2174d95b80f7f797442f10b61e8d2e1f3a513 | [
"BSD-3-Clause"
] | null | null | null | droidlab/experiments/migrations/0004_auto_20161015_1113.py | beakman/droidlab | 9be2174d95b80f7f797442f10b61e8d2e1f3a513 | [
"BSD-3-Clause"
] | null | null | null | # -*- coding: utf-8 -*-
# Generated by Django 1.10.2 on 2016-10-15 09:13
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
    """Alter field definitions on the Experiment and Result models."""

    dependencies = [
        ('experiments', '0003_auto_20161013_2021'),
    ]

    # Every altered Result column is a CharField(blank=True) differing only in
    # name and max_length, so they are generated from this table instead of
    # being spelled out a dozen times.
    _RESULT_CHAR_FIELDS = [
        ('capture_id', 250),
        ('comments', 250),
        ('config', 250),
        ('device', 250),
        ('device_conf', 250),
        ('imei', 250),
        ('ip', 250),
        ('operator', 250),
        ('os_version', 250),
        ('scenario', 250),
        ('tech', 250),
        ('timestamp', 100),
    ]

    operations = [
        migrations.AlterField(
            model_name='experiment',
            name='date',
            field=models.DateTimeField(auto_now_add=True),
        ),
        migrations.AlterField(
            model_name='experiment',
            name='name',
            field=models.CharField(max_length=255),
        ),
    ] + [
        # The iterable of a class-body comprehension is evaluated in class
        # scope, so referencing _RESULT_CHAR_FIELDS here is legal.
        migrations.AlterField(
            model_name='result',
            name=field_name,
            field=models.CharField(blank=True, max_length=length),
        )
        for field_name, length in _RESULT_CHAR_FIELDS
    ]
| 30.546512 | 63 | 0.549296 | 247 | 2,627 | 5.680162 | 0.255061 | 0.199572 | 0.249465 | 0.28938 | 0.75196 | 0.75196 | 0.662865 | 0.63578 | 0.63578 | 0.63578 | 0 | 0.041049 | 0.332318 | 2,627 | 85 | 64 | 30.905882 | 0.758837 | 0.025885 | 0 | 0.679487 | 1 | 0 | 0.086072 | 0.008998 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.025641 | 0 | 0.064103 | 0 | 0 | 0 | 0 | null | 0 | 1 | 1 | 0 | 1 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 5 |
fc1306efa9012342894e207fdb872079ddfd2774 | 3,219 | py | Python | models/backbone.py | Simardeep27/pan-tensor | 56b4ea59c260fbeb357cf95af0c4fe45cc978ee0 | [
"MIT"
] | 1 | 2021-06-01T08:52:03.000Z | 2021-06-01T08:52:03.000Z | models/backbone.py | Simardeep27/pan-tensor | 56b4ea59c260fbeb357cf95af0c4fe45cc978ee0 | [
"MIT"
] | 3 | 2021-04-20T16:25:29.000Z | 2022-03-31T23:51:10.000Z | models/backbone.py | liuch37/pan-tensorflow | 1719d82fdedb5c7882699de193e01aa78c0d9f91 | [
"MIT"
] | null | null | null | '''
This code is to build backbone model by pretrained ResNet from ImageNet.
'''
import tensorflow as tf
__all__ = ['resnet50','resnet101']
class resnet50(tf.keras.Model):
    """ResNet50 backbone returning the feature maps of conv stages 2-5.

    Args:
        pretrained: if True, load ImageNet weights; otherwise random init.

    ``call`` returns ``[C2, C3, C4, C5]`` feature maps for an input batch.
    """

    def __init__(self, pretrained=False):
        super(resnet50, self).__init__()
        weights = 'imagenet' if pretrained else None
        self.model = tf.keras.applications.ResNet50(include_top=False, weights=weights)
        # extract layer 1, 2, 3, 4
        layer_names = [
            'conv2_block3_out',
            'conv3_block4_out',
            'conv4_block6_out',
            'conv5_block3_out',
        ]
        outputs = [self.model.get_layer(name).output for name in layer_names]
        # Per-stage sub-models kept for backward compatibility with callers
        # that use self.model1..self.model4 directly. They share the
        # underlying layers/weights with self.model, so no duplication.
        self.model1, self.model2, self.model3, self.model4 = [
            tf.keras.Model(inputs=self.model.input, outputs=o) for o in outputs
        ]
        # Single multi-output model: one forward pass yields all four feature
        # maps. The original call() ran the backbone four separate times.
        self.extractor = tf.keras.Model(inputs=self.model.input, outputs=outputs)

    def call(self, x):
        """Return the list of four stage feature maps for input ``x``."""
        return list(self.extractor(x))
class resnet101(tf.keras.Model):
    """ResNet101 backbone returning the feature maps of conv stages 2-5.

    Args:
        pretrained: if True, load ImageNet weights; otherwise random init.

    ``call`` returns ``[C2, C3, C4, C5]`` feature maps for an input batch.
    """

    def __init__(self, pretrained=False):
        super(resnet101, self).__init__()
        weights = 'imagenet' if pretrained else None
        self.model = tf.keras.applications.ResNet101(include_top=False, weights=weights)
        # extract layer 1, 2, 3, 4
        layer_names = [
            'conv2_block3_out',
            'conv3_block4_out',
            'conv4_block23_out',
            'conv5_block3_out',
        ]
        outputs = [self.model.get_layer(name).output for name in layer_names]
        # Per-stage sub-models kept for backward compatibility with callers
        # that use self.model1..self.model4 directly. They share the
        # underlying layers/weights with self.model, so no duplication.
        self.model1, self.model2, self.model3, self.model4 = [
            tf.keras.Model(inputs=self.model.input, outputs=o) for o in outputs
        ]
        # Single multi-output model: one forward pass yields all four feature
        # maps. The original call() ran the backbone four separate times.
        self.extractor = tf.keras.Model(inputs=self.model.input, outputs=outputs)

    def call(self, x):
        """Return the list of four stage feature maps for input ``x``."""
        return list(self.extractor(x))
# unit test
if __name__ == '__main__':
    tf.random.set_seed(0)
    # Synthetic batch: 32 RGB images of 48x160 pixels.
    batch_size, height, width, channel = 32, 48, 160, 3
    input_images = tf.random.uniform(shape=[batch_size, height, width, channel])
    model = resnet50(pretrained=False)
    output_features = model(input_images)
    print("Input size is:", input_images.shape)
    print("Output feature map size is:", len(output_features))
    for layer in range(len(output_features)):
        print("Shape of layer {} is {}".format(layer, output_features[layer].shape))
fc4bb83bd00343cb4c5f85cb81e77b8df5dddb76 | 451 | py | Python | gan_lab/resnetgan/__init__.py | sidward14/gan-lab | 7a9828810cb5cc67c1e420a8698e44f8f2448528 | [
"MIT"
] | 12 | 2020-05-16T12:21:07.000Z | 2021-11-29T04:51:18.000Z | gan_lab/resnetgan/__init__.py | sidward14/gan-zoo | 7a9828810cb5cc67c1e420a8698e44f8f2448528 | [
"MIT"
] | 1 | 2020-06-07T20:40:55.000Z | 2020-06-16T04:19:19.000Z | gan_lab/resnetgan/__init__.py | sidward14/gan-zoo | 7a9828810cb5cc67c1e420a8698e44f8f2448528 | [
"MIT"
] | 1 | 2020-05-20T12:14:15.000Z | 2020-05-20T12:14:15.000Z | # -*- coding: UTF-8 -*-
"""ResNet GANs and non-progressive GANs in general.
"""
#++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++#
import os; import sys
sys.path.append(
os.path.abspath( os.path.join( os.path.dirname( __file__ ), '..' ) )
)
from . import resblocks
from . import base
from . import architectures
from . import learner
#++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++# | 25.055556 | 80 | 0.436807 | 40 | 451 | 4.825 | 0.6 | 0.207254 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.0025 | 0.113082 | 451 | 18 | 81 | 25.055556 | 0.48 | 0.503326 | 0 | 0 | 0 | 0 | 0.009346 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 0.625 | 0 | 0.625 | 0 | 0 | 0 | 0 | null | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 5 |
fc5044545f8394b6b60265d0beab24c2289d0595 | 125 | py | Python | Foresite/upload_csv/admin.py | khoamb/Foresite | 97b155452d92fe1c487e7cbeffbc867604a1e726 | [
"MIT"
] | null | null | null | Foresite/upload_csv/admin.py | khoamb/Foresite | 97b155452d92fe1c487e7cbeffbc867604a1e726 | [
"MIT"
] | 6 | 2018-11-29T23:25:16.000Z | 2018-11-30T01:17:33.000Z | Foresite/upload_csv/admin.py | PricelessAntonio/Foresite | 4eec1ab5bf588b1ef6ec176a612bc62e8d55b424 | [
"MIT"
] | 3 | 2018-09-05T18:57:03.000Z | 2020-03-22T02:19:58.000Z | from django.contrib import admin
from .models import CsvUpload
# Register your models here.
admin.site.register(CsvUpload)
| 17.857143 | 32 | 0.808 | 17 | 125 | 5.941176 | 0.647059 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.128 | 125 | 6 | 33 | 20.833333 | 0.926606 | 0.208 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 0.666667 | 0 | 0.666667 | 0 | 1 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 5 |
fc58c2caadf9da78427b743a5b62500eba0cb561 | 8,111 | py | Python | tests/plugins/report/test_commands.py | maxipavlovic/connect-cli | 73989c076c6fb5b4562c61a351448b1c77556676 | [
"Apache-2.0"
] | 1 | 2022-02-16T10:12:46.000Z | 2022-02-16T10:12:46.000Z | tests/plugins/report/test_commands.py | maxipavlovic/connect-cli | 73989c076c6fb5b4562c61a351448b1c77556676 | [
"Apache-2.0"
] | null | null | null | tests/plugins/report/test_commands.py | maxipavlovic/connect-cli | 73989c076c6fb5b4562c61a351448b1c77556676 | [
"Apache-2.0"
] | null | null | null | import os
from click.testing import CliRunner
from openpyxl import load_workbook
from connect.cli.core.config import Config
def _prepare_config(fs):
    """Create, activate and persist a single test account under ``fs.root_path``."""
    config = Config()
    config.load(fs.root_path)
    config.add_account(
        'VA-000',
        'Account 1',
        'ApiKey XXXX:YYYY',
        endpoint='https://localhost/public/v1',
    )
    config.activate('VA-000')
    config.store()


def _invoke(ccli, fs, *args):
    """Run the CLI against the test config directory with the given arguments."""
    runner = CliRunner()
    return runner.invoke(ccli, ['-c', fs.root_path, *args])


def test_not_valid_report_dir(fs, ccli):
    """``report list`` on a non-project directory fails with an explanation."""
    _prepare_config(fs)
    os.mkdir(f'{fs.root_path}/tmp2')

    result = _invoke(ccli, fs, 'report', 'list', '-d', f'{fs.root_path}/tmp2')

    assert result.exit_code == 1
    assert f"The directory `{fs.root_path}/tmp2` is not a reports project root directory." in result.output


def test_no_reports(fs, ccli):
    """An empty ``reports.json`` is rejected as invalid."""
    _prepare_config(fs)

    result = _invoke(ccli, fs, 'report', 'list', '-d', './tests/fixtures/reports/no_reports')

    assert result.exit_code == 1
    assert 'Invalid `reports.json`: [] is too short' in result.output


def test_report_client_exception(fs, ccli):
    """A Connect API error during execution surfaces as a CLI failure."""
    _prepare_config(fs)

    result = _invoke(
        ccli, fs,
        'report', 'execute', 'entrypoint',
        '-d', './tests/fixtures/reports/connect_exception',
    )

    assert result.exit_code == 1
    assert "Error returned by Connect when executing the report" in result.output


def test_report_generic_exception(fs, ccli):
    """An unexpected exception during execution surfaces as a CLI failure."""
    _prepare_config(fs)

    result = _invoke(
        ccli, fs,
        'report', 'execute', 'entrypoint',
        '-d', './tests/fixtures/reports/generic_exception',
    )

    assert result.exit_code == 1
    assert "Unexpected error while executing the report" in result.output


def test_report_custom_exception(fs, ccli):
    """A report-raised custom exception message is shown to the user."""
    _prepare_config(fs)

    result = _invoke(
        ccli, fs,
        'report', 'execute', 'entrypoint',
        '-d', './tests/fixtures/reports/custom_exception',
    )

    assert result.exit_code == 1
    assert "Custom error" in result.output


def test_input_parameters(mocker, fs, ccli):
    """Wizard-collected input parameters let a report run to completion."""
    _prepare_config(fs)
    # Each dict is one wizard dialog result, consumed in order.
    mocker.patch(
        'connect.cli.plugins.report.wizard.dialogus',
        side_effect=[
            {
                'status': 'Active',
            },
            {
                'date': {
                    'from': '2021-01-01',
                    'to': '2021-02-01',
                },
            },
        ],
    )

    result = _invoke(
        ccli, fs,
        'report', 'execute', 'entrypoint',
        '-d', './tests/fixtures/reports/report_with_inputs',
        '-o', f'{fs.root_path}/report.xlsx',
    )

    assert result.exit_code == 0
    assert "100%" in result.output


def test_basic_report(fs, ccli):
    """``report list`` shows the fixture repository name and version."""
    _prepare_config(fs)

    result = _invoke(ccli, fs, 'report', 'list', '-d', './tests/fixtures/reports/basic_report')

    assert "Connect Reports Fixture version 1.0.0" in result.output


def test_basic_report_2(fs, ccli):
    """``report info`` on an existing report prints its description."""
    _prepare_config(fs)

    result = _invoke(
        ccli, fs,
        'report', 'info', 'entrypoint',
        '-d', './tests/fixtures/reports/basic_report',
    )

    assert result.exit_code == 0
    assert "Basic report info" in result.output


def test_basic_report_3(fs, ccli):
    """Executing a non-existent report id fails with an explanation."""
    _prepare_config(fs)

    result = _invoke(
        ccli, fs,
        'report', 'execute', 'invalid',
        '-d', './tests/fixtures/reports/basic_report',
    )

    assert result.exit_code == 1
    assert 'The report `invalid` does not exist.' in result.output


def test_basic_report_4(fs, ccli):
    """Executing a report writes an xlsx with the expected Data sheet."""
    _prepare_config(fs)
    os.mkdir(f'{fs.root_path}/report')

    # NOTE: the original passed `'-o' f'...'` (missing comma) — an accidental
    # implicit string concatenation producing one `-oVALUE` argument, which
    # click happens to accept. Made explicit here; parsing is identical.
    result = _invoke(
        ccli, fs,
        'report', 'execute', 'entrypoint',
        '-d', './tests/fixtures/reports/basic_report',
        '-o', f'{fs.root_path}/report/report',
    )

    assert result.exit_code == 0
    assert "Processing report test report" in result.output

    wb = load_workbook(f'{fs.root_path}/report/report.xlsx')

    assert wb['Data']['A1'].value == 'Row'
    assert wb['Data']['A2'].value == 1
    assert wb['Data']['A3'].value == 2
    assert wb['Data']['A4'].value is None


def test_basic_report_5(fs, ccli):
    """``report info`` on a non-existent report id fails with an explanation."""
    _prepare_config(fs)

    result = _invoke(
        ccli, fs,
        'report', 'info', 'entrypoint_wrong',
        '-d', './tests/fixtures/reports/basic_report',
    )

    assert result.exit_code == 1
    assert 'Error: The report `entrypoint_wrong` does not exist.' in result.output
| 22.783708 | 107 | 0.51313 | 854 | 8,111 | 4.759953 | 0.141686 | 0.042804 | 0.071341 | 0.059041 | 0.820172 | 0.80369 | 0.744403 | 0.667159 | 0.667159 | 0.645018 | 0 | 0.025194 | 0.349155 | 8,111 | 355 | 108 | 22.847887 | 0.744838 | 0 | 0 | 0.707396 | 0 | 0 | 0.247072 | 0.068919 | 0 | 0 | 0 | 0 | 0.080386 | 1 | 0.03537 | false | 0 | 0.012862 | 0 | 0.048232 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 5 |
fc6429d8257906ad8269cee061539a0805e58502 | 5,934 | py | Python | neuro-sdk/tests/test_file_filter.py | neuro-inc/platform-client-python | 012e355249ea900b76f9ce4209fb9d029652f9b2 | [
"Apache-2.0"
] | 11 | 2020-10-11T15:38:11.000Z | 2021-11-09T11:29:50.000Z | neuro-sdk/tests/test_file_filter.py | neuro-inc/platform-client-python | 012e355249ea900b76f9ce4209fb9d029652f9b2 | [
"Apache-2.0"
] | 611 | 2020-09-30T21:27:52.000Z | 2022-01-10T10:44:44.000Z | neuro-sdk/tests/test_file_filter.py | neuro-inc/platform-client-python | 012e355249ea900b76f9ce4209fb9d029652f9b2 | [
"Apache-2.0"
] | 1 | 2020-10-05T15:10:24.000Z | 2020-10-05T15:10:24.000Z | import codecs
from neuro_sdk.file_filter import FileFilter, translate
async def test_empty_filter() -> None:
    """A filter with no rules matches everything."""
    ff = FileFilter()
    assert await ff.match("spam")
    assert await ff.match(".spam")
    assert await ff.match("spam/ham")
async def test_exclude_all() -> None:
    """Excluding ``*`` rejects every path."""
    ff = FileFilter()
    ff.exclude("*")
    assert not await ff.match("spam")
    assert not await ff.match(".spam")
async def test_exclude() -> None:
    """``*.txt`` excludes matching names in any directory, but not
    directories whose own name matches the pattern."""
    ff = FileFilter()
    ff.exclude("*.txt")
    assert await ff.match("spam")
    assert not await ff.match("spam.txt")
    assert not await ff.match(".txt")
    assert not await ff.match("dir/spam.txt")
    assert not await ff.match("dir/.txt")
    assert await ff.match("dir.txt/spam")
    assert await ff.match("dir/child.txt/spam")
async def test_exclude_include() -> None:
    """A later ``include`` rule overrides an earlier ``exclude``."""
    ff = FileFilter()
    ff.exclude("*.txt")
    ff.include("s*")
    assert await ff.match("spam")
    assert await ff.match("spam.txt")
    assert await ff.match("ham")
    assert not await ff.match("ham.txt")
    assert not await ff.match(".txt")
    assert await ff.match("dir/spam.txt")
    assert not await ff.match("dir/ham.txt")
    assert await ff.match("dir.txt/spam")
    assert not await ff.match("dir/.txt")
async def test_exclude_with_slash() -> None:
    """A pattern containing ``/`` is anchored at the filter root,
    not applied at every depth."""
    ff = FileFilter()
    ff.exclude("dir/*.txt")
    assert await ff.match("spam.txt")
    assert not await ff.match("dir/spam.txt")
    assert not await ff.match("dir/spam.txt/")
    assert not await ff.match("dir/.txt")
    assert await ff.match("parent/dir/spam.txt")
async def test_exclude_with_leading_slash() -> None:
    """A leading ``/`` anchors the pattern to the root only."""
    ff = FileFilter()
    ff.exclude("/spam")
    assert not await ff.match("spam")
    assert not await ff.match("spam/")
    assert await ff.match("ham")
    assert await ff.match("dir/spam")
async def test_exclude_with_trailing_slash() -> None:
    """A trailing ``/`` restricts the pattern to directories
    (paths ending in ``/``)."""
    ff = FileFilter()
    ff.exclude("spam/")
    assert await ff.match("spam")
    assert not await ff.match("spam/")
async def test_exclude_crosscomponent() -> None:
    """Wildcards "?", "*" and character classes never cross a "/" boundary."""
    flt = FileFilter()
    flt.exclude("a?b")
    assert not await flt.match("a-b")
    assert await flt.match("a/b")  # "?" does not match "/"

    flt = FileFilter()
    flt.exclude("a*b")
    for rejected in ("ab", "a-b", "arab"):
        assert not await flt.match(rejected)
    for accepted in ("a/b", "alice/bob"):
        assert await flt.match(accepted)  # "*" stops at "/"

    flt = FileFilter()
    flt.exclude("a[!0-9]b")
    assert await flt.match("a0b")  # digit is rejected by the negated class
    assert not await flt.match("a-b")
    assert await flt.match("a/b")  # negated class still never matches "/"
async def test_exclude_recursive() -> None:
    """"**" in a pattern matches any number of path components.

    Fix: the original asserted ``not await ff.match("dir/child/")`` twice in
    the final section (a copy-paste duplicate); the redundant assertion is
    removed, leaving the covered behavior unchanged.
    """
    # Leading "**/": the excluded directory may appear at any depth.
    ff = FileFilter()
    ff.exclude("**/dir/*.txt")
    assert await ff.match("spam.txt")
    assert not await ff.match("dir/spam.txt")
    assert await ff.match("dir/spam")
    assert not await ff.match("parent/dir/spam.txt")
    assert await ff.match("parent/dir/spam")

    # Infix "/**/": matches at any depth below "dir".
    ff = FileFilter()
    ff.exclude("dir/**/*.txt")
    assert await ff.match("spam.txt")
    assert not await ff.match("dir/spam.txt")
    assert await ff.match("dir/spam")
    assert not await ff.match("dir/child/spam.txt")
    assert await ff.match("dir/child/spam")

    # Trailing "/**": everything under "dir/", but not the plain file "dir".
    ff = FileFilter()
    ff.exclude("dir/**")
    assert await ff.match("spam")
    assert not await ff.match("dir/")
    assert await ff.match("dir")
    assert not await ff.match("dir/child")
    assert not await ff.match("dir/child/")
    assert not await ff.match("dir/child/spam")

    # Trailing "/**/": like "/**" but only directory forms are excluded.
    ff = FileFilter()
    ff.exclude("dir/**/")
    assert await ff.match("spam")
    assert not await ff.match("dir/")
    assert await ff.match("dir")
    assert not await ff.match("dir/child/")
    assert await ff.match("dir/child")
    assert not await ff.match("dir/child/spam/")
    assert await ff.match("dir/child/spam")
async def test_exclude_include_with_prefix() -> None:
    """The prefix argument scopes a rule to a single subtree."""
    flt = FileFilter()
    flt.exclude("*.txt", "parent/")
    flt.include("s*", "parent/child/")
    for accepted in (
        "spam.txt",
        "ham.txt",
        "other/spam.txt",
        "other/ham.txt",
        "parent/child/spam.txt",
    ):
        assert await flt.match(accepted)
    for rejected in ("parent/spam.txt", "parent/ham.txt", "parent/child/ham.txt"):
        assert not await flt.match(rejected)
def test_translate() -> None:
    """translate() converts a glob pattern into its regex equivalent."""
    cases = {
        "": "/?",
        "abc": r"abc/?",
        "/abc": r"/abc/?",
        "abc/": r"abc/",
        "abc/de": r"abc/de/?",
        "a?c": r"a[^/]c/?",
        "a*c": r"a[^/]*c/?",
        "a[bc]d": r"a[bc](?<!/)d/?",
        "a[b-d]e": r"a[b-d](?<!/)e/?",
        "a[!b-d]e": r"a[^b-d](?<!/)e/?",
        "[a-zA-Z_]": r"[a-zA-Z_](?<!/)/?",
        r"\?": r"\?/?",
    }
    for pattern, expected in cases.items():
        assert translate(pattern) == expected
def test_translate_recursive() -> None:
    """Translation of the recursive "**" glob in every position."""
    cases = [
        ("**", r".*"),
        ("**/", r"(?:.+/)?"),
        ("**/abc", r"(?:.+/)?abc/?"),
        ("/**", r"/.*"),
        ("abc/**", r"abc/.*"),
        ("/**/", r"/(?:.+/)?"),
        ("abc/**/def", r"abc/(?:.+/)?def/?"),
    ]
    for pattern, expected in cases:
        assert translate(pattern) == expected
async def test_read_from_buffer() -> None:
    """Parsing an ignore-file buffer: UTF-8 BOM, CRLF endings, trailing
    spaces, blank lines and comments are tolerated; "!" negates a rule."""
    raw = (
        codecs.BOM_UTF8
        + b"*.txt \r\n"  # CRLF and trailing spaces
        b"\n"  # empty line
        b"# comment\n"  # comment
        b"!s*"  # negation
    )
    flt = FileFilter()
    flt.read_from_buffer(raw, prefix="base/")
    # Only the two effective rules survive parsing.
    assert len(flt.filters) == 2
    assert await flt.match("base/spam.txt")
    assert not await flt.match("base/ham.txt")
    # Rules are scoped under the prefix, so an unprefixed path is untouched.
    assert await flt.match("ham.txt")
async def test_parent_ignore_file() -> None:
    """Rules with a per-rule source directory (third positional argument —
    presumably the directory of the ignore file that supplied the rule;
    exact scoping semantics to be confirmed against FileFilter.exclude)."""
    flt = FileFilter()
    flt.exclude("dir/s*", "", "dir/")
    flt.exclude("/*.txt", "", "dir/")
    assert not await flt.match("spam.txt")
    assert await flt.match("ham.txt")
    assert not await flt.match("spam")
| 30.587629 | 68 | 0.613414 | 871 | 5,934 | 4.130884 | 0.081516 | 0.155642 | 0.266815 | 0.205114 | 0.833519 | 0.798221 | 0.697888 | 0.650083 | 0.577821 | 0.554197 | 0 | 0.001047 | 0.195484 | 5,934 | 193 | 69 | 30.746114 | 0.752618 | 0.008763 | 0 | 0.449367 | 0 | 0 | 0.18734 | 0.003573 | 0 | 0 | 0 | 0 | 0.632911 | 1 | 0.012658 | false | 0 | 0.012658 | 0 | 0.025316 | 0 | 0 | 0 | 0 | null | 0 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 5 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.