code stringlengths 22 1.05M | apis listlengths 1 3.31k | extract_api stringlengths 75 3.25M |
|---|---|---|
# Code generated by `typeddictgen`. DO NOT EDIT.
"""V2beta2MetricStatusDict generated type."""
from typing import TypedDict
from kubernetes_typed.client import V2beta2ExternalMetricStatusDict, V2beta2ObjectMetricStatusDict, V2beta2PodsMetricStatusDict, V2beta2ResourceMetricStatusDict
class V2beta2MetricStatusDict(TypedDict, total=False):
    """V2beta2MetricStatus as a typed dictionary; all keys are optional."""

    external: V2beta2ExternalMetricStatusDict
    object: V2beta2ObjectMetricStatusDict
    pods: V2beta2PodsMetricStatusDict
    resource: V2beta2ResourceMetricStatusDict
    type: str
| [
"typing.TypedDict"
] | [((313, 557), 'typing.TypedDict', 'TypedDict', (['"""V2beta2MetricStatusDict"""', "{'external': V2beta2ExternalMetricStatusDict, 'object':\n V2beta2ObjectMetricStatusDict, 'pods': V2beta2PodsMetricStatusDict,\n 'resource': V2beta2ResourceMetricStatusDict, 'type': str}"], {'total': '(False)'}), "('V2beta2MetricStatusDict', {'external':\n V2beta2ExternalMetricStatusDict, 'object':\n V2beta2ObjectMetricStatusDict, 'pods': V2beta2PodsMetricStatusDict,\n 'resource': V2beta2ResourceMetricStatusDict, 'type': str}, total=False)\n", (322, 557), False, 'from typing import TypedDict\n')] |
#!/usr/bin/env python
# Simulate the /robot_driver so that interfaces to it can operate the same on
# the real robot and in simulation
from __future__ import print_function, division
from threading import Lock
import rospy
import diagnostic_updater
from diagnostic_msgs.msg import DiagnosticStatus
from fetch_driver_msgs.msg import (RobotState, ChargerState, GripperState,
JointState as FetchJointState)
from power_msgs.msg import BreakerState
from sensor_msgs.msg import JointState
from power_msgs.srv import BreakerCommand, BreakerCommandResponse
from std_srvs.srv import Trigger, TriggerResponse
# Helper function to produce diagnostic updaters
def produce_breaker_diagnostic_func(breaker):
    """Create a diagnostic-update callback bound to *breaker*.

    The returned callable sets the summary of ``stat`` according to
    whether the breaker is currently enabled, then returns ``stat``.
    """
    def diagnostic_func(stat):
        enabled = breaker.state == BreakerState.STATE_ENABLED
        level = DiagnosticStatus.OK if enabled else DiagnosticStatus.ERROR
        stat.summary(level, "Enabled" if enabled else "Disabled")
        return stat
    return diagnostic_func
# This is the class that acts as the stub to the robot driver
class SimulatedRobotDriver(object):
    """
    In simulation, implement the minimum amount of logic necessary to correctly
    spoof the behaviour of the robot driver
    """
    # Battery model constants; capacity decays linearly over time in spin()
    BATTERY_FULL_VOLTAGE = 25
    BATTERY_LOW_VOLTAGE = 19.9
    BATTERY_FULL_CAPACITY = 133400
    BATTERY_LOW_CAPACITY = 6000
    BATTERY_CAPACITY_DECAY = 10 # Amount of capacity to lose per second
    # Joint mirrored from /joint_states into the published gripper state
    GRIPPER_JOINT_NAME = 'l_gripper_finger_joint'
    def __init__(self):
        """Set up the spoofed state, publishers, subscribers, services and diagnostics."""
        # Internal parameters for the functions of this driver
        self._publish_rate = 50 # Hz rate.
        # The state of the arm, gripper, and base breakers
        self._arm_breaker_state = BreakerState(
            name="arm_breaker",
            state=BreakerState.STATE_ENABLED
        )
        self._base_breaker_state = BreakerState(
            name="base_breaker",
            state=BreakerState.STATE_ENABLED
        )
        self._gripper_breaker_state = BreakerState(
            name="gripper_breaker",
            state=BreakerState.STATE_ENABLED
        )
        # The cached state of the robot
        self._robot_state = RobotState(
            ready=True,
            breakers=[self._arm_breaker_state, self._base_breaker_state, self._gripper_breaker_state],
            charger=ChargerState(
                state=0,  # Unknown what this actually is
                charging_mode=2,  # "Not Charging" according to the comments
                battery_voltage=SimulatedRobotDriver.BATTERY_FULL_VOLTAGE,
                battery_capacity=SimulatedRobotDriver.BATTERY_FULL_CAPACITY
            )
        )
        # Guards _robot_state (and the breaker states embedded in it)
        self._robot_state_lock = Lock()
        # The cached state of the gripper
        self._gripper_state = GripperState(ready=True)
        self._gripper_state.joints.append(FetchJointState(
            name="gripper_joint",
            control_mode=3,  # Based on values on the robot
            position=0.05,  # Default start position of open
        ))
        # Guards _gripper_state; acquired independently of _robot_state_lock
        self._gripper_state_lock = Lock()
        # Create the diagnostic updater: one entry per breaker
        self._updater = diagnostic_updater.Updater()
        self._updater.setHardwareID("none")
        self._updater.add("arm_breaker", produce_breaker_diagnostic_func(self._arm_breaker_state))
        self._updater.add("base_breaker", produce_breaker_diagnostic_func(self._base_breaker_state))
        self._updater.add("gripper_breaker", produce_breaker_diagnostic_func(self._gripper_breaker_state))
        # Publishers
        self._robot_state_publisher = rospy.Publisher('/robot_state', RobotState, queue_size=1)
        self._gripper_state_publisher = rospy.Publisher('/gripper_state', GripperState, queue_size=1)
        # Subscribers
        self._joint_state_sub = rospy.Subscriber('/joint_states', JointState, self._on_joint_state)
        # The services to set and reset the breakers
        self._arm_breaker_service = rospy.Service("/arm_breaker", BreakerCommand, self.set_arm_breaker)
        self._base_breaker_service = rospy.Service("/base_breaker", BreakerCommand, self.set_base_breaker)
        self._gripper_breaker_service = rospy.Service("/gripper_breaker", BreakerCommand, self.set_gripper_breaker)
        # Simulation service to put the battery into low mode or not
        self._battery_low_service = rospy.Service(
            "~battery_to_low",
            Trigger,
            self._on_battery_to_level(
                SimulatedRobotDriver.BATTERY_LOW_VOLTAGE, SimulatedRobotDriver.BATTERY_LOW_CAPACITY
            )
        )
        self._battery_nominal_service = rospy.Service(
            "~battery_to_nominal",
            Trigger,
            self._on_battery_to_level(
                SimulatedRobotDriver.BATTERY_FULL_VOLTAGE, SimulatedRobotDriver.BATTERY_FULL_CAPACITY
            )
        )
    def _on_joint_state(self, msg):
        """Mirror the simulated gripper joint from /joint_states into the gripper state."""
        try:
            idx = msg.name.index(SimulatedRobotDriver.GRIPPER_JOINT_NAME)
            with self._gripper_state_lock:
                self._gripper_state.joints[0].position = msg.position[idx]
                self._gripper_state.joints[0].velocity = msg.velocity[idx]
                self._gripper_state.joints[0].effort = msg.effort[idx]
        except ValueError as e:
            # Gripper joint not present in this message; nothing to update
            pass
    def _on_battery_to_level(self, battery_voltage, battery_capacity):
        """Return a Trigger service handler that sets the battery to the given level."""
        def service_responder(req):
            with self._robot_state_lock:
                self._robot_state.charger.battery_voltage = battery_voltage
                self._robot_state.charger.battery_capacity = battery_capacity
            return TriggerResponse(success=True)
        return service_responder
    def _calculate_robot_state(self):
        # Make sure to acquire the lock to the robot state before calling this
        # function
        # The robot is faulted if any of the three breakers is disabled.
        self._robot_state.faulted = (
            self._arm_breaker_state.state == BreakerState.STATE_DISABLED
            or self._base_breaker_state.state == BreakerState.STATE_DISABLED
            or self._gripper_breaker_state.state == BreakerState.STATE_DISABLED
        )
    def set_arm_breaker(self, req):
        """BreakerCommand handler: enable/disable the arm breaker."""
        with self._robot_state_lock:
            self._arm_breaker_state.state = BreakerState.STATE_ENABLED if req.enable else BreakerState.STATE_DISABLED
            self._calculate_robot_state()
        return BreakerCommandResponse(self._arm_breaker_state)
    def set_base_breaker(self, req):
        """BreakerCommand handler: enable/disable the base breaker."""
        with self._robot_state_lock:
            self._base_breaker_state.state = BreakerState.STATE_ENABLED if req.enable else BreakerState.STATE_DISABLED
            self._calculate_robot_state()
        return BreakerCommandResponse(self._base_breaker_state)
    def set_gripper_breaker(self, req):
        """BreakerCommand handler: enable/disable the gripper breaker.

        Also flips the gripper's own ready/faulted flags under the gripper lock.
        """
        with self._gripper_state_lock:
            self._gripper_state.ready = req.enable
            self._gripper_state.faulted = not req.enable
        with self._robot_state_lock:
            self._gripper_breaker_state.state = BreakerState.STATE_ENABLED if req.enable else BreakerState.STATE_DISABLED
            self._calculate_robot_state()
        return BreakerCommandResponse(self._gripper_breaker_state)
    def spin(self):
        """Publish robot and gripper state at the configured rate until shutdown."""
        rate = rospy.Rate(self._publish_rate)
        post = rospy.Time.now()
        rate.sleep()
        while not rospy.is_shutdown():
            pre = rospy.Time.now()
            with self._robot_state_lock:
                self._robot_state.header.stamp = pre
                self._robot_state.header.seq += 1
                # Drain the battery proportionally to the elapsed wall time
                self._robot_state.charger.battery_capacity -= (
                    SimulatedRobotDriver.BATTERY_CAPACITY_DECAY * (pre - post).to_sec()
                )
                self._robot_state_publisher.publish(self._robot_state)
            with self._gripper_state_lock:
                self._gripper_state.header.stamp = pre
                self._gripper_state.header.seq += 1
                self._gripper_state_publisher.publish(self._gripper_state)
            self._updater.update()
            post = rospy.Time.now()
            rate.sleep()
if __name__ == '__main__':
    # Start the stub driver node and publish state until shutdown.
    rospy.init_node('robot_driver')
    simulated_driver = SimulatedRobotDriver()
    simulated_driver.spin()
| [
"fetch_driver_msgs.msg.ChargerState",
"rospy.Subscriber",
"rospy.is_shutdown",
"rospy.init_node",
"threading.Lock",
"rospy.Service",
"diagnostic_updater.Updater",
"rospy.Time.now",
"power_msgs.msg.BreakerState",
"fetch_driver_msgs.msg.JointState",
"rospy.Rate",
"rospy.Publisher",
"std_srvs.s... | [((8125, 8156), 'rospy.init_node', 'rospy.init_node', (['"""robot_driver"""'], {}), "('robot_driver')\n", (8140, 8156), False, 'import rospy\n'), ((1757, 1823), 'power_msgs.msg.BreakerState', 'BreakerState', ([], {'name': '"""arm_breaker"""', 'state': 'BreakerState.STATE_ENABLED'}), "(name='arm_breaker', state=BreakerState.STATE_ENABLED)\n", (1769, 1823), False, 'from power_msgs.msg import BreakerState\n'), ((1893, 1960), 'power_msgs.msg.BreakerState', 'BreakerState', ([], {'name': '"""base_breaker"""', 'state': 'BreakerState.STATE_ENABLED'}), "(name='base_breaker', state=BreakerState.STATE_ENABLED)\n", (1905, 1960), False, 'from power_msgs.msg import BreakerState\n'), ((2033, 2103), 'power_msgs.msg.BreakerState', 'BreakerState', ([], {'name': '"""gripper_breaker"""', 'state': 'BreakerState.STATE_ENABLED'}), "(name='gripper_breaker', state=BreakerState.STATE_ENABLED)\n", (2045, 2103), False, 'from power_msgs.msg import BreakerState\n'), ((2731, 2737), 'threading.Lock', 'Lock', ([], {}), '()\n', (2735, 2737), False, 'from threading import Lock\n'), ((2811, 2835), 'fetch_driver_msgs.msg.GripperState', 'GripperState', ([], {'ready': '(True)'}), '(ready=True)\n', (2823, 2835), False, 'from fetch_driver_msgs.msg import RobotState, ChargerState, GripperState, JointState as FetchJointState\n'), ((3097, 3103), 'threading.Lock', 'Lock', ([], {}), '()\n', (3101, 3103), False, 'from threading import Lock\n'), ((3169, 3197), 'diagnostic_updater.Updater', 'diagnostic_updater.Updater', ([], {}), '()\n', (3195, 3197), False, 'import diagnostic_updater\n'), ((3609, 3666), 'rospy.Publisher', 'rospy.Publisher', (['"""/robot_state"""', 'RobotState'], {'queue_size': '(1)'}), "('/robot_state', RobotState, queue_size=1)\n", (3624, 3666), False, 'import rospy\n'), ((3707, 3768), 'rospy.Publisher', 'rospy.Publisher', (['"""/gripper_state"""', 'GripperState'], {'queue_size': '(1)'}), "('/gripper_state', GripperState, queue_size=1)\n", (3722, 3768), False, 'import 
rospy\n'), ((3824, 3891), 'rospy.Subscriber', 'rospy.Subscriber', (['"""/joint_states"""', 'JointState', 'self._on_joint_state'], {}), "('/joint_states', JointState, self._on_joint_state)\n", (3840, 3891), False, 'import rospy\n'), ((3982, 4049), 'rospy.Service', 'rospy.Service', (['"""/arm_breaker"""', 'BreakerCommand', 'self.set_arm_breaker'], {}), "('/arm_breaker', BreakerCommand, self.set_arm_breaker)\n", (3995, 4049), False, 'import rospy\n'), ((4087, 4156), 'rospy.Service', 'rospy.Service', (['"""/base_breaker"""', 'BreakerCommand', 'self.set_base_breaker'], {}), "('/base_breaker', BreakerCommand, self.set_base_breaker)\n", (4100, 4156), False, 'import rospy\n'), ((4197, 4272), 'rospy.Service', 'rospy.Service', (['"""/gripper_breaker"""', 'BreakerCommand', 'self.set_gripper_breaker'], {}), "('/gripper_breaker', BreakerCommand, self.set_gripper_breaker)\n", (4210, 4272), False, 'import rospy\n'), ((7224, 7254), 'rospy.Rate', 'rospy.Rate', (['self._publish_rate'], {}), '(self._publish_rate)\n', (7234, 7254), False, 'import rospy\n'), ((7270, 7286), 'rospy.Time.now', 'rospy.Time.now', ([], {}), '()\n', (7284, 7286), False, 'import rospy\n'), ((2878, 2946), 'fetch_driver_msgs.msg.JointState', 'FetchJointState', ([], {'name': '"""gripper_joint"""', 'control_mode': '(3)', 'position': '(0.05)'}), "(name='gripper_joint', control_mode=3, position=0.05)\n", (2893, 2946), True, 'from fetch_driver_msgs.msg import RobotState, ChargerState, GripperState, JointState as FetchJointState\n'), ((5644, 5673), 'std_srvs.srv.TriggerResponse', 'TriggerResponse', ([], {'success': '(True)'}), '(success=True)\n', (5659, 5673), False, 'from std_srvs.srv import Trigger, TriggerResponse\n'), ((6375, 6422), 'power_msgs.srv.BreakerCommandResponse', 'BreakerCommandResponse', (['self._arm_breaker_state'], {}), '(self._arm_breaker_state)\n', (6397, 6422), False, 'from power_msgs.srv import BreakerCommand, BreakerCommandResponse\n'), ((6678, 6726), 'power_msgs.srv.BreakerCommandResponse', 
'BreakerCommandResponse', (['self._base_breaker_state'], {}), '(self._base_breaker_state)\n', (6700, 6726), False, 'from power_msgs.srv import BreakerCommand, BreakerCommandResponse\n'), ((7136, 7187), 'power_msgs.srv.BreakerCommandResponse', 'BreakerCommandResponse', (['self._gripper_breaker_state'], {}), '(self._gripper_breaker_state)\n', (7158, 7187), False, 'from power_msgs.srv import BreakerCommand, BreakerCommandResponse\n'), ((7327, 7346), 'rospy.is_shutdown', 'rospy.is_shutdown', ([], {}), '()\n', (7344, 7346), False, 'import rospy\n'), ((7366, 7382), 'rospy.Time.now', 'rospy.Time.now', ([], {}), '()\n', (7380, 7382), False, 'import rospy\n'), ((8050, 8066), 'rospy.Time.now', 'rospy.Time.now', ([], {}), '()\n', (8064, 8066), False, 'import rospy\n'), ((2366, 2534), 'fetch_driver_msgs.msg.ChargerState', 'ChargerState', ([], {'state': '(0)', 'charging_mode': '(2)', 'battery_voltage': 'SimulatedRobotDriver.BATTERY_FULL_VOLTAGE', 'battery_capacity': 'SimulatedRobotDriver.BATTERY_FULL_CAPACITY'}), '(state=0, charging_mode=2, battery_voltage=SimulatedRobotDriver\n .BATTERY_FULL_VOLTAGE, battery_capacity=SimulatedRobotDriver.\n BATTERY_FULL_CAPACITY)\n', (2378, 2534), False, 'from fetch_driver_msgs.msg import RobotState, ChargerState, GripperState, JointState as FetchJointState\n')] |
# Configuration file for the Sphinx documentation builder.
#
# This file only contains a selection of the most common options. For a full
# list see the documentation:
# https://www.sphinx-doc.org/en/master/usage/configuration.html
# -- Path setup --------------------------------------------------------------
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#
import sys
from pathlib import Path
import sphinx_rtd_theme # noqa: F401
from sphinx.ext import apidoc
from awsstepfuncs import __version__
# Locate the package source and make it importable for autodoc.
current_dir = Path(__file__).parent.absolute()
base_dir = current_dir.parents[1]
code_dir = base_dir / "src" / "awsstepfuncs"
sys.path.insert(0, str(code_dir))

# Keep a README symlink next to the docs, refreshed on every build.
readme_dest = current_dir / "README.md"
readme_src = base_dir / "README.md"
# BUG FIX: Path.exists() follows symlinks and returns False for a dangling
# link, so a broken leftover symlink was never removed and symlink_to()
# would raise FileExistsError; also test is_symlink().
if readme_dest.exists() or readme_dest.is_symlink():
    readme_dest.unlink()
readme_dest.symlink_to(readme_src)
# -- Project information -----------------------------------------------------
project = "awsstepfuncs"
author = "<NAME>"
copyright = "<NAME>" # noqa: A001
# The full version, including alpha/beta/rc tags
release = __version__
# -- General configuration ---------------------------------------------------
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
    "recommonmark",
    "sphinx_markdown_tables",
    "sphinx_rtd_theme",
    "sphinx.ext.autodoc",
    "sphinx.ext.coverage",
    "sphinx.ext.napoleon",
]
# Render type hints in the parameter descriptions rather than the signatures.
autodoc_typehints = "description"
# recommonmark extension allows mixed filetypes
source_suffix = [".rst", ".md"]
# Add any paths that contain templates here, relative to this directory.
# templates_path = ["_templates"]
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
# This pattern also affects html_static_path and html_extra_path.
exclude_patterns = ["_build", "Thumbs.db", ".DS_Store"]
# -- Options for HTML output -------------------------------------------------
# The theme to use for HTML and HTML Help pages.  See the documentation for
# a list of builtin themes.
html_theme = "sphinx_rtd_theme"
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
# html_static_path = ["_static"]
def run_apidoc(_):
    """Run sphinx-apidoc to (re)generate the code-reference pages.

    Connected to the ``builder-inited`` event; the single argument is the
    Sphinx application, which is not needed here.
    """
    # Note: the dead `exclude = []` placeholder was removed; add trailing
    # module paths to this list if some modules should be excluded.
    argv = [
        "--doc-project", "Code Reference",   # section title
        "-M",                                # modules before submodules
        "-f",                                # overwrite existing files
        "-d", "3",                           # maximum TOC depth
        "--tocfile", "index",
        "-o", str(current_dir / "_code_reference"),
        str(code_dir),
    ]
    apidoc.main(argv)
def setup(app):
    """Sphinx extension entry point: regenerate the API docs when the builder starts."""
    app.connect("builder-inited", run_apidoc)
| [
"sphinx.ext.apidoc.main",
"pathlib.Path"
] | [((2908, 2925), 'sphinx.ext.apidoc.main', 'apidoc.main', (['argv'], {}), '(argv)\n', (2919, 2925), False, 'from sphinx.ext import apidoc\n'), ((711, 725), 'pathlib.Path', 'Path', (['__file__'], {}), '(__file__)\n', (715, 725), False, 'from pathlib import Path\n')] |
from random import randint
from time import sleep


def decide(player, computer):
    """Return the round outcome: 'player', 'computer' or 'tie'.

    Choices are 1) Stone, 2) Paper, 3) Scissors.  Each choice beats the
    previous one cyclically, so the player wins exactly when
    (player - computer) % 3 == 1 (paper > stone, scissors > paper,
    stone > scissors).
    """
    if player == computer:
        return 'tie'
    if (player - computer) % 3 == 1:
        return 'player'
    return 'computer'


def main():
    """Play a single round of stone-paper-scissors against the computer."""
    print('=' * 18)
    print('\033[1mSTONE PAPER AND SCISSORS\033[m')
    print('=' * 18)
    print('I already chose mine now missing you')
    sleep(1)
    computer = randint(1, 3)
    player = int(input('\033[1mChoose between\033[m \033[1;33m1) Stone 2) Paper and 3) Scissors\033[m : '))
    sleep(1)
    print('\033[1;33mThought about {}\033[m'.format(computer))
    # BUG FIX: the original compared the raw numbers (player < computer),
    # which is not how the game works (e.g. stone must beat scissors).
    outcome = decide(player, computer)
    if outcome == 'computer':
        print('\033[1;32mI WIN HEHE :D\033[m')
    elif outcome == 'tie':
        print('\033[1;31mTIE, LET´S GO AGAIN\033[m')
    else:
        print('\033[1;36mYOUR LOST HAHAHA:D\033[m ')


if __name__ == '__main__':
    main()
| [
"random.randint",
"time.sleep"
] | [((180, 188), 'time.sleep', 'sleep', (['(1)'], {}), '(1)\n', (185, 188), False, 'from time import sleep\n'), ((201, 214), 'random.randint', 'randint', (['(1)', '(3)'], {}), '(1, 3)\n', (208, 214), False, 'from random import randint\n'), ((321, 329), 'time.sleep', 'sleep', (['(1)'], {}), '(1)\n', (326, 329), False, 'from time import sleep\n')] |
# @l2g 1673 python3
# [1673] Find the Most Competitive Subsequence
# Difficulty: Medium
# https://leetcode.com/problems/find-the-most-competitive-subsequence
#
# Given an integer array nums and a positive integer k,
# return the most competitive subsequence of nums of size k.
# An array's subsequence is a resulting sequence obtained by erasing some (possibly zero) elements from the array.
# We define that a subsequence a is more competitive than a subsequence b (of the same length) if in the first position where a and b differ,
# subsequence a has a number less than the corresponding number in b.For example,[1,3,
# 4] is more competitive than [1,3,5] because the first position they differ is at the final number,
# and 4 is less than 5.
#
# Example 1:
#
# Input: nums = [3,5,2,6], k = 2
# Output: [2,6]
# Explanation: Among the set of every possible subsequence: {[3,5],[3,2],[3,6],[5,2],[5,6],[2,6]},[2,
# 6] is the most competitive.
#
# Example 2:
#
# Input: nums = [2,4,3,3,5,4,9,6], k = 4
# Output: [2,3,3,4]
#
#
# Constraints:
#
# 1 <= nums.length <= 10^5
# 0 <= nums[i] <= 10^9
# 1 <= k <= nums.length
#
#
from typing import List
class Solution:
    def mostCompetitive(self, nums: List[int], k: int) -> List[int]:
        """Return the lexicographically smallest length-k subsequence of nums.

        Monotonic-stack sweep: a larger tail element is popped whenever
        enough elements remain to the right to still reach length k.
        """
        n = len(nums)
        stack: List[int] = []
        for i, value in enumerate(nums):
            # Pop while the tail is worse than `value` and dropping it
            # still leaves enough elements (including `value`) to fill k.
            while stack and stack[-1] > value and len(stack) + (n - i) > k:
                stack.pop()
            if len(stack) < k:
                stack.append(value)
        return stack
if __name__ == "__main__":
    import os
    import pytest

    # Run this problem's unit tests when the file is executed directly.
    suite = os.path.join("tests", "test_1673.py")
    pytest.main([suite])
| [
"os.path.join"
] | [((1609, 1646), 'os.path.join', 'os.path.join', (['"""tests"""', '"""test_1673.py"""'], {}), "('tests', 'test_1673.py')\n", (1621, 1646), False, 'import os\n')] |
#!/usr/bin/env python
import os
import unittest
import sys
from test_common import TestCommon
sys.path.append(os.path.join(os.path.dirname(__file__), '..', 'library'))
from fastly_service import FastlyConfiguration
class TestFastlyLoggingS3(TestCommon):
    """Integration tests (VCR-recorded) for Fastly S3 logging configuration."""
    @TestCommon.vcr.use_cassette()
    def test_fastly_s3s(self):
        """Applying an S3 logging config sets every field and is idempotent."""
        s3s_configuration = self.minimal_configuration.copy()
        s3s_configuration.update({
            's3s': [{
                'name' : 'test_s3',
                'domain' : self.FASTLY_TEST_DOMAIN,
                'secret_key' : 'SECRET',
                'period' : 60,
                'bucket_name' : 'prod-fastly-logs',
                'timestamp_format' : '%Y-%m-%dT%H:%M:%S.000',
                'redundancy' : 'standard',
                'access_key' : 'ACCESS_KEY',
                'format' : '%{%Y-%m-%dT%H:%S.000}t %h "%r" %>s %b',
            }],
        })
        configuration = FastlyConfiguration(s3s_configuration)
        service = self.enforcer.apply_configuration(self.FASTLY_TEST_SERVICE, configuration).service
        svc_conf = service.active_version.configuration
        self.assertEqual(svc_conf.s3s[0].name, 'test_s3')
        self.assertEqual(svc_conf.s3s[0].domain, self.FASTLY_TEST_DOMAIN)
        self.assertEqual(svc_conf.s3s[0].secret_key, 'SECRET')
        self.assertEqual(svc_conf.s3s[0].period, 60)
        self.assertEqual(svc_conf.s3s[0].bucket_name, 'prod-fastly-logs')
        self.assertEqual(svc_conf.s3s[0].timestamp_format, '%Y-%m-%dT%H:%M:%S.000')
        self.assertEqual(svc_conf.s3s[0].redundancy, 'standard')
        self.assertEqual(svc_conf.s3s[0].access_key, 'ACCESS_KEY')
        self.assertEqual(svc_conf.s3s[0].format, '%{%Y-%m-%dT%H:%S.000}t %h "%r" %>s %b')
        self.assertEqual(svc_conf, configuration)
        # Re-applying the identical configuration must not create a new version.
        active_version_number = service.active_version.number
        service = self.enforcer.apply_configuration(self.FASTLY_TEST_SERVICE, configuration).service
        self.assertEqual(service.active_version.number, active_version_number)
    @TestCommon.vcr.use_cassette()
    def test_fastly_s3s_remove(self):
        """Applying a config without S3 logging removes the existing entries."""
        s3s_configuration = self.minimal_configuration.copy()
        # NOTE(review): this key is 's3' while the test above uses 's3s' --
        # confirm which key FastlyConfiguration actually consumes; if it is
        # 's3s', this setup step configures nothing before the removal.
        s3s_configuration.update({
            's3': [{
                'name' : 'test_s3',
            }],
        })
        configuration = FastlyConfiguration(s3s_configuration)
        # Configure S3 logging
        self.enforcer.apply_configuration(self.FASTLY_TEST_SERVICE, configuration).service
        # Now apply a configuration without S3 logging
        service = self.enforcer.apply_configuration(self.FASTLY_TEST_SERVICE, FastlyConfiguration(self.minimal_configuration.copy())).service
        svc_conf = service.active_version.configuration
        self.assertEqual(svc_conf.s3s, [])
if __name__ == '__main__':
    # Allow running this test module directly.
    unittest.main()
| [
"unittest.main",
"os.path.dirname",
"test_common.TestCommon.vcr.use_cassette",
"fastly_service.FastlyConfiguration"
] | [((263, 292), 'test_common.TestCommon.vcr.use_cassette', 'TestCommon.vcr.use_cassette', ([], {}), '()\n', (290, 292), False, 'from test_common import TestCommon\n'), ((2124, 2153), 'test_common.TestCommon.vcr.use_cassette', 'TestCommon.vcr.use_cassette', ([], {}), '()\n', (2151, 2153), False, 'from test_common import TestCommon\n'), ((2903, 2918), 'unittest.main', 'unittest.main', ([], {}), '()\n', (2916, 2918), False, 'import unittest\n'), ((125, 150), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (140, 150), False, 'import os\n'), ((999, 1037), 'fastly_service.FastlyConfiguration', 'FastlyConfiguration', (['s3s_configuration'], {}), '(s3s_configuration)\n', (1018, 1037), False, 'from fastly_service import FastlyConfiguration\n'), ((2410, 2448), 'fastly_service.FastlyConfiguration', 'FastlyConfiguration', (['s3s_configuration'], {}), '(s3s_configuration)\n', (2429, 2448), False, 'from fastly_service import FastlyConfiguration\n')] |
#! /usr/bin/env python3
""" PrepareTreeData.py
Prepares the tree data a bit for inclusion into a database.
"""
import pandas as pd
import numpy as np
def PrepareTreeData(inname='data/street_trees_2015.csv',
                    outname='trees_2015.csv'):
    """Prepare the street-tree data for inclusion into a database.

    Drops unused location/administrative columns, rewrites ``created_at``
    from MM/DD/YYYY to ISO YYYY-MM-DD, converts the borough+tract code in
    ``boro_ct`` into a full census tract number, and moves ``boroname``
    to the last column before writing the result.

    (The description used to live as a stray module-level string above
    the def; it is now a proper docstring.)

    Args:
        inname: Input CSV file (first column is the index).
        outname: Output CSV file.
    """
    trees = pd.read_csv(inname, index_col=0)
    trees = trees.drop(['state', 'x_sp', 'y_sp', 'zip_city',
                        'cncldist', 'st_assem', 'st_senate',
                        'problems', 'address'
                        ], axis=1)
    # Borough digit (1-5) -> five-digit state+county FIPS prefix.
    boromap = {'1': '36061', '2': '36005', '3': '36047', '4': '36081', '5': '36085'}

    def map_date(date):
        # MM/DD/YYYY -> YYYY-MM-DD
        m = date[0:2]
        d = date[3:5]
        y = date[-4:]
        return y + '-' + m + '-' + d
    trees.created_at = trees.created_at.map(map_date)

    def get_tract(boro_ct):
        # boro_ct is <borough digit><6-digit tract>; replace the borough
        # digit with the matching county FIPS code.
        boro_ct = str(boro_ct)
        return int(boromap[boro_ct[0]] + boro_ct[1:])
    trees['tract'] = trees.boro_ct.map(get_tract)
    trees = trees.drop('boro_ct', axis=1)

    # Move 'boroname' to the end of the column list.
    cols = list(trees.columns)
    cols.remove('boroname')
    cols.append('boroname')
    trees = trees.loc[:, cols]
    trees.to_csv(outname)
def main():
    """Run PrepareTreeData with its default file names."""
    # (The description used to be a stray module-level string above the def.)
    PrepareTreeData()
if __name__=='__main__':
    # Script entry point.
    main()
| [
"pandas.read_csv"
] | [((387, 419), 'pandas.read_csv', 'pd.read_csv', (['inname'], {'index_col': '(0)'}), '(inname, index_col=0)\n', (398, 419), True, 'import pandas as pd\n')] |
#encoding: UTF-8
# Copyright (C) 2016 <NAME>
# This file is distributed under the terms of the # MIT License.
# See the file `License' in the root directory of the present distribution.
"""
An earlier and now oblosete implementation of functions for computing the
thermal expansion tensor as a function
of temperature from the Gruneisein parameters, the mode contributions to the
heat capacity, the elastic tensor and the unit cell volume.
Use :py:mod:`alphagruneisenp` instead.
"""
import numpy as np
import time
import math
import sys
from .read import read_Etot, read_freq, read_freq_ext, read_elastic_constants, \
read_elastic_constants_geo, read_freq_ext_geo
from .write import write_freq, write_freq_ext, write_alphaT, write_qha_C, write_qha_CT
from .constants import RY_KBAR, K_BOLTZMANN_RY, kb1
from .fitutils import fit_anis
from .minutils import find_min, fquadratic, fquartic
from .fitfreqgrun import fitfreq, fitfreqxx, freqmingrun, rearrange_freqx
from .fitFvib import fitFvib
from .fitC import rearrange_Cx, fitCxx
from .grunc import c_qvc # This is the same routine c_qv implemented in C to speed it up
################################################################################
#
# Compute the volume given the celldms, only for ibrav=4 for now
def compute_volume(celldms,ibrav=4):
    """Return the unit-cell volume from the cell parameters *celldms*.

    Only ibrav=4 (hexagonal) is implemented: sqrt(3)/2 * celldms[0]^2 *
    celldms[2].  NOTE(review): the commented-out variant below multiplied
    by an extra celldms[0], i.e. treated celldms[2] as a c/a ratio --
    confirm which convention the callers use.

    Raises:
        NotImplementedError: for any other *ibrav* (previously the
        function silently returned None).
    """
    if ibrav==4:
        return 0.866025404*celldms[0]*celldms[0]*celldms[2]
        #return 0.866025404*celldms[0]*celldms[0]*celldms[0]*celldms[2]
    raise NotImplementedError("compute_volume: ibrav={} not implemented".format(ibrav))
################################################################################
#
# Function to calculate the mode contribution to the heat capacity at a given T
# and omega
# This is a possible bottleneck as it is implemented in Python. It would be
# better to write it in C and link it to CPython or similar
#
#
def c_qv(T,omega):
    """Mode contribution to the constant-volume heat capacity.

    Evaluates x^2 * k_B * e^{-x} / (e^{-x} - 1)^2 with x = omega*kb1/T,
    returning 0.0 when T or omega is numerically zero.

    BUG FIX: the previous implementation switched to a Taylor expansion
    when exp(-x) <= 1e-3 -- i.e. for *large* x, where the small-x series
    is invalid -- and its x^4 term also had the wrong sign (+1/24 instead
    of -1/24).  math.expm1 gives an accurate e^{-x} - 1 for small x, so a
    single stable expression covers both regimes.
    """
    if (T<1E-9 or omega<1E-9):
        return 0.0
    x = omega * kb1 / T
    em1 = math.expm1(-x)   # e^{-x} - 1, accurate near x = 0
    return x * x * K_BOLTZMANN_RY * math.exp(-x) / (em1 * em1)
# Same as c_qv but no if. Slightly more efficient, roughly a 30% faster
def c_qv2(T,omega):
    """Mode heat capacity as in c_qv's main branch, but with no guards.

    Slightly faster than c_qv (roughly 30%, per the original note); the
    caller must ensure T and omega are safely away from zero.
    """
    x_ratio = omega * kb1 / T
    boltz_factor = math.exp(-x_ratio)  # exponential term
    return math.pow(x_ratio,2)*K_BOLTZMANN_RY*boltz_factor/math.pow(boltz_factor-1.0,2)
################################################################################
#
# This function computes the thermal expansions alpha using the Gruneisein
# parameters
# more comments to be added
# First with min0, freq and grun T-independent
#
# More ibrav types to be implemented
def compute_alpha_grun(T,V,S,weights,freq,grun,ibrav=4):
    """Compute the thermal-expansion tensor (Voigt vector) at temperature T.

    Sums the mode heat capacities weighted by the Gruneisen parameters over
    the q-point grid, contracts with the elastic compliances S and divides
    by the cell volume V.

    T       : temperature
    V       : unit-cell volume
    S       : 6x6 elastic-compliance matrix (already in (Ryd/au)^-1 units)
    weights : q-point weights, shape (nq,)
    freq    : frequencies, shape (nq, modes)
    grun    : Gruneisen parameters, shape (6, nq, modes)
    ibrav   : Bravais-lattice index; only 1 and 4 are implemented.

    BUG FIX: the ibrav==4 test used to be an independent `if`, so for
    ibrav==1 the trailing `else` also fired and printed a spurious
    "Not implemented yet"; it is now an if/elif/else chain.
    """
    nq = freq.shape[0]      # total number of q points
    modes = freq.shape[1]   # number of frequency modes
    alpha = np.zeros(6)     # inizializations
    alphaaux = np.zeros(6)
    # compute the Cqv*gruneisen terms, weights for each q-point, and sum;
    # for each ibrav (crystalline system) proceed in the proper way
    if ibrav == 1:
        for iq in range(0,nq):
            for mode in range(0,modes):
                alphaaux[0] += c_qv(T,freq[iq,mode]) * weights[iq] * grun[0,iq,mode]
        alphaaux[0] = alphaaux[0] / 3.0
        alphaaux[1] = alphaaux[0]
        alphaaux[2] = alphaaux[0]
    elif ibrav == 4:
        for iq in range(0,nq):
            for mode in range(0,modes):
                # c_qvc is the C implementation of c_qv (faster)
                temp = c_qvc(T,freq[iq,mode]) * weights[iq]
                alphaaux[0] += temp * grun[0,iq,mode]
                alphaaux[2] += temp * grun[2,iq,mode]
        alphaaux[0] = alphaaux[0] / 2.0
        alphaaux[1] = alphaaux[0]
    else:
        print ("Not implemented yet")
    # multiply for the elastic compliances
    for i in range(0,6):
        for j in range(0,6):
            alpha[i] += alphaaux[j]*S[i,j]
    alpha = -alpha/V
    return alpha
def compute_alpha_gruneisein(inputfileEtot,inputfileC,inputfilefreq,rangeT,typeEtot,typefreq,ibrav,guess=None):
    """Compute alpha(T) over *rangeT* from T-independent Gruneisen parameters.

    Fits the 0 K total energies, finds the equilibrium geometry, reads the
    elastic compliances, fits the frequencies/Gruneisen parameters and then
    evaluates compute_alpha_grun at every temperature, writing the result
    to "alpha_gruneisen".

    BUG FIX: `guess` was passed to find_min() but never defined, which
    raised a NameError; it is now an optional parameter (default None)
    forwarded unchanged, keeping the original signature backward
    compatible.
    """
    # Read the energies
    celldmsx, Ex = read_Etot(inputfileEtot)
    # Fit and find the minimum at 0 K
    a0, chia0 = fit_anis(celldmsx, Ex, ibrav, out=True, type=typeEtot)
    # NOTE(review): if chia0 is None, min0 is never bound and the call to
    # compute_volume below raises NameError -- confirm intended behaviour.
    if chia0 is not None:
        min0, fmin0 = find_min(a0, ibrav, type=typeEtot, guess=guess)
    # First read the elastic compliances which are needed for the thermal expansions
    print ("Reading elastic constants and compliances from file "+inputfileC+"...")
    C, S = read_elastic_constants(inputfileC)
    # Compute the Gruneisen parameters.  (Alternatively, previously written
    # files such as "average_freq0K" / "output_grun_along_*_ext3Dfit1.0"
    # could be read with read_freq_ext instead.)
    weights, freq, grun = fitfreq(celldmsx, min0, inputfilefreq, ibrav, typefreq="quadratic", compute_grun=True)
    V = compute_volume(min0, ibrav)  # eq. volume at 0 K
    print ("V = ",str(V))
    S = S * RY_KBAR  # convert elastic compliances in (Ryd/au)^-1
    alphaT = np.zeros((len(rangeT),6))
    for counterT, T in enumerate(rangeT):
        alpha = compute_alpha_grun(T,V,S,weights,freq,grun)
        alphaT[counterT] = alpha
        print ("T= "+str(T)+"\t"+str(alpha[0])+"\t"+str(alpha[2]))
    write_alphaT("alpha_gruneisen",rangeT,alphaT,4)
def compute_alpha_gruneiseinT(inputfileEtot,inputfileFvib,inputfileC,inputfilefreq,typeEtot,typeFvib,typefreq,ibrav,guess):
    """Compute alpha(T) with T-dependent geometry but fixed elastic compliances.

    Obtains the equilibrium geometry at each temperature from fitFvib, reads
    a single (temperature-independent) compliance matrix, fits the phonon
    frequencies over the geometry grid, and at every T recomputes the average
    frequencies and Gruneisen parameters before evaluating compute_alpha_grun.
    Results are written to "alpha_gruneisenT".
    """
    # Read the energies
    celldmsx, Ex = read_Etot(inputfileEtot)
    T, minT, fminT = fitFvib(inputfileEtot,inputfileFvib,ibrav,typeEtot,typeFvib,guess)
    # First read the elastic compliances which are need for the thermal expansions
    print ("Reading elastic constants and compliances from file "+inputfileC+"...")
    C, S = read_elastic_constants(inputfileC)
    print (S)
    S = S * RY_KBAR  # convert elastic compliances in (Ryd/au)^-1
    # get the weigths and the frequencies from files
    weightsx, freqx = read_freq_ext_geo(inputfilefreq,range(1,celldmsx.shape[0]+1))
    # q-point weights are the same for every geometry; take the first set
    weights = weightsx[0,:]
    print ("Rearranging frequencies...")
    freqxx = rearrange_freqx(freqx)
    print ("Done!")
    del freqx
    print ("Fitting frequencies...")
    afreq, chifreq = fitfreqxx(celldmsx, freqxx, ibrav, True, typefreq)
    print ("Done!")
    alphaT= np.zeros((len(T),6))
    for i in range(0,len(T)):
        # Compute the Gruneisen parameters, the average frequencies and alpha at each T
        V=compute_volume(minT[i],ibrav)
        print ("V = ",str(V))
        freq, grun = freqmingrun(afreq, minT[i], freqxx.shape[0],freqxx.shape[1], ibrav, typefreq)
        #write_freq_ext(weights,freq,"average_freqPython"+str(T[i]))
        #write_freq_ext(weights,grun[0],"output_grun_along_a_ext3Dfit"+str(T[i]))
        #write_freq_ext(weights,grun[2],"output_grun_along_c_ext3Dfit"+str(T[i]))
        alpha = compute_alpha_grun(T[i],V,S,weights,freq,grun)
        print ("T= "+str(T[i]))
        print (alpha)
        alphaT[i,:] = alpha
    write_alphaT("alpha_gruneisenT",T,alphaT,4)
################################################################################
#
# This function is only meant to test the Cqv modes. It has to be removed later...
#
def testCqv(inputfilefreq, rangeT, out="Cqvtest"):
    """Write the c_qv2 mode heat capacities for each T in *rangeT*.

    Debug helper: reads the (weights, frequencies) grid from
    *inputfilefreq* and writes one file per temperature, named
    ``out + str(T)``.
    """
    weights, freq = read_freq_ext(inputfilefreq)
    nq = freq.shape[0]      # total number of q points read
    modes = freq.shape[1]   # number of frequency modes
    for T in rangeT:
        Cqv = np.array([[c_qv2(T, freq[iq, mode]) for mode in range(modes)]
                        for iq in range(nq)])
        write_freq_ext(weights, Cqv, out + str(T))
################################################################################
# An auxiliary function for fitting the elastic constant elements of Sxx
#
#
def fitS(inputfileEtot, inputpathCx, ibrav, typeSx="quadratic"):
    """Fit each elastic-compliance element S_ij over the geometry grid.

    The total-energy file is read only to recover the grid of cell
    parameters; the compliances for every geometry are then loaded,
    rearranged per element, written out for reference and fitted with a
    polynomial of the requested type.  Returns the fit coefficients and
    the chi-squared values, as produced by fitCxx.
    """
    celldmsx, energies = read_Etot(inputfileEtot)
    n_geo = len(energies)
    # read_elastic_constants_geo returns (C, S); only S is used here
    _, Sx = read_elastic_constants_geo(n_geo, inputpathCx)
    Sxx = rearrange_Cx(Sx, n_geo)
    # Write the S as a function of T for reference
    write_qha_C(celldmsx, Sxx, ibrav, inputpathCx)
    return fitCxx(celldmsx, Sxx, ibrav, True, typeSx)
def fitST(aS, mintemp, typeCx, ibrav=4):
    """
    Evaluate the fitted 6x6 elastic compliance tensor S at a given geometry.

    Parameters
    ----------
    aS : indexable of polynomial coefficients, one entry per (i, j) element,
        as returned by fitS.
    mintemp : lattice parameters (e.g. the minimum-energy geometry at a given
        temperature) at which to evaluate the polynomials.
    typeCx : "quadratic" or "quartic", the polynomial type used in the fit.
    ibrav : Bravais lattice index passed to the polynomial evaluators.
        Defaults to 4, preserving the previously hard-coded value.

    Returns
    -------
    numpy.ndarray
        6x6 array of evaluated compliances. If *typeCx* is not recognized
        the array is left at zero (same silent behavior as before).
    """
    # Resolve the polynomial evaluator once instead of re-testing typeCx
    # inside the 6x6 loop.
    if typeCx == "quadratic":
        poly = fquadratic
    elif typeCx == "quartic":
        poly = fquartic
    else:
        poly = None
    S = np.zeros((6, 6))
    if poly is not None:
        for i in range(6):
            for j in range(6):
                S[i, j] = poly(mintemp, aS[i, j], ibrav=ibrav)
    return S
def compute_alpha_gruneiseinCT(inputfileEtot,inputfileFvib,inputpathCx,inputfilefreq,typeEtot,typeFvib,typeSx,typefreq,ibrav,guess):
    """
    Compute the thermal expansion tensor alpha(T) from Gruneisen parameters,
    using temperature-dependent elastic compliances S(T).

    Pipeline:
      1. read total energies and fit Fvib to get T, the minimizing geometry
         minT(T) and the free energy at the minimum;
      2. fit the elastic compliances over the geometry grid (fitS);
      3. read and fit the phonon frequencies over the geometry grid;
      4. at each T, evaluate S(T), the average frequencies and the Gruneisen
         parameters at minT(T), and combine them into alpha(T);
      5. write alpha(T) to the file "alpha_gruneisenT".

    NOTE(review): the name keeps the original spelling "gruneiseinCT" since
    external callers may depend on it.
    """
    # Read the energies (also provides the celldmsx geometry grid)
    celldmsx, Ex = read_Etot(inputfileEtot)
    T, minT, fminT = fitFvib(inputfileEtot,inputfileFvib,ibrav,typeEtot,typeFvib,guess)
    # Get the polynomial coefficients aS from fitting the elastic compliances
    # (used later to evaluate S(T) at each minimizing geometry)
    aS, chiS = fitS(inputfileEtot, inputpathCx, ibrav, typeSx)
    # Now get the polynomial coefficients afreq from fitting the frequencies
    # (used later to get average frequencies and Gruneisen parameters as a
    # function of T)
    weightsx, freqx = read_freq_ext_geo(inputfilefreq,range(1,celldmsx.shape[0]+1))
    # q-point weights are assumed identical for all geometries; take the first
    weights = weightsx[0,:]
    print ("Rearranging frequencies...")
    freqxx = rearrange_freqx(freqx)
    print ("Done!")
    # freqx can be large; free it before the fit
    del freqx
    print ("Fitting frequencies...")
    afreq, chifreq = fitfreqxx(celldmsx, freqxx, ibrav, True, typefreq)
    print ("Done!")
    # alpha has 6 components (Voigt notation) at each temperature
    alphaT= np.zeros((len(T),6))
    for i in range(0,len(T)):
        # Compute the Gruneisen parameters, the average frequencies and alpha at each T
        V=compute_volume(minT[i],ibrav)
        print ("V = ",str(V))
        # Evaluate the fitted compliances at the minimizing geometry for this T
        S = fitST(aS,minT[i],typeSx)
        print (S)
        S = S * RY_KBAR # convert elastic compliances in (Ryd/au)^-1
        freq, grun = freqmingrun(afreq, minT[i], freqxx.shape[0],freqxx.shape[1], ibrav, typefreq)
        # Debug output of the averaged frequencies / Gruneisen parameters:
        #write_freq_ext(weights,freq,"average_freqPython"+str(T[i]))
        #write_freq_ext(weights,grun[0],"output_grun_along_a_ext3Dfit"+str(T[i]))
        #write_freq_ext(weights,grun[2],"output_grun_along_c_ext3Dfit"+str(T[i]))
        alpha = compute_alpha_grun(T[i],V,S,weights,freq,grun)
        print ("T= "+str(T[i]))
        print (alpha)
        alphaT[i,:] = alpha
    write_alphaT("alpha_gruneisenT",T,alphaT,4)
| [
"math.pow",
"numpy.array",
"math.exp",
"numpy.zeros"
] | [((1910, 1922), 'math.exp', 'math.exp', (['(-x)'], {}), '(-x)\n', (1918, 1922), False, 'import math\n'), ((1953, 1967), 'math.pow', 'math.pow', (['x', '(2)'], {}), '(x, 2)\n', (1961, 1967), False, 'import math\n'), ((2399, 2411), 'math.exp', 'math.exp', (['(-x)'], {}), '(-x)\n', (2407, 2411), False, 'import math\n'), ((2442, 2456), 'math.pow', 'math.pow', (['x', '(2)'], {}), '(x, 2)\n', (2450, 2456), False, 'import math\n'), ((2985, 2996), 'numpy.zeros', 'np.zeros', (['(6)'], {}), '(6)\n', (2993, 2996), True, 'import numpy as np\n'), ((3034, 3045), 'numpy.zeros', 'np.zeros', (['(6)'], {}), '(6)\n', (3042, 3045), True, 'import numpy as np\n'), ((9113, 9129), 'numpy.zeros', 'np.zeros', (['(6, 6)'], {}), '((6, 6))\n', (9121, 9129), True, 'import numpy as np\n'), ((2491, 2514), 'math.pow', 'math.pow', (['(expx - 1.0)', '(2)'], {}), '(expx - 1.0, 2)\n', (2499, 2514), False, 'import math\n'), ((8291, 8304), 'numpy.array', 'np.array', (['Cqv'], {}), '(Cqv)\n', (8299, 8304), True, 'import numpy as np\n'), ((2052, 2075), 'math.pow', 'math.pow', (['(expx - 1.0)', '(2)'], {}), '(expx - 1.0, 2)\n', (2060, 2075), False, 'import math\n'), ((2252, 2266), 'math.pow', 'math.pow', (['x', '(4)'], {}), '(x, 4)\n', (2260, 2266), False, 'import math\n'), ((2215, 2229), 'math.pow', 'math.pow', (['x', '(3)'], {}), '(x, 3)\n', (2223, 2229), False, 'import math\n'), ((2171, 2185), 'math.pow', 'math.pow', (['x', '(2)'], {}), '(x, 2)\n', (2179, 2185), False, 'import math\n')] |
from __future__ import division
import numpy as np
from sklearn.utils import shuffle
from sklearn.metrics import *
"""
Module with different fitness functions implemented to be used by the CRO algorithm.
The functions' only argument must be an individual (coral) and return its fitness, a number.
The fitness might require other arguments, in that case the partial function in python's functools module is a very good option
"""
def max_ones(coral):
"""
Description: Returns the percentage of 1's in the coral. This function assumes 'coral' is a list,
it could be further improved if it was a numpy array
Input:
- coral
Output:
- fitness
"""
return 100*(sum(coral) / len(coral))
def feature_selection(coral, X, y, model,
get_prediction = lambda model, X: model.predict(X),
metric=roc_auc_score, random_seed=None):
"""
Description: Returns the fitness (given by metric) of the selected features given by coral,
when using Xt and yt for training the model clf
Input:
- coral : an individual
- X: Data input
- y: Data output
- model: instance of the model to be trained
- get_prediction: function that accepts the model and X and outputs the vector
that will be used in the metric (predictions, scores...)
- metric: metric that will be used as fitness
Output:
- fitness
"""
# offset % of data for training, the rest for testing
offset = int(X.shape[0] * 0.9)
Xs, ys = shuffle(X, y, random_state=random_seed)
Xs = np.multiply(Xs, coral)
X_train, y_train = Xs[:offset], ys[:offset]
X_test, y_test = Xs[offset:], ys[offset:]
# train model
model.fit(X_train, y_train)
# Compute metric
y_pred = get_prediction(model, X_test)
fitness = metric(y_test, y_pred)
return fitness
| [
"sklearn.utils.shuffle",
"numpy.multiply"
] | [((1584, 1623), 'sklearn.utils.shuffle', 'shuffle', (['X', 'y'], {'random_state': 'random_seed'}), '(X, y, random_state=random_seed)\n', (1591, 1623), False, 'from sklearn.utils import shuffle\n'), ((1633, 1655), 'numpy.multiply', 'np.multiply', (['Xs', 'coral'], {}), '(Xs, coral)\n', (1644, 1655), True, 'import numpy as np\n')] |
from abc import ABC, abstractmethod
from typing import Tuple, Iterable, Any
from pygame.rect import Rect
class Shape(ABC):
"""Abstract shape interface."""
@abstractmethod
def shape(self) -> Any:
pass
@abstractmethod
def top_left(self) -> Tuple:
pass
@abstractmethod
def top_right(self) -> Iterable:
pass
@abstractmethod
def size(self) -> Tuple:
pass
@abstractmethod
def bottom_right(self) -> Iterable:
pass
@abstractmethod
def bottom_left(self) -> Iterable:
pass
@abstractmethod
def inflate(self, x: int, y: int) -> Rect:
pass
class Rectangle(Shape):
"""Rectangle shape."""
def __init__(self, position: Iterable) -> None:
self._shape: Rect = Rect(position)
self._top_left: Tuple = (0, 0)
def shape(self) -> Any:
return self._shape
@property
def top_left(self) -> Tuple:
return self._top_left
@top_left.setter
def top_left(self, position: Tuple) -> None:
self._top_left = position
@property
def top_right(self) -> Iterable:
return self._shape.topright
@property
def bottom_left(self) -> Iterable:
return self._shape.bottomleft
@property
def bottom_right(self) -> Iterable:
return self._shape.bottomright
@property
def size(self) -> Tuple:
return self._shape.size
def inflate(self, x: int, y: int) -> Rect:
return self._shape.inflate(x, y)
| [
"pygame.rect.Rect"
] | [((786, 800), 'pygame.rect.Rect', 'Rect', (['position'], {}), '(position)\n', (790, 800), False, 'from pygame.rect import Rect\n')] |
import numpy as np
from ..local_interpolation import ThirdOrderHermitePolynomialInterpolation
from .runge_kutta import AbstractESDIRK, ButcherTableau
γ = 0.26
a21 = γ
a31 = 0.13
a32 = 0.84033320996790809
a41 = 0.22371961478320505
a42 = 0.47675532319799699
a43 = -0.06470895363112615
a51 = 0.16648564323248321
a52 = 0.10450018841591720
a53 = 0.03631482272098715
a54 = -0.13090704451073998
a61 = 0.13855640231268224
a62 = 0
a63 = -0.04245337201752043
a64 = 0.02446657898003141
a65 = 0.61943039072480676
a71 = 0.13659751177640291
a72 = 0
a73 = -0.05496908796538376
a74 = -0.04118626728321046
a75 = 0.62993304899016403
a76 = 0.06962479448202728
# Predictors taken from
# https://github.com/SciML/OrdinaryDiffEq.jl/blob/54fb35870fa402fc95d665cd5f9502e2759ea436/src/tableaus/sdirk_tableaus.jl#L1444 # noqa: E501
# https://github.com/SciML/OrdinaryDiffEq.jl/blob/54fb35870fa402fc95d665cd5f9502e2759ea436/src/perform_step/kencarp_kvaerno_perform_step.jl#L1123 # noqa: E501
# This is with the exception of α21, which is mistakenly set to zero.
#
# See also /devdocs/predictor_dirk.md
α21 = 1.0
α31 = -1.366025403784441
α32 = 2.3660254037844357
α41 = -0.19650552613122207
α42 = 0.8113579546496623
α43 = 0.38514757148155954
α51 = 0.10375304369958693
α52 = 0.937994698066431
α53 = -0.04174774176601781
α61 = -0.17281112873898072
α62 = 0.6235784481025847
α63 = 0.5492326806363959
α71 = a61
α72 = a62
α73 = a63
α74 = a64
α75 = a65
α76 = γ
_kvaerno5_tableau = ButcherTableau(
a_lower=(
np.array([a21]),
np.array([a31, a32]),
np.array([a41, a42, a43]),
np.array([a51, a52, a53, a54]),
np.array([a61, a62, a63, a64, a65]),
np.array([a71, a72, a73, a74, a75, a76]),
),
a_diagonal=np.array([0, γ, γ, γ, γ, γ, γ]),
a_predictor=(
np.array([α21]),
np.array([α31, α32]),
np.array([α41, α42, α43]),
np.array([α51, α52, α53, 0]),
np.array([α61, α62, α63, 0, 0]),
np.array([α71, α72, α73, α74, α75, α76]),
),
b_sol=np.array([a71, a72, a73, a74, a75, a76, γ]),
b_error=np.array(
[a71 - a61, a72 - a62, a73 - a63, a74 - a64, a75 - a65, a76 - γ, γ]
),
c=np.array(
[0.52, 1.230333209967908, 0.8957659843500759, 0.43639360985864756, 1.0, 1.0]
),
)
class Kvaerno5(AbstractESDIRK):
r"""Kvaerno's 5/4 method.
A-L stable stiffly accurate 5th order ESDIRK method. Has an embedded 4th order
method for adaptive step sizing. Uses 7 stages.
When solving an ODE over the interval $[t_0, t_1]$, note that this method will make
some evaluations slightly past $t_1$.
??? cite "Reference"
```bibtex
@article{kvaerno2004singly,
title={Singly diagonally implicit Runge--Kutta methods with an explicit first
stage},
author={Kv{\ae}rn{\o}, Anne},
journal={BIT Numerical Mathematics},
volume={44},
number={3},
pages={489--502},
year={2004},
publisher={Springer}
}
```
"""
tableau = _kvaerno5_tableau
interpolation_cls = ThirdOrderHermitePolynomialInterpolation.from_k
def order(self, terms):
return 5
| [
"numpy.array"
] | [((1729, 1760), 'numpy.array', 'np.array', (['[0, γ, γ, γ, γ, γ, γ]'], {}), '([0, γ, γ, γ, γ, γ, γ])\n', (1737, 1760), True, 'import numpy as np\n'), ((2016, 2059), 'numpy.array', 'np.array', (['[a71, a72, a73, a74, a75, a76, γ]'], {}), '([a71, a72, a73, a74, a75, a76, γ])\n', (2024, 2059), True, 'import numpy as np\n'), ((2073, 2150), 'numpy.array', 'np.array', (['[a71 - a61, a72 - a62, a73 - a63, a74 - a64, a75 - a65, a76 - γ, γ]'], {}), '([a71 - a61, a72 - a62, a73 - a63, a74 - a64, a75 - a65, a76 - γ, γ])\n', (2081, 2150), True, 'import numpy as np\n'), ((2172, 2262), 'numpy.array', 'np.array', (['[0.52, 1.230333209967908, 0.8957659843500759, 0.43639360985864756, 1.0, 1.0]'], {}), '([0.52, 1.230333209967908, 0.8957659843500759, 0.43639360985864756,\n 1.0, 1.0])\n', (2180, 2262), True, 'import numpy as np\n'), ((1490, 1505), 'numpy.array', 'np.array', (['[a21]'], {}), '([a21])\n', (1498, 1505), True, 'import numpy as np\n'), ((1515, 1535), 'numpy.array', 'np.array', (['[a31, a32]'], {}), '([a31, a32])\n', (1523, 1535), True, 'import numpy as np\n'), ((1545, 1570), 'numpy.array', 'np.array', (['[a41, a42, a43]'], {}), '([a41, a42, a43])\n', (1553, 1570), True, 'import numpy as np\n'), ((1580, 1610), 'numpy.array', 'np.array', (['[a51, a52, a53, a54]'], {}), '([a51, a52, a53, a54])\n', (1588, 1610), True, 'import numpy as np\n'), ((1620, 1655), 'numpy.array', 'np.array', (['[a61, a62, a63, a64, a65]'], {}), '([a61, a62, a63, a64, a65])\n', (1628, 1655), True, 'import numpy as np\n'), ((1665, 1705), 'numpy.array', 'np.array', (['[a71, a72, a73, a74, a75, a76]'], {}), '([a71, a72, a73, a74, a75, a76])\n', (1673, 1705), True, 'import numpy as np\n'), ((1788, 1803), 'numpy.array', 'np.array', (['[α21]'], {}), '([α21])\n', (1796, 1803), True, 'import numpy as np\n'), ((1813, 1833), 'numpy.array', 'np.array', (['[α31, α32]'], {}), '([α31, α32])\n', (1821, 1833), True, 'import numpy as np\n'), ((1843, 1868), 'numpy.array', 'np.array', (['[α41, α42, α43]'], {}), 
'([α41, α42, α43])\n', (1851, 1868), True, 'import numpy as np\n'), ((1878, 1906), 'numpy.array', 'np.array', (['[α51, α52, α53, 0]'], {}), '([α51, α52, α53, 0])\n', (1886, 1906), True, 'import numpy as np\n'), ((1916, 1947), 'numpy.array', 'np.array', (['[α61, α62, α63, 0, 0]'], {}), '([α61, α62, α63, 0, 0])\n', (1924, 1947), True, 'import numpy as np\n'), ((1957, 1997), 'numpy.array', 'np.array', (['[α71, α72, α73, α74, α75, α76]'], {}), '([α71, α72, α73, α74, α75, α76])\n', (1965, 1997), True, 'import numpy as np\n')] |
#MenuTitle: Remove Zero Deltas in Selected Glyphs
# -*- coding: utf-8 -*-
from __future__ import division, print_function, unicode_literals
__doc__="""
Goes through all layers of each selected glyph, and deletes all TT Delta Hints with an offset of zero. Detailed Report in Macro Window.
"""
def process( Layer ):
try:
count = 0
for i in reversed(range(len(Layer.hints))):
hint = Layer.hints[i]
if hint.type == TTDELTA:
elementDict = hint.elementDict()
if "settings" in elementDict:
settings = elementDict["settings"]
if settings:
for deltaType in ("deltaH","deltaV"):
if deltaType in settings:
for transformType in settings[deltaType]:
deltas = settings[deltaType][transformType]
for ppmSize in deltas:
if deltas[ppmSize] == 0:
del deltas[ppmSize]
count += 1
# clean up delta PPMs:
if len(settings[deltaType][transformType]) == 0:
del settings[deltaType][transformType]
# clean up delta directions:
if len(settings[deltaType]) == 0:
del settings[deltaType]
# clean up hints:
if not elementDict["settings"]:
del Layer.hints[i]
print(" Deleted %i zero delta%s on layer '%s'." % (
count,
"" if count == 1 else "s",
Layer.name,
))
return count
except Exception as e:
Glyphs.showMacroWindow()
import traceback
print(traceback.format_exc())
print()
print(e)
thisFont = Glyphs.font # frontmost font
selectedLayers = thisFont.selectedLayers # active layers of selected glyphs
Glyphs.clearLog() # clears log in Macro window
totalCount = 0
for selectedLayer in selectedLayers:
thisGlyph = selectedLayer.parent
print("%s:" % thisGlyph.name)
thisGlyph.beginUndo() # begin undo grouping
for thisLayer in thisGlyph.layers:
totalCount += process( thisLayer )
thisGlyph.endUndo() # end undo grouping
if totalCount:
Message(
title="%i Zero Delta%s Deleted" % (
totalCount,
"" if totalCount == 1 else "s",
),
message="Deleted %i TT delta hint%s with zero offset in %i selected glyph%s (%s%s). Detailed report in Macro Window." % (
totalCount,
"" if totalCount == 1 else "s",
len(selectedLayers),
"" if len(selectedLayers) == 1 else "s",
", ".join([l.parent.name for l in selectedLayers[:min(20,len(selectedLayers))]]),
",..." if len(selectedLayers) > 20 else "",
),
OKButton=u"👍🏻 OK",
)
else:
Message(
title="No Zero Deltas",
message="No TT delta hints with zero offset were found in selected glyph%s (%s%s)." % (
"" if len(selectedLayers) == 1 else "s",
", ".join([l.parent.name for l in selectedLayers[:min(20,len(selectedLayers))]]),
",..." if len(selectedLayers) > 20 else "",
),
OKButton=u"🍸 Cheers")
| [
"traceback.format_exc"
] | [((1433, 1455), 'traceback.format_exc', 'traceback.format_exc', ([], {}), '()\n', (1453, 1455), False, 'import traceback\n')] |
#!/usr/bin/env python3
import re
import sys
from glob import glob
from subprocess import run
def main(args):
assert len(args) >= 1
from_image = args.pop(0)
optional = [x for x in map(str.strip, args) if x]
optional_used = set()
with open("Dockerfile", "w") as fout:
print(f"from {from_image}", file=fout)
for fname in sorted(glob("*.Dockerfile")):
if fname.startswith("optional."):
if any(x in fname for x in optional):
optional_used.add(
re.search(
r"^optional\.(\d*\.)?(\S+?)\.Dockerfile$", fname
).groups()[1]
)
else:
continue
with open(fname) as fin:
print(fin.read().strip(), file=fout)
our_tag = "orestisfl/env"
if optional_used:
our_tag += "-" + "-".join(sorted(optional_used))
our_tag += ":" + from_image.split(":", 1)[1]
with open("image", "w") as f:
print(our_tag, file=f)
return run(["docker", "build", "-t", our_tag, "."], check=True)
if __name__ == "__main__":
print(main(sys.argv[1:]), file=sys.stderr)
| [
"subprocess.run",
"glob.glob",
"re.search"
] | [((1077, 1133), 'subprocess.run', 'run', (["['docker', 'build', '-t', our_tag, '.']"], {'check': '(True)'}), "(['docker', 'build', '-t', our_tag, '.'], check=True)\n", (1080, 1133), False, 'from subprocess import run\n'), ((366, 386), 'glob.glob', 'glob', (['"""*.Dockerfile"""'], {}), "('*.Dockerfile')\n", (370, 386), False, 'from glob import glob\n'), ((552, 615), 're.search', 're.search', (['"""^optional\\\\.(\\\\d*\\\\.)?(\\\\S+?)\\\\.Dockerfile$"""', 'fname'], {}), "('^optional\\\\.(\\\\d*\\\\.)?(\\\\S+?)\\\\.Dockerfile$', fname)\n", (561, 615), False, 'import re\n')] |
import torch
import torch.nn as nn
import torch.nn.functional as F
import torchvision
class FPAv2(nn.Module):
def __init__(self, input_dim, output_dim):
super(FPAv2, self).__init__()
self.glob = nn.Sequential(nn.AdaptiveAvgPool2d(1),
nn.Conv2d(input_dim, output_dim, kernel_size=1, bias=False))
self.down2_1 = nn.Sequential(nn.Conv2d(input_dim, input_dim, kernel_size=5, stride=2, padding=2, bias=False),
nn.BatchNorm2d(input_dim),
nn.ELU(True))
self.down2_2 = nn.Sequential(nn.Conv2d(input_dim, output_dim, kernel_size=5, padding=2, bias=False),
nn.BatchNorm2d(output_dim),
nn.ELU(True))
self.down3_1 = nn.Sequential(nn.Conv2d(input_dim, input_dim, kernel_size=3, stride=2, padding=1, bias=False),
nn.BatchNorm2d(input_dim),
nn.ELU(True))
self.down3_2 = nn.Sequential(nn.Conv2d(input_dim, output_dim, kernel_size=3, padding=1, bias=False),
nn.BatchNorm2d(output_dim),
nn.ELU(True))
self.conv1 = nn.Sequential(nn.Conv2d(input_dim, output_dim, kernel_size=1, bias=False),
nn.BatchNorm2d(output_dim),
nn.ELU(True))
def forward(self, x):
# x shape: 512, 16, 16
x_glob = self.glob(x) # 256, 1, 1
x_glob = F.upsample(x_glob, scale_factor=16, mode='bilinear', align_corners=True) # 256, 16, 16
d2 = self.down2_1(x) # 512, 8, 8
d3 = self.down3_1(d2) # 512, 4, 4
d2 = self.down2_2(d2) # 256, 8, 8
d3 = self.down3_2(d3) # 256, 4, 4
d3 = F.upsample(d3, scale_factor=2, mode='bilinear', align_corners=True) # 256, 8, 8
d2 = d2 + d3
d2 = F.upsample(d2, scale_factor=2, mode='bilinear', align_corners=True) # 256, 16, 16
x = self.conv1(x) # 256, 16, 16
x = x * d2
x = x + x_glob
return x
def conv3x3(input_dim, output_dim, rate=1):
return nn.Sequential(nn.Conv2d(input_dim, output_dim, kernel_size=3, dilation=rate, padding=rate, bias=False),
nn.BatchNorm2d(output_dim),
nn.ELU(True))
class SpatialAttention2d(nn.Module):
def __init__(self, channel):
super(SpatialAttention2d, self).__init__()
self.squeeze = nn.Conv2d(channel, 1, kernel_size=1, bias=False)
self.sigmoid = nn.Sigmoid()
def forward(self, x):
z = self.squeeze(x)
z = self.sigmoid(z)
return x * z
class GAB(nn.Module):
def __init__(self, input_dim, reduction=4):
super(GAB, self).__init__()
self.global_avgpool = nn.AdaptiveAvgPool2d(1)
self.conv1 = nn.Conv2d(input_dim, input_dim // reduction, kernel_size=1, stride=1)
self.conv2 = nn.Conv2d(input_dim // reduction, input_dim, kernel_size=1, stride=1)
self.relu = nn.ReLU(inplace=True)
self.sigmoid = nn.Sigmoid()
def forward(self, x):
z = self.global_avgpool(x)
z = self.relu(self.conv1(z))
z = self.sigmoid(self.conv2(z))
return x * z
class Decoder(nn.Module):
def __init__(self, in_channels, channels, out_channels):
super(Decoder, self).__init__()
self.conv1 = conv3x3(in_channels, channels)
self.conv2 = conv3x3(channels, out_channels)
self.s_att = SpatialAttention2d(out_channels)
self.c_att = GAB(out_channels, 16)
def forward(self, x, e=None):
x = F.upsample(input=x, scale_factor=2, mode='bilinear', align_corners=True)
if e is not None:
x = torch.cat([x, e], 1)
x = self.conv1(x)
x = self.conv2(x)
s = self.s_att(x)
c = self.c_att(x)
output = s + c
return output
class Decoderv2(nn.Module):
def __init__(self, up_in, x_in, n_out):
super(Decoderv2, self).__init__()
up_out = x_out = n_out // 2
self.x_conv = nn.Conv2d(x_in, x_out, 1, bias=False)
self.tr_conv = nn.ConvTranspose2d(up_in, up_out, 2, stride=2)
self.bn = nn.BatchNorm2d(n_out)
self.relu = nn.ReLU(True)
self.s_att = SpatialAttention2d(n_out)
self.c_att = GAB(n_out, 16)
def forward(self, up_p, x_p):
up_p = self.tr_conv(up_p)
x_p = self.x_conv(x_p)
cat_p = torch.cat([up_p, x_p], 1)
cat_p = self.relu(self.bn(cat_p))
s = self.s_att(cat_p)
c = self.c_att(cat_p)
return s + c
class SCse(nn.Module):
def __init__(self, dim):
super(SCse, self).__init__()
self.satt = SpatialAttention2d(dim)
self.catt = GAB(dim)
def forward(self, x):
return self.satt(x) + self.catt(x)
# stage1 model
class Res34Unetv4(nn.Module):
def __init__(self, n_classes=4):
super(Res34Unetv4, self).__init__()
self.resnet = torchvision.models.resnet34(True)
self.conv1 = nn.Sequential(
self.resnet.conv1,
self.resnet.bn1,
self.resnet.relu)
self.encode2 = nn.Sequential(self.resnet.layer1,
SCse(64))
self.encode3 = nn.Sequential(self.resnet.layer2,
SCse(128))
self.encode4 = nn.Sequential(self.resnet.layer3,
SCse(256))
self.encode5 = nn.Sequential(self.resnet.layer4,
SCse(512))
self.center = nn.Sequential(FPAv2(512, 256),
nn.MaxPool2d(2, 2))
self.decode5 = Decoderv2(256, 512, 64)
self.decode4 = Decoderv2(64, 256, 64)
self.decode3 = Decoderv2(64, 128, 64)
self.decode2 = Decoderv2(64, 64, 64)
self.decode1 = Decoder(64, 32, 64)
self.logit = nn.Sequential(nn.Conv2d(320, 64, kernel_size=3, padding=1),
nn.ELU(True),
nn.Conv2d(64, n_classes, kernel_size=1, bias=False))
def forward(self, x):
# x: (batch_size, 3, 256, 256)
x = self.conv1(x) # 64, 128, 128
e2 = self.encode2(x) # 64, 128, 128
e3 = self.encode3(e2) # 128, 64, 64
e4 = self.encode4(e3) # 256, 32, 32
e5 = self.encode5(e4) # 512, 16, 16
f = self.center(e5) # 256, 8, 8
d5 = self.decode5(f, e5) # 64, 16, 16
d4 = self.decode4(d5, e4) # 64, 32, 32
d3 = self.decode3(d4, e3) # 64, 64, 64
d2 = self.decode2(d3, e2) # 64, 128, 128
d1 = self.decode1(d2) # 64, 256, 256
f = torch.cat((d1,
F.upsample(d2, scale_factor=2, mode='bilinear', align_corners=True),
F.upsample(d3, scale_factor=4, mode='bilinear', align_corners=True),
F.upsample(d4, scale_factor=8, mode='bilinear', align_corners=True),
F.upsample(d5, scale_factor=16, mode='bilinear', align_corners=True)), 1) # 320, 256, 256
logit = self.logit(f) # n_classes, 256, 256
return logit
# stage2 model
class Res34Unetv3(nn.Module):
def __init__(self, n_classes=4):
super(Res34Unetv3, self).__init__()
self.resnet = torchvision.models.resnet34(True)
self.conv1 = nn.Sequential(
self.resnet.conv1,
self.resnet.bn1,
self.resnet.relu)
self.encode2 = nn.Sequential(self.resnet.layer1,
SCse(64))
self.encode3 = nn.Sequential(self.resnet.layer2,
SCse(128))
self.encode4 = nn.Sequential(self.resnet.layer3,
SCse(256))
self.encode5 = nn.Sequential(self.resnet.layer4,
SCse(512))
self.center = nn.Sequential(FPAv2(512, 256),
nn.MaxPool2d(2, 2))
self.decode5 = Decoderv2(256, 512, 64)
self.decode4 = Decoderv2(64, 256, 64)
self.decode3 = Decoderv2(64, 128, 64)
self.decode2 = Decoderv2(64, 64, 64)
self.decode1 = Decoder(64, 32, 64)
self.dropout2d = nn.Dropout2d(0.4)
self.dropout = nn.Dropout(0.4)
self.fuse_pixel = conv3x3(320, 64)
self.logit_pixel = nn.Conv2d(64, 1, kernel_size=1, bias=False)
self.fuse_image = nn.Sequential(nn.Linear(512, 64),
nn.ELU(True))
self.logit_image = nn.Sequential(nn.Linear(64, 1),
nn.Sigmoid())
self.logit = nn.Sequential(nn.Conv2d(128, 64, kernel_size=3, padding=1, bias=False),
nn.ELU(True),
nn.Conv2d(64, n_classes, kernel_size=1, bias=False))
def forward(self, x):
# x: (batch_size, 3, 256, 256)
batch_size, c, h, w = x.shape
x = self.conv1(x) # 64, 128, 128
e2 = self.encode2(x) # 64, 128, 128
e3 = self.encode3(e2) # 128, 64, 64
e4 = self.encode4(e3) # 256, 32, 32
e5 = self.encode5(e4) # 512, 16, 16
e = F.adaptive_avg_pool2d(e5, output_size=1).view(batch_size, -1) # 512
e = self.dropout(e)
f = self.center(e5) # 256, 8, 8
d5 = self.decode5(f, e5) # 64, 16, 16
d4 = self.decode4(d5, e4) # 64, 32, 32
d3 = self.decode3(d4, e3) # 64, 64, 64
d2 = self.decode2(d3, e2) # 64, 128, 128
d1 = self.decode1(d2) # 64, 256, 256
f = torch.cat((d1,
F.upsample(d2, scale_factor=2, mode='bilinear', align_corners=True),
F.upsample(d3, scale_factor=4, mode='bilinear', align_corners=True),
F.upsample(d4, scale_factor=8, mode='bilinear', align_corners=True),
F.upsample(d5, scale_factor=16, mode='bilinear', align_corners=True)), 1) # 320, 256, 256
f = self.dropout2d(f)
# segmentation process
fuse_pixel = self.fuse_pixel(f) # 64, 256, 256
logit_pixel = self.logit_pixel(fuse_pixel) # 1, 256, 256
# classification process
fuse_image = self.fuse_image(e) # 64
logit_image = self.logit_image(fuse_image) # 1
# combine segmentation and classification
fuse = torch.cat([fuse_pixel,
F.upsample(fuse_image.view(batch_size, -1, 1, 1), scale_factor=256, mode='bilinear',
align_corners=True)], 1) # 128, 256, 256
logit = self.logit(fuse) # n_classes, 256, 256
return logit, logit_pixel, logit_image.view(-1)
# stage3 model
class Res34Unetv5(nn.Module):
def __init__(self, n_classes):
super(Res34Unetv5, self).__init__()
self.resnet = torchvision.models.resnet34(True)
self.conv1 = nn.Sequential(
nn.Conv2d(3, 64, kernel_size=3, padding=1, bias=False),
self.resnet.bn1,
self.resnet.relu)
self.encode2 = nn.Sequential(self.resnet.layer1,
SCse(64))
self.encode3 = nn.Sequential(self.resnet.layer2,
SCse(128))
self.encode4 = nn.Sequential(self.resnet.layer3,
SCse(256))
self.encode5 = nn.Sequential(self.resnet.layer4,
SCse(512))
self.center = nn.Sequential(FPAv2(512, 256),
nn.MaxPool2d(2, 2))
self.decode5 = Decoderv2(256, 512, 64)
self.decode4 = Decoderv2(64, 256, 64)
self.decode3 = Decoderv2(64, 128, 64)
self.decode2 = Decoderv2(64, 64, 64)
self.logit = nn.Sequential(nn.Conv2d(256, 32, kernel_size=3, padding=1),
nn.ELU(True),
nn.Conv2d(32, n_classes, kernel_size=1, bias=False))
def forward(self, x):
# x: batch_size, 3, 128, 128
x = self.conv1(x) # 64, 128, 128
e2 = self.encode2(x) # 64, 128, 128
e3 = self.encode3(e2) # 128, 64, 64
e4 = self.encode4(e3) # 256, 32, 32
e5 = self.encode5(e4) # 512, 16, 16
f = self.center(e5) # 256, 8, 8
d5 = self.decode5(f, e5) # 64, 16, 16
d4 = self.decode4(d5, e4) # 64, 32, 32
d3 = self.decode3(d4, e3) # 64, 64, 64
d2 = self.decode2(d3, e2) # 64, 128, 128
f = torch.cat((d2,
F.upsample(d3, scale_factor=2, mode='bilinear', align_corners=True),
F.upsample(d4, scale_factor=4, mode='bilinear', align_corners=True),
F.upsample(d5, scale_factor=8, mode='bilinear', align_corners=True)), 1) # 256, 128, 128
f = F.dropout2d(f, p=0.4)
logit = self.logit(f) # n_classes, 128, 128
return logit | [
"torch.nn.functional.upsample",
"torch.nn.BatchNorm2d",
"torch.nn.Sigmoid",
"torch.nn.ReLU",
"torch.nn.Dropout",
"torch.nn.functional.adaptive_avg_pool2d",
"torch.nn.Sequential",
"torch.nn.Dropout2d",
"torch.nn.Conv2d",
"torchvision.models.resnet34",
"torch.nn.MaxPool2d",
"torch.nn.AdaptiveAvg... | [((1596, 1668), 'torch.nn.functional.upsample', 'F.upsample', (['x_glob'], {'scale_factor': '(16)', 'mode': '"""bilinear"""', 'align_corners': '(True)'}), "(x_glob, scale_factor=16, mode='bilinear', align_corners=True)\n", (1606, 1668), True, 'import torch.nn.functional as F\n'), ((1871, 1938), 'torch.nn.functional.upsample', 'F.upsample', (['d3'], {'scale_factor': '(2)', 'mode': '"""bilinear"""', 'align_corners': '(True)'}), "(d3, scale_factor=2, mode='bilinear', align_corners=True)\n", (1881, 1938), True, 'import torch.nn.functional as F\n'), ((1987, 2054), 'torch.nn.functional.upsample', 'F.upsample', (['d2'], {'scale_factor': '(2)', 'mode': '"""bilinear"""', 'align_corners': '(True)'}), "(d2, scale_factor=2, mode='bilinear', align_corners=True)\n", (1997, 2054), True, 'import torch.nn.functional as F\n'), ((2243, 2335), 'torch.nn.Conv2d', 'nn.Conv2d', (['input_dim', 'output_dim'], {'kernel_size': '(3)', 'dilation': 'rate', 'padding': 'rate', 'bias': '(False)'}), '(input_dim, output_dim, kernel_size=3, dilation=rate, padding=rate,\n bias=False)\n', (2252, 2335), True, 'import torch.nn as nn\n'), ((2358, 2384), 'torch.nn.BatchNorm2d', 'nn.BatchNorm2d', (['output_dim'], {}), '(output_dim)\n', (2372, 2384), True, 'import torch.nn as nn\n'), ((2411, 2423), 'torch.nn.ELU', 'nn.ELU', (['(True)'], {}), '(True)\n', (2417, 2423), True, 'import torch.nn as nn\n'), ((2571, 2619), 'torch.nn.Conv2d', 'nn.Conv2d', (['channel', '(1)'], {'kernel_size': '(1)', 'bias': '(False)'}), '(channel, 1, kernel_size=1, bias=False)\n', (2580, 2619), True, 'import torch.nn as nn\n'), ((2643, 2655), 'torch.nn.Sigmoid', 'nn.Sigmoid', ([], {}), '()\n', (2653, 2655), True, 'import torch.nn as nn\n'), ((2898, 2921), 'torch.nn.AdaptiveAvgPool2d', 'nn.AdaptiveAvgPool2d', (['(1)'], {}), '(1)\n', (2918, 2921), True, 'import torch.nn as nn\n'), ((2943, 3012), 'torch.nn.Conv2d', 'nn.Conv2d', (['input_dim', '(input_dim // reduction)'], {'kernel_size': '(1)', 'stride': 
'(1)'}), '(input_dim, input_dim // reduction, kernel_size=1, stride=1)\n', (2952, 3012), True, 'import torch.nn as nn\n'), ((3034, 3103), 'torch.nn.Conv2d', 'nn.Conv2d', (['(input_dim // reduction)', 'input_dim'], {'kernel_size': '(1)', 'stride': '(1)'}), '(input_dim // reduction, input_dim, kernel_size=1, stride=1)\n', (3043, 3103), True, 'import torch.nn as nn\n'), ((3124, 3145), 'torch.nn.ReLU', 'nn.ReLU', ([], {'inplace': '(True)'}), '(inplace=True)\n', (3131, 3145), True, 'import torch.nn as nn\n'), ((3169, 3181), 'torch.nn.Sigmoid', 'nn.Sigmoid', ([], {}), '()\n', (3179, 3181), True, 'import torch.nn as nn\n'), ((3720, 3792), 'torch.nn.functional.upsample', 'F.upsample', ([], {'input': 'x', 'scale_factor': '(2)', 'mode': '"""bilinear"""', 'align_corners': '(True)'}), "(input=x, scale_factor=2, mode='bilinear', align_corners=True)\n", (3730, 3792), True, 'import torch.nn.functional as F\n'), ((4179, 4216), 'torch.nn.Conv2d', 'nn.Conv2d', (['x_in', 'x_out', '(1)'], {'bias': '(False)'}), '(x_in, x_out, 1, bias=False)\n', (4188, 4216), True, 'import torch.nn as nn\n'), ((4240, 4286), 'torch.nn.ConvTranspose2d', 'nn.ConvTranspose2d', (['up_in', 'up_out', '(2)'], {'stride': '(2)'}), '(up_in, up_out, 2, stride=2)\n', (4258, 4286), True, 'import torch.nn as nn\n'), ((4305, 4326), 'torch.nn.BatchNorm2d', 'nn.BatchNorm2d', (['n_out'], {}), '(n_out)\n', (4319, 4326), True, 'import torch.nn as nn\n'), ((4347, 4360), 'torch.nn.ReLU', 'nn.ReLU', (['(True)'], {}), '(True)\n', (4354, 4360), True, 'import torch.nn as nn\n'), ((4561, 4586), 'torch.cat', 'torch.cat', (['[up_p, x_p]', '(1)'], {}), '([up_p, x_p], 1)\n', (4570, 4586), False, 'import torch\n'), ((5094, 5127), 'torchvision.models.resnet34', 'torchvision.models.resnet34', (['(True)'], {}), '(True)\n', (5121, 5127), False, 'import torchvision\n'), ((5150, 5217), 'torch.nn.Sequential', 'nn.Sequential', (['self.resnet.conv1', 'self.resnet.bn1', 'self.resnet.relu'], {}), '(self.resnet.conv1, self.resnet.bn1, 
self.resnet.relu)\n', (5163, 5217), True, 'import torch.nn as nn\n'), ((7447, 7480), 'torchvision.models.resnet34', 'torchvision.models.resnet34', (['(True)'], {}), '(True)\n', (7474, 7480), False, 'import torchvision\n'), ((7503, 7570), 'torch.nn.Sequential', 'nn.Sequential', (['self.resnet.conv1', 'self.resnet.bn1', 'self.resnet.relu'], {}), '(self.resnet.conv1, self.resnet.bn1, self.resnet.relu)\n', (7516, 7570), True, 'import torch.nn as nn\n'), ((8392, 8409), 'torch.nn.Dropout2d', 'nn.Dropout2d', (['(0.4)'], {}), '(0.4)\n', (8404, 8409), True, 'import torch.nn as nn\n'), ((8433, 8448), 'torch.nn.Dropout', 'nn.Dropout', (['(0.4)'], {}), '(0.4)\n', (8443, 8448), True, 'import torch.nn as nn\n'), ((8520, 8563), 'torch.nn.Conv2d', 'nn.Conv2d', (['(64)', '(1)'], {'kernel_size': '(1)', 'bias': '(False)'}), '(64, 1, kernel_size=1, bias=False)\n', (8529, 8563), True, 'import torch.nn as nn\n'), ((11020, 11053), 'torchvision.models.resnet34', 'torchvision.models.resnet34', (['(True)'], {}), '(True)\n', (11047, 11053), False, 'import torchvision\n'), ((13012, 13033), 'torch.nn.functional.dropout2d', 'F.dropout2d', (['f'], {'p': '(0.4)'}), '(f, p=0.4)\n', (13023, 13033), True, 'import torch.nn.functional as F\n'), ((231, 254), 'torch.nn.AdaptiveAvgPool2d', 'nn.AdaptiveAvgPool2d', (['(1)'], {}), '(1)\n', (251, 254), True, 'import torch.nn as nn\n'), ((290, 349), 'torch.nn.Conv2d', 'nn.Conv2d', (['input_dim', 'output_dim'], {'kernel_size': '(1)', 'bias': '(False)'}), '(input_dim, output_dim, kernel_size=1, bias=False)\n', (299, 349), True, 'import torch.nn as nn\n'), ((389, 468), 'torch.nn.Conv2d', 'nn.Conv2d', (['input_dim', 'input_dim'], {'kernel_size': '(5)', 'stride': '(2)', 'padding': '(2)', 'bias': '(False)'}), '(input_dim, input_dim, kernel_size=5, stride=2, padding=2, bias=False)\n', (398, 468), True, 'import torch.nn as nn\n'), ((507, 532), 'torch.nn.BatchNorm2d', 'nn.BatchNorm2d', (['input_dim'], {}), '(input_dim)\n', (521, 532), True, 'import torch.nn as nn\n'), 
((571, 583), 'torch.nn.ELU', 'nn.ELU', (['(True)'], {}), '(True)\n', (577, 583), True, 'import torch.nn as nn\n'), ((622, 692), 'torch.nn.Conv2d', 'nn.Conv2d', (['input_dim', 'output_dim'], {'kernel_size': '(5)', 'padding': '(2)', 'bias': '(False)'}), '(input_dim, output_dim, kernel_size=5, padding=2, bias=False)\n', (631, 692), True, 'import torch.nn as nn\n'), ((731, 757), 'torch.nn.BatchNorm2d', 'nn.BatchNorm2d', (['output_dim'], {}), '(output_dim)\n', (745, 757), True, 'import torch.nn as nn\n'), ((796, 808), 'torch.nn.ELU', 'nn.ELU', (['(True)'], {}), '(True)\n', (802, 808), True, 'import torch.nn as nn\n'), ((848, 927), 'torch.nn.Conv2d', 'nn.Conv2d', (['input_dim', 'input_dim'], {'kernel_size': '(3)', 'stride': '(2)', 'padding': '(1)', 'bias': '(False)'}), '(input_dim, input_dim, kernel_size=3, stride=2, padding=1, bias=False)\n', (857, 927), True, 'import torch.nn as nn\n'), ((966, 991), 'torch.nn.BatchNorm2d', 'nn.BatchNorm2d', (['input_dim'], {}), '(input_dim)\n', (980, 991), True, 'import torch.nn as nn\n'), ((1030, 1042), 'torch.nn.ELU', 'nn.ELU', (['(True)'], {}), '(True)\n', (1036, 1042), True, 'import torch.nn as nn\n'), ((1081, 1151), 'torch.nn.Conv2d', 'nn.Conv2d', (['input_dim', 'output_dim'], {'kernel_size': '(3)', 'padding': '(1)', 'bias': '(False)'}), '(input_dim, output_dim, kernel_size=3, padding=1, bias=False)\n', (1090, 1151), True, 'import torch.nn as nn\n'), ((1190, 1216), 'torch.nn.BatchNorm2d', 'nn.BatchNorm2d', (['output_dim'], {}), '(output_dim)\n', (1204, 1216), True, 'import torch.nn as nn\n'), ((1255, 1267), 'torch.nn.ELU', 'nn.ELU', (['(True)'], {}), '(True)\n', (1261, 1267), True, 'import torch.nn as nn\n'), ((1305, 1364), 'torch.nn.Conv2d', 'nn.Conv2d', (['input_dim', 'output_dim'], {'kernel_size': '(1)', 'bias': '(False)'}), '(input_dim, output_dim, kernel_size=1, bias=False)\n', (1314, 1364), True, 'import torch.nn as nn\n'), ((1401, 1427), 'torch.nn.BatchNorm2d', 'nn.BatchNorm2d', (['output_dim'], {}), '(output_dim)\n', 
(1415, 1427), True, 'import torch.nn as nn\n'), ((1464, 1476), 'torch.nn.ELU', 'nn.ELU', (['(True)'], {}), '(True)\n', (1470, 1476), True, 'import torch.nn as nn\n'), ((3835, 3855), 'torch.cat', 'torch.cat', (['[x, e]', '(1)'], {}), '([x, e], 1)\n', (3844, 3855), False, 'import torch\n'), ((5765, 5783), 'torch.nn.MaxPool2d', 'nn.MaxPool2d', (['(2)', '(2)'], {}), '(2, 2)\n', (5777, 5783), True, 'import torch.nn as nn\n'), ((6049, 6093), 'torch.nn.Conv2d', 'nn.Conv2d', (['(320)', '(64)'], {'kernel_size': '(3)', 'padding': '(1)'}), '(320, 64, kernel_size=3, padding=1)\n', (6058, 6093), True, 'import torch.nn as nn\n'), ((6130, 6142), 'torch.nn.ELU', 'nn.ELU', (['(True)'], {}), '(True)\n', (6136, 6142), True, 'import torch.nn as nn\n'), ((6179, 6230), 'torch.nn.Conv2d', 'nn.Conv2d', (['(64)', 'n_classes'], {'kernel_size': '(1)', 'bias': '(False)'}), '(64, n_classes, kernel_size=1, bias=False)\n', (6188, 6230), True, 'import torch.nn as nn\n'), ((8118, 8136), 'torch.nn.MaxPool2d', 'nn.MaxPool2d', (['(2)', '(2)'], {}), '(2, 2)\n', (8130, 8136), True, 'import torch.nn as nn\n'), ((8605, 8623), 'torch.nn.Linear', 'nn.Linear', (['(512)', '(64)'], {}), '(512, 64)\n', (8614, 8623), True, 'import torch.nn as nn\n'), ((8665, 8677), 'torch.nn.ELU', 'nn.ELU', (['(True)'], {}), '(True)\n', (8671, 8677), True, 'import torch.nn as nn\n'), ((8720, 8736), 'torch.nn.Linear', 'nn.Linear', (['(64)', '(1)'], {}), '(64, 1)\n', (8729, 8736), True, 'import torch.nn as nn\n'), ((8779, 8791), 'torch.nn.Sigmoid', 'nn.Sigmoid', ([], {}), '()\n', (8789, 8791), True, 'import torch.nn as nn\n'), ((8828, 8884), 'torch.nn.Conv2d', 'nn.Conv2d', (['(128)', '(64)'], {'kernel_size': '(3)', 'padding': '(1)', 'bias': '(False)'}), '(128, 64, kernel_size=3, padding=1, bias=False)\n', (8837, 8884), True, 'import torch.nn as nn\n'), ((8921, 8933), 'torch.nn.ELU', 'nn.ELU', (['(True)'], {}), '(True)\n', (8927, 8933), True, 'import torch.nn as nn\n'), ((8970, 9021), 'torch.nn.Conv2d', 'nn.Conv2d', (['(64)', 
'n_classes'], {'kernel_size': '(1)', 'bias': '(False)'}), '(64, n_classes, kernel_size=1, bias=False)\n', (8979, 9021), True, 'import torch.nn as nn\n'), ((11103, 11157), 'torch.nn.Conv2d', 'nn.Conv2d', (['(3)', '(64)'], {'kernel_size': '(3)', 'padding': '(1)', 'bias': '(False)'}), '(3, 64, kernel_size=3, padding=1, bias=False)\n', (11112, 11157), True, 'import torch.nn as nn\n'), ((11728, 11746), 'torch.nn.MaxPool2d', 'nn.MaxPool2d', (['(2)', '(2)'], {}), '(2, 2)\n', (11740, 11746), True, 'import torch.nn as nn\n'), ((11969, 12013), 'torch.nn.Conv2d', 'nn.Conv2d', (['(256)', '(32)'], {'kernel_size': '(3)', 'padding': '(1)'}), '(256, 32, kernel_size=3, padding=1)\n', (11978, 12013), True, 'import torch.nn as nn\n'), ((12050, 12062), 'torch.nn.ELU', 'nn.ELU', (['(True)'], {}), '(True)\n', (12056, 12062), True, 'import torch.nn as nn\n'), ((12099, 12150), 'torch.nn.Conv2d', 'nn.Conv2d', (['(32)', 'n_classes'], {'kernel_size': '(1)', 'bias': '(False)'}), '(32, n_classes, kernel_size=1, bias=False)\n', (12108, 12150), True, 'import torch.nn as nn\n'), ((6854, 6921), 'torch.nn.functional.upsample', 'F.upsample', (['d2'], {'scale_factor': '(2)', 'mode': '"""bilinear"""', 'align_corners': '(True)'}), "(d2, scale_factor=2, mode='bilinear', align_corners=True)\n", (6864, 6921), True, 'import torch.nn.functional as F\n'), ((6946, 7013), 'torch.nn.functional.upsample', 'F.upsample', (['d3'], {'scale_factor': '(4)', 'mode': '"""bilinear"""', 'align_corners': '(True)'}), "(d3, scale_factor=4, mode='bilinear', align_corners=True)\n", (6956, 7013), True, 'import torch.nn.functional as F\n'), ((7038, 7105), 'torch.nn.functional.upsample', 'F.upsample', (['d4'], {'scale_factor': '(8)', 'mode': '"""bilinear"""', 'align_corners': '(True)'}), "(d4, scale_factor=8, mode='bilinear', align_corners=True)\n", (7048, 7105), True, 'import torch.nn.functional as F\n'), ((7130, 7198), 'torch.nn.functional.upsample', 'F.upsample', (['d5'], {'scale_factor': '(16)', 'mode': '"""bilinear"""', 
'align_corners': '(True)'}), "(d5, scale_factor=16, mode='bilinear', align_corners=True)\n", (7140, 7198), True, 'import torch.nn.functional as F\n'), ((9363, 9403), 'torch.nn.functional.adaptive_avg_pool2d', 'F.adaptive_avg_pool2d', (['e5'], {'output_size': '(1)'}), '(e5, output_size=1)\n', (9384, 9403), True, 'import torch.nn.functional as F\n'), ((9793, 9860), 'torch.nn.functional.upsample', 'F.upsample', (['d2'], {'scale_factor': '(2)', 'mode': '"""bilinear"""', 'align_corners': '(True)'}), "(d2, scale_factor=2, mode='bilinear', align_corners=True)\n", (9803, 9860), True, 'import torch.nn.functional as F\n'), ((9885, 9952), 'torch.nn.functional.upsample', 'F.upsample', (['d3'], {'scale_factor': '(4)', 'mode': '"""bilinear"""', 'align_corners': '(True)'}), "(d3, scale_factor=4, mode='bilinear', align_corners=True)\n", (9895, 9952), True, 'import torch.nn.functional as F\n'), ((9977, 10044), 'torch.nn.functional.upsample', 'F.upsample', (['d4'], {'scale_factor': '(8)', 'mode': '"""bilinear"""', 'align_corners': '(True)'}), "(d4, scale_factor=8, mode='bilinear', align_corners=True)\n", (9987, 10044), True, 'import torch.nn.functional as F\n'), ((10069, 10137), 'torch.nn.functional.upsample', 'F.upsample', (['d5'], {'scale_factor': '(16)', 'mode': '"""bilinear"""', 'align_corners': '(True)'}), "(d5, scale_factor=16, mode='bilinear', align_corners=True)\n", (10079, 10137), True, 'import torch.nn.functional as F\n'), ((12725, 12792), 'torch.nn.functional.upsample', 'F.upsample', (['d3'], {'scale_factor': '(2)', 'mode': '"""bilinear"""', 'align_corners': '(True)'}), "(d3, scale_factor=2, mode='bilinear', align_corners=True)\n", (12735, 12792), True, 'import torch.nn.functional as F\n'), ((12817, 12884), 'torch.nn.functional.upsample', 'F.upsample', (['d4'], {'scale_factor': '(4)', 'mode': '"""bilinear"""', 'align_corners': '(True)'}), "(d4, scale_factor=4, mode='bilinear', align_corners=True)\n", (12827, 12884), True, 'import torch.nn.functional as F\n'), ((12909, 
12976), 'torch.nn.functional.upsample', 'F.upsample', (['d5'], {'scale_factor': '(8)', 'mode': '"""bilinear"""', 'align_corners': '(True)'}), "(d5, scale_factor=8, mode='bilinear', align_corners=True)\n", (12919, 12976), True, 'import torch.nn.functional as F\n')] |
from django.utils.translation import ugettext_lazy as _
# Shipping-speed identifiers (presumably the Amazon MWS fulfillment values --
# confirm against the MWS Fulfillment Outbound API docs).
SHIPPING_STANDARD = 'Standard'
SHIPPING_EXPEDITED = 'Expedited'
SHIPPING_PRIORITY = 'Priority'
# Django "choices" tuples: (stored value, translatable human-readable label).
SHIPPING_SPEED_CATEGORIES = (
    (SHIPPING_STANDARD, _("Standard")),
    (SHIPPING_EXPEDITED, _("Expedited")),
    (SHIPPING_PRIORITY, _("Priority")),
)
# Fulfillment method identifiers.
METHOD_CONSUMER = 'Consumer'
METHOD_REMOVAL = 'Removal'
FULFILLMENT_METHODS = (
    (METHOD_CONSUMER, _("Consumer")),
    (METHOD_REMOVAL, _("Removal")),
)
# Fulfillment policy values.
FILL_OR_KILL = 'FillOrKill'
FILL_ALL = 'FillAll'
FILL_ALL_AVAILABLE = 'FillAllAvailable'
class MwsFulfillmentError(BaseException):
    """Base error raised by MWS fulfillment operations."""
    # NOTE(review): deriving from BaseException (not Exception) means generic
    # `except Exception:` handlers will NOT catch this error -- confirm that
    # is intentional; Exception is the conventional base for application errors.
    pass
| [
"django.utils.translation.ugettext_lazy"
] | [((208, 221), 'django.utils.translation.ugettext_lazy', '_', (['"""Standard"""'], {}), "('Standard')\n", (209, 221), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((249, 263), 'django.utils.translation.ugettext_lazy', '_', (['"""Expedited"""'], {}), "('Expedited')\n", (250, 263), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((290, 303), 'django.utils.translation.ugettext_lazy', '_', (['"""Priority"""'], {}), "('Priority')\n", (291, 303), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((412, 425), 'django.utils.translation.ugettext_lazy', '_', (['"""Consumer"""'], {}), "('Consumer')\n", (413, 425), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((449, 461), 'django.utils.translation.ugettext_lazy', '_', (['"""Removal"""'], {}), "('Removal')\n", (450, 461), True, 'from django.utils.translation import ugettext_lazy as _\n')] |
# Module-level state for the interactive list-processing menu below.
import os
from tkinter import *
from tkinter import filedialog
# NOTE(review): inicio() calls re.split but `re` is never imported in this
# module -- as written those calls raise NameError (swallowed by bare except).
# NOTE(review): `global` statements at module level are no-ops; they have no
# effect here and can be removed.
global archivo
archivo= []
cadena2 = []
archivo = []
global cadena_inicial
cadena_inicial = []
global lista_numeros
lista_numeros = []
global intlist
intlist= []
global numeros1
numeros1=[]
global lista_buscar2
lista_buscar2 = []
lista_buscar_numero = []
global ordenar
ordenar=[]
global lista_ordenar
lista_ordenar=[]
global ordenar_final
ordenar_final=[]
global numeros_html_2
numeros_html_2=[]
global lista2
lista2=[]
# Paths of the input files chosen via the Tk file dialog.
global lista_ruta
lista_ruta=[]
global lista_buscar3
lista_buscar3=[]
global lista_numeros_anidados
lista_numeros_anidados=[]
global lista_numeros_anidados_buscar
lista_numeros_anidados_buscar=[]
global intlist4
intlist4=[]
global lista_numeros2_buscar
lista_numeros2_buscar=[]
def buscar_(lista,key):
    """Return every index at which *key* occurs in *lista*.

    Keeps the original contract: a list of positions when the value is
    present (preceded by the blank line the original printed on success),
    or the message string "No se encontro el número" when it is absent.

    Improvement over the original: a single comprehension replaces the
    manual flag variable and append loop.
    """
    posiciones = [indice for indice, valor in enumerate(lista) if valor == key]
    if not posiciones:
        return "No se encontro el número"
    print("")  # preserved: the original printed a blank line when found
    return posiciones
def ordenamiento_burbuja(lista):
    """Sort *lista* in place with bubble sort and return the same list.

    Improvement over the original: the classic early-exit -- when a full
    pass performs no swap the list is already sorted and the remaining
    passes are skipped (the original always ran all n-1 passes).
    """
    n = len(lista)
    for i in range(n - 1):
        hubo_intercambio = False
        for j in range(n - i - 1):
            if lista[j] > lista[j + 1]:
                lista[j], lista[j + 1] = lista[j + 1], lista[j]
                hubo_intercambio = True
        if not hubo_intercambio:
            break  # already sorted: no swaps happened in this pass
    return lista
def inicio():
    """Run the interactive console menu of the list-processing practice.

    Input-file format (one record per line)::

        NOMBRE=n1,n2,n3 ORDENAR[,BUSCAR] [valor]

    Options:
        1) pick an input file with a Tk file dialog
        2) print each list next to its bubble-sorted copy
        3) print each search value next to the positions where it occurs
        4) both of the above
        5) write both tables to ``reporte0.html`` and open it
        6) print author credits and exit

    Fixes over the original implementation:
      * ``re`` was never imported at module level, so every parse raised a
        ``NameError`` that a bare ``except:`` silently swallowed (options
        2-5 only printed blank lines); the import is now local.
      * input files are closed via ``with`` instead of leaking handles.
      * the most recently loaded file (``lista_ruta[-1]``) is used instead
        of always the first one (``lista_ruta[0]``).
      * report rows are accumulated in local lists, so generating the
        report twice no longer duplicates rows (the original appended to
        module-level globals that were never cleared).
    """
    import re  # the original module never imported re, breaking options 2-5

    def _parse_linea(linea):
        """Split one record into (nombre, numeros, hay_ordenar, hay_buscar, valor)."""
        nombre, _, resto = linea.partition("=")
        partes = re.split(r'\s+', resto.strip())
        numeros = partes[0].split(",")
        comandos = partes[1] if len(partes) > 1 else ""
        hay_ordenar = "ORDENAR" in comandos
        hay_buscar = "BUSCAR" in comandos
        valor = partes[2] if hay_buscar and len(partes) > 2 else None
        return nombre.split(",")[0], numeros, hay_ordenar, hay_buscar, valor

    def _escribir_reporte(ruta_html, filas_orden, filas_busca):
        """Write the sorted-lists and search-results tables to *ruta_html*."""
        with open(ruta_html, "w") as file:
            file.write("<!DOCTYPE HTML>\n<html>\n<head>\n")
            file.write("<style>table, td, th {border: 1px solid black;}"
                       "table {width: 100%;border-collapse: collapse;}</style>\n")
            file.write("</head>\n<body>\n")
            file.write("<h2>Práctica 1 Lenguajes Formales y de Programación</h2>\n")
            file.write("<table id=tabla1>\n<thead>\n<tr>\n")
            file.write("<th>Lista Original</th>\n<th>Lista Ordenada</th>\n")
            file.write("</tr>\n</thead>\n<tbody>\n")
            for nombre, original, ordenada in filas_orden:
                file.write("<tr><td>%s %s ORDENAR </td>" % (nombre, original))
                file.write("<td>%s Ordenado %s</td></tr>" % (nombre, ordenada))
            file.write("</tbody>\n</table>\n<br/>\n")
            if filas_busca:
                file.write("<table id=tabla2>\n<thead>\n<tr>\n")
                file.write("<th>Lista a buscar</th>\n<th>Posición</th>\n")
                file.write("</tr>\n</thead>\n<tbody>\n")
                for nombre, numeros, valor, posiciones in filas_busca:
                    file.write("<tr><td>%s=%s  BUSCAR %s</td>" % (nombre, numeros, valor))
                    file.write("<td>%s</td></tr>" % (posiciones,))
                file.write("</tbody>\n</table>\n")
            file.write("</body>\n</html>\n")

    while True:
        print("\n")
        print("\t1) Cargar Archivo de Entrada")
        print("\t2) Desplegar listas ordenadas")
        print("\t3) Desplegar búsquedas")
        print("\t4) Desplegar todas")
        print("\t5) Desplegar todas a archivos")
        print("\t6. Salir")
        op = int(input("\tEliga una opción\n"))

        if op == 1:
            # Let the user pick the input file; remember its path for later use.
            root = Tk()
            root.fileName = filedialog.askopenfilename()
            lista_ruta.append(root.fileName)
            print("\t Cargando...")
            print("\t Se cargo con éxito")

        if op in (2, 3, 4):
            print("\n")
            with open(lista_ruta[-1], 'r') as archivo:
                for linea in archivo:
                    try:
                        nombre, numeros, hay_ordenar, hay_buscar, valor = _parse_linea(linea)
                    except IndexError:
                        print("")  # malformed record: keep original blank-line behaviour
                        continue
                    if op in (2, 4) and hay_ordenar:
                        print(nombre, ":", numeros, " | ", "Resultado de ordenar", ":",
                              ordenamiento_burbuja(list(numeros)))
                    if op in (3, 4) and hay_buscar and valor is not None:
                        try:
                            enteros = [int(x) for x in numeros]
                            print(nombre, ":", numeros, " | ", " valor buscado: ", valor,
                                  " | ", "encontrado: ", str(buscar_(enteros, int(valor))))
                        except ValueError:
                            print("")  # non-numeric data: skip, as the original did

        if op == 5:
            print("\n")
            filas_orden = []   # (nombre, lista original, lista ordenada)
            filas_busca = []   # (nombre, lista original, valor, posiciones)
            with open(lista_ruta[-1], 'r') as archivo:
                for linea in archivo:
                    try:
                        nombre, numeros, hay_ordenar, hay_buscar, valor = _parse_linea(linea)
                        if hay_ordenar:
                            filas_orden.append(
                                (nombre, list(numeros), ordenamiento_burbuja(list(numeros))))
                        if hay_buscar and valor is not None:
                            enteros = [int(x) for x in numeros]
                            filas_busca.append(
                                (nombre, numeros, valor, buscar_(enteros, int(valor))))
                    except (IndexError, ValueError):
                        print("")
            print("Generando...")
            print("Creando html")
            _escribir_reporte("reporte0.html", filas_orden, filas_busca)
            print("Se creo el reporte html correctamente")
            os.startfile("reporte0.html")  # Windows-only, as in the original

        if op == 6:
            print("\t 201901073")
            print("\t <NAME>")
            print("\t <EMAIL>")
            print("\t Lenguajes Formales y de Programación")
            exit()
iniciar=inicio()
| [
"os.startfile",
"tkinter.filedialog.askopenfilename"
] | [((1844, 1872), 'tkinter.filedialog.askopenfilename', 'filedialog.askopenfilename', ([], {}), '()\n', (1870, 1872), False, 'from tkinter import filedialog\n'), ((14254, 14283), 'os.startfile', 'os.startfile', (['"""reporte0.html"""'], {}), "('reporte0.html')\n", (14266, 14283), False, 'import os\n')] |
# Cross-section comparison script: computes a HITRAN line-list cross section
# with and without self-broadening and plots both on a log scale.
from pyexocross.hitran.hitran import HITRANLinelist
from pyexocross.pyexocross import PyExocross
from pyexocross.exomol.exomolbroads import ExomolBroadener
import numpy as np
from pyexocross.util import create_grid_res, convert_to_wavenumber
from pyexocross.writer.hdf5writer import HDF5Writer
import matplotlib.pyplot as plt
# Grid from a constant-resolution wavelength grid (R=15000, 1.1-2.0);
# 10000/lambda presumably converts um -> cm^-1, [::-1] restores ascending order.
wngrid = 10000/create_grid_res(15000,1.1,2.0)[::-1,0]
#hl_h2o = HITRANLinelist('/Users/ahmed/Documents/molecular_data/HITRAN/H2O/H2O.par')
# NOTE(review): the variable is named *_h2o but the file loaded is CH4.par, and
# the broadening files below are 1H2-16O (water) ones -- confirm intent.
hl_h2o= HITRANLinelist('/Users/ahmed/Documents/molecular_data/HITRAN/CH4/CH4.par')
#hl = HITRANLinelist('/Users/ahmed/Documents/molecular_data/HITRAN/CO2/12C16O2.par')
# H2/He pressure broadeners (positional args presumably gamma0 and the
# temperature exponent -- confirm against ExomolBroadener's signature).
h2_h2o = ExomolBroadener(0.0209,0.027,filename='/Users/ahmed/Documents/molecular_data/HITRAN/CH4/1H2-16O__H2.broad',species='H2')
he_h2o = ExomolBroadener(0.0042,0.20,filename='/Users/ahmed/Documents/molecular_data/HITRAN/CH4/1H2-16O__He.broad',species='He')
hl_h2o.add_broadener(h2_h2o,ratio=0.704)
hl_h2o.add_broadener(he_h2o,ratio=0.121)
hl_h2o.add_self_broadener(ratio=0.1)
# h2_ch4 = ExomolBroadener(0.0603,0.5,filename='/Users/ahmed/Documents/molecular_data/HITRAN/CH4/12C-1H4__H2.broad',species='H2')
# he_ch4 = ExomolBroadener(0.0382,0.30,filename='/Users/ahmed/Documents/molecular_data/HITRAN/CH4/12C-1H4__He.broad',species='He')
# hl_ch4.add_broadener(h2_ch4,ratio=0.83)
# hl_ch4.add_broadener(he_ch4,ratio=0.17)
pyexo_h2o = PyExocross(hl_h2o)
#pyexo_ch4 = PyExocross(hl_ch4)
t = 200  # temperature (presumably Kelvin -- confirm)
p = 1.0  # pressure (units not stated here -- confirm)
if __name__ == "__main__":
    # Cross section including the self-broadening contribution.
    wn_h2o_self,xsec_h2o_self = pyexo_h2o.compute_xsec_parallel(wngrid,t,p, chunksize=1000, threshold=0.0, wing_cutoff=25.0,max_workers=2)
    # Effectively disables self broadening by shrinking its ratio to ~0.
    hl_h2o.set_broadener_ratio('self',ratio=1e-10)
    wn_h2o,xsec_h2o = pyexo_h2o.compute_xsec_parallel(wngrid,t,p, chunksize=1000, threshold=0.0, wing_cutoff=25.0,max_workers=2)
    #wn_ch4,xsec_ch4 = pyexo_ch4.compute_xsec(wngrid,t,p, chunksize=100, threshold=0.0, wing_cutoff=25.0)
    plt.figure()
    # plt.plot(wn,xsec,label='pyexo')
    plt.plot(wn_h2o_self,xsec_h2o_self,label='H2O self')
    plt.plot(wn_h2o,xsec_h2o,label='H2O')
    #plt.plot(10000/wn_ch4,xsec_ch4,label='CH4')
    # NOTE(review): the x data plotted are wavenumbers (wn_*), but the label
    # says wavelength -- the commented CH4 line converts with 10000/wn; confirm.
    plt.xlabel(r'Wavelength um')
    plt.ylabel(r'Cross-section cm$^{2}$/molecule')
    plt.yscale('log')
    plt.legend()
    plt.show()
| [
"matplotlib.pyplot.show",
"pyexocross.exomol.exomolbroads.ExomolBroadener",
"pyexocross.hitran.hitran.HITRANLinelist",
"matplotlib.pyplot.ylabel",
"matplotlib.pyplot.xlabel",
"matplotlib.pyplot.plot",
"matplotlib.pyplot.figure",
"pyexocross.util.create_grid_res",
"matplotlib.pyplot.yscale",
"matpl... | [((475, 549), 'pyexocross.hitran.hitran.HITRANLinelist', 'HITRANLinelist', (['"""/Users/ahmed/Documents/molecular_data/HITRAN/CH4/CH4.par"""'], {}), "('/Users/ahmed/Documents/molecular_data/HITRAN/CH4/CH4.par')\n", (489, 549), False, 'from pyexocross.hitran.hitran import HITRANLinelist\n'), ((644, 776), 'pyexocross.exomol.exomolbroads.ExomolBroadener', 'ExomolBroadener', (['(0.0209)', '(0.027)'], {'filename': '"""/Users/ahmed/Documents/molecular_data/HITRAN/CH4/1H2-16O__H2.broad"""', 'species': '"""H2"""'}), "(0.0209, 0.027, filename=\n '/Users/ahmed/Documents/molecular_data/HITRAN/CH4/1H2-16O__H2.broad',\n species='H2')\n", (659, 776), False, 'from pyexocross.exomol.exomolbroads import ExomolBroadener\n'), ((774, 904), 'pyexocross.exomol.exomolbroads.ExomolBroadener', 'ExomolBroadener', (['(0.0042)', '(0.2)'], {'filename': '"""/Users/ahmed/Documents/molecular_data/HITRAN/CH4/1H2-16O__He.broad"""', 'species': '"""He"""'}), "(0.0042, 0.2, filename=\n '/Users/ahmed/Documents/molecular_data/HITRAN/CH4/1H2-16O__He.broad',\n species='He')\n", (789, 904), False, 'from pyexocross.exomol.exomolbroads import ExomolBroadener\n'), ((1374, 1392), 'pyexocross.pyexocross.PyExocross', 'PyExocross', (['hl_h2o'], {}), '(hl_h2o)\n', (1384, 1392), False, 'from pyexocross.pyexocross import PyExocross\n'), ((1899, 1911), 'matplotlib.pyplot.figure', 'plt.figure', ([], {}), '()\n', (1909, 1911), True, 'import matplotlib.pyplot as plt\n'), ((1954, 2008), 'matplotlib.pyplot.plot', 'plt.plot', (['wn_h2o_self', 'xsec_h2o_self'], {'label': '"""H2O self"""'}), "(wn_h2o_self, xsec_h2o_self, label='H2O self')\n", (1962, 2008), True, 'import matplotlib.pyplot as plt\n'), ((2011, 2050), 'matplotlib.pyplot.plot', 'plt.plot', (['wn_h2o', 'xsec_h2o'], {'label': '"""H2O"""'}), "(wn_h2o, xsec_h2o, label='H2O')\n", (2019, 2050), True, 'import matplotlib.pyplot as plt\n'), ((2102, 2129), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['"""Wavelength um"""'], {}), "('Wavelength um')\n", (2112, 
2129), True, 'import matplotlib.pyplot as plt\n'), ((2135, 2180), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['"""Cross-section cm$^{2}$/molecule"""'], {}), "('Cross-section cm$^{2}$/molecule')\n", (2145, 2180), True, 'import matplotlib.pyplot as plt\n'), ((2186, 2203), 'matplotlib.pyplot.yscale', 'plt.yscale', (['"""log"""'], {}), "('log')\n", (2196, 2203), True, 'import matplotlib.pyplot as plt\n'), ((2208, 2220), 'matplotlib.pyplot.legend', 'plt.legend', ([], {}), '()\n', (2218, 2220), True, 'import matplotlib.pyplot as plt\n'), ((2225, 2235), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (2233, 2235), True, 'import matplotlib.pyplot as plt\n'), ((342, 374), 'pyexocross.util.create_grid_res', 'create_grid_res', (['(15000)', '(1.1)', '(2.0)'], {}), '(15000, 1.1, 2.0)\n', (357, 374), False, 'from pyexocross.util import create_grid_res, convert_to_wavenumber\n')] |
from bigflow.workflow import Workflow, hourly_start_time
from datetime import datetime
from datetime import timedelta
class HourlyJob:
    """Demo job for an hourly workflow: reports the window it would process."""

    def __init__(self):
        self.id = 'hourly_job'

    def run(self, runtime):
        """Print the [runtime, runtime + 59m59s] window for this hourly run."""
        window_start = datetime.strptime(runtime, "%Y-%m-%d %H:%M:%S")
        window_end = window_start + timedelta(minutes=59, seconds=59)
        print(f'I should process data with timestamps from: {runtime} to {window_end}')
# Workflow scheduled every hour; hourly_start_time makes each run's `runtime`
# the start of the processed hour.
hourly_workflow = Workflow(
    workflow_id='hourly_workflow',
    schedule_interval='@hourly',
    start_time_factory=hourly_start_time,
    definition=[HourlyJob()])
if __name__ == '__main__':
    # Manual smoke-run with a fixed runtime.
    hourly_workflow.run('2020-01-01 00:00:00')
| [
"datetime.datetime.strptime",
"datetime.timedelta"
] | [((314, 361), 'datetime.datetime.strptime', 'datetime.strptime', (['runtime', '"""%Y-%m-%d %H:%M:%S"""'], {}), "(runtime, '%Y-%m-%d %H:%M:%S')\n", (331, 361), False, 'from datetime import datetime\n'), ((364, 397), 'datetime.timedelta', 'timedelta', ([], {'minutes': '(59)', 'seconds': '(59)'}), '(minutes=59, seconds=59)\n', (373, 397), False, 'from datetime import timedelta\n')] |
#!/usr/local/bin/env python3.7
# -*- coding: utf-8; py-indent-offset:4 -*-
###############################################################################
#
# The MIT License (MIT)
# Copyright (c) 2020 <NAME>
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
# IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
# DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
# OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE
# OR OTHER DEALINGS IN THE SOFTWARE.
###############################################################################
""" This is the main module which execute the program """
import indicators.regression.linear_regression as lr
import indicators.regression.mann_kendall as mk
import charting as cht
import pandas as pd
from optimize_ import Optimize
from manip_data import ManipData as md
class Main(Optimize):
    """Entry point: runs the optimizer and draws the resulting charts."""

    def __init__(self):
        super().__init__()
        super().__call__()
        self.cht_ = cht.Charting(
            self.series, self.date_name, self.default_data, **self.indicator)

    def chart_signal(self):
        """Marks signal on chart (no entry, only when the indicators trigger a signal)"""
        first_indicator = list(self.indicator.keys())[0]
        self.cht_.chart_rsquare(first_indicator, r_square_level=self.r_square_level)

    def chart_trigger(self):
        """Marks entry and exit level on chart"""
        entries = md.pd_tolist(self.trades_track, self.entry_row)
        exits = md.pd_tolist(self.trades_track, self.exit_row)
        markers = {
            'marker_entry': {self.marker_: '^', self.color_mark: 'g',
                             self.marker_signal: entries},
            'marker_exit': {self.marker_: 'v', self.color_mark: 'r',
                            self.marker_signal: exits},
        }
        self.cht_.chart_marker(self.marker_signal, self.marker_, self.color_mark, **markers)
if __name__ == '__main__':
    main_ = Main()
    #main_.chart_signal()
    main_.chart_trigger()
    t= 5  # NOTE(review): leftover debug assignment, never used -- safe to remove
"charting.Charting",
"manip_data.ManipData.pd_tolist"
] | [((1759, 1837), 'charting.Charting', 'cht.Charting', (['self.series', 'self.date_name', 'self.default_data'], {}), '(self.series, self.date_name, self.default_data, **self.indicator)\n', (1771, 1837), True, 'import charting as cht\n'), ((2188, 2235), 'manip_data.ManipData.pd_tolist', 'md.pd_tolist', (['self.trades_track', 'self.entry_row'], {}), '(self.trades_track, self.entry_row)\n', (2200, 2235), True, 'from manip_data import ManipData as md\n'), ((2256, 2302), 'manip_data.ManipData.pd_tolist', 'md.pd_tolist', (['self.trades_track', 'self.exit_row'], {}), '(self.trades_track, self.exit_row)\n', (2268, 2302), True, 'from manip_data import ManipData as md\n')] |
# Generated by Django 3.1.2 on 2021-03-14 11:56
from django.db import migrations, models
class Migration(migrations.Migration):
    """Alters OrderBeta.finish_price to a PositiveIntegerField(default=0)."""
    dependencies = [
        ('cart', '0022_delete_sendpassword'),
    ]
    operations = [
        migrations.AlterField(
            model_name='orderbeta',
            name='finish_price',
            field=models.PositiveIntegerField(default=0),
        ),
    ]
| [
"django.db.models.PositiveIntegerField"
] | [((343, 381), 'django.db.models.PositiveIntegerField', 'models.PositiveIntegerField', ([], {'default': '(0)'}), '(default=0)\n', (370, 381), False, 'from django.db import migrations, models\n')] |
import logging.config
import os
import yaml
# Absolute directory containing this module; the config files below are looked
# up here so the result does not depend on the current working directory.
__location__ = os.path.realpath(
    os.path.join(os.getcwd(), os.path.dirname(__file__)))
def read_logging_config():
    """Configure the logging system from ``logging.cfg`` next to this module.

    On success returns the configured ``logging`` module; when the YAML is
    invalid or ``dictConfig`` fails, the error is printed and ``None`` is
    returned.  A missing file still raises (``open`` is outside the try).
    """
    config_path = os.path.join(__location__, 'logging.cfg')
    with open(config_path, 'r') as stream:
        try:
            cfg = yaml.safe_load(stream)
            logging.config.dictConfig(cfg)
            # do not log azure info messages
            azure_logger = logging.getLogger(
                'azure.core.pipeline.policies.http_logging_policy')
            azure_logger.setLevel(logging.WARNING)
            return logging
        except yaml.YAMLError as exc:
            print(exc)
        except Exception as ex:
            print(ex)
    return None
def read_azure_config():
    """Load Azure storage credentials from ``azure.cfg`` next to this module.

    Returns the triple (connection string, blob SAS URL, blob SAS token),
    or ``(None, None, None)`` when the YAML is invalid or a key is missing.
    A missing file still raises (``open`` is outside the try).
    """
    config_path = os.path.join(__location__, 'azure.cfg')
    with open(config_path, 'r') as stream:
        try:
            cfg = yaml.safe_load(stream)
            keys = ('az_storage_connection_str',
                    'az_storage_blob_sas_url',
                    'az_storage_blob_sas_token')
            return tuple(cfg[k] for k in keys)
        except yaml.YAMLError as exc:
            print(exc)
        except Exception as ex:
            print(ex)
    return None, None, None
| [
"os.path.dirname",
"os.path.join",
"yaml.safe_load",
"os.getcwd"
] | [((96, 107), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (105, 107), False, 'import os\n'), ((109, 134), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (124, 134), False, 'import os\n'), ((180, 221), 'os.path.join', 'os.path.join', (['__location__', '"""logging.cfg"""'], {}), "(__location__, 'logging.cfg')\n", (192, 221), False, 'import os\n'), ((281, 303), 'yaml.safe_load', 'yaml.safe_load', (['stream'], {}), '(stream)\n', (295, 303), False, 'import yaml\n'), ((731, 770), 'os.path.join', 'os.path.join', (['__location__', '"""azure.cfg"""'], {}), "(__location__, 'azure.cfg')\n", (743, 770), False, 'import os\n'), ((828, 850), 'yaml.safe_load', 'yaml.safe_load', (['stream'], {}), '(stream)\n', (842, 850), False, 'import yaml\n')] |
import requests
import logging
from bbot.core import ChatbotEngine, BBotException, BBotCore, BBotExtensionException
from engines.dotflow2.chatbot_engine import DotFlow2LoggerAdapter
class DotFlow2MSCSSentimentAnalysis():
    """Sentiment-analysis extension backed by Microsoft Cognitive Services.

    Registers the ``$simpleSentimentAnalysis`` DotFlow2 function, which sends
    the resolved input text to the Azure Text Analytics sentiment endpoint
    and returns the score the service reports.
    """

    def __init__(self, config: dict, dotbot: dict) -> None:
        """Store configuration; the bot itself is attached later in init()."""
        self.config = config
        self.dotbot = dotbot
        self.bot = None
        self.logger = None
        # Azure endpoint settings; presumably filled in from configuration
        # before the function is invoked -- confirm against the loader.
        self.azure_location = ''
        self.azure_subscription_key = ''
        self.logger_level = ''

    def init(self, bot: ChatbotEngine):
        """Attach the chatbot engine and register the DotFlow2 function."""
        self.bot = bot
        self.logger = DotFlow2LoggerAdapter(
            logging.getLogger('df2_ext.ssent_an'), self, self.bot, '$simpleSentimentAnalysis')
        registration = {'object': self, 'method': 'df2_simpleSentimentAnalysis',
                        'cost': 0.5, 'register_enabled': True}
        bot.register_dotflow2_function('simpleSentimentAnalysis', registration)

    def df2_simpleSentimentAnalysis(self, args, f_type):
        """Return the sentiment score Azure reports for the given text.

        ``args[0]`` is resolved as the text to analyse; when no argument is
        given, the current user input is used instead.  Raises
        ``BBotExtensionException`` when the service returns an error.

        NOTE(review): the Text Analytics v2.0 endpoint used here is a legacy
        API, and the POST carries no ``timeout`` -- consider both on upgrade.
        """
        try:
            input_text = self.bot.resolve_arg(args[0], f_type)
        except IndexError:
            # optional argument: default to the current input()
            input_text = self.bot.call_dotflow2_function('input', [], 'R')

        headers = {
            'Content-Type': 'application/json',
            'Ocp-Apim-Subscription-Key': self.azure_subscription_key,
        }
        payload = {"documents": [{"language": "en", "id": "1", "text": input_text}]}

        self.logger.debug('Requesting sentiment analysis score to Microsoft Cognitive Services...')
        resp = requests.post(
            f'https://{self.azure_location}.api.cognitive.microsoft.com/text/analytics/v2.0/sentiment',
            json=payload, headers=headers)
        response = resp.json()
        self.logger.debug('Returned response: ' + str(response))

        if 'error' in response:
            self.logger.critical(response['error']['message'])
            raise BBotExtensionException(response['error']['message'], BBotCore.FNC_RESPONSE_ERROR)

        return response['documents'][0]['score']
| [
"logging.getLogger",
"bbot.core.BBotExtensionException",
"requests.post"
] | [((1984, 2130), 'requests.post', 'requests.post', (['f"""https://{self.azure_location}.api.cognitive.microsoft.com/text/analytics/v2.0/sentiment"""'], {'json': 'payload', 'headers': 'headers'}), "(\n f'https://{self.azure_location}.api.cognitive.microsoft.com/text/analytics/v2.0/sentiment'\n , json=payload, headers=headers)\n", (1997, 2130), False, 'import requests\n'), ((796, 833), 'logging.getLogger', 'logging.getLogger', (['"""df2_ext.ssent_an"""'], {}), "('df2_ext.ssent_an')\n", (813, 833), False, 'import logging\n'), ((2357, 2443), 'bbot.core.BBotExtensionException', 'BBotExtensionException', (["response['error']['message']", 'BBotCore.FNC_RESPONSE_ERROR'], {}), "(response['error']['message'], BBotCore.\n FNC_RESPONSE_ERROR)\n", (2379, 2443), False, 'from bbot.core import ChatbotEngine, BBotException, BBotCore, BBotExtensionException\n')] |
import os
import pandas as pd
from poor_trader import config
from poor_trader import trading
from poor_trader import systems
class CombinedIndicators(trading.TradingSystem):
def __init__(self, portfolio, systems_method_list, name='CombinedIndicators'):
super(CombinedIndicators, self).__init__(name=name)
self.portfolio = portfolio
self.market = self.portfolio.market
self.systems_method_list = systems_method_list
self.fpath = config.TRADING_SYSTEMS_PATH / '{}.pkl'.format(self.name)
self.df_indicators = pd.DataFrame()
self.init_indicators()
def init_indicators(self):
if os.path.exists(self.fpath):
self.df_indicators = pd.read_pickle(self.fpath)
else:
symbols = self.market.symbols
df_group_quotes = self.market.historical_data
df = pd.DataFrame()
for fname, df_positions in self.systems_method_list:
df_positions.columns = ['{}_{}'.format(col, fname) for col in df_positions.columns]
df = df.join(df_positions, how='outer')
self.df_indicators = df.copy()
self.df_indicators.to_pickle(self.fpath)
def get_indicators(self, trading_period, symbol, direction):
df = self.df_indicators.filter(regex='^{}_'.format(symbol))
df.columns = [col.replace('{}_'.format(symbol), '') for col in df.columns]
positions = df.loc[:trading_period].dropna().shift(1).iloc[-1]
df = pd.DataFrame()
df['Position'] = positions
direction_str = 'LONG' if direction == trading.Direction.LONG else 'SHORT'
return df[df['Position'] == direction_str]
def get_indicator_name(self, trading_period, symbol, direction):
return '_'.join(self.get_indicators(trading_period, symbol, direction).index.values)
def get_close_indicator_name(self, trading_period, symbol, open_direction):
close_direction = trading.Direction.LONG if open_direction == trading.Direction.SHORT else trading.Direction.SHORT
return self.get_indicator_name(trading_period, symbol, close_direction)
def is_long(self, trading_period, symbol):
open_position = self.portfolio.get_open_position(symbol)
if open_position.empty:
return len(self.get_indicators(trading_period, symbol, trading.Direction.LONG).index.values) > 0
return False
def is_short(self, trading_period, symbol):
open_position = self.portfolio.get_open_position(symbol)
if open_position.empty:
return len(self.get_indicators(trading_period, symbol, trading.Direction.SHORT).index.values) > 0
return False
def is_close(self, trading_period, symbol, open_trades):
short_indicators = self.get_indicator_name(trading_period, symbol, trading.Direction.SHORT)
if len(open_trades.index.values) > 1:
print(open_trades)
raise NotImplementedError
for index in open_trades.index.values:
open_indicators = open_trades.loc[index]['Indicator'].split('_')
close_indicators = short_indicators.split('_')
remaining_indicators = [_ for _ in open_indicators if _ not in close_indicators]
return len(remaining_indicators) <= 0
class Turtle(CombinedIndicators):
def __init__(self, portfolio, name='Turtle'):
symbols = portfolio.market.symbols
df_group_quotes = portfolio.df_group_quotes
super(Turtle, self).__init__(portfolio,
[systems.run_atr_channel_breakout(symbols, df_group_quotes),
systems.run_dcsma(symbols, df_group_quotes),
systems.run_slsma(symbols, df_group_quotes)],
name=name)
| [
"pandas.read_pickle",
"os.path.exists",
"poor_trader.systems.run_slsma",
"poor_trader.systems.run_dcsma",
"poor_trader.systems.run_atr_channel_breakout",
"pandas.DataFrame"
] | [((561, 575), 'pandas.DataFrame', 'pd.DataFrame', ([], {}), '()\n', (573, 575), True, 'import pandas as pd\n'), ((650, 676), 'os.path.exists', 'os.path.exists', (['self.fpath'], {}), '(self.fpath)\n', (664, 676), False, 'import os\n'), ((1502, 1516), 'pandas.DataFrame', 'pd.DataFrame', ([], {}), '()\n', (1514, 1516), True, 'import pandas as pd\n'), ((711, 737), 'pandas.read_pickle', 'pd.read_pickle', (['self.fpath'], {}), '(self.fpath)\n', (725, 737), True, 'import pandas as pd\n'), ((869, 883), 'pandas.DataFrame', 'pd.DataFrame', ([], {}), '()\n', (881, 883), True, 'import pandas as pd\n'), ((3555, 3613), 'poor_trader.systems.run_atr_channel_breakout', 'systems.run_atr_channel_breakout', (['symbols', 'df_group_quotes'], {}), '(symbols, df_group_quotes)\n', (3587, 3613), False, 'from poor_trader import systems\n'), ((3653, 3696), 'poor_trader.systems.run_dcsma', 'systems.run_dcsma', (['symbols', 'df_group_quotes'], {}), '(symbols, df_group_quotes)\n', (3670, 3696), False, 'from poor_trader import systems\n'), ((3736, 3779), 'poor_trader.systems.run_slsma', 'systems.run_slsma', (['symbols', 'df_group_quotes'], {}), '(symbols, df_group_quotes)\n', (3753, 3779), False, 'from poor_trader import systems\n')] |
# The MIT License (MIT)
#
# Copyright (c) 2018 PyBER
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
import sys
import os
from data.refs.readers.aff3ct_trace_reader import aff3ctTraceReader
import subprocess
import time
import lib.pyqtgraph.pyqtgraph as pg
from lib.pyqtgraph.pyqtgraph.Qt import QtCore, QtGui, QtWidgets
from lib.pyqtgraph.pyqtgraph.dockarea import *
import numpy as np
class AdvTreeView(QtGui.QTreeView):
wBER = []
wFER = []
wBEFE = []
wThr = []
wDeta = []
fsWatcher = []
lBER = []
lFER = []
lBEFE = []
lThr = []
NoiseTypeIdx = []
Curves = []
dataBEFE = []
dataName = []
# 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15, 16
colors = [0, 1, 2, 4, 5, 6, 7, 8, 9, 10, 12, 13, 14, 15, 16, 17]
lastNoise = []
paths = []
styles = [QtCore.Qt.SolidLine, QtCore.Qt.DashLine, QtCore.Qt.DotLine, QtCore.Qt.DashDotLine, QtCore.Qt.DashDotDotLine]
dashPatterns = [[1, 3, 4, 3], [2, 3, 4, 3], [1, 3, 1, 3], [4, 3, 4, 3], [3, 3, 2, 3], [4, 3, 1, 3]]
NoiseType = ["ebn0", "esn0", "mi", "rop", "ep" ]
NoiseTypeLabel = ["Eb/N0 (dB)", "Es/N0 (dB)", "Mutual Info", "Received Optical Power (dB)", "Event Probability"]
BERLegendPosition = ["BottomLeft", "BottomLeft", "BottomLeft", "BottomLeft", "BottomRight" ]
FERLegendPosition = ["BottomLeft", "BottomLeft", "BottomLeft", "BottomLeft", "BottomRight" ]
BEFELegendPosition = ["TopRight", "TopRight", "TopRight", "TopRight", "BottomRight" ]
ThrLegendPosition = ["BottomRight", "BottomRight", "BottomRight", "BottomRight", "BottomRight" ]
def __init__(self, wBER, wFER, wBEFE, wThr, wDeta):
super().__init__()
self.wBER = wBER
self.wFER = wFER
self.wBEFE = wBEFE
self.wThr = wThr
self.wDeta = wDeta
# create a legend on the plots
self.lBER = self.wBER .addLegend()
self.lFER = self.wFER .addLegend()
self.lBEFE = self.wBEFE.addLegend()
self.lThr = self.wThr .addLegend()
self.NoiseTypeIdx = 0
self.NoiseSelectedByUser = False
self.refreshing_time = time.time()
self.hideLegend()
self.doubleClicked.connect(self.openFileOrDir)
self.fsWatcher = QtCore.QFileSystemWatcher()
self.fsWatcher.fileChanged.connect(self.updateDataAndCurve)
def switchNoiseType(self):
self.NoiseTypeIdx += 1
if self.NoiseTypeIdx == len(self.NoiseType):
self.NoiseTypeIdx = 0
self.refresh()
self.setLabel()
self.NoiseSelectedByUser = True
def switchNoiseTypeRevert(self):
if self.NoiseTypeIdx == 0:
self.NoiseTypeIdx = len(self.NoiseType) -1
else:
self.NoiseTypeIdx -= 1
self.refresh()
self.setLabel()
self.NoiseSelectedByUser = True
def setLabel(self):
newLabel = self.NoiseTypeLabel[self.NoiseTypeIdx]
self.wBER .setLabel('bottom', newLabel)
self.wFER .setLabel('bottom', newLabel)
self.wBEFE.setLabel('bottom', newLabel)
self.wThr .setLabel('bottom', newLabel)
if len(self.paths):
self.showLegend()
else:
self.hideLegend()
def refresh(self):
for name in self.dataName:
self.removeLegendItem(name)
self.Curves = [[] for x in range(len(self.paths))]
self.dataBEFE = [[] for x in range(len(self.paths))]
self.dataName = [[] for x in range(len(self.paths))]
for path in self.paths:
self.updateData(path)
self.updateCurves ()
self.updateDetails()
def switchFileFilter(self):
self.model().setNameFilterDisables(not self.model().nameFilterDisables())
def openFileOrDir(self, *args):
paths = [ self.model().filePath(index) for index in args ]
if len(paths):
if sys.platform == "linux" or sys.platform == "linux2":
subprocess.call(["xdg-open", paths[0]])
elif sys.platform == "darwin":
subprocess.call(["open", paths[0]])
else:
os.startfile(paths[0])
def hideLegend(self):
# hide the legend
if self.lBER: self.lBER = self.setLegendPosition(self.lBER, "Hide")
if self.lFER: self.lFER = self.setLegendPosition(self.lFER, "Hide")
if self.lBEFE: self.lBEFE = self.setLegendPosition(self.lBEFE, "Hide")
if self.lThr: self.lThr = self.setLegendPosition(self.lThr, "Hide")
def setLegendPosition(self, legend, pos):
if pos == "BottomLeft":
legend.anchor(itemPos=(0,1), parentPos=(0,1), offset=( 10,-10))
elif pos == "BottomRight":
legend.anchor(itemPos=(1,1), parentPos=(1,1), offset=(-10,-10))
elif pos == "TopRight":
legend.anchor(itemPos=(1,0), parentPos=(1,0), offset=(-10, 10))
elif pos == "TopLeft":
legend.anchor(itemPos=(0,0), parentPos=(0,0), offset=( 10, 10))
elif pos == "Hide":
legend.anchor(itemPos=(1,0), parentPos=(1,0), offset=(100, 100))
return legend
def showLegend(self):
# display the legend
if self.lBER: self.lBER = self.setLegendPosition(self.lBER, self.BERLegendPosition [self.NoiseTypeIdx])
if self.lFER: self.lFER = self.setLegendPosition(self.lFER, self.FERLegendPosition [self.NoiseTypeIdx])
if self.lBEFE: self.lBEFE = self.setLegendPosition(self.lBEFE, self.BEFELegendPosition[self.NoiseTypeIdx])
if self.lThr: self.lThr = self.setLegendPosition(self.lThr, self.ThrLegendPosition [self.NoiseTypeIdx])
def removeLegendItem(self, name):
if self.lBER: self.lBER .removeItem(name)
if self.lFER: self.lFER .removeItem(name)
if self.lBEFE: self.lBEFE.removeItem(name)
if self.lThr: self.lThr .removeItem(name)
def getPathId(self, path):
if path in self.paths:
curId = 0
for p in self.paths:
if p == path:
return curId
else:
curId = curId +1
return -1
else:
return -1
def updateData(self, path):
pathId = self.getPathId(path)
if pathId == -1:
return
self.Curves [pathId] = aff3ctTraceReader(path)
self.dataBEFE[pathId] = [b/f for b,f in zip(self.Curves[pathId].getTrace("n_be"), self.Curves[pathId].getTrace("n_fe"))]
dataName = self.Curves[pathId].getMetadata("title")
if not dataName:
self.dataName[pathId] = "Curve " + str(pathId)
elif dataName in self.dataName:
self.dataName[pathId] = dataName + "_" + str(pathId)
else:
self.dataName[pathId] = dataName
if not self.Curves[pathId].legendKeyAvailable(self.NoiseType[self.NoiseTypeIdx]):
self.dataName[pathId] = "**" + self.dataName[pathId] + "**"
def updateCurves(self):
self.wBER .clearPlots()
self.wFER .clearPlots()
self.wBEFE.clearPlots()
self.wThr .clearPlots()
# plot the curves
for pathId in range(len(self.paths)):
icolor = self.colors[pathId % len(self.colors)]
pen = pg.mkPen(color=(icolor,8), width=2, style=QtCore.Qt.CustomDashLine)
pen.setDashPattern(self.dashPatterns[pathId % len(self.dashPatterns)])
self.removeLegendItem(self.dataName[pathId])
noiseKey = self.NoiseType[self.NoiseTypeIdx]
if self.Curves[pathId].legendKeyAvailable(noiseKey):
self.wBER. plot(x=self.Curves[pathId].getTrace(noiseKey), y=self.Curves[pathId].getTrace("be_rate"), pen=pen, symbol='x', name=self.dataName[pathId])
self.wFER. plot(x=self.Curves[pathId].getTrace(noiseKey), y=self.Curves[pathId].getTrace("fe_rate"), pen=pen, symbol='x', name=self.dataName[pathId])
self.wBEFE.plot(x=self.Curves[pathId].getTrace(noiseKey), y=self.dataBEFE[pathId], pen=pen, symbol='x', name=self.dataName[pathId])
self.wThr. plot(x=self.Curves[pathId].getTrace(noiseKey), y=self.Curves[pathId].getTrace("sim_thr"), pen=pen, symbol='x', name=self.dataName[pathId])
else:
self.wBER. plot(x=[], y=[], pen=pen, symbol='x', name=self.dataName[pathId])
self.wFER. plot(x=[], y=[], pen=pen, symbol='x', name=self.dataName[pathId])
self.wBEFE.plot(x=[], y=[], pen=pen, symbol='x', name=self.dataName[pathId])
self.wThr. plot(x=[], y=[], pen=pen, symbol='x', name=self.dataName[pathId])
def updateDataAndCurve(self, path):
if (self.refreshing_time + 0.1) < time.time(): # timer to not freeze because of several refreshes asked at the same time
self.refresh()
self.refreshing_time = time.time()
def updateDetails(self):
self.wDeta.clear()
for pathId in range(len(self.paths)):
icolor = self.colors[pathId % len(self.colors)]
path = self.paths[pathId]
# for filename in self.paths:
pen = pg.mkPen(color=(icolor,8), width=2, style=QtCore.Qt.CustomDashLine)
pen.setDashPattern(self.dashPatterns[pathId % len(self.dashPatterns)])
legendArea = DockArea()
dInfo = Dock("", size=(250,900))
legendArea.addDock(dInfo, 'bottom')
firstTitle = True;
layoutLegend = QtGui.QFormLayout()
for entry in self.Curves[pathId].SimuHeader:
if len(entry) == 3 and entry[1]:
if entry[2] == 1:
if not firstTitle:
line = QtGui.QFrame()
line.setFrameShape(QtGui.QFrame.HLine)
line.setFrameShadow(QtGui.QFrame.Sunken)
layoutLegend.addRow(line)
firstTitle = False
layoutLegend.addRow("<h3><u>" + entry[0] + "<u></h3>", QtGui.QLabel(""))
elif entry[2] == 2:
layoutLegend.addRow("<b><u>" + entry[0] + ":<u></b>", QtGui.QLabel(""))
elif entry[2] == 3:
layoutLegend.addRow("<b>" + entry[0] + "</b>: ", QtGui.QLabel(entry[1]))
# Add an horizontal line to seperate
line = QtGui.QFrame()
line.setFrameShape(QtGui.QFrame.HLine)
line.setFrameShadow(QtGui.QFrame.Plain)
layoutLegend.addRow(line)
layoutLegend.addRow("<h3><u>Metadata<u></h3>", QtGui.QLabel(""))
for entry in self.Curves[pathId].Metadata:
if entry == "doi":
url = QtGui.QLineEdit("https://doi.org/" + self.Curves[pathId].Metadata[entry])
url.setReadOnly(True)
layoutLegend.addRow("<b>" + entry + "</b>: ", url)
# if entry == "url":
# url = QtGui.QLabel(str(self.Curves[pathId].Metadata[entry]))
# url.setOpenExternalLinks(True)
# url.setTextInteractionFlags(QtCore.Qt.LinksAccessibleByMouse | QtCore.Qt.TextSelectableByMouse)
# layoutLegend.addRow("<b>" + entry + "</b>: ", url)
# elif entry == "filename":
# url = QtGui.QLabel(str(self.Curves[pathId].Metadata[entry]))
# url.setOpenInternalLinks(True)
# url.setTextInteractionFlags(QtCore.Qt.LinksAccessibleByMouse | QtCore.Qt.TextSelectableByMouse)
# layoutLegend.addRow("<b>" + entry + "</b>: ", url)
else:
lineEdit = QtGui.QLineEdit(self.Curves[pathId].Metadata[entry])
lineEdit.setReadOnly(True)
layoutLegend.addRow("<b>" + entry + "</b>: ", lineEdit)
wCur = QtGui.QWidget()
wCur.setLayout(layoutLegend)
sCur = QtGui.QScrollArea()
sCur.setWidget(wCur)
sCur.setWidgetResizable(True)
dInfo.addWidget(sCur)
self.wDeta.addTab(legendArea, self.dataName[pathId])
def selectionChanged(self, selected, deselected):
super().selectionChanged(selected, deselected)
newPaths = [ self.model().filePath(index) for index in self.selectedIndexes()
if not self.model().isDir(index)] # TODO: remove this restriction
pathsToRemove = []
for p in self.paths:
if p not in newPaths:
pathsToRemove.append(p)
for p in pathsToRemove:
pId = self.getPathId(p)
self.paths.pop(pId)
pathsToAdd = []
for p in newPaths:
if p not in self.paths:
pathsToAdd.append(p)
for p in pathsToAdd:
self.paths.append(p)
if len(pathsToRemove) > 0:
self.fsWatcher.removePaths(pathsToRemove)
if len(pathsToAdd) > 0:
self.fsWatcher.addPaths(pathsToAdd)
self.refresh ()
self.setLabel()
if not self.NoiseSelectedByUser:
self.autoSelectNoise()
def autoSelectNoise(self):
save = self.NoiseTypeIdx
found = False
for i in range(len(self.NoiseType)):
self.NoiseTypeIdx = i
self.refresh()
noiseKey = self.NoiseType[self.NoiseTypeIdx]
for t in self.Curves:
if t.legendKeyAvailable(noiseKey):
found = True
break;
if found:
self.setLabel()
break;
if not found:
self.NoiseTypeIdx = save
self.refresh ()
self.setLabel()
self.NoiseSelectedByUser = False
def selectFolder(self):
options = QtWidgets.QFileDialog.Options()
# options |= QtWidgets.QFileDialog.DontUseNativeDialog
# options |= QtGui.QFileDialog.ShowDirsOnly
dirPath = QtWidgets.QFileDialog.getExistingDirectory(self, "Open a folder", "", options=options)
if dirPath:
oldModel = self.model()
model = createFileSystemModel(dirPath)
self.setModel(model)
self.setRootIndex(model.index(dirPath, 0))
del oldModel
def createFileSystemModel(dirPath):
model = QtGui.QFileSystemModel()
model.setReadOnly(True)
model.setRootPath(dirPath)
model.setFilter(QtCore.QDir.NoDotAndDotDot | QtCore.QDir.AllDirs | QtCore.QDir.AllEntries | QtCore.QDir.Files)
model.setNameFilters(['*.perf', '*.dat', '*.txt', '*.data'])
model.setNameFilterDisables(False)
return model
def generatePannel(wBER, wFER, wBEFE, wThr, wDeta):
if len(sys.argv) >= 2:
os.chdir(sys.argv[1])
else:
os.chdir("./data/")
model = createFileSystemModel(QtCore.QDir.currentPath())
view = AdvTreeView(wBER, wFER, wBEFE, wThr, wDeta)
view.setSelectionMode(QtGui.QAbstractItemView.ExtendedSelection)
view.setModel(model)
view.hideColumn(1);
view.hideColumn(2);
view.hideColumn(3);
view.setColumnWidth(30, 1)
view.setRootIndex(model.index(QtCore.QDir.currentPath(), 0))
view.setAnimated(True)
view.setIconSize(QtCore.QSize(24,24))
view.setExpandsOnDoubleClick(False);
return view
| [
"lib.pyqtgraph.pyqtgraph.Qt.QtGui.QFrame",
"lib.pyqtgraph.pyqtgraph.Qt.QtGui.QFormLayout",
"lib.pyqtgraph.pyqtgraph.Qt.QtGui.QLineEdit",
"os.startfile",
"lib.pyqtgraph.pyqtgraph.Qt.QtGui.QScrollArea",
"lib.pyqtgraph.pyqtgraph.Qt.QtGui.QFileSystemModel",
"lib.pyqtgraph.pyqtgraph.Qt.QtGui.QWidget",
"os.... | [((13455, 13479), 'lib.pyqtgraph.pyqtgraph.Qt.QtGui.QFileSystemModel', 'QtGui.QFileSystemModel', ([], {}), '()\n', (13477, 13479), False, 'from lib.pyqtgraph.pyqtgraph.Qt import QtCore, QtGui, QtWidgets\n'), ((3260, 3271), 'time.time', 'time.time', ([], {}), '()\n', (3269, 3271), False, 'import time\n'), ((3362, 3389), 'lib.pyqtgraph.pyqtgraph.Qt.QtCore.QFileSystemWatcher', 'QtCore.QFileSystemWatcher', ([], {}), '()\n', (3387, 3389), False, 'from lib.pyqtgraph.pyqtgraph.Qt import QtCore, QtGui, QtWidgets\n'), ((6826, 6849), 'data.refs.readers.aff3ct_trace_reader.aff3ctTraceReader', 'aff3ctTraceReader', (['path'], {}), '(path)\n', (6843, 6849), False, 'from data.refs.readers.aff3ct_trace_reader import aff3ctTraceReader\n'), ((13006, 13037), 'lib.pyqtgraph.pyqtgraph.Qt.QtWidgets.QFileDialog.Options', 'QtWidgets.QFileDialog.Options', ([], {}), '()\n', (13035, 13037), False, 'from lib.pyqtgraph.pyqtgraph.Qt import QtCore, QtGui, QtWidgets\n'), ((13153, 13243), 'lib.pyqtgraph.pyqtgraph.Qt.QtWidgets.QFileDialog.getExistingDirectory', 'QtWidgets.QFileDialog.getExistingDirectory', (['self', '"""Open a folder"""', '""""""'], {'options': 'options'}), "(self, 'Open a folder', '',\n options=options)\n", (13195, 13243), False, 'from lib.pyqtgraph.pyqtgraph.Qt import QtCore, QtGui, QtWidgets\n'), ((13837, 13858), 'os.chdir', 'os.chdir', (['sys.argv[1]'], {}), '(sys.argv[1])\n', (13845, 13858), False, 'import os\n'), ((13868, 13887), 'os.chdir', 'os.chdir', (['"""./data/"""'], {}), "('./data/')\n", (13876, 13887), False, 'import os\n'), ((13920, 13945), 'lib.pyqtgraph.pyqtgraph.Qt.QtCore.QDir.currentPath', 'QtCore.QDir.currentPath', ([], {}), '()\n', (13943, 13945), False, 'from lib.pyqtgraph.pyqtgraph.Qt import QtCore, QtGui, QtWidgets\n'), ((14283, 14303), 'lib.pyqtgraph.pyqtgraph.Qt.QtCore.QSize', 'QtCore.QSize', (['(24)', '(24)'], {}), '(24, 24)\n', (14295, 14303), False, 'from lib.pyqtgraph.pyqtgraph.Qt import QtCore, QtGui, QtWidgets\n'), ((7631, 7699), 
'lib.pyqtgraph.pyqtgraph.mkPen', 'pg.mkPen', ([], {'color': '(icolor, 8)', 'width': '(2)', 'style': 'QtCore.Qt.CustomDashLine'}), '(color=(icolor, 8), width=2, style=QtCore.Qt.CustomDashLine)\n', (7639, 7699), True, 'import lib.pyqtgraph.pyqtgraph as pg\n'), ((8934, 8945), 'time.time', 'time.time', ([], {}), '()\n', (8943, 8945), False, 'import time\n'), ((9065, 9076), 'time.time', 'time.time', ([], {}), '()\n', (9074, 9076), False, 'import time\n'), ((9291, 9359), 'lib.pyqtgraph.pyqtgraph.mkPen', 'pg.mkPen', ([], {'color': '(icolor, 8)', 'width': '(2)', 'style': 'QtCore.Qt.CustomDashLine'}), '(color=(icolor, 8), width=2, style=QtCore.Qt.CustomDashLine)\n', (9299, 9359), True, 'import lib.pyqtgraph.pyqtgraph as pg\n'), ((9585, 9604), 'lib.pyqtgraph.pyqtgraph.Qt.QtGui.QFormLayout', 'QtGui.QFormLayout', ([], {}), '()\n', (9602, 9604), False, 'from lib.pyqtgraph.pyqtgraph.Qt import QtCore, QtGui, QtWidgets\n'), ((10259, 10273), 'lib.pyqtgraph.pyqtgraph.Qt.QtGui.QFrame', 'QtGui.QFrame', ([], {}), '()\n', (10271, 10273), False, 'from lib.pyqtgraph.pyqtgraph.Qt import QtCore, QtGui, QtWidgets\n'), ((11468, 11483), 'lib.pyqtgraph.pyqtgraph.Qt.QtGui.QWidget', 'QtGui.QWidget', ([], {}), '()\n', (11481, 11483), False, 'from lib.pyqtgraph.pyqtgraph.Qt import QtCore, QtGui, QtWidgets\n'), ((11527, 11546), 'lib.pyqtgraph.pyqtgraph.Qt.QtGui.QScrollArea', 'QtGui.QScrollArea', ([], {}), '()\n', (11544, 11546), False, 'from lib.pyqtgraph.pyqtgraph.Qt import QtCore, QtGui, QtWidgets\n'), ((14210, 14235), 'lib.pyqtgraph.pyqtgraph.Qt.QtCore.QDir.currentPath', 'QtCore.QDir.currentPath', ([], {}), '()\n', (14233, 14235), False, 'from lib.pyqtgraph.pyqtgraph.Qt import QtCore, QtGui, QtWidgets\n'), ((4805, 4844), 'subprocess.call', 'subprocess.call', (["['xdg-open', paths[0]]"], {}), "(['xdg-open', paths[0]])\n", (4820, 4844), False, 'import subprocess\n'), ((10438, 10454), 'lib.pyqtgraph.pyqtgraph.Qt.QtGui.QLabel', 'QtGui.QLabel', (['""""""'], {}), "('')\n", (10450, 10454), False, 'from 
lib.pyqtgraph.pyqtgraph.Qt import QtCore, QtGui, QtWidgets\n'), ((4883, 4918), 'subprocess.call', 'subprocess.call', (["['open', paths[0]]"], {}), "(['open', paths[0]])\n", (4898, 4918), False, 'import subprocess\n'), ((4932, 4954), 'os.startfile', 'os.startfile', (['paths[0]'], {}), '(paths[0])\n', (4944, 4954), False, 'import os\n'), ((10537, 10610), 'lib.pyqtgraph.pyqtgraph.Qt.QtGui.QLineEdit', 'QtGui.QLineEdit', (["('https://doi.org/' + self.Curves[pathId].Metadata[entry])"], {}), "('https://doi.org/' + self.Curves[pathId].Metadata[entry])\n", (10552, 10610), False, 'from lib.pyqtgraph.pyqtgraph.Qt import QtCore, QtGui, QtWidgets\n'), ((11311, 11363), 'lib.pyqtgraph.pyqtgraph.Qt.QtGui.QLineEdit', 'QtGui.QLineEdit', (['self.Curves[pathId].Metadata[entry]'], {}), '(self.Curves[pathId].Metadata[entry])\n', (11326, 11363), False, 'from lib.pyqtgraph.pyqtgraph.Qt import QtCore, QtGui, QtWidgets\n'), ((9752, 9766), 'lib.pyqtgraph.pyqtgraph.Qt.QtGui.QFrame', 'QtGui.QFrame', ([], {}), '()\n', (9764, 9766), False, 'from lib.pyqtgraph.pyqtgraph.Qt import QtCore, QtGui, QtWidgets\n'), ((9980, 9996), 'lib.pyqtgraph.pyqtgraph.Qt.QtGui.QLabel', 'QtGui.QLabel', (['""""""'], {}), "('')\n", (9992, 9996), False, 'from lib.pyqtgraph.pyqtgraph.Qt import QtCore, QtGui, QtWidgets\n'), ((10084, 10100), 'lib.pyqtgraph.pyqtgraph.Qt.QtGui.QLabel', 'QtGui.QLabel', (['""""""'], {}), "('')\n", (10096, 10100), False, 'from lib.pyqtgraph.pyqtgraph.Qt import QtCore, QtGui, QtWidgets\n'), ((10183, 10205), 'lib.pyqtgraph.pyqtgraph.Qt.QtGui.QLabel', 'QtGui.QLabel', (['entry[1]'], {}), '(entry[1])\n', (10195, 10205), False, 'from lib.pyqtgraph.pyqtgraph.Qt import QtCore, QtGui, QtWidgets\n')] |
# -*- coding: utf-8 -*-
import os
import json
import torch
BASE_DIR = os.path.abspath(os.path.dirname(__file__))
class Config:
def __init__(self, json_file):
self.config = json.loads(open(json_file).read())
self.device = torch.device("cuda:1" if torch.cuda.is_available() else "cpu")
self.cpu_device = torch.device("cpu")
@property
def shuffle_before_epoch_enable(self):
return self.config['Training']['shuffle_before_epoch_enable']
@property
def is_LOMO(self):
return 'test_allele' in self.config['Data']
@property
def test_allele(self):
return self.config['Data'].get('test_allele', None)
@property
def weight_decay(self):
return self.config['Training']['weight_decay']
@property
def bind_core_file(self):
return os.path.join(BASE_DIR, 'dataset', self.config['Data']['bind_core_file'])
@property
def max_len_hla_A(self):
return self.config['Data']['max_len_hla_A']
@property
def max_len_hla_B(self):
return self.config['Data']['max_len_hla_B']
@property
def max_len_pep(self):
return self.config['Data']['max_len_pep']
@property
def validation_ratio(self):
return self.config['Data']['validation_ratio']
@property
def batch_size(self):
return self.config['Training']['batch_size']
@property
def working_dir(self):
return os.path.join(BASE_DIR, self.config['Paths']['working_dir'])
@property
def data_file(self):
return os.path.join(BASE_DIR, 'dataset', self.config['Data']['data_file'])
@property
def test_file(self):
return os.path.join(BASE_DIR, 'dataset', self.config['Data']['test_file'])
@property
def model_save_path(self):
return os.path.join(self.working_dir, 'best_model.pytorch')
@property
def model_config(self):
return self.config['Model']
@property
def grad_clip(self):
return self.config['Training']['grad_clip']
@property
def start_lr(self):
return self.config['Training']['start_lr']
@property
def min_lr(self):
return self.config['Training']['min_lr']
@property
def epochs(self):
return self.config['Training']['epochs']
@property
def loss_delta(self):
return self.config['Training']['loss_delta']
@property
def seq_encode_dim(self):
return self.model_config['seq_encoding_dim']
@property
def encoding_method(self):
return self.model_config['encoding_method']
@property
def do_train(self):
return self.config['do_train']
@property
def do_test(self):
return self.config['do_test']
| [
"os.path.dirname",
"torch.cuda.is_available",
"os.path.join",
"torch.device"
] | [((89, 114), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (104, 114), False, 'import os\n'), ((334, 353), 'torch.device', 'torch.device', (['"""cpu"""'], {}), "('cpu')\n", (346, 353), False, 'import torch\n'), ((832, 904), 'os.path.join', 'os.path.join', (['BASE_DIR', '"""dataset"""', "self.config['Data']['bind_core_file']"], {}), "(BASE_DIR, 'dataset', self.config['Data']['bind_core_file'])\n", (844, 904), False, 'import os\n'), ((1442, 1501), 'os.path.join', 'os.path.join', (['BASE_DIR', "self.config['Paths']['working_dir']"], {}), "(BASE_DIR, self.config['Paths']['working_dir'])\n", (1454, 1501), False, 'import os\n'), ((1557, 1624), 'os.path.join', 'os.path.join', (['BASE_DIR', '"""dataset"""', "self.config['Data']['data_file']"], {}), "(BASE_DIR, 'dataset', self.config['Data']['data_file'])\n", (1569, 1624), False, 'import os\n'), ((1680, 1747), 'os.path.join', 'os.path.join', (['BASE_DIR', '"""dataset"""', "self.config['Data']['test_file']"], {}), "(BASE_DIR, 'dataset', self.config['Data']['test_file'])\n", (1692, 1747), False, 'import os\n'), ((1809, 1861), 'os.path.join', 'os.path.join', (['self.working_dir', '"""best_model.pytorch"""'], {}), "(self.working_dir, 'best_model.pytorch')\n", (1821, 1861), False, 'import os\n'), ((270, 295), 'torch.cuda.is_available', 'torch.cuda.is_available', ([], {}), '()\n', (293, 295), False, 'import torch\n')] |
import unittest
import torch
import numpy as np
from spectralgp.samplers import MeanEllipticalSlice
class TestMeanEllipticalSlice(unittest.TestCase):
def test_m_ess(self, nsamples=10000):
pmean = torch.zeros(2)
pmean[0] = -2.
prior_dist = torch.distributions.MultivariateNormal(pmean, covariance_matrix=torch.eye(2))
lmean = torch.zeros(2)
lmean[0] = 2.
likelihood = torch.distributions.MultivariateNormal(lmean, covariance_matrix=torch.eye(2))
prior_inv = torch.inverse(prior_dist.covariance_matrix)
lik_inv = torch.inverse(likelihood.covariance_matrix)
true_postsigma = torch.inverse(prior_inv + lik_inv)
true_postmu = true_postsigma.matmul(prior_inv.matmul(pmean) + lik_inv.matmul(lmean))
def lfn(x):
lmean = torch.zeros(2)
lmean[0] = 2.
likelihood = torch.distributions.MultivariateNormal(lmean, covariance_matrix=torch.eye(2))
return likelihood.log_prob(x)
#lfn = lambda x: likelihood.log_prob(x)
init = torch.zeros(2)
m_ess_runner = MeanEllipticalSlice(init, prior_dist, lfn, nsamples)
samples, _ = m_ess_runner.run()
samples = samples.numpy()
samples = samples[:, int(nsamples/2):]
est_mean = np.mean(samples,1)
print(est_mean)
est_cov = np.cov(samples)
print(np.linalg.norm(est_mean - true_postmu.numpy()))
print(np.linalg.norm(est_cov - true_postsigma.numpy()))
# import matplotlib.pyplot as plt
# N = 60
# X = np.linspace(-3, 3, N)
# Y = np.linspace(-3, 4, N)
# X, Y = np.meshgrid(X, Y)
# # Pack X and Y into a single 3-dimensional array
# pos = np.empty(X.shape + (2,))
# pos[:, :, 0] = X
# pos[:, :, 1] = Y
# pos = torch.tensor(pos).float()
# posterior_dist = torch.distributions.MultivariateNormal(true_postmu, true_postsigma)
# Z = posterior_dist.log_prob(pos).numpy()
# plt.contourf(X, Y, Z)
# plt.scatter(samples[0,:], samples[1,:], color='black', alpha = 0.3)
# plt.show()
if __name__ == "__main__":
unittest.main()
| [
"numpy.mean",
"spectralgp.samplers.MeanEllipticalSlice",
"torch.eye",
"unittest.main",
"numpy.cov",
"torch.zeros",
"torch.inverse"
] | [((2193, 2208), 'unittest.main', 'unittest.main', ([], {}), '()\n', (2206, 2208), False, 'import unittest\n'), ((210, 224), 'torch.zeros', 'torch.zeros', (['(2)'], {}), '(2)\n', (221, 224), False, 'import torch\n'), ((372, 386), 'torch.zeros', 'torch.zeros', (['(2)'], {}), '(2)\n', (383, 386), False, 'import torch\n'), ((529, 572), 'torch.inverse', 'torch.inverse', (['prior_dist.covariance_matrix'], {}), '(prior_dist.covariance_matrix)\n', (542, 572), False, 'import torch\n'), ((591, 634), 'torch.inverse', 'torch.inverse', (['likelihood.covariance_matrix'], {}), '(likelihood.covariance_matrix)\n', (604, 634), False, 'import torch\n'), ((661, 695), 'torch.inverse', 'torch.inverse', (['(prior_inv + lik_inv)'], {}), '(prior_inv + lik_inv)\n', (674, 695), False, 'import torch\n'), ((1081, 1095), 'torch.zeros', 'torch.zeros', (['(2)'], {}), '(2)\n', (1092, 1095), False, 'import torch\n'), ((1120, 1172), 'spectralgp.samplers.MeanEllipticalSlice', 'MeanEllipticalSlice', (['init', 'prior_dist', 'lfn', 'nsamples'], {}), '(init, prior_dist, lfn, nsamples)\n', (1139, 1172), False, 'from spectralgp.samplers import MeanEllipticalSlice\n'), ((1314, 1333), 'numpy.mean', 'np.mean', (['samples', '(1)'], {}), '(samples, 1)\n', (1321, 1333), True, 'import numpy as np\n'), ((1375, 1390), 'numpy.cov', 'np.cov', (['samples'], {}), '(samples)\n', (1381, 1390), True, 'import numpy as np\n'), ((830, 844), 'torch.zeros', 'torch.zeros', (['(2)'], {}), '(2)\n', (841, 844), False, 'import torch\n'), ((333, 345), 'torch.eye', 'torch.eye', (['(2)'], {}), '(2)\n', (342, 345), False, 'import torch\n'), ((494, 506), 'torch.eye', 'torch.eye', (['(2)'], {}), '(2)\n', (503, 506), False, 'import torch\n'), ((960, 972), 'torch.eye', 'torch.eye', (['(2)'], {}), '(2)\n', (969, 972), False, 'import torch\n')] |
"""
The input widgets generally allow entering arbitrary information into
a text field or similar.
"""
from __future__ import absolute_import, division, unicode_literals
import ast
from base64 import b64decode, b64encode
from datetime import datetime
from six import string_types
import param
from bokeh.models.widgets import (
CheckboxGroup as _BkCheckboxGroup, ColorPicker as _BkColorPicker,
DatePicker as _BkDatePicker, Div as _BkDiv, TextInput as _BkTextInput,
Spinner as _BkSpinner)
from ..models import FileInput as _BkFileInput
from ..util import as_unicode
from .base import Widget
class TextInput(Widget):
value = param.String(default='', allow_None=True)
placeholder = param.String(default='')
_widget_type = _BkTextInput
class FileInput(Widget):
mime_type = param.String(default=None)
value = param.Parameter(default=None)
_widget_type = _BkFileInput
_rename = {'name': None, 'mime_type': None}
def _process_param_change(self, msg):
msg = super(FileInput, self)._process_param_change(msg)
if 'value' in msg:
if self.mime_type:
template = 'data:{mime};base64,{data}'
data = b64encode(msg['value'])
msg['value'] = template.format(data=data.decode('utf-8'),
mime=self.mime_type)
else:
msg['value'] = ''
return msg
def _process_property_change(self, msg):
msg = super(FileInput, self)._process_property_change(msg)
if 'value' in msg:
header, content = msg['value'].split(",", 1)
msg['mime_type'] = header.split(':')[1].split(';')[0]
msg['value'] = b64decode(content)
return msg
def save(self, filename):
"""
Saves the uploaded FileInput data to a file or BytesIO object.
Arguments
---------
filename (str): File path or file-like object
"""
if isinstance(filename, string_types):
with open(filename, 'wb') as f:
f.write(self.value)
else:
filename.write(self.value)
class StaticText(Widget):
style = param.Dict(default=None, doc="""
Dictionary of CSS property:value pairs to apply to this Div.""")
value = param.Parameter(default=None)
_widget_type = _BkDiv
_format = '<b>{title}</b>: {value}'
_rename = {'name': 'title', 'value': 'text'}
def _process_param_change(self, msg):
msg = super(StaticText, self)._process_property_change(msg)
msg.pop('title', None)
if 'value' in msg:
text = as_unicode(msg.pop('value'))
if self.name:
text = self._format.format(title=self.name, value=text)
msg['text'] = text
return msg
class DatePicker(Widget):
value = param.Date(default=None)
start = param.Date(default=None)
end = param.Date(default=None)
_widget_type = _BkDatePicker
_rename = {'start': 'min_date', 'end': 'max_date', 'name': 'title'}
def _process_property_change(self, msg):
msg = super(DatePicker, self)._process_property_change(msg)
if 'value' in msg:
msg['value'] = datetime.strptime(msg['value'][4:], '%b %d %Y')
return msg
class ColorPicker(Widget):
value = param.Color(default=None, doc="""
The selected color""")
_widget_type = _BkColorPicker
_rename = {'value': 'color', 'name': 'title'}
class Spinner(Widget):
start = param.Number(default=None, doc="""
Optional minimum allowable value""")
end = param.Number(default=None, doc="""
Optional maximum allowable value""")
value = param.Number(default=0, doc="""
The initial value of the spinner""")
step = param.Number(default=1, doc="""
The step added or subtracted to the current value""")
_widget_type = _BkSpinner
_rename = {'name': 'title', 'start': 'low', 'end': 'high'}
class LiteralInput(Widget):
"""
LiteralInput allows declaring Python literals using a text
input widget. Optionally a type may be declared.
"""
type = param.ClassSelector(default=None, class_=(type, tuple),
is_instance=True)
value = param.Parameter(default=None)
_widget_type = _BkTextInput
def __init__(self, **params):
super(LiteralInput, self).__init__(**params)
self._state = ''
self._validate(None)
self.param.watch(self._validate, 'value')
def _validate(self, event):
if self.type is None: return
new = self.value
if not isinstance(new, self.type):
if event:
self.value = event.old
types = repr(self.type) if isinstance(self.type, tuple) else self.type.__name__
raise ValueError('LiteralInput expected %s type but value %s '
'is of type %s.' %
(types, new, type(new).__name__))
def _process_property_change(self, msg):
msg = super(LiteralInput, self)._process_property_change(msg)
new_state = ''
if 'value' in msg:
value = msg.pop('value')
try:
value = ast.literal_eval(value)
except:
new_state = ' (invalid)'
value = self.value
else:
if self.type and not isinstance(value, self.type):
new_state = ' (wrong type)'
value = self.value
msg['value'] = value
msg['name'] = msg.get('title', self.name).replace(self._state, '') + new_state
self._state = new_state
self.param.trigger('name')
return msg
def _process_param_change(self, msg):
msg = super(LiteralInput, self)._process_param_change(msg)
msg.pop('type', None)
if 'value' in msg:
msg['value'] = '' if msg['value'] is None else as_unicode(msg['value'])
msg['title'] = self.name
return msg
class DatetimeInput(LiteralInput):
"""
DatetimeInput allows declaring Python literals using a text
input widget. Optionally a type may be declared.
"""
format = param.String(default='%Y-%m-%d %H:%M:%S', doc="""
Datetime format used for parsing and formatting the datetime.""")
value = param.Date(default=None)
start = param.Date(default=None)
end = param.Date(default=None)
type = datetime
def __init__(self, **params):
super(DatetimeInput, self).__init__(**params)
self.param.watch(self._validate, 'value')
self._validate(None)
def _validate(self, event):
new = self.value
if new is not None and ((self.start is not None and self.start > new) or
(self.end is not None and self.end < new)):
value = datetime.strftime(new, self.format)
start = datetime.strftime(self.start, self.format)
end = datetime.strftime(self.end, self.format)
if event:
self.value = event.old
raise ValueError('DatetimeInput value must be between {start} and {end}, '
'supplied value is {value}'.format(start=start, end=end,
value=value))
def _process_property_change(self, msg):
msg = Widget._process_property_change(self, msg)
new_state = ''
if 'value' in msg:
value = msg.pop('value')
try:
value = datetime.strptime(value, self.format)
except:
new_state = ' (invalid)'
value = self.value
else:
if value is not None and ((self.start is not None and self.start > value) or
(self.end is not None and self.end < value)):
new_state = ' (out of bounds)'
value = self.value
msg['value'] = value
msg['name'] = msg.get('title', self.name).replace(self._state, '') + new_state
self._state = new_state
return msg
def _process_param_change(self, msg):
msg = {k: v for k, v in msg.items() if k not in ('type', 'format', 'start', 'end')}
if 'value' in msg:
value = msg['value']
if value is None:
value = ''
else:
value = datetime.strftime(msg['value'], self.format)
msg['value'] = value
msg['title'] = self.name
return msg
class Checkbox(Widget):
value = param.Boolean(default=False)
_supports_embed = True
_widget_type = _BkCheckboxGroup
def _process_property_change(self, msg):
msg = super(Checkbox, self)._process_property_change(msg)
if 'active' in msg:
msg['value'] = 0 in msg.pop('active')
return msg
def _process_param_change(self, msg):
msg = super(Checkbox, self)._process_param_change(msg)
if 'value' in msg:
msg['active'] = [0] if msg.pop('value', None) else []
if 'title' in msg:
msg['labels'] = [msg.pop('title')]
return msg
def _get_embed_state(self, root, max_opts=3):
return (self, self._models[root.ref['id']][0], [False, True],
lambda x: 0 in x.active, 'active', 'cb_obj.active.indexOf(0) >= 0')
| [
"param.ClassSelector",
"param.Number",
"param.Dict",
"datetime.datetime.strptime",
"param.Date",
"param.Boolean",
"datetime.datetime.strftime",
"base64.b64decode",
"base64.b64encode",
"param.Parameter",
"ast.literal_eval",
"param.String",
"param.Color"
] | [((647, 688), 'param.String', 'param.String', ([], {'default': '""""""', 'allow_None': '(True)'}), "(default='', allow_None=True)\n", (659, 688), False, 'import param\n'), ((708, 732), 'param.String', 'param.String', ([], {'default': '""""""'}), "(default='')\n", (720, 732), False, 'import param\n'), ((810, 836), 'param.String', 'param.String', ([], {'default': 'None'}), '(default=None)\n', (822, 836), False, 'import param\n'), ((850, 879), 'param.Parameter', 'param.Parameter', ([], {'default': 'None'}), '(default=None)\n', (865, 879), False, 'import param\n'), ((2208, 2323), 'param.Dict', 'param.Dict', ([], {'default': 'None', 'doc': '"""\n Dictionary of CSS property:value pairs to apply to this Div."""'}), '(default=None, doc=\n """\n Dictionary of CSS property:value pairs to apply to this Div."""\n )\n', (2218, 2323), False, 'import param\n'), ((2327, 2356), 'param.Parameter', 'param.Parameter', ([], {'default': 'None'}), '(default=None)\n', (2342, 2356), False, 'import param\n'), ((2881, 2905), 'param.Date', 'param.Date', ([], {'default': 'None'}), '(default=None)\n', (2891, 2905), False, 'import param\n'), ((2919, 2943), 'param.Date', 'param.Date', ([], {'default': 'None'}), '(default=None)\n', (2929, 2943), False, 'import param\n'), ((2955, 2979), 'param.Date', 'param.Date', ([], {'default': 'None'}), '(default=None)\n', (2965, 2979), False, 'import param\n'), ((3364, 3428), 'param.Color', 'param.Color', ([], {'default': 'None', 'doc': '"""\n The selected color"""'}), '(default=None, doc="""\n The selected color""")\n', (3375, 3428), False, 'import param\n'), ((3553, 3632), 'param.Number', 'param.Number', ([], {'default': 'None', 'doc': '"""\n Optional minimum allowable value"""'}), '(default=None, doc="""\n Optional minimum allowable value""")\n', (3565, 3632), False, 'import param\n'), ((3644, 3723), 'param.Number', 'param.Number', ([], {'default': 'None', 'doc': '"""\n Optional maximum allowable value"""'}), '(default=None, doc="""\n Optional maximum 
allowable value""")\n', (3656, 3723), False, 'import param\n'), ((3737, 3813), 'param.Number', 'param.Number', ([], {'default': '(0)', 'doc': '"""\n The initial value of the spinner"""'}), '(default=0, doc="""\n The initial value of the spinner""")\n', (3749, 3813), False, 'import param\n'), ((3826, 3924), 'param.Number', 'param.Number', ([], {'default': '(1)', 'doc': '"""\n The step added or subtracted to the current value"""'}), '(default=1, doc=\n """\n The step added or subtracted to the current value""")\n', (3838, 3924), False, 'import param\n'), ((4189, 4262), 'param.ClassSelector', 'param.ClassSelector', ([], {'default': 'None', 'class_': '(type, tuple)', 'is_instance': '(True)'}), '(default=None, class_=(type, tuple), is_instance=True)\n', (4208, 4262), False, 'import param\n'), ((4307, 4336), 'param.Parameter', 'param.Parameter', ([], {'default': 'None'}), '(default=None)\n', (4322, 4336), False, 'import param\n'), ((6268, 6401), 'param.String', 'param.String', ([], {'default': '"""%Y-%m-%d %H:%M:%S"""', 'doc': '"""\n Datetime format used for parsing and formatting the datetime."""'}), '(default=\'%Y-%m-%d %H:%M:%S\', doc=\n """\n Datetime format used for parsing and formatting the datetime."""\n )\n', (6280, 6401), False, 'import param\n'), ((6405, 6429), 'param.Date', 'param.Date', ([], {'default': 'None'}), '(default=None)\n', (6415, 6429), False, 'import param\n'), ((6443, 6467), 'param.Date', 'param.Date', ([], {'default': 'None'}), '(default=None)\n', (6453, 6467), False, 'import param\n'), ((6479, 6503), 'param.Date', 'param.Date', ([], {'default': 'None'}), '(default=None)\n', (6489, 6503), False, 'import param\n'), ((8686, 8714), 'param.Boolean', 'param.Boolean', ([], {'default': '(False)'}), '(default=False)\n', (8699, 8714), False, 'import param\n'), ((1732, 1750), 'base64.b64decode', 'b64decode', (['content'], {}), '(content)\n', (1741, 1750), False, 'from base64 import b64decode, b64encode\n'), ((3255, 3302), 'datetime.datetime.strptime', 
'datetime.strptime', (["msg['value'][4:]", '"""%b %d %Y"""'], {}), "(msg['value'][4:], '%b %d %Y')\n", (3272, 3302), False, 'from datetime import datetime\n'), ((6928, 6963), 'datetime.datetime.strftime', 'datetime.strftime', (['new', 'self.format'], {}), '(new, self.format)\n', (6945, 6963), False, 'from datetime import datetime\n'), ((6984, 7026), 'datetime.datetime.strftime', 'datetime.strftime', (['self.start', 'self.format'], {}), '(self.start, self.format)\n', (7001, 7026), False, 'from datetime import datetime\n'), ((7045, 7085), 'datetime.datetime.strftime', 'datetime.strftime', (['self.end', 'self.format'], {}), '(self.end, self.format)\n', (7062, 7085), False, 'from datetime import datetime\n'), ((1205, 1228), 'base64.b64encode', 'b64encode', (["msg['value']"], {}), "(msg['value'])\n", (1214, 1228), False, 'from base64 import b64decode, b64encode\n'), ((5283, 5306), 'ast.literal_eval', 'ast.literal_eval', (['value'], {}), '(value)\n', (5299, 5306), False, 'import ast\n'), ((7629, 7666), 'datetime.datetime.strptime', 'datetime.strptime', (['value', 'self.format'], {}), '(value, self.format)\n', (7646, 7666), False, 'from datetime import datetime\n'), ((8517, 8561), 'datetime.datetime.strftime', 'datetime.strftime', (["msg['value']", 'self.format'], {}), "(msg['value'], self.format)\n", (8534, 8561), False, 'from datetime import datetime\n')] |
# -*- coding=utf-8 -*-
import urllib2
import json
import logger
import traceback
def send(apiUrl,data,method=None):
logger.debug("调用内部系统[%s],data[%r]",apiUrl,data)
try:
data_json = json.dumps(data)
headers = {'Content-Type': 'application/json'} # 设置数据为json格式,很重要
request = urllib2.Request(url=apiUrl, headers=headers, data=data_json)
if method is not None:
request.get_method = method
response = urllib2.urlopen(request)
result = {'code':response.getcode(),'content':response.read()}
logger.debug("调用[%s]返回结果:%r",apiUrl,result)
return result
except Exception as e:
#traceback.print_stack()
logger.exception(e,"调用内部系统[%s],data[%r],发生错误[%r]", apiUrl, data,e)
return None
if __name__ == "__main__":
logger.init_4_debug()
| [
"urllib2.urlopen",
"logger.init_4_debug",
"json.dumps",
"urllib2.Request",
"logger.exception",
"logger.debug"
] | [((121, 170), 'logger.debug', 'logger.debug', (['"""调用内部系统[%s],data[%r]"""', 'apiUrl', 'data'], {}), "('调用内部系统[%s],data[%r]', apiUrl, data)\n", (133, 170), False, 'import logger\n'), ((815, 836), 'logger.init_4_debug', 'logger.init_4_debug', ([], {}), '()\n', (834, 836), False, 'import logger\n'), ((198, 214), 'json.dumps', 'json.dumps', (['data'], {}), '(data)\n', (208, 214), False, 'import json\n'), ((306, 366), 'urllib2.Request', 'urllib2.Request', ([], {'url': 'apiUrl', 'headers': 'headers', 'data': 'data_json'}), '(url=apiUrl, headers=headers, data=data_json)\n', (321, 366), False, 'import urllib2\n'), ((458, 482), 'urllib2.urlopen', 'urllib2.urlopen', (['request'], {}), '(request)\n', (473, 482), False, 'import urllib2\n'), ((562, 607), 'logger.debug', 'logger.debug', (['"""调用[%s]返回结果:%r"""', 'apiUrl', 'result'], {}), "('调用[%s]返回结果:%r', apiUrl, result)\n", (574, 607), False, 'import logger\n'), ((696, 764), 'logger.exception', 'logger.exception', (['e', '"""调用内部系统[%s],data[%r],发生错误[%r]"""', 'apiUrl', 'data', 'e'], {}), "(e, '调用内部系统[%s],data[%r],发生错误[%r]', apiUrl, data, e)\n", (712, 764), False, 'import logger\n')] |
from sklearn.datasets import load_wine
from sklearn.neighbors import KNeighborsClassifier
from sklearn.ensemble import RandomForestClassifier
from sklearn.linear_model import LogisticRegression
wine = load_wine(as_frame=True)
X, y = wine.data, wine.target
X_train, X_test, y_train, y_test = train_test_split(
X, y, random_state=0, stratify=y
)
knn = KNeighborsClassifier()
rfc = RandomForestClassifier()
lr = LogisticRegression()
knn.fit(X_train, y_train)
rfc.fit(X_train, y_train)
lr.fit(X_train, y_train)
print("kn train: ", knn.score(X_train, y_train))
print("rf train: ", rfc.score(X_train, y_train))
print("lr train: ", lr.score(X_train, y_train))
print("kn test: ", knn.score(X_test, y_test))
print("rf test: ", rfc.score(X_test, y_test))
print("lr test: ", lr.score(X_test, y_test))
| [
"sklearn.neighbors.KNeighborsClassifier",
"sklearn.datasets.load_wine",
"sklearn.ensemble.RandomForestClassifier",
"sklearn.linear_model.LogisticRegression"
] | [((202, 226), 'sklearn.datasets.load_wine', 'load_wine', ([], {'as_frame': '(True)'}), '(as_frame=True)\n', (211, 226), False, 'from sklearn.datasets import load_wine\n'), ((358, 380), 'sklearn.neighbors.KNeighborsClassifier', 'KNeighborsClassifier', ([], {}), '()\n', (378, 380), False, 'from sklearn.neighbors import KNeighborsClassifier\n'), ((387, 411), 'sklearn.ensemble.RandomForestClassifier', 'RandomForestClassifier', ([], {}), '()\n', (409, 411), False, 'from sklearn.ensemble import RandomForestClassifier\n'), ((417, 437), 'sklearn.linear_model.LogisticRegression', 'LogisticRegression', ([], {}), '()\n', (435, 437), False, 'from sklearn.linear_model import LogisticRegression\n')] |
# Generated by Django 3.0.3 on 2020-02-25 18:50
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
import uuid
class Migration(migrations.Migration):
initial = True
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
]
operations = [
migrations.CreateModel(
name='UserData',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('notifyInvoice', models.BooleanField(default=True)),
('notifyNews', models.BooleanField(default=True)),
('notifyFeature', models.BooleanField(default=True)),
('avatar', models.URLField(blank=True, default='', max_length=100)),
('city', models.CharField(blank=True, default='', max_length=100)),
('country', models.CharField(blank=True, default='', max_length=100)),
('created', models.DateTimeField(auto_now_add=True)),
('user', models.ForeignKey(default='1', on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
],
options={
'verbose_name_plural': 'UserData',
},
),
migrations.CreateModel(
name='Project',
fields=[
('uuid', models.UUIDField(default=uuid.uuid4, editable=False, primary_key=True, serialize=False)),
('name', models.CharField(blank=True, default='', max_length=100)),
('preview', models.URLField(blank=True, default='', max_length=100)),
('classes', models.CharField(blank=True, default='fa fa-picture-o gjs-block gjs-one-bg gjs-four-color-h', max_length=100)),
('domain', models.URLField(blank=True, default='', max_length=100)),
('published', models.BooleanField(default=False)),
('lastPublished', models.DateTimeField(auto_now_add=True)),
('user', models.ForeignKey(default='1', on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
],
),
migrations.CreateModel(
name='Page',
fields=[
('uuid', models.UUIDField(default=uuid.uuid4, editable=False, primary_key=True, serialize=False)),
('name', models.CharField(blank=True, default='', max_length=100)),
('thumbnail', models.URLField(blank=True, default='', max_length=100)),
('favicon', models.URLField(blank=True, default='', max_length=100)),
('webclip', models.URLField(blank=True, default='', max_length=100)),
('html', models.TextField()),
('css', models.TextField()),
('js', models.TextField()),
('components', models.TextField()),
('style', models.TextField()),
('metaTitle', models.CharField(blank=True, default='', max_length=100)),
('metaDesc', models.CharField(blank=True, default='', max_length=100)),
('created', models.DateTimeField(auto_now_add=True)),
('lastSaved', models.DateTimeField(auto_now_add=True)),
('project', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='main.Project')),
],
options={
'ordering': ['created'],
},
),
migrations.CreateModel(
name='Order',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('plan', models.CharField(choices=[('HO', 'Hobbyist'), ('DV', 'Developer'), ('ET', 'Enterprise')], default='HO', max_length=2)),
('amt', models.FloatField()),
('active', models.BooleanField(default=False)),
('created', models.DateTimeField(auto_now_add=True)),
('expires', models.DateTimeField()),
('invoiceUrl', models.URLField(blank=True, default='', max_length=100)),
('user', models.ForeignKey(default='1', on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
],
options={
'ordering': ['created'],
},
),
migrations.CreateModel(
name='Logic',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(blank=True, default='', max_length=100)),
('category', models.CharField(blank=True, default='Extra', max_length=100)),
('description', models.TextField()),
('js', models.TextField()),
('created', models.DateTimeField(auto_now_add=True)),
('project', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='main.Project')),
],
options={
'ordering': ['created'],
},
),
migrations.CreateModel(
name='Block',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(blank=True, default='', max_length=100)),
('category', models.CharField(blank=True, default='Extra', max_length=100)),
('description', models.TextField()),
('html', models.TextField()),
('css', models.TextField()),
('preview', models.URLField(blank=True, default='', max_length=100)),
('classes', models.CharField(blank=True, default='gjs-fonts gjs-f-b1 gjs-block gjs-one-bg gjs-four-color-h', max_length=100)),
('created', models.DateTimeField(auto_now_add=True)),
('project', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='main.Project')),
],
options={
'ordering': ['created'],
},
),
migrations.CreateModel(
name='Asset',
fields=[
('uuid', models.UUIDField(default=uuid.uuid4, editable=False, primary_key=True, serialize=False)),
('filename', models.CharField(blank=True, default='', max_length=100)),
('type', models.CharField(choices=[('IMG', 'Image'), ('SVG', 'SVG'), ('VID', 'Video')], default='IMG', max_length=3)),
('url', models.URLField(blank=True, default='', max_length=100)),
('size', models.IntegerField()),
('added', models.DateTimeField(auto_now_add=True)),
('user', models.ForeignKey(default='1', on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
],
options={
'ordering': ['added'],
},
),
]
| [
"django.db.models.UUIDField",
"django.db.models.FloatField",
"django.db.models.TextField",
"django.db.models.ForeignKey",
"django.db.models.IntegerField",
"django.db.models.DateTimeField",
"django.db.models.BooleanField",
"django.db.models.AutoField",
"django.db.models.URLField",
"django.db.migrat... | [((259, 316), 'django.db.migrations.swappable_dependency', 'migrations.swappable_dependency', (['settings.AUTH_USER_MODEL'], {}), '(settings.AUTH_USER_MODEL)\n', (290, 316), False, 'from django.db import migrations, models\n'), ((449, 542), 'django.db.models.AutoField', 'models.AutoField', ([], {'auto_created': '(True)', 'primary_key': '(True)', 'serialize': '(False)', 'verbose_name': '"""ID"""'}), "(auto_created=True, primary_key=True, serialize=False,\n verbose_name='ID')\n", (465, 542), False, 'from django.db import migrations, models\n'), ((575, 608), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'default': '(True)'}), '(default=True)\n', (594, 608), False, 'from django.db import migrations, models\n'), ((642, 675), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'default': '(True)'}), '(default=True)\n', (661, 675), False, 'from django.db import migrations, models\n'), ((712, 745), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'default': '(True)'}), '(default=True)\n', (731, 745), False, 'from django.db import migrations, models\n'), ((775, 830), 'django.db.models.URLField', 'models.URLField', ([], {'blank': '(True)', 'default': '""""""', 'max_length': '(100)'}), "(blank=True, default='', max_length=100)\n", (790, 830), False, 'from django.db import migrations, models\n'), ((858, 914), 'django.db.models.CharField', 'models.CharField', ([], {'blank': '(True)', 'default': '""""""', 'max_length': '(100)'}), "(blank=True, default='', max_length=100)\n", (874, 914), False, 'from django.db import migrations, models\n'), ((945, 1001), 'django.db.models.CharField', 'models.CharField', ([], {'blank': '(True)', 'default': '""""""', 'max_length': '(100)'}), "(blank=True, default='', max_length=100)\n", (961, 1001), False, 'from django.db import migrations, models\n'), ((1032, 1071), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'auto_now_add': '(True)'}), '(auto_now_add=True)\n', 
(1052, 1071), False, 'from django.db import migrations, models\n'), ((1099, 1207), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'default': '"""1"""', 'on_delete': 'django.db.models.deletion.CASCADE', 'to': 'settings.AUTH_USER_MODEL'}), "(default='1', on_delete=django.db.models.deletion.CASCADE,\n to=settings.AUTH_USER_MODEL)\n", (1116, 1207), False, 'from django.db import migrations, models\n'), ((1426, 1517), 'django.db.models.UUIDField', 'models.UUIDField', ([], {'default': 'uuid.uuid4', 'editable': '(False)', 'primary_key': '(True)', 'serialize': '(False)'}), '(default=uuid.uuid4, editable=False, primary_key=True,\n serialize=False)\n', (1442, 1517), False, 'from django.db import migrations, models\n'), ((1541, 1597), 'django.db.models.CharField', 'models.CharField', ([], {'blank': '(True)', 'default': '""""""', 'max_length': '(100)'}), "(blank=True, default='', max_length=100)\n", (1557, 1597), False, 'from django.db import migrations, models\n'), ((1628, 1683), 'django.db.models.URLField', 'models.URLField', ([], {'blank': '(True)', 'default': '""""""', 'max_length': '(100)'}), "(blank=True, default='', max_length=100)\n", (1643, 1683), False, 'from django.db import migrations, models\n'), ((1714, 1828), 'django.db.models.CharField', 'models.CharField', ([], {'blank': '(True)', 'default': '"""fa fa-picture-o gjs-block gjs-one-bg gjs-four-color-h"""', 'max_length': '(100)'}), "(blank=True, default=\n 'fa fa-picture-o gjs-block gjs-one-bg gjs-four-color-h', max_length=100)\n", (1730, 1828), False, 'from django.db import migrations, models\n'), ((1853, 1908), 'django.db.models.URLField', 'models.URLField', ([], {'blank': '(True)', 'default': '""""""', 'max_length': '(100)'}), "(blank=True, default='', max_length=100)\n", (1868, 1908), False, 'from django.db import migrations, models\n'), ((1941, 1975), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'default': '(False)'}), '(default=False)\n', (1960, 1975), False, 'from django.db import 
migrations, models\n'), ((2012, 2051), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'auto_now_add': '(True)'}), '(auto_now_add=True)\n', (2032, 2051), False, 'from django.db import migrations, models\n'), ((2079, 2187), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'default': '"""1"""', 'on_delete': 'django.db.models.deletion.CASCADE', 'to': 'settings.AUTH_USER_MODEL'}), "(default='1', on_delete=django.db.models.deletion.CASCADE,\n to=settings.AUTH_USER_MODEL)\n", (2096, 2187), False, 'from django.db import migrations, models\n'), ((2315, 2406), 'django.db.models.UUIDField', 'models.UUIDField', ([], {'default': 'uuid.uuid4', 'editable': '(False)', 'primary_key': '(True)', 'serialize': '(False)'}), '(default=uuid.uuid4, editable=False, primary_key=True,\n serialize=False)\n', (2331, 2406), False, 'from django.db import migrations, models\n'), ((2430, 2486), 'django.db.models.CharField', 'models.CharField', ([], {'blank': '(True)', 'default': '""""""', 'max_length': '(100)'}), "(blank=True, default='', max_length=100)\n", (2446, 2486), False, 'from django.db import migrations, models\n'), ((2519, 2574), 'django.db.models.URLField', 'models.URLField', ([], {'blank': '(True)', 'default': '""""""', 'max_length': '(100)'}), "(blank=True, default='', max_length=100)\n", (2534, 2574), False, 'from django.db import migrations, models\n'), ((2605, 2660), 'django.db.models.URLField', 'models.URLField', ([], {'blank': '(True)', 'default': '""""""', 'max_length': '(100)'}), "(blank=True, default='', max_length=100)\n", (2620, 2660), False, 'from django.db import migrations, models\n'), ((2691, 2746), 'django.db.models.URLField', 'models.URLField', ([], {'blank': '(True)', 'default': '""""""', 'max_length': '(100)'}), "(blank=True, default='', max_length=100)\n", (2706, 2746), False, 'from django.db import migrations, models\n'), ((2774, 2792), 'django.db.models.TextField', 'models.TextField', ([], {}), '()\n', (2790, 2792), False, 'from django.db 
import migrations, models\n'), ((2819, 2837), 'django.db.models.TextField', 'models.TextField', ([], {}), '()\n', (2835, 2837), False, 'from django.db import migrations, models\n'), ((2863, 2881), 'django.db.models.TextField', 'models.TextField', ([], {}), '()\n', (2879, 2881), False, 'from django.db import migrations, models\n'), ((2915, 2933), 'django.db.models.TextField', 'models.TextField', ([], {}), '()\n', (2931, 2933), False, 'from django.db import migrations, models\n'), ((2962, 2980), 'django.db.models.TextField', 'models.TextField', ([], {}), '()\n', (2978, 2980), False, 'from django.db import migrations, models\n'), ((3013, 3069), 'django.db.models.CharField', 'models.CharField', ([], {'blank': '(True)', 'default': '""""""', 'max_length': '(100)'}), "(blank=True, default='', max_length=100)\n", (3029, 3069), False, 'from django.db import migrations, models\n'), ((3101, 3157), 'django.db.models.CharField', 'models.CharField', ([], {'blank': '(True)', 'default': '""""""', 'max_length': '(100)'}), "(blank=True, default='', max_length=100)\n", (3117, 3157), False, 'from django.db import migrations, models\n'), ((3188, 3227), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'auto_now_add': '(True)'}), '(auto_now_add=True)\n', (3208, 3227), False, 'from django.db import migrations, models\n'), ((3260, 3299), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'auto_now_add': '(True)'}), '(auto_now_add=True)\n', (3280, 3299), False, 'from django.db import migrations, models\n'), ((3330, 3416), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'on_delete': 'django.db.models.deletion.CASCADE', 'to': '"""main.Project"""'}), "(on_delete=django.db.models.deletion.CASCADE, to=\n 'main.Project')\n", (3347, 3416), False, 'from django.db import migrations, models\n'), ((3620, 3713), 'django.db.models.AutoField', 'models.AutoField', ([], {'auto_created': '(True)', 'primary_key': '(True)', 'serialize': '(False)', 'verbose_name': 
'"""ID"""'}), "(auto_created=True, primary_key=True, serialize=False,\n verbose_name='ID')\n", (3636, 3713), False, 'from django.db import migrations, models\n'), ((3737, 3858), 'django.db.models.CharField', 'models.CharField', ([], {'choices': "[('HO', 'Hobbyist'), ('DV', 'Developer'), ('ET', 'Enterprise')]", 'default': '"""HO"""', 'max_length': '(2)'}), "(choices=[('HO', 'Hobbyist'), ('DV', 'Developer'), ('ET',\n 'Enterprise')], default='HO', max_length=2)\n", (3753, 3858), False, 'from django.db import migrations, models\n'), ((3881, 3900), 'django.db.models.FloatField', 'models.FloatField', ([], {}), '()\n', (3898, 3900), False, 'from django.db import migrations, models\n'), ((3930, 3964), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'default': '(False)'}), '(default=False)\n', (3949, 3964), False, 'from django.db import migrations, models\n'), ((3995, 4034), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'auto_now_add': '(True)'}), '(auto_now_add=True)\n', (4015, 4034), False, 'from django.db import migrations, models\n'), ((4065, 4087), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {}), '()\n', (4085, 4087), False, 'from django.db import migrations, models\n'), ((4121, 4176), 'django.db.models.URLField', 'models.URLField', ([], {'blank': '(True)', 'default': '""""""', 'max_length': '(100)'}), "(blank=True, default='', max_length=100)\n", (4136, 4176), False, 'from django.db import migrations, models\n'), ((4204, 4312), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'default': '"""1"""', 'on_delete': 'django.db.models.deletion.CASCADE', 'to': 'settings.AUTH_USER_MODEL'}), "(default='1', on_delete=django.db.models.deletion.CASCADE,\n to=settings.AUTH_USER_MODEL)\n", (4221, 4312), False, 'from django.db import migrations, models\n'), ((4517, 4610), 'django.db.models.AutoField', 'models.AutoField', ([], {'auto_created': '(True)', 'primary_key': '(True)', 'serialize': '(False)', 'verbose_name': 
'"""ID"""'}), "(auto_created=True, primary_key=True, serialize=False,\n verbose_name='ID')\n", (4533, 4610), False, 'from django.db import migrations, models\n'), ((4634, 4690), 'django.db.models.CharField', 'models.CharField', ([], {'blank': '(True)', 'default': '""""""', 'max_length': '(100)'}), "(blank=True, default='', max_length=100)\n", (4650, 4690), False, 'from django.db import migrations, models\n'), ((4722, 4783), 'django.db.models.CharField', 'models.CharField', ([], {'blank': '(True)', 'default': '"""Extra"""', 'max_length': '(100)'}), "(blank=True, default='Extra', max_length=100)\n", (4738, 4783), False, 'from django.db import migrations, models\n'), ((4818, 4836), 'django.db.models.TextField', 'models.TextField', ([], {}), '()\n', (4834, 4836), False, 'from django.db import migrations, models\n'), ((4862, 4880), 'django.db.models.TextField', 'models.TextField', ([], {}), '()\n', (4878, 4880), False, 'from django.db import migrations, models\n'), ((4911, 4950), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'auto_now_add': '(True)'}), '(auto_now_add=True)\n', (4931, 4950), False, 'from django.db import migrations, models\n'), ((4981, 5067), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'on_delete': 'django.db.models.deletion.CASCADE', 'to': '"""main.Project"""'}), "(on_delete=django.db.models.deletion.CASCADE, to=\n 'main.Project')\n", (4998, 5067), False, 'from django.db import migrations, models\n'), ((5271, 5364), 'django.db.models.AutoField', 'models.AutoField', ([], {'auto_created': '(True)', 'primary_key': '(True)', 'serialize': '(False)', 'verbose_name': '"""ID"""'}), "(auto_created=True, primary_key=True, serialize=False,\n verbose_name='ID')\n", (5287, 5364), False, 'from django.db import migrations, models\n'), ((5388, 5444), 'django.db.models.CharField', 'models.CharField', ([], {'blank': '(True)', 'default': '""""""', 'max_length': '(100)'}), "(blank=True, default='', max_length=100)\n", (5404, 5444), False, 
'from django.db import migrations, models\n'), ((5476, 5537), 'django.db.models.CharField', 'models.CharField', ([], {'blank': '(True)', 'default': '"""Extra"""', 'max_length': '(100)'}), "(blank=True, default='Extra', max_length=100)\n", (5492, 5537), False, 'from django.db import migrations, models\n'), ((5572, 5590), 'django.db.models.TextField', 'models.TextField', ([], {}), '()\n', (5588, 5590), False, 'from django.db import migrations, models\n'), ((5618, 5636), 'django.db.models.TextField', 'models.TextField', ([], {}), '()\n', (5634, 5636), False, 'from django.db import migrations, models\n'), ((5663, 5681), 'django.db.models.TextField', 'models.TextField', ([], {}), '()\n', (5679, 5681), False, 'from django.db import migrations, models\n'), ((5712, 5767), 'django.db.models.URLField', 'models.URLField', ([], {'blank': '(True)', 'default': '""""""', 'max_length': '(100)'}), "(blank=True, default='', max_length=100)\n", (5727, 5767), False, 'from django.db import migrations, models\n'), ((5798, 5915), 'django.db.models.CharField', 'models.CharField', ([], {'blank': '(True)', 'default': '"""gjs-fonts gjs-f-b1 gjs-block gjs-one-bg gjs-four-color-h"""', 'max_length': '(100)'}), "(blank=True, default=\n 'gjs-fonts gjs-f-b1 gjs-block gjs-one-bg gjs-four-color-h', max_length=100)\n", (5814, 5915), False, 'from django.db import migrations, models\n'), ((5941, 5980), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'auto_now_add': '(True)'}), '(auto_now_add=True)\n', (5961, 5980), False, 'from django.db import migrations, models\n'), ((6011, 6097), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'on_delete': 'django.db.models.deletion.CASCADE', 'to': '"""main.Project"""'}), "(on_delete=django.db.models.deletion.CASCADE, to=\n 'main.Project')\n", (6028, 6097), False, 'from django.db import migrations, models\n'), ((6303, 6394), 'django.db.models.UUIDField', 'models.UUIDField', ([], {'default': 'uuid.uuid4', 'editable': '(False)', 
'primary_key': '(True)', 'serialize': '(False)'}), '(default=uuid.uuid4, editable=False, primary_key=True,\n serialize=False)\n', (6319, 6394), False, 'from django.db import migrations, models\n'), ((6422, 6478), 'django.db.models.CharField', 'models.CharField', ([], {'blank': '(True)', 'default': '""""""', 'max_length': '(100)'}), "(blank=True, default='', max_length=100)\n", (6438, 6478), False, 'from django.db import migrations, models\n'), ((6506, 6618), 'django.db.models.CharField', 'models.CharField', ([], {'choices': "[('IMG', 'Image'), ('SVG', 'SVG'), ('VID', 'Video')]", 'default': '"""IMG"""', 'max_length': '(3)'}), "(choices=[('IMG', 'Image'), ('SVG', 'SVG'), ('VID', 'Video'\n )], default='IMG', max_length=3)\n", (6522, 6618), False, 'from django.db import migrations, models\n'), ((6640, 6695), 'django.db.models.URLField', 'models.URLField', ([], {'blank': '(True)', 'default': '""""""', 'max_length': '(100)'}), "(blank=True, default='', max_length=100)\n", (6655, 6695), False, 'from django.db import migrations, models\n'), ((6723, 6744), 'django.db.models.IntegerField', 'models.IntegerField', ([], {}), '()\n', (6742, 6744), False, 'from django.db import migrations, models\n'), ((6773, 6812), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'auto_now_add': '(True)'}), '(auto_now_add=True)\n', (6793, 6812), False, 'from django.db import migrations, models\n'), ((6840, 6948), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'default': '"""1"""', 'on_delete': 'django.db.models.deletion.CASCADE', 'to': 'settings.AUTH_USER_MODEL'}), "(default='1', on_delete=django.db.models.deletion.CASCADE,\n to=settings.AUTH_USER_MODEL)\n", (6857, 6948), False, 'from django.db import migrations, models\n')] |
"""
Summary:
- Command-line Interface (CLI) Utilities Module
- Python3
Module Functions:
- convert_strtime_datetime:
Convert human-readable datetime string into a datetime object for
conducting time operations.
- convert_timedelta:
Convert a datetime duration object into human-readable components
(weeks, days, hours, etc).
- convert_dt_time:
Convert datetime objects to human-readable string output with Formatting
"""
import datetime
import inspect
import logging
from pyaws import __version__
# Module-level logger; named after the package version string imported above.
logger = logging.getLogger(__version__)
logger.setLevel(logging.INFO)
def convert_strtime_datetime(dt_str):
    """ Converts datetime isoformat string to datetime (dt) object

    Args:
        :dt_str (str): input string in '2017-12-30T18:48:00.353Z' form
            or similar; the fractional-second part and trailing 'Z' are
            optional

    Returns:
        TYPE: datetime object
    """
    base, _, fraction = dt_str.partition(".")
    # Tolerate inputs with no fractional part ('...T18:48:00Z'): the 'Z'
    # would otherwise break strptime, and int("") would raise ValueError.
    base = base.rstrip("Z")
    dt = datetime.datetime.strptime(base, "%Y-%m-%dT%H:%M:%S")
    # '.353' means 353 *milliseconds*, not 353 microseconds: right-pad the
    # digit string to 6 places (and truncate anything finer than 1 us).
    digits = fraction.rstrip("Z")[:6].ljust(6, "0") if fraction else "0"
    return dt + datetime.timedelta(microseconds=int(digits, 10))
def convert_timedelta(duration):
    """
    Summary:
        Break a duration down into its component time units
    Args:
        :duration (datetime.timedelta): time duration to convert
    Returns:
        days, hours, minutes, seconds | TYPE: tuple (integers)
    """
    try:
        days = duration.days
        # duration.seconds is always in [0, 86400); split it into h/m/s.
        hours, remainder = divmod(duration.seconds, 3600)
        minutes, seconds = divmod(remainder, 60)
    except Exception:
        logger.exception(
            f'{inspect.stack()[0][3]}: Input must be datetime.timedelta object'
        )
        return 0, 0, 0, 0
    return days, hours, minutes, seconds
def convert_dt_time(duration, return_iter=False):
    """
    Summary:
        Render a timedelta duration as human readable text
    Args:
        :duration (datetime.timedelta): time duration to convert
        :return_iter (tuple): when True, return the raw time components
    Returns:
        days, hours, minutes, seconds | TYPE: tuple (integers), OR
        human readable, notated units | TYPE: string
    """
    try:
        days, hours, minutes, seconds = convert_timedelta(duration)
        if return_iter:
            return days, hours, minutes, seconds
        # Render only the two most significant units.
        if days > 0:
            format_string = (
                f"{days} day{'s' if days != 1 else ''}, "
                f"{hours} hour{'s' if hours != 1 else ''}"
            )
        elif hours > 1:
            format_string = (
                f"{hours} hour{'s' if hours != 1 else ''}, "
                f"{minutes} minute{'s' if minutes != 1 else ''}"
            )
        else:
            format_string = (
                f"{minutes} minute{'s' if minutes != 1 else ''}, "
                f"{seconds} sec{'s' if seconds != 1 else ''}"
            )
    except AttributeError as e:
        logger.exception(
            '%s: Type mismatch when converting timedelta objects (Code: %s)' %
            (inspect.stack()[0][3], str(e)))
        raise e
    except Exception as e:
        logger.exception(
            '%s: Unknown error when converting datetime objects (Code: %s)' %
            (inspect.stack()[0][3], str(e)))
        raise e
    return format_string
| [
"logging.getLogger",
"datetime.timedelta",
"datetime.datetime.strptime",
"inspect.stack"
] | [((568, 598), 'logging.getLogger', 'logging.getLogger', (['__version__'], {}), '(__version__)\n', (585, 598), False, 'import logging\n'), ((937, 988), 'datetime.datetime.strptime', 'datetime.datetime.strptime', (['dt', '"""%Y-%m-%dT%H:%M:%S"""'], {}), "(dt, '%Y-%m-%dT%H:%M:%S')\n", (963, 988), False, 'import datetime\n'), ((1038, 1073), 'datetime.timedelta', 'datetime.timedelta', ([], {'microseconds': 'us'}), '(microseconds=us)\n', (1056, 1073), False, 'import datetime\n'), ((1578, 1593), 'inspect.stack', 'inspect.stack', ([], {}), '()\n', (1591, 1593), False, 'import inspect\n'), ((3030, 3045), 'inspect.stack', 'inspect.stack', ([], {}), '()\n', (3043, 3045), False, 'import inspect\n'), ((3222, 3237), 'inspect.stack', 'inspect.stack', ([], {}), '()\n', (3235, 3237), False, 'import inspect\n')] |
import json
def translate_name(name):
    """Look up *name* in the name_translator.json mapping in the working
    directory and return its translation.

    Raises KeyError if the name is not present in the mapping.
    """
    with open("name_translator.json", "r") as handle:
        translations = json.load(handle)
    return translations[name]
| [
"json.load"
] | [((107, 122), 'json.load', 'json.load', (['file'], {}), '(file)\n', (116, 122), False, 'import json\n')] |
#!/usr/bin/env python
import sys
import collections
def unique_words(tokens):
    """Return, sorted, the words whose anagram signature occurs only once.

    Tokens are consumed up to (and excluding) a literal "#" terminator.
    Two words count as anagrams when they are equal after lowercasing and
    sorting their characters.
    """
    signature_counts = collections.Counter()
    seen = []
    for word in tokens:
        if word == "#":
            break
        signature = "".join(sorted(word.lower()))
        signature_counts[signature] += 1
        seen.append((word, signature))
    # A word with a signature count below 2 has no anagram partner.
    return sorted(word for word, signature in seen if signature_counts[signature] < 2)


def main():
    """Read whitespace-separated words from stdin and print the non-anagrams."""
    # Flatten all lines into a single token stream to simplify processing.
    tokens = [token for line in sys.stdin.readlines() for token in line.split()]
    for word in unique_words(tokens):
        print(word)


# Guarding the stdin read lets the module be imported without blocking.
if __name__ == "__main__":
    main()
"collections.Counter",
"sys.stdin.readlines"
] | [((219, 240), 'collections.Counter', 'collections.Counter', ([], {}), '()\n', (238, 240), False, 'import collections\n'), ((83, 104), 'sys.stdin.readlines', 'sys.stdin.readlines', ([], {}), '()\n', (102, 104), False, 'import sys\n')] |
import torch
from viz.visualizer import Visualizer
from modules.deepmil import Attention
from msidata.dataset_msi_features_with_patients import PreProcessedMSIFeatureDataset
from testing.logistic_regression import get_precomputed_dataloader
import argparse
from experiment import ex
from utils import post_config_hook
@ex.automain
def main(_run, _log):
    """Visualize attention for the first test patient with a DeepMIL model."""
    config = argparse.Namespace(**_run.config)
    config = post_config_hook(config, _run)
    config.device = torch.device("cuda:0" if torch.cuda.is_available() else "cpu")

    # Test split holding the precomputed feature bags to visualize.
    _, test_loader = get_precomputed_dataloader(config, config.use_precomputed_features_id)

    # Attention-based MIL model used to score the patches.
    model = Attention()

    visualizer = Visualizer()
    visualizer.visualize_first_patient(test_loader, model, method='deepmil')
    print('done')


# NOTE(review): @ex.automain already runs `main` when the module is executed
# as a script, so this explicit guard looks redundant — confirm before removing.
if __name__ == "__main__":
    main()
"utils.post_config_hook",
"modules.deepmil.Attention",
"torch.cuda.is_available",
"argparse.Namespace",
"viz.visualizer.Visualizer",
"testing.logistic_regression.get_precomputed_dataloader"
] | [((372, 405), 'argparse.Namespace', 'argparse.Namespace', ([], {}), '(**_run.config)\n', (390, 405), False, 'import argparse\n'), ((417, 445), 'utils.post_config_hook', 'post_config_hook', (['args', '_run'], {}), '(args, _run)\n', (433, 445), False, 'from utils import post_config_hook\n'), ((586, 652), 'testing.logistic_regression.get_precomputed_dataloader', 'get_precomputed_dataloader', (['args', 'args.use_precomputed_features_id'], {}), '(args, args.use_precomputed_features_id)\n', (612, 652), False, 'from testing.logistic_regression import get_precomputed_dataloader\n'), ((694, 705), 'modules.deepmil.Attention', 'Attention', ([], {}), '()\n', (703, 705), False, 'from modules.deepmil import Attention\n'), ((768, 780), 'viz.visualizer.Visualizer', 'Visualizer', ([], {}), '()\n', (778, 780), False, 'from viz.visualizer import Visualizer\n'), ((489, 514), 'torch.cuda.is_available', 'torch.cuda.is_available', ([], {}), '()\n', (512, 514), False, 'import torch\n')] |
import socket
def main():
    """Receive a single file over TCP from a sender listening on port 8080.

    Prompts for the sender's host and a local filename, then streams the
    incoming bytes to disk until the sender closes the connection.
    """
    host = input("Please enter the host address of the sender: ")
    port = 8080
    # Context managers guarantee the socket and file are closed even on error.
    with socket.socket() as s:
        s.connect((host, port))
        print(f"[+] CONNECTED TO {host}:{port}")
        filename = input("Please enter filename for the incoming file: ")
        with open(filename, 'wb') as file:
            # Loop until EOF (recv returns b'' when the peer closes): a
            # single recv(1024) would truncate any file larger than 1 KiB.
            while True:
                chunk = s.recv(4096)
                if not chunk:
                    break
                file.write(chunk)
    print(f"[+] FILE SAVED SUCCESSFULLY: {filename}")


if __name__ == "__main__":
    main()
| [
"socket.socket"
] | [((19, 34), 'socket.socket', 'socket.socket', ([], {}), '()\n', (32, 34), False, 'import socket\n')] |
from django.contrib import admin
from .models import Book, Favorite
# Expose the bookmarking models in the Django admin site.
for model in (Book, Favorite):
    admin.site.register(model)
"django.contrib.admin.site.register"
] | [((71, 96), 'django.contrib.admin.site.register', 'admin.site.register', (['Book'], {}), '(Book)\n', (90, 96), False, 'from django.contrib import admin\n'), ((97, 126), 'django.contrib.admin.site.register', 'admin.site.register', (['Favorite'], {}), '(Favorite)\n', (116, 126), False, 'from django.contrib import admin\n')] |
from django.core import exceptions
from django.http import FileResponse
from django.utils.text import format_lazy
from django.utils.translation import gettext_lazy as _
from rest_framework import status
from rest_framework.decorators import action
from rest_framework.exceptions import ValidationError
from rest_framework.parsers import MultiPartParser
from rest_framework.permissions import IsAuthenticated
from rest_framework.response import Response
from shared.audit_log.viewsets import AuditLoggingModelViewSet
from shared.oidc.auth import EAuthRestAuthentication
from applications.api.v1.auth import StaffAuthentication
from applications.api.v1.permissions import (
ALLOWED_APPLICATION_UPDATE_STATUSES,
ALLOWED_APPLICATION_VIEW_STATUSES,
ApplicationPermission,
get_user_company,
StaffPermission,
SummerVoucherPermission,
)
from applications.api.v1.serializers import (
ApplicationSerializer,
AttachmentSerializer,
SummerVoucherSerializer,
)
from applications.enums import ApplicationStatus
from applications.models import Application, SummerVoucher
class ApplicationViewSet(AuditLoggingModelViewSet):
    """CRUD endpoints for a company user's own applications.

    Only applications in viewable statuses that belong to the requesting
    user and their company are exposed; at most one DRAFT application may
    exist per user & company pair.
    """

    queryset = Application.objects.all()
    serializer_class = ApplicationSerializer
    permission_classes = [IsAuthenticated, ApplicationPermission]

    def get_queryset(self):
        """Restrict the base queryset to the requester's viewable applications."""
        base = (
            super()
            .get_queryset()
            .select_related("company")
            .prefetch_related("summer_vouchers")
        )
        requester = self.request.user
        if requester.is_anonymous:
            return base.none()
        return base.filter(
            company=get_user_company(self.request),
            user=requester,
            status__in=ALLOWED_APPLICATION_VIEW_STATUSES,
        )

    def create(self, request, *args, **kwargs):
        """Create an application, enforcing a single DRAFT per user & company."""
        draft_exists = (
            self.get_queryset().filter(status=ApplicationStatus.DRAFT).exists()
        )
        if draft_exists:
            raise ValidationError("Company & user can have only one draft application")
        return super().create(request, *args, **kwargs)

    def update(self, request, *args, **kwargs):
        """Update an application; only DRAFT status applications may change."""
        if self.get_object().status not in ALLOWED_APPLICATION_UPDATE_STATUSES:
            raise ValidationError("Only DRAFT applications can be updated")
        return super().update(request, *args, **kwargs)

    def destroy(self, request, *args, **kwargs):
        """Deletion of applications is not allowed."""
        return Response(status=status.HTTP_405_METHOD_NOT_ALLOWED)
class SummerVoucherViewSet(AuditLoggingModelViewSet):
    """API for summer vouchers and their file attachments.

    Plain CRUD on vouchers is disabled (405); vouchers are only read/written
    through the attachment sub-routes below. Non-staff users can only reach
    vouchers of their own company's applications in viewable statuses.
    """
    queryset = SummerVoucher.objects.all()
    serializer_class = SummerVoucherSerializer
    # Two auth paths: regular end users and staff — TODO confirm staff tokens.
    authentication_classes = [EAuthRestAuthentication, StaffAuthentication]
    permission_classes = [IsAuthenticated, SummerVoucherPermission | StaffPermission]
    def get_queryset(self):
        """
        Fetch summer vouchers of DRAFT status applications of the user & company.
        """
        queryset = (
            super()
            .get_queryset()
            .select_related("application")
            .prefetch_related("attachments")
        )
        user = self.request.user
        if user.is_staff:
            # Staff see every voucher, regardless of company or status.
            return queryset
        elif user.is_anonymous:
            return queryset.none()
        user_company = get_user_company(self.request)
        return queryset.filter(
            application__company=user_company,
            application__user=user,
            application__status__in=ALLOWED_APPLICATION_VIEW_STATUSES,
        )
    # Standard viewset verbs are all explicitly disabled with 405 responses.
    def create(self, request, *args, **kwargs):
        return Response(status=status.HTTP_405_METHOD_NOT_ALLOWED)
    def update(self, request, *args, **kwargs):
        return Response(status=status.HTTP_405_METHOD_NOT_ALLOWED)
    def retrieve(self, request, *args, **kwargs):
        return Response(status=status.HTTP_405_METHOD_NOT_ALLOWED)
    def list(self, request, *args, **kwargs):
        return Response(status=status.HTTP_405_METHOD_NOT_ALLOWED)
    def destroy(self, request, *args, **kwargs):
        return Response(status=status.HTTP_405_METHOD_NOT_ALLOWED)
    # POST /<pk>/attachments — multipart upload of one file per request.
    @action(
        methods=("POST",),
        detail=True,
        url_path="attachments",
        parser_classes=(MultiPartParser,),
    )
    def post_attachment(self, request, *args, **kwargs):
        """
        Upload a single file as attachment
        """
        obj = self.get_object()
        if obj.application.status not in ALLOWED_APPLICATION_UPDATE_STATUSES:
            raise ValidationError(
                "Attachments can be uploaded only for DRAFT applications"
            )
        # Validate request data
        # content_type is taken from the uploaded file itself so the
        # serializer can reject disallowed MIME types.
        serializer = AttachmentSerializer(
            data={
                "summer_voucher": obj.id,
                "attachment_file": request.data["attachment_file"],
                "content_type": request.data["attachment_file"].content_type,
                "attachment_type": request.data["attachment_type"],
            }
        )
        serializer.is_valid(raise_exception=True)
        serializer.save()
        return Response(serializer.data, status=status.HTTP_201_CREATED)
    # GET/DELETE /<pk>/attachments/<attachment_pk> — download or remove one file.
    @action(
        methods=(
            "GET",
            "DELETE",
        ),
        detail=True,
        url_path="attachments/(?P<attachment_pk>[^/.]+)",
    )
    def handle_attachment(self, request, attachment_pk, *args, **kwargs):
        """Serve (GET) or delete (DELETE) a single attachment of this voucher."""
        obj = self.get_object()
        if request.method == "GET":
            """
            Read a single attachment as file
            """
            attachment = obj.attachments.filter(pk=attachment_pk).first()
            if not attachment or not attachment.attachment_file:
                return Response(
                    {
                        "detail": format_lazy(
                            _("File not found."),
                        )
                    },
                    status=status.HTTP_404_NOT_FOUND,
                )
            return FileResponse(attachment.attachment_file)
        elif request.method == "DELETE":
            """
            Delete a single attachment as file
            """
            if obj.application.status not in ALLOWED_APPLICATION_UPDATE_STATUSES:
                raise ValidationError(
                    "Attachments can be deleted only for DRAFT applications"
                )
            # Second, serializer-defined status gate; returns 403 instead of
            # raising — NOTE(review): looks redundant with the check above,
            # confirm whether the two status sets actually differ.
            if (
                obj.application.status
                not in AttachmentSerializer.ATTACHMENT_MODIFICATION_ALLOWED_STATUSES
            ):
                return Response(
                    {"detail": _("Operation not allowed for this application status.")},
                    status=status.HTTP_403_FORBIDDEN,
                )
            try:
                instance = obj.attachments.get(id=attachment_pk)
            except exceptions.ObjectDoesNotExist:
                return Response(
                    {"detail": _("File not found.")}, status=status.HTTP_404_NOT_FOUND
                )
            instance.delete()
            return Response(status=status.HTTP_204_NO_CONTENT)
| [
"applications.models.SummerVoucher.objects.all",
"applications.api.v1.serializers.AttachmentSerializer",
"django.utils.translation.gettext_lazy",
"rest_framework.decorators.action",
"applications.models.Application.objects.all",
"rest_framework.response.Response",
"django.http.FileResponse",
"rest_fra... | [((1161, 1186), 'applications.models.Application.objects.all', 'Application.objects.all', ([], {}), '()\n', (1184, 1186), False, 'from applications.models import Application, SummerVoucher\n'), ((2943, 2970), 'applications.models.SummerVoucher.objects.all', 'SummerVoucher.objects.all', ([], {}), '()\n', (2968, 2970), False, 'from applications.models import Application, SummerVoucher\n'), ((4476, 4577), 'rest_framework.decorators.action', 'action', ([], {'methods': "('POST',)", 'detail': '(True)', 'url_path': '"""attachments"""', 'parser_classes': '(MultiPartParser,)'}), "(methods=('POST',), detail=True, url_path='attachments',\n parser_classes=(MultiPartParser,))\n", (4482, 4577), False, 'from rest_framework.decorators import action\n'), ((5501, 5602), 'rest_framework.decorators.action', 'action', ([], {'methods': "('GET', 'DELETE')", 'detail': '(True)', 'url_path': '"""attachments/(?P<attachment_pk>[^/.]+)"""'}), "(methods=('GET', 'DELETE'), detail=True, url_path=\n 'attachments/(?P<attachment_pk>[^/.]+)')\n", (5507, 5602), False, 'from rest_framework.decorators import action\n'), ((1839, 1869), 'applications.api.v1.permissions.get_user_company', 'get_user_company', (['self.request'], {}), '(self.request)\n', (1855, 1869), False, 'from applications.api.v1.permissions import ALLOWED_APPLICATION_UPDATE_STATUSES, ALLOWED_APPLICATION_VIEW_STATUSES, ApplicationPermission, get_user_company, StaffPermission, SummerVoucherPermission\n'), ((2820, 2871), 'rest_framework.response.Response', 'Response', ([], {'status': 'status.HTTP_405_METHOD_NOT_ALLOWED'}), '(status=status.HTTP_405_METHOD_NOT_ALLOWED)\n', (2828, 2871), False, 'from rest_framework.response import Response\n'), ((3661, 3691), 'applications.api.v1.permissions.get_user_company', 'get_user_company', (['self.request'], {}), '(self.request)\n', (3677, 3691), False, 'from applications.api.v1.permissions import ALLOWED_APPLICATION_UPDATE_STATUSES, ALLOWED_APPLICATION_VIEW_STATUSES, 
ApplicationPermission, get_user_company, StaffPermission, SummerVoucherPermission\n'), ((3953, 4004), 'rest_framework.response.Response', 'Response', ([], {'status': 'status.HTTP_405_METHOD_NOT_ALLOWED'}), '(status=status.HTTP_405_METHOD_NOT_ALLOWED)\n', (3961, 4004), False, 'from rest_framework.response import Response\n'), ((4069, 4120), 'rest_framework.response.Response', 'Response', ([], {'status': 'status.HTTP_405_METHOD_NOT_ALLOWED'}), '(status=status.HTTP_405_METHOD_NOT_ALLOWED)\n', (4077, 4120), False, 'from rest_framework.response import Response\n'), ((4187, 4238), 'rest_framework.response.Response', 'Response', ([], {'status': 'status.HTTP_405_METHOD_NOT_ALLOWED'}), '(status=status.HTTP_405_METHOD_NOT_ALLOWED)\n', (4195, 4238), False, 'from rest_framework.response import Response\n'), ((4301, 4352), 'rest_framework.response.Response', 'Response', ([], {'status': 'status.HTTP_405_METHOD_NOT_ALLOWED'}), '(status=status.HTTP_405_METHOD_NOT_ALLOWED)\n', (4309, 4352), False, 'from rest_framework.response import Response\n'), ((4418, 4469), 'rest_framework.response.Response', 'Response', ([], {'status': 'status.HTTP_405_METHOD_NOT_ALLOWED'}), '(status=status.HTTP_405_METHOD_NOT_ALLOWED)\n', (4426, 4469), False, 'from rest_framework.response import Response\n'), ((5025, 5258), 'applications.api.v1.serializers.AttachmentSerializer', 'AttachmentSerializer', ([], {'data': "{'summer_voucher': obj.id, 'attachment_file': request.data[\n 'attachment_file'], 'content_type': request.data['attachment_file'].\n content_type, 'attachment_type': request.data['attachment_type']}"}), "(data={'summer_voucher': obj.id, 'attachment_file':\n request.data['attachment_file'], 'content_type': request.data[\n 'attachment_file'].content_type, 'attachment_type': request.data[\n 'attachment_type']})\n", (5045, 5258), False, 'from applications.api.v1.serializers import ApplicationSerializer, AttachmentSerializer, SummerVoucherSerializer\n'), ((5437, 5494), 
'rest_framework.response.Response', 'Response', (['serializer.data'], {'status': 'status.HTTP_201_CREATED'}), '(serializer.data, status=status.HTTP_201_CREATED)\n', (5445, 5494), False, 'from rest_framework.response import Response\n'), ((2260, 2329), 'rest_framework.exceptions.ValidationError', 'ValidationError', (['"""Company & user can have only one draft application"""'], {}), "('Company & user can have only one draft application')\n", (2275, 2329), False, 'from rest_framework.exceptions import ValidationError\n'), ((2641, 2698), 'rest_framework.exceptions.ValidationError', 'ValidationError', (['"""Only DRAFT applications can be updated"""'], {}), "('Only DRAFT applications can be updated')\n", (2656, 2698), False, 'from rest_framework.exceptions import ValidationError\n'), ((4866, 4940), 'rest_framework.exceptions.ValidationError', 'ValidationError', (['"""Attachments can be uploaded only for DRAFT applications"""'], {}), "('Attachments can be uploaded only for DRAFT applications')\n", (4881, 4940), False, 'from rest_framework.exceptions import ValidationError\n'), ((6315, 6355), 'django.http.FileResponse', 'FileResponse', (['attachment.attachment_file'], {}), '(attachment.attachment_file)\n', (6327, 6355), False, 'from django.http import FileResponse\n'), ((7363, 7406), 'rest_framework.response.Response', 'Response', ([], {'status': 'status.HTTP_204_NO_CONTENT'}), '(status=status.HTTP_204_NO_CONTENT)\n', (7371, 7406), False, 'from rest_framework.response import Response\n'), ((6581, 6654), 'rest_framework.exceptions.ValidationError', 'ValidationError', (['"""Attachments can be deleted only for DRAFT applications"""'], {}), "('Attachments can be deleted only for DRAFT applications')\n", (6596, 6654), False, 'from rest_framework.exceptions import ValidationError\n'), ((6153, 6173), 'django.utils.translation.gettext_lazy', '_', (['"""File not found."""'], {}), "('File not found.')\n", (6154, 6173), True, 'from django.utils.translation import gettext_lazy as 
_\n'), ((6914, 6969), 'django.utils.translation.gettext_lazy', '_', (['"""Operation not allowed for this application status."""'], {}), "('Operation not allowed for this application status.')\n", (6915, 6969), True, 'from django.utils.translation import gettext_lazy as _\n'), ((7240, 7260), 'django.utils.translation.gettext_lazy', '_', (['"""File not found."""'], {}), "('File not found.')\n", (7241, 7260), True, 'from django.utils.translation import gettext_lazy as _\n')] |
import json
import requests
import time
from discord_webhook import DiscordWebhook, DiscordEmbed
# SECURITY NOTE(review): this webhook URL embeds a live Discord secret token
# in source control — rotate it and load it from an environment variable.
webhook_url = 'https://discordapp.com/api/webhooks/672159508675690497/4UtaClAc7rKMJsEvbR4iYf-Razv4M3ZWtkYDOxBzLfiDzJhI7RSFpoLn6iijBiRcaNOR'
# Module-level webhook object (unused here — obtainBasicInfo() builds its own).
webhook = DiscordWebhook(webhook_url)
# Style code of the shoe to search for on GOAT.
pid = '508214-660'
# Browser-like headers so the Algolia search request mimics goat.com traffic.
headers = {
    'Connection': 'keep-alive',
    'accept': 'application/json',
    'Origin': 'https://www.goat.com',
    'User-Agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_14_6) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/79.0.3945.88 Safari/537.36',
    'content-type': 'application/x-www-form-urlencoded',
    'Sec-Fetch-Site': 'cross-site',
    'Sec-Fetch-Mode': 'cors',
    'Referer': 'https://www.goat.com/search?query='+ pid,
    'Accept-Encoding': 'gzip, deflate, br',
    'Accept-Language': 'en-US,en;q=0.9',
}
# Algolia application id / public search key scraped from goat.com.
params = {
    'x-algolia-agent': 'Algolia for vanilla JavaScript 3.25.1',
    'x-algolia-application-id': '2FWOTDVM2O',
    'x-algolia-api-key': 'ac96de6fef0e02bb95d433d8d5c7038a',
}
# Algolia search payload: look up the style code among shoe products.
data = {
    "distinct": 'true',
    'facetFilters': 'product_category: shoes',
    'facets': 'size',
    'hitsPerPage': '48',
    'numericFilters': '[]',
    'page': '0',
    'query': pid,
    'clickAnalytics': "true"
}
response = requests.post('https://2fwotdvm2o-dsn.algolia.net/1/indexes/product_variants_v2/query', headers=headers, params=params,json=data)
response_json = response.json()
# Take the first search hit; assumes the style code matches at least one
# product — TODO confirm, otherwise this raises IndexError.
response_json_dict = response_json['hits'][0]
product_id = response_json_dict['product_template_id']
print(product_id)
def obtainBasicInfo():
    """Fetch all variants for the product and post one Discord embed per variant.

    Reads the module-level ``product_id``, ``headers``, ``response_json_dict``
    and ``webhook_url``. For each variant (size/condition combination) the
    size, shoe condition, box condition and lowest price are printed to stdout
    and sent to the configured Discord webhook.
    """
    webhook = DiscordWebhook(url=webhook_url)
    # Variant listing endpoint: one entry per size/condition combination.
    r_api = requests.get('https://www.goat.com/web-api/v1/product_variants?productTemplateId='+ str(product_id),headers=headers)
    data = r_api.json()
    embed = DiscordEmbed(title=response_json_dict['name'], url=headers['Referer'], color=242424)
    embed.set_thumbnail(url=response_json_dict['main_picture_url'])
    # Accumulators kept only for the summary print at the end.
    sizes = []
    shoe_conditions = []
    box_conditions = []
    prices = []
    for i in data:
        sizes.append(str(i['size']))
        shoe_conditions.append(i['shoeCondition'])
        box_conditions.append(i['boxCondition'])
        # Prices come back in US cents; convert to dollars.
        prices.append(str(int(i['lowestPriceCents']['amountUsdCents'])/100))
        print(' Size: ' + str(i['size']) + '\n' + ' Shoe condition: ' + i['shoeCondition'] + '\n' + ' Box condition: ' + i['boxCondition'] + '\n' + ' $' + str(int(i['lowestPriceCents']['amountUsdCents'])/100) + '\n' + '-----------------')
        embed.add_embed_field(name='Size', value=(str(i['size'])))
        embed.add_embed_field(name='Shoe Condition', value=str(i['shoeCondition']))
        embed.add_embed_field(name='Box Condition', value=str(i['boxCondition']))
        embed.add_embed_field(name='Price', value='$' + str(int(i['lowestPriceCents']['amountUsdCents'])/100))
        webhook.add_embed(embed)
        send_hook = webhook.execute()
        # Throttle so Discord's webhook rate limit is not hit.
        time.sleep(2)
        # Clear the fields so the next iteration's embed starts fresh.
        embed.fields = []
    print(sizes)
    print(shoe_conditions)
    print(box_conditions)
    print(prices)
obtainBasicInfo()
| [
"discord_webhook.DiscordEmbed",
"requests.post",
"discord_webhook.DiscordWebhook",
"time.sleep"
] | [((248, 275), 'discord_webhook.DiscordWebhook', 'DiscordWebhook', (['webhook_url'], {}), '(webhook_url)\n', (262, 275), False, 'from discord_webhook import DiscordWebhook, DiscordEmbed\n'), ((1239, 1378), 'requests.post', 'requests.post', (['"""https://2fwotdvm2o-dsn.algolia.net/1/indexes/product_variants_v2/query"""'], {'headers': 'headers', 'params': 'params', 'json': 'data'}), "(\n 'https://2fwotdvm2o-dsn.algolia.net/1/indexes/product_variants_v2/query',\n headers=headers, params=params, json=data)\n", (1252, 1378), False, 'import requests\n'), ((1558, 1589), 'discord_webhook.DiscordWebhook', 'DiscordWebhook', ([], {'url': 'webhook_url'}), '(url=webhook_url)\n', (1572, 1589), False, 'from discord_webhook import DiscordWebhook, DiscordEmbed\n'), ((1755, 1843), 'discord_webhook.DiscordEmbed', 'DiscordEmbed', ([], {'title': "response_json_dict['name']", 'url': "headers['Referer']", 'color': '(242424)'}), "(title=response_json_dict['name'], url=headers['Referer'],\n color=242424)\n", (1767, 1843), False, 'from discord_webhook import DiscordWebhook, DiscordEmbed\n'), ((2883, 2896), 'time.sleep', 'time.sleep', (['(2)'], {}), '(2)\n', (2893, 2896), False, 'import time\n')] |
"""Unit tests for the pytai application.
License:
MIT License
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
"""
import unittest
import xml.etree.ElementTree as ET
from typing import Union, Callable
from unittest.mock import patch, MagicMock
from pathlib import Path
try:
from .. import application
from .xml_utils import *
except ImportError:
if __name__ == "__main__":
import sys
sys.exit(f'This script needs to be run from the root folder:\n'
f'python -m pytai.tests.{Path(sys.argv[0]).stem}\n'
f'python -m unittest pytai.tests.{Path(sys.argv[0]).stem}')
else:
raise
class MockView(MagicMock):
    """Test double for the application's View.

    Records the tree the application builds as a plain
    ``xml.etree.ElementTree`` structure and executes scheduled/worker
    callbacks synchronously so tests need no event loop.
    """
    def __init__(self, *args, **kwargs):
        super().__init__()
    def add_tree_item(self, parent_handle: Union[ET.Element, str], **kwargs) -> ET.ElementTree:
        """Record a tree node; an empty parent handle starts a fresh root."""
        if parent_handle == "":
            self.root = ET.Element("root")
            return self.root
        attrs = {key: str(value) for key, value in kwargs.items()}
        return ET.SubElement(parent_handle, "node", **attrs)
    def schedule_function(self, time_ms: int, callback: Callable[[], None]) -> None:
        # Ignore the delay and invoke immediately.
        callback()
    def start_worker(self, callback: Callable[[], bool]) -> None:
        # Drain the worker synchronously until it stops asking to reschedule.
        while callback():
            pass
class TestOffsets(unittest.TestCase):
    """End-to-end checks that parsing a sample file of each supported format
    produces the expected offset tree (compared against a golden XML file)."""
    @classmethod
    def setUpClass(cls):
        # Scratch directory for the actual-output XML dumps.
        cls.tmp_path = Path(__file__).resolve().parent / "tmp"
        cls.tmp_path.mkdir(parents=True, exist_ok=True)
    @staticmethod
    def get_resource_path(file_name: str):
        """Return the absolute path of a file under the tests' resources dir."""
        return Path(__file__).resolve().parent / "resources" / file_name
    def generic_test(self, file_type):
        """Parse ``resources/<type>.<type>`` and diff the resulting tree
        against the golden ``resources/<type>.xml``."""
        path = self.get_resource_path(f"{file_type}.{file_type}")
        format = {"kaitai_format": file_type}
        # Swap the real GUI view for MockView, which records the tree as XML.
        with patch(__name__ + '.application.v.View', MockView()):
            app = application.Application(file = path, format = format)
        # Dump the actual output for manual inspection on failure.
        with open(self.tmp_path / "actual_output.xml", "w") as o:
            o.write(xml_to_str(app.view.root))
        expected_xml = xml_from_file(self.get_resource_path(f"{file_type}.xml"))
        try:
            xml_compare(app.view.root, expected_xml)
        except RuntimeError as e:
            # Re-raise the comparison mismatch as a test failure message.
            self.fail(str(e))
    def test_png(self):
        self.generic_test("png")
    def test_bmp(self):
        self.generic_test("bmp")
    def test_zip(self):
        self.generic_test("zip")
    def test_elf(self):
        self.generic_test("elf")
    def test_wav(self):
        self.generic_test("wav")
if __name__ == "__main__":
unittest.main() | [
"unittest.main",
"xml.etree.ElementTree.Element",
"xml.etree.ElementTree.SubElement",
"pathlib.Path"
] | [((3883, 3898), 'unittest.main', 'unittest.main', ([], {}), '()\n', (3896, 3898), False, 'import unittest\n'), ((2221, 2262), 'xml.etree.ElementTree.SubElement', 'ET.SubElement', (['parent_handle', '"""node"""'], {}), "(parent_handle, 'node', **d)\n", (2234, 2262), True, 'import xml.etree.ElementTree as ET\n'), ((2102, 2120), 'xml.etree.ElementTree.Element', 'ET.Element', (['"""root"""'], {}), "('root')\n", (2112, 2120), True, 'import xml.etree.ElementTree as ET\n'), ((2640, 2654), 'pathlib.Path', 'Path', (['__file__'], {}), '(__file__)\n', (2644, 2654), False, 'from pathlib import Path\n'), ((1599, 1616), 'pathlib.Path', 'Path', (['sys.argv[0]'], {}), '(sys.argv[0])\n', (1603, 1616), False, 'from pathlib import Path\n'), ((1679, 1696), 'pathlib.Path', 'Path', (['sys.argv[0]'], {}), '(sys.argv[0])\n', (1683, 1696), False, 'from pathlib import Path\n'), ((2818, 2832), 'pathlib.Path', 'Path', (['__file__'], {}), '(__file__)\n', (2822, 2832), False, 'from pathlib import Path\n')] |
# Copyright (c) 2019 Science and Technology Facilities Council
# All rights reserved.
# Modifications made as part of the fparser project are distributed
# under the following license:
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
# 1. Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# 3. Neither the name of the copyright holder nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
'''Test Fortran Include Statement: This file tests the parsing of an
include statement. Whilst include is not part of the standard Fortran
rules (the include should include code as the code is being parsed)
there are cases where users might like to keep the include statement
in the Fortran parse tree and output it again.
'''
import pytest
from fparser.api import get_reader
from fparser.two.Fortran2003 import Include_Stmt, InternalError
from fparser.two.utils import NoMatchError
def test_include_stmt(f2003_create):
    '''Check that a basic include statement is parsed
    correctly. Input separately as a string and as a reader object

    '''
    def check_include(reader):
        '''Internal helper function to avoid code replication.'''
        ast = Include_Stmt(reader)
        # str() gives the re-generated Fortran; repr() the parse-tree shape.
        assert "INCLUDE 'my-non-existant-file.inc'" in str(ast)
        # .replace() normalises Python 2 unicode-literal prefixes in repr.
        assert repr(ast).replace("u'", "'") == \
            ("Include_Stmt(Include_Filename("
             "'my-non-existant-file.inc'))")
    line = "include 'my-non-existant-file.inc'"
    # Exercise both accepted input kinds: a raw string and a reader object.
    check_include(line)
    reader = get_reader(line)
    check_include(reader)
def test_spaces(f2003_create):
    '''Surrounding whitespace — before the keyword, between keyword and
    filename, and trailing — must not prevent a match.

    '''
    source = "  include  'my-non-existant-file.inc'  "
    result = Include_Stmt(source)
    assert "INCLUDE 'my-non-existant-file.inc'" in str(result)
def test_no_space(f2003_create):
    '''The keyword may butt directly against the quoted filename; no
    separating space is required.

    '''
    source = "include'my-non-existant-file.inc'"
    result = Include_Stmt(source)
    assert "INCLUDE 'my-non-existant-file.inc'" in str(result)
def test_case(f2003_create):
    '''The include keyword is matched case-insensitively.'''
    source = "InClUdE 'my-non-existant-file.inc'"
    result = Include_Stmt(source)
    assert "INCLUDE 'my-non-existant-file.inc'" in str(result)
def test_double_quotes(f2003_create):
    '''The filename may be delimited with double quotes as well as single.'''
    source = 'include "my-non-existant-file.inc"'
    result = Include_Stmt(source)
    assert "INCLUDE 'my-non-existant-file.inc'" in str(result)
def test_errors(f2003_create):
    '''Check that syntax errors produce a NoMatchError exception.'''
    # Each entry is malformed in a different way: empty/whitespace input,
    # truncated keyword, missing/unterminated/mismatched quotes, or
    # trailing/leading junk around an otherwise valid statement.
    for line in [None, "", " ", "includ", "includ 'x'", "include",
                 "include ''", "include \"x'", "include 'x\"", "include 'xxx",
                 "include \"xxx", "include xxx'", "include xxx\"",
                 "include x'x'", "include 'x'x", "x include 'x'"]:
        with pytest.raises(NoMatchError) as excinfo:
            _ = Include_Stmt(line)
        assert "Include_Stmt: '{0}'".format(line) in str(excinfo.value)
def test_include_filename_error(f2003_create, monkeypatch):
    '''Check that we raise an InternalError if a return from
    Include_Filename is None or an empty string. This should never
    happen as any matching errors would cause this class to raise an
    exception.

    '''
    # Force Include_Filename to misbehave (return None) so the defensive
    # check inside Include_Stmt is exercised.
    monkeypatch.setattr("fparser.two.Fortran2003.Include_Filename",
                        lambda file_name: None)
    line = "include ' '"
    with pytest.raises(InternalError) as excinfo:
        _ = Include_Stmt(line)
    assert ("Include_Filename should never return None or an empty "
            "name") in str(excinfo.value)
| [
"fparser.api.get_reader",
"pytest.raises",
"fparser.two.Fortran2003.Include_Stmt"
] | [((2737, 2753), 'fparser.api.get_reader', 'get_reader', (['line'], {}), '(line)\n', (2747, 2753), False, 'from fparser.api import get_reader\n'), ((2993, 3011), 'fparser.two.Fortran2003.Include_Stmt', 'Include_Stmt', (['line'], {}), '(line)\n', (3005, 3011), False, 'from fparser.two.Fortran2003 import Include_Stmt, InternalError\n'), ((3265, 3283), 'fparser.two.Fortran2003.Include_Stmt', 'Include_Stmt', (['line'], {}), '(line)\n', (3277, 3283), False, 'from fparser.two.Fortran2003 import Include_Stmt, InternalError\n'), ((3505, 3523), 'fparser.two.Fortran2003.Include_Stmt', 'Include_Stmt', (['line'], {}), '(line)\n', (3517, 3523), False, 'from fparser.two.Fortran2003 import Include_Stmt, InternalError\n'), ((3750, 3768), 'fparser.two.Fortran2003.Include_Stmt', 'Include_Stmt', (['line'], {}), '(line)\n', (3762, 3768), False, 'from fparser.two.Fortran2003 import Include_Stmt, InternalError\n'), ((2426, 2446), 'fparser.two.Fortran2003.Include_Stmt', 'Include_Stmt', (['reader'], {}), '(reader)\n', (2438, 2446), False, 'from fparser.two.Fortran2003 import Include_Stmt, InternalError\n'), ((4806, 4834), 'pytest.raises', 'pytest.raises', (['InternalError'], {}), '(InternalError)\n', (4819, 4834), False, 'import pytest\n'), ((4859, 4877), 'fparser.two.Fortran2003.Include_Stmt', 'Include_Stmt', (['line'], {}), '(line)\n', (4871, 4877), False, 'from fparser.two.Fortran2003 import Include_Stmt, InternalError\n'), ((4225, 4252), 'pytest.raises', 'pytest.raises', (['NoMatchError'], {}), '(NoMatchError)\n', (4238, 4252), False, 'import pytest\n'), ((4281, 4299), 'fparser.two.Fortran2003.Include_Stmt', 'Include_Stmt', (['line'], {}), '(line)\n', (4293, 4299), False, 'from fparser.two.Fortran2003 import Include_Stmt, InternalError\n')] |
import os, csv, time, shutil
from bin import cardbank
from bin import builder
def add_build(add_cards):
    '''Build card bank by specifically adding new cards.

    Each card in `add_cards` is a dict keyed at minimum by "inf"
    (the infinitive). Existing bank entries with the same infinitive
    are replaced; all others are kept unchanged.
    '''
    # process new cards to add
    if not add_cards:
        return
    # Index the incoming cards by infinitive for O(1) replacement lookups.
    add_card_map = {}
    for card in add_cards:
        add_card_map[card["inf"]] = card
    # read in existing card bank
    card_bank = []
    if os.path.exists("bank/card-bank-built.csv"):
        card_bank = cardbank.read("bank/card-bank-built.csv", build_forms=False)
    # start HTTP session for reuse
    session = builder.session()
    print("Building new card bank..")
    try:
        # rebuild card bank, starting with populating from existing
        new_cards = []
        updated_cards = []
        new_card_bank = []
        errored = []
        for card in card_bank:
            if card["inf"] not in add_card_map:
                # no change, just add existing card
                new_card_bank.append(card)
            else:
                # changed, replace with new card definition
                add_card = add_card_map[card["inf"]]
                # Remove from the map so only brand-new cards remain below.
                del add_card_map[card["inf"]]
                _build_card_and_add(add_card, new_card_bank, new_cards, errored, session)
        # add all brand new cards
        for inf, card in add_card_map.items():
            _build_card_and_add(card, new_card_bank, new_cards, errored, session)
    finally:
        # Always release the HTTP session, even if a build step raised.
        session.close()
    finish_build(new_card_bank, new_cards, updated_cards, errored)
def build_from_difference(force_rebuild=[]):
    '''Build card bank by rectifying differences in card bank basic and built.

    Cards present in the basic bank but missing (or incomplete) in the
    built bank are rebuilt via the web builder; cards whose supplied
    fields changed are updated in place. `force_rebuild` is a list of
    infinitives that are rebuilt regardless of their current state.

    NOTE(review): the mutable default `force_rebuild=[]` is only read,
    never mutated, so it is harmless here — but a `None` default would
    be the safer convention.
    '''
    # read card bank basic (card bank with all basic definitions but not built out)
    card_bank_basic = cardbank.read("bank/card-bank-basic.csv", build_forms=False)
    # read existing, card bank built
    existing = []
    if os.path.exists("bank/card-bank-built.csv"):
        existing = cardbank.read("bank/card-bank-built.csv", build_forms=False)
    # Index existing built cards by infinitive for quick lookup.
    existing_map = {}
    for card in existing:
        existing_map[card["inf"]] = card
    # start HTTP session for reuse
    session = builder.session()
    print("Building new card bank..")
    # build card bank from card bank basic
    new_cards = []
    updated_cards = []
    new_card_bank = []
    errored = []
    for card in card_bank_basic:
        # if already exists, check if requiring update only (unless in list of force rebuild)
        if (card["inf"] not in force_rebuild) and (card["inf"] in existing_map):
            existing_card = existing_map[card["inf"]]
            # if any of the built fields are different, something's wrong, rebuilt it entirely
            rebuild = False
            for field in builder.BUILT_FIELDS:
                if not field in existing_card or not existing_card[field]:
                    rebuild = True
                    break
            # if not rebuilding, just update the supplied fields, which doesn't affect build fields
            update = False
            for field in builder.SUPPLIED_FIELDS:
                if rebuild:
                    break
                if field not in existing_card or existing_card[field] != card[field]:
                    update = True
                    existing_card[field] = card[field]
            # if no rebuild needed, append existing [and updated] card and continue
            if not rebuild:
                new_card_bank.append(existing_card)
                if update:
                    updated_cards.append(existing_card)
                continue
        # if doesn't exist or need rebuilding, rebuild card
        _build_card_and_add(card, new_card_bank, new_cards, errored, session)
    finish_build(new_card_bank, new_cards, updated_cards, errored)
def _build_card_and_add(card, new_card_bank, new_cards, errored, session=None):
    '''Build a single card via the web builder, appending it to
    `new_card_bank`/`new_cards` on success or to `errored` (as a
    (card, reason) tuple) on failure. Mutates the list arguments.
    '''
    # get verb tenses
    tense_map = builder.get(card["inf"], session=session)
    # if warning returned, then invalid somehow
    if isinstance(tense_map, Warning):
        errored.append((card, str(tense_map)))
    # otherwise build card and append to card bank
    else:
        builder.build(card, tense_map)
        new_card_bank.append(card)
        new_cards.append(card)
    # don't spam the website
    time.sleep(1)
def finish_build(new_card_bank, new_cards, updated_cards, errored):
    '''Finish build, save card bank, and print information about build.

    Backs up the previous built bank (if present), writes the new bank
    to CSV, and prints a summary of new/updated/errored cards. No file
    is written when nothing changed.
    '''
    print("")
    if not new_cards and not updated_cards and not errored:
        print("No changes")
        return
    if new_cards or updated_cards:
        # backup
        if os.path.exists("bank/card-bank-built.csv"):
            shutil.copyfile("bank/card-bank-built.csv", "bank/card-bank-built.bkp.csv")
            print("Old card bank backed up as: bank/card-bank-built.bkp.csv")
        # write new
        with open("bank/card-bank-built.csv", "w", newline="", encoding="utf-8") as csvf:
            writer = csv.DictWriter(csvf, fieldnames=builder.FIELDS)
            writer.writeheader()
            writer.writerows(new_card_bank)
        print("New card bank written to: bank/card-bank-built.csv")
    if new_cards:
        print("\nNew cards created:")
        for card in new_cards:
            print("  {0}".format(card["inf"]))
    if updated_cards:
        print("\nCards updated:")
        for card in updated_cards:
            print("  {0}".format(card["inf"]))
    if errored:
        print("\nError building card(s) for:")
        for pair in errored:
            print("  {0} : {1}".format(pair[0]["inf"], pair[1]))
# if called straight-up, build from difference between basic and build card bank
if __name__ == "__main__":
build_from_difference()
| [
"csv.DictWriter",
"os.path.exists",
"bin.builder.get",
"bin.builder.session",
"time.sleep",
"shutil.copyfile",
"bin.builder.build",
"bin.cardbank.read"
] | [((385, 427), 'os.path.exists', 'os.path.exists', (['"""bank/card-bank-built.csv"""'], {}), "('bank/card-bank-built.csv')\n", (399, 427), False, 'import os, csv, time, shutil\n'), ((560, 577), 'bin.builder.session', 'builder.session', ([], {}), '()\n', (575, 577), False, 'from bin import builder\n'), ((1739, 1799), 'bin.cardbank.read', 'cardbank.read', (['"""bank/card-bank-basic.csv"""'], {'build_forms': '(False)'}), "('bank/card-bank-basic.csv', build_forms=False)\n", (1752, 1799), False, 'from bin import cardbank\n'), ((1863, 1905), 'os.path.exists', 'os.path.exists', (['"""bank/card-bank-built.csv"""'], {}), "('bank/card-bank-built.csv')\n", (1877, 1905), False, 'import os, csv, time, shutil\n'), ((2126, 2143), 'bin.builder.session', 'builder.session', ([], {}), '()\n', (2141, 2143), False, 'from bin import builder\n'), ((3885, 3926), 'bin.builder.get', 'builder.get', (["card['inf']"], {'session': 'session'}), "(card['inf'], session=session)\n", (3896, 3926), False, 'from bin import builder\n'), ((4260, 4273), 'time.sleep', 'time.sleep', (['(1)'], {}), '(1)\n', (4270, 4273), False, 'import os, csv, time, shutil\n'), ((449, 509), 'bin.cardbank.read', 'cardbank.read', (['"""bank/card-bank-built.csv"""'], {'build_forms': '(False)'}), "('bank/card-bank-built.csv', build_forms=False)\n", (462, 509), False, 'from bin import cardbank\n'), ((1926, 1986), 'bin.cardbank.read', 'cardbank.read', (['"""bank/card-bank-built.csv"""'], {'build_forms': '(False)'}), "('bank/card-bank-built.csv', build_forms=False)\n", (1939, 1986), False, 'from bin import cardbank\n'), ((4130, 4160), 'bin.builder.build', 'builder.build', (['card', 'tense_map'], {}), '(card, tense_map)\n', (4143, 4160), False, 'from bin import builder\n'), ((4605, 4647), 'os.path.exists', 'os.path.exists', (['"""bank/card-bank-built.csv"""'], {}), "('bank/card-bank-built.csv')\n", (4619, 4647), False, 'import os, csv, time, shutil\n'), ((4661, 4736), 'shutil.copyfile', 'shutil.copyfile', 
(['"""bank/card-bank-built.csv"""', '"""bank/card-bank-built.bkp.csv"""'], {}), "('bank/card-bank-built.csv', 'bank/card-bank-built.bkp.csv')\n", (4676, 4736), False, 'import os, csv, time, shutil\n'), ((4946, 4993), 'csv.DictWriter', 'csv.DictWriter', (['csvf'], {'fieldnames': 'builder.FIELDS'}), '(csvf, fieldnames=builder.FIELDS)\n', (4960, 4993), False, 'import os, csv, time, shutil\n')] |
from mongoengine import StringField, EmailField, BooleanField
from flask.ext.login import UserMixin
import requests
import json
from mongoengine import Document
from social.apps.flask_app.me.models import FlaskStorage
class User(Document, UserMixin):
    """MongoDB-backed user model with Facebook Graph API helpers.

    Combines a mongoengine ``Document`` with Flask-Login's ``UserMixin``.
    The Facebook helpers rely on a social-auth access token stored via
    python-social-auth (see :meth:`get_social_auth`).
    """
    # Core identity fields.
    username = StringField(max_length=200)
    password = StringField(max_length=200, default='')
    name = StringField(max_length=100)
    fullname = StringField(max_length=100)
    first_name = StringField(max_length=100)
    last_name = StringField(max_length=100)
    email = EmailField()
    # Account enabled flag, consumed by Flask-Login via is_active().
    active = BooleanField(default=True)

    def facebook_api(self, url, fields=None):
        """Perform an authenticated GET against the Facebook Graph API.

        ``fields`` is an optional list of field names, sent comma-joined.
        Raises a generic ``Exception`` on any non-200 response.
        """
        params = {
            'access_token': self.get_social_auth("facebook").extra_data['access_token']
        }
        if fields:
            params["fields"] = ",".join(fields)
        res = requests.get(url, params=params)
        if res.status_code != 200:
            raise Exception("Status was %s" % res.status_code)
        return json.loads(res.content)

    def get_facebook_albums(self):
        """Return the user's photo albums as a list of {id, name} dicts."""
        return self.facebook_api("https://graph.facebook.com/v2.2/me/albums", fields=["id", "name"])["data"]

    def get_facebook_photos(self, album_id):
        """Return all photos in an album, following Graph API pagination."""
        photos = []
        url = "https://graph.facebook.com/v2.2/%s/photos" % album_id
        while url:
            ret = self.facebook_api(url, fields=[
                "id", "created_time", "from", "height", "width", "name", "source"
            ])
            photos += ret["data"]
            # "paging.next" is absent on the final page, ending the loop.
            url = ret.get("paging", {}).get("next")
        return photos

    def get_social_auth(self, provider):
        """Return this user's social-auth record for the given provider."""
        return FlaskStorage.user.get_social_auth_for_user(self, provider=provider).get()

    def is_active(self):
        # Flask-Login hook: inactive users cannot log in.
        return self.active
| [
"json.loads",
"social.apps.flask_app.me.models.FlaskStorage.user.get_social_auth_for_user",
"requests.get",
"mongoengine.StringField",
"mongoengine.EmailField",
"mongoengine.BooleanField"
] | [((269, 296), 'mongoengine.StringField', 'StringField', ([], {'max_length': '(200)'}), '(max_length=200)\n', (280, 296), False, 'from mongoengine import StringField, EmailField, BooleanField\n'), ((312, 351), 'mongoengine.StringField', 'StringField', ([], {'max_length': '(200)', 'default': '""""""'}), "(max_length=200, default='')\n", (323, 351), False, 'from mongoengine import StringField, EmailField, BooleanField\n'), ((363, 390), 'mongoengine.StringField', 'StringField', ([], {'max_length': '(100)'}), '(max_length=100)\n', (374, 390), False, 'from mongoengine import StringField, EmailField, BooleanField\n'), ((406, 433), 'mongoengine.StringField', 'StringField', ([], {'max_length': '(100)'}), '(max_length=100)\n', (417, 433), False, 'from mongoengine import StringField, EmailField, BooleanField\n'), ((451, 478), 'mongoengine.StringField', 'StringField', ([], {'max_length': '(100)'}), '(max_length=100)\n', (462, 478), False, 'from mongoengine import StringField, EmailField, BooleanField\n'), ((495, 522), 'mongoengine.StringField', 'StringField', ([], {'max_length': '(100)'}), '(max_length=100)\n', (506, 522), False, 'from mongoengine import StringField, EmailField, BooleanField\n'), ((535, 547), 'mongoengine.EmailField', 'EmailField', ([], {}), '()\n', (545, 547), False, 'from mongoengine import StringField, EmailField, BooleanField\n'), ((561, 587), 'mongoengine.BooleanField', 'BooleanField', ([], {'default': '(True)'}), '(default=True)\n', (573, 587), False, 'from mongoengine import StringField, EmailField, BooleanField\n'), ((835, 867), 'requests.get', 'requests.get', (['url'], {'params': 'params'}), '(url, params=params)\n', (847, 867), False, 'import requests\n'), ((983, 1006), 'json.loads', 'json.loads', (['res.content'], {}), '(res.content)\n', (993, 1006), False, 'import json\n'), ((1622, 1689), 'social.apps.flask_app.me.models.FlaskStorage.user.get_social_auth_for_user', 'FlaskStorage.user.get_social_auth_for_user', (['self'], {'provider': 
'provider'}), '(self, provider=provider)\n', (1664, 1689), False, 'from social.apps.flask_app.me.models import FlaskStorage\n')] |
"""
********************************************************************************
compas_blender.geometry
********************************************************************************
.. currentmodule:: compas_blender.geometry
Object-oriented convenience wrappers for native Blender geometry.
.. autosummary::
:toctree: generated/
BlenderCurve
BlenderMesh
BlenderPoint
BlenderSurface
"""
try:
import bpy
except ImportError:
pass
class BlenderGeometry(object):
    """Base wrapper around a native Blender object.

    Subclasses (point, curve, mesh, surface wrappers) share the common
    bookkeeping here and override the ``NotImplementedError`` stubs.

    Attributes
    ----------
    object : the wrapped ``bpy`` object
    name : str, the object's name in the scene
    geometry : the object's data block (``obj.data``)
    otype : str, the Blender object type (``obj.type``)
    attributes : dict, free-form user attributes
    """

    def __init__(self, obj):
        self.object = obj
        self.name = obj.name
        self.geometry = obj.data
        self.otype = obj.type
        self.attributes = {}

    @property
    def location(self):
        """list: the object's location as a plain [x, y, z] list."""
        return list(self.object.location)

    @classmethod
    def from_selection(cls):
        raise NotImplementedError

    @classmethod
    def from_name(cls, name):
        """Construct a wrapper for the scene object with the given name.

        Bug fix: previously hardcoded ``BlenderGeometry(...)``, which made
        subclasses (e.g. BlenderMesh) return a base-class instance. Using
        ``cls`` preserves the caller's type while remaining backward
        compatible for the base class itself.
        """
        return cls(obj=bpy.data.objects[name])

    @staticmethod
    def find(guid):
        raise NotImplementedError

    @staticmethod
    def refresh():
        # Force a redraw of the Blender UI.
        bpy.ops.wm.redraw_timer(type='DRAW_WIN_SWAP', iterations=1)

    def delete(self):
        raise NotImplementedError

    def purge(self):
        raise NotImplementedError

    def hide(self):
        raise NotImplementedError

    def show(self):
        raise NotImplementedError

    def select(self):
        raise NotImplementedError

    def unselect(self):
        raise NotImplementedError

    def closest_point(self, *args, **kwargs):
        raise NotImplementedError

    def closest_points(self, *args, **kwargs):
        raise NotImplementedError
from .point import BlenderPoint
from .curve import BlenderCurve
from .mesh import BlenderMesh
from .surface import BlenderSurface
__all__ = [
'BlenderGeometry',
'BlenderPoint',
'BlenderCurve',
'BlenderMesh',
'BlenderSurface',
]
| [
"bpy.ops.wm.redraw_timer"
] | [((1094, 1153), 'bpy.ops.wm.redraw_timer', 'bpy.ops.wm.redraw_timer', ([], {'type': '"""DRAW_WIN_SWAP"""', 'iterations': '(1)'}), "(type='DRAW_WIN_SWAP', iterations=1)\n", (1117, 1153), False, 'import bpy\n')] |
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from copy import deepcopy
import pytest
from data_validation import consts
from data_validation.config_manager import ConfigManager
COLUMN_VALIDATION_CONFIG = {
# BigQuery Specific Connection Config
"source_conn": None,
"target_conn": None,
# Validation Type
consts.CONFIG_TYPE: "Column",
# Configuration Required Depending on Validator Type
consts.CONFIG_SCHEMA_NAME: "bigquery-public-data.new_york_citibike",
consts.CONFIG_TABLE_NAME: "citibike_trips",
consts.CONFIG_CALCULATED_FIELDS: [],
consts.CONFIG_GROUPED_COLUMNS: [],
consts.CONFIG_FILTERS: [
{
consts.CONFIG_TYPE: consts.FILTER_TYPE_CUSTOM,
consts.CONFIG_FILTER_SOURCE: "column_name > 100",
consts.CONFIG_FILTER_TARGET: "column_name_target > 100",
}
],
}
QUERY_LIMIT = 100
COLUMN_VALIDATION_CONFIG_LIMIT = deepcopy(COLUMN_VALIDATION_CONFIG)
COLUMN_VALIDATION_CONFIG_LIMIT[consts.CONFIG_LIMIT] = QUERY_LIMIT
QUERY_GROUPS_TEST = [
{
consts.CONFIG_FIELD_ALIAS: "start_alias",
consts.CONFIG_SOURCE_COLUMN: "starttime",
consts.CONFIG_TARGET_COLUMN: "starttime",
consts.CONFIG_CAST: "date",
}
]
AGGREGATES_TEST = [
{
consts.CONFIG_FIELD_ALIAS: "sum_starttime",
consts.CONFIG_SOURCE_COLUMN: "starttime",
consts.CONFIG_TARGET_COLUMN: "starttime",
consts.CONFIG_TYPE: "sum",
}
]
CALCULATED_MULTIPLE_TEST = [
{
consts.CONFIG_FIELD_ALIAS: "concat_start_station_name_end_station_name",
consts.CONFIG_CALCULATED_SOURCE_COLUMNS: [
"start_station_name",
"end_station_name",
],
consts.CONFIG_CALCULATED_TARGET_COLUMNS: [
"start_station_name",
"end_station_name",
],
consts.CONFIG_TYPE: "concat",
},
{
consts.CONFIG_FIELD_ALIAS: "concat_calcs",
consts.CONFIG_CALCULATED_SOURCE_COLUMNS: [
"ifnull_start_station_name",
"rstrip_start_station_name",
"upper_start_station_name",
],
consts.CONFIG_CALCULATED_TARGET_COLUMNS: [
"ifnull_start_station_name",
"rstrip_start_station_name",
"upper_start_station_name",
],
consts.CONFIG_TYPE: "concat",
"depth": 1,
},
{
consts.CONFIG_FIELD_ALIAS: "ifnull_start_station_name",
consts.CONFIG_CALCULATED_SOURCE_COLUMNS: ["start_station_name"],
consts.CONFIG_CALCULATED_TARGET_COLUMNS: ["start_station_name"],
consts.CONFIG_TYPE: "ifnull",
},
{
consts.CONFIG_FIELD_ALIAS: "length_start_station_name",
consts.CONFIG_CALCULATED_SOURCE_COLUMNS: ["start_station_name"],
consts.CONFIG_CALCULATED_TARGET_COLUMNS: ["start_station_name"],
consts.CONFIG_TYPE: "length",
},
{
consts.CONFIG_FIELD_ALIAS: "rstrip_start_station_name",
consts.CONFIG_CALCULATED_SOURCE_COLUMNS: ["start_station_name"],
consts.CONFIG_CALCULATED_TARGET_COLUMNS: ["start_station_name"],
consts.CONFIG_TYPE: "rstrip",
},
{
consts.CONFIG_FIELD_ALIAS: "upper_start_station_name",
consts.CONFIG_CALCULATED_SOURCE_COLUMNS: ["start_station_name"],
consts.CONFIG_CALCULATED_TARGET_COLUMNS: ["start_station_name"],
consts.CONFIG_TYPE: "upper",
},
]
class MockIbisClient(object):
    """Inert stand-in for an Ibis client; no behaviour is required."""
@pytest.fixture
def module_under_test():
import data_validation.validation_builder
return data_validation.validation_builder
def test_import(module_under_test):
assert module_under_test is not None
def test_column_validation(module_under_test):
mock_config_manager = ConfigManager(
COLUMN_VALIDATION_CONFIG, MockIbisClient(), MockIbisClient(), verbose=False
)
builder = module_under_test.ValidationBuilder(mock_config_manager)
assert not builder.verbose
assert builder.config_manager.query_limit is None
def test_column_validation_aggregates(module_under_test):
mock_config_manager = ConfigManager(
COLUMN_VALIDATION_CONFIG, MockIbisClient(), MockIbisClient(), verbose=False
)
builder = module_under_test.ValidationBuilder(mock_config_manager)
mock_config_manager.append_aggregates(AGGREGATES_TEST)
builder.add_config_aggregates()
assert list(builder.get_metadata().keys()) == ["sum_starttime"]
def test_validation_add_groups(module_under_test):
mock_config_manager = ConfigManager(
COLUMN_VALIDATION_CONFIG, MockIbisClient(), MockIbisClient(), verbose=False
)
builder = module_under_test.ValidationBuilder(mock_config_manager)
mock_config_manager.append_query_groups(QUERY_GROUPS_TEST)
builder.add_config_query_groups()
assert list(builder.get_group_aliases()) == ["start_alias"]
def test_column_validation_calculate(module_under_test):
    """All configured calculated fields — including the nested (depth-1)
    concat over other calcs — should be registered under their aliases."""
    mock_config_manager = ConfigManager(
        COLUMN_VALIDATION_CONFIG, MockIbisClient(), MockIbisClient(), verbose=False
    )
    builder = module_under_test.ValidationBuilder(mock_config_manager)

    mock_config_manager.append_calculated_fields(CALCULATED_MULTIPLE_TEST)
    builder.add_config_calculated_fields()
    # Fix: dropped a leftover debug print and the redundant list() wrapper
    # inside sorted().
    assert sorted(builder.get_calculated_aliases()) == [
        "concat_calcs",
        "concat_start_station_name_end_station_name",
        "ifnull_start_station_name",
        "length_start_station_name",
        "rstrip_start_station_name",
        "upper_start_station_name",
    ]
def test_column_validation_limit(module_under_test):
mock_config_manager = ConfigManager(
COLUMN_VALIDATION_CONFIG_LIMIT,
MockIbisClient(),
MockIbisClient(),
verbose=False,
)
builder = module_under_test.ValidationBuilder(mock_config_manager)
builder.add_query_limit()
assert builder.source_builder.limit == QUERY_LIMIT
def test_validation_add_filters(module_under_test):
    """Config filters are translated into source-builder filter fields."""
    config_manager = ConfigManager(
        COLUMN_VALIDATION_CONFIG, MockIbisClient(), MockIbisClient(), verbose=False
    )
    validation_builder = module_under_test.ValidationBuilder(config_manager)
    validation_builder.add_config_filters()
    first_filter = validation_builder.source_builder.filters[0]
    assert first_filter.left == "column_name > 100"
| [
"copy.deepcopy"
] | [((1447, 1481), 'copy.deepcopy', 'deepcopy', (['COLUMN_VALIDATION_CONFIG'], {}), '(COLUMN_VALIDATION_CONFIG)\n', (1455, 1481), False, 'from copy import deepcopy\n')] |
import traceback
from typing import Any, Callable, Dict, List, Optional, Set, Tuple
from chainalytic_icon.common import config, util
class ApiBundle(object):
    """
    The interface to external consumers/applications
    """
    def __init__(self, working_dir: str):
        super(ApiBundle, self).__init__()
        self.working_dir = working_dir
        # The backend is attached later via set_collator(); every API method
        # below delegates to it.
        self.collator = None
        self.logger = util.get_child_logger('provider.api_bundle')
    def set_collator(self, collator: 'Collator'):
        # Late injection of the collator that actually serves the data.
        self.collator = collator
    async def call_api(self, api_id: str, api_params: dict) -> Dict:
        # Dispatch `api_id` to the coroutine method of the same name on this
        # object. Status codes in the returned dict: 1 = success,
        # -1 = unknown API, 0 = exception raised while executing.
        ret = {'status': 0, 'result': None}
        func = getattr(self, api_id) if hasattr(self, api_id) else None
        try:
            if func:
                self.logger.debug(f'Found API: {api_id}, calling...')
                ret['result'] = await func(api_params)
                ret['status'] = 1
            else:
                self.logger.warning(f'API not found: {api_id}')
                ret['status'] = -1
                ret['result'] = f'API not found: {api_id}'
        except Exception as e:
            ret['status'] = 0
            # The traceback is both returned to the caller and logged.
            ret['result'] = f'{str(e)}\n{traceback.format_exc()}'
            self.logger.error(f'ERROR when calling API: {api_id}')
            self.logger.error(f'{str(e)}\n{traceback.format_exc()}')
        return ret
    # #################
    # APIs to be called
    #
    async def last_block_height(self, api_params: dict) -> Optional[int]:
        # Implicitly returns None when 'transform_id' is missing.
        if 'transform_id' in api_params:
            return await self.collator.last_block_height(api_params['transform_id'])
    async def latest_upstream_block_height(self, api_params: dict) -> Optional[int]:
        return await self.collator.latest_upstream_block_height()
    async def get_block(self, api_params: dict) -> Optional[dict]:
        # Expects 'height' and 'transform_id' keys; returns None otherwise.
        if 'transform_id' in api_params:
            return await self.collator.get_block(api_params['height'], api_params['transform_id'])
    # ########################
    # For `stake_history` only
    #
    async def latest_unstake_state(self, api_params: dict) -> Optional[dict]:
        return await self.collator.latest_unstake_state()
    # ###########################
    # For `contract_history` only
    #
    async def contract_transaction(self, api_params: dict) -> Optional[dict]:
        # 'size' is presumably a result-count limit -- confirm in the collator.
        return await self.collator.contract_transaction(
            api_params['address'], int(api_params['size'])
        )
    async def contract_internal_transaction(self, api_params: dict) -> Optional[dict]:
        return await self.collator.contract_internal_transaction(
            api_params['address'], int(api_params['size'])
        )
    async def contract_stats(self, api_params: dict) -> Optional[dict]:
        return await self.collator.contract_stats(api_params['address'])
    async def contract_list(self, api_params: dict) -> Optional[dict]:
        return await self.collator.contract_list()
    async def max_tx_per_contract(self, api_params: dict) -> Optional[dict]:
        return await self.collator.max_tx_per_contract()
| [
"traceback.format_exc",
"chainalytic_icon.common.util.get_child_logger"
] | [((406, 450), 'chainalytic_icon.common.util.get_child_logger', 'util.get_child_logger', (['"""provider.api_bundle"""'], {}), "('provider.api_bundle')\n", (427, 450), False, 'from chainalytic_icon.common import config, util\n'), ((1193, 1215), 'traceback.format_exc', 'traceback.format_exc', ([], {}), '()\n', (1213, 1215), False, 'import traceback\n'), ((1328, 1350), 'traceback.format_exc', 'traceback.format_exc', ([], {}), '()\n', (1348, 1350), False, 'import traceback\n')] |
import copy
import torch
from utils import helpers
from utils.layers import conv, linear, batch_norm
def ticketfy(model, split_rate, split_mode="kels"):
    """Replace conv/linear/batch-norm layers in ``model`` with split variants.

    Each replacement layer keeps the original layer's hyper-parameters and
    adds the split_rate/split_mode options. The model is modified in place.
    """
    conv_layers, linear_layers, bn_layers = helpers.get_layers(model)
    for n, _ in conv_layers:
        cur_conv = helpers.rgetattr(model, n)
        helpers.rsetattr(
            model, n,
            conv.SplitConv(cur_conv.in_channels,
                           cur_conv.out_channels,
                           kernel_size=cur_conv.kernel_size,
                           stride=cur_conv.stride,
                           padding=cur_conv.padding,
                           dilation=cur_conv.dilation,
                           groups=cur_conv.groups,
                           # `is not None` (not `!= None`): identity is the
                           # correct PEP 8 way to test for a missing bias.
                           bias=cur_conv.bias is not None,
                           padding_mode=cur_conv.padding_mode,
                           split_rate=split_rate,
                           split_mode=split_mode))
    for i, (n, _) in enumerate(linear_layers):
        cur_linear = helpers.rgetattr(model, n)
        helpers.rsetattr(
            model, n,
            linear.SplitLinear(cur_linear.in_features,
                               cur_linear.out_features,
                               bias=cur_linear.bias is not None,
                               split_rate=split_rate,
                               split_mode=split_mode,
                               # The final linear layer is treated specially
                               # by SplitLinear (output layer is not split).
                               last_layer=i == len(linear_layers) - 1))
    for n, _ in bn_layers:
        cur_bn = helpers.rgetattr(model, n)
        helpers.rsetattr(
            model, n,
            batch_norm.SplitBatchNorm(
                cur_bn.num_features,
                eps=cur_bn.eps,
                momentum=cur_bn.momentum,
                track_running_stats=cur_bn.track_running_stats,
                split_rate=split_rate))
def regenerate(model, evolve_mode="rand", device="cpu"):
    """Re-initialize the masked-out (non-ticket) weights of every split layer."""
    for _, m in model.named_modules():
        if hasattr(m, "weight") and m.weight is not None:
            if hasattr(m, "mask"):  ## Conv and Linear but not BN
                assert m.split_rate < 1.0
                if m.__class__ == conv.SplitConv or m.__class__ == linear.SplitLinear:
                    m.split_reinitialize(evolve_mode, device)
                else:
                    # BUG FIX: `raise NotImplemented(...)` raised a TypeError
                    # because NotImplemented is a singleton, not an exception;
                    # NotImplementedError is the intended exception type.
                    raise NotImplementedError('Invalid layer {}'.format(
                        m.__class__))
def extract_ticket(model, split_rate):
    """Return a deep copy of ``model`` with every split layer slimmed down."""
    slim_model = copy.deepcopy(model)
    for _, module in slim_model.named_modules():
        if not (hasattr(module, "weight") and module.weight is not None):
            continue
        if hasattr(module, "mask"):
            # SplitConv / SplitLinear expose their ticket via extract_slim().
            module.extract_slim()
        elif module.__class__ == batch_norm.SplitBatchNorm:
            # BatchNorm carries bn_mask rather than mask, so it is matched by
            # class instead of by attribute.
            module.extract_slim()
    return slim_model
| [
"utils.layers.batch_norm.SplitBatchNorm",
"utils.helpers.get_layers",
"utils.layers.conv.SplitConv",
"copy.deepcopy",
"utils.helpers.rgetattr"
] | [((200, 225), 'utils.helpers.get_layers', 'helpers.get_layers', (['model'], {}), '(model)\n', (218, 225), False, 'from utils import helpers\n'), ((2410, 2430), 'copy.deepcopy', 'copy.deepcopy', (['model'], {}), '(model)\n', (2423, 2430), False, 'import copy\n'), ((275, 301), 'utils.helpers.rgetattr', 'helpers.rgetattr', (['model', 'n'], {}), '(model, n)\n', (291, 301), False, 'from utils import helpers\n'), ((1008, 1034), 'utils.helpers.rgetattr', 'helpers.rgetattr', (['model', 'n'], {}), '(model, n)\n', (1024, 1034), False, 'from utils import helpers\n'), ((1480, 1506), 'utils.helpers.rgetattr', 'helpers.rgetattr', (['model', 'n'], {}), '(model, n)\n', (1496, 1506), False, 'from utils import helpers\n'), ((362, 684), 'utils.layers.conv.SplitConv', 'conv.SplitConv', (['cur_conv.in_channels', 'cur_conv.out_channels'], {'kernel_size': 'cur_conv.kernel_size', 'stride': 'cur_conv.stride', 'padding': 'cur_conv.padding', 'dilation': 'cur_conv.dilation', 'groups': 'cur_conv.groups', 'bias': '(cur_conv.bias != None)', 'padding_mode': 'cur_conv.padding_mode', 'split_rate': 'split_rate', 'split_mode': 'split_mode'}), '(cur_conv.in_channels, cur_conv.out_channels, kernel_size=\n cur_conv.kernel_size, stride=cur_conv.stride, padding=cur_conv.padding,\n dilation=cur_conv.dilation, groups=cur_conv.groups, bias=cur_conv.bias !=\n None, padding_mode=cur_conv.padding_mode, split_rate=split_rate,\n split_mode=split_mode)\n', (376, 684), False, 'from utils.layers import conv, linear, batch_norm\n'), ((1567, 1735), 'utils.layers.batch_norm.SplitBatchNorm', 'batch_norm.SplitBatchNorm', (['cur_bn.num_features'], {'eps': 'cur_bn.eps', 'momentum': 'cur_bn.momentum', 'track_running_stats': 'cur_bn.track_running_stats', 'split_rate': 'split_rate'}), '(cur_bn.num_features, eps=cur_bn.eps, momentum=\n cur_bn.momentum, track_running_stats=cur_bn.track_running_stats,\n split_rate=split_rate)\n', (1592, 1735), False, 'from utils.layers import conv, linear, batch_norm\n')] |
from utils import scrape_helper
# Investopedia glossary index page for terms starting with "1".
url = "http://www.investopedia.com/terms/1/"
# scrape_helper presumably returns the list of per-term URLs found on the
# page -- confirm against its implementation.
links = scrape_helper.get_term_links_from_page(url)
print(links)
| [
"utils.scrape_helper.get_term_links_from_page"
] | [((87, 130), 'utils.scrape_helper.get_term_links_from_page', 'scrape_helper.get_term_links_from_page', (['url'], {}), '(url)\n', (125, 130), False, 'from utils import scrape_helper\n')] |
import requests
import bs4
# Accumulators for the scraped historical-data table.
# NOTE(review): higlist/lowlist are declared (and "higlist" is misspelled)
# but never filled in the visible code -- confirm whether they are needed.
dateList = []
higlist = []
lowlist= []
r = requests.get(
    'https://coinmarketcap.com/currencies/bitcoin/historical-data/')
soup = bs4.BeautifulSoup(r.text, "lxml")
# Each data row of the historical table carries class "text-right".
tr = soup.find_all('tr',{'class':'text-right'})
for item in tr:
    # The single left-aligned cell in a row holds the date string.
    dateList.append(item.find('td', {'class':'text-left'}).text)
"bs4.BeautifulSoup",
"requests.get"
] | [((74, 151), 'requests.get', 'requests.get', (['"""https://coinmarketcap.com/currencies/bitcoin/historical-data/"""'], {}), "('https://coinmarketcap.com/currencies/bitcoin/historical-data/')\n", (86, 151), False, 'import requests\n'), ((164, 197), 'bs4.BeautifulSoup', 'bs4.BeautifulSoup', (['r.text', '"""lxml"""'], {}), "(r.text, 'lxml')\n", (181, 197), False, 'import bs4\n')] |
import os
from colorama import Fore, Style
from pathlib import Path
# Metadata-format identifiers reused when building schema file names below.
DIF = "dif10"
ECHO10 = "echo10"
UMM_JSON = "umm-json"
ROOT_DIR = (
    # go up one directory
    Path(__file__).resolve().parents[1]
)
SCHEMAS_BASE_PATH = f"{ROOT_DIR}/schemas"
# File extension -> schema base names expected under SCHEMAS_BASE_PATH.
SCHEMAS = {
    "json": [
        "checks",
        "check_messages",
        "check_messages_override",
        "checks_override",
        "rule_mapping",
        "rules_override",
        UMM_JSON
    ],
    "csv": [
        "granuledataformat",
        "instruments",
        "locations",
        "projects",
        "providers",
        "platforms",
        "sciencekeywords",
        "rucontenttype"
    ],
    "xsd": [ f"{DIF}_xml", f"{ECHO10}_xml" ],
    "xml": [ "catalog" ]
}
# Flattened map: schema name -> absolute path of its schema file.
SCHEMA_PATHS = {
    schema: f"{SCHEMAS_BASE_PATH}/{schema}.{filetype}"
    for filetype, schemas in SCHEMAS.items()
    for schema in schemas
}
VERSION_FILE = f"{SCHEMAS_BASE_PATH}/version.txt"
# Colorama styles keyed by message category, for console output.
COLOR = {
    "title": Fore.GREEN,
    "info": Fore.BLUE,
    "error": Fore.RED,
    "warning": Fore.YELLOW,
    "reset": Style.RESET_ALL,
    "bright": Style.BRIGHT
}
# GCMD keyword service endpoints; note the keyword list mirrors the "csv"
# schemas above.
GCMD_BASIC_URL = "https://gcmdservices.gsfc.nasa.gov/kms/concepts/concept_scheme/"
GCMD_KEYWORDS = [
    "granuledataformat",
    "instruments",
    "locations",
    "platforms",
    "projects",
    "providers",
    "rucontenttype",
    "sciencekeywords"
]
GCMD_LINKS = {
    keyword: f"{GCMD_BASIC_URL}{keyword}?format=csv" for keyword in GCMD_KEYWORDS
}
CMR_URL = "https://cmr.earthdata.nasa.gov"
| [
"pathlib.Path"
] | [((168, 182), 'pathlib.Path', 'Path', (['__file__'], {}), '(__file__)\n', (172, 182), False, 'from pathlib import Path\n')] |
"""
Generic publisher for graphana
"""
import abc
import six
from decisionengine.framework.modules import Publisher
import decisionengine_modules.graphite_client as graphite
DEFAULT_GRAPHITE_HOST = 'fermicloud399.fnal.gov'
DEFAULT_GRAPHITE_PORT = 2004
DEFAULT_GRAPHITE_CONTEXT = ""
@six.add_metaclass(abc.ABCMeta)
class GenericPublisher(Publisher.Publisher):
    """Abstract publisher that sends one consumed data product to Graphite
    and/or writes it out as CSV. Subclasses supply consumes() and
    graphite_context()."""
    def __init__(self, config):
        self.graphite_host = config.get('graphite_host', DEFAULT_GRAPHITE_HOST)
        self.graphite_port = config.get('graphite_port', DEFAULT_GRAPHITE_PORT)
        self.graphite_context_header = config.get(
            'graphite_context', DEFAULT_GRAPHITE_CONTEXT)
        # NOTE(review): attribute name is misspelled ("publush") but used
        # consistently below; renaming would change the public interface.
        self.publush_to_graphite = config.get('publish_to_graphite')
        self.output_file = config.get('output_file')
    @abc.abstractmethod
    def consumes(self): # this must be implemented by the inherited class
        return None
    @abc.abstractmethod
    # this must be implemented by the inherited class
    def graphite_context(self, data_block):
        return None
    def publish(self, data_block):
        """
        Publish data
        :type data_block: :obj:`~datablock.DataBlock`
        :arg data_block: data block
        """
        if not self.consumes():
            return
        data = data_block[self.consumes()[0]]
        if self.graphite_host and self.publush_to_graphite:
            end_point = graphite.Graphite(
                host=self.graphite_host, pickle_port=self.graphite_port)
            # graphite_context() returns (context, dict-to-send) -- presumably;
            # confirm against a concrete subclass.
            end_point.send_dict(self.graphite_context(data)[0], self.graphite_context(
                data)[1], debug_print=False, send_data=True)
        # pandas to_csv returns the CSV text when the path is None, otherwise
        # it writes the file and returns None.
        csv_data = data.to_csv(self.output_file, index=False)
        if not self.output_file:
            print(csv_data)
| [
"decisionengine_modules.graphite_client.Graphite",
"six.add_metaclass"
] | [((288, 318), 'six.add_metaclass', 'six.add_metaclass', (['abc.ABCMeta'], {}), '(abc.ABCMeta)\n', (305, 318), False, 'import six\n'), ((1405, 1479), 'decisionengine_modules.graphite_client.Graphite', 'graphite.Graphite', ([], {'host': 'self.graphite_host', 'pickle_port': 'self.graphite_port'}), '(host=self.graphite_host, pickle_port=self.graphite_port)\n', (1422, 1479), True, 'import decisionengine_modules.graphite_client as graphite\n')] |
from flask import Blueprint
# Parent blueprint for the API; the modules imported below attach their
# routes to it.
api = Blueprint('api', __name__)
# Imported after `api` is defined so the route modules can import it from
# this package without a circular-import error (standard Flask pattern).
from . import authentication, videos, shows, users, comments, errors
"flask.Blueprint"
] | [((35, 61), 'flask.Blueprint', 'Blueprint', (['"""api"""', '__name__'], {}), "('api', __name__)\n", (44, 61), False, 'from flask import Blueprint\n')] |
"""
Build SSW scripts from Jinja 2 templates
"""
import os
import datetime
import subprocess
import tempfile
from jinja2 import (Environment as Env,
FileSystemLoader,
PackageLoader)
from scipy.io import readsav
from .read_config import defaults
from .util import SSWIDLError, IDLLicenseError
class Environment(object):
    """
    Environment for running SSW and IDL scripts
    Parameters
    ----------
    ssw_packages : list, optional
        List of SSW packages to load, e.g. 'sdo/aia', 'chianti'
    ssw_paths : list, optional
        List of SSW paths to pass to `ssw_path`
    extra_paths : list, optional
        Additional paths to add to the IDL namespace
    ssw_home : str, optional
        Root of SSW tree
    idl_home : str, optional
        Path to IDL executable
    """
    def __init__(self, ssw_packages=None, ssw_paths=None, extra_paths=None,
                 ssw_home=None, idl_home=None,):
        self.ssw_packages = ssw_packages if ssw_packages is not None else []
        self.ssw_paths = ssw_paths if ssw_paths is not None else []
        self.extra_paths = extra_paths if extra_paths is not None else []
        # Jinja2 environment loading the templates bundled with the package.
        self.env = Env(loader=PackageLoader('hissw', 'templates'))
        self._setup_home(ssw_home, idl_home,)
    def _setup_home(self, ssw_home, idl_home,):
        """
        Setup SSW and IDL home locations
        """
        # Explicit arguments win; otherwise fall back to the hisswrc defaults.
        self.ssw_home = defaults['ssw_home'] if ssw_home is None else ssw_home
        if self.ssw_home is None:
            raise ValueError('''ssw_home must be set at instantiation or in the hisswrc file.''')
        self.idl_home = defaults['idl_home'] if idl_home is None else idl_home
        if self.idl_home is None:
            raise ValueError('''idl_home must be set at instantiation or in the hisswrc file.''')
    def custom_script(self, script, args):
        """
        Generate custom IDL scripts from templates
        """
        # `script` may be either a path to a template file or a literal
        # template string; both are rendered with `args`.
        if os.path.isfile(script):
            env = Env(loader=FileSystemLoader(os.path.dirname(script)))
            idl_script = env.get_template(os.path.basename(script)).render(**args)
        else:
            env = Env()
            idl_script = env.from_string(script).render(**args)
        return idl_script
    def procedure_script(self, script, save_vars, save_filename):
        """
        Render inner procedure file
        """
        if save_vars is None:
            save_vars = []
        params = {'script': script, 'save_vars': save_vars, 'save_filename': save_filename}
        return self.env.get_template('procedure.pro').render(**params)
    def command_script(self, procedure_filename):
        """
        Generate parent IDL script
        """
        params = {'ssw_paths': self.ssw_paths,
                  'extra_paths': self.extra_paths,
                  'procedure_filename': procedure_filename}
        return self.env.get_template('parent.pro').render(**params)
    def shell_script(self, command_filename):
        """
        Generate shell script for starting up SSWIDL
        """
        params = {'ssw_home': self.ssw_home,
                  'ssw_packages': self.ssw_packages,
                  'idl_home': self.idl_home,
                  'command_filename': command_filename}
        return self.env.get_template('startup.sh').render(**params)
    def run(self, script, args=None, save_vars=None, verbose=True):
        """
        Set up the SSWIDL environment and run the supplied scripts.
        Parameters
        ----------
        script : str
            Literal script or path to script file
        args : dict, optional
            Input arguments to script
        save_vars : list, optional
            Variables to save and return from the IDL namespace
        verbose : bool, optional
        """
        args = {} if args is None else args
        # All intermediate files live in a temp dir that is removed on exit.
        with tempfile.TemporaryDirectory() as tmpdir:
            # Get filenames (timestamped so reruns do not collide)
            fn_template = os.path.join(
                tmpdir, '{name}_'+datetime.datetime.now().strftime('%Y%m%d-%H%M%S')+'.{ext}')
            save_filename = fn_template.format(name='idl_vars', ext='sav')
            procedure_filename = fn_template.format(name='idl_procedure', ext='pro')
            command_filename = fn_template.format(name='idl_script', ext='pro')
            shell_filename = fn_template.format(name='ssw_shell', ext='sh')
            # Render and save scripts: procedure -> parent command -> shell
            idl_script = self.custom_script(script, args)
            with open(procedure_filename, 'w') as f:
                f.write(self.procedure_script(idl_script, save_vars, save_filename))
            with open(command_filename, 'w') as f:
                f.write(self.command_script(procedure_filename))
            with open(shell_filename, 'w') as f:
                f.write(self.shell_script(command_filename,))
            # Execute the generated shell script and capture its output.
            subprocess.call(['chmod', 'u+x', shell_filename])
            cmd_output = subprocess.run([shell_filename], shell=True, stderr=subprocess.PIPE,
                                        stdout=subprocess.PIPE)
            self._check_for_errors(cmd_output, verbose)
            # Load the variables IDL saved to the .sav file back into Python.
            results = readsav(save_filename)
        return results
    def _check_for_errors(self, output, verbose):
        """
        Check IDL output to try and decide if an error has occurred
        """
        stdout = output.stdout.decode('utf-8')
        stderr = output.stderr.decode('utf-8')
        # NOTE: For some reason, not only errors are output to stderr so we
        # have to check it for certain keywords to see if an error occurred
        if 'execution halted' in stderr.lower():
            raise SSWIDLError(stderr)
        if 'failed to acquire license' in stderr.lower():
            raise IDLLicenseError(stderr)
        if verbose:
            print(f'{stderr}\n{stdout}')
| [
"tempfile.TemporaryDirectory",
"jinja2.Environment",
"subprocess.run",
"os.path.isfile",
"os.path.dirname",
"datetime.datetime.now",
"subprocess.call",
"os.path.basename",
"scipy.io.readsav",
"jinja2.PackageLoader"
] | [((1959, 1981), 'os.path.isfile', 'os.path.isfile', (['script'], {}), '(script)\n', (1973, 1981), False, 'import os\n'), ((2170, 2175), 'jinja2.Environment', 'Env', ([], {}), '()\n', (2173, 2175), True, 'from jinja2 import Environment as Env, FileSystemLoader, PackageLoader\n'), ((3869, 3898), 'tempfile.TemporaryDirectory', 'tempfile.TemporaryDirectory', ([], {}), '()\n', (3896, 3898), False, 'import tempfile\n'), ((4883, 4932), 'subprocess.call', 'subprocess.call', (["['chmod', 'u+x', shell_filename]"], {}), "(['chmod', 'u+x', shell_filename])\n", (4898, 4932), False, 'import subprocess\n'), ((4958, 5055), 'subprocess.run', 'subprocess.run', (['[shell_filename]'], {'shell': '(True)', 'stderr': 'subprocess.PIPE', 'stdout': 'subprocess.PIPE'}), '([shell_filename], shell=True, stderr=subprocess.PIPE, stdout\n =subprocess.PIPE)\n', (4972, 5055), False, 'import subprocess\n'), ((5169, 5191), 'scipy.io.readsav', 'readsav', (['save_filename'], {}), '(save_filename)\n', (5176, 5191), False, 'from scipy.io import readsav\n'), ((1210, 1245), 'jinja2.PackageLoader', 'PackageLoader', (['"""hissw"""', '"""templates"""'], {}), "('hissw', 'templates')\n", (1223, 1245), False, 'from jinja2 import Environment as Env, FileSystemLoader, PackageLoader\n'), ((2029, 2052), 'os.path.dirname', 'os.path.dirname', (['script'], {}), '(script)\n', (2044, 2052), False, 'import os\n'), ((2097, 2121), 'os.path.basename', 'os.path.basename', (['script'], {}), '(script)\n', (2113, 2121), False, 'import os\n'), ((4012, 4035), 'datetime.datetime.now', 'datetime.datetime.now', ([], {}), '()\n', (4033, 4035), False, 'import datetime\n')] |
"""
Entity-relation diagram (ERD) GraphViz dot-file generator.
Usage:
python -m erd db.json -o db.dot
Then pass the result to the GraphViz `dot` tool:
dot db.dot -T png -o db.png
Inspired by: https://github.com/ehne/ERDot
"""
import argparse
import json
import re
from pathlib import Path
from typing import Dict, List
import pydantic
class Spec(pydantic.BaseModel):
    """Validated ERD input: tables, enums and relation strings."""
    # table name -> {column name -> column type}; a '*' / '+' prefix on a
    # column marks a primary / foreign key (see render_table).
    tables: Dict[str, Dict[str, str]] = {}
    # enum name -> list of member names
    enums: Dict[str, List[str]] = {}
    # "src:fk X--Y dst:pk" strings, parsed by render_relation
    relations: List[str] = []
# NOTE: this would be simpler with a declarative templating tool (e.g. Jinja2),
# but we don't have one in the project at the moment of writing this tool.
# So, imperative we go...
FONT = "Arial"
COLUMN_TYPE_COLOR = "gray40" # See: https://graphviz.org/doc/info/colors.html
GRAPHVIZ_TEMPLATE = """
digraph G {{
graph [
nodesep=0.5;
rankdir="LR";
cencentrate=true;
splines="spline";
fontname="{font}";
pad="0.2,0.2"
];
node [shape=plain, fontname="{font}"];
edge [
dir=both,
fontsize=12,
arrowsize=0.9,
penwidth=1.0,
labelangle=32,
labeldistance=1.8,
fontname="{font}]"
];
{tables}
{enums}
{relations}
}}
"""
def render_table(name: str, columns: Dict[str, str]) -> str:
    """Render one table as a GraphViz node with an HTML-like label.

    A '*' prefix on a column key marks a primary key, '+' a foreign key;
    the prefix is stripped from the port name and shown as "PK "/"FK ".
    """
    rows = [
        '<table border="0" cellborder="1" cellspacing="0">',
        f"<tr><td><i>{name}</i></td></tr>",
    ]
    for key, type_ in columns.items():
        port = key.replace("+", "").replace("*", "")
        display_name = key.replace("*", "PK ").replace("+", "FK ")
        rows.append(
            f'<tr><td port="{port}" align="left" cellpadding="5">{display_name}'
            "&nbsp;&nbsp;&nbsp;&nbsp;"
            f'<font color="{COLUMN_TYPE_COLOR}">{type_}</font></td></tr>'
        )
    rows.append("</table>")
    label = "\n".join(rows)
    return f'"{name}" [label=<{label}>];'
def render_enum(name: str, items: List[str]) -> str:
    """Render one enum as a GraphViz node with an HTML-like label."""
    rows = [
        '<table border="0" cellborder="1" cellspacing="0">',
        f"<tr><td><i>{name}</i></td></tr>",
    ]
    rows.extend(
        f'<tr><td align="left" cellpadding="5">{item}</td></tr>' for item in items
    )
    rows.append("</table>")
    return '"{}" [label=<{}>];'.format(name, "\n".join(rows))
def render_relation(relation: str) -> str:
    """Render one relation line (e.g. ``src:dest_id *--1 dest:id``) as an edge."""
    pattern = (
        r"^(?P<source_name>\w+):(?P<source_fk>\w+) "
        r"(?P<left_cardinality>[\d\+\*])--(?P<right_cardinality>[\d\+\*]) "
        r"(?P<dest_name>\w+):(?P<dest_pk>\w+)$"
    )
    match = re.match(pattern, relation)
    assert match is not None, f"Invalid relation format: {relation!r}"
    groups = match.groupdict()
    # '*' = many, '+' = one-or-more; anything else falls back to exactly-one.
    cardinality_props = {
        "*": "arrowtail=ocrow",
        "+": "arrowtail=ocrowtee",
    }
    left_props = cardinality_props.get(
        groups["left_cardinality"], "arrowtail=noneotee")
    right_props = cardinality_props.get(
        groups["right_cardinality"], "arrowtail=noneotee")
    src, fk = groups["source_name"], groups["source_fk"]
    dst, pk = groups["dest_name"], groups["dest_pk"]
    return "\n".join(
        (
            f'"{src}":"{fk}"->"{dst}":"{pk}" [',
            f"{right_props},",
            f"{left_props},",
            "];",
        )
    )
def render(content: str) -> str:
    """Render a JSON ERD spec string into a complete GraphViz dot document."""
    spec = Spec(**json.loads(content))
    return GRAPHVIZ_TEMPLATE.format(
        font=FONT,
        tables="\n".join(
            render_table(name, columns) for name, columns in spec.tables.items()
        ),
        enums="\n".join(
            render_enum(name, items) for name, items in spec.enums.items()
        ),
        relations="\n".join(render_relation(r) for r in spec.relations),
    )
def main() -> None:
    """CLI entry point: read the JSON spec and write the rendered dot file."""
    arg_parser = argparse.ArgumentParser()
    arg_parser.add_argument("input_file", type=Path)
    arg_parser.add_argument("-o", "--output-file", type=Path)
    parsed = arg_parser.parse_args()
    rendered = render(parsed.input_file.read_text())
    parsed.output_file.write_text(rendered)
# Script entry point when invoked as `python -m erd ...`.
if __name__ == "__main__":
    main()
| [
"json.loads",
"re.match",
"argparse.ArgumentParser"
] | [((2412, 2595), 're.match', 're.match', (['"""^(?P<source_name>\\\\w+):(?P<source_fk>\\\\w+) (?P<left_cardinality>[\\\\d\\\\+\\\\*])--(?P<right_cardinality>[\\\\d\\\\+\\\\*]) (?P<dest_name>\\\\w+):(?P<dest_pk>\\\\w+)$"""', 'relation'], {}), "(\n '^(?P<source_name>\\\\w+):(?P<source_fk>\\\\w+) (?P<left_cardinality>[\\\\d\\\\+\\\\*])--(?P<right_cardinality>[\\\\d\\\\+\\\\*]) (?P<dest_name>\\\\w+):(?P<dest_pk>\\\\w+)$'\n , relation)\n", (2420, 2595), False, 'import re\n'), ((3813, 3838), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {}), '()\n', (3836, 3838), False, 'import argparse\n'), ((3368, 3387), 'json.loads', 'json.loads', (['content'], {}), '(content)\n', (3378, 3387), False, 'import json\n')] |
# pylint: disable=no-self-use,invalid-name
import unittest
import spacy
from scispacy.hyponym_detector import HyponymDetector
class TestHyponymDetector(unittest.TestCase):
    """Exercises the extended Hearst-pattern hyponym detector."""
    def setUp(self):
        super().setUp()
        # Full spaCy model load; shared fixture cost for every test method.
        self.nlp = spacy.load("en_core_sci_sm")
        # Stand-alone detector for calling its helpers directly below ...
        self.detector = HyponymDetector(self.nlp, extended=True)
        # ... and a pipeline-registered instance used by self.nlp(text).
        self.nlp.add_pipe("hyponym_detector", config={"extended": True}, last=True)
    def test_sentences(self):
        # "such as", "other" and "including" patterns should each yield the
        # expected (pattern, general, specific) triples on doc._.hearst_patterns.
        text = (
            "Recognizing that the preferred habitats for the species "
            "are in the valleys, systematic planting of keystone plant "
            "species such as fig trees (Ficus) creates the best microhabitats."
        )
        doc = self.nlp(text)
        fig_trees = doc[21:23]
        plant_species = doc[17:19]
        assert doc._.hearst_patterns == [("such_as", plant_species, fig_trees)]
        doc = self.nlp("SARS, or other coronaviruses, are bad.")
        assert doc._.hearst_patterns == [("other", doc[4:5], doc[0:1])]
        doc = self.nlp("Coronaviruses, including SARS and MERS, are bad.")
        assert doc._.hearst_patterns == [
            ("include", doc[0:1], doc[3:4]),
            ("include", doc[0:1], doc[5:6]),
        ]
    def test_find_noun_compound_head(self):
        doc = self.nlp("The potassium channel is good.")
        head = self.detector.find_noun_compound_head(doc[1])
        assert head == doc[2]
        doc = self.nlp("Planting of large plants.")
        head = self.detector.find_noun_compound_head(doc[3])
        # Planting is a noun, but not a compound with 'plants'.
        assert head != doc[0]
        assert head == doc[3]
    def test_expand_noun_phrase(self):
        doc = self.nlp("Keystone plant habitats are good.")
        chunk = self.detector.expand_to_noun_compound(doc[1], doc)
        assert chunk == doc[0:3]
| [
"spacy.load",
"scispacy.hyponym_detector.HyponymDetector"
] | [((239, 267), 'spacy.load', 'spacy.load', (['"""en_core_sci_sm"""'], {}), "('en_core_sci_sm')\n", (249, 267), False, 'import spacy\n'), ((292, 332), 'scispacy.hyponym_detector.HyponymDetector', 'HyponymDetector', (['self.nlp'], {'extended': '(True)'}), '(self.nlp, extended=True)\n', (307, 332), False, 'from scispacy.hyponym_detector import HyponymDetector\n')] |
"""Steps up and down"""
import calendar
import numpy as np
from pandas.io.sql import read_sql
from pyiem import network
from pyiem.plot.use_agg import plt
from pyiem.util import get_autoplot_context, get_dbconn
PDICT = {'spring': '1 January - 30 June',
         'fall': '1 July - 31 December'}


def get_description():
    """ Return a dict describing how to call this plotter """
    desc = {}
    desc['data'] = True
    desc['description'] = """This plot analyzes the number of steps down in
    low temperature during the fall season and the number of steps up in
    high temperature during the spring season. These steps are simply having
    a newer colder low or warmer high for the season to date period.
    """
    # Form inputs: the station picker and the half-of-year selector.
    station_arg = {'type': 'station', 'name': 'station', 'default': 'IA2203',
                   'label': 'Select Station', 'network': 'IACLIMATE'}
    season_arg = {'type': 'select', 'name': 'season', 'options': PDICT,
                  'label': 'Select which half of year', 'default': 'fall'}
    desc['arguments'] = [station_arg, season_arg]
    return desc
def plotter(fdict):
    """ Go """
    pgconn = get_dbconn('coop')
    ctx = get_autoplot_context(fdict, get_description())
    station = ctx['station']
    season = ctx['season']
    # Per-state climate table, e.g. alldata_ia for IA2203.
    table = "alldata_%s" % (station[:2],)
    nt = network.Table("%sCLIMATE" % (station[:2],))
    # Running max-high (spring) / min-low (fall) over the season to date;
    # rank = 1 keeps the first day each new extreme level was reached.
    df = read_sql("""
    WITH obs as (
        SELECT day, year, month, high, low,
        case when month > 6 then 'fall' else 'spring' end as season
        from """ + table + """ WHERE station = %s),
    data as (
        SELECT year, day, season,
        max(high) OVER (PARTITION by year, season ORDER by day ASC
        ROWS BETWEEN 183 PRECEDING and CURRENT ROW) as mh,
        min(low) OVER (PARTITION by year, season ORDER by day ASC
        ROWS BETWEEN 183 PRECEDING and CURRENT ROW) as ml
        from obs),
    lows as (
        SELECT year, day, ml as level, season,
        rank() OVER (PARTITION by year, ml ORDER by day ASC) from data
        WHERE season = 'fall'),
    highs as (
        SELECT year, day, mh as level, season,
        rank() OVER (PARTITION by year, mh ORDER by day ASC) from data
        WHERE season = 'spring')
    (SELECT year, day, extract(doy from day) as doy,
     level, season from lows WHERE rank = 1) UNION
    (SELECT year, day, extract(doy from day) as doy,
     level, season from highs WHERE rank = 1)
    """, pgconn, params=[station])
    df2 = df[df['season'] == season]
    (fig, ax) = plt.subplots(3, 1, figsize=(7, 10))
    # Panel 1: number of step events per year vs the long-term average.
    dyear = df2.groupby(['year']).count()
    ax[0].bar(dyear.index, dyear['level'], facecolor='tan', edgecolor='tan')
    ax[0].axhline(dyear['level'].mean(), lw=2)
    ax[0].set_ylabel("Yearly Events Avg: %.1f" % (dyear['level'].mean(), ))
    ax[0].set_xlim(dyear.index.min()-1, dyear.index.max()+1)
    title = "%s Steps %s" % (PDICT[season],
                             "Down" if season == 'fall' else 'Up')
    ax[0].set_title("%s [%s]\n%s in Temperature" % (nt.sts[station]['name'],
                                                    station, title))
    ax[0].grid(True)
    # Panel 2: distribution of the step temperatures (2-degree bins).
    # NOTE(review): hist(normed=...) was deprecated/removed in newer
    # matplotlib (use density=) -- confirm the pinned matplotlib version.
    ax[1].hist(np.array(df2['level'], 'f'),
               bins=np.arange(df2['level'].min(),
                              df2['level'].max()+1, 2),
               normed=True, facecolor='tan')
    ax[1].set_ylabel("Probability Density")
    ax[1].axvline(32, lw=2)
    ax[1].grid(True)
    ax[1].set_xlabel(r"Temperature $^\circ$F, 32 degrees highlighted")
    # Panel 3: distribution of when the steps occur (3-day bins over the year).
    ax[2].hist(np.array(df2['doy'], 'f'),
               bins=np.arange(df2['doy'].min(),
                              df2['doy'].max()+1, 3),
               normed=True, facecolor='tan')
    ax[2].set_xticks((1, 32, 60, 91, 121, 152, 182, 213, 244, 274,
                      305, 335, 365))
    ax[2].set_xticklabels(calendar.month_abbr[1:])
    ax[2].set_xlim(df2['doy'].min() - 3,
                   df2['doy'].max() + 3)
    ax[2].set_ylabel("Probability Density")
    ax[2].grid(True)
    ax[2].set_xlabel("Day of Year, 3 Day Bins")
    return fig, df
# Render with default (empty) form parameters when run directly.
if __name__ == '__main__':
    plotter(dict())
| [
"pyiem.network.Table",
"pandas.io.sql.read_sql",
"pyiem.util.get_dbconn",
"numpy.array",
"pyiem.plot.use_agg.plt.subplots"
] | [((1072, 1090), 'pyiem.util.get_dbconn', 'get_dbconn', (['"""coop"""'], {}), "('coop')\n", (1082, 1090), False, 'from pyiem.util import get_autoplot_context, get_dbconn\n'), ((1256, 1299), 'pyiem.network.Table', 'network.Table', (["('%sCLIMATE' % (station[:2],))"], {}), "('%sCLIMATE' % (station[:2],))\n", (1269, 1299), False, 'from pyiem import network\n'), ((1310, 2440), 'pandas.io.sql.read_sql', 'read_sql', (['(\n """\n WITH obs as (\n SELECT day, year, month, high, low,\n case when month > 6 then \'fall\' else \'spring\' end as season\n from """\n + table +\n """ WHERE station = %s),\n data as (\n SELECT year, day, season,\n max(high) OVER (PARTITION by year, season ORDER by day ASC\n ROWS BETWEEN 183 PRECEDING and CURRENT ROW) as mh,\n min(low) OVER (PARTITION by year, season ORDER by day ASC\n ROWS BETWEEN 183 PRECEDING and CURRENT ROW) as ml\n from obs),\n lows as (\n SELECT year, day, ml as level, season,\n rank() OVER (PARTITION by year, ml ORDER by day ASC) from data\n WHERE season = \'fall\'),\n highs as (\n SELECT year, day, mh as level, season,\n rank() OVER (PARTITION by year, mh ORDER by day ASC) from data\n WHERE season = \'spring\')\n\n (SELECT year, day, extract(doy from day) as doy,\n level, season from lows WHERE rank = 1) UNION\n (SELECT year, day, extract(doy from day) as doy,\n level, season from highs WHERE rank = 1)\n """\n )', 'pgconn'], {'params': '[station]'}), '(\n """\n WITH obs as (\n SELECT day, year, month, high, low,\n case when month > 6 then \'fall\' else \'spring\' end as season\n from """\n + table +\n """ WHERE station = %s),\n data as (\n SELECT year, day, season,\n max(high) OVER (PARTITION by year, season ORDER by day ASC\n ROWS BETWEEN 183 PRECEDING and CURRENT ROW) as mh,\n min(low) OVER (PARTITION by year, season ORDER by day ASC\n ROWS BETWEEN 183 PRECEDING and CURRENT ROW) as ml\n from obs),\n lows as (\n SELECT year, day, ml as level, season,\n rank() OVER (PARTITION by year, ml ORDER by day ASC) from data\n WHERE 
season = \'fall\'),\n highs as (\n SELECT year, day, mh as level, season,\n rank() OVER (PARTITION by year, mh ORDER by day ASC) from data\n WHERE season = \'spring\')\n\n (SELECT year, day, extract(doy from day) as doy,\n level, season from lows WHERE rank = 1) UNION\n (SELECT year, day, extract(doy from day) as doy,\n level, season from highs WHERE rank = 1)\n """\n , pgconn, params=[station])\n', (1318, 2440), False, 'from pandas.io.sql import read_sql\n'), ((2475, 2510), 'pyiem.plot.use_agg.plt.subplots', 'plt.subplots', (['(3)', '(1)'], {'figsize': '(7, 10)'}), '(3, 1, figsize=(7, 10))\n', (2487, 2510), False, 'from pyiem.plot.use_agg import plt\n'), ((3108, 3135), 'numpy.array', 'np.array', (["df2['level']", '"""f"""'], {}), "(df2['level'], 'f')\n", (3116, 3135), True, 'import numpy as np\n'), ((3468, 3493), 'numpy.array', 'np.array', (["df2['doy']", '"""f"""'], {}), "(df2['doy'], 'f')\n", (3476, 3493), True, 'import numpy as np\n')] |
import struct
import os
# Most recently loaded PE image; presumably assigned by loader code
# elsewhere in this module (not visible here) -- confirm.
current_pe = None
class PE:
    """Basic PE (Portable Executable) header parsing.

    Reads just enough of the DOS header, COFF file header and optional
    header to identify the target machine and entry point.

    Ref:
    - https://hshrzd.wordpress.com/pe-bear/
    - https://blog.kowalczyk.info/articles/pefileformat.html
    """
    # IMAGE_FILE_MACHINE_* constants (COFF header "Machine" field).
    X86_64 = 0x8664
    X86_32 = 0x14c
    ARM = 0x1c0
    ARM64 = 0xaa64
    ARMNT = 0x1c4
    AM33 = 0x1d3
    IA64 = 0x200
    EFI = 0xebc
    MIPS = 0x166
    MIPS16 = 0x266
    MIPSFPU = 0x366
    MIPSFPU16 = 0x466
    WCEMIPSV2 = 0x169
    POWERPC = 0x1f0
    POWERPCFP = 0x1f1
    SH3 = 0x1a2
    SH3DSP = 0x1a3
    SH4 = 0x1a6
    SH5 = 0x1a8
    THUMP = 0x1c2
    RISCV32 = 0x5032
    RISCV64 = 0x5064
    RISCV128 = 0x5128
    M32R = 0x9041

    # Default field values; overwritten by __init__ when a file is parsed.
    dos_magic = b'MZ'
    ptr_to_pe_header = None
    pe_magic = b'PE'
    machine = X86_32
    num_of_sections = None
    size_of_opt_header = None
    dll_charac = None
    opt_magic = b'\x02\x0b'
    entry_point = None
    base_of_code = None
    image_base = None

    def __init__(self, pe=""):
        """Parse the headers of the PE file at path `pe`.

        On failure (unreadable file or bad DOS magic) the instance is left
        with `machine` unset/None so `is_valid()`/callers can detect it.
        """
        if not os.access(pe, os.R_OK):
            # err() is provided by the surrounding (gef) environment
            err("'{0}' not found/readable".format(pe))
            err("Failed to get file debug information, most of gef features will not work")
            return
        with open(pe, "rb") as fd:
            # off 0x0: DOS header magic "MZ"
            self.dos_magic = fd.read(2)
            if self.dos_magic != PE.dos_magic:
                self.machine = None
                return

            # off 0x3c: e_lfanew, offset of the PE signature
            fd.seek(0x3c)
            self.ptr_to_pe_header, = struct.unpack("<I", fd.read(4))

            # off_pe + 0x0: "PE" signature
            fd.seek(self.ptr_to_pe_header)
            self.pe_magic = fd.read(2)

            # off_pe + 0x4: COFF header -- machine and section count
            fd.seek(self.ptr_to_pe_header + 0x4)
            self.machine, self.num_of_sections = struct.unpack("<HH", fd.read(4))

            # off_pe + 0x14: optional header size and DLL characteristics
            fd.seek(self.ptr_to_pe_header + 0x14)
            self.size_of_opt_header, self.dll_charac = struct.unpack("<HH", fd.read(4))

            # off_pe + 0x18: optional header magic (0x010b PE32 / 0x020b PE32+)
            self.opt_magic = fd.read(2)

            # off_pe + 0x28: AddressOfEntryPoint and BaseOfCode
            fd.seek(self.ptr_to_pe_header + 0x28)
            self.entry_point, self.base_of_code = struct.unpack("<II", fd.read(8))

            # off_pe + 0x30: ImageBase (PE32 layout)
            self.image_base, = struct.unpack("<I", fd.read(4))
        return

    def is_valid(self):
        """Return True when both the DOS and PE signatures were found.

        BUG FIX: previously compared against `PE.DOS_MAGIC`, which does not
        exist (the class attribute is `dos_magic`), raising AttributeError.
        """
        return self.dos_magic == PE.dos_magic and self.pe_magic == PE.pe_magic

    def get_machine_name(self):
        """Return a human-readable name for the parsed machine constant."""
        return {
            0x14c: "X86",
            0x166: "MIPS",
            0x169: "WCEMIPSV2",
            0x1a2: "SH3",
            0x1a3: "SH3DSP",
            0x1a6: "SH4",
            0x1a8: "SH5",
            0x1c0: "ARM",
            0x1c2: "THUMP",
            0x1c4: "ARMNT",
            0x1d3: "AM33",
            0x1f0: "PowerPC",
            0x1f1: "PowerPCFP",
            0x200: "IA64",
            0x266: "MIPS16",
            0x366: "MIPSFPU",
            0x466: "MIPSFPU16",
            0xebc: "EFI",
            0x5032: "RISCV32",
            0x5064: "RISCV64",
            0x5128: "RISCV128",
            0x8664: "X86_64",
            0x9041: "M32R",
            0xaa64: "ARM64",
            None: None
        }[self.machine]
@lru_cache()
def get_pe_headers(filename=None):
    """Return a PE object with info from `filename`. If not provided, will return
    the currently debugged file. Returns None for remote targets."""
    # default to the binary currently being debugged
    if filename is None:
        filename = get_filepath()

    # remote targets cannot be parsed from the local filesystem
    if filename.startswith("target:"):
        warn("Your file is remote, you should try using `gef-remote` instead")
        return

    return PE(filename)
@lru_cache()
def is_pe64(filename=None):
    """Return True when `filename` (or the currently loaded PE) targets x86-64."""
    header = current_pe or get_pe_headers(filename)
    return header.machine == PE.X86_64
@lru_cache()
def is_pe32(filename=None):
    """Return True when `filename` (or the currently loaded PE) targets 32-bit x86."""
    header = current_pe or get_pe_headers(filename)
    return header.machine == PE.X86_32
"os.access"
] | [((1378, 1400), 'os.access', 'os.access', (['pe', 'os.R_OK'], {}), '(pe, os.R_OK)\n', (1387, 1400), False, 'import os\n')] |
"""
This module is the custom resource used by the MSAM's CloudFormation
templates to populate the web bucket with contents of the MSAM web archive.
"""
# Copyright 2018 Amazon.com, Inc. or its affiliates. All Rights Reserved.
# SPDX-License-Identifier: Apache-2.0
import json
import os
from subprocess import call
import boto3
from botocore.exceptions import ClientError
import resource_tools
# Scratch directory where the MSAM web archive is unpacked before upload.
WEB_FOLDER = "/tmp/msam"
def lambda_handler(event, context):
    """
    Lambda entry point for the CloudFormation custom resource.

    Populates (Create/Update) or empties (Delete) the web bucket, then
    signals the outcome back to CloudFormation via resource_tools.send.

    :param event: CloudFormation custom resource request
    :param context: Lambda context object
    """
    print("Event Input: %s" % json.dumps(event))
    bucket_name = event["ResourceProperties"]["BucketName"]
    result = {'Status': 'SUCCESS', "StackId": event["StackId"], "RequestId": event["RequestId"], "LogicalResourceId": event["LogicalResourceId"], 'Data': {}, 'ResourceId': bucket_name}
    # reuse the physical ID on Update/Delete; derive one on first Create
    if event.get("PhysicalResourceId", False):
        result["PhysicalResourceId"] = event["PhysicalResourceId"]
    else:
        result["PhysicalResourceId"] = "{}-{}".format(resource_tools.stack_name(event), event["LogicalResourceId"])
    try:
        if event["RequestType"] == "Create" or event["RequestType"] == "Update":
            print(event["RequestType"])
            replace_bucket_contents(bucket_name)
        elif event["RequestType"] == "Delete":
            print(event["RequestType"])
            delete_bucket_contents(bucket_name)
    except ClientError as client_error:
        print("Exception: %s" % client_error)
        # BUG FIX: update the existing result in place instead of replacing
        # the whole dict -- the replacement dropped "PhysicalResourceId",
        # which resource_tools.send() reads below, so every failure raised
        # a KeyError and CloudFormation never received a response.
        result['Status'] = 'FAILED'
        result['Data'] = {"Exception": str(client_error)}
        result['ResourceId'] = None

    resource_tools.send(event, context, result['Status'], result['Data'], result["PhysicalResourceId"])
def replace_bucket_contents(bucket_name):
    """
    Empty the specified bucket, then unzip the MSAM web archive into a
    scratch folder and upload its files into the bucket, preserving the
    folder structure.
    """
    client = boto3.client("s3")
    stamp = os.environ["BUILD_STAMP"]
    # NOTE(review): the original code also assembled the archive's source URL
    # (https://{BUCKET_BASENAME}-{AWS_REGION}.s3.amazonaws.com/msam/msam-web-{stamp}.zip)
    # but never used it -- none of the commands below downloads the zip before
    # unzipping. Verify the archive is staged by the deployment package or an
    # earlier step; the dead locals were removed here.
    # empty the bucket
    delete_bucket_contents(bucket_name)
    # extract the archive into a clean scratch folder
    command_list = [
        "rm -f /tmp/msam-web-{stamp}.zip".format(stamp=stamp), "rm -rf {folder}".format(folder=WEB_FOLDER),
        "mkdir {folder}".format(folder=WEB_FOLDER), "unzip msam-web-{stamp}.zip -d {folder}".format(stamp=stamp, folder=WEB_FOLDER), "ls -l {folder}".format(folder=WEB_FOLDER)
    ]
    for command in command_list:
        # call() returns the shell exit status; print it for the CloudWatch log
        print(call(command, shell=True))
    # upload each local file to the bucket, preserve folders
    for dirpath, _, filenames in os.walk(WEB_FOLDER):
        for name in filenames:
            local = "{}/{}".format(dirpath, name)
            remote = local.replace("{}/".format(WEB_FOLDER), "")
            # give browsers a usable Content-Type for the common web assets
            if remote.endswith(".js"):
                content_type = "application/javascript"
            elif remote.endswith(".html"):
                content_type = "text/html"
            else:
                content_type = "binary/octet-stream"
            client.put_object(Bucket=bucket_name, Key=remote, Body=open(local, 'rb'), ContentType=content_type)
def delete_bucket_contents(bucket_name):
    """
    Remove every object currently stored in the specified bucket.
    """
    s3 = boto3.client("s3")
    listing = s3.list_objects_v2(Bucket=bucket_name)
    for item in listing.get("Contents", []):
        s3.delete_object(Bucket=bucket_name, Key=item["Key"])
| [
"boto3.client",
"json.dumps",
"subprocess.call",
"resource_tools.send",
"resource_tools.stack_name",
"os.walk"
] | [((1786, 1889), 'resource_tools.send', 'resource_tools.send', (['event', 'context', "result['Status']", "result['Data']", "result['PhysicalResourceId']"], {}), "(event, context, result['Status'], result['Data'],\n result['PhysicalResourceId'])\n", (1805, 1889), False, 'import resource_tools\n'), ((2098, 2116), 'boto3.client', 'boto3.client', (['"""s3"""'], {}), "('s3')\n", (2110, 2116), False, 'import boto3\n'), ((3011, 3030), 'os.walk', 'os.walk', (['WEB_FOLDER'], {}), '(WEB_FOLDER)\n', (3018, 3030), False, 'import os\n'), ((3732, 3750), 'boto3.client', 'boto3.client', (['"""s3"""'], {}), "('s3')\n", (3744, 3750), False, 'import boto3\n'), ((553, 570), 'json.dumps', 'json.dumps', (['event'], {}), '(event)\n', (563, 570), False, 'import json\n'), ((996, 1028), 'resource_tools.stack_name', 'resource_tools.stack_name', (['event'], {}), '(event)\n', (1021, 1028), False, 'import resource_tools\n'), ((2889, 2914), 'subprocess.call', 'call', (['command'], {'shell': '(True)'}), '(command, shell=True)\n', (2893, 2914), False, 'from subprocess import call\n')] |
# -*- coding: utf-8 -*-
# ------------------------------------------------------------------------------
#
# Copyright 2018-2019 Fetch.AI Limited
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# ------------------------------------------------------------------------------
"""Implementation of the 'aea run' subcommand."""
import sys
from pathlib import Path
from typing import List, Optional
import click
from aea import __version__
from aea.aea import AEA
from aea.aea_builder import AEABuilder
from aea.cli.common import (
AEA_LOGO,
ConnectionsOption,
check_aea_project,
logger,
)
from aea.cli.install import install
from aea.configurations.base import PublicId
from aea.helpers.base import load_env_file
# Current working directory as a string (not used in this module; presumably
# consumed by other CLI modules -- verify before removing).
AEA_DIR = str(Path("."))
def _prepare_environment(click_context, env_file: str, is_install_deps: bool) -> None:
    """
    Prepare the AEA project environment.

    Loads the environment file, then optionally installs the project
    dependencies (from requirements.txt when present).

    :param click_context: the click context
    :param env_file: the path to the environment file.
    :param is_install_deps: whether to install the dependencies
    """
    load_env_file(env_file)
    if not is_install_deps:
        return
    if Path("requirements.txt").exists():
        click_context.invoke(install, requirement="requirements.txt")
    else:
        click_context.invoke(install)
def _build_aea(
    connection_ids: Optional[List[PublicId]], skip_consistency_check: bool
) -> AEA:
    """
    Build the AEA from the project in the current working directory.

    Exits the process with status 1 on any build failure.
    """
    try:
        builder = AEABuilder.from_aea_project(
            Path("."), skip_consistency_check=skip_consistency_check
        )
        return builder.build(connection_ids=connection_ids)
    except Exception as e:
        # TODO use an ad-hoc exception class for predictable errors
        # all the other exceptions should be logged with logger.exception
        logger.error(str(e))
        sys.exit(1)
def _run_aea(aea: AEA) -> None:
    """Start the agent and block until it stops or is interrupted.

    Always announces shutdown and stops the agent, even on error.
    """
    click.echo(AEA_LOGO + "v" + __version__ + "\n")
    click.echo("{} starting ...".format(aea.name))
    try:
        aea.start()
    except KeyboardInterrupt:
        # Ctrl-C is a normal way to stop the agent, not an error
        click.echo(" {} interrupted!".format(aea.name))  # pragma: no cover
    except Exception as e:
        logger.exception(e)
        sys.exit(1)
    finally:
        # runs on every exit path, so the agent is always stopped cleanly
        click.echo("{} stopping ...".format(aea.name))
        aea.stop()
@click.command()
@click.option(
    "--connections",
    "connection_ids",
    cls=ConnectionsOption,
    required=False,
    default=None,
    help="The connection names to use for running the agent. Must be declared in the agent's configuration file.",
)
@click.option(
    "--env",
    "env_file",
    type=click.Path(),
    required=False,
    default=".env",
    help="Specify an environment file (default: .env)",
)
@click.option(
    "--install-deps",
    "is_install_deps",
    is_flag=True,
    required=False,
    default=False,
    help="Install all the dependencies before running the agent.",
)
@click.pass_context
@check_aea_project
def run(
    click_context, connection_ids: List[PublicId], env_file: str, is_install_deps: bool
):
    """Run the agent."""
    # honour the global skip-consistency-check flag stored on the click context
    skip_consistency_check = click_context.obj.config["skip_consistency_check"]
    # load the env file (and optionally install deps) before building,
    # so the build sees the final environment
    _prepare_environment(click_context, env_file, is_install_deps)
    aea = _build_aea(connection_ids, skip_consistency_check)
    _run_aea(aea)
| [
"aea.cli.common.logger.exception",
"pathlib.Path",
"click.option",
"aea.helpers.base.load_env_file",
"click.echo",
"click.Path",
"sys.exit",
"click.command"
] | [((2790, 2805), 'click.command', 'click.command', ([], {}), '()\n', (2803, 2805), False, 'import click\n'), ((2807, 3032), 'click.option', 'click.option', (['"""--connections"""', '"""connection_ids"""'], {'cls': 'ConnectionsOption', 'required': '(False)', 'default': 'None', 'help': '"""The connection names to use for running the agent. Must be declared in the agent\'s configuration file."""'}), '(\'--connections\', \'connection_ids\', cls=ConnectionsOption,\n required=False, default=None, help=\n "The connection names to use for running the agent. Must be declared in the agent\'s configuration file."\n )\n', (2819, 3032), False, 'import click\n'), ((3212, 3379), 'click.option', 'click.option', (['"""--install-deps"""', '"""is_install_deps"""'], {'is_flag': '(True)', 'required': '(False)', 'default': '(False)', 'help': '"""Install all the dependencies before running the agent."""'}), "('--install-deps', 'is_install_deps', is_flag=True, required=\n False, default=False, help=\n 'Install all the dependencies before running the agent.')\n", (3224, 3379), False, 'import click\n'), ((1265, 1274), 'pathlib.Path', 'Path', (['"""."""'], {}), "('.')\n", (1269, 1274), False, 'from pathlib import Path\n'), ((1590, 1613), 'aea.helpers.base.load_env_file', 'load_env_file', (['env_file'], {}), '(env_file)\n', (1603, 1613), False, 'from aea.helpers.base import load_env_file\n'), ((2391, 2438), 'click.echo', 'click.echo', (["(AEA_LOGO + 'v' + __version__ + '\\n')"], {}), "(AEA_LOGO + 'v' + __version__ + '\\n')\n", (2401, 2438), False, 'import click\n'), ((3099, 3111), 'click.Path', 'click.Path', ([], {}), '()\n', (3109, 3111), False, 'import click\n'), ((1985, 1994), 'pathlib.Path', 'Path', (['"""."""'], {}), "('.')\n", (1989, 1994), False, 'from pathlib import Path\n'), ((2341, 2352), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (2349, 2352), False, 'import sys\n'), ((2660, 2679), 'aea.cli.common.logger.exception', 'logger.exception', (['e'], {}), '(e)\n', (2676, 2679), 
False, 'from aea.cli.common import AEA_LOGO, ConnectionsOption, check_aea_project, logger\n'), ((2688, 2699), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (2696, 2699), False, 'import sys\n'), ((1649, 1673), 'pathlib.Path', 'Path', (['"""requirements.txt"""'], {}), "('requirements.txt')\n", (1653, 1673), False, 'from pathlib import Path\n')] |
"""An environment to skip k frames and return a max between the last two."""
import gym
import numpy as np
class MaxFrameskipEnv(gym.Wrapper):
    """An environment to skip k frames and return a max between the last two."""

    def __init__(self, env, skip: int=4) -> None:
        """
        Initialize a new max frame skip env around an existing environment.

        Args:
            env: the environment to wrap around
            skip: the number of frames to skip (i.e. hold an action for)

        Returns:
            None

        """
        gym.Wrapper.__init__(self, env)
        self._skip = skip
        # holds the two most recent raw frames for the max-pool in step()
        self._obs_buffer = np.zeros((2, *env.observation_space.shape), dtype=np.uint8)

    def step(self, action):
        """Repeat action, sum reward, and max over last observations."""
        total_reward = 0.0
        done = None
        for frame_idx in range(self._skip):
            obs, reward, done, info = self.env.step(action)
            total_reward += reward
            # only the last two frames of the skip window are kept
            if frame_idx == self._skip - 2:
                self._obs_buffer[0] = obs
            elif frame_idx == self._skip - 1:
                self._obs_buffer[1] = obs
            # stop holding the action once the episode terminates
            if done:
                break
        # element-wise max over the buffered frames; the observation on the
        # done=True frame doesn't matter (the next state isn't evaluated
        # when done is true)
        return self._obs_buffer.max(axis=0), total_reward, done, info

    def reset(self, **kwargs):
        """Reset the wrapped environment and return its initial observation."""
        return self.env.reset(**kwargs)
# explicitly define the outward facing API of this module
__all__ = [MaxFrameskipEnv.__name__]
| [
"numpy.zeros",
"gym.Wrapper.__init__"
] | [((557, 588), 'gym.Wrapper.__init__', 'gym.Wrapper.__init__', (['self', 'env'], {}), '(self, env)\n', (577, 588), False, 'import gym\n'), ((691, 750), 'numpy.zeros', 'np.zeros', (['(2, *env.observation_space.shape)'], {'dtype': 'np.uint8'}), '((2, *env.observation_space.shape), dtype=np.uint8)\n', (699, 750), True, 'import numpy as np\n')] |
"""Script containing the DeepLoco environments."""
import gym
import numpy as np
import os
import sys
import cv2
try:
sys.path.append(os.path.join(os.environ["TERRAINRL_PATH"], "simAdapter"))
import terrainRLSim # noqa: F401
except (KeyError, ImportError, ModuleNotFoundError):
pass
class BipedalSoccer(gym.Env):
    """Bipedal Soccer environment.

    In this environment, a bipedal agent is placed in an open field with a
    soccer ball. The agent is rewarded for moving to the ball, and additionally
    dribbling the ball to the target. The reward function is a weighted sum of
    the agent's distance from the ball and the distance of the ball from a
    desired goal position. This reward is positive to discourage the agent from
    falling prematurely.

    Attributes
    ----------
    wrapped_env : gym.Env
        the original environment, which add more dimensions than wanted here
    """

    def __init__(self):
        """Instantiate the environment."""
        self.wrapped_env = terrainRLSim.getEnv(
            "PD-Biped3D-HLC-Soccer-v1", render=False)

        # Add the time horizon.
        self.horizon = 512

    @property
    def observation_space(self):
        """See parent class."""
        return self.wrapped_env.observation_space

    @property
    def action_space(self):
        """See parent class."""
        return self.wrapped_env.action_space

    def step(self, action):
        """See parent class."""
        # the wrapped terrainRLSim env appears to consume/produce batches of
        # size one, hence the wrapping of the action and the [0]-indexing of
        # the results -- TODO confirm against terrainRLSim's API
        obs, rew, done, info = self.wrapped_env.step(np.array([action]))
        return obs[0], rew[0][0], done, info

    def reset(self):
        """See parent class."""
        return self.wrapped_env.reset()[0]

    def render(self, mode='human'):
        """See parent class."""
        return self.wrapped_env.render(mode=mode)
class BipedalObstacles(gym.Env):
    """Bipedal Obstacles environment.

    In this environment, a bipedal agent is placed in an open field with
    obstacles scattered throughout the world. The goal of the agent is to
    walk around the world and reach a goal position.

    The last two entries of the wrapped observation hold the goal context
    (see `context_space` and `current_context`); they are stripped from the
    observations returned by `step`/`reset`.

    Attributes
    ----------
    wrapped_env : gym.Env
        the original environment, which add more dimensions than wanted here
    """

    def __init__(self, render):
        """Instantiate the environment.

        Parameters
        ----------
        render : bool
            whether to render the environment
        """
        # step counter used to enforce the time horizon
        self.t = 0
        if render:
            self.wrapped_env = gym.make("PD-Biped3D-HLC-Obstacles-render-v2")
        else:
            self.wrapped_env = gym.make("PD-Biped3D-HLC-Obstacles-v2")

        # Add the time horizon.
        self.horizon = 2000

    @property
    def observation_space(self):
        """See parent class."""
        # bounds are scaled by 20 -- presumably to de-normalize the wrapped
        # env's unit-range bounds; TODO confirm against terrainRLSim
        return gym.spaces.Box(
            low=20 * self.wrapped_env.observation_space.low[:-2],
            high=20 * self.wrapped_env.observation_space.high[:-2],
            dtype=np.float32)

    @property
    def context_space(self):
        """See parent class."""
        # the final two observation entries carry the goal context
        return gym.spaces.Box(
            low=20 * self.wrapped_env.observation_space.low[-2:],
            high=20 * self.wrapped_env.observation_space.high[-2:],
            dtype=np.float32)

    @property
    def action_space(self):
        """See parent class."""
        return self.wrapped_env.action_space

    @property
    def current_context(self):
        """See parent class."""
        return self.wrapped_env.env.getObservation()[-2:]

    def step(self, action):
        """See parent class."""
        self.t += 1
        obs, rew, done, info = self.wrapped_env.step(action)
        # terminate once the time horizon is exhausted
        done = done or self.t >= self.horizon
        return obs[:-2], rew, done, info

    def reset(self):
        """See parent class."""
        self.t = 0
        return self.wrapped_env.reset()[:-2]

    def render(self, mode='human'):
        """See parent class."""
        image = self.wrapped_env.env.render(
            headless_step=True)
        if mode == 'human':
            # flip vertically and reverse the channel order before display
            f = np.flip(image.astype(np.float32) / 255.0, axis=0)
            f = np.flip(f, axis=2)
            cv2.imshow("PD-Biped3D-HLC-Obstacles-v2", f)
            cv2.waitKey(1)
        elif mode == 'rgb_array':
            return image
| [
"numpy.flip",
"terrainRLSim.getEnv",
"os.path.join",
"gym.spaces.Box",
"cv2.imshow",
"numpy.array",
"cv2.waitKey",
"gym.make"
] | [((139, 195), 'os.path.join', 'os.path.join', (["os.environ['TERRAINRL_PATH']", '"""simAdapter"""'], {}), "(os.environ['TERRAINRL_PATH'], 'simAdapter')\n", (151, 195), False, 'import os\n'), ((1016, 1077), 'terrainRLSim.getEnv', 'terrainRLSim.getEnv', (['"""PD-Biped3D-HLC-Soccer-v1"""'], {'render': '(False)'}), "('PD-Biped3D-HLC-Soccer-v1', render=False)\n", (1035, 1077), False, 'import terrainRLSim\n'), ((2762, 2909), 'gym.spaces.Box', 'gym.spaces.Box', ([], {'low': '(20 * self.wrapped_env.observation_space.low[:-2])', 'high': '(20 * self.wrapped_env.observation_space.high[:-2])', 'dtype': 'np.float32'}), '(low=20 * self.wrapped_env.observation_space.low[:-2], high=\n 20 * self.wrapped_env.observation_space.high[:-2], dtype=np.float32)\n', (2776, 2909), False, 'import gym\n'), ((3033, 3180), 'gym.spaces.Box', 'gym.spaces.Box', ([], {'low': '(20 * self.wrapped_env.observation_space.low[-2:])', 'high': '(20 * self.wrapped_env.observation_space.high[-2:])', 'dtype': 'np.float32'}), '(low=20 * self.wrapped_env.observation_space.low[-2:], high=\n 20 * self.wrapped_env.observation_space.high[-2:], dtype=np.float32)\n', (3047, 3180), False, 'import gym\n'), ((1515, 1533), 'numpy.array', 'np.array', (['[action]'], {}), '([action])\n', (1523, 1533), True, 'import numpy as np\n'), ((2474, 2520), 'gym.make', 'gym.make', (['"""PD-Biped3D-HLC-Obstacles-render-v2"""'], {}), "('PD-Biped3D-HLC-Obstacles-render-v2')\n", (2482, 2520), False, 'import gym\n'), ((2566, 2605), 'gym.make', 'gym.make', (['"""PD-Biped3D-HLC-Obstacles-v2"""'], {}), "('PD-Biped3D-HLC-Obstacles-v2')\n", (2574, 2605), False, 'import gym\n'), ((4073, 4091), 'numpy.flip', 'np.flip', (['f'], {'axis': '(2)'}), '(f, axis=2)\n', (4080, 4091), True, 'import numpy as np\n'), ((4104, 4148), 'cv2.imshow', 'cv2.imshow', (['"""PD-Biped3D-HLC-Obstacles-v2"""', 'f'], {}), "('PD-Biped3D-HLC-Obstacles-v2', f)\n", (4114, 4148), False, 'import cv2\n'), ((4161, 4175), 'cv2.waitKey', 'cv2.waitKey', (['(1)'], {}), '(1)\n', 
(4172, 4175), False, 'import cv2\n')] |
# -*- coding:utf-8 -*-
'''
Created on 2015年3月2日
@author: wanhao01
'''
import sys
from crawler.minispider import logerror
import main
# Python 2 idiom: reload() restores sys.setdefaultencoding (deleted by
# site.py at startup) so utf-8 can be forced as the default codec.
reload(sys)
sys.setdefaultencoding('utf-8')

if __name__ == '__main__':
    try:
        main.main()
    except Exception as exception:
        # best-effort: log the failure instead of letting it escape the entry point
        logerror("error during running, details: " + str(exception))
        # redundant after logerror(), kept as-is
        pass
"main.main",
"sys.setdefaultencoding"
] | [((164, 195), 'sys.setdefaultencoding', 'sys.setdefaultencoding', (['"""utf-8"""'], {}), "('utf-8')\n", (186, 195), False, 'import sys\n'), ((245, 256), 'main.main', 'main.main', ([], {}), '()\n', (254, 256), False, 'import main\n')] |
import types
from tf.advanced.app import App
MODIFIERS = """
remark folio note ref emph und super special q num den
""".strip().split()
def fmt_layoutFull(app, n, **kwargs):
    """Render node *n* from the base trans/punc features (kind code "")."""
    return app._wrapHtml(n, ("",))


def fmt_layoutRemarks(app, n, **kwargs):
    """Render node *n* from the remarks features only (kind code "r")."""
    return app._wrapHtml(n, ("r",))


def fmt_layoutNotes(app, n, **kwargs):
    """Render node *n* from the notes features only (kind code "n")."""
    return app._wrapHtml(n, ("n",))


def fmt_layoutOrig(app, n, **kwargs):
    """Render node *n* from the original-text features only (kind code "o")."""
    return app._wrapHtml(n, ("o",))


def fmt_layoutNoRemarks(app, n, **kwargs):
    """Render node *n* from everything except the remarks features."""
    return app._wrapHtml(n, ("o", "n"))


def fmt_layoutNoNotes(app, n, **kwargs):
    """Render node *n* from everything except the notes features."""
    return app._wrapHtml(n, ("o", "r"))


def fmt_layoutNonOrig(app, n, **kwargs):
    """Render node *n* from everything except the original-text features."""
    return app._wrapHtml(n, ("r", "n"))
# Node type names used when walking the locality graph in TfApp._wrapHtml.
NOTE = "note"
WORD = "word"
class TfApp(App):
    def __init__(app, *args, **kwargs):
        """Install the custom layout text formats as bound methods on the app."""
        app.fmt_layoutFull = types.MethodType(fmt_layoutFull, app)
        app.fmt_layoutRemarks = types.MethodType(fmt_layoutRemarks, app)
        app.fmt_layoutNotes = types.MethodType(fmt_layoutNotes, app)
        app.fmt_layoutOrig = types.MethodType(fmt_layoutOrig, app)
        app.fmt_layoutNoRemarks = types.MethodType(fmt_layoutNoRemarks, app)
        app.fmt_layoutNoNotes = types.MethodType(fmt_layoutNoNotes, app)
        app.fmt_layoutNonOrig = types.MethodType(fmt_layoutNonOrig, app)
        super().__init__(*args, **kwargs)

    def _wrapHtml(app, n, kinds):
        """Render word node `n` as HTML from the feature layers named in `kinds`.

        `kinds` is a tuple of kind codes ("", "o", "r", "n"); the trans/punc
        feature values for each code are concatenated in order.
        """
        api = app.api
        F = api.F
        Fs = api.Fs
        L = api.L
        preNote = ""
        postNote = ""
        # add note boundary markers only when the full or notes layer is shown
        if "" in kinds or "n" in kinds:
            notes = L.u(n, otype=NOTE)
            if notes:
                note = notes[0]
                mark = F.mark.v(note)
                noteWords = L.d(note, otype=WORD)
                firstWord = noteWords[0]
                lastWord = noteWords[-1]
                # open the marker at the note's first word, close at its last
                if firstWord == n:
                    preNote = f"«{mark}= "
                if lastWord == n:
                    postNote = f" ={mark}»"
        # concatenate the text and trailing punctuation of every requested layer;
        # missing feature values render as ""
        material = "".join(Fs(f"trans{kind}").v(n) or "" for kind in kinds)
        after = "".join(Fs(f"punc{kind}").v(n) or "" for kind in kinds)
        material = f"{preNote}{material}{after}{postNote}"
        # collect the CSS classes of all modifier flags set on this word
        clses = " ".join(
            cf for cf in MODIFIERS if (fscf := Fs(f"is{cf}")) and fscf.v(n)
        )
        if clses:
            material = f'<span class="{clses}">{material}</span>'
        return material
| [
"types.MethodType"
] | [((819, 856), 'types.MethodType', 'types.MethodType', (['fmt_layoutFull', 'app'], {}), '(fmt_layoutFull, app)\n', (835, 856), False, 'import types\n'), ((889, 929), 'types.MethodType', 'types.MethodType', (['fmt_layoutRemarks', 'app'], {}), '(fmt_layoutRemarks, app)\n', (905, 929), False, 'import types\n'), ((960, 998), 'types.MethodType', 'types.MethodType', (['fmt_layoutNotes', 'app'], {}), '(fmt_layoutNotes, app)\n', (976, 998), False, 'import types\n'), ((1028, 1065), 'types.MethodType', 'types.MethodType', (['fmt_layoutOrig', 'app'], {}), '(fmt_layoutOrig, app)\n', (1044, 1065), False, 'import types\n'), ((1100, 1142), 'types.MethodType', 'types.MethodType', (['fmt_layoutNoRemarks', 'app'], {}), '(fmt_layoutNoRemarks, app)\n', (1116, 1142), False, 'import types\n'), ((1175, 1215), 'types.MethodType', 'types.MethodType', (['fmt_layoutNoNotes', 'app'], {}), '(fmt_layoutNoNotes, app)\n', (1191, 1215), False, 'import types\n'), ((1248, 1288), 'types.MethodType', 'types.MethodType', (['fmt_layoutNonOrig', 'app'], {}), '(fmt_layoutNonOrig, app)\n', (1264, 1288), False, 'import types\n')] |
import hashlib
import logging
import os
import shutil
import traceback
from contextlib import closing
from pywb.utils.loaders import BlockLoader
from webrecorder.rec.storage.base import BaseStorage
from webrecorder.rec.storage.storagepaths import add_local_store_prefix, strip_prefix
logger = logging.getLogger('wr.io')
# ============================================================================
class DirectLocalFileStorage(BaseStorage):
    """Webrecorder storage (local files)."""
    def __init__(self):
        """Initialize Webrecorder storage rooted at $STORAGE_ROOT."""
        super(DirectLocalFileStorage, self).__init__(os.environ['STORAGE_ROOT'])

    def delete_collection_dir(self, dir_path):
        """Delete collection directory.

        :param str dir_path: directory path

        :returns: whether successful or not
        :rtype: bool
        """
        local_dir = os.path.join(self.storage_root, dir_path)
        try:
            logger.debug('Local Store: Deleting Directory: ' + local_dir)
            parent_dir = os.path.dirname(local_dir)
            shutil.rmtree(local_dir)
            # also remove now-empty ancestors of the collection directory
            os.removedirs(parent_dir)
            return True
        except Exception as e:
            # BUG FIX: non-OSError exceptions have no `errno`, so the old
            # `e.errno` access raised AttributeError inside the handler.
            # errno 2 (ENOENT) -- already gone -- is not worth logging.
            if getattr(e, 'errno', None) != 2:
                logger.error(str(e))
            return False

    def do_upload(self, target_url, full_filename):
        """Upload file into local file storage.

        :param str target_url: target URL
        :param str full_filename: path

        :returns: whether successful or not
        :rtype: bool
        """
        os.makedirs(os.path.dirname(target_url), exist_ok=True)

        try:
            if full_filename != target_url:
                shutil.copyfile(full_filename, target_url)
            else:
                # source and destination are the same file; nothing to copy
                logger.debug('Local Store: Same File, No Upload')
            return True
        except Exception as e:
            logger.error(str(e))
            return False

    def is_valid_url(self, target_url):
        """Return whether given target URL is an existing file.

        :param str target_url: target URL

        :returns: whether given target URL is an existing file
        :rtype: bool
        """
        return os.path.isfile(target_url)

    def get_client_url(self, target_url):
        """Get client URL.

        :param str target_url: target URL

        :returns: client URL
        :rtype: str
        """
        return add_local_store_prefix(target_url.replace(os.path.sep, '/'))

    def client_url_to_target_url(self, client_url):
        """Get target URL (from client URL).

        :param str client_url: client URL

        :returns: target URL
        :rtype: str
        """
        return strip_prefix(client_url)

    def do_delete(self, target_url, client_url):
        """Delete file from storage.

        :param str target_url: target URL

        :returns: whether successful or not
        :rtype: bool
        """
        try:
            logger.debug('Local Store: Deleting: ' + target_url)
            os.remove(target_url)
            return True
        except Exception as e:
            # BUG FIX: same guarded errno access as delete_collection_dir --
            # a non-OSError exception used to raise AttributeError here.
            if getattr(e, 'errno', None) != 2:
                logger.error(str(e))
            return False
# ============================================================================
class LocalFileStorage(DirectLocalFileStorage):
    """Webrecorder storage w/ Redis interface (local files).

    Deletions are delegated to background workers via Redis pub/sub
    channels rather than performed inline.

    :ivar StrictRedis redis: Redis interface
    """
    def __init__(self, redis):
        """Initialize Webrecorder storage w/ Redis interface.

        :param StrictRedis redis: Redis interface
        """
        self.redis = redis
        super(LocalFileStorage, self).__init__()

    ### BEGIN PERMA CUSTOMIZATIONS
    ### First pass at https://github.com/harvard-lil/perma/issues/2614
    def delete_collection(self, collection):
        """Delete collection.

        :param collection: collection
        :type: n.s.

        :returns: whether successful or not
        :rtype: bool
        """
        path = collection.get_dir_path()
        if path:
            try:
                dirpath = os.path.join(self.storage_root, path)
                # success means at least one subscriber received the request
                return (self.redis.publish('handle_delete_dir', dirpath) > 0)
            except Exception:
                logger.error("Failed attempt to delete collection {}".format(collection), exc_info=True)
                return False
        return False
    ### END PERMA CUSTOMIZATIONS

    def do_delete(self, target_url, client_url):
        """Delete file.

        :param str target_url: target URL
        :param str client_url: client URL (unused argument)

        :returns: whether successful or not
        :rtype: bool
        """
        # delegated to a worker; > 0 means a subscriber picked it up
        return self.redis.publish('handle_delete_file', target_url) > 0

    def get_checksum_and_size(self, filepath_or_url):
        """Returns the checksum of the supplied URL or filepath and the size of the resource

        :param str filepath_or_url: The URL or filepath to the resource that the checksum and size is desired for
        :return: A three tuple containing the kind of checksum, the checksum itself, and size
        :rtype: tuple[str|None, str|None, int|None]
        """
        m = hashlib.md5()
        # stream in 1 MiB chunks so arbitrarily large files fit in memory
        amount = 1024 * 1024
        total_size = 0
        with closing(BlockLoader().load(filepath_or_url)) as f:
            while True:
                chunk = f.read(amount)
                chunk_size = len(chunk)
                if chunk_size == 0:
                    break
                total_size += chunk_size
                m.update(chunk)
        return 'md5', m.hexdigest(), total_size
| [
"logging.getLogger",
"hashlib.md5",
"pywb.utils.loaders.BlockLoader",
"os.path.join",
"webrecorder.rec.storage.storagepaths.strip_prefix",
"os.path.isfile",
"os.path.dirname",
"shutil.copyfile",
"os.removedirs",
"shutil.rmtree",
"os.remove"
] | [((296, 322), 'logging.getLogger', 'logging.getLogger', (['"""wr.io"""'], {}), "('wr.io')\n", (313, 322), False, 'import logging\n'), ((875, 916), 'os.path.join', 'os.path.join', (['self.storage_root', 'dir_path'], {}), '(self.storage_root, dir_path)\n', (887, 916), False, 'import os\n'), ((2178, 2204), 'os.path.isfile', 'os.path.isfile', (['target_url'], {}), '(target_url)\n', (2192, 2204), False, 'import os\n'), ((2674, 2698), 'webrecorder.rec.storage.storagepaths.strip_prefix', 'strip_prefix', (['client_url'], {}), '(client_url)\n', (2686, 2698), False, 'from webrecorder.rec.storage.storagepaths import add_local_store_prefix, strip_prefix\n'), ((5275, 5288), 'hashlib.md5', 'hashlib.md5', ([], {}), '()\n', (5286, 5288), False, 'import hashlib\n'), ((1030, 1056), 'os.path.dirname', 'os.path.dirname', (['local_dir'], {}), '(local_dir)\n', (1045, 1056), False, 'import os\n'), ((1069, 1093), 'shutil.rmtree', 'shutil.rmtree', (['local_dir'], {}), '(local_dir)\n', (1082, 1093), False, 'import shutil\n'), ((1106, 1131), 'os.removedirs', 'os.removedirs', (['parent_dir'], {}), '(parent_dir)\n', (1119, 1131), False, 'import os\n'), ((1559, 1586), 'os.path.dirname', 'os.path.dirname', (['target_url'], {}), '(target_url)\n', (1574, 1586), False, 'import os\n'), ((2997, 3018), 'os.remove', 'os.remove', (['target_url'], {}), '(target_url)\n', (3006, 3018), False, 'import os\n'), ((1677, 1719), 'shutil.copyfile', 'shutil.copyfile', (['full_filename', 'target_url'], {}), '(full_filename, target_url)\n', (1692, 1719), False, 'import shutil\n'), ((4181, 4218), 'os.path.join', 'os.path.join', (['self.storage_root', 'path'], {}), '(self.storage_root, path)\n', (4193, 4218), False, 'import os\n'), ((5362, 5375), 'pywb.utils.loaders.BlockLoader', 'BlockLoader', ([], {}), '()\n', (5373, 5375), False, 'from pywb.utils.loaders import BlockLoader\n')] |
# This file is part of OctoBot (https://github.com/Drakkar-Software/OctoBot)
# Copyright (c) 2021 Drakkar-Software, All rights reserved.
#
# OctoBot is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either
# version 3.0 of the License, or (at your option) any later version.
#
# OctoBot is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public
# License along with OctoBot. If not, see <https://www.gnu.org/licenses/>.
import pytest
import time
import octobot.community as community
ERROR_TITLE = "An error happened"
ERROR_METRICS_ID = "1254xyz"
ERROR_TIME = time.time()
UPLOADER_URL = "http://upload_url"
@pytest.fixture
def basic_error():
return community.Error(
None,
ERROR_TITLE,
ERROR_TIME,
ERROR_METRICS_ID
)
@pytest.fixture
def exception_error():
# generated exception with traceback
return community.Error(
_get_exception(),
ERROR_TITLE,
ERROR_TIME,
ERROR_METRICS_ID
)
@pytest.fixture
def error_uploader():
return community.ErrorsUploader(UPLOADER_URL)
def _get_exception():
def fake3():
1/0
def fake2():
fake3()
def fake_func():
fake2()
try:
fake_func()
except ZeroDivisionError as err:
return err
| [
"octobot.community.ErrorsUploader",
"time.time",
"octobot.community.Error"
] | [((915, 926), 'time.time', 'time.time', ([], {}), '()\n', (924, 926), False, 'import time\n'), ((1010, 1074), 'octobot.community.Error', 'community.Error', (['None', 'ERROR_TITLE', 'ERROR_TIME', 'ERROR_METRICS_ID'], {}), '(None, ERROR_TITLE, ERROR_TIME, ERROR_METRICS_ID)\n', (1025, 1074), True, 'import octobot.community as community\n'), ((1372, 1410), 'octobot.community.ErrorsUploader', 'community.ErrorsUploader', (['UPLOADER_URL'], {}), '(UPLOADER_URL)\n', (1396, 1410), True, 'import octobot.community as community\n')] |
import connexion
import six
from ga4ghtest.models import Plugin # noqa: E501
from ga4ghtest import util
from ga4ghtest.core.controllers import plugins_controller as controller
def create_plugin(
body
): # noqa: E501
"""Create a test plugin
Add a plugin for testing functionality of an API. # noqa: E501
:param body:
:type body: dict | bytes
:rtype: str
"""
if connexion.request.is_json:
body = Plugin.from_dict(connexion.request.get_json()) # noqa: E501
return controller.create_plugin(
body=body
)
def get_plugins(
sort_by='created_at',
order='desc',
limit=3
): # noqa: E501
"""Get test plugins
Get the list of available test plugins. # noqa: E501
:param sort_by: logic by which to sort matched records
:type sort_by: str
:param order: sort order (ascending or descending)
:type order: str
:param limit: maximum number of records to return
:type limit: int
:rtype: str
"""
return controller.get_plugins(
sort_by=sort_by,
order=order,
limit=limit
)
| [
"connexion.request.get_json",
"ga4ghtest.core.controllers.plugins_controller.get_plugins",
"ga4ghtest.core.controllers.plugins_controller.create_plugin"
] | [((515, 550), 'ga4ghtest.core.controllers.plugins_controller.create_plugin', 'controller.create_plugin', ([], {'body': 'body'}), '(body=body)\n', (539, 550), True, 'from ga4ghtest.core.controllers import plugins_controller as controller\n'), ((1010, 1075), 'ga4ghtest.core.controllers.plugins_controller.get_plugins', 'controller.get_plugins', ([], {'sort_by': 'sort_by', 'order': 'order', 'limit': 'limit'}), '(sort_by=sort_by, order=order, limit=limit)\n', (1032, 1075), True, 'from ga4ghtest.core.controllers import plugins_controller as controller\n'), ((460, 488), 'connexion.request.get_json', 'connexion.request.get_json', ([], {}), '()\n', (486, 488), False, 'import connexion\n')] |
import os
from split_settings.tools import include, optional
ENVIRONMENT = os.getenv('DJANGO_ENV') or 'development'
include(
# Load environment settings
'base/env.py',
optional('local/env.py'), # We can "patch" any settings from local folder env.py file.
# Here we should have the order because of dependencies
'base/paths.py',
'base/apps.py',
'base/middleware.py',
# Load all other settings
'base/*.py',
# Select the right env:
'environments/%s.py' % ENVIRONMENT,
optional('local/*.py'), # we can load any other settings from local folder
) | [
"os.getenv",
"split_settings.tools.optional"
] | [((76, 99), 'os.getenv', 'os.getenv', (['"""DJANGO_ENV"""'], {}), "('DJANGO_ENV')\n", (85, 99), False, 'import os\n'), ((182, 206), 'split_settings.tools.optional', 'optional', (['"""local/env.py"""'], {}), "('local/env.py')\n", (190, 206), False, 'from split_settings.tools import include, optional\n'), ((520, 542), 'split_settings.tools.optional', 'optional', (['"""local/*.py"""'], {}), "('local/*.py')\n", (528, 542), False, 'from split_settings.tools import include, optional\n')] |
# -*- coding: utf-8 -*-
'''
Utility methods for dictionary
==============================
'''
__all__ = (
'calling_dict_from',
'combine_dict',
'dict_sorted')
from itertools import chain
from typing import Tuple
from builder.utils import assertion
def calling_dict_from(calling: (str, dict), name: str) -> dict:
''' Construct a calling dictionary for Person class.
'''
from builder.utils.util_str import dict_from_string
tmp = {}
if isinstance(calling, dict):
tmp = calling
else:
tmp = dict_from_string(assertion.is_str(calling), ':')
me = tmp['me'] if 'me' in tmp else '私'
return combine_dict(tmp, {'S': name, 'M': me})
def combine_dict(a: dict, b: dict) -> dict:
''' Combine one dictionary from two dictionaries.
'''
return {**assertion.is_dict(a), **assertion.is_dict(b)}
def dict_sorted(origin: dict, is_reverse: bool=False) -> dict:
''' Sort dictionary.
'''
return dict(
sorted(assertion.is_dict(origin).items(),
key=lambda x:x[0], reverse=assertion.is_bool(is_reverse)))
| [
"builder.utils.assertion.is_bool",
"builder.utils.assertion.is_dict",
"builder.utils.assertion.is_str"
] | [((818, 838), 'builder.utils.assertion.is_dict', 'assertion.is_dict', (['a'], {}), '(a)\n', (835, 838), False, 'from builder.utils import assertion\n'), ((842, 862), 'builder.utils.assertion.is_dict', 'assertion.is_dict', (['b'], {}), '(b)\n', (859, 862), False, 'from builder.utils import assertion\n'), ((570, 595), 'builder.utils.assertion.is_str', 'assertion.is_str', (['calling'], {}), '(calling)\n', (586, 595), False, 'from builder.utils import assertion\n'), ((1072, 1101), 'builder.utils.assertion.is_bool', 'assertion.is_bool', (['is_reverse'], {}), '(is_reverse)\n', (1089, 1101), False, 'from builder.utils import assertion\n'), ((998, 1023), 'builder.utils.assertion.is_dict', 'assertion.is_dict', (['origin'], {}), '(origin)\n', (1015, 1023), False, 'from builder.utils import assertion\n')] |
from flask import Flask
from flask_socketio import SocketIO
from flask_sqlalchemy import SQLAlchemy
UPLOAD_FOLDER = 'uploads'
ALLOWED_EXTENSIONS = {'png', 'jpg', 'jpeg', 'gif'}
app = Flask(__name__)
app.config['SQLALCHEMY_DATABASE_URI'] = 'sqlite:////tmp/dev.db'
app.config['UPLOAD_FOLDER'] = UPLOAD_FOLDER
app.secret_key = "<KEY>"
socketio = SocketIO(app)
db = SQLAlchemy(app)
from tinder.routes import *
| [
"flask_sqlalchemy.SQLAlchemy",
"flask_socketio.SocketIO",
"flask.Flask"
] | [((185, 200), 'flask.Flask', 'Flask', (['__name__'], {}), '(__name__)\n', (190, 200), False, 'from flask import Flask\n'), ((346, 359), 'flask_socketio.SocketIO', 'SocketIO', (['app'], {}), '(app)\n', (354, 359), False, 'from flask_socketio import SocketIO\n'), ((365, 380), 'flask_sqlalchemy.SQLAlchemy', 'SQLAlchemy', (['app'], {}), '(app)\n', (375, 380), False, 'from flask_sqlalchemy import SQLAlchemy\n')] |
import numpy as np
class Loss():
def output_gradient(self):
return
class MSE(Loss):
def __call__(self, predicted, labels):
return 0.5 * np.square(predicted - labels)
def output_gradient(self, predicted, labels):
return predicted - labels
class BinaryCrossEntropy(Loss):
def __call__(self, predicted, labels):
return - np.nan_to_num((labels*np.log(predicted) + (1-labels)*np.log(1-predicted)))
def output_gradient(self, predicted, labels):
return np.nan_to_num(-(labels/predicted) + (1-labels)/(1-predicted))
class CategoricalCrossEntropy(Loss):
def __call__(self, predicted, labels):
return -np.nan_to_num(np.sum(labels*np.log(predicted), axis=0, keepdims=True))
def output_gradient(self, predicted, labels):
return -np.nan_to_num(labels/predicted)
| [
"numpy.log",
"numpy.nan_to_num",
"numpy.square"
] | [((512, 581), 'numpy.nan_to_num', 'np.nan_to_num', (['(-(labels / predicted) + (1 - labels) / (1 - predicted))'], {}), '(-(labels / predicted) + (1 - labels) / (1 - predicted))\n', (525, 581), True, 'import numpy as np\n'), ((163, 192), 'numpy.square', 'np.square', (['(predicted - labels)'], {}), '(predicted - labels)\n', (172, 192), True, 'import numpy as np\n'), ((809, 842), 'numpy.nan_to_num', 'np.nan_to_num', (['(labels / predicted)'], {}), '(labels / predicted)\n', (822, 842), True, 'import numpy as np\n'), ((393, 410), 'numpy.log', 'np.log', (['predicted'], {}), '(predicted)\n', (399, 410), True, 'import numpy as np\n'), ((424, 445), 'numpy.log', 'np.log', (['(1 - predicted)'], {}), '(1 - predicted)\n', (430, 445), True, 'import numpy as np\n'), ((699, 716), 'numpy.log', 'np.log', (['predicted'], {}), '(predicted)\n', (705, 716), True, 'import numpy as np\n')] |
"""
This file is part of pynadc
https://github.com/rmvanhees/pynadc
Methods to query the NADC Sciamachy SQLite database
Copyright (c) 2012-2021 SRON - Netherlands Institute for Space Research
All Rights Reserved
License: BSD-3-Clause
"""
from pathlib import Path
import sqlite3
# --------------------------------------------------
def get_product_by_name(args=None, dbname=None, product=None,
to_screen=False, dump=False, debug=False):
"""
Query NADC Sciamachy SQLite database on product name
Input
-----
args : dictionary with keys dbname, product, to_screen, dump, debug
dbname : full path to Sciamachy SQLite database
product : name of product [value required]
to_screen : print query result to standard output [default: False]
dump : return database content about product, instead of full-path
debug : do not query data base, but display SQL query [default: False]
Output
------
return full-path to product [default]
or show database content about product
"""
if args:
dbname = args.dbname
product = args.product
dump = args.dump
debug = args.debug
if dbname is None:
print('Fatal, SQLite database is not specified')
return []
if not Path(dbname).is_file():
print('Fatal, can not find SQLite database: %s' % dbname)
return []
if product[0:10] == 'SCI_NL__0P':
table = 'meta__0P'
elif product[0:10] == 'SCI_NL__1P':
table = 'meta__1P'
else:
table = 'meta__2P'
if dump:
select_str = '*'
else:
select_str = 'path,name,compression'
query_str = 'select {} from {} where name=\'{}\''.format(select_str,
table,
product)
# pylint: disable=no-member
conn = sqlite3.connect(dbname)
if dump:
conn.row_factory = sqlite3.Row
cur = conn.cursor()
if debug:
print(query_str)
conn.close()
return []
cur.execute(query_str)
row = cur.fetchone()
if row is None:
conn.close()
return []
if to_screen:
if dump:
for name in row.keys():
print(name, '\t', row[name])
else:
if row[2] == 0:
print(Path(*row[:-1]))
else:
print(Path(*row[:-1]).with_suffix('.gz'))
if dump:
return row
if row[2] == 0:
return str(Path(*row[:-1]))
return str(Path(*row[:-1]).with_suffix('.gz'))
# --------------------------------------------------
def get_product_by_type(args=None, dbname=None, prod_type=None,
proc_stage=None, proc_best=None,
orbits=None, date=None, rtime=None,
to_screen=False, dump=False, debug=False):
"""
Query NADC Sciamachy SQLite database on product type with data selections
Input
-----
args : dictionary with keys dbname, type, proc, best, orbit, date,
rtime, to_screen, dump, debug
dbname : full path to Sciamachy SQLite database
prod_type : level of product, available 0, 1, 2 [value required]
prod_stage ; baseline of product (PROC_STAGE): N, R, P, R, U, W, ...
[default: None]
prod_best ; select highest available baseline [default: None]
orbit : select on absolute orbit number [default: None]
date : select on dateTimeStart [default: None]
rtime : select on receiveTime [default: None]
to_screen : print query result to standard output [default: False]
debug : do not query data base, but display SQL query [default: False]
Output
------
return full-path to selected products [default]
"""
if args:
dbname = args.dbname
prod_type = args.type
proc_stage = args.proc
proc_best = args.best
orbits = args.orbit
date = args.date
rtime = args.rtime
dump = args.dump
debug = args.debug
if dbname is None:
print('Fatal, SQLite database is not specified')
return []
if not Path(dbname).is_file():
print('Fatal, can not find SQLite database: %s' % dbname)
return []
if dump:
query_str = ['select * from meta__%sP' % prod_type]
else:
query_str = ['select path,name,compression from meta__%sP' % prod_type]
if proc_best:
if prod_type == '0':
query_str.append(' as s1 join (select absOrbit,MAX(q_flag)')
query_str.append(' as qflag from meta__%sP' % prod_type)
else:
query_str.append(' as s1 join (select absOrbit,MAX(procStage)')
query_str.append(' as proc from meta__%sP' % prod_type)
if orbits:
if ' where' not in query_str:
query_str.append(' where')
else:
query_str.append(' and')
if len(orbits) == 1:
mystr = ' absOrbit=%-d' % orbits[0]
else:
mystr = ' absOrbit between %-d and %-d' % (orbits[0], orbits[1])
query_str.append(mystr)
if proc_stage:
if ' where' not in query_str:
query_str.append(' where')
else:
query_str.append(' and')
mystr = ' procStage in ('
for _c in proc_stage:
if mystr[-1] != '(':
mystr += ','
mystr += '\'' + _c + '\''
mystr += ')'
query_str.append(mystr)
if date:
if ' where' not in query_str:
query_str.append(' where')
else:
query_str.append(' and')
dtime = '+1 second'
year = int(date[0:4])
dtime = '+1 year'
if len(date) >= 6:
month = int(date[4:6])
dtime = '+1 month'
else:
month = 1
if len(date) >= 8:
day = int(date[6:8])
dtime = '+1 day'
else:
day = 1
if len(date) >= 10:
hour = int(date[8:10])
dtime = '+1 hour'
else:
hour = 0
if len(date) >= 12:
minu = int(date[10:12])
dtime = '+1 minute'
else:
minu = 0
_d1 = '{:04d}-{:02d}-{:02d} {:02d}:{:02d}:{:02d}'.format(
year, month, day, hour, minu, 0)
mystr = ' dateTimeStart between \'%s\' and datetime(\'%s\',\'%s\')'
query_str.append(mystr % (_d1, _d1, dtime))
if rtime:
if ' where' not in query_str:
query_str.append(' where')
else:
query_str.append(' and')
mystr = ' receiveDate between datetime(\'now\',\'-%-d %s\')' \
+ ' and datetime(\'now\')'
if rtime[-1] == 'h':
query_str.append(mystr % (int(rtime[0:-1]), 'hour'))
else:
query_str.append(mystr % (int(rtime[0:-1]), 'day'))
if proc_best:
query_str.append(' GROUP by absOrbit) as s2 on')
query_str.append(' s1.absOrbit=s2.absOrbit')
if prod_type == '0':
query_str.append(' and s1.q_flag=s2.qflag')
else:
query_str.append(' and s1.procStage=s2.proc')
else:
query_str.append(' order by absOrbit ASC, procStage DESC')
if debug:
print(''.join(query_str))
return []
# pylint: disable=no-member
row_list = []
conn = sqlite3.connect(dbname)
if dump:
conn.row_factory = sqlite3.Row
cur = conn.cursor()
cur.execute(''.join(query_str))
for row in cur:
if to_screen:
if dump:
print(row)
else:
if row[2] == 0:
print(Path(*row[:-1]))
else:
print(Path(*row[:-1]).with_suffix('.gz'))
else:
if dump:
row_list.append(row)
else:
if row[2] == 0:
row_list.append(str(Path(*row[:-1])))
else:
row_list.append(str(Path(*row[:-1]).with_suffix('.gz')))
conn.close()
return row_list
| [
"sqlite3.connect",
"pathlib.Path"
] | [((1944, 1967), 'sqlite3.connect', 'sqlite3.connect', (['dbname'], {}), '(dbname)\n', (1959, 1967), False, 'import sqlite3\n'), ((7502, 7525), 'sqlite3.connect', 'sqlite3.connect', (['dbname'], {}), '(dbname)\n', (7517, 7525), False, 'import sqlite3\n'), ((2581, 2596), 'pathlib.Path', 'Path', (['*row[:-1]'], {}), '(*row[:-1])\n', (2585, 2596), False, 'from pathlib import Path\n'), ((1317, 1329), 'pathlib.Path', 'Path', (['dbname'], {}), '(dbname)\n', (1321, 1329), False, 'from pathlib import Path\n'), ((2614, 2629), 'pathlib.Path', 'Path', (['*row[:-1]'], {}), '(*row[:-1])\n', (2618, 2629), False, 'from pathlib import Path\n'), ((4272, 4284), 'pathlib.Path', 'Path', (['dbname'], {}), '(dbname)\n', (4276, 4284), False, 'from pathlib import Path\n'), ((2415, 2430), 'pathlib.Path', 'Path', (['*row[:-1]'], {}), '(*row[:-1])\n', (2419, 2430), False, 'from pathlib import Path\n'), ((7804, 7819), 'pathlib.Path', 'Path', (['*row[:-1]'], {}), '(*row[:-1])\n', (7808, 7819), False, 'from pathlib import Path\n'), ((2472, 2487), 'pathlib.Path', 'Path', (['*row[:-1]'], {}), '(*row[:-1])\n', (2476, 2487), False, 'from pathlib import Path\n'), ((8067, 8082), 'pathlib.Path', 'Path', (['*row[:-1]'], {}), '(*row[:-1])\n', (8071, 8082), False, 'from pathlib import Path\n'), ((7869, 7884), 'pathlib.Path', 'Path', (['*row[:-1]'], {}), '(*row[:-1])\n', (7873, 7884), False, 'from pathlib import Path\n'), ((8147, 8162), 'pathlib.Path', 'Path', (['*row[:-1]'], {}), '(*row[:-1])\n', (8151, 8162), False, 'from pathlib import Path\n')] |
from pycord.discord.ext import commands
import pycord.discord as discord
from pycord.discord import Embed
import requests
import json
from discord import Embed
class Data(commands.Cog):
def __init__(self, bot) -> None:
self.bot: commands.Bot = bot
@commands.command()
async def ping(self, ctx):
await ctx.send(f"My Ping: {round(self.bot.latency * 1000)}ws")
@commands.command()
async def discordstatus(self, ctx):
res = requests.get('https://discordstatus.com/metrics-display/5k2rt9f7pmny/day.json')
data = json.loads(res.text)
latency = round(data['summary']['mean'])
embed = Embed(
description=f"Current: {latency}",
)
await ctx.send(embed=embed)
def setup(bot):
bot.add_cog(Data(bot))
| [
"requests.get",
"json.loads",
"discord.Embed",
"pycord.discord.ext.commands.command"
] | [((259, 277), 'pycord.discord.ext.commands.command', 'commands.command', ([], {}), '()\n', (275, 277), False, 'from pycord.discord.ext import commands\n'), ((378, 396), 'pycord.discord.ext.commands.command', 'commands.command', ([], {}), '()\n', (394, 396), False, 'from pycord.discord.ext import commands\n'), ((445, 524), 'requests.get', 'requests.get', (['"""https://discordstatus.com/metrics-display/5k2rt9f7pmny/day.json"""'], {}), "('https://discordstatus.com/metrics-display/5k2rt9f7pmny/day.json')\n", (457, 524), False, 'import requests\n'), ((536, 556), 'json.loads', 'json.loads', (['res.text'], {}), '(res.text)\n', (546, 556), False, 'import json\n'), ((615, 655), 'discord.Embed', 'Embed', ([], {'description': 'f"""Current: {latency}"""'}), "(description=f'Current: {latency}')\n", (620, 655), False, 'from discord import Embed\n')] |
#!/usr/bin/python3
from SetupRunDirectory import verifyDirectoryFiles, setupRunDirectory
from CleanupRunDirectory import cleanUpRunDirectory
from RunAssembly import verifyConfigFiles, verifyFastaFiles, runAssembly, initializeAssembler
from SaveRun import saveRun
import configparser
from datetime import datetime
from shutil import copyfile
import subprocess
import signal
import traceback
import argparse
import sys
import gc
import os
def getDatetimeString():
"""
Generate a datetime string. Useful for making output folders names that never conflict.
"""
now = datetime.now()
now = [now.year, now.month, now.day, now.hour, now.minute, now.second, now.microsecond]
datetimeString = "_".join(list(map(str, now)))
return datetimeString
def ensureDirectoryExists(directoryPath, i=0):
"""
Recursively test directories in a directory path and generate missing directories as needed
:param directoryPath:
:return:
"""
if i > 3:
print("WARNING: generating subdirectories of depth %d, please verify path is correct: %s" % (i, directoryPath))
if not os.path.exists(directoryPath):
try:
os.mkdir(directoryPath)
except FileNotFoundError:
ensureDirectoryExists(os.path.dirname(directoryPath), i=i + 1)
if not os.path.exists(directoryPath):
os.mkdir(directoryPath)
def overrideDefaultConfig(config, args):
"""
Check all the possible params to see if the user provided an override value, and add any overrides
to their appropriate location in the config dictionary
"""
if args.minReadLength is not None:
config["Reads"]["minReadLength"] = str(args.minReadLength)
if args.k is not None:
config["Kmers"]["k"] = str(args.k)
if args.probability is not None:
config["Kmers"]["probability"] = str(args.probability)
if args.m is not None:
config["MinHash"]["m"] = str(args.m)
if args.minHashIterationCount is not None:
config["MinHash"]["minHashIterationCount"] = str(args.minHashIterationCount)
if args.maxBucketSize is not None:
config["MinHash"]["maxBucketSize"] = str(args.maxBucketSize)
if args.minFrequency is not None:
config["MinHash"]["minFrequency"] = str(args.minFrequency)
if args.maxSkip is not None:
config["Align"]["maxSkip"] = str(args.maxSkip)
if args.maxMarkerFrequency is not None:
config["Align"]["maxMarkerFrequency"] = str(args.maxMarkerFrequency)
if args.minAlignedMarkerCount is not None:
config["Align"]["minAlignedMarkerCount"] = str(args.minAlignedMarkerCount)
if args.maxTrim is not None:
config["Align"]["maxTrim"] = str(args.maxTrim)
if args.minComponentSize is not None:
config["ReadGraph"]["minComponentSize"] = str(args.minComponentSize)
if args.maxChimericReadDistance is not None:
config["ReadGraph"]["maxChimericReadDistance"] = str(args.maxChimericReadDistance)
if args.minCoverage is not None:
config["MarkerGraph"]["minCoverage"] = str(args.minCoverage)
if args.maxCoverage is not None:
config["MarkerGraph"]["maxCoverage"] = str(args.maxCoverage)
if args.lowCoverageThreshold is not None:
config["MarkerGraph"]["lowCoverageThreshold"] = str(args.lowCoverageThreshold)
if args.highCoverageThreshold is not None:
config["MarkerGraph"]["highCoverageThreshold"] = str(args.highCoverageThreshold)
if args.maxDistance is not None:
config["MarkerGraph"]["maxDistance"] = str(args.maxDistance)
if args.pruneIterationCount is not None:
config["MarkerGraph"]["pruneIterationCount"] = str(args.pruneIterationCount)
if args.markerGraphEdgeLengthThresholdForConsensus is not None:
config["Assembly"]["markerGraphEdgeLengthThresholdForConsensus"] = str(
args.markerGraphEdgeLengthThresholdForConsensus)
if args.consensusCaller is not None:
config["Assembly"]["consensusCaller"] = str(args.consensusCaller) + "ConsensusCaller"
if args.useMarginPhase is not None:
config["Assembly"]["useMarginPhase"] = str(args.useMarginPhase)
if args.storeCoverageData is not None:
config["Assembly"]["storeCoverageData"] = str(args.storeCoverageData)
return config
def main(readsSequencePath, outputParentDirectory, Data, largePagesMountPoint, processHandler, savePageMemory, performPageCleanUp, args):
if not os.path.exists(readsSequencePath):
raise Exception("ERROR: input file not found: %s" % readsSequencePath)
# Make sure given sequence file path is absolute, because CWD will be changed later
readsSequencePath = os.path.abspath(readsSequencePath)
# Generate output directory to run shasta in
outputDirectoryName = "run_" + getDatetimeString()
outputDirectory = os.path.abspath(os.path.join(outputParentDirectory, outputDirectoryName))
ensureDirectoryExists(outputDirectory)
# Locate path of default configuration files relative to this script's "binary" file.
# Use of realpath is needed to make sure symbolic links are resolved.
scriptPath = os.path.dirname(os.path.realpath(__file__))
confDirectory = os.path.join(os.path.dirname(scriptPath), "conf")
defaultConfFilename = "shasta.conf"
defaultConfPath = os.path.join(confDirectory, defaultConfFilename)
localConfPath = os.path.join(outputDirectory, "shasta.conf")
# Parse config file to fill in default parameters
config = configparser.ConfigParser()
if not config.read(defaultConfPath):
raise Exception("Error reading config file %s." % defaultConfPath)
# Check if any params were specified by user and override the default config
config = overrideDefaultConfig(config, args)
# Write updated config file to output directory so RunAssembly.py can be called as a separate process
with open(localConfPath, "w") as file:
config.write(file)
# Add bayesian params file to the output directory if needed
if args.consensusCaller == "SimpleBayesian":
defaultMatrixPath = os.path.join(confDirectory, "SimpleBayesianConsensusCaller-1.csv")
localMatrixPath = os.path.join(outputDirectory, "SimpleBayesianConsensusCaller.csv")
copyfile(defaultMatrixPath, localMatrixPath)
# Add marginphase params file to the output directory if needed
if args.useMarginPhase:
defaultParamsPath = os.path.join(confDirectory, "MarginPhase-allParams.np.json")
localParamsPath = os.path.join(outputDirectory, "MarginPhase.json")
copyfile(defaultParamsPath, localParamsPath)
# Setup run directory according to SetupRunDirectory.py
verifyDirectoryFiles(runDirectory=outputDirectory)
setupRunDirectory(runDirectory=outputDirectory)
# Ensure prerequisite files are present
verifyConfigFiles(parentDirectory=outputDirectory)
verifyFastaFiles(fastaFileNames=[readsSequencePath])
# Set current working directory to the output dir
os.chdir(outputDirectory)
# Launch assembler as a separate process using the saved (updated) config file
executablePath = os.path.join(scriptPath, "RunAssembly.py")
arguments = [executablePath, readsSequencePath]
processHandler.launchProcess(arguments=arguments, working_directory=outputDirectory, wait=True)
# Save page memory to disk so it can be reused during RunServerFromDisk
if savePageMemory:
saveRun(outputDirectory)
if performPageCleanUp:
sys.stderr.write("Cleaning up page memory...")
cleanUpRunDirectory(requireUserInput=False)
sys.stderr.write("\rCleaning up page memory... Done\n")
class ProcessHandler:
def __init__(self, Data, largePagesMountPoint, process=None):
self.process = process
self.Data = Data
self.largePagesMountPoint = largePagesMountPoint
def launchProcess(self, arguments, working_directory, wait):
if self.process is None:
self.process = subprocess.Popen(arguments, cwd=working_directory)
if wait:
self.process.wait()
else:
exit("ERROR: process already launched")
def handleExit(self, signum, frame):
"""
Method to be called at (early) termination. By default, the native "signal" handler passes 2 arguments signum
and frame
:param signum:
:param frame:
:return:
"""
pass
if self.process is not None:
self.process.kill() # kill or terminate?
gc.collect()
self.cleanup()
def cleanup(self):
sys.stderr.write("\nERROR: script terminated or interrupted\n")
sys.stderr.write("Cleaning up page memory...")
cleanUpRunDirectory(requireUserInput=False)
sys.stderr.write("\rCleaning up page memory... Done\n")
exit(1)
def stringAsBool(s):
s = s.lower()
boolean = None
if s in {"t", "true", "1", "y", "yes"}:
boolean = True
elif s in {"f", "false", "0", "n", "no"}:
boolean = False
else:
exit("Error: invalid argument specified for boolean flag: %s"%s)
return boolean
if __name__ == "__main__":
parser = argparse.ArgumentParser()
parser.register("type", "bool", stringAsBool) # add type keyword to registries
parser.add_argument(
"--inputSequences",
type=str,
required=True,
help="File path of FASTQ or FASTA sequence file containing sequences for assembly"
)
parser.add_argument(
"--savePageMemory",
type="bool",
# default=10,
required=False,
help="Save page memory to disk before clearing the ephemeral page data. \n \
Convenient for post-assembly analysis using RunServerFromDisk.py. \n\n \
Any case insensitive variant of the following is accepted: \n \
t, true, 1, y, yes, f, false, 0, n, no"
)
parser.add_argument(
"--performPageCleanUp",
type="bool",
default="True",
required=False,
help="Whether to perform post-assembly cleanup of page files. \n \
Any case insensitive variant of the following is accepted: \n \
t, true, 1, y, yes, f, false, 0, n, no"
)
parser.add_argument(
"--storeCoverageData",
type="bool",
# default=10,
required=False,
help="Whether to store read-level data: observed bases and run lengths. \n \
Any case insensitive variant of the following is accepted: \n \
t, true, 1, y, yes, f, false, 0, n, no"
)
parser.add_argument(
"--outputDir",
type=str,
default="./output/",
required=False,
help="Desired output directory path (will be created during run time if doesn't exist)"
)
parser.add_argument(
"--minReadLength",
type=int,
# default=1000,
required=False,
help="The minimum read length. Reads shorter than this are skipped on input."
)
parser.add_argument(
"--k",
type=int,
# default=10,
required=False,
help="The length of the k-mers used as markers. \n"
)
parser.add_argument(
"--probability",
type=float,
# default=0.1,
required=False,
help="The probability that a k-mer is a marker. \n \
This is approximately equal to the fraction\n \
of k-mers that will be used as markers."
)
parser.add_argument(
"--m",
type=int,
# default=4,
required=False,
help="The number of consecutive markers that define a MinHash feature."
)
parser.add_argument(
"--minHashIterationCount",
type=int,
# default=100,
required=False,
help="The number of MinHash iterations."
)
parser.add_argument(
"--maxBucketSize",
type=int,
# default=30,
required=False,
help="The maximum bucket size to be used by the MinHash algoritm. \n \
Buckets larger than this are ignored."
)
parser.add_argument(
"--minFrequency",
type=int,
# default=1,
required=False,
help="The minimum number of times a pair of oriented reads \n \
is found by the MinHash algorithm for the pair to \n \
generate an overlap."
)
parser.add_argument(
"--maxSkip",
type=int,
# default=30,
required=False,
help="The maximum number of markers that an alignment is allowed\n \
to skip on either of the oriented reads being aligned."
)
parser.add_argument(
"--maxMarkerFrequency",
type=int,
# default=10,
required=False,
help="Marker frequency threshold. \n \
When computing an alignment between two oriented reads, \n \
marker kmers that appear more than this number of times \n \
in either of the two oriented reads are discarded \n \
(in both oriented reads)."
)
parser.add_argument(
"--minAlignedMarkerCount",
type=int,
# default=100,
required=False,
help="The minimum number of aligned markers in an alignment \n \
in order for the alignment to be considered good and usable."
)
parser.add_argument(
"--maxTrim",
type=int,
# default=30,
required=False,
help="The maximum number of trim markers tolerated at the \n \
beginning and end of an alignment. There can be \n \
up this number of markers between the first/last aligned marker \n \
and the beginning/end of either oriented read \n \
for an alignment to be considered good and usable."
)
parser.add_argument(
"--minComponentSize",
type=int,
# default=100,
required=False,
help="The minimum size (number of oriented reads) of \n \
a connected component to be kept."
)
parser.add_argument(
"--maxChimericReadDistance",
type=int,
# default=2,
required=False,
help="Argument maxChimericReadDistance for flagChimericReads."
)
parser.add_argument(
"--minCoverage",
type=int,
# default=10,
required=False,
help="The minimum and maximum coverage (number of markers) \n \
for a vertex of the marker graph. \n \
Vertices with coverage outside this range are collapsed \n \
away and not generated by computeMarkerGraphVertices."
)
parser.add_argument(
"--maxCoverage",
type=int,
# default=100,
required=False,
help="The minimum and maximum coverage (number of markers) \n \
for a vertex of the marker graph. \n \
Vertices with coverage outside this range are collapsed \n \
away and not generated by computeMarkerGraphVertices."
)
parser.add_argument(
"--lowCoverageThreshold",
type=int,
# default=1,
required=False,
help="Parameters for flagMarkerGraphWeakEdges."
)
parser.add_argument(
"--highCoverageThreshold",
type=int,
# default=1000,
required=False,
help="Parameters for flagMarkerGraphWeakEdges."
)
parser.add_argument(
"--maxDistance",
type=int,
# default=30,
required=False,
help="Parameters for flagMarkerGraphWeakEdges."
)
parser.add_argument(
"--pruneIterationCount",
type=int,
# default=6,
required=False,
help="Number of iterations for pruneMarkerGraphStrongSubgraph."
)
parser.add_argument(
"--markerGraphEdgeLengthThresholdForConsensus",
type=int,
# default=100,
required=False,
help="Used during sequence assembly."
)
parser.add_argument(
"--consensusCaller",
type=str,
required=False,
choices=["Simple", "SimpleBayesian", "Median"],
help="Whether to use Bayesian inference on read lengths during consensus calling"
)
parser.add_argument(
"--useMarginPhase",
type="bool",
# default=True,
required=False,
help="Use margin polisher during consensus. \n\n \
Any case insensitive variant of the following is accepted: \n \
t, true, 1, y, yes, f, false, 0, n, no"
)
args = parser.parse_args()
# Assign default paths for page data
largePagesMountPoint = "/hugepages"
Data = os.path.join(largePagesMountPoint, "Data")
# Initialize a class to deal with the subprocess that is opened for the assembler
processHandler = ProcessHandler(Data=Data, largePagesMountPoint=largePagesMountPoint)
# Setup termination handling to deallocate large page memory, unmount on-disk page data, and delete disk data
# This is done by mapping the signal handler to the member function of an instance of ProcessHandler
signal.signal(signal.SIGTERM, processHandler.handleExit)
signal.signal(signal.SIGINT, processHandler.handleExit)
main(readsSequencePath=args.inputSequences,
outputParentDirectory=args.outputDir,
largePagesMountPoint=largePagesMountPoint,
Data=Data,
args=args,
processHandler=processHandler,
savePageMemory=args.savePageMemory,
performPageCleanUp=args.performPageCleanUp)
| [
"configparser.ConfigParser",
"SaveRun.saveRun",
"RunAssembly.verifyFastaFiles",
"os.path.exists",
"SetupRunDirectory.verifyDirectoryFiles",
"SetupRunDirectory.setupRunDirectory",
"argparse.ArgumentParser",
"subprocess.Popen",
"CleanupRunDirectory.cleanUpRunDirectory",
"os.mkdir",
"os.path.dirnam... | [((584, 598), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (596, 598), False, 'from datetime import datetime\n'), ((4709, 4743), 'os.path.abspath', 'os.path.abspath', (['readsSequencePath'], {}), '(readsSequencePath)\n', (4724, 4743), False, 'import os\n'), ((5347, 5395), 'os.path.join', 'os.path.join', (['confDirectory', 'defaultConfFilename'], {}), '(confDirectory, defaultConfFilename)\n', (5359, 5395), False, 'import os\n'), ((5416, 5460), 'os.path.join', 'os.path.join', (['outputDirectory', '"""shasta.conf"""'], {}), "(outputDirectory, 'shasta.conf')\n", (5428, 5460), False, 'import os\n'), ((5529, 5556), 'configparser.ConfigParser', 'configparser.ConfigParser', ([], {}), '()\n', (5554, 5556), False, 'import configparser\n'), ((6717, 6767), 'SetupRunDirectory.verifyDirectoryFiles', 'verifyDirectoryFiles', ([], {'runDirectory': 'outputDirectory'}), '(runDirectory=outputDirectory)\n', (6737, 6767), False, 'from SetupRunDirectory import verifyDirectoryFiles, setupRunDirectory\n'), ((6772, 6819), 'SetupRunDirectory.setupRunDirectory', 'setupRunDirectory', ([], {'runDirectory': 'outputDirectory'}), '(runDirectory=outputDirectory)\n', (6789, 6819), False, 'from SetupRunDirectory import verifyDirectoryFiles, setupRunDirectory\n'), ((6869, 6919), 'RunAssembly.verifyConfigFiles', 'verifyConfigFiles', ([], {'parentDirectory': 'outputDirectory'}), '(parentDirectory=outputDirectory)\n', (6886, 6919), False, 'from RunAssembly import verifyConfigFiles, verifyFastaFiles, runAssembly, initializeAssembler\n'), ((6924, 6976), 'RunAssembly.verifyFastaFiles', 'verifyFastaFiles', ([], {'fastaFileNames': '[readsSequencePath]'}), '(fastaFileNames=[readsSequencePath])\n', (6940, 6976), False, 'from RunAssembly import verifyConfigFiles, verifyFastaFiles, runAssembly, initializeAssembler\n'), ((7040, 7065), 'os.chdir', 'os.chdir', (['outputDirectory'], {}), '(outputDirectory)\n', (7048, 7065), False, 'import os\n'), ((7171, 7213), 'os.path.join', 
'os.path.join', (['scriptPath', '"""RunAssembly.py"""'], {}), "(scriptPath, 'RunAssembly.py')\n", (7183, 7213), False, 'import os\n'), ((9301, 9326), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {}), '()\n', (9324, 9326), False, 'import argparse\n'), ((16856, 16898), 'os.path.join', 'os.path.join', (['largePagesMountPoint', '"""Data"""'], {}), "(largePagesMountPoint, 'Data')\n", (16868, 16898), False, 'import os\n'), ((17300, 17356), 'signal.signal', 'signal.signal', (['signal.SIGTERM', 'processHandler.handleExit'], {}), '(signal.SIGTERM, processHandler.handleExit)\n', (17313, 17356), False, 'import signal\n'), ((17361, 17416), 'signal.signal', 'signal.signal', (['signal.SIGINT', 'processHandler.handleExit'], {}), '(signal.SIGINT, processHandler.handleExit)\n', (17374, 17416), False, 'import signal\n'), ((1115, 1144), 'os.path.exists', 'os.path.exists', (['directoryPath'], {}), '(directoryPath)\n', (1129, 1144), False, 'import os\n'), ((4482, 4515), 'os.path.exists', 'os.path.exists', (['readsSequencePath'], {}), '(readsSequencePath)\n', (4496, 4515), False, 'import os\n'), ((4887, 4943), 'os.path.join', 'os.path.join', (['outputParentDirectory', 'outputDirectoryName'], {}), '(outputParentDirectory, outputDirectoryName)\n', (4899, 4943), False, 'import os\n'), ((5186, 5212), 'os.path.realpath', 'os.path.realpath', (['__file__'], {}), '(__file__)\n', (5202, 5212), False, 'import os\n'), ((5247, 5274), 'os.path.dirname', 'os.path.dirname', (['scriptPath'], {}), '(scriptPath)\n', (5262, 5274), False, 'import os\n'), ((6124, 6190), 'os.path.join', 'os.path.join', (['confDirectory', '"""SimpleBayesianConsensusCaller-1.csv"""'], {}), "(confDirectory, 'SimpleBayesianConsensusCaller-1.csv')\n", (6136, 6190), False, 'import os\n'), ((6217, 6283), 'os.path.join', 'os.path.join', (['outputDirectory', '"""SimpleBayesianConsensusCaller.csv"""'], {}), "(outputDirectory, 'SimpleBayesianConsensusCaller.csv')\n", (6229, 6283), False, 'import os\n'), ((6292, 6336), 
'shutil.copyfile', 'copyfile', (['defaultMatrixPath', 'localMatrixPath'], {}), '(defaultMatrixPath, localMatrixPath)\n', (6300, 6336), False, 'from shutil import copyfile\n'), ((6462, 6522), 'os.path.join', 'os.path.join', (['confDirectory', '"""MarginPhase-allParams.np.json"""'], {}), "(confDirectory, 'MarginPhase-allParams.np.json')\n", (6474, 6522), False, 'import os\n'), ((6549, 6598), 'os.path.join', 'os.path.join', (['outputDirectory', '"""MarginPhase.json"""'], {}), "(outputDirectory, 'MarginPhase.json')\n", (6561, 6598), False, 'import os\n'), ((6607, 6651), 'shutil.copyfile', 'copyfile', (['defaultParamsPath', 'localParamsPath'], {}), '(defaultParamsPath, localParamsPath)\n', (6615, 6651), False, 'from shutil import copyfile\n'), ((7475, 7499), 'SaveRun.saveRun', 'saveRun', (['outputDirectory'], {}), '(outputDirectory)\n', (7482, 7499), False, 'from SaveRun import saveRun\n'), ((7536, 7582), 'sys.stderr.write', 'sys.stderr.write', (['"""Cleaning up page memory..."""'], {}), "('Cleaning up page memory...')\n", (7552, 7582), False, 'import sys\n'), ((7591, 7634), 'CleanupRunDirectory.cleanUpRunDirectory', 'cleanUpRunDirectory', ([], {'requireUserInput': '(False)'}), '(requireUserInput=False)\n', (7610, 7634), False, 'from CleanupRunDirectory import cleanUpRunDirectory\n'), ((7643, 7698), 'sys.stderr.write', 'sys.stderr.write', (["'\\rCleaning up page memory... Done\\n'"], {}), "('\\rCleaning up page memory... 
Done\\n')\n", (7659, 7698), False, 'import sys\n'), ((8686, 8751), 'sys.stderr.write', 'sys.stderr.write', (['"""\nERROR: script terminated or interrupted\n"""'], {}), '("""\nERROR: script terminated or interrupted\n""")\n', (8702, 8751), False, 'import sys\n'), ((8759, 8805), 'sys.stderr.write', 'sys.stderr.write', (['"""Cleaning up page memory..."""'], {}), "('Cleaning up page memory...')\n", (8775, 8805), False, 'import sys\n'), ((8814, 8857), 'CleanupRunDirectory.cleanUpRunDirectory', 'cleanUpRunDirectory', ([], {'requireUserInput': '(False)'}), '(requireUserInput=False)\n', (8833, 8857), False, 'from CleanupRunDirectory import cleanUpRunDirectory\n'), ((8866, 8921), 'sys.stderr.write', 'sys.stderr.write', (["'\\rCleaning up page memory... Done\\n'"], {}), "('\\rCleaning up page memory... Done\\n')\n", (8882, 8921), False, 'import sys\n'), ((1171, 1194), 'os.mkdir', 'os.mkdir', (['directoryPath'], {}), '(directoryPath)\n', (1179, 1194), False, 'import os\n'), ((8041, 8091), 'subprocess.Popen', 'subprocess.Popen', (['arguments'], {'cwd': 'working_directory'}), '(arguments, cwd=working_directory)\n', (8057, 8091), False, 'import subprocess\n'), ((8605, 8617), 'gc.collect', 'gc.collect', ([], {}), '()\n', (8615, 8617), False, 'import gc\n'), ((1264, 1294), 'os.path.dirname', 'os.path.dirname', (['directoryPath'], {}), '(directoryPath)\n', (1279, 1294), False, 'import os\n'), ((1325, 1354), 'os.path.exists', 'os.path.exists', (['directoryPath'], {}), '(directoryPath)\n', (1339, 1354), False, 'import os\n'), ((1372, 1395), 'os.mkdir', 'os.mkdir', (['directoryPath'], {}), '(directoryPath)\n', (1380, 1395), False, 'import os\n')] |
"""
You work for a retail store that wants to increase sales on Tuesday and
Wednesday, which are the store's slowest sales days. On Tuesday and
Wednesday, if a customer's subtotal is greater than $50, the store will
discount the customer's purchase by 10%.
"""
# Import the datetime module so that
# it can be used in this program.
from datetime import datetime
# The discount rate is 10% and the sales tax rate is 6%.
DISC_RATE = 0.10
SALES_TAX_RATE = 0.06

subtotal = 0
done = False
while not done:
    # Read price/quantity pairs until the user types the sentinel "done".
    text = input("Please enter the price: ")
    if text.lower() == "done":
        done = True
    else:
        price = float(text)
        # Get the quantity for this line item from the user.
        # (Fixed typo in the prompt: "Plesae" -> "Please".)
        quantity = int(input("Please enter the quantity: "))
        subtotal += price * quantity

# Print a blank line to separate the entry prompts from the receipt.
print()

# Round the subtotal to two digits after the decimal and print it.
# All money amounts are displayed with exactly two decimal places
# so the receipt formatting is consistent.
subtotal = round(subtotal, 2)
print(f"Subtotal: {subtotal:.2f}")
print()

# Call the now() method to get the current date and time as a
# datetime object from the computer's clock, then isoweekday() to
# get the day of the week (Monday == 1 ... Sunday == 7).
current_date_and_time = datetime.now()
weekday = current_date_and_time.isoweekday()

# On Tuesday (2) or Wednesday (3), subtotals of $50 or more earn a
# 10% discount; otherwise tell the customer how much more to add.
if weekday == 2 or weekday == 3:
    if subtotal < 50:
        insufficient = 50 - subtotal
        print(f"To receive the discount, add {insufficient:.2f} to your order.")
    else:
        discount = round(subtotal * DISC_RATE, 2)
        print(f"Discount amount: {discount:.2f}")
        subtotal -= discount

# Compute the sales tax. Notice that we compute the sales tax after
# computing the discount because the customer does not pay sales tax
# on the full price but on the discounted price.
sales_tax = round(subtotal * SALES_TAX_RATE, 2)
print(f"Sales tax amount: {sales_tax:.2f}")

# Compute the total by adding the subtotal and the sales tax,
# then display it for the user.
total = subtotal + sales_tax
print(f"Total: {total:.2f}")
| [
"datetime.datetime.now"
] | [((1143, 1157), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (1155, 1157), False, 'from datetime import datetime\n')] |
# Copyright (c) 2021, NVIDIA CORPORATION. All rights reserved.
#
# NVIDIA CORPORATION and its licensors retain all intellectual property
# and proprietary rights in and to this software, related documentation
# and any modifications thereto. Any use, reproduction, disclosure or
# distribution of this software and related documentation without an express
# license agreement from NVIDIA CORPORATION is strictly prohibited.
import random
import numpy as np
import torch
import torch.nn.functional as F
from torch_utils import misc, training_stats
from torch_utils.ops import conv2d_gradfix
#----------------------------------------------------------------------------
class Loss:
    """Abstract base class for training losses.

    Concrete subclasses implement ``accumulate_gradients`` to compute the
    loss terms for one phase and call ``backward()`` on them.
    """

    def accumulate_gradients(self, phase, real_img, real_c, gen_z, gen_c, sync, gain):
        """Compute and back-propagate the losses for one training phase.

        Must be overridden by subclasses; the base implementation is abstract.
        """
        raise NotImplementedError()
#----------------------------------------------------------------------------
class StyleGANVLoss(Loss):
    """StyleGAN-V training loss (non-saturating GAN loss with video terms).

    Extends the standard StyleGAN2-style loss (non-saturating logistic loss,
    path-length regularization for G, R1 regularization for D) with optional
    video-level components:
      * a video-level discriminator logit (``'video_logits'`` in D's output),
      * a motion-diversity regularizer on the generator's motion encoder,
      * an auxiliary head predicting the temporal distance between frames.
    """

    def __init__(self, device, G_mapping, G_synthesis, D, augment_pipe=None, style_mixing_prob=0,
        r1_gamma=0, pl_batch_shrink=2, pl_decay=0.01, pl_weight=2, video_consistent_aug=True,
        sync_batch_start_time=False, motion_reg=0, motion_reg_num_frames=128, motion_reg_batch_size=256,
        predict_dists_weight=0):
        """Store the sub-networks and loss hyper-parameters.

        Args:
            device: torch device used for buffers created by this loss.
            G_mapping, G_synthesis, D: generator mapping/synthesis networks
                and the discriminator.
            augment_pipe: optional ADA-style augmentation pipeline; None
                disables augmentation.
            style_mixing_prob: probability of mixing two latent codes in run_G.
            r1_gamma: R1 regularization weight (0 disables the Dr1 phase).
            pl_batch_shrink, pl_decay, pl_weight: path-length regularization
                settings (pl_weight == 0 disables the Gpl phase).
            video_consistent_aug: if True, apply the same augmentation to all
                frames of a video by stacking them along the channel dim.
            sync_batch_start_time: False, or one of 'random'/'zero'/'min' to
                shift all videos in a batch to a common start time.
            motion_reg: weight of the motion-diversity regularizer (0 = off).
            motion_reg_num_frames, motion_reg_batch_size: sampling sizes used
                when computing the motion regularizer.
            predict_dists_weight: weight of the auxiliary frame-distance
                prediction loss (0 = off).
        """
        super().__init__()
        self.device = device
        self.G_mapping = G_mapping
        self.G_synthesis = G_synthesis
        self.D = D
        self.augment_pipe = augment_pipe
        self.style_mixing_prob = style_mixing_prob
        self.r1_gamma = r1_gamma
        self.pl_batch_shrink = pl_batch_shrink
        self.pl_decay = pl_decay
        self.pl_weight = pl_weight
        # Running average of path lengths, updated in-place during Gpl phases.
        self.pl_mean = torch.zeros([], device=device)
        self.video_consistent_aug = video_consistent_aug
        self.sync_batch_start_time = sync_batch_start_time
        self.motion_reg = motion_reg
        self.motion_reg_num_frames = motion_reg_num_frames
        self.motion_reg_batch_size = motion_reg_batch_size
        self.predict_dists_weight = predict_dists_weight

    def run_G(self, z, c, t, l, sync):
        """Map latents to W (with optional style mixing) and synthesize frames.

        Returns (out, ws): the synthesis output and the (possibly mixed)
        intermediate latents. `sync` controls DDP gradient synchronization.
        """
        with misc.ddp_sync(self.G_mapping, sync):
            ws = self.G_mapping(z, c, l=l)
            if self.style_mixing_prob > 0:
                # Standard StyleGAN2 style mixing: with probability
                # style_mixing_prob, replace the tail ws[:, cutoff:] with
                # latents from a fresh z (without updating the w average).
                with torch.autograd.profiler.record_function('style_mixing'):
                    cutoff = torch.empty([], dtype=torch.int64, device=ws.device).random_(1, ws.shape[1])
                    cutoff = torch.where(torch.rand([], device=ws.device) < self.style_mixing_prob, cutoff, torch.full_like(cutoff, ws.shape[1]))
                    ws[:, cutoff:] = self.G_mapping(torch.randn_like(z), c, l=l, skip_w_avg_update=True)[:, cutoff:]
        with misc.ddp_sync(self.G_synthesis, sync):
            out = self.G_synthesis(ws, t=t, c=c, l=l)
        return out, ws

    def run_D(self, img, c, t, sync):
        """Run the discriminator, optionally applying video-consistent augmentation.

        When video_consistent_aug is on, the frames of each video are stacked
        along the channel dimension so the augmentation pipeline applies the
        same transform to every frame, then unstacked before D.
        """
        if self.augment_pipe is not None:
            if self.video_consistent_aug:
                nf, ch, h, w = img.shape
                f = self.G_synthesis.motion_encoder.num_frames_per_motion
                n = nf // f
                img = img.view(n, f * ch, h, w) # [n, f * ch, h, w]
            img = self.augment_pipe(img) # [n, f * ch, h, w]
            if self.video_consistent_aug:
                img = img.view(n * f, ch, h, w) # [n * f, ch, h, w]
        with misc.ddp_sync(self.D, sync):
            outputs = self.D(img, c, t)
        return outputs

    def accumulate_gradients(self, phase, real_img, real_c, real_t, gen_z, gen_c, gen_t, gen_l, sync, gain):
        """Compute and back-propagate the losses for one training phase.

        `phase` selects which loss terms run: Gmain/Dmain are the main
        adversarial terms, Greg/Dreg are the path-length/R1 regularizers,
        and Gboth/Dboth run both. Gradients are scaled by `gain` and
        accumulated via `.backward()`; nothing is returned.
        """
        assert phase in ['Gmain', 'Greg', 'Gboth', 'Dmain', 'Dreg', 'Dboth']
        do_Gmain = (phase in ['Gmain', 'Gboth'])
        do_Dmain = (phase in ['Dmain', 'Dboth'])
        do_Gpl = (phase in ['Greg', 'Gboth']) and (self.pl_weight != 0)
        do_Dr1 = (phase in ['Dreg', 'Dboth']) and (self.r1_gamma != 0)

        # Flatten the frame dimension into the batch dimension.
        real_img = real_img.view(-1, *real_img.shape[2:]) # [batch_size * num_frames, c, h, w]

        if self.sync_batch_start_time:
            # Syncing the batch to the same start time
            if self.sync_batch_start_time == 'random':
                offset = gen_t[random.randint(0, len(gen_t) - 1), 0] # [1]
            elif self.sync_batch_start_time == 'zero':
                offset = 0 # [1]
            elif self.sync_batch_start_time == 'min':
                offset = gen_t.min() # [1]
            else:
                offset = None
            if not offset is None:
                gen_t = (gen_t - gen_t[:, [0]]) + offset # [batch_size, nf]

        # Gmain: Maximize logits for generated images.
        if do_Gmain:
            with torch.autograd.profiler.record_function('Gmain_forward'):
                gen_img, _gen_ws = self.run_G(gen_z, gen_c, gen_t, gen_l, sync=(sync and not do_Gpl)) # [batch_size * num_frames, c, h, w]
                D_out_gen = self.run_D(gen_img, gen_c, gen_t, sync=False) # [batch_size]
                training_stats.report('Loss/scores/fake', D_out_gen['image_logits'])
                training_stats.report('Loss/signs/fake', D_out_gen['image_logits'].sign())
                loss_Gmain = F.softplus(-D_out_gen['image_logits']) # -log(sigmoid(y))
                if 'video_logits' in D_out_gen:
                    loss_Gmain_video = F.softplus(-D_out_gen['video_logits']).mean() # -log(sigmoid(y)) # [1]
                    training_stats.report('Loss/scores/fake_video', D_out_gen['video_logits'])
                    training_stats.report('Loss/G/loss_video', loss_Gmain_video)
                else:
                    loss_Gmain_video = 0.0 # [1]
                training_stats.report('Loss/G/loss', loss_Gmain)
            with torch.autograd.profiler.record_function('Gmain_backward'):
                (loss_Gmain + loss_Gmain_video).mean().mul(gain).backward()

            if self.motion_reg > 0.0:
                # Motion-diversity regularizer: penalize low variance of the
                # motion encoder's period/phase coefficients across a batch
                # of zero conditioning inputs.
                # NOTE(review): self.G_motion_encoder is never assigned in
                # __init__; the two uses below likely should be
                # self.G_synthesis.motion_encoder (as in run_D) -- confirm
                # before enabling motion_reg > 0.
                with torch.autograd.profiler.record_function('Gmotion_reg_forward'):
                    w = torch.zeros(self.motion_reg_batch_size, self.G_mapping.w_dim, device=self.device) # [batch_size, w_dim]
                    c = torch.zeros(self.motion_reg_batch_size, self.G_mapping.c_dim) # [batch_size, c_dim]
                    l = torch.zeros(self.motion_reg_batch_size) # [batch_size]
                    t = torch.linspace(0, self.G_motion_encoder.max_num_frames, self.motion_reg_num_frames, device=self.device).unsqueeze(0).repeat_interleave(self.motion_reg_batch_size, dim=0) # [batch_size, num_frames]
                    time_emb_coefs = self.G_motion_encoder(c=c, t=t, l=l, w=w, return_time_embs_coefs=True) # {...}
                    periods = time_emb_coefs['periods'].view(self.motion_reg_batch_size, self.motion_reg_num_frames, -1) # [batch_size, num_frames, num_feats * num_fourier_feats]
                    phases = time_emb_coefs['phases'].view(self.motion_reg_batch_size, self.motion_reg_num_frames, -1) # [batch_size, num_frames, num_feats * num_fourier_feats]
                    # Negative log-variance over the batch dim: small variance
                    # (collapsed motion) yields a large penalty.
                    periods_logvar = -(periods.var(dim=0) + 1e-8).log() # [num_frames, num_feats * num_fourier_feats]
                    phases_logvar = -(phases.var(dim=0) + 1e-8).log() # [num_frames, num_feats * num_fourier_feats]
                    loss_Gmotion_reg = (periods_logvar.mean() + phases_logvar.mean()) * self.motion_reg # [1]
                    dummy = time_emb_coefs['time_embs'].sum() * 0.0 # [1] <- for DDP consistency
                    training_stats.report('Loss/G/motion_reg', loss_Gmotion_reg)
                with torch.autograd.profiler.record_function('Gmotion_reg_backward'):
                    (loss_Gmotion_reg + dummy).mul(gain).backward()

        # Gpl: Apply path length regularization.
        if do_Gpl:
            with torch.autograd.profiler.record_function('Gpl_forward'):
                batch_size = gen_z.shape[0] // self.pl_batch_shrink
                gen_img, gen_ws = self.run_G(gen_z[:batch_size], gen_c[:batch_size], gen_t[:batch_size], gen_l[:batch_size], sync=sync) # [batch_size * num_frames, c, h, w]
                pl_noise = torch.randn_like(gen_img) / np.sqrt(gen_img.shape[2] * gen_img.shape[3])
                with torch.autograd.profiler.record_function('pl_grads'), conv2d_gradfix.no_weight_gradients():
                    pl_grads = torch.autograd.grad(outputs=[(gen_img * pl_noise).sum()], inputs=[gen_ws], create_graph=True, only_inputs=True)[0]
                pl_lengths = pl_grads.square().sum(2).mean(1).sqrt()
                # Exponential moving average of the path lengths; updated
                # in-place so the buffer persists across calls.
                pl_mean = self.pl_mean.lerp(pl_lengths.mean(), self.pl_decay)
                self.pl_mean.copy_(pl_mean.detach())
                pl_penalty = (pl_lengths - pl_mean).square()
                training_stats.report('Loss/pl_penalty', pl_penalty)
                loss_Gpl = pl_penalty * self.pl_weight
                training_stats.report('Loss/G/reg', loss_Gpl)
            with torch.autograd.profiler.record_function('Gpl_backward'):
                loss_Gpl.mean().mul(gain).backward()

        # Dmain: Minimize logits for generated images.
        loss_Dgen = 0
        if do_Dmain:
            with torch.autograd.profiler.record_function('Dgen_forward'):
                with torch.no_grad():
                    gen_img, _gen_ws = self.run_G(gen_z, gen_c, gen_t, gen_l, sync=False) # [batch_size * num_frames, c, h, w]
                D_out_gen = self.run_D(gen_img, gen_c, gen_t, sync=False) # Gets synced by loss_Dreal.
                training_stats.report('Loss/scores/fake', D_out_gen['image_logits'])
                training_stats.report('Loss/signs/fake', D_out_gen['image_logits'].sign())
                loss_Dgen = F.softplus(D_out_gen['image_logits']) # -log(1 - sigmoid(y))
                if self.predict_dists_weight > 0.0:
                    # Auxiliary task: predict the distance between the first
                    # two sampled frame times as a classification target.
                    t_diffs_gen = gen_t[:, 1] - gen_t[:, 0] # [batch_size]
                    loss_Dgen_dist_preds = F.cross_entropy(D_out_gen['dist_preds'], t_diffs_gen.long()) # [batch_size]
                    training_stats.report('Loss/D/dist_preds_gen', loss_Dgen_dist_preds)
                else:
                    loss_Dgen_dist_preds = 0.0
                if 'video_logits' in D_out_gen:
                    loss_Dgen_video = F.softplus(D_out_gen['video_logits']).mean() # [1]
                    training_stats.report('Loss/scores/fake_video', D_out_gen['video_logits'])
                else:
                    loss_Dgen_video = 0.0 # [1]
            with torch.autograd.profiler.record_function('Dgen_backward'):
                (loss_Dgen + loss_Dgen_video + loss_Dgen_dist_preds).mean().mul(gain).backward()

        # Dmain: Maximize logits for real images.
        # Dr1: Apply R1 regularization.
        if do_Dmain or do_Dr1:
            name = 'Dreal_Dr1' if do_Dmain and do_Dr1 else 'Dreal' if do_Dmain else 'Dr1'
            with torch.autograd.profiler.record_function(name + '_forward'):
                real_img_tmp = real_img.detach().requires_grad_(do_Dr1)
                D_out_real = self.run_D(real_img_tmp, real_c, real_t, sync=sync)
                training_stats.report('Loss/scores/real', D_out_real['image_logits'])
                training_stats.report('Loss/signs/real', D_out_real['image_logits'].sign())
                loss_Dreal = 0
                loss_Dreal_dist_preds = 0
                loss_Dreal_video = 0.0 # [1]
                if do_Dmain:
                    loss_Dreal = F.softplus(-D_out_real['image_logits']) # -log(sigmoid(y))
                    training_stats.report('Loss/D/loss', loss_Dgen + loss_Dreal)
                    # NOTE(review): the key presence is tested on D_out_gen
                    # while the values come from D_out_real; both dicts come
                    # from the same D so the keys match, but checking
                    # D_out_real here would be clearer.
                    if 'video_logits' in D_out_gen:
                        loss_Dreal_video = F.softplus(-D_out_real['video_logits']).mean() # [1]
                        training_stats.report('Loss/scores/real_video', D_out_real['video_logits'])
                        training_stats.report('Loss/D/loss_video', loss_Dgen_video + loss_Dreal_video)
                    if self.predict_dists_weight > 0.0:
                        t_diffs_real = real_t[:, 1] - real_t[:, 0] # [batch_size]
                        loss_Dreal_dist_preds = F.cross_entropy(D_out_real['dist_preds'], t_diffs_real.long()) # [batch_size]
                        training_stats.report('Loss/D/dist_preds_real', loss_Dreal_dist_preds)
                loss_Dr1 = 0
                if do_Dr1:
                    with torch.autograd.profiler.record_function('r1_grads'), conv2d_gradfix.no_weight_gradients():
                        r1_grads = torch.autograd.grad(outputs=[D_out_real['image_logits'].sum()], inputs=[real_img_tmp], create_graph=True, only_inputs=True)[0]
                    r1_penalty = r1_grads.square().sum([1,2,3])
                    loss_Dr1 = r1_penalty * (self.r1_gamma / 2) # [batch_size * num_frames_per_sample]
                    # Average the per-frame penalties back down to one value
                    # per logit (per video sample).
                    loss_Dr1 = loss_Dr1.view(-1, len(real_img_tmp) // len(D_out_real['image_logits'])).mean(dim=1) # [batch_size]
                    training_stats.report('Loss/r1_penalty', r1_penalty)
                    training_stats.report('Loss/D/reg', loss_Dr1)
            # Zero-valued terms keep every D output in the autograd graph so
            # DDP sees gradients for all parameters in every phase.
            dummy_video_logits = (D_out_real["video_logits"].sum() * 0.0) if "video_logits" in D_out_real else 0.0
            with torch.autograd.profiler.record_function(name + '_backward'):
                (D_out_real["image_logits"] * 0 + dummy_video_logits + loss_Dreal + loss_Dreal_video + loss_Dr1 + loss_Dreal_dist_preds).mean().mul(gain).backward()
#----------------------------------------------------------------------------
| [
"numpy.sqrt",
"torch.full_like",
"torch.nn.functional.softplus",
"torch.randn_like",
"torch_utils.ops.conv2d_gradfix.no_weight_gradients",
"torch.autograd.profiler.record_function",
"torch.no_grad",
"torch_utils.training_stats.report",
"torch.linspace",
"torch_utils.misc.ddp_sync",
"torch.empty"... | [((1718, 1748), 'torch.zeros', 'torch.zeros', (['[]'], {'device': 'device'}), '([], device=device)\n', (1729, 1748), False, 'import torch\n'), ((2130, 2165), 'torch_utils.misc.ddp_sync', 'misc.ddp_sync', (['self.G_mapping', 'sync'], {}), '(self.G_mapping, sync)\n', (2143, 2165), False, 'from torch_utils import misc, training_stats\n'), ((2713, 2750), 'torch_utils.misc.ddp_sync', 'misc.ddp_sync', (['self.G_synthesis', 'sync'], {}), '(self.G_synthesis, sync)\n', (2726, 2750), False, 'from torch_utils import misc, training_stats\n'), ((3350, 3377), 'torch_utils.misc.ddp_sync', 'misc.ddp_sync', (['self.D', 'sync'], {}), '(self.D, sync)\n', (3363, 3377), False, 'from torch_utils import misc, training_stats\n'), ((4663, 4719), 'torch.autograd.profiler.record_function', 'torch.autograd.profiler.record_function', (['"""Gmain_forward"""'], {}), "('Gmain_forward')\n", (4702, 4719), False, 'import torch\n'), ((4965, 5033), 'torch_utils.training_stats.report', 'training_stats.report', (['"""Loss/scores/fake"""', "D_out_gen['image_logits']"], {}), "('Loss/scores/fake', D_out_gen['image_logits'])\n", (4986, 5033), False, 'from torch_utils import misc, training_stats\n'), ((5154, 5192), 'torch.nn.functional.softplus', 'F.softplus', (["(-D_out_gen['image_logits'])"], {}), "(-D_out_gen['image_logits'])\n", (5164, 5192), True, 'import torch.nn.functional as F\n'), ((5633, 5681), 'torch_utils.training_stats.report', 'training_stats.report', (['"""Loss/G/loss"""', 'loss_Gmain'], {}), "('Loss/G/loss', loss_Gmain)\n", (5654, 5681), False, 'from torch_utils import misc, training_stats\n'), ((5699, 5756), 'torch.autograd.profiler.record_function', 'torch.autograd.profiler.record_function', (['"""Gmain_backward"""'], {}), "('Gmain_backward')\n", (5738, 5756), False, 'import torch\n'), ((7732, 7786), 'torch.autograd.profiler.record_function', 'torch.autograd.profiler.record_function', (['"""Gpl_forward"""'], {}), "('Gpl_forward')\n", (7771, 7786), False, 'import 
torch\n'), ((8664, 8716), 'torch_utils.training_stats.report', 'training_stats.report', (['"""Loss/pl_penalty"""', 'pl_penalty'], {}), "('Loss/pl_penalty', pl_penalty)\n", (8685, 8716), False, 'from torch_utils import misc, training_stats\n'), ((8788, 8833), 'torch_utils.training_stats.report', 'training_stats.report', (['"""Loss/G/reg"""', 'loss_Gpl'], {}), "('Loss/G/reg', loss_Gpl)\n", (8809, 8833), False, 'from torch_utils import misc, training_stats\n'), ((8851, 8906), 'torch.autograd.profiler.record_function', 'torch.autograd.profiler.record_function', (['"""Gpl_backward"""'], {}), "('Gpl_backward')\n", (8890, 8906), False, 'import torch\n'), ((9077, 9132), 'torch.autograd.profiler.record_function', 'torch.autograd.profiler.record_function', (['"""Dgen_forward"""'], {}), "('Dgen_forward')\n", (9116, 9132), False, 'import torch\n'), ((9418, 9486), 'torch_utils.training_stats.report', 'training_stats.report', (['"""Loss/scores/fake"""', "D_out_gen['image_logits']"], {}), "('Loss/scores/fake', D_out_gen['image_logits'])\n", (9439, 9486), False, 'from torch_utils import misc, training_stats\n'), ((9606, 9643), 'torch.nn.functional.softplus', 'F.softplus', (["D_out_gen['image_logits']"], {}), "(D_out_gen['image_logits'])\n", (9616, 9643), True, 'import torch.nn.functional as F\n'), ((10393, 10449), 'torch.autograd.profiler.record_function', 'torch.autograd.profiler.record_function', (['"""Dgen_backward"""'], {}), "('Dgen_backward')\n", (10432, 10449), False, 'import torch\n'), ((10777, 10835), 'torch.autograd.profiler.record_function', 'torch.autograd.profiler.record_function', (["(name + '_forward')"], {}), "(name + '_forward')\n", (10816, 10835), False, 'import torch\n'), ((11006, 11075), 'torch_utils.training_stats.report', 'training_stats.report', (['"""Loss/scores/real"""', "D_out_real['image_logits']"], {}), "('Loss/scores/real', D_out_real['image_logits'])\n", (11027, 11075), False, 'from torch_utils import misc, training_stats\n'), ((13105, 13164), 
'torch.autograd.profiler.record_function', 'torch.autograd.profiler.record_function', (["(name + '_backward')"], {}), "(name + '_backward')\n", (13144, 13164), False, 'import torch\n'), ((2274, 2329), 'torch.autograd.profiler.record_function', 'torch.autograd.profiler.record_function', (['"""style_mixing"""'], {}), "('style_mixing')\n", (2313, 2329), False, 'import torch\n'), ((5390, 5464), 'torch_utils.training_stats.report', 'training_stats.report', (['"""Loss/scores/fake_video"""', "D_out_gen['video_logits']"], {}), "('Loss/scores/fake_video', D_out_gen['video_logits'])\n", (5411, 5464), False, 'from torch_utils import misc, training_stats\n'), ((5485, 5545), 'torch_utils.training_stats.report', 'training_stats.report', (['"""Loss/G/loss_video"""', 'loss_Gmain_video'], {}), "('Loss/G/loss_video', loss_Gmain_video)\n", (5506, 5545), False, 'from torch_utils import misc, training_stats\n'), ((5894, 5956), 'torch.autograd.profiler.record_function', 'torch.autograd.profiler.record_function', (['"""Gmotion_reg_forward"""'], {}), "('Gmotion_reg_forward')\n", (5933, 5956), False, 'import torch\n'), ((5982, 6068), 'torch.zeros', 'torch.zeros', (['self.motion_reg_batch_size', 'self.G_mapping.w_dim'], {'device': 'self.device'}), '(self.motion_reg_batch_size, self.G_mapping.w_dim, device=self.\n device)\n', (5993, 6068), False, 'import torch\n'), ((6110, 6171), 'torch.zeros', 'torch.zeros', (['self.motion_reg_batch_size', 'self.G_mapping.c_dim'], {}), '(self.motion_reg_batch_size, self.G_mapping.c_dim)\n', (6121, 6171), False, 'import torch\n'), ((6218, 6257), 'torch.zeros', 'torch.zeros', (['self.motion_reg_batch_size'], {}), '(self.motion_reg_batch_size)\n', (6229, 6257), False, 'import torch\n'), ((7430, 7490), 'torch_utils.training_stats.report', 'training_stats.report', (['"""Loss/G/motion_reg"""', 'loss_Gmotion_reg'], {}), "('Loss/G/motion_reg', loss_Gmotion_reg)\n", (7451, 7490), False, 'from torch_utils import misc, training_stats\n'), ((7513, 7576), 
'torch.autograd.profiler.record_function', 'torch.autograd.profiler.record_function', (['"""Gmotion_reg_backward"""'], {}), "('Gmotion_reg_backward')\n", (7552, 7576), False, 'import torch\n'), ((8056, 8081), 'torch.randn_like', 'torch.randn_like', (['gen_img'], {}), '(gen_img)\n', (8072, 8081), False, 'import torch\n'), ((8084, 8128), 'numpy.sqrt', 'np.sqrt', (['(gen_img.shape[2] * gen_img.shape[3])'], {}), '(gen_img.shape[2] * gen_img.shape[3])\n', (8091, 8128), True, 'import numpy as np\n'), ((8150, 8201), 'torch.autograd.profiler.record_function', 'torch.autograd.profiler.record_function', (['"""pl_grads"""'], {}), "('pl_grads')\n", (8189, 8201), False, 'import torch\n'), ((8203, 8239), 'torch_utils.ops.conv2d_gradfix.no_weight_gradients', 'conv2d_gradfix.no_weight_gradients', ([], {}), '()\n', (8237, 8239), False, 'from torch_utils.ops import conv2d_gradfix\n'), ((9155, 9170), 'torch.no_grad', 'torch.no_grad', ([], {}), '()\n', (9168, 9170), False, 'import torch\n'), ((9934, 10002), 'torch_utils.training_stats.report', 'training_stats.report', (['"""Loss/D/dist_preds_gen"""', 'loss_Dgen_dist_preds'], {}), "('Loss/D/dist_preds_gen', loss_Dgen_dist_preds)\n", (9955, 10002), False, 'from torch_utils import misc, training_stats\n'), ((10230, 10304), 'torch_utils.training_stats.report', 'training_stats.report', (['"""Loss/scores/fake_video"""', "D_out_gen['video_logits']"], {}), "('Loss/scores/fake_video', D_out_gen['video_logits'])\n", (10251, 10304), False, 'from torch_utils import misc, training_stats\n'), ((11349, 11388), 'torch.nn.functional.softplus', 'F.softplus', (["(-D_out_real['image_logits'])"], {}), "(-D_out_real['image_logits'])\n", (11359, 11388), True, 'import torch.nn.functional as F\n'), ((11428, 11488), 'torch_utils.training_stats.report', 'training_stats.report', (['"""Loss/D/loss"""', '(loss_Dgen + loss_Dreal)'], {}), "('Loss/D/loss', loss_Dgen + loss_Dreal)\n", (11449, 11488), False, 'from torch_utils import misc, training_stats\n'), ((12853, 
12905), 'torch_utils.training_stats.report', 'training_stats.report', (['"""Loss/r1_penalty"""', 'r1_penalty'], {}), "('Loss/r1_penalty', r1_penalty)\n", (12874, 12905), False, 'from torch_utils import misc, training_stats\n'), ((12926, 12971), 'torch_utils.training_stats.report', 'training_stats.report', (['"""Loss/D/reg"""', 'loss_Dr1'], {}), "('Loss/D/reg', loss_Dr1)\n", (12947, 12971), False, 'from torch_utils import misc, training_stats\n'), ((2545, 2581), 'torch.full_like', 'torch.full_like', (['cutoff', 'ws.shape[1]'], {}), '(cutoff, ws.shape[1])\n', (2560, 2581), False, 'import torch\n'), ((11662, 11737), 'torch_utils.training_stats.report', 'training_stats.report', (['"""Loss/scores/real_video"""', "D_out_real['video_logits']"], {}), "('Loss/scores/real_video', D_out_real['video_logits'])\n", (11683, 11737), False, 'from torch_utils import misc, training_stats\n'), ((11762, 11840), 'torch_utils.training_stats.report', 'training_stats.report', (['"""Loss/D/loss_video"""', '(loss_Dgen_video + loss_Dreal_video)'], {}), "('Loss/D/loss_video', loss_Dgen_video + loss_Dreal_video)\n", (11783, 11840), False, 'from torch_utils import misc, training_stats\n'), ((12130, 12200), 'torch_utils.training_stats.report', 'training_stats.report', (['"""Loss/D/dist_preds_real"""', 'loss_Dreal_dist_preds'], {}), "('Loss/D/dist_preds_real', loss_Dreal_dist_preds)\n", (12151, 12200), False, 'from torch_utils import misc, training_stats\n'), ((12283, 12334), 'torch.autograd.profiler.record_function', 'torch.autograd.profiler.record_function', (['"""r1_grads"""'], {}), "('r1_grads')\n", (12322, 12334), False, 'import torch\n'), ((12336, 12372), 'torch_utils.ops.conv2d_gradfix.no_weight_gradients', 'conv2d_gradfix.no_weight_gradients', ([], {}), '()\n', (12370, 12372), False, 'from torch_utils.ops import conv2d_gradfix\n'), ((2360, 2412), 'torch.empty', 'torch.empty', (['[]'], {'dtype': 'torch.int64', 'device': 'ws.device'}), '([], dtype=torch.int64, device=ws.device)\n', (2371, 
2412), False, 'import torch\n'), ((2478, 2510), 'torch.rand', 'torch.rand', (['[]'], {'device': 'ws.device'}), '([], device=ws.device)\n', (2488, 2510), False, 'import torch\n'), ((2635, 2654), 'torch.randn_like', 'torch.randn_like', (['z'], {}), '(z)\n', (2651, 2654), False, 'import torch\n'), ((5299, 5337), 'torch.nn.functional.softplus', 'F.softplus', (["(-D_out_gen['video_logits'])"], {}), "(-D_out_gen['video_logits'])\n", (5309, 5337), True, 'import torch.nn.functional as F\n'), ((10159, 10196), 'torch.nn.functional.softplus', 'F.softplus', (["D_out_gen['video_logits']"], {}), "(D_out_gen['video_logits'])\n", (10169, 10196), True, 'import torch.nn.functional as F\n'), ((11585, 11624), 'torch.nn.functional.softplus', 'F.softplus', (["(-D_out_real['video_logits'])"], {}), "(-D_out_real['video_logits'])\n", (11595, 11624), True, 'import torch.nn.functional as F\n'), ((6297, 6405), 'torch.linspace', 'torch.linspace', (['(0)', 'self.G_motion_encoder.max_num_frames', 'self.motion_reg_num_frames'], {'device': 'self.device'}), '(0, self.G_motion_encoder.max_num_frames, self.\n motion_reg_num_frames, device=self.device)\n', (6311, 6405), False, 'import torch\n')] |
from BurstPaperWallet.api import brs_api
from BurstPaperWallet.api import passphrase_url_transform as transform
def initialize(account, old_passphrase, fee=735000):
    """Activate an account by sending it 1 NQT, publishing its public key.

    `fee` defaults to the standard 735000 NQT activation fee.
    """
    request = (
        "sendMoney&recipient={}&secretPhrase={}&amountNQT=1&feeNQT={}"
        "&recipientPublicKey={}&deadline=1440"
    ).format(
        account["reed solomon"],
        transform(old_passphrase),
        fee,
        account["public key"],
    )
    print(brs_api(request))
def check_balance(reed_solomon):
    """Return the guaranteed balance (NQT) for a Reed-Solomon address."""
    response = brs_api("getGuaranteedBalance&account={}".format(reed_solomon))
    return response["guaranteedBalanceNQT"]
def adjust_fee(balance, fee):
    """Clamp the transaction fee to the available balance.

    A `fee` of None selects the default of 735000 NQT.  If the balance
    cannot cover the fee, the balance itself is returned unchanged.
    """
    effective_fee = 735000 if fee is None else fee
    return effective_fee if int(balance) >= effective_fee else balance
| [
"BurstPaperWallet.api.passphrase_url_transform",
"BurstPaperWallet.api.brs_api"
] | [((512, 524), 'BurstPaperWallet.api.brs_api', 'brs_api', (['url'], {}), '(url)\n', (519, 524), False, 'from BurstPaperWallet.api import brs_api\n'), ((318, 343), 'BurstPaperWallet.api.passphrase_url_transform', 'transform', (['old_passphrase'], {}), '(old_passphrase)\n', (327, 343), True, 'from BurstPaperWallet.api import passphrase_url_transform as transform\n'), ((384, 396), 'BurstPaperWallet.api.brs_api', 'brs_api', (['url'], {}), '(url)\n', (391, 396), False, 'from BurstPaperWallet.api import brs_api\n')] |
import numpy as np
import os
import keras
from keras import regularizers, losses
from keras.models import Sequential, Model
from keras.layers import Lambda, Input, Dense, Dropout, Reshape, BatchNormalization, Softmax, Concatenate
from keras.utils import plot_model
import keras.backend as K
class multiVAE:
    """Multi-input / multi-output variational autoencoder built from dense units.

    Each of the ``numUnits`` inputs is encoded by its own dense stack; the
    encoder outputs are concatenated and projected to a shared latent
    ("inference") layer, from which one dense decoder per unit reconstructs
    its corresponding input.
    """

    def __init__(self, sampleLen, numUnits, enc_denseLayerSizes, enc_denseLayerActivations, enc_dropouts, enc_batchnorms, dec_denseLayerSizes, dec_denseLayerActivations, dec_dropouts, dec_batchnorms, inf_layerSize):
        # Length of each flat input sample and the number of parallel units.
        self.sampleLen = sampleLen
        self.numUnits = numUnits
        # Per-layer encoder configuration (sizes / activations / dropout rates /
        # batch-norm flags); all lists are iterated in lockstep.
        self.enc_denseLayerSizes = enc_denseLayerSizes
        self.enc_denseLayerActivations = enc_denseLayerActivations
        self.enc_dropouts = enc_dropouts
        self.enc_batchnorms = enc_batchnorms
        # Per-layer decoder configuration, same convention as the encoder.
        self.dec_denseLayerSizes = dec_denseLayerSizes
        self.dec_denseLayerActivations = dec_denseLayerActivations
        self.dec_dropouts = dec_dropouts
        self.dec_batchnorms = dec_batchnorms
        # Width of the latent (inference) layer.
        self.inf_layerSize = inf_layerSize
        # Models are built lazily by createFullNetwork() / extractGenModel().
        self.trainModel = None
        self.inf_layer = None
        self.genModel = [None]*self.numUnits

    def sample_z(self, args):
        """Reparameterization trick: z = mean + sigma * eps, eps ~ N(0, 1).

        Fix: the batch dimension is read from the incoming tensor instead of
        being hard-coded to 32, so sampling works for any batch size.
        """
        mean, log_sigma = args
        batch_size = K.shape(mean)[0]
        eps = K.random_normal(shape=(batch_size, self.inf_layerSize), mean=0., stddev=1.)
        return mean + K.exp(log_sigma / 2) * eps

    def createInputList(self):
        """Create one Keras Input of shape (sampleLen,) per unit."""
        m = [None]*self.numUnits
        for i, _ in enumerate(m):
            m[i] = Input(shape = (self.sampleLen, ), name = 'input'+str(i+1))
        return m

    def encoder(self, m_i, i):
        """Apply unit ``i``'s dense/dropout(/batch-norm) encoder stack to ``m_i``."""
        temp = len(self.enc_dropouts)
        for l, act, drop, bn, j in zip(self.enc_denseLayerSizes, self.enc_denseLayerActivations, self.enc_dropouts, self.enc_batchnorms, range(temp)):
            m_i = Dense(l, activation=act, name=str(i+1)+'enc_dense'+str(j+1))(m_i)
            m_i = Dropout(drop, name=str(i+1)+'enc_dropout'+str(j+1))(m_i)
            if(bn):
                m_i = BatchNormalization(name=str(i+1)+'enc_batchnorm'+str(j+1))(m_i)
        return m_i

    def decoder(self, z_i, i):
        """Apply unit ``i``'s dense/dropout(/batch-norm) decoder stack to ``z_i``."""
        temp = len(self.dec_dropouts)
        for l, act, drop, bn, j in zip(self.dec_denseLayerSizes, self.dec_denseLayerActivations, self.dec_dropouts, self.dec_batchnorms, range(temp)):
            z_i = Dense(l, activation=act, name=str(i+1)+'dec_dense'+str(j+1))(z_i)
            z_i = Dropout(drop, name=str(i+1)+'dec_dropout'+str(j+1))(z_i)
            if(bn):
                z_i = BatchNormalization(name=str(i+1)+'dec_batchnorm'+str(j+1))(z_i)
        return z_i

    def createFullNetwork(self):
        """Build the end-to-end training model: encoders -> latent -> decoders.

        Also writes a diagram of the model to ``multiVAE.png``.
        """
        m = self.createInputList()
        y = [None]*self.numUnits
        m_ = [None]*self.numUnits
        for i, m_i in enumerate(m):
            y[i] = self.encoder(m_i, i)
        # Fuse all encoder outputs, then parameterize the latent Gaussian.
        z_in = Concatenate()(y)
        mean = Dense(self.inf_layerSize, activation='linear', name='mean')(z_in)
        log_sigma = Dense(self.inf_layerSize, activation='linear', name='stddev')(z_in)
        z_out = Lambda(self.sample_z, name='inf_layer')([mean, log_sigma])
        for i in range(self.numUnits):
            m_[i] = self.decoder(z_out, i)
        self.trainModel = Model(inputs=m, outputs=m_)
        plot_model(self.trainModel, to_file='multiVAE.png')

    def trainFullNetwork(self):
        """Placeholder: training is not implemented yet."""
        return None

    def extractGenModel(self):
        """Build one generator model per unit from the trained decoder layers.

        Each generator maps the latent layer directly to that unit's output,
        reusing (sharing weights with) the layers of ``trainModel``.  A
        diagram of each generator is written to ``genModel<i>.png``.
        """
        temp = len(self.dec_batchnorms)
        # NOTE(review): get_layer(...).output_shape normally includes the batch
        # dimension; verify the Input shape here matches the latent layer.
        self.inf_layer = Input(shape = self.trainModel.get_layer('inf_layer').output_shape, name='gen_input')
        for i in range(self.numUnits):
            temp_layer = self.inf_layer
            for j, bn in zip(range(temp), self.dec_batchnorms):
                temp_layer = self.trainModel.get_layer(str(i+1)+'dec_dense'+str(j+1))(temp_layer)
                temp_layer = self.trainModel.get_layer(str(i+1)+'dec_dropout'+str(j+1))(temp_layer)
                if(bn):
                    temp_layer = self.trainModel.get_layer(str(i+1)+'dec_batchnorm'+str(j+1))(temp_layer)
            self.genModel[i] = Model(inputs=self.inf_layer, outputs=temp_layer)
            plot_model(self.genModel[i], to_file='genModel'+str(i+1)+'.png')
| [
"keras.layers.Concatenate",
"keras.utils.plot_model",
"keras.layers.Lambda",
"keras.models.Model",
"keras.backend.random_normal",
"keras.layers.Dense",
"keras.backend.exp"
] | [((1138, 1207), 'keras.backend.random_normal', 'K.random_normal', ([], {'shape': '(32, self.inf_layerSize)', 'mean': '(0.0)', 'stddev': '(1.0)'}), '(shape=(32, self.inf_layerSize), mean=0.0, stddev=1.0)\n', (1153, 1207), True, 'import keras.backend as K\n'), ((3020, 3047), 'keras.models.Model', 'Model', ([], {'inputs': 'm', 'outputs': 'm_'}), '(inputs=m, outputs=m_)\n', (3025, 3047), False, 'from keras.models import Sequential, Model\n'), ((3050, 3101), 'keras.utils.plot_model', 'plot_model', (['self.trainModel'], {'to_file': '"""multiVAE.png"""'}), "(self.trainModel, to_file='multiVAE.png')\n", (3060, 3101), False, 'from keras.utils import plot_model\n'), ((2584, 2597), 'keras.layers.Concatenate', 'Concatenate', ([], {}), '()\n', (2595, 2597), False, 'from keras.layers import Lambda, Input, Dense, Dropout, Reshape, BatchNormalization, Softmax, Concatenate\n'), ((2610, 2669), 'keras.layers.Dense', 'Dense', (['self.inf_layerSize'], {'activation': '"""linear"""', 'name': '"""mean"""'}), "(self.inf_layerSize, activation='linear', name='mean')\n", (2615, 2669), False, 'from keras.layers import Lambda, Input, Dense, Dropout, Reshape, BatchNormalization, Softmax, Concatenate\n'), ((2690, 2751), 'keras.layers.Dense', 'Dense', (['self.inf_layerSize'], {'activation': '"""linear"""', 'name': '"""stddev"""'}), "(self.inf_layerSize, activation='linear', name='stddev')\n", (2695, 2751), False, 'from keras.layers import Lambda, Input, Dense, Dropout, Reshape, BatchNormalization, Softmax, Concatenate\n'), ((2768, 2807), 'keras.layers.Lambda', 'Lambda', (['self.sample_z'], {'name': '"""inf_layer"""'}), "(self.sample_z, name='inf_layer')\n", (2774, 2807), False, 'from keras.layers import Lambda, Input, Dense, Dropout, Reshape, BatchNormalization, Softmax, Concatenate\n'), ((3733, 3781), 'keras.models.Model', 'Model', ([], {'inputs': 'self.inf_layer', 'outputs': 'temp_layer'}), '(inputs=self.inf_layer, outputs=temp_layer)\n', (3738, 3781), False, 'from keras.models import 
Sequential, Model\n'), ((1225, 1245), 'keras.backend.exp', 'K.exp', (['(log_sigma / 2)'], {}), '(log_sigma / 2)\n', (1230, 1245), True, 'import keras.backend as K\n')] |
import pytest
from tests.stub_client import StubHttpClient
from coinoxr.requestor import Requestor
from coinoxr.response import Response
def content(file):
    """Extract the "content" payload from a stubbed JSON fixture."""
    fixture = StubHttpClient.json(file)
    return fixture["content"]
@pytest.fixture
def client():
    """Stub HTTP client preloaded with the fake app id and test dates."""
    stub = StubHttpClient()
    stub.add_app_id("fake_app_id")
    for date in ("2012-07-10", "2012-07-12"):
        stub.add_date(date)
    return stub
@pytest.fixture
def client_get_mock(mocker):
    """Factory fixture: build a mock client whose get() returns a canned Response."""
    def _make(status_code, json):
        mocked = mocker.Mock(StubHttpClient)
        mocked.get = mocker.Mock(return_value=Response(status_code, json))
        return mocked

    return _make
@pytest.fixture
def requestor(client):
    """A Requestor wired to the stub HTTP client with the fake app id."""
    return Requestor("fake_app_id", client)
| [
"tests.stub_client.StubHttpClient",
"coinoxr.response.Response",
"coinoxr.requestor.Requestor",
"tests.stub_client.StubHttpClient.json"
] | [((252, 268), 'tests.stub_client.StubHttpClient', 'StubHttpClient', ([], {}), '()\n', (266, 268), False, 'from tests.stub_client import StubHttpClient\n'), ((733, 765), 'coinoxr.requestor.Requestor', 'Requestor', (['"""fake_app_id"""', 'client'], {}), "('fake_app_id', client)\n", (742, 765), False, 'from coinoxr.requestor import Requestor\n'), ((170, 195), 'tests.stub_client.StubHttpClient.json', 'StubHttpClient.json', (['file'], {}), '(file)\n', (189, 195), False, 'from tests.stub_client import StubHttpClient\n'), ((502, 529), 'coinoxr.response.Response', 'Response', (['status_code', 'json'], {}), '(status_code, json)\n', (510, 529), False, 'from coinoxr.response import Response\n')] |
#!flask/bin/python
# coding=utf-8
from flask import Flask, jsonify
app = Flask(__name__)
# In-memory "database": a single event document served verbatim by the API.
# The title/details values are runtime data returned to clients as-is.
tasks = {
    "event_id" : "1.9",
    "introductions" : [
    {
        "title" : "情怀",
        "details" : "各种无敌, 各种牛人, 各种挑战, 等你来战",
        "image" : "hello.png",
        "background_image" : "backgroundImage.png"
    },
    {
        "title" : "钉子",
        "details" : "各种硬, 各种尖, 各种钻, 钉子精神",
        "image" : "hello.png",
        "background_image" : "backgroundImage.png"
    }]
}
@app.route('/todo/api/v1.0/tasks', methods=['GET'])
def get_tasks():
    """GET endpoint: serialize the whole `tasks` document to JSON."""
    payload = jsonify(tasks)
    return payload
if __name__ == '__main__':
    # Development server only; debug=True must not be enabled in production.
    app.run(debug=True)
"flask.jsonify",
"flask.Flask"
] | [((74, 89), 'flask.Flask', 'Flask', (['__name__'], {}), '(__name__)\n', (79, 89), False, 'from flask import Flask, jsonify\n'), ((539, 553), 'flask.jsonify', 'jsonify', (['tasks'], {}), '(tasks)\n', (546, 553), False, 'from flask import Flask, jsonify\n')] |
# Copyright (c) 2015-2017 <NAME>
# License: MIT
"""
nbrun - Run an Jupyter/IPython notebook, optionally passing arguments.
USAGE
-----
Copy this file in the folder containing the master notebook used to
execute the other notebooks. Then use `run_notebook()` to execute
notebooks.
"""
import time
from pathlib import Path
from IPython.display import display, FileLink
import nbformat
from nbconvert.preprocessors import ExecutePreprocessor
from nbconvert import HTMLExporter
__version__ = '0.2'
def dict_to_code(mapping):
    """Render *mapping* as python source: one ``name = repr(value)`` per line.

    Keys must be strings that are valid python identifiers; values must have
    a ``repr`` that evaluates back to an equivalent object (e.g. numbers,
    strings, or list/tuple/dict of those).

    Returns:
        A string of newline-separated assignment statements.
    """
    assignments = ["{} = {}".format(name, repr(value))
                   for name, value in mapping.items()]
    return '\n'.join(assignments)
def run_notebook(notebook_path, nb_kwargs=None, suffix='-out',
                 out_path_ipynb=None, out_path_html=None,
                 kernel_name=None, working_dir='./',
                 timeout=3600, execute_kwargs=None,
                 save_ipynb=True, save_html=False,
                 insert_pos=1, hide_input=False, display_links=True,
                 return_nb=False, add_timestamp=True):
    """Runs a notebook and saves the output in a new notebook.

    Executes a notebook, optionally passing "arguments"
    similarly to passing arguments to a function.
    Notebook arguments are passed in a dictionary (`nb_kwargs`) which is
    converted into a string containing python assignments. This string is
    inserted in the template notebook as a code cell. The code assigns
    variables which can be used to control the execution. When "calling"
    a notebook, you need to know which arguments (variables) to pass.
    Unlike normal python functions, no check is performed on the input
    arguments. For sanity, we recommended describing the variables that
    can be assigned using a markdown cell at the beginning of the template
    notebook.

    Arguments:
        notebook_path (pathlib.Path or string): input notebook filename.
            This is the notebook to be executed (i.e. template notebook).
        nb_kwargs (dict or None): If not None, this dict is converted to a
            string of python assignments using the dict keys as variables
            names and the dict values as variables content. This string is
            inserted as code-cell in the notebook to be executed.
        suffix (string): suffix to append to the file name of the executed
            notebook. Argument ignored if `out_path_ipynb` is not None.
        out_path_ipynb (pathlib.Path, string or None): file name for the
            output ipynb notebook. If None, the output ipynb notebook has
            the same name as the input notebook plus a suffix, specified
            by the `suffix` argument. If not None, `suffix` is ignored.
            If argument `save_ipynb` is False this argument is ignored.
        out_path_html (pathlib.Path, string or None): file name for the
            output HTML notebook. If None, the output HTML notebook has
            the same name as the input notebook plus a suffix, specified
            by the `suffix` argument. If not None, `suffix` is ignored.
            If argument `save_html` is False this argument is ignored.
        kernel_name (string or None): name of the kernel used to execute the
            notebook. Use the default kernel if None.
        working_dir (string or Path): the folder the kernel is started into.
        timeout (int): max execution time (seconds) for each cell before the
            execution is aborted.
        execute_kwargs (dict): additional arguments passed to
            `ExecutePreprocessor`.
        save_ipynb (bool): if True, save the output notebook in ipynb format.
            Default True.
        save_html (bool): if True, save the output notebook in HTML format.
            Default False.
        insert_pos (int): position of insertion of the code-cell containing
            the input arguments. Default is 1 (i.e. second cell). With this
            default, the first cell of the input notebook can define default
            argument values (used when the notebook is executed
            with no arguments or through the Notebook App).
        hide_input (bool): whether to create a notebook with input cells
            hidden (useful to remind user that the auto-generated output
            is not meant to have the code edited.
        display_links (bool): if True, display/print "link" of template and
            output notebooks. Links are only rendered in a notebook.
            In a text terminal, links are displayed as full file names.
        return_nb (bool): if True, returns the notebook object. If False
            returns None. Default False.
        add_timestamp (bool): if True, add a timestamp cell to the executed
            notebook containing time of execution, duration and the name of
            the template notebook.
    """
    timestamp = ("**Executed:** %s<br>**Duration:** %d seconds.<br>"
                 "**Autogenerated from:** [%s](%s)\n\n---")
    if nb_kwargs is None:
        nb_kwargs = {}
    else:
        header = '# Cell inserted during automated execution.'
        code = dict_to_code(nb_kwargs)
        code_cell = '\n'.join((header, code))

    notebook_path = Path(notebook_path)
    if not notebook_path.is_file():
        raise FileNotFoundError("Path '%s' not found." % notebook_path)

    def check_out_path(notebook_path, out_path, ext, save):
        # Default output name: next to the input notebook, with the suffix.
        if out_path is None:
            out_path = Path(notebook_path.parent,
                            notebook_path.stem + suffix + ext)
        out_path = Path(out_path)
        if save and not out_path.parent.exists():
            msg = "Folder of the output %s file was not found:\n - %s\n."
            # BUG FIX: previously referenced the outer `out_path_ipynb`,
            # which crashes (None.parent) when it was not passed in and
            # reports the wrong folder for the HTML output.
            raise FileNotFoundError(msg % (ext, out_path.parent))
        return out_path

    out_path_ipynb = check_out_path(notebook_path, out_path_ipynb,
                                    ext='.ipynb', save=save_ipynb)
    out_path_html = check_out_path(notebook_path, out_path_html,
                                   ext='.html', save=save_html)

    if display_links:
        display(FileLink(str(notebook_path)))

    if execute_kwargs is None:
        execute_kwargs = {}
    execute_kwargs.update(timeout=timeout)
    if kernel_name is not None:
        execute_kwargs.update(kernel_name=kernel_name)
    ep = ExecutePreprocessor(**execute_kwargs)
    nb = nbformat.read(str(notebook_path), as_version=4)
    if hide_input:
        nb["metadata"].update({"hide_input": True})
    if len(nb_kwargs) > 0:
        nb['cells'].insert(insert_pos, nbformat.v4.new_code_cell(code_cell))

    start_time = time.time()
    try:
        # Execute the notebook
        ep.preprocess(nb, {'metadata': {'path': working_dir}})
    except:
        # Execution failed, print a message then re-raise (bare except so
        # even KeyboardInterrupt leaves a note in the output notebook).
        msg = ('Error executing the notebook "%s".\n'
               'Notebook arguments: %s\n\n'
               'See notebook "%s" for the traceback.' %
               (notebook_path, str(nb_kwargs), out_path_ipynb))
        print(msg)
        timestamp += '\n\nError occurred during execution. See below.'
        raise
    finally:
        if add_timestamp:
            duration = time.time() - start_time
            timestamp = timestamp % (time.ctime(start_time), duration,
                                     notebook_path, out_path_ipynb)
            timestamp_cell = nbformat.v4.new_markdown_cell(timestamp)
            nb['cells'].insert(0, timestamp_cell)
        # Save the executed notebook to disk (even on failure, for debugging).
        if save_ipynb:
            nbformat.write(nb, str(out_path_ipynb))
            if display_links:
                display(FileLink(str(out_path_ipynb)))
        if save_html:
            html_exporter = HTMLExporter()
            body, resources = html_exporter.from_notebook_node(nb)
            with open(str(out_path_html), 'w') as f:
                f.write(body)

    if return_nb:
        return nb
if __name__ == '__main__':
    import argparse
    descr = """\
Execute all notebooks in a folder saving the result in the "out"
subfolder.
"""
    # CLI: positional source folder plus an optional kernel name.
    parser = argparse.ArgumentParser(description=descr, epilog='\n')
    parser.add_argument('folder',
                        help='Source folder with files to be processed.')
    msg = ('Name of kernel executing the notebook.\n'
           'Use `jupyter kernelspec list` for a list of kernels.')
    parser.add_argument('--kernel', metavar='KERNEL_NAME', default=None,
                        help=msg)
    args = parser.parse_args()
    folder = Path(args.folder)
    assert folder.is_dir(), 'Folder "%s" not found.' % folder
    # Executed copies go into an "out" subfolder, created on demand.
    out_path = Path(folder, 'out/')
    if not out_path.is_dir():
        out_path.mkdir(parents=True) # py2 compat
    print('Executing notebooks in "%s" ... ' % folder)
    pathlist = list(folder.glob('*.ipynb'))
    for nbpath in pathlist:
        # Skip already-executed outputs ("-out") and hidden templates ("_").
        if not (nbpath.stem.endswith('-out') or nbpath.stem.startswith('_')):
            print()
            out_path_ipynb = Path(out_path, nbpath.name)
            run_notebook(nbpath, out_path_ipynb=out_path_ipynb,
                         kernel_name=args.kernel)
| [
"nbconvert.HTMLExporter",
"time.ctime",
"argparse.ArgumentParser",
"pathlib.Path",
"nbformat.v4.new_code_cell",
"nbconvert.preprocessors.ExecutePreprocessor",
"time.time",
"nbformat.v4.new_markdown_cell"
] | [((5814, 5833), 'pathlib.Path', 'Path', (['notebook_path'], {}), '(notebook_path)\n', (5818, 5833), False, 'from pathlib import Path\n'), ((6930, 6967), 'nbconvert.preprocessors.ExecutePreprocessor', 'ExecutePreprocessor', ([], {}), '(**execute_kwargs)\n', (6949, 6967), False, 'from nbconvert.preprocessors import ExecutePreprocessor\n'), ((7220, 7231), 'time.time', 'time.time', ([], {}), '()\n', (7229, 7231), False, 'import time\n'), ((8719, 8774), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'description': 'descr', 'epilog': '"""\n"""'}), "(description=descr, epilog='\\n')\n", (8742, 8774), False, 'import argparse\n'), ((9156, 9173), 'pathlib.Path', 'Path', (['args.folder'], {}), '(args.folder)\n', (9160, 9173), False, 'from pathlib import Path\n'), ((9252, 9272), 'pathlib.Path', 'Path', (['folder', '"""out/"""'], {}), "(folder, 'out/')\n", (9256, 9272), False, 'from pathlib import Path\n'), ((6164, 6178), 'pathlib.Path', 'Path', (['out_path'], {}), '(out_path)\n', (6168, 6178), False, 'from pathlib import Path\n'), ((6055, 6116), 'pathlib.Path', 'Path', (['notebook_path.parent', '(notebook_path.stem + suffix + ext)'], {}), '(notebook_path.parent, notebook_path.stem + suffix + ext)\n', (6059, 6116), False, 'from pathlib import Path\n'), ((7164, 7200), 'nbformat.v4.new_code_cell', 'nbformat.v4.new_code_cell', (['code_cell'], {}), '(code_cell)\n', (7189, 7200), False, 'import nbformat\n'), ((7980, 8020), 'nbformat.v4.new_markdown_cell', 'nbformat.v4.new_markdown_cell', (['timestamp'], {}), '(timestamp)\n', (8009, 8020), False, 'import nbformat\n'), ((8326, 8340), 'nbconvert.HTMLExporter', 'HTMLExporter', ([], {}), '()\n', (8338, 8340), False, 'from nbconvert import HTMLExporter\n'), ((9609, 9636), 'pathlib.Path', 'Path', (['out_path', 'nbpath.name'], {}), '(out_path, nbpath.name)\n', (9613, 9636), False, 'from pathlib import Path\n'), ((7787, 7798), 'time.time', 'time.time', ([], {}), '()\n', (7796, 7798), False, 'import time\n'), ((7849, 7871), 
'time.ctime', 'time.ctime', (['start_time'], {}), '(start_time)\n', (7859, 7871), False, 'import time\n')] |
import numpy as np
import pandas as pd
import seaborn as sns
from nninst.backend.tensorflow.model import AlexNet
from nninst.backend.tensorflow.trace.alexnet_imagenet_inter_class_similarity import (
alexnet_imagenet_inter_class_similarity_frequency,
)
from nninst.op import Conv2dOp, DenseOp
# Deterministic plotting run; seaborn default theme.
np.random.seed(0)
sns.set()
# Trace-extraction parameters: activation threshold and how many of the
# 500 samples a neuron must appear in (10%) to count as "frequent".
threshold = 0.5
frequency = int(500 * 0.1)
label = "import"
variant = None
base_name = f"alexnet_imagenet_inter_class_similarity_frequency_{frequency}"
cmap = "Greens"
# Accumulators for the per-layer summary CSV written at the end.
same_class_similarity = []
diff_class_similarity = []
layer_names = []
layers = AlexNet.graph().load().ops_in_layers(Conv2dOp, DenseOp)
# One heatmap per layer, plus one for the whole network (layer_name=None).
for layer_name in [
    None,
    *layers,
]:
    similarity = alexnet_imagenet_inter_class_similarity_frequency(
        threshold, frequency, label, variant=variant, layer_name=layer_name
    ).load()
    # Diagonal = same-class similarity; strict lower triangle = cross-class.
    same_class_similarity.append(
        np.mean(similarity[np.eye(similarity.shape[0], dtype=bool)])
    )
    diff_class_similarity.append(
        np.mean(
            similarity[
                np.tri(similarity.shape[0], similarity.shape[1], k=-1, dtype=bool)
            ]
        )
    )
    if layer_name is None:
        file_name = base_name
        layer_names.append("All")
    else:
        # Keep only the layer's top-level name (text before the first "/").
        file_name = base_name + "_" + layer_name[: layer_name.index("/")]
        layer_names.append(layer_name[: layer_name.index("/")])
    plot_array = np.around(similarity, decimals=2)
    ax = sns.heatmap(plot_array, cmap=cmap, vmax=plot_array.max(), annot=True)
    ax.set(xlabel="Class", ylabel="Class")
    fig = ax.get_figure()
    # fig.savefig(f"{file_name}.pdf", bbox_inches="tight")
    fig.savefig(f"{file_name}.png", bbox_inches="tight")
    # np.savetxt(f"{file_name}.csv", similarity, delimiter=",")
    fig.clf()
# Aggregate heatmaps. NOTE: zip stops at the shortest sequence, so with the
# half-network entries commented out only the "avg" case actually runs.
for layer_name, similarity in zip(
    ["avg", "first_half", "second_half"],
    [
        np.mean(
            [
                alexnet_imagenet_inter_class_similarity_frequency(
                    threshold, frequency, label, variant=variant, layer_name=layer
                ).load()
                for layer in layers
            ],
            axis=0,
        ),
        # np.mean([alexnet_imagenet_inter_class_similarity_frequency(
        #     threshold, frequency, label, variant=variant, layer_name=layer
        # ).load()
        #     for layer in layers[:len(layers) // 2]], axis=0),
        # np.mean([alexnet_imagenet_inter_class_similarity_frequency(
        #     threshold, frequency, label, variant=variant, layer_name=layer
        # ).load()
        #     for layer in layers[len(layers) // 2:]], axis=0),
    ],
):
    file_name = base_name + "_" + layer_name
    plot_array = np.around(similarity, decimals=2)
    ax = sns.heatmap(plot_array, cmap=cmap, vmax=plot_array.max(), annot=True)
    ax.set(xlabel="Class", ylabel="Class")
    fig = ax.get_figure()
    # fig.savefig(f"{file_name}.pdf", bbox_inches="tight")
    fig.savefig(f"{file_name}.png", bbox_inches="tight")
    # np.savetxt(f"{file_name}.csv", similarity, delimiter=",")
    fig.clf()
# Per-layer averages collected above, one row per layer plus "All".
summary_df = pd.DataFrame(
    {
        "Same Class": same_class_similarity,
        "Diff Class": diff_class_similarity,
        "Layer": layer_names,
    }
)
summary_df.to_csv(f"{base_name}_summary.csv", index=False)
| [
"seaborn.set",
"nninst.backend.tensorflow.model.AlexNet.graph",
"numpy.eye",
"nninst.backend.tensorflow.trace.alexnet_imagenet_inter_class_similarity.alexnet_imagenet_inter_class_similarity_frequency",
"numpy.around",
"numpy.random.seed",
"pandas.DataFrame",
"numpy.tri"
] | [((298, 315), 'numpy.random.seed', 'np.random.seed', (['(0)'], {}), '(0)\n', (312, 315), True, 'import numpy as np\n'), ((316, 325), 'seaborn.set', 'sns.set', ([], {}), '()\n', (323, 325), True, 'import seaborn as sns\n'), ((3070, 3184), 'pandas.DataFrame', 'pd.DataFrame', (["{'Same Class': same_class_similarity, 'Diff Class': diff_class_similarity,\n 'Layer': layer_names}"], {}), "({'Same Class': same_class_similarity, 'Diff Class':\n diff_class_similarity, 'Layer': layer_names})\n", (3082, 3184), True, 'import pandas as pd\n'), ((1390, 1423), 'numpy.around', 'np.around', (['similarity'], {'decimals': '(2)'}), '(similarity, decimals=2)\n', (1399, 1423), True, 'import numpy as np\n'), ((2680, 2713), 'numpy.around', 'np.around', (['similarity'], {'decimals': '(2)'}), '(similarity, decimals=2)\n', (2689, 2713), True, 'import numpy as np\n'), ((697, 819), 'nninst.backend.tensorflow.trace.alexnet_imagenet_inter_class_similarity.alexnet_imagenet_inter_class_similarity_frequency', 'alexnet_imagenet_inter_class_similarity_frequency', (['threshold', 'frequency', 'label'], {'variant': 'variant', 'layer_name': 'layer_name'}), '(threshold, frequency,\n label, variant=variant, layer_name=layer_name)\n', (746, 819), False, 'from nninst.backend.tensorflow.trace.alexnet_imagenet_inter_class_similarity import alexnet_imagenet_inter_class_similarity_frequency\n'), ((577, 592), 'nninst.backend.tensorflow.model.AlexNet.graph', 'AlexNet.graph', ([], {}), '()\n', (590, 592), False, 'from nninst.backend.tensorflow.model import AlexNet\n'), ((898, 937), 'numpy.eye', 'np.eye', (['similarity.shape[0]'], {'dtype': 'bool'}), '(similarity.shape[0], dtype=bool)\n', (904, 937), True, 'import numpy as np\n'), ((1037, 1103), 'numpy.tri', 'np.tri', (['similarity.shape[0]', 'similarity.shape[1]'], {'k': '(-1)', 'dtype': 'bool'}), '(similarity.shape[0], similarity.shape[1], k=-1, dtype=bool)\n', (1043, 1103), True, 'import numpy as np\n'), ((1897, 2014), 
'nninst.backend.tensorflow.trace.alexnet_imagenet_inter_class_similarity.alexnet_imagenet_inter_class_similarity_frequency', 'alexnet_imagenet_inter_class_similarity_frequency', (['threshold', 'frequency', 'label'], {'variant': 'variant', 'layer_name': 'layer'}), '(threshold, frequency,\n label, variant=variant, layer_name=layer)\n', (1946, 2014), False, 'from nninst.backend.tensorflow.trace.alexnet_imagenet_inter_class_similarity import alexnet_imagenet_inter_class_similarity_frequency\n')] |
# -*- coding: utf-8 -*-
"""
<NAME>
Computational Biologist
Target Sciences
GSK
<EMAIL>
"""
import sys
import get_generalizable_features
import get_merged_features
import get_useful_features
def main(validation_rep=0, validation_fold=0):
    """Run the three feature-selection stages for one validation rep/fold."""
    print('VALIDATION_REP: {0!s}, VALIDATION_FOLD:{1!s}'.format(validation_rep, validation_fold), flush=True)
    stages = [
        ('GETTING GENERALIZABLE FEATURES...', get_generalizable_features.main),
        ('GETTING MERGED FEATURES...', get_merged_features.main),
        ('GETTING USEFUL FEATURES...', get_useful_features.main),
    ]
    for message, stage in stages:
        print(message, flush=True)
        stage(validation_rep, validation_fold)
if __name__ == '__main__':
    # CLI: argv[1] = validation rep index, argv[2] = validation fold index.
    main(validation_rep=int(sys.argv[1]), validation_fold=int(sys.argv[2]))
| [
"get_useful_features.main",
"get_merged_features.main",
"get_generalizable_features.main"
] | [((418, 482), 'get_generalizable_features.main', 'get_generalizable_features.main', (['validation_rep', 'validation_fold'], {}), '(validation_rep, validation_fold)\n', (449, 482), False, 'import get_generalizable_features\n'), ((544, 601), 'get_merged_features.main', 'get_merged_features.main', (['validation_rep', 'validation_fold'], {}), '(validation_rep, validation_fold)\n', (568, 601), False, 'import get_merged_features\n'), ((663, 720), 'get_useful_features.main', 'get_useful_features.main', (['validation_rep', 'validation_fold'], {}), '(validation_rep, validation_fold)\n', (687, 720), False, 'import get_useful_features\n')] |
# Generated by Django 3.1.6 on 2021-04-12 08:45
from django.db import migrations
class Migration(migrations.Migration):
    """Auto-generated migration: removes the RemoteFollow model (drops its table)."""

    dependencies = [
        ('SocialApp', '0002_auto_20210411_2237'),
    ]

    operations = [
        migrations.DeleteModel(
            name='RemoteFollow',
        ),
    ]
| [
"django.db.migrations.DeleteModel"
] | [((229, 272), 'django.db.migrations.DeleteModel', 'migrations.DeleteModel', ([], {'name': '"""RemoteFollow"""'}), "(name='RemoteFollow')\n", (251, 272), False, 'from django.db import migrations\n')] |
"""Tests for thread_async_queue."""
from __future__ import annotations
import asyncio
from concurrent.futures import ThreadPoolExecutor
from itertools import chain
from typing import List, NamedTuple
import pytest
from opentrons.protocol_runner.thread_async_queue import (
ThreadAsyncQueue,
QueueClosed,
)
def test_basic_single_threaded_behavior() -> None:
    """Test basic queue behavior in a single thread.

    Exiting the ``with subject`` block closes the queue to further puts.
    """
    subject = ThreadAsyncQueue[int]()
    with subject:
        subject.put(1)
        subject.put(2)
        subject.put(3)
    # Putting isn't allowed after closing.
    with pytest.raises(QueueClosed):
        subject.put(4)
    with pytest.raises(QueueClosed):
        subject.put(5)
    # Closing isn't allowed after closing.
    with pytest.raises(QueueClosed):
        subject.done_putting()
    # Values are retrieved in order.
    assert [subject.get(), subject.get(), subject.get()] == [1, 2, 3]
    # After retrieving all values, further retrievals raise.
    with pytest.raises(QueueClosed):
        subject.get()
    with pytest.raises(QueueClosed):
        # If closing were naively implemented as a sentinel value being inserted
        # into the queue, it might be that the first get() after the close
        # correctly raises but the second get() doesn't.
        subject.get()
def test_multi_thread_producer_consumer() -> None:
    """Stochastically smoke-test thread safety.

    Use the queue to pass values between threads
    in a multi-producer, multi-consumer setup.
    Verify that all the values make it through in the correct order.
    """
    num_producers = 3
    num_consumers = 3
    producer_ids = list(range(num_producers))
    # The values that each producer will put into the queue.
    # Anecdotally, threads interleave meaningfully with at least 10000 values.
    values_per_producer = list(range(30000))
    all_expected_values = [
        _ProducedValue(producer_id=p, value=v)
        for p in producer_ids
        for v in values_per_producer
    ]
    subject = ThreadAsyncQueue[_ProducedValue]()
    # Run producers concurrently with consumers.
    with ThreadPoolExecutor(max_workers=num_producers + num_consumers) as executor:
        # `with subject` needs to be inside `with ThreadPoolExecutor`
        # to avoid deadlocks in case something in here raises.
        # Consumers need to see the queue closed eventually to terminate,
        # and `with ThreadPoolExecutor` will wait until all threads are terminated
        # before exiting.
        with subject:
            producers = [
                executor.submit(
                    _produce,
                    queue=subject,
                    values=values_per_producer,
                    producer_id=producer_id,
                )
                for producer_id in producer_ids
            ]
            consumers = [
                executor.submit(_consume, queue=subject) for i in range(num_consumers)
            ]
            # Ensure all producers are done before we exit the `with subject` block
            # and close off the queue to further submissions.
            for c in producers:
                c.result()
    consumer_results = [consumer.result() for consumer in consumers]
    all_values = list(chain(*consumer_results))
    # Assert that the total set of consumed values is as expected:
    # No duplicates, no extras, and nothing missing.
    assert sorted(all_values) == sorted(all_expected_values)

    def assert_consumer_result_correctly_ordered(
        consumer_result: List[_ProducedValue],
    ) -> None:
        # Assert that the consumer got values in the order the producer provided them.
        # Allow values from different producers to be interleaved,
        # and tolerate skipped values (assume they were given to a different consumer).
        # [[All consumed from producer 0], [All consumed from producer 1], etc.]
        consumed_values_per_producer = [
            [pv for pv in consumer_result if pv.producer_id == producer_id]
            for producer_id in producer_ids
        ]
        for values_from_single_producer in consumed_values_per_producer:
            assert values_from_single_producer == sorted(values_from_single_producer)

    for consumer_result in consumer_results:
        assert_consumer_result_correctly_ordered(consumer_result)
async def test_async() -> None:
    """Smoke-test async support.

    Use the queue to pass values
    from a single async producer to a single async consumer,
    running concurrently in the same event loop.
    This verifies two things:
    1. That async retrieval returns basically the expected values.
    2. That async retrieval keeps the event loop free while waiting.
       If it didn't, this test would reveal the problem by deadlocking.
    We trust that more complicated multi-producer/multi-consumer interactions
    are covered by the non-async tests.
    """
    expected_values = list(range(1000))
    subject = ThreadAsyncQueue[_ProducedValue]()
    consumer = asyncio.create_task(_consume_async(queue=subject))
    try:
        with subject:
            await _produce_async(queue=subject, values=expected_values, producer_id=0)
    finally:
        # Always await the consumer so the task is not left dangling.
        consumed = await consumer
    assert consumed == [_ProducedValue(producer_id=0, value=v) for v in expected_values]
class _ProducedValue(NamedTuple):
producer_id: int
value: int
def _produce(
queue: ThreadAsyncQueue[_ProducedValue],
values: List[int],
producer_id: int,
) -> None:
"""Put values in the queue, tagged with an ID representing this producer."""
for v in values:
queue.put(_ProducedValue(producer_id=producer_id, value=v))
def _consume(queue: ThreadAsyncQueue[_ProducedValue]) -> List[_ProducedValue]:
"""Consume values from the queue indiscriminately until it's closed.
Return everything consumed, in the order that this function consumed it.
"""
result = []
for value in queue.get_until_closed():
result.append(value)
return result
async def _produce_async(
queue: ThreadAsyncQueue[_ProducedValue],
values: List[int],
producer_id: int,
) -> None:
"""Like `_produce()`, except yield to the event loop after each insertion."""
for value in values:
queue.put(_ProducedValue(producer_id=producer_id, value=value))
await asyncio.sleep(0)
async def _consume_async(
queue: ThreadAsyncQueue[_ProducedValue],
) -> List[_ProducedValue]:
"""Like _consume()`, except yield to the event loop while waiting."""
result = []
async for value in queue.get_async_until_closed():
result.append(value)
return result
| [
"itertools.chain",
"concurrent.futures.ThreadPoolExecutor",
"pytest.raises",
"asyncio.sleep"
] | [((606, 632), 'pytest.raises', 'pytest.raises', (['QueueClosed'], {}), '(QueueClosed)\n', (619, 632), False, 'import pytest\n'), ((666, 692), 'pytest.raises', 'pytest.raises', (['QueueClosed'], {}), '(QueueClosed)\n', (679, 692), False, 'import pytest\n'), ((770, 796), 'pytest.raises', 'pytest.raises', (['QueueClosed'], {}), '(QueueClosed)\n', (783, 796), False, 'import pytest\n'), ((1008, 1034), 'pytest.raises', 'pytest.raises', (['QueueClosed'], {}), '(QueueClosed)\n', (1021, 1034), False, 'import pytest\n'), ((1067, 1093), 'pytest.raises', 'pytest.raises', (['QueueClosed'], {}), '(QueueClosed)\n', (1080, 1093), False, 'import pytest\n'), ((2140, 2201), 'concurrent.futures.ThreadPoolExecutor', 'ThreadPoolExecutor', ([], {'max_workers': '(num_producers + num_consumers)'}), '(max_workers=num_producers + num_consumers)\n', (2158, 2201), False, 'from concurrent.futures import ThreadPoolExecutor\n'), ((3275, 3299), 'itertools.chain', 'chain', (['*consumer_results'], {}), '(*consumer_results)\n', (3280, 3299), False, 'from itertools import chain\n'), ((6379, 6395), 'asyncio.sleep', 'asyncio.sleep', (['(0)'], {}), '(0)\n', (6392, 6395), False, 'import asyncio\n')] |
"""Solution for subdag_example.py.
Uses a factory function to return a DAG that can be used as the subdag argument
to SubDagOperator. Notice that:
1) the SubDAG's dag_id is formatted as parent_dag_id.subdag_task_id
2) the start_date and schedule_interval of the SubDAG are copied from the parent
DAG.
"""
from airflow import DAG
from airflow.contrib.operators.gcs_download_operator import GoogleCloudStorageDownloadOperator
from airflow.operators.bash_operator import BashOperator
from airflow.operators.dummy_operator import DummyOperator
from airflow.operators.subdag_operator import SubDagOperator
from datetime import datetime, timedelta
YESTERDAY = datetime.combine(datetime.today() - timedelta(days=1),
datetime.min.time())
default_args = {
'owner': 'airflow',
'depends_on_past': False,
'start_date': YESTERDAY,
'email_on_failure': False,
'email_on_retry': False,
'retries': 1,
'retry_delay': timedelta(minutes=5),
}
def shakespeare_subdag(parent_dag, subdag_task_id, play_name):
with DAG('{}.{}'.format(parent_dag.dag_id, subdag_task_id),
schedule_interval=parent_dag.schedule_interval,
start_date=parent_dag.start_date,
default_args=parent_dag.default_args) as subdag:
download = GoogleCloudStorageDownloadOperator(
task_id='download',
bucket='smenyc2018-subdag-data',
object='{}.enc'.format(play_name),
filename='/home/airflow/gcs/data/{}.enc'.format(play_name))
decrypt = BashOperator(
task_id='decrypt',
bash_command=
'openssl enc -in /home/airflow/gcs/data/{play_name}.enc '
'-out /home/airflow/gcs/data/{play_name}.txt -d -aes-128-cbc -k "hello-nyc"'
.format(play_name=play_name))
wordcount = BashOperator(
task_id='wordcount',
bash_command=
'wc -w /home/airflow/gcs/data/{play_name}.txt | tee /home/airflow/gcs/data/{play_name}_wordcount.txt'
.format(play_name=play_name))
download >> decrypt >> wordcount
return subdag
with DAG('subdag_example_solution', default_args=default_args,
catchup=False) as dag:
start = DummyOperator(task_id='start')
start >> SubDagOperator(task_id='process_romeo',
subdag=shakespeare_subdag(dag, 'process_romeo',
'romeo'))
start >> SubDagOperator(task_id='process_othello',
subdag=shakespeare_subdag(dag, 'process_othello',
'othello'))
start >> SubDagOperator(task_id='process_hamlet',
subdag=shakespeare_subdag(dag, 'process_hamlet',
'hamlet'))
start >> SubDagOperator(task_id='process_macbeth',
subdag=shakespeare_subdag(dag, 'process_macbeth',
'macbeth'))
| [
"datetime.datetime.min.time",
"airflow.operators.dummy_operator.DummyOperator",
"airflow.DAG",
"datetime.datetime.today",
"datetime.timedelta"
] | [((744, 763), 'datetime.datetime.min.time', 'datetime.min.time', ([], {}), '()\n', (761, 763), False, 'from datetime import datetime, timedelta\n'), ((963, 983), 'datetime.timedelta', 'timedelta', ([], {'minutes': '(5)'}), '(minutes=5)\n', (972, 983), False, 'from datetime import datetime, timedelta\n'), ((2142, 2214), 'airflow.DAG', 'DAG', (['"""subdag_example_solution"""'], {'default_args': 'default_args', 'catchup': '(False)'}), "('subdag_example_solution', default_args=default_args, catchup=False)\n", (2145, 2214), False, 'from airflow import DAG\n'), ((2244, 2274), 'airflow.operators.dummy_operator.DummyOperator', 'DummyOperator', ([], {'task_id': '"""start"""'}), "(task_id='start')\n", (2257, 2274), False, 'from airflow.operators.dummy_operator import DummyOperator\n'), ((677, 693), 'datetime.datetime.today', 'datetime.today', ([], {}), '()\n', (691, 693), False, 'from datetime import datetime, timedelta\n'), ((696, 713), 'datetime.timedelta', 'timedelta', ([], {'days': '(1)'}), '(days=1)\n', (705, 713), False, 'from datetime import datetime, timedelta\n')] |
#!/usr/bin/env python3.8
"""
export_blueprints.py
Connect to a Nutanix Prism Central instance, grab all Calm blueprints and export them to JSON files.
You would need to *heavily* modify this script for use in a production environment so that it contains appropriate error-checking and exception handling.
"""
__author__ = "<NAME> @ Nutanix"
__version__ = "1.1"
__maintainer__ = "<NAME> @ Nutanix"
__email__ = "<EMAIL>"
__status__ = "Development/Demo"
# default modules
import json
import getpass
import argparse
from time import localtime, strftime
import urllib3
# custom modules
import apiclient
def set_options():
global ENTITY_RESPONSE_LENGTH
"""
set ENTITY_RESPONSE_LENGTH to the maximum number of blueprints you want
to export
this is only required since the v3 list APIs will only return 20
entities by default
"""
ENTITY_RESPONSE_LENGTH = 50
def get_options():
global cluster_ip
global username
global password
# process the command-line arguments
parser = argparse.ArgumentParser(
description="Export all Calm blueprints to JSON files"
)
parser.add_argument("pc_ip", help="Prism Central IP address")
parser.add_argument("-u", "--username", help="Prism Central username")
parser.add_argument("-p", "--password", help="Prism Central password")
args = parser.parse_args()
# validate the arguments to make sure all required info has been supplied
if args.username:
username = args.username
else:
username = input("Please enter your Prism Central username: ")
if args.password:
password = args.password
else:
password = getpass.getpass()
cluster_ip = args.pc_ip
def main():
# set the global options
set_options()
# get the cluster connection info
get_options()
"""
disable insecure connection warnings
please be advised and aware of the implications
in a production environment!
"""
urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)
# make sure all required info has been provided
if not cluster_ip:
raise Exception("Cluster IP is required.")
elif not username:
raise Exception("Username is required.")
elif not password:
raise Exception("Password is required.")
else:
"""
do a preliminary check to see if this is AOS or CE
not used in this script but is could be useful for
later modifications
"""
client = apiclient.ApiClient(
"post",
cluster_ip,
"clusters/list",
'{ "kind": "cluster" }',
username,
password,
)
results = client.get_info()
is_ce = False
for cluster in results["entities"]:
if (
"-ce-"
in cluster["status"]["resources"]["config"]["build"]["full_version"]
):
is_ce = True
endpoints = {}
endpoints["blueprints"] = ["blueprint", (f'"length":{ENTITY_RESPONSE_LENGTH}')]
# get all blueprints
for endpoint in endpoints:
if endpoints[endpoint][1] != "":
client = apiclient.ApiClient(
"post",
cluster_ip,
(f"{endpoints[endpoint][0]}s/list"),
(
f'{{ "kind": "{endpoints[endpoint][0]}", {endpoints[endpoint][1]} }}'
),
username,
password,
)
else:
client = apiclient.ApiClient(
"post",
cluster_ip,
(f"{endpoints[endpoint][0]}s/list"),
(f'{{ "kind": "{endpoints[endpoint][0]}" }}'),
username,
password,
)
results = client.get_info()
# make sure the user knows what's happening ... ;-)
print(f"\n{len(results['entities'])} blueprints collected from {cluster_ip}\n")
'''
go through all the blueprints and export them to appropriately named files
filename will match the blueprint name and should work find if blueprint name contains spaces (tested on Ubuntu Linux)
'''
for blueprint in results["entities"]:
day = strftime("%d-%b-%Y", localtime())
time = strftime("%H%M%S", localtime())
blueprint_filename = f"{day}_{time}_{blueprint['status']['name']}.json"
client = apiclient.ApiClient(
"get",
cluster_ip,
f"blueprints/{blueprint['status']['uuid']}/export_file",
'{ "kind": "cluster" }',
username,
password,
)
exported_json = client.get_info()
with open(f"./{blueprint_filename}", "w") as f:
json.dump(exported_json, f)
print(
f"Successfully exported blueprint '{blueprint['status']['name']}'"
)
print("\nFinished!\n")
if __name__ == "__main__":
main()
| [
"argparse.ArgumentParser",
"getpass.getpass",
"apiclient.ApiClient",
"urllib3.disable_warnings",
"time.localtime",
"json.dump"
] | [((1042, 1121), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'description': '"""Export all Calm blueprints to JSON files"""'}), "(description='Export all Calm blueprints to JSON files')\n", (1065, 1121), False, 'import argparse\n'), ((1997, 2064), 'urllib3.disable_warnings', 'urllib3.disable_warnings', (['urllib3.exceptions.InsecureRequestWarning'], {}), '(urllib3.exceptions.InsecureRequestWarning)\n', (2021, 2064), False, 'import urllib3\n'), ((1684, 1701), 'getpass.getpass', 'getpass.getpass', ([], {}), '()\n', (1699, 1701), False, 'import getpass\n'), ((2534, 2639), 'apiclient.ApiClient', 'apiclient.ApiClient', (['"""post"""', 'cluster_ip', '"""clusters/list"""', '"""{ "kind": "cluster" }"""', 'username', 'password'], {}), '(\'post\', cluster_ip, \'clusters/list\',\n \'{ "kind": "cluster" }\', username, password)\n', (2553, 2639), False, 'import apiclient\n'), ((4596, 4744), 'apiclient.ApiClient', 'apiclient.ApiClient', (['"""get"""', 'cluster_ip', 'f"""blueprints/{blueprint[\'status\'][\'uuid\']}/export_file"""', '"""{ "kind": "cluster" }"""', 'username', 'password'], {}), '(\'get\', cluster_ip,\n f"blueprints/{blueprint[\'status\'][\'uuid\']}/export_file",\n \'{ "kind": "cluster" }\', username, password)\n', (4615, 4744), False, 'import apiclient\n'), ((3237, 3410), 'apiclient.ApiClient', 'apiclient.ApiClient', (['"""post"""', 'cluster_ip', 'f"""{endpoints[endpoint][0]}s/list"""', 'f"""{{ "kind": "{endpoints[endpoint][0]}", {endpoints[endpoint][1]} }}"""', 'username', 'password'], {}), '(\'post\', cluster_ip, f\'{endpoints[endpoint][0]}s/list\',\n f\'{{ "kind": "{endpoints[endpoint][0]}", {endpoints[endpoint][1]} }}\',\n username, password)\n', (3256, 3410), False, 'import apiclient\n'), ((3635, 3778), 'apiclient.ApiClient', 'apiclient.ApiClient', (['"""post"""', 'cluster_ip', 'f"""{endpoints[endpoint][0]}s/list"""', 'f"""{{ "kind": "{endpoints[endpoint][0]}" }}"""', 'username', 'password'], {}), '(\'post\', cluster_ip, 
f\'{endpoints[endpoint][0]}s/list\',\n f\'{{ "kind": "{endpoints[endpoint][0]}" }}\', username, password)\n', (3654, 3778), False, 'import apiclient\n'), ((4427, 4438), 'time.localtime', 'localtime', ([], {}), '()\n', (4436, 4438), False, 'from time import localtime, strftime\n'), ((4478, 4489), 'time.localtime', 'localtime', ([], {}), '()\n', (4487, 4489), False, 'from time import localtime, strftime\n'), ((4970, 4997), 'json.dump', 'json.dump', (['exported_json', 'f'], {}), '(exported_json, f)\n', (4979, 4997), False, 'import json\n')] |
#* pyx509 - Python library for parsing X.509
#* Copyright (C) 2009-2010 CZ.NIC, z.s.p.o. (http://www.nic.cz)
#*
#* This library is free software; you can redistribute it and/or
#* modify it under the terms of the GNU Library General Public
#* License as published by the Free Software Foundation; either
#* version 2 of the License, or (at your option) any later version.
#*
#* This library is distributed in the hope that it will be useful,
#* but WITHOUT ANY WARRANTY; without even the implied warranty of
#* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
#* Library General Public License for more details.
#*
#* You should have received a copy of the GNU Library General Public
#* License along with this library; if not, write to the Free
#* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
#*
'''
Created on Dec 3, 2009
'''
# standard library imports
import string
# dslib imports
from pyasn1.type import tag,namedtype,univ,useful
from pyasn1 import error
# local imports
from tools import *
from oid import oid_map as oid_map
from general_types import *
class Extension(univ.Sequence):
componentType = namedtype.NamedTypes(
namedtype.NamedType('extnID', univ.ObjectIdentifier()),
namedtype.DefaultedNamedType('critical', univ.Boolean('False')),
namedtype.NamedType('extnValue', univ.OctetString())
#namedtype.NamedType('extnValue', ExtensionValue())
)
class Extensions(univ.SequenceOf):
componentType = Extension()
sizeSpec = univ.SequenceOf.sizeSpec
class SubjectPublicKeyInfo(univ.Sequence):
componentType = namedtype.NamedTypes(
namedtype.NamedType('algorithm', AlgorithmIdentifier()),
namedtype.NamedType('subjectPublicKey', ConvertibleBitString())
)
class UniqueIdentifier(ConvertibleBitString): pass
class Time(univ.Choice):
componentType = namedtype.NamedTypes(
namedtype.NamedType('utcTime', useful.UTCTime()),
namedtype.NamedType('generalTime', useful.GeneralizedTime())
)
def __str__(self):
return str(self.getComponent())
class Validity(univ.Sequence):
componentType = namedtype.NamedTypes(
namedtype.NamedType('notBefore', Time()),
namedtype.NamedType('notAfter', Time())
)
class CertificateSerialNumber(univ.Integer): pass
class Version(univ.Integer):
namedValues = namedval.NamedValues(
('v1', 0), ('v2', 1), ('v3', 2)
)
class TBSCertificate(univ.Sequence):
componentType = namedtype.NamedTypes(
namedtype.DefaultedNamedType('version', Version('v1', tagSet=Version.tagSet.tagExplicitly(tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 0)))),
namedtype.NamedType('serialNumber', CertificateSerialNumber()),
namedtype.NamedType('signature', AlgorithmIdentifier()),
namedtype.NamedType('issuer', Name()),
namedtype.NamedType('validity', Validity()),
namedtype.NamedType('subject', Name()),
namedtype.NamedType('subjectPublicKeyInfo', SubjectPublicKeyInfo()),
namedtype.OptionalNamedType('issuerUniqueID', UniqueIdentifier().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 1))),
namedtype.OptionalNamedType('subjectUniqueID', UniqueIdentifier().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 2))),
namedtype.OptionalNamedType('extensions', Extensions().subtype(explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 3)))
)
class Certificate(univ.Sequence):
componentType = namedtype.NamedTypes(
namedtype.NamedType('tbsCertificate', TBSCertificate()),
namedtype.NamedType('signatureAlgorithm', AlgorithmIdentifier()),
namedtype.NamedType('signatureValue', ConvertibleBitString())
)
class Certificates(univ.SetOf):
componentType = Certificate()
| [
"pyasn1.type.univ.OctetString",
"pyasn1.type.tag.Tag",
"pyasn1.type.useful.UTCTime",
"pyasn1.type.univ.Boolean",
"pyasn1.type.useful.GeneralizedTime",
"pyasn1.type.univ.ObjectIdentifier"
] | [((1270, 1293), 'pyasn1.type.univ.ObjectIdentifier', 'univ.ObjectIdentifier', ([], {}), '()\n', (1291, 1293), False, 'from pyasn1.type import tag, namedtype, univ, useful\n'), ((1345, 1366), 'pyasn1.type.univ.Boolean', 'univ.Boolean', (['"""False"""'], {}), "('False')\n", (1357, 1366), False, 'from pyasn1.type import tag, namedtype, univ, useful\n'), ((1410, 1428), 'pyasn1.type.univ.OctetString', 'univ.OctetString', ([], {}), '()\n', (1426, 1428), False, 'from pyasn1.type import tag, namedtype, univ, useful\n'), ((2004, 2020), 'pyasn1.type.useful.UTCTime', 'useful.UTCTime', ([], {}), '()\n', (2018, 2020), False, 'from pyasn1.type import tag, namedtype, univ, useful\n'), ((2066, 2090), 'pyasn1.type.useful.GeneralizedTime', 'useful.GeneralizedTime', ([], {}), '()\n', (2088, 2090), False, 'from pyasn1.type import tag, namedtype, univ, useful\n'), ((3222, 3274), 'pyasn1.type.tag.Tag', 'tag.Tag', (['tag.tagClassContext', 'tag.tagFormatSimple', '(1)'], {}), '(tag.tagClassContext, tag.tagFormatSimple, 1)\n', (3229, 3274), False, 'from pyasn1.type import tag, namedtype, univ, useful\n'), ((3372, 3424), 'pyasn1.type.tag.Tag', 'tag.Tag', (['tag.tagClassContext', 'tag.tagFormatSimple', '(2)'], {}), '(tag.tagClassContext, tag.tagFormatSimple, 2)\n', (3379, 3424), False, 'from pyasn1.type import tag, namedtype, univ, useful\n'), ((3511, 3563), 'pyasn1.type.tag.Tag', 'tag.Tag', (['tag.tagClassContext', 'tag.tagFormatSimple', '(3)'], {}), '(tag.tagClassContext, tag.tagFormatSimple, 3)\n', (3518, 3563), False, 'from pyasn1.type import tag, namedtype, univ, useful\n'), ((2705, 2762), 'pyasn1.type.tag.Tag', 'tag.Tag', (['tag.tagClassContext', 'tag.tagFormatConstructed', '(0)'], {}), '(tag.tagClassContext, tag.tagFormatConstructed, 0)\n', (2712, 2762), False, 'from pyasn1.type import tag, namedtype, univ, useful\n')] |
from math import isqrt
is_square = lambda n: isqrt(n) ** 2 == n if n >= 0 else False
def is_square_soln(n):
pass
print(is_square(-1)) | [
"math.isqrt"
] | [((46, 54), 'math.isqrt', 'isqrt', (['n'], {}), '(n)\n', (51, 54), False, 'from math import isqrt\n')] |
# -*- coding: utf-8 -*-
"""
TencentBlueKing is pleased to support the open source community by making 蓝鲸智云-用户管理(Bk-User) available.
Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved.
Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License.
You may obtain a copy of the License at http://opensource.org/licenses/MIT
Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
specific language governing permissions and limitations under the License.
"""
import logging
from dataclasses import dataclass, field
from pathlib import Path
from typing import Dict, Optional, Type
from uuid import UUID
import yaml
from bkuser_core.categories.constants import SyncTaskStatus
from bkuser_core.categories.loader import register_plugin
from bkuser_core.categories.models import ProfileCategory, SyncProgress, SyncTask
from bkuser_core.categories.plugins.base import LoginHandler, Syncer
from bkuser_core.categories.plugins.constants import HookType
from bkuser_core.common.models import is_obj_needed_update
from bkuser_core.user_settings.models import Setting, SettingMeta
from rest_framework import serializers
from typing_extensions import Protocol
logger = logging.getLogger(__name__)
class SyncRecordSLZ(serializers.Serializer):
detail = serializers.DictField(child=serializers.CharField())
success = serializers.BooleanField()
dt = serializers.DateTimeField()
class PluginHook(Protocol):
"""插件钩子,用于各种事件后的回调"""
def trigger(self, status: str, params: dict):
raise NotImplementedError
@dataclass
class DataSourcePlugin:
"""数据源插件,定义不同的数据源"""
name: str
syncer_cls: Type[Syncer]
# 绑定的目录类型
# 后期会将去掉目录类型的概念,只存在租户组和插件之间的直接对应关系
# 届时,将直接通过插件名获取,同时删除该变量
# TODO: remove me
category_type: Optional[str] = ""
# 额外配置,预留扩展
# 用于处理登录相关逻辑,目前只支持简单 check 逻辑
# 是否允许通过 SaaS 修改,默认不允许
allow_client_write: bool = field(default_factory=lambda: False)
login_handler_cls: Optional[Type[LoginHandler]] = None
settings_path: Optional[Path] = None
# 其他额外配置
extra_config: dict = field(default_factory=dict)
hooks: Dict[HookType, Type[PluginHook]] = field(default_factory=dict)
def register(self):
"""注册插件"""
register_plugin(self)
if self.settings_path is not None:
self.load_settings_from_yaml()
def init_settings(self, setting_meta_key: str, meta_info: dict):
namespace = meta_info.pop("namespace", "general")
try:
meta, created = SettingMeta.objects.get_or_create(
key=setting_meta_key, category_type=self.name, namespace=namespace, defaults=meta_info
)
if created:
logger.debug("\n------ SettingMeta<%s> of plugin<%s> created.", setting_meta_key, self.name)
except Exception: # pylint: disable=broad-except
logger.exception("SettingMeta<%s> of plugin<%s> can not been created.", setting_meta_key, self.name)
return
if is_obj_needed_update(meta, meta_info):
for k, v in meta_info.items():
setattr(meta, k, v)
try:
meta.save()
except Exception: # pylint: disable=broad-except
logger.exception("SettingMeta<%s> of plugin<%s> can not been updated.", setting_meta_key, self.name)
return
logger.debug("\n------ SettingMeta<%s> of plugin<%s> updated.", setting_meta_key, self.name)
# 默认在创建 meta 后创建 settings,保证新增的配置能够被正确初始化
if meta.default is not None:
# 理论上目录不能够被直接恢复, 所以已经被删除的目录不会被更新
# 仅做新增,避免覆盖已有配置
for category in ProfileCategory.objects.filter(type=self.category_type, enabled=True):
ins, created = Setting.objects.get_or_create(
meta=meta, category_id=category.id, defaults={"value": meta.default}
)
if created:
logger.debug("\n------ Setting<%s> of category<%s> created.", ins, category)
def load_settings_from_yaml(self):
"""从 yaml 中加载 SettingMeta 配置"""
with self.settings_path.open(mode="r") as f:
for key, meta_info in yaml.safe_load(f).items():
self.init_settings(key, meta_info)
def get_hook(self, type_: HookType) -> Optional[PluginHook]:
hook_cls = self.hooks.get(type_)
return hook_cls() if hook_cls else None
def sync(self, instance_id: int, task_id: UUID, *args, **kwargs):
"""同步数据"""
syncer = self.syncer_cls(category_id=instance_id)
category = syncer.category
task = SyncTask.objects.get(id=task_id)
progresses = SyncProgress.objects.init_progresses(category, task_id=task_id)
try:
syncer.sync(*args, **kwargs)
finally:
task_status = SyncTaskStatus.SUCCESSFUL.value
for item in syncer.context.report():
if not item.successful:
task_status = SyncTaskStatus.FAILED.value
progress = progresses[item.step]
fields = {
"status": SyncTaskStatus.SUCCESSFUL.value if item.successful else SyncTaskStatus.FAILED.value,
"successful_count": len(item.successful_items),
"failed_count": len(item.failed_items),
"logs": "\n".join(item.logs),
"failed_records": SyncRecordSLZ(item.failed_items, many=True).data,
}
for key, value in fields.items():
setattr(progress, key, value)
progress.save(update_fields=["status", "successful_count", "failed_count", "update_time"])
# 更新任务状态
task.status = task_status
task.save(update_fields=["status", "update_time"])
| [
"logging.getLogger",
"rest_framework.serializers.DateTimeField",
"rest_framework.serializers.BooleanField",
"bkuser_core.categories.loader.register_plugin",
"bkuser_core.common.models.is_obj_needed_update",
"bkuser_core.user_settings.models.Setting.objects.get_or_create",
"yaml.safe_load",
"bkuser_cor... | [((1411, 1438), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (1428, 1438), False, 'import logging\n'), ((1566, 1592), 'rest_framework.serializers.BooleanField', 'serializers.BooleanField', ([], {}), '()\n', (1590, 1592), False, 'from rest_framework import serializers\n'), ((1602, 1629), 'rest_framework.serializers.DateTimeField', 'serializers.DateTimeField', ([], {}), '()\n', (1627, 1629), False, 'from rest_framework import serializers\n'), ((2126, 2163), 'dataclasses.field', 'field', ([], {'default_factory': '(lambda : False)'}), '(default_factory=lambda : False)\n', (2131, 2163), False, 'from dataclasses import dataclass, field\n'), ((2301, 2328), 'dataclasses.field', 'field', ([], {'default_factory': 'dict'}), '(default_factory=dict)\n', (2306, 2328), False, 'from dataclasses import dataclass, field\n'), ((2376, 2403), 'dataclasses.field', 'field', ([], {'default_factory': 'dict'}), '(default_factory=dict)\n', (2381, 2403), False, 'from dataclasses import dataclass, field\n'), ((2456, 2477), 'bkuser_core.categories.loader.register_plugin', 'register_plugin', (['self'], {}), '(self)\n', (2471, 2477), False, 'from bkuser_core.categories.loader import register_plugin\n'), ((3221, 3258), 'bkuser_core.common.models.is_obj_needed_update', 'is_obj_needed_update', (['meta', 'meta_info'], {}), '(meta, meta_info)\n', (3241, 3258), False, 'from bkuser_core.common.models import is_obj_needed_update\n'), ((4844, 4876), 'bkuser_core.categories.models.SyncTask.objects.get', 'SyncTask.objects.get', ([], {'id': 'task_id'}), '(id=task_id)\n', (4864, 4876), False, 'from bkuser_core.categories.models import ProfileCategory, SyncProgress, SyncTask\n'), ((4898, 4961), 'bkuser_core.categories.models.SyncProgress.objects.init_progresses', 'SyncProgress.objects.init_progresses', (['category'], {'task_id': 'task_id'}), '(category, task_id=task_id)\n', (4934, 4961), False, 'from bkuser_core.categories.models import ProfileCategory, 
SyncProgress, SyncTask\n'), ((1527, 1550), 'rest_framework.serializers.CharField', 'serializers.CharField', ([], {}), '()\n', (1548, 1550), False, 'from rest_framework import serializers\n'), ((2734, 2860), 'bkuser_core.user_settings.models.SettingMeta.objects.get_or_create', 'SettingMeta.objects.get_or_create', ([], {'key': 'setting_meta_key', 'category_type': 'self.name', 'namespace': 'namespace', 'defaults': 'meta_info'}), '(key=setting_meta_key, category_type=self.\n name, namespace=namespace, defaults=meta_info)\n', (2767, 2860), False, 'from bkuser_core.user_settings.models import Setting, SettingMeta\n'), ((3881, 3950), 'bkuser_core.categories.models.ProfileCategory.objects.filter', 'ProfileCategory.objects.filter', ([], {'type': 'self.category_type', 'enabled': '(True)'}), '(type=self.category_type, enabled=True)\n', (3911, 3950), False, 'from bkuser_core.categories.models import ProfileCategory, SyncProgress, SyncTask\n'), ((3983, 4087), 'bkuser_core.user_settings.models.Setting.objects.get_or_create', 'Setting.objects.get_or_create', ([], {'meta': 'meta', 'category_id': 'category.id', 'defaults': "{'value': meta.default}"}), "(meta=meta, category_id=category.id, defaults=\n {'value': meta.default})\n", (4012, 4087), False, 'from bkuser_core.user_settings.models import Setting, SettingMeta\n'), ((4413, 4430), 'yaml.safe_load', 'yaml.safe_load', (['f'], {}), '(f)\n', (4427, 4430), False, 'import yaml\n')] |
from django import template
import mistune
register = template.Library()
@register.filter
def markdown(value):
markdown = mistune.Markdown()
return markdown(value)
| [
"mistune.Markdown",
"django.template.Library"
] | [((55, 73), 'django.template.Library', 'template.Library', ([], {}), '()\n', (71, 73), False, 'from django import template\n'), ((129, 147), 'mistune.Markdown', 'mistune.Markdown', ([], {}), '()\n', (145, 147), False, 'import mistune\n')] |
import json
from finite.storage import new_uuid
class Unimplemented(Exception):
pass
class RoleFail(Exception):
pass
SUPERUSER = '*'
""" role used to bypass all permission checks """
ROOT_UUID = '00000000-0000-0000-0000-000000000000'
""" parent UUID used to initialize a stream """
DEFAULT_SCHEMA = 'base'
""" event schema to use if not provided """
class Storage(object):
SOURCE_HEADER = "from finite.storage.factom import Storage"
""" import line used to include this class in generated code """
EVENT = "_EVENT"
""" event table """
STATE = "_STATE"
""" state table """
@staticmethod
def reconnect(**kwargs):
""" create connection pool """
@staticmethod
def drop():
""" drop evenstore tables """
@staticmethod
def migrate():
""" create evenstore tables if missing """
def __init__(self, **kwargs):
""" set object uuid for storage instance """
# REVIEW: should chain be static?
print(kwargs)
def __call__(self, action, **kwargs):
""" append a new event """
# REVIEW: should chainid be a kwarg?
event_id = str(uuid.uuid4())
payload = None
new_state = None
err = None
try:
if 'multiple' in kwargs:
multiple = int(kwargs['multiple'])
else:
multiple = 1
if 'payload' in kwargs:
if isinstance(kwargs['payload'], dict):
payload = json.dumps(kwargs['payload'])
else:
# already json encoded string
payload = kwargs['payload']
else:
# cannot be null
payload = "{}"
def _txn():
# TODO access datastore
#cur.execute(sql.get_state, (self.oid, self.schema))
# FIXME
#previous = cur.fetchone()
raise Unimplemented("FIXME")
if not previous:
current_state = self.initial_vector()
parent = ROOT_UUID
else:
current_state = previous[2]
parent = previous[3]
new_state, role = self.transform(
current_state, action, multiple)
if role not in kwargs['roles'] and SUPERUSER not in kwargs['roles']:
raise RoleFail("Missing Required Role: " + role)
# TODO access datastore
# cur.execute(sql.set_state,
# (self.oid, self.schema, new_state, event_id, new_state, event_id, self.schema, self.oid)
# )
# cur.execute(sql.append_event,
# (event_id, self.oid, self.schema, action, multiple, payload, new_state, parent)
# )
_txn()
except Exception as x:
err = x
return event_id, new_state, err
def events(self):
""" list all events """
def event(self, uuid):
""" get a single event """
def state(self):
""" get state """
| [
"json.dumps"
] | [((1518, 1547), 'json.dumps', 'json.dumps', (["kwargs['payload']"], {}), "(kwargs['payload'])\n", (1528, 1547), False, 'import json\n')] |
# This file is part of Indico.
# Copyright (C) 2002 - 2019 CERN
#
# Indico is free software; you can redistribute it and/or
# modify it under the terms of the MIT License; see the
# LICENSE file for more details.
from datetime import datetime, timedelta
from uuid import uuid4
import pytest
from flask import session
from mock import MagicMock
from oauthlib.oauth2 import InvalidClientIdError
from sqlalchemy.orm.exc import NoResultFound
from indico.modules.oauth.models.applications import OAuthApplication
from indico.modules.oauth.models.tokens import OAuthGrant
from indico.modules.oauth.provider import DisabledClientIdError, load_client, load_token, save_grant, save_token
pytest_plugins = 'indico.modules.oauth.testing.fixtures'
@pytest.fixture
def token_data():
return {'access_token': unicode(uuid4()),
'expires_in': 3600,
'refresh_token': '',
'scope': 'api'}
@pytest.fixture
def create_request(dummy_application, dummy_user):
def _create_request(implicit=False):
request = MagicMock()
request.grant_type = 'authorization_code' if not implicit else None
request.client.client_id = dummy_application.client_id
request.user = dummy_user
return request
return _create_request
@pytest.fixture
def dummy_request(create_request):
return create_request()
def test_load_client(dummy_application):
assert load_client(dummy_application.client_id) == dummy_application
def test_load_client_malformed_id():
with pytest.raises(InvalidClientIdError):
load_client('foobar')
def test_load_client_disabled_app(dummy_application):
dummy_application.is_enabled = False
with pytest.raises(DisabledClientIdError):
load_client(dummy_application.client_id)
@pytest.mark.usefixtures('request_context')
def test_save_grant(mocker, freeze_time):
    """save_grant persists an OAuthGrant carrying the request's data.

    Time is frozen so the computed expiry (now + 120 s) can be compared
    exactly; OAuthGrant.save is mocked so no database access happens.
    """
    freeze_time(datetime.utcnow())
    mocker.patch.object(OAuthGrant, 'save')
    request = MagicMock()
    request.scopes = 'api'
    request.redirect_uri = 'http://localhost:5000'
    client_id = unicode(uuid4())
    code = {'code': 'foobar'}
    # Matches exactly because time is frozen above.
    expires = datetime.utcnow() + timedelta(seconds=120)
    grant = save_grant(client_id, code, request)
    assert grant.client_id == client_id
    assert grant.code == code['code']
    assert grant.redirect_uri == request.redirect_uri
    assert grant.user == session.user
    assert grant.scopes == request.scopes
    assert grant.expires == expires
    assert grant.save.called
@pytest.mark.usefixtures('request_context')
@pytest.mark.parametrize('access_token', (True, False))
def test_load_token_no_access_token(dummy_application, dummy_token, token_data, access_token):
    """load_token resolves a known token string and yields None without one."""
    lookup = dummy_token.access_token if access_token else None
    result = load_token(lookup)
    if not access_token:
        assert result is None
    else:
        assert result == dummy_token
@pytest.mark.usefixtures('request_context')
def test_load_token_malformed_access_token(dummy_application, dummy_token, token_data):
    """A token string that matches nothing resolves to None."""
    result = load_token('foobar')
    assert result is None
@pytest.mark.usefixtures('request_context')
@pytest.mark.parametrize('app_is_enabled', (True, False))
def test_load_token_disabled_app(dummy_application, dummy_token, token_data, app_is_enabled):
    """Tokens of a disabled application must not resolve."""
    dummy_application.is_enabled = app_is_enabled
    result = load_token(dummy_token.access_token)
    if not app_is_enabled:
        assert result is None
    else:
        assert result == dummy_token
@pytest.mark.usefixtures('request_context')
@pytest.mark.parametrize('implicit', (True, False))
def test_save_token(create_request, create_user, token_data, implicit):
    """save_token assigns the token to the session user for the implicit
    flow and to the request user otherwise, stripping expiry/refresh data
    from the response payload."""
    request = create_request(implicit=implicit)
    # Make session.user differ from request.user so ownership is observable.
    session.user = create_user(1)
    token = save_token(token_data, request)
    assert request.user != session.user
    # Fixed operator precedence: the original
    #   assert token.user == session.user if implicit else request.user
    # parsed as `(token.user == session.user) if implicit else request.user`,
    # so for implicit=False it only asserted that request.user is truthy.
    assert token.user == (session.user if implicit else request.user)
    assert token.access_token == token_data['access_token']
    assert token.scopes == set(token_data['scope'].split())
    assert 'expires_in' not in token_data
    assert 'refresh_token' not in token_data
@pytest.mark.parametrize(('initial_scopes', 'requested_scopes', 'expected_scopes'), (
    ({}, 'a', {'a'}),
    ({}, 'a b', {'a', 'b'}),
    ({'a'}, 'a', {'a'}),
    ({'a'}, 'b', {'a', 'b'}),
    ({'a', 'b'}, 'a', {'a', 'b'}),
    ({'a', 'b'}, 'a b', {'a', 'b'}),
))
def test_save_token_scopes(dummy_request, create_token, token_data,
                           initial_scopes, requested_scopes, expected_scopes):
    """Saving a token merges the requested scopes into any existing ones.

    NOTE(review): the access-token assertions below encode that a new
    token string is issued when the request adds no new scopes, and the
    original string is kept otherwise — confirm against save_token's
    token-reuse logic.
    """
    if initial_scopes:
        create_token(scopes=initial_scopes)
    token_data['scope'] = requested_scopes
    initial_access_token = token_data['access_token']
    token = save_token(token_data, dummy_request)
    assert token.scopes == expected_scopes
    if not set(requested_scopes.split()) - set(initial_scopes):
        assert token_data['access_token'] != initial_access_token
    else:
        assert token_data['access_token'] == initial_access_token
@pytest.mark.parametrize('grant_type', ('foo', ''))
def test_save_token_invalid_grant(dummy_request, token_data, grant_type):
    """An unsupported grant type must make save_token raise ValueError."""
    dummy_request.grant_type = grant_type
    with pytest.raises(ValueError):
        # Fixed: pass the configured request object itself. The original
        # called `dummy_request()`, which produced a fresh child mock whose
        # grant_type was an auto-created attribute, not the parametrized
        # value, so the invalid grant type was never actually exercised.
        save_token(token_data, dummy_request)
def test_save_token_no_application(dummy_application, dummy_request, token_data):
    """save_token must fail when the request's client id matches no application."""
    # Use a fresh random client id that is guaranteed not to exist.
    dummy_request.client.client_id = unicode(uuid4())
    assert not OAuthApplication.find(client_id=dummy_request.client.client_id).count()
    with pytest.raises(NoResultFound):
        save_token(token_data, dummy_request)
| [
"mock.MagicMock",
"indico.modules.oauth.provider.load_token",
"datetime.datetime.utcnow",
"indico.modules.oauth.provider.load_client",
"uuid.uuid4",
"indico.modules.oauth.models.applications.OAuthApplication.find",
"pytest.mark.parametrize",
"pytest.raises",
"pytest.mark.usefixtures",
"datetime.ti... | [((1787, 1829), 'pytest.mark.usefixtures', 'pytest.mark.usefixtures', (['"""request_context"""'], {}), "('request_context')\n", (1810, 1829), False, 'import pytest\n'), ((2504, 2546), 'pytest.mark.usefixtures', 'pytest.mark.usefixtures', (['"""request_context"""'], {}), "('request_context')\n", (2527, 2546), False, 'import pytest\n'), ((2548, 2602), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""access_token"""', '(True, False)'], {}), "('access_token', (True, False))\n", (2571, 2602), False, 'import pytest\n'), ((2904, 2946), 'pytest.mark.usefixtures', 'pytest.mark.usefixtures', (['"""request_context"""'], {}), "('request_context')\n", (2927, 2946), False, 'import pytest\n'), ((3078, 3120), 'pytest.mark.usefixtures', 'pytest.mark.usefixtures', (['"""request_context"""'], {}), "('request_context')\n", (3101, 3120), False, 'import pytest\n'), ((3122, 3178), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""app_is_enabled"""', '(True, False)'], {}), "('app_is_enabled', (True, False))\n", (3145, 3178), False, 'import pytest\n'), ((3473, 3515), 'pytest.mark.usefixtures', 'pytest.mark.usefixtures', (['"""request_context"""'], {}), "('request_context')\n", (3496, 3515), False, 'import pytest\n'), ((3517, 3567), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""implicit"""', '(True, False)'], {}), "('implicit', (True, False))\n", (3540, 3567), False, 'import pytest\n'), ((4084, 4335), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (["('initial_scopes', 'requested_scopes', 'expected_scopes')", "(({}, 'a', {'a'}), ({}, 'a b', {'a', 'b'}), ({'a'}, 'a', {'a'}), ({'a'},\n 'b', {'a', 'b'}), ({'a', 'b'}, 'a', {'a', 'b'}), ({'a', 'b'}, 'a b', {\n 'a', 'b'}))"], {}), "(('initial_scopes', 'requested_scopes',\n 'expected_scopes'), (({}, 'a', {'a'}), ({}, 'a b', {'a', 'b'}), ({'a'},\n 'a', {'a'}), ({'a'}, 'b', {'a', 'b'}), ({'a', 'b'}, 'a', {'a', 'b'}), (\n {'a', 'b'}, 'a b', {'a', 'b'})))\n", (4107, 4335), False, 'import 
pytest\n'), ((4997, 5047), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""grant_type"""', "('foo', '')"], {}), "('grant_type', ('foo', ''))\n", (5020, 5047), False, 'import pytest\n'), ((1965, 1976), 'mock.MagicMock', 'MagicMock', ([], {}), '()\n', (1974, 1976), False, 'from mock import MagicMock\n'), ((2187, 2223), 'indico.modules.oauth.provider.save_grant', 'save_grant', (['client_id', 'code', 'request'], {}), '(client_id, code, request)\n', (2197, 2223), False, 'from indico.modules.oauth.provider import DisabledClientIdError, load_client, load_token, save_grant, save_token\n'), ((2780, 2804), 'indico.modules.oauth.provider.load_token', 'load_token', (['access_token'], {}), '(access_token)\n', (2790, 2804), False, 'from indico.modules.oauth.provider import DisabledClientIdError, load_client, load_token, save_grant, save_token\n'), ((3335, 3371), 'indico.modules.oauth.provider.load_token', 'load_token', (['dummy_token.access_token'], {}), '(dummy_token.access_token)\n', (3345, 3371), False, 'from indico.modules.oauth.provider import DisabledClientIdError, load_client, load_token, save_grant, save_token\n'), ((3734, 3765), 'indico.modules.oauth.provider.save_token', 'save_token', (['token_data', 'request'], {}), '(token_data, request)\n', (3744, 3765), False, 'from indico.modules.oauth.provider import DisabledClientIdError, load_client, load_token, save_grant, save_token\n'), ((4707, 4744), 'indico.modules.oauth.provider.save_token', 'save_token', (['token_data', 'dummy_request'], {}), '(token_data, dummy_request)\n', (4717, 4744), False, 'from indico.modules.oauth.provider import DisabledClientIdError, load_client, load_token, save_grant, save_token\n'), ((1044, 1055), 'mock.MagicMock', 'MagicMock', ([], {}), '()\n', (1053, 1055), False, 'from mock import MagicMock\n'), ((1414, 1454), 'indico.modules.oauth.provider.load_client', 'load_client', (['dummy_application.client_id'], {}), '(dummy_application.client_id)\n', (1425, 1454), False, 'from 
indico.modules.oauth.provider import DisabledClientIdError, load_client, load_token, save_grant, save_token\n'), ((1524, 1559), 'pytest.raises', 'pytest.raises', (['InvalidClientIdError'], {}), '(InvalidClientIdError)\n', (1537, 1559), False, 'import pytest\n'), ((1569, 1590), 'indico.modules.oauth.provider.load_client', 'load_client', (['"""foobar"""'], {}), "('foobar')\n", (1580, 1590), False, 'from indico.modules.oauth.provider import DisabledClientIdError, load_client, load_token, save_grant, save_token\n'), ((1697, 1733), 'pytest.raises', 'pytest.raises', (['DisabledClientIdError'], {}), '(DisabledClientIdError)\n', (1710, 1733), False, 'import pytest\n'), ((1743, 1783), 'indico.modules.oauth.provider.load_client', 'load_client', (['dummy_application.client_id'], {}), '(dummy_application.client_id)\n', (1754, 1783), False, 'from indico.modules.oauth.provider import DisabledClientIdError, load_client, load_token, save_grant, save_token\n'), ((1888, 1905), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (1903, 1905), False, 'from datetime import datetime, timedelta\n'), ((2079, 2086), 'uuid.uuid4', 'uuid4', ([], {}), '()\n', (2084, 2086), False, 'from uuid import uuid4\n'), ((2132, 2149), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (2147, 2149), False, 'from datetime import datetime, timedelta\n'), ((2152, 2174), 'datetime.timedelta', 'timedelta', ([], {'seconds': '(120)'}), '(seconds=120)\n', (2161, 2174), False, 'from datetime import datetime, timedelta\n'), ((3046, 3066), 'indico.modules.oauth.provider.load_token', 'load_token', (['"""foobar"""'], {}), "('foobar')\n", (3056, 3066), False, 'from indico.modules.oauth.provider import DisabledClientIdError, load_client, load_token, save_grant, save_token\n'), ((5173, 5198), 'pytest.raises', 'pytest.raises', (['ValueError'], {}), '(ValueError)\n', (5186, 5198), False, 'import pytest\n'), ((5377, 5384), 'uuid.uuid4', 'uuid4', ([], {}), '()\n', (5382, 5384), False, 'from uuid 
import uuid4\n'), ((5482, 5510), 'pytest.raises', 'pytest.raises', (['NoResultFound'], {}), '(NoResultFound)\n', (5495, 5510), False, 'import pytest\n'), ((5520, 5557), 'indico.modules.oauth.provider.save_token', 'save_token', (['token_data', 'dummy_request'], {}), '(token_data, dummy_request)\n', (5530, 5557), False, 'from indico.modules.oauth.provider import DisabledClientIdError, load_client, load_token, save_grant, save_token\n'), ((813, 820), 'uuid.uuid4', 'uuid4', ([], {}), '()\n', (818, 820), False, 'from uuid import uuid4\n'), ((5401, 5464), 'indico.modules.oauth.models.applications.OAuthApplication.find', 'OAuthApplication.find', ([], {'client_id': 'dummy_request.client.client_id'}), '(client_id=dummy_request.client.client_id)\n', (5422, 5464), False, 'from indico.modules.oauth.models.applications import OAuthApplication\n')] |
#!/usr/bin/env python3
# Copyright (c) Facebook, Inc. and its affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
import unittest
from unittest.mock import MagicMock, patch
from fbpcs.service.log_cloudwatch import CloudWatchLogService
# Test fixtures: fake AWS region, CloudWatch log group and log path.
REGION = "us-west-1"
LOG_GROUP = "test-group-name"
LOG_PATH = "test-log-path"
class TestCloudWatchLogService(unittest.TestCase):
    """Unit tests for CloudWatchLogService with a mocked CloudWatch gateway."""

    @patch("fbpcs.gateway.cloudwatch.CloudWatchGateway")
    def test_fetch(self, MockCloudWatchGateway):
        # Build the service, then swap its gateway for a mock instance.
        log_service = CloudWatchLogService(LOG_GROUP, REGION)
        mocked_log = {"test-events": [{"test-event-name": "test-event-data"}]}
        log_service.cloudwatch_gateway = MockCloudWatchGateway()
        log_service.cloudwatch_gateway.fetch = MagicMock(return_value=mocked_log)
        # NOTE(review): this calls the mocked gateway directly, so it only
        # verifies the mock wiring — presumably it was meant to go through
        # log_service.fetch(); confirm against CloudWatchLogService's API.
        returned_log = log_service.cloudwatch_gateway.fetch(LOG_PATH)
        log_service.cloudwatch_gateway.fetch.assert_called()
        self.assertEqual(mocked_log, returned_log)
| [
"unittest.mock.MagicMock",
"fbpcs.service.log_cloudwatch.CloudWatchLogService",
"unittest.mock.patch"
] | [((460, 511), 'unittest.mock.patch', 'patch', (['"""fbpcs.gateway.cloudwatch.CloudWatchGateway"""'], {}), "('fbpcs.gateway.cloudwatch.CloudWatchGateway')\n", (465, 511), False, 'from unittest.mock import MagicMock, patch\n'), ((583, 622), 'fbpcs.service.log_cloudwatch.CloudWatchLogService', 'CloudWatchLogService', (['LOG_GROUP', 'REGION'], {}), '(LOG_GROUP, REGION)\n', (603, 622), False, 'from fbpcs.service.log_cloudwatch import CloudWatchLogService\n'), ((814, 848), 'unittest.mock.MagicMock', 'MagicMock', ([], {'return_value': 'mocked_log'}), '(return_value=mocked_log)\n', (823, 848), False, 'from unittest.mock import MagicMock, patch\n')] |
import adsk.core
import adsk.fusion
import os
from ...lib import fusion360utils as futil
from ... import config
import math
app = adsk.core.Application.get()
ui = app.userInterface

# TODO *** Specify the command's ID information. ***
CMD_ID = f'{config.COMPANY_NAME}_{config.ADDIN_NAME}_Meteor'
CMD_NAME = 'メテオ'
CMD_Description = 'ボディにZの上方向から大量の点を降り注ぎます'

# Whether the command is promoted (pinned) onto the panel.
IS_PROMOTED = True

# TODO *** Define where the command button is created. ***
# This specifies the workspace, tab, panel and the command the button
# is inserted next to. If no neighbouring command is specified, the
# button is inserted at the end.
WORKSPACE_ID = config.design_workspace
TAB_ID = config.design_tab_id
TAB_NAME = config.design_tab_name

PANEL_ID = config.create_panel_id
PANEL_NAME = config.create_panel_name
PANEL_AFTER = config.create_panel_after

COMMAND_BESIDE_ID = ''

# Location of the command icon resources; a subfolder named
# "resources" inside this directory is assumed.
ICON_FOLDER = os.path.join(
    os.path.dirname(
        os.path.abspath(__file__)
    ),
    'resources',
    ''
)

# Local list of event handlers, used to keep references alive so they
# are not released or garbage-collected while the command runs.
local_handlers = []

# Command-input references shared between the event handlers below.
_bodyIpt: adsk.core.SelectionCommandInput = None
_countIpt: adsk.core.IntegerSpinnerCommandInput = None
# Executed when the add-in starts.
def start():
    """Register the command definition and add its button to the UI."""
    # Create the command definition.
    cmd_def = ui.commandDefinitions.addButtonDefinition(
        CMD_ID,
        CMD_NAME,
        CMD_Description,
        ICON_FOLDER
    )

    # Define the handler for the command-created event.
    # It is called when the button is clicked.
    futil.add_handler(cmd_def.commandCreated, command_created)

    # ******** Add a button to the UI so the user can run the command. ********
    # Get the workspace the button will be created in.
    workspace = ui.workspaces.itemById(WORKSPACE_ID)
    toolbar_tab = workspace.toolbarTabs.itemById(TAB_ID)
    if toolbar_tab is None:
        toolbar_tab = workspace.toolbarTabs.add(TAB_ID, TAB_NAME)

    # Get the panel the button will be created in.
    panel = workspace.toolbarPanels.itemById(PANEL_ID)
    if panel is None:
        panel = toolbar_tab.toolbarPanels.add(PANEL_ID, PANEL_NAME, PANEL_AFTER, False)

    # Create the button command control after the specified existing command.
    control = panel.controls.addCommand(cmd_def, COMMAND_BESIDE_ID, False)

    # Specify whether the command is promoted to the main toolbar.
    control.isPromoted = IS_PROMOTED
# Executed when the add-in stops.
def stop():
    """Remove the command's button control and definition from the UI."""
    workspace = ui.workspaces.itemById(WORKSPACE_ID)
    panel = workspace.toolbarPanels.itemById(PANEL_ID)
    control = panel.controls.itemById(CMD_ID)
    definition = ui.commandDefinitions.itemById(CMD_ID)

    # Delete the button control, then the definition, when present.
    if control:
        control.deleteMe()
    if definition:
        definition.deleteMe()
def command_created(args: adsk.core.CommandCreatedEventArgs):
    """Build the command dialog: a body selector and a step-count spinner."""
    futil.log(f'{CMD_NAME}:{args.firingEvent.name}')

    cmd: adsk.core.Command = adsk.core.Command.cast(args.command)
    cmd.isPositionDependent = True

    # **inputs**
    inputs: adsk.core.CommandInputs = cmd.commandInputs

    global _bodyIpt
    _bodyIpt = inputs.addSelectionInput(
        'bodyIptId',
        'ボディ',
        'ボディを選択'
    )
    _bodyIpt.addSelectionFilter('Bodies')

    global _countIpt
    # Spinner: min 1, max 30, step 1, initial value 10.
    _countIpt = inputs.addIntegerSpinnerCommandInput(
        'countIptId',
        '分割数',
        1,
        30,
        1,
        10
    )

    # **event**
    futil.add_handler(
        cmd.destroy,
        command_destroy,
        local_handlers=local_handlers
    )
    futil.add_handler(
        cmd.executePreview,
        command_executePreview,
        local_handlers=local_handlers
    )
def command_destroy(args: adsk.core.CommandEventArgs):
    """Drop all handler references once the command dialog closes."""
    futil.log(f'{CMD_NAME}:{args.firingEvent.name}')

    global local_handlers
    local_handlers = []
def command_executePreview(args: adsk.core.CommandEventArgs):
    """Preview: rain points onto the selected body from above (-Z ray)."""
    futil.log(f'{CMD_NAME}:{args.firingEvent.name}')

    global _countIpt
    # unitMgr: adsk.core.UnitsManager = futil.app.activeProduct.unitsManager
    # pitch = unitMgr.convert(
    #     _countIpt.value,
    #     unitMgr.defaultLengthUnits,
    #     unitMgr.internalUnits
    # )

    global _bodyIpt
    initMeteorSketch(
        _bodyIpt.selection(0).entity,
        adsk.core.Vector3D.create(0,0,-1),
        _countIpt.value,
    )

    # Mark the preview as valid so Fusion keeps it as the final result.
    args.isValidResult = True
# ******************
def initMeteorSketch(
        targetBody: adsk.fusion.BRepBody,
        rayDirection: adsk.core.Vector3D,
        stepCount: int = 10,
        isRev: bool = False) -> adsk.fusion.Sketch:
    """Create a sketch on the XY plane holding the ray-hit points on targetBody.

    Returns None when the ray casting produced no hit points.
    (isRev is currently unused; kept for interface compatibility.)
    """
    comp: adsk.fusion.Component = targetBody.parentComponent

    pnts = getPointsFromRayDirection(
        targetBody,
        rayDirection,
        stepCount,
    )
    if not pnts:
        return None

    skt: adsk.fusion.Sketch = comp.sketches.add(
        comp.xYConstructionPlane
    )
    sktPnts: adsk.fusion.SketchPoints = skt.sketchPoints

    # Defer recomputes while adding many points, for speed.
    skt.isComputeDeferred = True
    # Fixed idiom: a plain loop instead of a side-effect-only list
    # comprehension that built and discarded a throwaway list.
    for p in pnts:
        sktPnts.add(p)
    skt.isComputeDeferred = False

    return skt
def getPointsFromRayDirection(
        targetBody: adsk.fusion.BRepBody,
        rayDirection: adsk.core.Vector3D,
        stepCount: int = 10,
        isRev: bool = False) -> list:
    """Cast rays onto targetBody from a stepCount x stepCount grid of
    origins hovering above its bounding box.

    Returns the list of hit points that lie on targetBody itself.
    (isRev is currently unused; kept for interface compatibility.)
    """
    comp: adsk.fusion.Component = targetBody.parentComponent
    bBox: adsk.core.BoundingBox3D = targetBody.boundingBox
    minPnt: adsk.core.Point3D = bBox.minPoint
    maxPnt: adsk.core.Point3D = bBox.maxPoint

    # Fixed: guard the step computation — the UI spinner allows
    # stepCount == 1, which previously divided by zero.
    if stepCount > 1:
        stepX = (maxPnt.x - minPnt.x) / (stepCount - 1)
        stepY = (maxPnt.y - minPnt.y) / (stepCount - 1)
    else:
        stepX = stepY = 0.0

    # Ray origins: a grid one unit above the top of the bounding box.
    tempPnts = [
        adsk.core.Point3D.create(
            minPnt.x + stepX * idxX,
            minPnt.y + stepY * idxY,
            maxPnt.z + 1
        )
        for idxX in range(stepCount)
        for idxY in range(stepCount)
    ]

    pnts = []
    hitPnts: adsk.core.ObjectCollection = adsk.core.ObjectCollection.create()
    for pnt in tempPnts:
        hitPnts.clear()
        bodies: adsk.core.ObjectCollection = comp.findBRepUsingRay(
            pnt,
            rayDirection,
            adsk.fusion.BRepEntityTypes.BRepBodyEntityType,
            -1.0,
            True,
            hitPnts
        )
        if bodies.count < 1:
            continue

        # Keep only the hit points that belong to the target body itself.
        for body, hit in zip(bodies, hitPnts):
            if body == targetBody:
                pnts.append(hit)

    return pnts
return pnts | [
"os.path.abspath"
] | [((903, 928), 'os.path.abspath', 'os.path.abspath', (['__file__'], {}), '(__file__)\n', (918, 928), False, 'import os\n')] |
import re
import time
from datetime import datetime
from enum import Enum
from fishbase.fish_logger import logger
from .elk_connector import Es
from ..base.tp_base import TpBase, TestStatus, Conf, VerticalContext
from ..base.tp_base import get_params_dict
# LogTestPoint: verifies application log entries stored in Elasticsearch.
class LogESTestPoint(TpBase):

    # Class initialisation.
    # 2018.6.11 create by yanan.wu #748921
    def __init__(self, tp_conf, vertical_context: VerticalContext):
        TpBase.__init__(self, tp_conf, vertical_context)
        self.conf_enum = LogESTestPointEnum
        # UTC start time of the test case, filled in by build_request().
        self.__tc_start_time = ''
        self.vertical_context = vertical_context

    # Prepare the request parameters.
    # 2018.6.11 create by yanan.wu #748921
    def build_request(self):
        """Collect the ES index and keyword from the test-point config and
        record the owning test case's start time (UTC) for the range query."""
        tc_ctx = self.vertical_context.tc_context
        try:
            # Read the request parameters from the configuration.
            self.req_param = {'index': self.tp_conf.get('index'),
                              'key_word': self.tp_conf.get('key_word')}
            # Capture the test case's start time as a UTC ISO timestamp.
            time_struct = time.mktime(tc_ctx.start_time.timetuple())
            self.__tc_start_time = datetime.utcfromtimestamp(
                time_struct).strftime('%Y-%m-%dT%H:%M:%S')
            return self.req_param
        except RuntimeError as e:
            logger.error('tp->log:get req params error: {}'.format(str(e)))
            raise Exception(str(e))

    # Execute the test case.
    # 2018.6.11 create by yanan.wu #748921
    def execute(self, request):
        """Search Elasticsearch between the test case start time and now."""
        try:
            # NOTE(review): es_conf is empty here, so the server_ip /
            # server_port / auth lookups below always raise KeyError —
            # presumably the real ES connection settings are meant to be
            # loaded at this point; confirm before relying on this path.
            es_conf = {}
            # Issue the search request and receive the response.
            es = Es(es_conf['server_ip'], es_conf['server_port'],
                    es_conf['auth_user'], es_conf['auth_password'])
            tp_utc_time = datetime.utcnow().strftime('%Y-%m-%dT%H:%M:%S')
            resp = es.search_match(request.get(LogESTestPointEnum.index.key),
                                   self.__tc_start_time,
                                   tp_utc_time, 100,
                                   request.get(LogESTestPointEnum.key_word.key))
            return resp, ''
        except Exception as e:
            logger.error('tp->log: execute error: {}'.format(str(e)))
            raise Exception(str(e))

    # Verify the expected result.
    # 2018.6.11 create by yanan.wu #748921
    def test_status(self):
        """Compare the ES response against the configured expectation.

        check_type '01' compares the total hit count; '02' matches the
        expected value as a regex against each hit's message field.
        """
        tc_ctx = self.vertical_context.tc_context
        # Read the expected result parameters, when configured.
        if self.tp_conf.get('expect_data'):
            params_name_list = self.tp_conf.get('expect_data').split(',')
            self.expect_dict = get_params_dict(params_name_list, tc_ctx.tc_detail.data)

        if self.tp_conf.get('check_type') == LogCheckType.ROWS_CHECK.value:
            if self.expect_dict.get(LogESTestPointEnum.expect_data.key) == str(tc_ctx.current_tp_context.response.content['hits']['total']):
                return TestStatus.PASSED
            else:
                return TestStatus.NOT_PASSED

        if self.tp_conf.get('check_type') == LogCheckType.REG_CHECK.value:
            for hit in tc_ctx.current_tp_context.response.content['hits']['hits']:
                match_obj = re.search(
                    self.expect_dict.get(LogESTestPointEnum.expect_data.key),
                    hit['_source']['message'])
                if match_obj:
                    return TestStatus.PASSED
            return TestStatus.NOT_PASSED

    # Post-processing hook (nothing to do for this test point).
    def post_handler(self):
        pass
# Log check type.
# 2018.6.12 create by yanan.wu #806640
class LogCheckType(Enum):
    # Row-count check.
    ROWS_CHECK = '01'
    # Regular-expression check.
    REG_CHECK = '02'
# Configuration-key enum for the log test point.
# Each value is a tuple: (key, description, required?, default).
class LogESTestPointEnum(Conf):
    tp_name = 'tp_name', 'tp 名称', True, ''
    key_word = 'key_word', '查询关键字', False, ''
    index = 'index', '查询索引', True, ''
    check_type = 'check_type', '校验方式', True, '01'
    expect_data = 'expect_data', '期望返回结果', True, ''
"datetime.datetime.utcfromtimestamp",
"datetime.datetime.utcnow"
] | [((1053, 1091), 'datetime.datetime.utcfromtimestamp', 'datetime.utcfromtimestamp', (['time_struct'], {}), '(time_struct)\n', (1078, 1091), False, 'from datetime import datetime\n'), ((1637, 1654), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (1652, 1654), False, 'from datetime import datetime\n')] |
import math
from typing import List
import numpy as np
import torch
# Demo: argmax along the last dimension of a random (4, 3, 2) tensor.
a = torch.randn(4, 3, 2)
print(a)
print(a.argmax(dim=-1))
"torch.randn",
"torch.argmax"
] | [((73, 93), 'torch.randn', 'torch.randn', (['(4)', '(3)', '(2)'], {}), '(4, 3, 2)\n', (84, 93), False, 'import torch\n'), ((108, 127), 'torch.argmax', 'torch.argmax', (['a', '(-1)'], {}), '(a, -1)\n', (120, 127), False, 'import torch\n')] |
import sys
import json
import boto3
from botocore.exceptions import ClientError
from . import config
def status_instance(instance_id, dry_run=False):
    """Return the EC2 instance-status response for instance_id.

    When dry_run is True, a DryRun call is issued first to verify
    permissions before the real request. Returns '' on a ClientError.
    """
    ec2 = boto3.client('ec2',
                 region_name='ap-northeast-1',
                 aws_access_key_id=config.AWS_ACCESS_KEY_ID,
                 aws_secret_access_key=config.AWS_SECRET_KEY)
    if dry_run:
        try:
            # Fixed: the boto3 keyword is InstanceIds; the original
            # 'Instance_Ids' raised a ParamValidationError, which is not
            # a ClientError and therefore escaped the handler below.
            ec2.describe_instance_status(
                InstanceIds=[instance_id], DryRun=True)
        except ClientError as e:
            # A successful dry run surfaces as a DryRunOperation error.
            if 'DryRunOperation' not in str(e):
                raise
            print(e)

    try:
        response = ec2.describe_instance_status(
            InstanceIds=[instance_id], DryRun=False)
        print(response)
        return response
    except ClientError as e:
        print(e)
        return ''
def start_instance(instance_id, dry_run=False):
    """Start the given EC2 instance; returns the API response or '' on error."""
    client = boto3.client(
        'ec2',
        region_name='ap-northeast-1',
        aws_access_key_id=config.AWS_ACCESS_KEY_ID,
        aws_secret_access_key=config.AWS_SECRET_KEY,
    )

    if dry_run:
        # Permission check only: success surfaces as a DryRunOperation error.
        try:
            client.start_instances(InstanceIds=[instance_id], DryRun=True)
        except ClientError as err:
            if 'DryRunOperation' not in str(err):
                raise
            print(err)

    try:
        return client.start_instances(InstanceIds=[instance_id], DryRun=False)
    except ClientError as err:
        print(err)
        return ''
def stop_instance(instance_id, dry_run=False):
    """Stop the given EC2 instance; returns the API response or '' on error."""
    client = boto3.client(
        'ec2',
        region_name='ap-northeast-1',
        aws_access_key_id=config.AWS_ACCESS_KEY_ID,
        aws_secret_access_key=config.AWS_SECRET_KEY,
    )

    if dry_run:
        # Permission check only: success surfaces as a DryRunOperation error.
        try:
            client.stop_instances(InstanceIds=[instance_id], DryRun=True)
        except ClientError as err:
            if 'DryRunOperation' not in str(err):
                raise
            print(err)

    try:
        return client.stop_instances(InstanceIds=[instance_id], DryRun=False)
    except ClientError as err:
        print(err)
        return ''
def test():
    """Smoke test: print the status of a hard-coded (placeholder) instance."""
    instance_id = 'i-xxxxxxxxxxxxxxxxx'
    ec2 = boto3.client('ec2',
                 region_name='ap-northeast-1',
                 aws_access_key_id=config.AWS_ACCESS_KEY_ID,
                 aws_secret_access_key=config.AWS_SECRET_KEY)
    try:
        response = ec2.describe_instance_status(
            InstanceIds=[instance_id], DryRun=False)
        print(response)
    except ClientError as e:
        print(e)


if __name__ == '__main__':
    test()
| [
"boto3.client"
] | [((170, 313), 'boto3.client', 'boto3.client', (['"""ec2"""'], {'region_name': '"""ap-northeast-1"""', 'aws_access_key_id': 'config.AWS_ACCESS_KEY_ID', 'aws_secret_access_key': 'config.AWS_SECRET_KEY'}), "('ec2', region_name='ap-northeast-1', aws_access_key_id=config.\n AWS_ACCESS_KEY_ID, aws_secret_access_key=config.AWS_SECRET_KEY)\n", (182, 313), False, 'import boto3\n'), ((961, 1104), 'boto3.client', 'boto3.client', (['"""ec2"""'], {'region_name': '"""ap-northeast-1"""', 'aws_access_key_id': 'config.AWS_ACCESS_KEY_ID', 'aws_secret_access_key': 'config.AWS_SECRET_KEY'}), "('ec2', region_name='ap-northeast-1', aws_access_key_id=config.\n AWS_ACCESS_KEY_ID, aws_secret_access_key=config.AWS_SECRET_KEY)\n", (973, 1104), False, 'import boto3\n'), ((1698, 1841), 'boto3.client', 'boto3.client', (['"""ec2"""'], {'region_name': '"""ap-northeast-1"""', 'aws_access_key_id': 'config.AWS_ACCESS_KEY_ID', 'aws_secret_access_key': 'config.AWS_SECRET_KEY'}), "('ec2', region_name='ap-northeast-1', aws_access_key_id=config.\n AWS_ACCESS_KEY_ID, aws_secret_access_key=config.AWS_SECRET_KEY)\n", (1710, 1841), False, 'import boto3\n'), ((2418, 2561), 'boto3.client', 'boto3.client', (['"""ec2"""'], {'region_name': '"""ap-northeast-1"""', 'aws_access_key_id': 'config.AWS_ACCESS_KEY_ID', 'aws_secret_access_key': 'config.AWS_SECRET_KEY'}), "('ec2', region_name='ap-northeast-1', aws_access_key_id=config.\n AWS_ACCESS_KEY_ID, aws_secret_access_key=config.AWS_SECRET_KEY)\n", (2430, 2561), False, 'import boto3\n')] |
from django import template
register = template.Library()


@register.filter
def has_group(user, name):
    """Template filter: True if the user belongs to the named auth group."""
    return user.groups.filter(name=name).exists()
| [
"django.template.Library"
] | [((40, 58), 'django.template.Library', 'template.Library', ([], {}), '()\n', (56, 58), False, 'from django import template\n')] |
import importlib.util
import inspect
from skaio import log
from skaio.core.publisher import Publisher
from skaio.core.base.task import BaseTask
from skaio.utils.common import get_loop
# Modules scanned for BaseTask subclasses to publish.
tasks = ['samples.simple_tasks']


class Scheduler:
    """Publishes every BaseTask subclass found in the configured modules."""

    def start(self):
        publisher = Publisher()
        loop = get_loop()
        for module_name in tasks:
            module = importlib.import_module(module_name)
            for name, candidate in inspect.getmembers(module):
                # Keep only concrete BaseTask subclasses, skipping the base itself.
                if not inspect.isclass(candidate):
                    continue
                if candidate.__name__ == 'BaseTask' or not issubclass(candidate, BaseTask):
                    continue
                log.info(f'Sending tasks for {name}')
                loop.run_until_complete(publisher.publish(candidate))
| [
"skaio.utils.common.get_loop",
"inspect.getmembers",
"skaio.core.publisher.Publisher",
"inspect.isclass",
"skaio.log.info"
] | [((278, 289), 'skaio.core.publisher.Publisher', 'Publisher', ([], {}), '()\n', (287, 289), False, 'from skaio.core.publisher import Publisher\n'), ((305, 315), 'skaio.utils.common.get_loop', 'get_loop', ([], {}), '()\n', (313, 315), False, 'from skaio.utils.common import get_loop\n'), ((629, 650), 'inspect.getmembers', 'inspect.getmembers', (['m'], {}), '(m)\n', (647, 650), False, 'import inspect\n'), ((713, 750), 'skaio.log.info', 'log.info', (['f"""Sending tasks for {name}"""'], {}), "(f'Sending tasks for {name}')\n", (721, 750), False, 'from skaio import log\n'), ((441, 462), 'inspect.isclass', 'inspect.isclass', (['x[1]'], {}), '(x[1])\n', (456, 462), False, 'import inspect\n')] |
# Copyright (c) 2020 <NAME>
#
# This software is released under the MIT License.
# https://opensource.org/licenses/MIT
from bq_test_kit.interpolators.shell_interpolator import ShellInterpolator
def test_interpolate():
    """ShellInterpolator substitutes both local and global keys."""
    interpolator = ShellInterpolator({"LOCAL_KEY": "VALUE"})
    template = ("Local key has value ${LOCAL_KEY}."
                " Global key has value ${GLOBAL_KEY}")
    actual = interpolator.interpolate(template, {"GLOBAL_KEY": "G_VALUE"})
    expected = ("Local key has value VALUE."
                " Global key has value G_VALUE")
    assert actual == expected
| [
"bq_test_kit.interpolators.shell_interpolator.ShellInterpolator"
] | [((230, 271), 'bq_test_kit.interpolators.shell_interpolator.ShellInterpolator', 'ShellInterpolator', (["{'LOCAL_KEY': 'VALUE'}"], {}), "({'LOCAL_KEY': 'VALUE'})\n", (247, 271), False, 'from bq_test_kit.interpolators.shell_interpolator import ShellInterpolator\n')] |
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License.
import os, uuid, sys, pickle, shutil, io, logging
from azure.storage.filedatalake import DataLakeServiceClient
from azure.core._match_conditions import MatchConditions
from azure.storage.filedatalake._models import ContentSettings
from utility_functions.az_storage_reader import *
# Enter the name of the Azure Data Lake Storage Gen2 Account
DATA_LAKE_NAME=""
# Enter the name of the filesystem
DATA_LAKE_FILE_SYSTEM_NAME=""
# Enter the Primary Key of the Data Lake Account
DATA_LAKE_PRIMARY_KEY=""
file_system_client = connect_to_adls(DATA_LAKE_NAME, DATA_LAKE_PRIMARY_KEY, DATA_LAKE_FILE_SYSTEM_NAME)
dirs_to_write = ["itemFactors", "metadata", "userFactors"]
prep_dirs_for_write(dirs_to_write, "retailai_recommendation_model")
for directory in dirs_to_write:
copy_files_from_directory(file_system_client, "user/trusted-service-user/retailai_recommendation_model/"+directory, directory, "retailai_recommendation_model")
shutil.make_archive("retailai_recommendation_model", 'zip', "model\\retailai_recommendation_model") | [
"shutil.make_archive"
] | [((1024, 1127), 'shutil.make_archive', 'shutil.make_archive', (['"""retailai_recommendation_model"""', '"""zip"""', '"""model\\\\retailai_recommendation_model"""'], {}), "('retailai_recommendation_model', 'zip',\n 'model\\\\retailai_recommendation_model')\n", (1043, 1127), False, 'import os, uuid, sys, pickle, shutil, io, logging\n')] |